right_aws 1.1.0
- data/History.txt +3 -0
- data/Manifest.txt +21 -0
- data/README.txt +96 -0
- data/Rakefile +25 -0
- data/lib/awsbase/benchmark_fix.rb +39 -0
- data/lib/awsbase/right_awsbase.rb +231 -0
- data/lib/ec2/right_ec2.rb +1034 -0
- data/lib/right_aws.rb +63 -0
- data/lib/s3/right_s3.rb +879 -0
- data/lib/s3/right_s3_interface.rb +900 -0
- data/lib/sqs/right_sqs.rb +369 -0
- data/lib/sqs/right_sqs_interface.rb +655 -0
- data/test/awsbase/test_helper.rb +2 -0
- data/test/awsbase/test_right_awsbase.rb +12 -0
- data/test/ec2/test_helper.rb +2 -0
- data/test/ec2/test_right_ec2.rb +67 -0
- data/test/s3/test_helper.rb +2 -0
- data/test/s3/test_right_s3.rb +217 -0
- data/test/sqs/test_helper.rb +2 -0
- data/test/sqs/test_right_sqs.rb +226 -0
- data/test/test_credentials.rb +37 -0
- data/test/ts_right_aws.rb +9 -0
- metadata +102 -0
data/lib/s3/right_s3_interface.rb
@@ -0,0 +1,900 @@
#
# Copyright (c) 2007 RightScale Inc
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#

module RightAws

  class S3Interface

    DEFAULT_HOST           = 's3.amazonaws.com'
    DEFAULT_PORT           = 443
    DEFAULT_PROTOCOL       = 'https'
    REQUEST_TTL            = 30
    DEFAULT_EXPIRES_AFTER  = 1.day
    AMAZON_HEADER_PREFIX   = 'x-amz-'
    AMAZON_METADATA_PREFIX = 'x-amz-meta-'

    # A list of Amazon problems we can handle via AWSErrorHandler.
    @@amazon_problems = RightAws::AMAZON_PROBLEMS

    # TODO TRB 6/19/07 - all the below accessors are shared in the
    # three service gems. See if it is reasonable to stick these
    # in an interface class in right_awsbase that we can mix in.
    #
    # Same for the benchmarking code - all three service gems have
    # the same. Break out into a helper class. Also look at the
    # benchmarking fix as a good thing to move to common code.

    # Current aws_access_key_id
    attr_reader :aws_access_key_id
    # Last HTTP request object
    attr_reader :last_request
    # Last HTTP response object
    attr_reader :last_response
    # Last AWS errors list (used by AWSErrorHandler)
    attr_accessor :last_errors
    # Last AWS request id (used by AWSErrorHandler)
    attr_accessor :last_request_id
    # Logger object
    attr_accessor :logger
    # Initial params hash
    attr_accessor :params

    @@bench_s3  = Benchmark::Tms.new()
    @@bench_xml = Benchmark::Tms.new()

    # Benchmark::Tms instance for S3 access benchmarking.
    def self.bench_s3; @@bench_s3; end

    # Benchmark::Tms instance for XML parsing benchmarking.
    def self.bench_xml; @@bench_xml; end # For benchmark purposes.

    # Returns the list of Amazon service responses which are known to be problems on the Amazon side.
    # We simply retry the request if we get one of them, since the problem will probably disappear.
    # By default returns the same value as the AMAZON_PROBLEMS constant.
    def self.amazon_problems
      @@amazon_problems
    end

    # Sets the list of Amazon-side problems.
    def self.amazon_problems=(problems_list)
      @@amazon_problems = problems_list
    end

    # Creates a new S3Interface instance.
    #
    #  s3 = RightAws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX', {:multi_thread => true, :logger => Logger.new('/tmp/x.log')}) #=> #<RightAws::S3Interface:0xb7b3c27c>
    #
    # Params is a hash:
    #
    #  {:server       => 's3.amazonaws.com', # Amazon service host: 's3.amazonaws.com' (default)
    #   :port         => 443,                # Amazon service port: 80 or 443 (default)
    #   :protocol     => 'https',            # Amazon service protocol: 'http' or 'https' (default)
    #   :multi_thread => true|false,         # Multi-threaded (one connection per thread): true or false (default)
    #   :logger       => Logger Object}      # Logger instance: logs to STDOUT if omitted
    #
    def initialize(aws_access_key_id, aws_secret_access_key, params={})
      @params = params
      raise AwsError.new("AWS access keys are required to operate on S3") \
        if aws_access_key_id.blank? || aws_secret_access_key.blank?

      # TODO TRB 6/19/07 - keys, basic params, and logger are all
      # candidates to break out into a helper class common to all
      # service gems. Stick the helper in right_awsbase.
      @aws_access_key_id     = aws_access_key_id
      @aws_secret_access_key = aws_secret_access_key
      # params
      @params[:server]       ||= DEFAULT_HOST
      @params[:port]         ||= DEFAULT_PORT
      @params[:protocol]     ||= DEFAULT_PROTOCOL
      @params[:multi_thread] ||= defined?(AWS_DAEMON)
      # set logger
      @logger = @params[:logger]
      @logger = RAILS_DEFAULT_LOGGER if !@logger && defined?(RAILS_DEFAULT_LOGGER)
      @logger = Logger.new(STDOUT)   if !@logger
      @logger.info "New #{self.class.name} using #{@params[:multi_thread] ? 'multi' : 'single'}-threaded mode"
    end
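
    # A quick usage sketch; the credentials, bucket and key names below are
    # placeholders:
    #
    #   s3 = RightAws::S3Interface.new('my_access_key_id', 'my_secret_access_key')
    #   s3.create_bucket('my_awesome_bucket')                        #=> true
    #   s3.put('my_awesome_bucket', 'log/current/1.log', 'Ola-la!')  #=> true
    #   s3.get_object('my_awesome_bucket', 'log/current/1.log')      #=> 'Ola-la!'
    #   s3.delete('my_awesome_bucket', 'log/current/1.log')          #=> true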

    # TODO TRB 6/19/07 - Service gem common method
    def on_exception(options={:raise=>true, :log=>true}) # :nodoc:
      RightAws::AwsError::on_aws_exception(self, options)
    end

    # TODO TRB 6/19/07 - Service gem common method

    # Returns +true+ if this instance works in multi_thread mode and +false+ otherwise.
    def multi_thread
      @params[:multi_thread]
    end

    #-----------------------------------------------------------------
    #      Requests
    #-----------------------------------------------------------------
    # Produces the canonical string for signing.
    def canonical_string(method, path, headers={}, expires=nil) # :nodoc:
      s3_headers = {}
      headers.each do |key, value|
        key = key.downcase
        s3_headers[key] = value.to_s.strip if key[/^#{AMAZON_HEADER_PREFIX}|^content-md5$|^content-type$|^date$/o]
      end
      s3_headers['content-type'] ||= ''
      s3_headers['content-md5']  ||= ''
      s3_headers['date']           = ''      if s3_headers.has_key? 'x-amz-date'
      s3_headers['date']           = expires if expires
      # prepare output string
      out_string = "#{method}\n"
      s3_headers.sort { |a, b| a[0] <=> b[0] }.each do |key, value|
        out_string << (key[/^#{AMAZON_HEADER_PREFIX}/o] ? "#{key}:#{value}\n" : "#{value}\n")
      end
      # ignore everything after the question mark...
      out_string << path.gsub(/\?.*$/, '')
      # ...unless there is an acl or torrent parameter
      out_string << '?acl'     if path[/[&?]acl($|&|=)/]
      out_string << '?torrent' if path[/[&?]torrent($|&|=)/]
      out_string
    end
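
    # For illustration, a GET of 'my_awesome_bucket/log/current/1.log' signed on
    # 'Wed, 23 May 2007 09:08:03 GMT' (placeholder values) produces a canonical
    # string of the form:
    #
    #   "GET\n" +                              # HTTP verb
    #   "\n" +                                 # content-md5 (empty)
    #   "\n" +                                 # content-type (empty)
    #   "Wed, 23 May 2007 09:08:03 GMT\n" +    # date (or the expiration time for query links)
    #   "/my_awesome_bucket/log/current/1.log"
    #
    # Any 'x-amz-*' headers would be appended as "name:value" lines between the
    # date and the path, sorted by header name.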

    # Generates a request hash for the REST API.
    def generate_rest_request(method, headers) # :nodoc:
      path = headers[:url]
      path = "/#{path}" unless path[/^\//]
      data = headers[:data]
      # remove unset (== optional) and symbolic keys
      headers.each{ |key, value| headers.delete(key) if (value.nil? || key.is_a?(Symbol)) }
      #
      headers['content-type'] ||= ''
      headers['date'] = Time.now.httpdate
      # create request
      request = "Net::HTTP::#{method.titleize}".constantize.new(URI::escape(CGI::unescape(path)))
      request.body = data if data
      # set request headers and meta headers
      headers.each { |key, value| request[key.to_s] = value }
      # generate auth strings
      auth_string = canonical_string(request.method, request.path, request.to_hash)
      signature   = Base64.encode64(OpenSSL::HMAC.digest(OpenSSL::Digest::Digest.new("sha1"), @aws_secret_access_key, auth_string)).strip
      # set other headers
      request['Authorization'] = "AWS #{@aws_access_key_id}:#{signature}"
      # prepare output hash
      { :request  => request,
        :server   => @params[:server],
        :port     => @params[:port],
        :protocol => @params[:protocol] }
    end
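
    # The resulting request carries the HMAC-SHA1 signature of the canonical
    # string in its Authorization header (access key and signature values below
    # are placeholders):
    #
    #   Authorization: AWS 1E3GDYEOGFJPIT7XXXXXX:xQE0diMbLRepdf3YB+FIEXAMPLE=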

    # Sends a request to Amazon and parses the response.
    # Raises AwsError if anything goes wrong.
    # TODO TRB 6/19/07:
    # request_info is a candidate to move to right_awsbase
    # because it currently appears (in identical form) in right_s3,
    # right_ec2, and right_sqs
    def request_info(request, parser, &block) # :nodoc:
      thread = @params[:multi_thread] ? Thread.current : Thread.main
      thread[:s3_connection] ||= Rightscale::HttpConnection.new(:exception => RightAws::AwsError)
      @last_request  = request[:request]
      @last_response = nil
      response = nil

      if(block != nil)
        @@bench_s3.add! do
          responsehdr = thread[:s3_connection].request(request) do |response|
            if response.is_a?(Net::HTTPSuccess)
              @error_handler = nil
              response.read_body(&block)
            else
              @error_handler = AWSErrorHandler.new(self, parser, @@amazon_problems) unless @error_handler
              check_result   = @error_handler.check(request)
              if check_result
                @error_handler = nil
                return check_result
              end
              raise AwsError.new(@last_errors, @last_response.code, @last_request_id)
            end
          end
          @@bench_xml.add! do
            parser.parse(responsehdr)
          end
          return parser.result
        end
      else
        @@bench_s3.add!{ response = thread[:s3_connection].request(request) }
        # check response for errors...
        @last_response = response
        if response.is_a?(Net::HTTPSuccess)
          @error_handler = nil
          @@bench_xml.add! do
            if parser.kind_of?(RightAWSParser)
              REXML::Document.parse_stream(response.body, parser)
            else
              parser.parse(response)
            end
          end
          return parser.result
        else
          @error_handler = AWSErrorHandler.new(self, parser, @@amazon_problems) unless @error_handler
          check_result   = @error_handler.check(request)
          if check_result
            @error_handler = nil
            return check_result
          end
          raise AwsError.new(@last_errors, @last_response.code, @last_request_id)
        end
      end
    rescue
      @error_handler = nil
      raise
    end
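
    # The two class-level Benchmark::Tms counters accumulate across calls, so
    # after a batch of requests you can inspect where time went (a sketch;
    # +real+ is the standard Benchmark::Tms accessor):
    #
    #   RightAws::S3Interface.bench_s3.real   # seconds spent talking to S3
    #   RightAws::S3Interface.bench_xml.real  # seconds spent parsing XML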

    # Returns an array of the owner's buckets. Each item is a +hash+.
    #
    #  s3.list_all_my_buckets #=>
    #    [{:owner_id           => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
    #      :owner_display_name => "root",
    #      :name               => "bucket_name",
    #      :creation_date      => "2007-04-19T18:47:43.000Z"}, ..., {...}]
    #
    def list_all_my_buckets(headers={})
      req_hash = generate_rest_request('GET', headers.merge(:url=>''))
      request_info(req_hash, S3ListAllMyBucketsParser.new)
    rescue
      on_exception
    end

    # Creates a new bucket. Returns +true+ or an exception.
    #
    #  s3.create_bucket('my_awesome_bucket') #=> true
    #
    def create_bucket(bucket, headers={})
      req_hash = generate_rest_request('PUT', headers.merge(:url=>bucket))
      request_info(req_hash, S3TrueParser.new)
    rescue
      on_exception
    end

    # Deletes a bucket. The bucket must be empty! Returns +true+ or an exception.
    #
    #  s3.delete_bucket('my_awesome_bucket') #=> true
    #
    # See also: force_delete_bucket method
    #
    def delete_bucket(bucket, headers={})
      req_hash = generate_rest_request('DELETE', headers.merge(:url=>bucket))
      request_info(req_hash, S3TrueParser.new)
    rescue
      on_exception
    end

    # Returns an array of the bucket's keys. Each array item (key data) is a +hash+.
    #
    #  s3.list_bucket('my_awesome_bucket', { 'prefix'=>'t', 'marker'=>'', 'max-keys'=>5, 'delimiter'=>'' }) #=>
    #    [{:key                => "test1",
    #      :last_modified      => "2007-05-18T07:00:59.000Z",
    #      :owner_id           => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
    #      :owner_display_name => "root",
    #      :e_tag              => "000000000059075b964b07152d234b70",
    #      :storage_class      => "STANDARD",
    #      :size               => 3,
    #      :service            => {'is_truncated' => false,
    #                              'prefix'       => "t",
    #                              'marker'       => "",
    #                              'name'         => "my_awesome_bucket",
    #                              'max-keys'     => "5"}}, ..., {...}]
    #
    def list_bucket(bucket, options={}, headers={})
      bucket  += '?'+options.map{|k, v| "#{k.to_s}=#{CGI::escape v.to_s}"}.join('&') unless options.blank?
      req_hash = generate_rest_request('GET', headers.merge(:url=>bucket))
      request_info(req_hash, S3ListBucketParser.new)
    rescue
      on_exception
    end

    # Incrementally lists the contents of a bucket. Yields the following hash to a block:
    #
    #  s3.incrementally_list_bucket('my_awesome_bucket', { 'prefix'=>'t', 'marker'=>'', 'max-keys'=>5, 'delimiter'=>'' }) yields
    #   {
    #     :name            => 'bucketname',
    #     :prefix          => 'subfolder/',
    #     :marker          => 'fileN.jpg',
    #     :max_keys        => 234,
    #     :delimiter       => '/',
    #     :is_truncated    => true,
    #     :next_marker     => 'fileX.jpg',
    #     :contents        => [
    #       { :key                => "file1",
    #         :last_modified      => "2007-05-18T07:00:59.000Z",
    #         :e_tag              => "000000000059075b964b07152d234b70",
    #         :size               => 3,
    #         :storage_class      => "STANDARD",
    #         :owner_id           => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
    #         :owner_display_name => "root"
    #       }, { :key, ...}, ... {:key, ...}
    #     ],
    #     :common_prefixes => [
    #       "prefix1",
    #       "prefix2",
    #       ...,
    #       "prefixN"
    #     ]
    #   }
    def incrementally_list_bucket(bucket, options={}, headers={}, &block)
      internal_options = options.dup
      begin
        internal_bucket  = bucket.dup
        internal_bucket += '?'+internal_options.map{|k, v| "#{k.to_s}=#{CGI::escape v.to_s}"}.join('&') unless internal_options.blank?
        req_hash = generate_rest_request('GET', headers.merge(:url=>internal_bucket))
        response = request_info(req_hash, S3ImprovedListBucketParser.new)
        there_are_more_keys = response[:is_truncated]
        if(there_are_more_keys)
          if(response[:next_marker])
            internal_options['marker'] = response[:next_marker]
          else
            internal_options['marker'] = response[:contents].last[:key]
          end
          internal_options['max-keys'] ? (internal_options['max-keys'] -= response[:contents].length) : nil
        end
        yield response
      end while there_are_more_keys
      true
    rescue
      on_exception
    end
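
    # A sketch of collecting every key under a prefix, page by page (bucket and
    # prefix are placeholders):
    #
    #   all_keys = []
    #   s3.incrementally_list_bucket('my_awesome_bucket', 'prefix' => 'logs/') do |page|
    #     all_keys.concat(page[:contents].map { |item| item[:key] })
    #   end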

    # Saves an object to Amazon. Returns +true+ or an exception.
    # Any header starting with AMAZON_METADATA_PREFIX is considered user metadata. It will be stored with the object and returned when you retrieve the object. The total size of the HTTP request, not including the body, must be less than 4 KB.
    #
    #  s3.put('my_awesome_bucket', 'log/current/1.log', 'Ola-la!', 'x-amz-meta-family'=>'Woho556!') #=> true
    #
    def put(bucket, key, data=nil, headers={})
      req_hash = generate_rest_request('PUT', headers.merge(:url=>"#{bucket}/#{CGI::escape key}", :data=>data))
      request_info(req_hash, S3TrueParser.new)
    rescue
      on_exception
    end

    # Retrieves object data from Amazon. Returns a +hash+ or an exception.
    #
    #  s3.get('my_awesome_bucket', 'log/current/1.log') #=>
    #
    #  {:object  => "Ola-la!",
    #   :headers => {"last-modified"     => "Wed, 23 May 2007 09:08:04 GMT",
    #                "content-type"      => "",
    #                "etag"              => "\"000000000096f4ee74bc4596443ef2a4\"",
    #                "date"              => "Wed, 23 May 2007 09:08:03 GMT",
    #                "x-amz-id-2"        => "ZZZZZZZZZZZZZZZZZZZZ1HJXZoehfrS4QxcxTdNGldR7w/FVqblP50fU8cuIMLiu",
    #                "x-amz-meta-family" => "Woho556!",
    #                "x-amz-request-id"  => "0000000C246D770C",
    #                "server"            => "AmazonS3",
    #                "content-length"    => "7"}}
    #
    # If a block is provided, it yields incrementally to the block as
    # the response is read. For large responses this is ideal, as the
    # response can be 'streamed'. The hash containing the header fields is
    # still returned.
    # Example:
    #  foo = File.new('./chunder.txt', File::CREAT|File::RDWR)
    #  rhdr = s3.get('aws-test', 'Cent5V1_7_1.img.part.00') do |chunk|
    #    foo.write(chunk)
    #  end
    #  foo.close
    #
    def get(bucket, key, headers={}, &block)
      req_hash = generate_rest_request('GET', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"))
      request_info(req_hash, S3HttpResponseBodyParser.new, &block)
    rescue
      on_exception
    end

    # Retrieves object metadata. Returns a +hash+ of http_response_headers.
    #
    #  s3.head('my_awesome_bucket', 'log/current/1.log') #=>
    #    {"last-modified"     => "Wed, 23 May 2007 09:08:04 GMT",
    #     "content-type"      => "",
    #     "etag"              => "\"000000000096f4ee74bc4596443ef2a4\"",
    #     "date"              => "Wed, 23 May 2007 09:08:03 GMT",
    #     "x-amz-id-2"        => "ZZZZZZZZZZZZZZZZZZZZ1HJXZoehfrS4QxcxTdNGldR7w/FVqblP50fU8cuIMLiu",
    #     "x-amz-meta-family" => "Woho556!",
    #     "x-amz-request-id"  => "0000000C246D770C",
    #     "server"            => "AmazonS3",
    #     "content-length"    => "7"}
    #
    def head(bucket, key, headers={})
      req_hash = generate_rest_request('HEAD', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"))
      request_info(req_hash, S3HttpResponseHeadParser.new)
    rescue
      on_exception
    end

    # Deletes a key. Returns +true+ or an exception.
    #
    #  s3.delete('my_awesome_bucket', 'log/current/1.log') #=> true
    #
    def delete(bucket, key='', headers={})
      req_hash = generate_rest_request('DELETE', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"))
      request_info(req_hash, S3TrueParser.new)
    rescue
      on_exception
    end


    # Retrieves the ACL (access control policy) for a bucket or object. Returns a hash of headers and an xml doc with the ACL data. See: http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTAccessPolicy.html.
    #
    #  s3.get_acl('my_awesome_bucket', 'log/current/1.log') #=>
    #    {:headers => {"x-amz-id-2"        => "B3BdDMDUz+phFF2mGBH04E46ZD4Qb9HF5PoPHqDRWBv+NVGeA3TOQ3BkVvPBjgxX",
    #                  "content-type"      => "application/xml;charset=ISO-8859-1",
    #                  "date"              => "Wed, 23 May 2007 09:40:16 GMT",
    #                  "x-amz-request-id"  => "B183FA7AB5FBB4DD",
    #                  "server"            => "AmazonS3",
    #                  "transfer-encoding" => "chunked"},
    #     :object  => "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<AccessControlPolicy xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"><Owner>
    #       <ID>16144ab2929314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a</ID><DisplayName>root</DisplayName></Owner>
    #       <AccessControlList><Grant><Grantee xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"CanonicalUser\"><ID>
    #       16144ab2929314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a</ID><DisplayName>root</DisplayName></Grantee>
    #       <Permission>FULL_CONTROL</Permission></Grant></AccessControlList></AccessControlPolicy>" }
    #
    def get_acl(bucket, key='', headers={})
      key      = key.blank? ? '' : "/#{CGI::escape key}"
      req_hash = generate_rest_request('GET', headers.merge(:url=>"#{bucket}#{key}?acl"))
      request_info(req_hash, S3HttpResponseBodyParser.new)
    rescue
      on_exception
    end

    # Retrieves the ACL (access control policy) for a bucket or object.
    # Returns a hash of {:owner, :grantees}
    #
    #  s3.get_acl_parse('my_awesome_bucket', 'log/current/1.log') #=>
    #
    #  { :grantees=>
    #    { "16...2a"=>
    #      { :display_name=>"root",
    #        :permissions=>["FULL_CONTROL"],
    #        :attributes=>
    #         { "xsi:type"=>"CanonicalUser",
    #           "xmlns:xsi"=>"http://www.w3.org/2001/XMLSchema-instance"}},
    #      "http://acs.amazonaws.com/groups/global/AllUsers"=>
    #       { :display_name=>"AllUsers",
    #         :permissions=>["READ"],
    #         :attributes=>
    #          { "xsi:type"=>"Group",
    #            "xmlns:xsi"=>"http://www.w3.org/2001/XMLSchema-instance"}}},
    #    :owner=>
    #      { :id=>"16..2a",
    #        :display_name=>"root"}}
    #
    def get_acl_parse(bucket, key='', headers={})
      key      = key.blank? ? '' : "/#{CGI::escape key}"
      req_hash = generate_rest_request('GET', headers.merge(:url=>"#{bucket}#{key}?acl"))
      acl      = request_info(req_hash, S3AclParser.new)
      result   = {}
      result[:owner]    = acl[:owner]
      result[:grantees] = {}
      acl[:grantees].each do |grantee|
        key = grantee[:id] || grantee[:uri]
        if result[:grantees].key?(key)
          result[:grantees][key][:permissions] << grantee[:permissions]
        else
          result[:grantees][key] =
            { :display_name => grantee[:display_name] || grantee[:uri].to_s[/[^\/]*$/],
              :permissions  => grantee[:permissions].to_a,
              :attributes   => grantee[:attributes] }
        end
      end
      result
    rescue
      on_exception
    end

    # Sets the ACL on a bucket or object.
    def put_acl(bucket, key, acl_xml_doc, headers={})
      key      = key.blank? ? '' : "/#{CGI::escape key}"
      req_hash = generate_rest_request('PUT', headers.merge(:url=>"#{bucket}#{key}?acl", :data=>acl_xml_doc))
      request_info(req_hash, S3HttpResponseBodyParser.new)
    rescue
      on_exception
    end
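
    # A sketch of round-tripping an ACL: fetch the policy document, adjust it,
    # and put it back (bucket and key names are placeholders):
    #
    #   acl_xml = s3.get_acl('my_awesome_bucket', 'log/current/1.log')[:object]
    #   # ... modify acl_xml ...
    #   s3.put_acl('my_awesome_bucket', 'log/current/1.log', acl_xml)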

    # Retrieves the ACL (access control policy) for a bucket. Returns a hash of headers and an xml doc with the ACL data.
    def get_bucket_acl(bucket, headers={})
      return get_acl(bucket, '', headers)
    rescue
      on_exception
    end

    # Sets the ACL on a bucket only.
    def put_bucket_acl(bucket, acl_xml_doc, headers={})
      return put_acl(bucket, '', acl_xml_doc, headers)
    rescue
      on_exception
    end


    # Removes all keys from a bucket. Returns +true+ or an exception.
    #
    #  s3.clear_bucket('my_awesome_bucket') #=> true
    #
    def clear_bucket(bucket)
      incrementally_list_bucket(bucket) do |results|
        results[:contents].each { |key| delete(bucket, key[:key]) }
      end
      true
    rescue
      on_exception
    end

    # Deletes all keys in a bucket, then deletes the bucket. Returns +true+ or an exception.
    #
    #  s3.force_delete_bucket('my_awesome_bucket')
    #
    def force_delete_bucket(bucket)
      clear_bucket(bucket)
      delete_bucket(bucket)
    rescue
      on_exception
    end

    # Deletes all keys for which 'folder_key' can be treated as a 'folder' name. Returns an array of the string keys that have been deleted.
    #
    #  s3.list_bucket('my_awesome_bucket').map{|key_data| key_data[:key]} #=> ['test','test/2/34','test/3','test1','test1/logs']
    #  s3.delete_folder('my_awesome_bucket','test') #=> ['test','test/2/34','test/3']
    #
    def delete_folder(bucket, folder_key, separator='/')
      folder_key.chomp!(separator)
      allkeys = []
      incrementally_list_bucket(bucket, { 'prefix' => folder_key }) do |results|
        keys = results[:contents].map{ |s3_key| s3_key[:key][/^#{folder_key}($|#{separator}.*)/] ? s3_key[:key] : nil}.compact
        keys.each{ |key| delete(bucket, key) }
        allkeys << keys
      end
      allkeys
    rescue
      on_exception
    end

    # Retrieves object data only (headers are omitted). Returns a +string+ or an exception.
    #
    #  s3.get_object('my_awesome_bucket', 'log/current/1.log') #=> 'Ola-la!'
    #
    def get_object(bucket, key, headers={})
      get(bucket, key, headers)[:object]
    rescue
      on_exception
    end

    #-----------------------------------------------------------------
    #      Query API: Links
    #-----------------------------------------------------------------

    # Generates a link for the QUERY API
    def generate_link(method, headers={}, expires=nil) #:nodoc:
      path = headers[:url]
      path = "/#{path}" unless path[/^\//]
      # expiration time
      expires ||= DEFAULT_EXPIRES_AFTER
      expires   = Time.now.utc.since(expires) if expires.is_a?(Fixnum) && (expires < 1.year)
      expires   = expires.to_i
      # remove unset (== optional) and symbolic keys
      headers.each{ |key, value| headers.delete(key) if (value.nil? || key.is_a?(Symbol)) }
      # generate auth strings
      auth_string = canonical_string(method, path, headers, expires)
      signature   = CGI::escape(Base64.encode64(OpenSSL::HMAC.digest(OpenSSL::Digest::Digest.new("sha1"), @aws_secret_access_key, auth_string)).strip)
      # path building
      addon = "Signature=#{signature}&Expires=#{expires}&AWSAccessKeyId=#{@aws_access_key_id}"
      path += path[/\?/] ? "&#{addon}" : "?#{addon}"
      "#{@params[:protocol]}://#{@params[:server]}:#{@params[:port]}#{path}"
    rescue
      on_exception
    end
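
    # The generated URL simply appends the signature data to the resource path.
    # A sketch, assuming the default server/port/protocol (signature, expiration
    # timestamp and access key below are placeholders):
    #
    #   s3.generate_link('GET', {:url => 'my_awesome_bucket/log/current/1.log'}, 60) #=>
    #     "https://s3.amazonaws.com:443/my_awesome_bucket/log/current/1.log?Signature=...&Expires=1179912345&AWSAccessKeyId=1E3GDYEOGFJPIT7XXXXXX"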

    # Generates a link for 'ListAllMyBuckets'.
    #
    #  s3.list_all_my_buckets_link #=> url string
    #
    def list_all_my_buckets_link(expires=nil, headers={})
      generate_link('GET', headers.merge(:url=>''), expires)
    rescue
      on_exception
    end

    # Generates a link for 'CreateBucket'.
    #
    #  s3.create_bucket_link('my_awesome_bucket') #=> url string
    #
    def create_bucket_link(bucket, expires=nil, headers={})
      generate_link('PUT', headers.merge(:url=>bucket), expires)
    rescue
      on_exception
    end

    # Generates a link for 'DeleteBucket'.
    #
    #  s3.delete_bucket_link('my_awesome_bucket') #=> url string
    #
    def delete_bucket_link(bucket, expires=nil, headers={})
      generate_link('DELETE', headers.merge(:url=>bucket), expires)
    rescue
      on_exception
    end

    # Generates a link for 'ListBucket'.
    #
    #  s3.list_bucket_link('my_awesome_bucket') #=> url string
    #
    def list_bucket_link(bucket, options=nil, expires=nil, headers={})
      bucket += '?' + options.map{|k, v| "#{k.to_s}=#{CGI::escape v.to_s}"}.join('&') unless options.blank?
      generate_link('GET', headers.merge(:url=>bucket), expires)
    rescue
      on_exception
    end

    # Generates a link for 'PutObject'.
    #
    #  s3.put_link('my_awesome_bucket', key, object) #=> url string
    #
    def put_link(bucket, key, data=nil, expires=nil, headers={})
      generate_link('PUT', headers.merge(:url=>"#{bucket}/#{CGI::escape key}", :data=>data), expires)
    rescue
      on_exception
    end

    # Generates a link for 'GetObject'.
    #
    #  s3.get_link('my_awesome_bucket', key) #=> url string
    #
    def get_link(bucket, key, expires=nil, headers={})
      generate_link('GET', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"), expires)
    rescue
      on_exception
    end

    # Generates a link for 'HeadObject'.
    #
    #  s3.head_link('my_awesome_bucket', key) #=> url string
    #
    def head_link(bucket, key, expires=nil, headers={})
      generate_link('HEAD', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"), expires)
    rescue
      on_exception
    end

    # Generates a link for 'DeleteObject'.
    #
    #  s3.delete_link('my_awesome_bucket', key) #=> url string
    #
    def delete_link(bucket, key, expires=nil, headers={})
      generate_link('DELETE', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"), expires)
    rescue
      on_exception
    end


    # Generates a link for 'GetACL'.
    #
    #  s3.get_acl_link('my_awesome_bucket', key) #=> url string
    #
    def get_acl_link(bucket, key='', headers={})
      return generate_link('GET', headers.merge(:url=>"#{bucket}/#{CGI::escape key}?acl"))
    rescue
      on_exception
    end

    # Generates a link for 'PutACL'.
    #
    #  s3.put_acl_link('my_awesome_bucket', key) #=> url string
    #
    def put_acl_link(bucket, key='', headers={})
      return generate_link('PUT', headers.merge(:url=>"#{bucket}/#{CGI::escape key}?acl"))
    rescue
      on_exception
    end

    # Generates a link for 'GetBucketACL'.
    #
    #  s3.get_bucket_acl_link('my_awesome_bucket') #=> url string
    #
    def get_bucket_acl_link(bucket, headers={})
      return get_acl_link(bucket, '', headers)
    rescue
      on_exception
    end

    # Generates a link for 'PutBucketACL'.
    #
    #  s3.put_bucket_acl_link('my_awesome_bucket', acl_xml_doc) #=> url string
    #
    def put_bucket_acl_link(bucket, acl_xml_doc, headers={})
      return put_acl_link(bucket, '', headers)
    rescue
      on_exception
    end

    #-----------------------------------------------------------------
    #      PARSERS:
    #-----------------------------------------------------------------

    class S3ListAllMyBucketsParser < RightAWSParser # :nodoc:
      def reset
        @result = []
        @owner  = {}
      end
      def tagstart(name, attributes)
        @current_bucket = {} if name == 'Bucket'
      end
      def tagend(name)
        case name
          when 'ID'          ; @owner[:owner_id]               = @text
          when 'DisplayName' ; @owner[:owner_display_name]     = @text
          when 'Name'        ; @current_bucket[:name]          = @text
          when 'CreationDate'; @current_bucket[:creation_date] = @text
          when 'Bucket'      ; @result << @current_bucket.merge(@owner)
        end
      end
    end

    class S3ListBucketParser < RightAWSParser # :nodoc:
      def reset
        @result      = []
        @service     = {}
        @current_key = {}
      end
      def tagstart(name, attributes)
        @current_key = {} if name == 'Contents'
      end
      def tagend(name)
        case name
          # service info
          when 'Name'        ; @service['name']         = @text
          when 'Prefix'      ; @service['prefix']       = @text
          when 'Marker'      ; @service['marker']       = @text
          when 'MaxKeys'     ; @service['max-keys']     = @text
          when 'Delimiter'   ; @service['delimiter']    = @text
          when 'IsTruncated' ; @service['is_truncated'] = (@text =~ /false/ ? false : true)
          # key data
          when 'Key'         ; @current_key[:key]                = @text
          when 'LastModified'; @current_key[:last_modified]      = @text
          when 'ETag'        ; @current_key[:e_tag]              = @text
          when 'Size'        ; @current_key[:size]               = @text.to_i
          when 'StorageClass'; @current_key[:storage_class]      = @text
          when 'ID'          ; @current_key[:owner_id]           = @text
          when 'DisplayName' ; @current_key[:owner_display_name] = @text
          when 'Contents'    ; @current_key[:service] = @service; @result << @current_key
        end
      end
    end

    class S3ImprovedListBucketParser < RightAWSParser # :nodoc:
      def reset
        @result                   = {}
        @result[:contents]        = []
        @result[:common_prefixes] = []
        @contents                 = []
        @current_key              = {}
        @common_prefixes          = []
        @in_common_prefixes       = false
      end
      def tagstart(name, attributes)
        @current_key        = {}   if name == 'Contents'
        @in_common_prefixes = true if name == 'CommonPrefixes'
      end
      def tagend(name)
        case name
          # service info
          when 'Name'        ; @result[:name] = @text
          # Amazon uses the same tag for the search prefix and for the entries
          # in common prefix...so use our simple flag to see which element
          # we are parsing
          when 'Prefix'      ; @in_common_prefixes ? @common_prefixes << @text : @result[:prefix] = @text
          when 'Marker'      ; @result[:marker]       = @text
          when 'MaxKeys'     ; @result[:max_keys]     = @text
          when 'Delimiter'   ; @result[:delimiter]    = @text
          when 'IsTruncated' ; @result[:is_truncated] = (@text =~ /false/ ? false : true)
          when 'NextMarker'  ; @result[:next_marker]  = @text
          # key data
          when 'Key'         ; @current_key[:key]                = @text
          when 'LastModified'; @current_key[:last_modified]      = @text
          when 'ETag'        ; @current_key[:e_tag]              = @text
          when 'Size'        ; @current_key[:size]               = @text.to_i
          when 'StorageClass'; @current_key[:storage_class]      = @text
          when 'ID'          ; @current_key[:owner_id]           = @text
          when 'DisplayName' ; @current_key[:owner_display_name] = @text
          when 'Contents'    ; @result[:contents] << @current_key
          # Common Prefix stuff
          when 'CommonPrefixes' ; @result[:common_prefixes] = @common_prefixes; @in_common_prefixes = false
        end
      end
    end

    #-----------------------------------------------------------------
    #      PARSERS: Non XML
    #-----------------------------------------------------------------

    class S3HttpResponseParser # :nodoc:
      attr_reader :result
      def parse(response)
        @result = response
      end
      def headers_to_string(headers)
        result = {}
        headers.each do |key, value|
          value = value.to_s if value.is_a?(Array) && value.size < 2
          result[key] = value
        end
        result
      end
    end

    class S3TrueParser < S3HttpResponseParser # :nodoc:
      def parse(response)
        @result = response.is_a?(Net::HTTPSuccess)
      end
    end

    class S3HttpResponseBodyParser < S3HttpResponseParser # :nodoc:
      def parse(response)
        @result = {
          :object  => response.body,
          :headers => headers_to_string(response.to_hash)
        }
      end
    end

    class S3HttpResponseHeadParser < S3HttpResponseParser # :nodoc:
      def parse(response)
        @result = headers_to_string(response.to_hash)
      end
    end


    class S3AclParser < RightAWSParser # :nodoc:
      def reset
        @result          = {:grantees=>[], :owner=>{}}
        @current_grantee = {}
      end
      def tagstart(name, attributes)
        @current_grantee = { :attributes => attributes } if name=='Grantee'
      end
      def tagend(name)
        case name
          # service info
          when 'ID'
            if @xmlpath == 'AccessControlPolicy/Owner'
              @result[:owner][:id] = @text
            else
              @current_grantee[:id] = @text
            end
          when 'DisplayName'
            if @xmlpath == 'AccessControlPolicy/Owner'
              @result[:owner][:display_name] = @text
            else
              @current_grantee[:display_name] = @text
            end
          when 'URI'
            @current_grantee[:uri] = @text
          when 'Permission'
            @current_grantee[:permissions] = @text
          when 'Grant'
            @result[:grantees] << @current_grantee
        end
      end
    end

  end

end