s3-ruby 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/History.txt +8 -0
- data/Manifest.txt +5 -0
- data/README.txt +54 -0
- data/Rakefile +15 -0
- data/lib/s3-ruby.rb +661 -0
- data/test/test_s3-ruby.rb +0 -0
- metadata +57 -0
data/History.txt
ADDED
data/Manifest.txt
ADDED
data/README.txt
ADDED
@@ -0,0 +1,54 @@
s3-ruby
by Chris Carter <cdcarter@gmail.com>
http://metacampsite.com/s3-ruby
based on the Amazon sample code

== SYNOPSIS:

s3-ruby is a library to access the Amazon S3 (Simple Storage Service) REST API.

== FEATURES/PROBLEMS:

* Full REST API support
* Support for streams
* no tests at this point

== DESCRIPTION:

s3-ruby interfaces with the REST API of Amazon's S3 (Simple Storage Service)
to give your ruby applications full access to the service.

== REQUIREMENTS:

+ ruby/hmac

== INSTALL:

+ sudo gem install s3-ruby

== LICENSE:

(The MIT License)

Copyright (c) 2006 Concentration Studios

Base code (c) 2006 Amazon Digital Services, Inc. or its affiliates.

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
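
For context, a minimal usage sketch of the library described above, based on the AWSAuthConnection API defined in lib/s3-ruby.rb later in this diff; the credentials, bucket name, and key are placeholders and error handling is omitted:

require 'rubygems'
require 's3-ruby'

# Placeholder credentials -- substitute real AWS keys.
conn = S3::AWSAuthConnection.new('AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')

# Create a bucket, store an object, then read it back.
conn.create_bucket('example-bucket')
conn.put('example-bucket', 'hello.txt', 'Hello from s3-ruby')
puts conn.get('example-bucket', 'hello.txt').object.data

# Convenience helpers return plain arrays of bucket names and keys.
p conn.list_buckets
p conn.list_bucket_contents('example-bucket')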
data/Rakefile
ADDED
@@ -0,0 +1,15 @@
# -*- ruby -*-

require 'rubygems'
require 'hoe'
require './lib/s3-ruby.rb'

Hoe.new('s3-ruby', S3::RubyBindings::VERSION) do |p|
  p.rubyforge_name = 's3-ruby'
  p.summary = 'A library for the Amazon S3 (Simple Storage Service) web service REST API'
  p.description = p.paragraphs_of('README.txt', 2..4).join("\n\n")
  p.url = p.paragraphs_of('README.txt', 0).first.split(/\n/)[1..-1]
  p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
end

# vim: syntax=Ruby
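
Hoe's paragraphs_of helper, used in the Rakefile above to derive the gem description, url, and changes, selects blank-line-separated paragraphs of a text file by index. As a rough illustration only (a hypothetical sketch, not Hoe's actual implementation):

# Hypothetical sketch of a paragraphs_of-style helper: split a file on
# blank lines and pick paragraphs out by index or range.
def paragraphs_of_sketch(path, *selectors)
  paragraphs = File.read(path).split(/\n\n+/).map { |p| p.strip }
  selectors.map { |s| paragraphs[s] }.flatten.compact
end

# e.g. paragraphs_of_sketch('README.txt', 0) returns the title block,
# and a range such as 2..4 returns several consecutive paragraphs.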
data/lib/s3-ruby.rb
ADDED
@@ -0,0 +1,661 @@
#!/usr/bin/env ruby

# Base Code is Copyright (c) 2006 Amazon Digital Services, Inc.
#
# Modifications made by Concentration Studios under the MIT license

require 'base64'
require 'cgi'
require 'openssl'
require 'digest/sha1'
require 'net/https'
require 'rexml/document'
require 'time'

# this wasn't added until v 1.8.3
if (RUBY_VERSION < '1.8.3')
  class Net::HTTP::Delete < Net::HTTPRequest
    METHOD = 'DELETE'
    REQUEST_HAS_BODY = false
    RESPONSE_HAS_BODY = true
  end
end

# Fix Net::HTTP to allow streaming
module Net
  class HTTP
    alias __request__ request

    def request(req, body = nil, &block)
      if body != nil && body.respond_to?(:read)
        req.body_stream = body
        return __request__(req, nil, &block)
      else
        return __request__(req, body, &block)
      end
    end
  end
end

# this module has two big classes: AWSAuthConnection and
# QueryStringAuthGenerator. both use identical apis, but the first actually
# performs the operation, while the second simply outputs urls with the
# appropriate authentication query string parameters, which could be used
# in another tool (such as your web browser for GETs).
module S3
  DEFAULT_HOST = 's3.amazonaws.com'
  PORTS_BY_SECURITY = { true => 443, false => 80 }
  METADATA_PREFIX = 'x-amz-meta-'
  AMAZON_HEADER_PREFIX = 'x-amz-'

  class RubyBindings
    VERSION = '0.1.0'
  end

  # builds the canonical string for signing.
  def S3.canonical_string(method, path, headers={}, expires=nil)
    interesting_headers = {}
    headers.each do |key, value|
      lk = key.downcase
      if (lk == 'content-md5' or
          lk == 'content-type' or
          lk == 'date' or
          lk =~ /^#{AMAZON_HEADER_PREFIX}/o)
        interesting_headers[lk] = value.to_s.strip
      end
    end

    # these fields get empty strings if they don't exist.
    interesting_headers['content-type'] ||= ''
    interesting_headers['content-md5'] ||= ''

    # just in case someone used this. it's not necessary in this lib.
    if interesting_headers.has_key? 'x-amz-date'
      interesting_headers['date'] = ''
    end

    # if you're using expires for query string auth, then it trumps date
    # (and x-amz-date)
    if not expires.nil?
      interesting_headers['date'] = expires
    end

    buf = "#{method}\n"
    interesting_headers.sort { |a, b| a[0] <=> b[0] }.each do |key, value|
      if key =~ /^#{AMAZON_HEADER_PREFIX}/o
        buf << "#{key}:#{value}\n"
      else
        buf << "#{value}\n"
      end
    end

    # ignore everything after the question mark...
    buf << path.gsub(/\?.*$/, '')

    # ...unless there is an acl or torrent parameter
    if path =~ /[&?]acl($|&|=)/
      buf << '?acl'
    elsif path =~ /[&?]torrent($|&|=)/
      buf << '?torrent'
    elsif path =~ /[&?]logging($|&|=)/
      buf << '?logging'
    end

    return buf
  end

  # encodes the given string with the aws_secret_access_key, by taking the
  # hmac-sha1 sum, and then base64 encoding it. optionally, it will also
  # url encode the result of that to protect the string if it's going to
  # be used as a query string parameter.
  def S3.encode(aws_secret_access_key, str, urlencode=false)
    digest = OpenSSL::Digest::Digest.new('sha1')
    b64_hmac =
      Base64.encode64(
        OpenSSL::HMAC.digest(digest, aws_secret_access_key, str)).strip

    if urlencode
      return CGI::escape(b64_hmac)
    else
      return b64_hmac
    end
  end


  # uses Net::HTTP to interface with S3. note that this interface should only
  # be used for smaller objects, as it does not stream the data. if you were
  # to download a 1gb file, it would require 1gb of memory. also, this class
  # creates a new http connection each time. it would be greatly improved with
  # some connection pooling.
  class AWSAuthConnection
    def initialize(aws_access_key_id, aws_secret_access_key, is_secure=true,
                   server=DEFAULT_HOST, port=PORTS_BY_SECURITY[is_secure])
      @aws_access_key_id = aws_access_key_id
      @aws_secret_access_key = aws_secret_access_key
      @http = Net::HTTP.new(server, port)
      @http.use_ssl = is_secure
    end

    def create_bucket(bucket, headers={})
      return Response.new(make_request('PUT', bucket, headers))
    end

    # takes options :prefix, :marker, :max_keys, and :delimiter
    def list_bucket(bucket, options={}, headers={})
      path = bucket
      if options.size > 0
        path += '?' + options.map { |k, v| "#{k}=#{CGI::escape v.to_s}" }.join('&')
      end

      return ListBucketResponse.new(make_request('GET', path, headers))
    end

    def delete_bucket(bucket, headers={})
      return Response.new(make_request('DELETE', bucket, headers))
    end

    def put(bucket, key, object, headers={})
      object = S3Object.new(object) if not object.instance_of? S3Object

      return Response.new(
        make_request('PUT', "#{bucket}/#{CGI::escape key}", headers, object.data, object.metadata)
      )
    end

    def get(bucket, key, headers={})
      return GetResponse.new(make_request('GET', "#{bucket}/#{CGI::escape key}", headers))
    end

    def delete(bucket, key, headers={})
      return Response.new(make_request('DELETE', "#{bucket}/#{CGI::escape key}", headers))
    end

    def head(bucket, key, headers={})
      return GetResponse.new(make_request('HEAD', "#{bucket}/#{CGI::escape key}", headers))
    end

    # a convenience method to put an object to S3 publicly readable
    def put_public(bucket, key, object, headers)
      put(
        bucket,
        key,
        object,
        { 'x-amz-acl' => 'public-read' }.merge(headers)
      )
    end

    # a convenience method that returns an array with the list of all your bucket names
    def list_buckets
      list_all_my_buckets.entries.map { |entry| entry.name }
    end

    # a convenience method that returns an array with the list of all the keys in a bucket
    def list_bucket_contents(bucket)
      list_bucket(bucket).entries.map { |entry| entry.key }
    end

    def make_public(bucket, object, headers={})
      put_acl(bucket, object, '', { 'x-amz-acl' => 'public-read' }.merge(headers))
    end

    def make_private(bucket, object, headers={})
      put_acl(bucket, object, '', { 'x-amz-acl' => 'private' }.merge(headers))
    end

    def get_bucket_logging(bucket, headers={})
      return GetResponse.new(make_request('GET', "#{bucket}?logging", headers))
    end

    def put_bucket_logging(bucket, logging_xml_doc, headers={})
      return Response.new(make_request('PUT', "#{bucket}?logging", headers, logging_xml_doc))
    end

    def get_bucket_acl(bucket, headers={})
      return get_acl(bucket, '', headers)
    end

    # returns an xml document representing the access control list.
    # this could be parsed into an object.
    def get_acl(bucket, key, headers={})
      return GetResponse.new(make_request('GET', "#{bucket}/#{CGI::escape key}?acl", headers))
    end

    def put_bucket_acl(bucket, acl_xml_doc, headers={})
      return put_acl(bucket, '', acl_xml_doc, headers)
    end

    # sets the access control policy for the given resource. acl_xml_doc must
    # be a string in the acl xml format.
    def put_acl(bucket, key, acl_xml_doc, headers={})
      return Response.new(
        make_request('PUT', "#{bucket}/#{CGI::escape key}?acl", headers, acl_xml_doc, {})
      )
    end

    def list_all_my_buckets(headers={})
      return ListAllMyBucketsResponse.new(make_request('GET', '', headers))
    end

    private
    def make_request(method, path, headers={}, data='', metadata={})
      @http.start do
        req = method_to_request_class(method).new("/#{path}")

        set_headers(req, headers)
        set_headers(req, metadata, METADATA_PREFIX)

        set_aws_auth_header(req, @aws_access_key_id, @aws_secret_access_key)
        if req.request_body_permitted?
          return @http.request(req, data)
        else
          return @http.request(req)
        end
      end
    end

    def method_to_request_class(method)
      case method
      when 'GET'
        return Net::HTTP::Get
      when 'PUT'
        return Net::HTTP::Put
      when 'DELETE'
        return Net::HTTP::Delete
      when 'HEAD'
        return Net::HTTP::Head
      else
        raise "Unsupported method #{method}"
      end
    end

    # set the Authorization header using AWS signed header authentication
    def set_aws_auth_header(request, aws_access_key_id, aws_secret_access_key)
      # we want to fix the date here if it's not already been done.
      request['Date'] ||= Time.now.httpdate

      # ruby will automatically add a random content-type on some verbs, so
      # here we add a dummy one to 'suppress' it. change this logic if having
      # an empty content-type header becomes semantically meaningful for any
      # other verb.
      request['Content-Type'] ||= ''

      canonical_string =
        S3.canonical_string(request.method, request.path, request.to_hash)
      encoded_canonical = S3.encode(aws_secret_access_key, canonical_string)

      request['Authorization'] = "AWS #{aws_access_key_id}:#{encoded_canonical}"
    end

    def set_headers(request, headers, prefix='')
      headers.each do |key, value|
        request[prefix + key] = value
      end
    end
  end


  # This interface mirrors the AWSAuthConnection class above, but instead
  # of performing the operations, this class simply returns a url that can
  # be used to perform the operation with the query string authentication
  # parameters set.
  class QueryStringAuthGenerator
    attr_reader :server, :expires, :expires_in, :port

    # by default, expire in 1 minute
    DEFAULT_EXPIRES_IN = 60

    def initialize(aws_access_key_id, aws_secret_access_key, is_secure=true, server=DEFAULT_HOST, port=PORTS_BY_SECURITY[is_secure])
      @aws_access_key_id = aws_access_key_id
      @aws_secret_access_key = aws_secret_access_key
      @protocol = is_secure ? 'https' : 'http'
      @server = server
      @port = port
      # by default expire
      @expires_in = DEFAULT_EXPIRES_IN
    end

    # set the expires value to be a fixed time. the argument can
    # be either a Time object or else seconds since epoch.
    def expires=(value)
      @expires = value
      @expires_in = nil
    end

    # set the expires value to expire at some point in the future
    # relative to when the url is generated. value is in seconds.
    def expires_in=(value)
      @expires_in = value
      @expires = nil
    end

    def create_bucket(bucket, headers={})
      return generate_url('PUT', bucket, headers)
    end

    # takes options :prefix, :marker, :max_keys, and :delimiter
    def list_bucket(bucket, options={}, headers={})
      path = bucket
      if options.size > 0
        path += '?' + options.map { |k, v| "#{k}=#{CGI::escape v}" }.join('&')
      end

      return generate_url('GET', path, headers)
    end

    def delete_bucket(bucket, headers={})
      return generate_url('DELETE', bucket, headers)
    end

    # don't really care what object data is. it's just for conformance with the
    # other interface. If this doesn't work, check tcpdump to see if the client is
    # putting a Content-Type header on the wire.
    def put(bucket, key, object=nil, headers={})
      object = S3Object.new(object) if not object.instance_of? S3Object
      return generate_url('PUT', "#{bucket}/#{CGI::escape key}", merge_meta(headers, object))
    end

    def get(bucket, key, headers={})
      return generate_url('GET', "#{bucket}/#{CGI::escape key}", headers)
    end

    def delete(bucket, key, headers={})
      return generate_url('DELETE', "#{bucket}/#{CGI::escape key}", headers)
    end

    def get_bucket_logging(bucket, headers={})
      return generate_url('GET', "#{bucket}?logging", headers)
    end

    def put_bucket_logging(bucket, logging_xml_doc, headers={})
      return generate_url('PUT', "#{bucket}?logging", headers)
    end

    def get_acl(bucket, key='', headers={})
      return generate_url('GET', "#{bucket}/#{CGI::escape key}?acl", headers)
    end

    def get_bucket_acl(bucket, headers={})
      return get_acl(bucket, '', headers)
    end

    # don't really care what acl_xml_doc is.
    # again, check the wire for Content-Type if this fails.
    def put_acl(bucket, key, acl_xml_doc, headers={})
      return generate_url('PUT', "#{bucket}/#{CGI::escape key}?acl", headers)
    end

    def put_bucket_acl(bucket, acl_xml_doc, headers={})
      return put_acl(bucket, '', acl_xml_doc, headers)
    end

    def list_all_my_buckets(headers={})
      return generate_url('GET', '', headers)
    end

    def get_torrent(bucket, key, headers={})
      return generate_url('GET', "#{bucket}/#{CGI::escape key}?torrent", headers)
    end

    private
    # generate a url with the appropriate query string authentication
    # parameters set.
    def generate_url(method, path, headers)
      expires = 0
      if not @expires_in.nil?
        expires = Time.now.to_i + @expires_in
      elsif not @expires.nil?
        expires = @expires
      else
        raise "invalid expires state"
      end

      canonical_string =
        S3::canonical_string(method, "/" + path, headers, expires)
      encoded_canonical =
        S3::encode(@aws_secret_access_key, canonical_string, true)

      arg_sep = path.index('?') ? '&' : '?'

      return "#{@protocol}://#{@server}:#{@port}/#{path}#{arg_sep}Signature=#{encoded_canonical}&Expires=#{expires}&AWSAccessKeyId=#{@aws_access_key_id}"
    end

    def merge_meta(headers, object)
      final_headers = headers.clone
      if not object.nil? and not object.metadata.nil?
        object.metadata.each do |k, v|
          final_headers[METADATA_PREFIX + k] = v
        end
      end
      return final_headers
    end
  end

  class S3Object
    attr_accessor :data
    attr_accessor :metadata
    def initialize(data, metadata={})
      @data, @metadata = data, metadata
    end
  end

  class Owner
    attr_accessor :id
    attr_accessor :display_name
  end

  class ListEntry
    attr_accessor :key
    attr_accessor :last_modified
    attr_accessor :etag
    attr_accessor :size
    attr_accessor :storage_class
    attr_accessor :owner
  end

  class ListProperties
    attr_accessor :name
    attr_accessor :prefix
    attr_accessor :marker
    attr_accessor :max_keys
    attr_accessor :delimiter
    attr_accessor :is_truncated
    attr_accessor :next_marker
  end

  class CommonPrefixEntry
    attr_accessor :prefix
  end

  # Parses the list bucket output into a list of ListEntry objects, and
  # a list of CommonPrefixEntry objects if applicable.
  class ListBucketParser
    attr_reader :properties
    attr_reader :entries
    attr_reader :common_prefixes

    def initialize
      reset
    end

    def tag_start(name, attributes)
      if name == 'ListBucketResult'
        @properties = ListProperties.new
      elsif name == 'Contents'
        @curr_entry = ListEntry.new
      elsif name == 'Owner'
        @curr_entry.owner = Owner.new
      elsif name == 'CommonPrefixes'
        @common_prefix_entry = CommonPrefixEntry.new
      end
    end

    # we have one, add him to the entries list
    def tag_end(name)
      # this prefix is the one we echo back from the request
      if name == 'Name'
        @properties.name = @curr_text
      elsif name == 'Prefix' && @is_echoed_prefix
        @properties.prefix = @curr_text
        @is_echoed_prefix = nil
      elsif name == 'Marker'
        @properties.marker = @curr_text
      elsif name == 'MaxKeys'
        @properties.max_keys = @curr_text.to_i
      elsif name == 'Delimiter'
        @properties.delimiter = @curr_text
      elsif name == 'IsTruncated'
        @properties.is_truncated = @curr_text == 'true'
      elsif name == 'NextMarker'
        @properties.next_marker = @curr_text
      elsif name == 'Contents'
        @entries << @curr_entry
      elsif name == 'Key'
        @curr_entry.key = @curr_text
      elsif name == 'LastModified'
        @curr_entry.last_modified = @curr_text
      elsif name == 'ETag'
        @curr_entry.etag = @curr_text
      elsif name == 'Size'
        @curr_entry.size = @curr_text.to_i
      elsif name == 'StorageClass'
        @curr_entry.storage_class = @curr_text
      elsif name == 'ID'
        @curr_entry.owner.id = @curr_text
      elsif name == 'DisplayName'
        @curr_entry.owner.display_name = @curr_text
      elsif name == 'CommonPrefixes'
        @common_prefixes << @common_prefix_entry
      elsif name == 'Prefix'
        # this is the common prefix for keys that match up to the delimiter
        @common_prefix_entry.prefix = @curr_text
      end
      @curr_text = ''
    end

    def text(text)
      @curr_text += text
    end

    def xmldecl(version, encoding, standalone)
      # ignore
    end

    # get ready for another parse
    def reset
      @is_echoed_prefix = true
      @entries = []
      @curr_entry = nil
      @common_prefixes = []
      @common_prefix_entry = nil
      @curr_text = ''
    end
  end

  class Bucket
    attr_accessor :name
    attr_accessor :creation_date
  end

  class ListAllMyBucketsParser
    attr_reader :entries

    def initialize
      reset
    end

    def tag_start(name, attributes)
      if name == 'Bucket'
        @curr_bucket = Bucket.new
      end
    end

    # we have one, add him to the entries list
    def tag_end(name)
      if name == 'Bucket'
        @entries << @curr_bucket
      elsif name == 'Name'
        @curr_bucket.name = @curr_text
      elsif name == 'CreationDate'
        @curr_bucket.creation_date = @curr_text
      end
      @curr_text = ''
    end

    def text(text)
      @curr_text += text
    end

    def xmldecl(version, encoding, standalone)
      # ignore
    end

    # get ready for another parse
    def reset
      @entries = []
      @owner = nil
      @curr_bucket = nil
      @curr_text = ''
    end
  end

  class Response
    attr_reader :http_response
    def initialize(response)
      @http_response = response
    end
  end

  class GetResponse < Response
    attr_reader :object
    def initialize(response)
      super(response)
      metadata = get_aws_metadata(response)
      data = response.body
      @object = S3Object.new(data, metadata)
    end

    # parses the request headers and pulls out the s3 metadata into a hash
    def get_aws_metadata(response)
      metadata = {}
      response.each do |key, value|
        if key =~ /^#{METADATA_PREFIX}(.*)$/oi
          metadata[$1] = value
        end
      end
      return metadata
    end
  end

  class ListBucketResponse < Response
    attr_reader :properties
    attr_reader :entries
    attr_reader :common_prefix_entries

    def initialize(response)
      super(response)
      if response.is_a? Net::HTTPSuccess
        parser = ListBucketParser.new
        REXML::Document.parse_stream(response.body, parser)
        @properties = parser.properties
        @entries = parser.entries
        @common_prefix_entries = parser.common_prefixes
      else
        @entries = []
      end
    end
  end

  class ListAllMyBucketsResponse < Response
    attr_reader :entries
    def initialize(response)
      super(response)
      if response.is_a? Net::HTTPSuccess
        parser = ListAllMyBucketsParser.new
        REXML::Document.parse_stream(response.body, parser)
        @entries = parser.entries
      else
        @entries = []
      end
    end
  end
end
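
As the comments in the file above explain, QueryStringAuthGenerator mirrors the AWSAuthConnection API but performs no requests; it only returns URLs carrying the query string authentication parameters. A minimal sketch, with placeholder credentials, bucket, and key:

require 'rubygems'
require 's3-ruby'

# Placeholder credentials -- substitute real AWS keys.
gen = S3::QueryStringAuthGenerator.new('AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')

# URLs default to a 60 second lifetime (DEFAULT_EXPIRES_IN); extend it here.
gen.expires_in = 600

# No request is made: get simply returns a signed URL with Signature,
# Expires and AWSAccessKeyId parameters that a browser can fetch directly.
puts gen.get('example-bucket', 'hello.txt')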
data/test/test_s3-ruby.rb
File without changes
metadata
ADDED
@@ -0,0 +1,57 @@
--- !ruby/object:Gem::Specification
rubygems_version: 0.8.11
specification_version: 1
name: s3-ruby
version: !ruby/object:Gem::Version
  version: 0.1.0
date: 2006-11-23 00:00:00 -06:00
summary: A library for the Amazon S3 (Simple Storage Service) web service REST API
require_paths:
- lib
email: ryand-ruby@zenspider.com
homepage: " by Chris Carter <cdcarter@gmail.com>"
rubyforge_project: s3-ruby
description: "== FEATURES/PROBLEMS: * Full REST API support * Support for streams * no tests at this point == DESCRIPTION: s3-ruby interfaces with the REST API of Amazon's S3 (Simple Storage Service) to give your ruby applications full access to the service."
autorequire:
default_executable:
bindir: bin
has_rdoc: true
required_ruby_version: !ruby/object:Gem::Version::Requirement
  requirements:
  - - ">"
    - !ruby/object:Gem::Version
      version: 0.0.0
  version:
platform: ruby
signing_key:
cert_chain:
authors:
- Ryan Davis
files:
- History.txt
- Manifest.txt
- README.txt
- Rakefile
- lib/s3-ruby.rb
test_files:
- test/test_s3-ruby.rb
rdoc_options: []

extra_rdoc_files: []

executables: []

extensions: []

requirements: []

dependencies:
- !ruby/object:Gem::Dependency
  name: hoe
  version_requirement:
  version_requirements: !ruby/object:Gem::Version::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: 1.1.4
    version: