s33r 0.4.2 → 0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/examples/cli/instant_download_server.rb +88 -0
- data/examples/cli/s3cli.rb +31 -52
- data/examples/cli/simple.rb +16 -6
- data/examples/fores33r/app/controllers/browser_controller.rb +12 -10
- data/examples/fores33r/app/helpers/application_helper.rb +2 -1
- data/examples/fores33r/app/views/browser/_upload.rhtml +1 -1
- data/examples/fores33r/app/views/browser/index.rhtml +4 -4
- data/examples/fores33r/config/environment.rb +5 -3
- data/examples/fores33r/log/development.log +2259 -0
- data/examples/fores33r/log/mongrel.log +59 -0
- data/examples/s3.yaml +2 -6
- data/lib/s33r/bucket.rb +103 -0
- data/lib/s33r/bucket_listing.rb +33 -76
- data/lib/s33r/client.rb +305 -446
- data/lib/s33r/networking.rb +197 -0
- data/lib/s33r/s33r_exception.rb +29 -18
- data/lib/s33r/s33r_http.rb +36 -18
- data/lib/s33r/s3_acl.rb +32 -52
- data/lib/s33r/s3_logging.rb +117 -0
- data/lib/s33r/s3_obj.rb +124 -69
- data/lib/s33r/utility.rb +447 -0
- data/test/cases/spec_acl.rb +10 -40
- data/test/cases/spec_bucket_listing.rb +12 -32
- data/test/cases/spec_logging.rb +47 -0
- data/test/cases/spec_networking.rb +11 -0
- data/test/cases/spec_s3_object.rb +44 -5
- data/test/cases/spec_utility.rb +264 -0
- data/test/files/acl.xml +0 -6
- data/test/files/config.yaml +5 -0
- data/test/files/logging_status_disabled.xml +3 -0
- data/test/files/logging_status_enabled.xml +7 -0
- data/test/test_setup.rb +7 -2
- metadata +16 -94
- data/examples/cli/acl_x.rb +0 -41
- data/examples/cli/logging_x.rb +0 -20
- data/examples/fores33r/README +0 -183
- data/html/classes/MIME.html +0 -120
- data/html/classes/MIME/InvalidContentType.html +0 -119
- data/html/classes/MIME/Type.html +0 -1173
- data/html/classes/MIME/Types.html +0 -566
- data/html/classes/Net.html +0 -108
- data/html/classes/Net/HTTPGenericRequest.html +0 -233
- data/html/classes/Net/HTTPResponse.html +0 -271
- data/html/classes/S33r.html +0 -986
- data/html/classes/S33r/BucketListing.html +0 -434
- data/html/classes/S33r/Client.html +0 -1575
- data/html/classes/S33r/LoggingResource.html +0 -222
- data/html/classes/S33r/NamedBucket.html +0 -693
- data/html/classes/S33r/OrderlyXmlMarkup.html +0 -165
- data/html/classes/S33r/S33rException.html +0 -124
- data/html/classes/S33r/S33rException/BucketListingMaxKeysError.html +0 -111
- data/html/classes/S33r/S33rException/BucketNotLogTargetable.html +0 -119
- data/html/classes/S33r/S33rException/InvalidBucketListing.html +0 -111
- data/html/classes/S33r/S33rException/InvalidPermission.html +0 -111
- data/html/classes/S33r/S33rException/InvalidS3GroupType.html +0 -111
- data/html/classes/S33r/S33rException/MalformedBucketName.html +0 -111
- data/html/classes/S33r/S33rException/MethodNotAvailable.html +0 -111
- data/html/classes/S33r/S33rException/MissingBucketName.html +0 -111
- data/html/classes/S33r/S33rException/MissingRequiredHeaders.html +0 -111
- data/html/classes/S33r/S33rException/MissingResource.html +0 -111
- data/html/classes/S33r/S33rException/S3FallenOver.html +0 -111
- data/html/classes/S33r/S33rException/TryingToPutEmptyResource.html +0 -117
- data/html/classes/S33r/S33rException/UnsupportedCannedACL.html +0 -111
- data/html/classes/S33r/S33rException/UnsupportedHTTPMethod.html +0 -111
- data/html/classes/S33r/S3ACL.html +0 -125
- data/html/classes/S33r/S3ACL/ACLDoc.html +0 -521
- data/html/classes/S33r/S3ACL/AmazonCustomer.html +0 -168
- data/html/classes/S33r/S3ACL/CanonicalUser.html +0 -212
- data/html/classes/S33r/S3ACL/Grant.html +0 -403
- data/html/classes/S33r/S3ACL/Grantee.html +0 -239
- data/html/classes/S33r/S3ACL/Group.html +0 -178
- data/html/classes/S33r/S3Object.html +0 -618
- data/html/classes/S33r/Sync.html +0 -152
- data/html/classes/XML.html +0 -202
- data/html/classes/XML/Document.html +0 -125
- data/html/classes/XML/Node.html +0 -124
- data/html/created.rid +0 -1
- data/html/files/CHANGELOG.html +0 -107
- data/html/files/MIT-LICENSE.html +0 -129
- data/html/files/README_txt.html +0 -259
- data/html/files/lib/s33r/bucket_listing_rb.html +0 -101
- data/html/files/lib/s33r/builder_rb.html +0 -108
- data/html/files/lib/s33r/client_rb.html +0 -111
- data/html/files/lib/s33r/core_rb.html +0 -113
- data/html/files/lib/s33r/libxml_extensions_rb.html +0 -101
- data/html/files/lib/s33r/libxml_loader_rb.html +0 -109
- data/html/files/lib/s33r/logging_rb.html +0 -108
- data/html/files/lib/s33r/mimetypes_rb.html +0 -120
- data/html/files/lib/s33r/named_bucket_rb.html +0 -101
- data/html/files/lib/s33r/s33r_exception_rb.html +0 -101
- data/html/files/lib/s33r/s33r_http_rb.html +0 -108
- data/html/files/lib/s33r/s3_acl_rb.html +0 -108
- data/html/files/lib/s33r/s3_obj_rb.html +0 -108
- data/html/files/lib/s33r/sync_rb.html +0 -101
- data/html/files/lib/s33r_rb.html +0 -101
- data/html/fr_class_index.html +0 -66
- data/html/fr_file_index.html +0 -44
- data/html/fr_method_index.html +0 -183
- data/html/index.html +0 -24
- data/html/rdoc-style.css +0 -208
- data/lib/s33r/core.rb +0 -296
- data/lib/s33r/logging.rb +0 -43
- data/lib/s33r/named_bucket.rb +0 -148
- data/lib/s33r/sync.rb +0 -13
- data/test/cases/spec_all_buckets.rb +0 -28
- data/test/cases/spec_client.rb +0 -101
- data/test/cases/spec_core.rb +0 -128
- data/test/cases/spec_namedbucket.rb +0 -46
- data/test/cases/spec_sync.rb +0 -34
- data/test/files/all_buckets.xml +0 -21
- data/test/files/client_config.yml +0 -5
- data/test/files/namedbucket_config.yml +0 -8
- data/test/files/namedbucket_config2.yml +0 -8
- data/test/test_bucket_setup.rb +0 -41
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
require 'rubygems'
|
|
2
|
+
require_gem 'builder'
|
|
3
|
+
require File.join(File.dirname(__FILE__), 'libxml_loader')
|
|
4
|
+
require File.join(File.dirname(__FILE__), 's3_acl')
|
|
5
|
+
|
|
6
|
+
module S33r
|
|
7
|
+
module S3Logging
|
|
8
|
+
|
|
9
|
+
# For manipulating logging directives on resources
|
|
10
|
+
# (see http://docs.amazonwebservices.com/AmazonS3/2006-03-01/LoggingHowTo.html).
|
|
11
|
+
#
|
|
12
|
+
# Creating a LoggingResource instance using new and no arguments will generate a "blank" instance;
|
|
13
|
+
# this can be put to the ?logging URL for a resource to remove logging from it.
|
|
14
|
+
#
|
|
15
|
+
# To set a Bucket up for logging, create a LoggingResource with the correct log_target and
|
|
16
|
+
# log_prefix settings and put that to the ?logging URL for a bucket.
|
|
17
|
+
class LoggingResource
|
|
18
|
+
attr_reader :log_target, :log_prefix
|
|
19
|
+
|
|
20
|
+
# +log_target+ is the bucket to put logs into.
|
|
21
|
+
# +log_prefix+ is the prefix for log files put into the +log_target+ bucket.
|
|
22
|
+
def initialize(log_target=nil, log_prefix=nil)
|
|
23
|
+
@log_target = log_target
|
|
24
|
+
@log_prefix = log_prefix
|
|
25
|
+
end
|
|
26
|
+
|
|
27
|
+
# Generate a BucketLoggingStatus XML document for putting to the ?logging
|
|
28
|
+
# URL for a resource.
|
|
29
|
+
def to_xml
|
|
30
|
+
xml_str = ""
|
|
31
|
+
xml = Builder::XmlMarkup.new(:target => xml_str, :indent => 0)
|
|
32
|
+
|
|
33
|
+
xml.instruct!
|
|
34
|
+
|
|
35
|
+
# BucketLoggingStatus XML.
|
|
36
|
+
xml.BucketLoggingStatus({"xmlns" => RESPONSE_NAMESPACE_URI}) {
|
|
37
|
+
unless @log_target.nil? and @log_prefix.nil?
|
|
38
|
+
xml.LoggingEnabled {
|
|
39
|
+
xml.TargetBucket @log_target
|
|
40
|
+
xml.TargetPrefix @log_prefix
|
|
41
|
+
}
|
|
42
|
+
end
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
xml_str
|
|
46
|
+
end
|
|
47
|
+
|
|
48
|
+
# Convert XML from S3 response into a LoggingResource
|
|
49
|
+
#
|
|
50
|
+
#-- TODO: tests
|
|
51
|
+
def self.from_xml(logging_xml)
|
|
52
|
+
return nil if logging_xml.nil?
|
|
53
|
+
|
|
54
|
+
logging_xml = S33r.remove_namespace(logging_xml)
|
|
55
|
+
doc = XML.get_xml_doc(logging_xml)
|
|
56
|
+
|
|
57
|
+
log_target = doc.xget('//TargetBucket')
|
|
58
|
+
log_prefix = doc.xget('//TargetPrefix')
|
|
59
|
+
|
|
60
|
+
self.new(log_target, log_prefix)
|
|
61
|
+
end
|
|
62
|
+
|
|
63
|
+
end
|
|
64
|
+
end
|
|
65
|
+
|
|
66
|
+
# Extensions to the S3ACL module to cover logging.
|
|
67
|
+
module S3ACL
|
|
68
|
+
class Policy
|
|
69
|
+
# Does the ACL make the associated resource available as a log target?
|
|
70
|
+
def log_targetable?
|
|
71
|
+
log_target_grants = Grant.log_target_grants
|
|
72
|
+
log_target_grants.each { |g| return false if !grants.include?(g) }
|
|
73
|
+
return true
|
|
74
|
+
end
|
|
75
|
+
|
|
76
|
+
# Add permissions to an instances which give READ_ACL
|
|
77
|
+
# and WRITE permissions to the LogDelivery group. Used
|
|
78
|
+
# to enable a bucket as a logging destination.
|
|
79
|
+
#
|
|
80
|
+
# Returns true if grants added, false otherwise
|
|
81
|
+
# (if already a log target).
|
|
82
|
+
def add_log_target_grants
|
|
83
|
+
if log_targetable?
|
|
84
|
+
return false
|
|
85
|
+
else
|
|
86
|
+
Grant.log_target_grants.each { |g| add_grant(g) }
|
|
87
|
+
return true
|
|
88
|
+
end
|
|
89
|
+
end
|
|
90
|
+
|
|
91
|
+
# Remove log target ACLs from the document.
|
|
92
|
+
#
|
|
93
|
+
# Returns true if all log target grants were removed;
|
|
94
|
+
# false otherwise.
|
|
95
|
+
#
|
|
96
|
+
# NB even if this method returns false, that doesn't mean
|
|
97
|
+
# the bucket is still a log target. Use log_targetable? to check
|
|
98
|
+
# whether a bucket can be used as a log target.
|
|
99
|
+
def remove_log_target_grants
|
|
100
|
+
ok = true
|
|
101
|
+
Grant.log_target_grants.each { |g| ok = ok and remove_grant(g) }
|
|
102
|
+
ok
|
|
103
|
+
end
|
|
104
|
+
end
|
|
105
|
+
|
|
106
|
+
class Grant
|
|
107
|
+
# Generator for a grant which gives the LogDelivery group
|
|
108
|
+
# write and read_acl permissions on a bucket.
|
|
109
|
+
#
|
|
110
|
+
# Returns an array with the two required Grant instances.
|
|
111
|
+
def Grant.log_target_grants
|
|
112
|
+
log_delivery_group = Group.new(:log_delivery)
|
|
113
|
+
[Grant.new(log_delivery_group, :read_acl), Grant.new(log_delivery_group, :write)]
|
|
114
|
+
end
|
|
115
|
+
end
|
|
116
|
+
end
|
|
117
|
+
end
|
data/lib/s33r/s3_obj.rb
CHANGED
|
@@ -1,64 +1,105 @@
|
|
|
1
1
|
require 'date'
|
|
2
|
+
require File.join(File.dirname(__FILE__), 's3_acl')
|
|
3
|
+
require File.join(File.dirname(__FILE__), 'client')
|
|
2
4
|
|
|
3
5
|
# Representation of an object stored in a bucket.
|
|
4
6
|
module S33r
|
|
5
|
-
class S3Object
|
|
6
|
-
attr_accessor :key, :last_modified, :etag, :size, :owner, :storage_class, :value,
|
|
7
|
-
:content_type, :
|
|
7
|
+
class S3Object < Client
|
|
8
|
+
attr_accessor :key, :last_modified, :etag, :size, :owner, :storage_class, :value,
|
|
9
|
+
:content_type, :render_as_attachment
|
|
8
10
|
|
|
9
|
-
#
|
|
11
|
+
# Name of bucket this object is attached to.
|
|
12
|
+
attr_reader :bucket
|
|
13
|
+
|
|
14
|
+
# Metadata to set with x-amz-meta- style headers. Note that the bit after x-amz-meta-
|
|
10
15
|
# is stored for each key, rather than the full key.
|
|
11
|
-
attr_accessor :
|
|
16
|
+
attr_accessor :amz_meta
|
|
17
|
+
alias :meta :amz_meta
|
|
12
18
|
|
|
13
|
-
|
|
19
|
+
# +options+ can include:
|
|
20
|
+
# * <tt>:bucket => Bucket</tt>: Bucket this object is attached to.
|
|
21
|
+
# * <tt>:metadata => Hash</tt>: metadata to use in building the object.
|
|
22
|
+
# * <tt>:amz_meta => Hash</tt>: metadata specific to Amazon.
|
|
23
|
+
def initialize(key, value=nil, options={})
|
|
14
24
|
@key = key
|
|
15
|
-
|
|
25
|
+
|
|
16
26
|
@value = value
|
|
27
|
+
@content_type = 'text/plain'
|
|
28
|
+
@render_as_attachment = false
|
|
29
|
+
@amz_meta = options[:amz_meta] || {}
|
|
30
|
+
|
|
31
|
+
set_bucket(options[:bucket])
|
|
32
|
+
|
|
33
|
+
metadata = options[:metadata] || {}
|
|
17
34
|
set_properties(metadata) unless metadata.empty?
|
|
18
35
|
end
|
|
19
36
|
|
|
37
|
+
# Set a bucket instance as the default bucket for this object.
|
|
38
|
+
def set_bucket(bucket_instance)
|
|
39
|
+
if bucket_instance
|
|
40
|
+
@bucket = bucket_instance
|
|
41
|
+
set_options(@bucket.settings)
|
|
42
|
+
extend(InBucket)
|
|
43
|
+
end
|
|
44
|
+
end
|
|
45
|
+
alias :bucket= :set_bucket
|
|
46
|
+
|
|
20
47
|
# Set the properties of the object from some metadata name-value pairs.
|
|
21
48
|
#
|
|
22
49
|
# +metadata+ is a hash of properties and their values, used to set the
|
|
23
50
|
# corresponding properties on the object.
|
|
24
|
-
#
|
|
25
|
-
# +value+ is the data associated with the object on S3.
|
|
26
51
|
def set_properties(metadata)
|
|
27
52
|
# required properties
|
|
28
53
|
@etag = metadata[:etag].gsub("\"", "") if metadata[:etag]
|
|
29
54
|
@last_modified = DateTime.parse(metadata[:last_modified]) if metadata[:last_modified]
|
|
30
55
|
@size = metadata[:size].to_i if metadata[:size]
|
|
56
|
+
@render_as_attachment = metadata[:render_as_attachment] || false
|
|
31
57
|
|
|
32
58
|
# only set if creating object from XML (not available otherwise)
|
|
33
|
-
@owner
|
|
59
|
+
@owner ||= metadata[:owner]
|
|
34
60
|
|
|
35
61
|
# only set if creating object from HTTP response
|
|
36
|
-
@content_type = metadata[:content_type]
|
|
62
|
+
@content_type = metadata[:content_type] if metadata[:content_type]
|
|
37
63
|
end
|
|
38
64
|
|
|
39
65
|
# To create an object which reads the content in from a file;
|
|
40
|
-
#
|
|
41
|
-
|
|
42
|
-
# memory first.
|
|
43
|
-
def self.from_file(key, filename)
|
|
66
|
+
# you can then save the object to its associated bucket (if you like).
|
|
67
|
+
def self.from_file(filename, options={})
|
|
44
68
|
mime_type = guess_mime_type(filename)
|
|
45
69
|
content_type = mime_type.simplified
|
|
46
70
|
value = File.open(filename).read
|
|
47
|
-
|
|
71
|
+
key = options[:key] || filename
|
|
72
|
+
|
|
73
|
+
options.merge!(:metadata => {:content_type => content_type})
|
|
74
|
+
self.new(key, value, options)
|
|
75
|
+
end
|
|
76
|
+
|
|
77
|
+
# Create a new instance from a string.
|
|
78
|
+
def self.from_text(key, text, options={})
|
|
79
|
+
content_type = 'text/plain'
|
|
80
|
+
|
|
81
|
+
options.merge!(:metadata => {:content_type => content_type})
|
|
82
|
+
self.new(key, text, options)
|
|
48
83
|
end
|
|
49
84
|
|
|
50
85
|
# Set properties of the object from an XML string.
|
|
51
86
|
#
|
|
52
87
|
# +xml_str+ should be a string representing a full XML document,
|
|
53
88
|
# containing a <Contents> element as its root element.
|
|
54
|
-
def self.from_xml_string(xml_str)
|
|
89
|
+
def self.from_xml_string(xml_str, options={})
|
|
55
90
|
self.from_xml_node(XML.get_xml_doc(xml_str))
|
|
56
91
|
end
|
|
57
92
|
|
|
58
|
-
# Create a new instance from an XML document
|
|
59
|
-
|
|
93
|
+
# Create a new instance from an XML document;
|
|
94
|
+
# N.B. this instance will have no value associated with it (yet).
|
|
95
|
+
# Call the load method to populate it.
|
|
96
|
+
#
|
|
97
|
+
# +options+ is passed to the constructor.
|
|
98
|
+
def self.from_xml_node(doc, options={})
|
|
60
99
|
metadata = self.parse_xml_node(doc)
|
|
61
|
-
|
|
100
|
+
|
|
101
|
+
options.merge!(:metadata => metadata)
|
|
102
|
+
self.new(metadata[:key], nil, options)
|
|
62
103
|
end
|
|
63
104
|
|
|
64
105
|
# Get properties of the object from an XML document, e.g. as returned in a bucket listing.
|
|
@@ -66,7 +107,6 @@ module S33r
|
|
|
66
107
|
# +doc+: XML::Document instance to parse to get properties for this object.
|
|
67
108
|
#
|
|
68
109
|
# Returns the metadata relating to the object, as stored on S3.
|
|
69
|
-
#-- TODO: include amz-meta elements
|
|
70
110
|
def self.parse_xml_node(doc)
|
|
71
111
|
metadata = {}
|
|
72
112
|
metadata[:key] = doc.xget('Key')
|
|
@@ -88,18 +128,20 @@ module S33r
|
|
|
88
128
|
# do a HEAD for that.
|
|
89
129
|
#
|
|
90
130
|
# +key+ is the key for the resource (not part of the response).
|
|
131
|
+
# +resp+ is a Net::HTTPResponse instance to parse.
|
|
132
|
+
# +options+ is passed through to the constructor (see initialize).
|
|
91
133
|
#
|
|
92
134
|
# Note that if the resp returns nil, a blank object is created.
|
|
93
|
-
def self.from_response(key, resp)
|
|
135
|
+
def self.from_response(key, resp, options={})
|
|
94
136
|
result = self.parse_response(resp)
|
|
95
137
|
if result
|
|
96
138
|
metadata, amz_meta, value = result
|
|
139
|
+
options.merge!(:metadata => metadata, :amz_meta => amz_meta)
|
|
97
140
|
else
|
|
98
|
-
metadata = {}
|
|
99
|
-
amz_meta = {}
|
|
100
141
|
value = nil
|
|
101
142
|
end
|
|
102
|
-
|
|
143
|
+
|
|
144
|
+
self.new(key, value, options)
|
|
103
145
|
end
|
|
104
146
|
|
|
105
147
|
# Parse the response returned by GET on a resource key
|
|
@@ -120,67 +162,80 @@ module S33r
|
|
|
120
162
|
metadata[:size] = resp_headers['content-length'][0]
|
|
121
163
|
metadata[:content_type] = resp_headers['content-type'][0]
|
|
122
164
|
|
|
165
|
+
content_disposition = resp_headers['content-disposition']
|
|
166
|
+
if content_disposition
|
|
167
|
+
content_disposition = content_disposition[0]
|
|
168
|
+
if /^attachment/ =~ content_disposition
|
|
169
|
+
metadata[:render_as_attachment] = true
|
|
170
|
+
end
|
|
171
|
+
end
|
|
172
|
+
|
|
123
173
|
# x-amz-meta- response headers.
|
|
124
174
|
interesting_header = Regexp.new(METADATA_PREFIX)
|
|
125
|
-
|
|
175
|
+
new_amz_meta = {}
|
|
126
176
|
resp.each_header do |key, value|
|
|
127
|
-
|
|
177
|
+
new_amz_meta[key.gsub(interesting_header, '')] = value if interesting_header =~ key
|
|
128
178
|
end
|
|
129
179
|
|
|
130
180
|
# The actual content of the S3 object.
|
|
131
181
|
value = resp.body
|
|
132
182
|
|
|
133
|
-
[metadata,
|
|
183
|
+
[metadata, new_amz_meta, value]
|
|
134
184
|
else
|
|
135
185
|
nil
|
|
136
186
|
end
|
|
137
187
|
end
|
|
138
188
|
|
|
139
|
-
#
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
#
|
|
143
|
-
# Returns false if value cannot be retrieved.
|
|
144
|
-
def load
|
|
145
|
-
if @named_bucket and @named_bucket.key_exists?(@key)
|
|
146
|
-
resp = @named_bucket.get_raw(@key)
|
|
147
|
-
if resp.ok?
|
|
148
|
-
@value = resp.body
|
|
149
|
-
@content_type = resp.to_hash['content-type']
|
|
150
|
-
return true
|
|
151
|
-
else
|
|
152
|
-
return false
|
|
153
|
-
end
|
|
154
|
-
else
|
|
155
|
-
return false
|
|
156
|
-
end
|
|
189
|
+
# Set metadata on the object.
|
|
190
|
+
def []=(key, value)
|
|
191
|
+
amz_meta[key] = value
|
|
157
192
|
end
|
|
158
193
|
|
|
159
|
-
#
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
194
|
+
# Get metadata on the object.
|
|
195
|
+
def [](key)
|
|
196
|
+
amz_meta[key]
|
|
197
|
+
end
|
|
198
|
+
end
|
|
199
|
+
|
|
200
|
+
module InBucket
|
|
201
|
+
# Send requests using the bucket's options.
|
|
202
|
+
def request_defaults
|
|
203
|
+
bucket.request_defaults.merge(:key => key)
|
|
204
|
+
end
|
|
205
|
+
|
|
206
|
+
# Fetch an object's metadata and content from S3.
|
|
207
|
+
def fetch(options={})
|
|
208
|
+
resp = do_get(options)
|
|
209
|
+
|
|
210
|
+
metadata, amz_meta, data = S3Object.parse_response(resp)
|
|
211
|
+
@amz_meta = amz_meta
|
|
212
|
+
@value = data
|
|
213
|
+
set_properties(metadata)
|
|
214
|
+
true
|
|
169
215
|
end
|
|
170
216
|
|
|
171
|
-
# Save
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
# returns false if it doesn't.
|
|
175
|
-
def save
|
|
176
|
-
if @named_bucket.nil?
|
|
177
|
-
return false
|
|
178
|
-
else
|
|
179
|
-
headers = {}
|
|
180
|
-
headers["Content-Type"] = @content_type || ''
|
|
181
|
-
headers = metadata_headers(headers, meta)
|
|
182
|
-
@named_bucket.put_stream(@value, @key, headers)
|
|
183
|
-
end
|
|
217
|
+
# Save an object to S3.
|
|
218
|
+
def save(options={})
|
|
219
|
+
bucket.put(self, options)
|
|
184
220
|
end
|
|
221
|
+
|
|
222
|
+
# Delete the object from S3.
|
|
223
|
+
def delete(options={})
|
|
224
|
+
bucket.delete(key, options)
|
|
225
|
+
end
|
|
226
|
+
|
|
227
|
+
# Change the object's name on S3.
|
|
228
|
+
def rename(new_key, options={})
|
|
229
|
+
# Delete the object from S3.
|
|
230
|
+
bucket.delete(key, options)
|
|
231
|
+
|
|
232
|
+
# Set the new key.
|
|
233
|
+
self.key = new_key
|
|
234
|
+
options[:key] = new_key
|
|
235
|
+
|
|
236
|
+
save(options)
|
|
237
|
+
end
|
|
238
|
+
alias :mv :rename
|
|
239
|
+
|
|
185
240
|
end
|
|
186
241
|
end
|
data/lib/s33r/utility.rb
ADDED
|
@@ -0,0 +1,447 @@
|
|
|
1
|
+
require 'base64'
|
|
2
|
+
require 'time'
|
|
3
|
+
require 'yaml'
|
|
4
|
+
require 'erb'
|
|
5
|
+
require 'cgi'
|
|
6
|
+
|
|
7
|
+
base = File.dirname(__FILE__)
|
|
8
|
+
require File.join(base, 'libxml_loader')
|
|
9
|
+
require File.join(base, 'libxml_extensions')
|
|
10
|
+
|
|
11
|
+
# Module to handle S3 operations which don't require an internet connection,
|
|
12
|
+
# i.e. data validation and request-building operations;
|
|
13
|
+
# also holds all the constants relating to S3.
|
|
14
|
+
#
|
|
15
|
+
# Parts of this code are heavily based on Amazon's code. Here's their license:
|
|
16
|
+
#
|
|
17
|
+
# This software code is made available "AS IS" without warranties of any
|
|
18
|
+
# kind. You may copy, display, modify and redistribute the software
|
|
19
|
+
# code either by itself or as incorporated into your code; provided that
|
|
20
|
+
# you do not remove any proprietary notices. Your use of this software
|
|
21
|
+
# code is at your own risk and you waive any claim against Amazon
|
|
22
|
+
# Digital Services, Inc. or its affiliates with respect to your use of
|
|
23
|
+
# this software code. (c) 2006 Amazon Digital Services, Inc. or its
|
|
24
|
+
# affiliates.
|
|
25
|
+
module S33r
|
|
26
|
+
HOST = 's3.amazonaws.com'
|
|
27
|
+
PORT = 443
|
|
28
|
+
NON_SSL_PORT = 80
|
|
29
|
+
METADATA_PREFIX = 'x-amz-meta-'
|
|
30
|
+
# Size of each chunk (in bytes) to be sent per request when putting files (1Mb).
|
|
31
|
+
DEFAULT_CHUNK_SIZE = 1048576
|
|
32
|
+
AWS_HEADER_PREFIX = 'x-amz-'
|
|
33
|
+
AWS_AUTH_HEADER_VALUE = "AWS %s:%s"
|
|
34
|
+
INTERESTING_HEADERS = ['content-md5', 'content-type', 'date']
|
|
35
|
+
# Headers which must be included with every request to S3.
|
|
36
|
+
REQUIRED_HEADERS = ['Content-Type', 'Date']
|
|
37
|
+
# Canned ACLs made available by S3.
|
|
38
|
+
CANNED_ACLS = ['private', 'public-read', 'public-read-write', 'authenticated-read']
|
|
39
|
+
# HTTP methods which S3 will respond to.
|
|
40
|
+
METHOD_VERBS = ['GET', 'PUT', 'HEAD', 'DELETE']
|
|
41
|
+
# Maximum number which can be passed in max-keys parameter when GETting bucket list.
|
|
42
|
+
BUCKET_LIST_MAX_MAX_KEYS = 1000
|
|
43
|
+
# Default number of seconds an authenticated URL will last for (15 minutes).
|
|
44
|
+
DEFAULT_EXPIRY_SECS = 60 * 15
|
|
45
|
+
# Number of years to use for expiry date when :expires is set to :far_flung_future.
|
|
46
|
+
FAR_FUTURE = 20
|
|
47
|
+
# The namespace used for response body XML documents.
|
|
48
|
+
RESPONSE_NAMESPACE_URI = "http://s3.amazonaws.com/doc/2006-03-01/"
|
|
49
|
+
|
|
50
|
+
# Permissions which can be set within a <Grant>
|
|
51
|
+
# (see http://docs.amazonwebservices.com/AmazonS3/2006-03-01/UsingPermissions.html).
|
|
52
|
+
#
|
|
53
|
+
# NB I've missed out the WRITE_ACP permission as this is functionally
|
|
54
|
+
# equivalent to FULL_CONTROL.
|
|
55
|
+
PERMISSIONS = {
|
|
56
|
+
:read => 'READ', # permission to read
|
|
57
|
+
:write => 'WRITE', # permission to write
|
|
58
|
+
:read_acl => 'READ_ACP', # permission to read ACL settings
|
|
59
|
+
:all => 'FULL_CONTROL' # do anything
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
# Used for generating ACL XML documents.
|
|
63
|
+
NAMESPACE = 'xsi'
|
|
64
|
+
NAMESPACE_URI = 'http://www.w3.org/2001/XMLSchema-instance'
|
|
65
|
+
GRANTEE_TYPES = {
|
|
66
|
+
:amazon_customer => 'AmazonCustomerByEmail',
|
|
67
|
+
:canonical_user => 'CanonicalUser',
|
|
68
|
+
:group => 'Group'
|
|
69
|
+
}
|
|
70
|
+
S3_GROUP_TYPES = {
|
|
71
|
+
:all_users => 'global/AllUsers',
|
|
72
|
+
:authenticated_users => 'global/AuthenticatedUsers',
|
|
73
|
+
:log_delivery => 's3/LogDelivery'
|
|
74
|
+
}
|
|
75
|
+
GROUP_ACL_URI_BASE = 'http://acs.amazonaws.com/groups/'
|
|
76
|
+
|
|
77
|
+
include S3Exception
|
|
78
|
+
|
|
79
|
+
# Load YAML config. file for S33r operations. The config. file looks like this:
|
|
80
|
+
#
|
|
81
|
+
# :include: test/files/config.yaml
|
|
82
|
+
#
|
|
83
|
+
# The +options+ section of the YAML file is optional, and can be used
|
|
84
|
+
# to add application-specific settings for your application.
|
|
85
|
+
#
|
|
86
|
+
# Note that the loader also runs the config. file through ERB, so you can
|
|
87
|
+
# add dynamic blocks of ERB (Ruby) code into your files.
|
|
88
|
+
#
|
|
89
|
+
# +config_file+ is the path to the configuration file.
|
|
90
|
+
#
|
|
91
|
+
# Returns a <tt>[config, options]</tt>, where +config+ is a hash of standard S33r
|
|
92
|
+
# options (:access, :secret), and +options+ is a hash of general application options.
|
|
93
|
+
#
|
|
94
|
+
# The keys for both hashes are converted from strings into symbols.
|
|
95
|
+
def self.load_config(config_file)
|
|
96
|
+
config = YAML::load(ERB.new(IO.read(config_file)).result)
|
|
97
|
+
|
|
98
|
+
options = config.delete('options')
|
|
99
|
+
options = S33r.keys_to_symbols(options)
|
|
100
|
+
|
|
101
|
+
config = S33r.keys_to_symbols(config)
|
|
102
|
+
|
|
103
|
+
[config, options]
|
|
104
|
+
end
|
|
105
|
+
|
|
106
|
+
# Build canonical string for signing;
|
|
107
|
+
# modified (slightly) from the Amazon sample code.
|
|
108
|
+
#
|
|
109
|
+
# * +method+ is one of the available METHOD_VERBS.
|
|
110
|
+
# * +path+ is the path part of the URL to generate the canonical string for.
|
|
111
|
+
# * +headers+ is a hash of headers which are going to be sent with the request.
|
|
112
|
+
# * +expires+ is the expiry time set in the querystring for authenticated URLs:
|
|
113
|
+
# if supplied, it is used for the +date+ header.
|
|
114
|
+
def generate_canonical_string(method, path, headers={}, expires=nil)
|
|
115
|
+
interesting_headers = {}
|
|
116
|
+
headers.each do |key, value|
|
|
117
|
+
lk = key.downcase
|
|
118
|
+
if (INTERESTING_HEADERS.include?(lk) or lk =~ /^#{AWS_HEADER_PREFIX}/o)
|
|
119
|
+
interesting_headers[lk] = value
|
|
120
|
+
end
|
|
121
|
+
end
|
|
122
|
+
|
|
123
|
+
# These fields get empty strings if they don't exist.
|
|
124
|
+
interesting_headers['content-type'] ||= ''
|
|
125
|
+
interesting_headers['content-md5'] ||= ''
|
|
126
|
+
|
|
127
|
+
# If you're using expires for query string auth, then it trumps date.
|
|
128
|
+
if not expires.nil?
|
|
129
|
+
interesting_headers['date'] = expires
|
|
130
|
+
end
|
|
131
|
+
|
|
132
|
+
buf = "#{method}\n"
|
|
133
|
+
interesting_headers.sort { |a, b| a[0] <=> b[0] }.each do |key, value|
|
|
134
|
+
if key =~ /^#{AWS_HEADER_PREFIX}/o
|
|
135
|
+
buf << "#{key}:#{value}\n"
|
|
136
|
+
else
|
|
137
|
+
buf << "#{value}\n"
|
|
138
|
+
end
|
|
139
|
+
end
|
|
140
|
+
|
|
141
|
+
# Ignore everything after the question mark...
|
|
142
|
+
buf << path.gsub(/\?.*$/, '')
|
|
143
|
+
|
|
144
|
+
# ...unless there is an acl, logging or torrent parameter
|
|
145
|
+
if path =~ /[&?]acl($|&|=)/
|
|
146
|
+
buf << '?acl'
|
|
147
|
+
elsif path =~ /[&?]torrent($|&|=)/
|
|
148
|
+
buf << '?torrent'
|
|
149
|
+
elsif path =~ /[&?]logging($|&|=)/
|
|
150
|
+
buf << '?logging'
|
|
151
|
+
end
|
|
152
|
+
|
|
153
|
+
buf
|
|
154
|
+
end
|
|
155
|
+
|
|
156
|
+
# Get the value for the AWS authentication header.
|
|
157
|
+
def generate_auth_header_value(method, path, headers, aws_access_key, aws_secret_access_key)
|
|
158
|
+
raise MethodNotAllowed, "Method %s not available" % method if !METHOD_VERBS.include?(method)
|
|
159
|
+
|
|
160
|
+
# check the headers needed for authentication have been set
|
|
161
|
+
missing_headers = REQUIRED_HEADERS - headers.keys
|
|
162
|
+
if !(missing_headers.empty?)
|
|
163
|
+
raise MissingRequiredHeaders,
|
|
164
|
+
"Headers required for AWS auth value are missing: " + missing_headers.join(', ')
|
|
165
|
+
end
|
|
166
|
+
|
|
167
|
+
raise KeysIncomplete, "Access key or secret access key nil" \
|
|
168
|
+
if aws_access_key.nil? or aws_secret_access_key.nil?
|
|
169
|
+
|
|
170
|
+
# get the AWS header
|
|
171
|
+
canonical_string = generate_canonical_string(method, path, headers)
|
|
172
|
+
signature = generate_signature(aws_secret_access_key, canonical_string)
|
|
173
|
+
AWS_AUTH_HEADER_VALUE % [aws_access_key, signature]
|
|
174
|
+
end
|
|
175
|
+
|
|
176
|
+
# Encode the given string with the aws_secret_access_key, by taking the
|
|
177
|
+
# hmac sha1 sum, and then base64 encoding it.
|
|
178
|
+
def generate_signature(aws_secret_access_key, str)
|
|
179
|
+
digest = OpenSSL::HMAC::digest(OpenSSL::Digest::Digest.new("SHA1"), aws_secret_access_key, str)
|
|
180
|
+
Base64.encode64(digest).strip
|
|
181
|
+
end
|
|
182
|
+
|
|
183
|
+
# Build the headers required with every S3 request (Date and Content-Type);
|
|
184
|
+
# options hash can contain extra header settings;
|
|
185
|
+
# +:date+ and +:content_type+ are required headers, and set to
|
|
186
|
+
# defaults if not supplied.
|
|
187
|
+
def default_headers(existing_headers, options={})
|
|
188
|
+
headers = {}
|
|
189
|
+
|
|
190
|
+
# which default headers required by AWS are missing?
|
|
191
|
+
missing_headers = REQUIRED_HEADERS - existing_headers.keys
|
|
192
|
+
|
|
193
|
+
if missing_headers.include?('Content-Type')
|
|
194
|
+
headers['Content-Type'] = options[:content_type] || ''
|
|
195
|
+
end
|
|
196
|
+
|
|
197
|
+
if missing_headers.include?('Date')
|
|
198
|
+
date = options[:date] || Time.now
|
|
199
|
+
headers['Date'] = date.httpdate
|
|
200
|
+
end
|
|
201
|
+
|
|
202
|
+
headers
|
|
203
|
+
end
|
|
204
|
+
|
|
205
|
+
# Add metadata headers, correctly prefixing them first,
|
|
206
|
+
# e.g. you might do metadata_headers({'myname' => 'elliot', 'myage' => 36})
|
|
207
|
+
# to add two headers to the request:
|
|
208
|
+
#
|
|
209
|
+
# x-amz-meta-myname: elliot
|
|
210
|
+
# x-amz-meta-myage: 36
|
|
211
|
+
#
|
|
212
|
+
# Keys shouldn't have spaces; they can also be represented using symbols.
|
|
213
|
+
#
|
|
214
|
+
# Returns metadata headers appended, with both keys and values as strings.
|
|
215
|
+
def metadata_headers(metadata={})
|
|
216
|
+
headers = {}
|
|
217
|
+
unless metadata.empty?
|
|
218
|
+
metadata.each { |key, value| headers[METADATA_PREFIX + key.to_s] = value.to_s }
|
|
219
|
+
end
|
|
220
|
+
headers
|
|
221
|
+
end
|
|
222
|
+
|
|
223
|
+
# Content transfer headers: set Content-Type, Content-Transfer-Encoding
|
|
224
|
+
# and Content-Disposition headers.
|
|
225
|
+
#
|
|
226
|
+
# <tt>content_type</tt>: content type string to send in the header, e.g. 'text/html'.
|
|
227
|
+
#
|
|
228
|
+
# +key+ is the key for the object: used as the filename if the file is downloaded; defaults to
|
|
229
|
+
# 'download' if not set. If you use a path (e.g. '/home/you/photos/me.jpg'), just the last part
|
|
230
|
+
# ('me.jpg') is used as the name of the download file.
|
|
231
|
+
#
|
|
232
|
+
# <tt>render_as_attachment</tt>: set to true if you want to add a content disposition header
|
|
233
|
+
# which enables the object to be downloaded, rather than shown inline, when fetched by a browser.
|
|
234
|
+
def content_headers(content_type, key='download', render_as_attachment=false)
|
|
235
|
+
headers = {}
|
|
236
|
+
|
|
237
|
+
headers['Content-Type'] = content_type || 'text/plain'
|
|
238
|
+
mime_type = MIME::Types[content_type][0]
|
|
239
|
+
if mime_type
|
|
240
|
+
headers['Content-Transfer-Encoding'] = 'binary' if mime_type.binary?
|
|
241
|
+
end
|
|
242
|
+
if render_as_attachment
|
|
243
|
+
headers['Content-Disposition'] = "attachment; filename=#{File.basename(key)}"
|
|
244
|
+
end
|
|
245
|
+
|
|
246
|
+
headers
|
|
247
|
+
end
|
|
248
|
+
|
|
249
|
+
# Build the request header which applies a canned ACL.
#
# +canned_acl+: one of the ACL names in CANNED_ACLS, or nil for no header.
#
# Raises S3Exception::UnsupportedCannedACL for an unrecognised ACL name.
# Returns a (possibly empty) hash of header name => value pairs.
def canned_acl_header(canned_acl)
  return {} if canned_acl.nil?
  unless CANNED_ACLS.include?(canned_acl)
    raise S3Exception::UnsupportedCannedACL, "The canned ACL #{canned_acl} is not supported"
  end
  { AWS_HEADER_PREFIX + 'acl' => canned_acl }
end
|
|
260
|
+
|
|
261
|
+
# Guess the MIME type of +file_name+ from its extension.
# Falls back to the 'text/plain' type when no match is found.
def guess_mime_type(file_name)
  MIME::Types.type_for(file_name)[0] || MIME::Types['text/plain'][0]
end
|
|
268
|
+
|
|
269
|
+
# Ensure that a bucket_name is well-formed (no leading or trailing slash).
#
# +bucket_name+: candidate bucket name (nil is treated as an empty, valid name).
#
# Raises S3Exception::MalformedBucketName if the name has a leading or
# trailing slash; otherwise returns true (the original returned nil on
# success, which made the predicate falsy even for valid names).
def bucket_name_valid?(bucket_name)
  name = bucket_name.to_s
  if name.start_with?('/') || name.end_with?('/')
    raise S3Exception::MalformedBucketName, "Bucket name cannot have a leading or trailing slash"
  end
  true
end
|
|
275
|
+
|
|
276
|
+
# Turn a hash of name/value pairs into a querystring fragment.
# Names may be strings or symbols; a nil value yields a bare name with
# no '=value' part. Values are CGI-escaped.
#
# Returns the querystring (empty string for no pairs).
def generate_querystring(pairs=nil)
  pairs ||= {}
  return '' if pairs.empty?
  pairs.map do |name, value|
    value.nil? ? name.to_s : "#{name}=#{CGI::escape(value.to_s)}"
  end.join('&')
end
|
|
293
|
+
|
|
294
|
+
# Generate the request path for a bucket and/or key.
#
# +options+:
# * <tt>:bucket => 'my-bucket'</tt>: include this bucket in the path
#   (skipped when :subdomain is set, as the bucket then lives in the hostname)
# * <tt>:key => 'my-key'</tt>: append this key to the path
# * <tt>:querystring => {'acl' => nil, 'page' => 2, ...}</tt>: extra
#   querystring pairs (these must be part of the path before signing URLs)
# * <tt>:subdomain => true</tt>: omit the bucket name from the path
# * <tt>:acl => true</tt>: put ?acl at the front of the querystring
# * <tt>:logging => true</tt>: put ?logging at the front of the querystring
def s3_path(options={})
  params = options[:querystring] || {}
  # ?acl / ?logging must come first in the querystring; :acl wins if both set.
  if options[:acl]
    params = {:acl => nil}.merge(params)
  elsif options[:logging]
    params = {:logging => nil}.merge(params)
  end
  qstring = generate_querystring(params)

  path = '/'
  path += "#{options[:bucket]}/" if options[:bucket] && !options[:subdomain]
  path += options[:key] if options[:key]
  path += "?#{qstring}" unless qstring == ''
  path
end
|
|
325
|
+
|
|
326
|
+
# Produce a URL for an S3 bucket or object.
#
# Delegates to s3_authenticated_url when :authenticated plus both :access
# and :secret options are supplied, otherwise to s3_public_url; all other
# +options+ (:bucket, :key, :use_ssl, :subdomain, :path, :querystring,
# :acl, :logging, :expires) are passed straight through — see those
# methods and s3_path for details. Note that the bucket-as-subdomain form
# is disabled for SSL and for authenticated URLs.
def s3_url(options={})
  # Subdomain-style hosts don't work over SSL.
  options[:subdomain] = false if options[:use_ssl]

  access = options[:access]
  secret = options[:secret]
  if options[:authenticated] && access && secret
    # Authenticated URLs never use the bucket-as-subdomain form.
    options[:subdomain] = false
    s3_authenticated_url(access, secret, options)
  else
    s3_public_url(options)
  end
end
|
|
362
|
+
|
|
363
|
+
# Unauthenticated URL for a bucket/resource.
#
# +options+ are as for s3_url (only :access and :secret are ignored).
# When no :path option is given, one is generated via s3_path from any
# :bucket / :key options; :subdomain puts the bucket into the hostname.
def s3_public_url(options)
  scheme = options[:use_ssl] ? 'https' : 'http'
  path = options[:path] || s3_path(options)
  host = options[:subdomain] ? "#{options[:bucket]}.#{HOST}" : HOST
  "#{scheme}://#{host}#{path}"
end
|
|
378
|
+
|
|
379
|
+
# Build a GET-able URL which carries its authentication in the
# querystring (Signature, Expires, AWSAccessKeyId). Correctly signs
# ?acl and ?logging resources too, as the querystring flags are folded
# into the signed path by s3_path.
#
# +options+ are passed through to s3_path and s3_public_url; :expires
# (parsed by S33r.parse_expiry) sets when the URL stops working and
# defaults to now + S33r::DEFAULT_EXPIRY_SECS.
def s3_authenticated_url(aws_access_key, aws_secret_access_key, options={})
  # NOTE(review): sibling methods raise under the S3Exception:: namespace;
  # confirm the bare KeysIncomplete constant resolves in this scope.
  if aws_access_key.nil? or aws_secret_access_key.nil?
    raise KeysIncomplete, "You must supply both an AWS access key and secret access key to create an authenticated URL"
  end

  path = s3_path(options)
  expires = S33r.parse_expiry(options[:expires])

  # Sign the canonical representation of this GET request.
  signature = generate_signature(aws_secret_access_key,
    generate_canonical_string('GET', path, {}, expires))

  auth_params = generate_querystring({'Signature' => signature, 'Expires' => expires,
    'AWSAccessKeyId' => aws_access_key })

  # Reuse the already-built (and signed) path when forming the base URL,
  # then append the auth parameters with the right separator.
  options[:path] = path
  url = s3_public_url(options)
  joiner = url.include?('?') ? '&' : '?'
  url + joiner + auth_params
end
|
|
405
|
+
|
|
406
|
+
# Return a copy of +hsh+ with every key converted to a Symbol.
# Values are left untouched; the original hash is not modified.
def self.keys_to_symbols(hsh)
  hsh.inject({}) do |acc, (key, value)|
    acc[key.to_sym] = value
    acc
  end
end
|
|
414
|
+
|
|
415
|
+
# Convert an expiry specification into seconds since the epoch.
#
# +expires+ may be:
# * an Integer: returned unchanged (already epoch seconds)
# * a String: parsed as a date/time
# * :far_flung_future: now plus FAR_FUTURE years ("forever")
# * nil (or anything else): now plus DEFAULT_EXPIRY_SECS
#
# Returns an Integer number of seconds since the epoch.
def self.parse_expiry(expires=nil)
  return expires if expires.kind_of?(Integer)
  return Time.parse(expires).to_i if expires.is_a?(String)

  now = Time.now.to_i
  if :far_flung_future == expires
    # FAR_FUTURE years out -- same as forever in computer terms.
    (now + (60 * 60 * 24 * 365.25 * FAR_FUTURE)).to_i
  else
    now + DEFAULT_EXPIRY_SECS
  end
end
|
|
440
|
+
|
|
441
|
+
# Remove the namespace declaration from S3 XML response bodies (libxml
# isn't fond of it).
#
# +xml_in+: raw XML string as returned by S3.
#
# Returns the XML with the xmlns="<S3 response namespace>" attribute removed.
def self.remove_namespace(xml_in)
  # Regexp.escape quotes every regex metacharacter; the original only
  # escaped '/', which left '.' in the URI matching any character.
  xml_in.gsub(/ xmlns="#{Regexp.escape(S33r::RESPONSE_NAMESPACE_URI)}"/, '')
end
|
|
447
|
+
end
|