scashin133-s3 0.3.8
- data/.gitignore +7 -0
- data/Gemfile +4 -0
- data/Gemfile.lock +24 -0
- data/LICENSE +20 -0
- data/README.rdoc +22 -0
- data/Rakefile +21 -0
- data/extra/s3_attachment_fu.rb +159 -0
- data/extra/s3_paperclip.rb +157 -0
- data/lib/s3/bucket.rb +172 -0
- data/lib/s3/buckets_extension.rb +27 -0
- data/lib/s3/connection.rb +222 -0
- data/lib/s3/exceptions.rb +110 -0
- data/lib/s3/object.rb +251 -0
- data/lib/s3/objects_extension.rb +37 -0
- data/lib/s3/parser.rb +52 -0
- data/lib/s3/request.rb +31 -0
- data/lib/s3/service.rb +82 -0
- data/lib/s3/signature.rb +229 -0
- data/lib/s3/version.rb +3 -0
- data/lib/s3.rb +27 -0
- data/s3.gemspec +29 -0
- data/test/bucket_test.rb +215 -0
- data/test/connection_test.rb +214 -0
- data/test/object_test.rb +187 -0
- data/test/service_test.rb +111 -0
- data/test/signature_test.rb +218 -0
- data/test/test_helper.rb +3 -0
- metadata +156 -0
data/lib/s3/connection.rb
ADDED
@@ -0,0 +1,222 @@
module S3

  # Class responsible for handling connections to amazon hosts
  class Connection
    include Parser

    attr_accessor :access_key_id, :secret_access_key, :use_ssl, :timeout, :debug, :proxy
    alias :use_ssl? :use_ssl

    # Creates new connection object.
    #
    # ==== Options
    # * <tt>:access_key_id</tt> - Access key id (REQUIRED)
    # * <tt>:secret_access_key</tt> - Secret access key (REQUIRED)
    # * <tt>:use_ssl</tt> - Use https or http protocol (false by
    #   default)
    # * <tt>:debug</tt> - Display debug information on the STDOUT
    #   (false by default)
    # * <tt>:timeout</tt> - Timeout to use by the Net::HTTP object
    #   (60 by default)
    # * <tt>:proxy</tt> - Hash for Net::HTTP Proxy settings
    #   { :host => "proxy.mydomain.com", :port => "80", :user => "user_a", :password => "secret" }
    # * <tt>:chunk_size</tt> - Size of a chunk when streaming
    #   (1048576 (1 MiB) by default)
    def initialize(options = {})
      @access_key_id = options.fetch(:access_key_id)
      @secret_access_key = options.fetch(:secret_access_key)
      @use_ssl = options.fetch(:use_ssl, false)
      @debug = options.fetch(:debug, false)
      @timeout = options.fetch(:timeout, 60)
      @proxy = options.fetch(:proxy, nil)
      @chunk_size = options.fetch(:chunk_size, 1048576)
    end

    # Makes request with given HTTP method, sets missing parameters,
    # adds signature to request header and returns response object
    # (Net::HTTPResponse)
    #
    # ==== Parameters
    # * <tt>method</tt> - HTTP Method symbol, can be <tt>:get</tt>,
    #   <tt>:put</tt>, <tt>:delete</tt>
    #
    # ==== Options
    # * <tt>:host</tt> - Hostname to connect to, defaults
    #   to <tt>s3.amazonaws.com</tt>
    # * <tt>:path</tt> - Path to send request to (REQUIRED)
    # * <tt>:body</tt> - Request body, only meaningful for
    #   <tt>:put</tt> request
    # * <tt>:params</tt> - Parameters to add to query string for
    #   request, can be String or Hash
    # * <tt>:headers</tt> - Hash of header fields to add to the request
    #   header
    #
    # ==== Returns
    # Net::HTTPResponse object -- response from the server
    def request(method, options)
      host = options.fetch(:host, HOST)
      path = options.fetch(:path)
      body = options.fetch(:body, nil)
      params = options.fetch(:params, {})
      headers = options.fetch(:headers, {})

      if params
        params = params.is_a?(String) ? params : self.class.parse_params(params)
        path << "?#{params}"
      end

      path = URI.escape(path)
      request = Request.new(@chunk_size, method.to_s.upcase, !!body, method.to_s.upcase != "HEAD", path)

      headers = self.class.parse_headers(headers)
      headers.each do |key, value|
        request[key] = value
      end

      if body
        if body.respond_to?(:read)
          request.body_stream = body
        else
          request.body = body
        end
        request.content_length = body.respond_to?(:lstat) ? body.stat.size : body.size
      end

      send_request(host, request)
    end

    # Helper function to parse parameters and create a single string of
    # params added to the query string
    #
    # ==== Parameters
    # * <tt>params</tt> - Hash of parameters
    #
    # ==== Returns
    # String -- containing all parameters joined in one params string,
    # i.e. <tt>param1=val&param2&param3=0</tt>
    def self.parse_params(params)
      interesting_keys = [:max_keys, :prefix, :marker, :delimiter, :location]

      result = []
      params.each do |key, value|
        if interesting_keys.include?(key)
          parsed_key = key.to_s.gsub("_", "-")
          case value
          when nil
            result << parsed_key
          else
            result << "#{parsed_key}=#{value}"
          end
        end
      end
      result.join("&")
    end

    # Helper function to change headers from symbols to their correct
    # form (i.e. with '-' instead of '_')
    #
    # ==== Parameters
    # * <tt>headers</tt> - Hash of pairs <tt>headername => value</tt>,
    #   where value can be Range (for Range header) or any other value
    #   which can be translated to string
    #
    # ==== Returns
    # Hash of headers translated from symbol to string, containing
    # only interesting headers
    def self.parse_headers(headers)
      interesting_keys = [:content_type, :cache_control, :x_amz_acl, :x_amz_storage_class, :range,
                          :if_modified_since, :if_unmodified_since,
                          :if_match, :if_none_match,
                          :content_disposition, :content_encoding,
                          :x_amz_copy_source, :x_amz_metadata_directive,
                          :x_amz_copy_source_if_match,
                          :x_amz_copy_source_if_none_match,
                          :x_amz_copy_source_if_unmodified_since,
                          :x_amz_copy_source_if_modified_since]

      parsed_headers = {}
      if headers
        headers.each do |key, value|
          if interesting_keys.include?(key)
            parsed_key = key.to_s.gsub("_", "-")
            parsed_value = value
            case value
            when Range
              parsed_value = "bytes=#{value.first}-#{value.last}"
            end
            parsed_headers[parsed_key] = parsed_value
          end
        end
      end
      parsed_headers
    end

    private

    def port
      use_ssl ? 443 : 80
    end

    def proxy_settings
      @proxy.values_at(:host, :port, :user, :password) unless @proxy.nil? || @proxy.empty?
    end

    def http(host)
      http = Net::HTTP.new(host, port, *proxy_settings)
      http.set_debug_output(STDOUT) if @debug
      http.use_ssl = @use_ssl
      http.verify_mode = OpenSSL::SSL::VERIFY_NONE if @use_ssl
      http.read_timeout = @timeout if @timeout
      http
    end

    def send_request(host, request, skip_authorization = false)
      response = http(host).start do |http|
        host = http.address

        request["Date"] ||= Time.now.httpdate

        if request.body
          request["Content-Type"] ||= "application/octet-stream"
          request["Content-MD5"] = Base64.encode64(Digest::MD5.digest(request.body)).chomp unless request.body.empty?
        end

        unless skip_authorization
          request["Authorization"] = Signature.generate(:host => host,
                                                        :request => request,
                                                        :access_key_id => access_key_id,
                                                        :secret_access_key => secret_access_key)
        end

        http.request(request)
      end

      if response.code.to_i == 307
        if response.body
          doc = Document.new response.body
          send_request(doc.elements["Error"].elements["Endpoint"].text, request, true)
        end
      else
        handle_response(response)
      end
    end

    def handle_response(response)
      case response.code.to_i
      when 200...300
        response
      when 300...600
        if response.body.nil? || response.body.empty?
          raise Error::ResponseError.new(nil, response)
        else
          code, message = parse_error(response.body)
          raise Error::ResponseError.exception(code).new(message, response)
        end
      else
        raise(ConnectionError.new(response, "Unknown response code: #{response.code}"))
      end
      response
    end
  end
end
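
For orientation, here is a minimal usage sketch of the Connection class above. It assumes the gem has been loaded with require "s3" so that Request, Signature and the HOST constant are available; the credentials, bucket name and path are placeholders.

require "s3"

connection = S3::Connection.new(
  :access_key_id     => "AKIA...",   # placeholder credentials
  :secret_access_key => "secret",
  :use_ssl           => true,
  :timeout           => 30
)

# Issue a GET against a hypothetical bucket path; :max_keys is turned into
# the "max-keys" query parameter by parse_params. The return value is a
# Net::HTTPResponse; 3xx-5xx responses with an <Error> body raise a
# descendant of S3::Error::ResponseError instead.
response = connection.request(:get, :path => "/my-bucket/", :params => { :max_keys => 10 })
puts response.code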
data/lib/s3/exceptions.rb
ADDED
@@ -0,0 +1,110 @@
module S3
  module Error

    # All responses with a code between 300 and 599 that contain an
    # <Error></Error> body are wrapped in an ErrorResponse which
    # contains an Error object. This Error class generates a custom
    # exception with the name of the xml Error and its message. All
    # such runtime generated exception classes descend from
    # ResponseError and contain the ErrorResponse object so that all
    # code that makes a request can rescue ResponseError and get
    # access to the ErrorResponse.
    class ResponseError < StandardError
      attr_reader :response

      # Creates new S3::ResponseError.
      #
      # ==== Parameters
      # * <tt>message</tt> - what went wrong
      # * <tt>response</tt> - Net::HTTPResponse object or nil
      def initialize(message, response)
        @response = response
        super(message)
      end

      # Factory for all other Exception classes in module, each for
      # every error response available from AmazonAWS
      #
      # ==== Parameters
      # * <tt>code</tt> - Code name of exception
      #
      # ==== Returns
      # Descendant of ResponseError suitable for that exception code
      # or ResponseError class if no class found
      def self.exception(code)
        S3::Error.const_get(code)
      rescue NameError
        ResponseError
      end
    end

    #:stopdoc:

    class AccessDenied < ResponseError; end
    class AccountProblem < ResponseError; end
    class AmbiguousGrantByEmailAddress < ResponseError; end
    class BadDigest < ResponseError; end
    class BucketAlreadyExists < ResponseError; end
    class BucketAlreadyOwnedByYou < ResponseError; end
    class BucketNotEmpty < ResponseError; end
    class CredentialsNotSupported < ResponseError; end
    class CrossLocationLoggingProhibited < ResponseError; end
    class EntityTooSmall < ResponseError; end
    class EntityTooLarge < ResponseError; end
    class ExpiredToken < ResponseError; end
    class IncompleteBody < ResponseError; end
    class IncorrectNumberOfFilesInPostRequestPOST < ResponseError; end
    class InlineDataTooLarge < ResponseError; end
    class InternalError < ResponseError; end
    class InvalidAccessKeyId < ResponseError; end
    class InvalidAddressingHeader < ResponseError; end
    class InvalidArgument < ResponseError; end
    class InvalidBucketName < ResponseError; end
    class InvalidDigest < ResponseError; end
    class InvalidLocationConstraint < ResponseError; end
    class InvalidPayer < ResponseError; end
    class InvalidPolicyDocument < ResponseError; end
    class InvalidRange < ResponseError; end
    class InvalidSecurity < ResponseError; end
    class InvalidSOAPRequest < ResponseError; end
    class InvalidStorageClass < ResponseError; end
    class InvalidTargetBucketForLogging < ResponseError; end
    class InvalidToken < ResponseError; end
    class InvalidURI < ResponseError; end
    class KeyTooLong < ResponseError; end
    class MalformedACLError < ResponseError; end
    class MalformedACLError < ResponseError; end
    class MalformedPOSTRequest < ResponseError; end
    class MalformedXML < ResponseError; end
    class MaxMessageLengthExceeded < ResponseError; end
    class MaxPostPreDataLengthExceededErrorYour < ResponseError; end
    class MetadataTooLarge < ResponseError; end
    class MethodNotAllowed < ResponseError; end
    class MissingAttachment < ResponseError; end
    class MissingContentLength < ResponseError; end
    class MissingRequestBodyError < ResponseError; end
    class MissingSecurityElement < ResponseError; end
    class MissingSecurityHeader < ResponseError; end
    class NoLoggingStatusForKey < ResponseError; end
    class NoSuchBucket < ResponseError; end
    class NoSuchKey < ResponseError; end
    class NotImplemented < ResponseError; end
    class NotSignedUp < ResponseError; end
    class OperationAborted < ResponseError; end
    class PermanentRedirect < ResponseError; end
    class PreconditionFailed < ResponseError; end
    class Redirect < ResponseError; end
    class RequestIsNotMultiPartContent < ResponseError; end
    class RequestTimeout < ResponseError; end
    class RequestTimeTooSkewed < ResponseError; end
    class RequestTorrentOfBucketError < ResponseError; end
    class SignatureDoesNotMatch < ResponseError; end
    class SlowDown < ResponseError; end
    class TemporaryRedirect < ResponseError; end
    class TokenRefreshRequired < ResponseError; end
    class TooManyBuckets < ResponseError; end
    class UnexpectedContent < ResponseError; end
    class UnresolvableGrantByEmailAddress < ResponseError; end
    class UserKeyMustBeSpecified < ResponseError; end
  end
end
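
A short sketch of how the ResponseError factory above is meant to be used: the code name parsed out of an <Error> body is mapped to a concrete exception class, with ResponseError itself as the fallback for unknown codes. The raised instance here is hand-built for illustration.

klass = S3::Error::ResponseError.exception("NoSuchKey")   # => S3::Error::NoSuchKey

begin
  raise klass.new("The specified key does not exist.", nil)
rescue S3::Error::ResponseError => e
  # Any generated error class can be rescued via the common ancestor,
  # and the original Net::HTTPResponse (nil in this sketch) stays reachable.
  puts "#{e.class}: #{e.message} (response: #{e.response.inspect})"
end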
data/lib/s3/object.rb
ADDED
@@ -0,0 +1,251 @@
module S3

  # Class responsible for handling objects stored in S3 buckets
  class Object
    include Parser
    extend Forwardable

    attr_accessor :content_type, :content_disposition, :content_encoding, :cache_control
    attr_reader :last_modified, :etag, :size, :bucket, :key, :acl, :storage_class
    attr_writer :content

    def_instance_delegators :bucket, :name, :service, :bucket_request, :vhost?, :host, :path_prefix
    def_instance_delegators :service, :protocol, :port, :secret_access_key
    private_class_method :new

    # Compares the object with another object. Returns true if the keys
    # of the objects are the same, and both have the same buckets (see
    # Bucket equality)
    def ==(other)
      other.equal?(self) || (other.instance_of?(self.class) && self.key == other.key && self.bucket == other.bucket)
    end

    # Returns full key of the object: e.g. <tt>bucket-name/object/key.ext</tt>
    def full_key
      [name, key].join("/")
    end

    # Assigns a new +key+ to the object, raises ArgumentError if the given
    # key is not a valid key name
    def key=(key)
      raise ArgumentError.new("Invalid key name: #{key}") unless key_valid?(key)
      @key ||= key
    end

    # Assigns a new ACL to the object. Please note that the ACL is not
    # retrieved from the server and is set to "public-read" by default.
    #
    # ==== Example
    #   object.acl = :public_read
    def acl=(acl)
      @acl = acl.to_s.gsub("_", "-") if acl
    end

    # Assigns a new storage class (RRS) to the object. Please note
    # that the storage class is not retrieved from the server and is
    # set to "STANDARD" by default.
    #
    # ==== Example
    #   object.storage_class = :reduced_redundancy
    def storage_class=(storage_class)
      @storage_class = storage_class.to_s.upcase if storage_class
    end

    # Retrieves the object from the server. Method is used to download
    # object information only (content type, size and so on). It does
    # NOT download the content of the object (use the #content method
    # to do it).
    def retrieve
      object_headers
      self
    end

    # Retrieves the object from the server, returns true if the object
    # exists or false otherwise. Uses the #retrieve method, but catches the
    # S3::Error::NoSuchKey exception and returns false when it happens
    def exists?
      retrieve
      true
    rescue Error::NoSuchKey
      false
    end

    # Downloads the content of the object and caches it. Pass true
    # to clear the cache and download the object again.
    def content(reload = false)
      get_object if reload or @content.nil?
      @content
    end

    # Saves the object, returns true if successful.
    def save
      put_object
      true
    end

    # Copies the file to another key and/or bucket.
    #
    # ==== Options
    # * <tt>:key</tt> - New key to store object in
    # * <tt>:bucket</tt> - New bucket to store object in (instance of
    #   S3::Bucket)
    # * <tt>:acl</tt> - ACL of the copied object (default:
    #   "public-read")
    # * <tt>:content_type</tt> - Content type of the copied object
    #   (default: "application/octet-stream")
    def copy(options = {})
      copy_object(options)
    end

    # Destroys the file on the server
    def destroy
      delete_object
      true
    end

    # Returns Object's URL using the protocol specified in the service,
    # e.g. <tt>http://domain.com.s3.amazonaws.com/key/with/path.extension</tt>
    def url
      URI.escape("#{protocol}#{host}/#{path_prefix}#{key}")
    end

    # Returns a temporary url to the object that expires at the
    # timestamp given. Defaults to a one hour expire time.
    def temporary_url(expires_at = Time.now + 3600)
      signature = Signature.generate_temporary_url_signature(:bucket => name,
                                                             :resource => key,
                                                             :expires_at => expires_at,
                                                             :secret_access_key => secret_access_key)

      "#{url}?AWSAccessKeyId=#{self.bucket.service.access_key_id}&Expires=#{expires_at.to_i.to_s}&Signature=#{signature}"
    end

    # Returns Object's CNAME URL (without <tt>s3.amazonaws.com</tt>
    # suffix) using the protocol specified in the Service,
    # e.g. <tt>http://domain.com/key/with/path.extension</tt>. (You
    # have to set the CNAME in your DNS before using the CNAME URL
    # schema.)
    def cname_url
      URI.escape("#{protocol}#{name}/#{key}") if bucket.vhost?
    end

    def inspect #:nodoc:
      "#<#{self.class}:/#{name}/#{key}>"
    end

    private

    attr_writer :last_modified, :etag, :size, :original_key, :bucket

    def copy_object(options = {})
      key = options[:key] or raise ArgumentError, "No key given"
      raise ArgumentError.new("Invalid key name: #{key}") unless key_valid?(key)
      bucket = options[:bucket] || self.bucket

      headers = {}

      headers[:x_amz_acl] = options[:acl] || acl || "public-read"
      headers[:content_type] = options[:content_type] || content_type || "application/octet-stream"
      headers[:content_encoding] = options[:content_encoding] if options[:content_encoding]
      headers[:content_disposition] = options[:content_disposition] if options[:content_disposition]
      headers[:cache_control] = options[:cache_control] if options[:cache_control]
      headers[:x_amz_copy_source] = full_key
      headers[:x_amz_metadata_directive] = "REPLACE"
      headers[:x_amz_copy_source_if_match] = options[:if_match] if options[:if_match]
      headers[:x_amz_copy_source_if_none_match] = options[:if_none_match] if options[:if_none_match]
      headers[:x_amz_copy_source_if_unmodified_since] = options[:if_unmodified_since] if options[:if_unmodified_since]
      headers[:x_amz_copy_source_if_modified_since] = options[:if_modified_since] if options[:if_modified_since]

      response = bucket.send(:bucket_request, :put, :path => key, :headers => headers)
      object_attributes = parse_copy_object_result(response.body)

      object = Object.send(:new, bucket, object_attributes.merge(:key => key, :size => size))
      object.acl = response["x-amz-acl"]
      object.content_type = response["content-type"]
      object.content_encoding = response["content-encoding"]
      object.content_disposition = response["content-disposition"]
      object.cache_control = response["cache-control"]
      object
    end

    def get_object(options = {})
      response = object_request(:get, options)
      parse_headers(response)
    end

    def object_headers(options = {})
      response = object_request(:head, options)
      parse_headers(response)
    rescue Error::ResponseError => e
      if e.response.code.to_i == 404
        raise Error::ResponseError.exception("NoSuchKey").new("The specified key does not exist.", nil)
      else
        raise e
      end
    end

    def put_object
      response = object_request(:put, :body => content, :headers => dump_headers)
      parse_headers(response)
    end

    def delete_object(options = {})
      object_request(:delete)
    end

    def initialize(bucket, options = {})
      self.bucket = bucket
      self.key = options[:key]
      self.last_modified = options[:last_modified]
      self.etag = options[:etag]
      self.size = options[:size]
      self.cache_control = options[:cache_control]
    end

    def object_request(method, options = {})
      bucket_request(method, options.merge(:path => key))
    end

    def last_modified=(last_modified)
      @last_modified = Time.parse(last_modified) if last_modified
    end

    def etag=(etag)
      @etag = etag[1..-2] if etag
    end

    def key_valid?(key)
      if (key.nil? or key.empty? or key =~ %r#//#)
        false
      else
        true
      end
    end

    def dump_headers
      headers = {}
      headers[:x_amz_acl] = @acl || "public-read"
      headers[:x_amz_storage_class] = @storage_class || "STANDARD"
      headers[:content_type] = @content_type || "application/octet-stream"
      headers[:content_encoding] = @content_encoding if @content_encoding
      headers[:content_disposition] = @content_disposition if @content_disposition
      headers[:cache_control] = @cache_control if @cache_control
      headers
    end

    def parse_headers(response)
      self.etag = response["etag"] if response.key?("etag")
      self.content_type = response["content-type"] if response.key?("content-type")
      self.content_disposition = response["content-disposition"] if response.key?("content-disposition")
      self.cache_control = response["cache-control"] if response.key?("cache-control")
      self.content_encoding = response["content-encoding"] if response.key?("content-encoding")
      self.last_modified = response["last-modified"] if response.key?("last-modified")
      if response.key?("content-range")
        self.size = response["content-range"].sub(/[^\/]+\//, "").to_i
      else
        self.size = response["content-length"]
        self.content = response.body
      end
    end
  end
end
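
A sketch of the typical object lifecycle built on the class above. Object.new is private, so the instance is assumed to come from a bucket's objects proxy (the build method defined in objects_extension.rb below); the bucket itself would be obtained from S3::Service, which is not part of this diff, and the key and file name are placeholders.

object = bucket.objects.build("photos/cat.jpg")   # hypothetical key on an existing bucket
object.content       = File.read("cat.jpg")
object.content_type  = "image/jpeg"
object.storage_class = :reduced_redundancy
object.save                                       # PUTs the content with the dumped headers

puts object.url                                   # public URL built from protocol/host/path_prefix
puts object.temporary_url(Time.now + 600)         # signed URL valid for 10 minutes
object.destroy                                    # DELETEs the key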
data/lib/s3/objects_extension.rb
ADDED
@@ -0,0 +1,37 @@
module S3
  module ObjectsExtension
    # Builds the object in the bucket with the given key
    def build(key)
      Object.send(:new, proxy_owner, :key => key)
    end

    # Finds first object with given name or raises the exception if
    # not found
    def find_first(name)
      object = build(name)
      object.retrieve
    end
    alias :find :find_first

    # Finds the objects in the bucket.
    #
    # ==== Options
    # * <tt>:prefix</tt> - Limits the response to keys which begin
    #   with the indicated prefix
    # * <tt>:marker</tt> - Indicates where in the bucket to begin
    #   listing
    # * <tt>:max_keys</tt> - The maximum number of keys you'd like
    #   to see
    # * <tt>:delimiter</tt> - Causes keys that contain the same
    #   string between the prefix and the first occurrence of the
    #   delimiter to be rolled up into a single result element
    def find_all(options = {})
      proxy_owner.send(:list_bucket, options)
    end

    # Destroys all keys in the bucket
    def destroy_all
      proxy_target.each { |object| object.destroy }
    end
  end
end
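
A short sketch of the finder methods above, under the same assumption as before: bucket.objects returns a collection proxy extended with ObjectsExtension, and the key and prefix are placeholders.

first = bucket.objects.find_first("photos/cat.jpg")       # raises S3::Error::NoSuchKey if missing
page  = bucket.objects.find_all(:prefix => "photos/", :max_keys => 100)
bucket.objects.destroy_all                                # deletes every object in the bucket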
data/lib/s3/parser.rb
ADDED
@@ -0,0 +1,52 @@
module S3
  module Parser
    include REXML

    def rexml_document(xml)
      xml.force_encoding(Encoding::UTF_8) if xml.respond_to? :force_encoding
      Document.new(xml)
    end

    def parse_list_all_my_buckets_result(xml)
      names = []
      rexml_document(xml).elements.each("ListAllMyBucketsResult/Buckets/Bucket/Name") { |e| names << e.text }
      names
    end

    def parse_location_constraint(xml)
      rexml_document(xml).elements["LocationConstraint"].text
    end

    def parse_list_bucket_result(xml)
      objects_attributes = []
      rexml_document(xml).elements.each("ListBucketResult/Contents") do |e|
        object_attributes = {}
        object_attributes[:key] = e.elements["Key"].text
        object_attributes[:etag] = e.elements["ETag"].text
        object_attributes[:last_modified] = e.elements["LastModified"].text
        object_attributes[:size] = e.elements["Size"].text
        objects_attributes << object_attributes
      end
      objects_attributes
    end

    def parse_copy_object_result(xml)
      object_attributes = {}
      document = rexml_document(xml)
      object_attributes[:etag] = document.elements["CopyObjectResult/ETag"].text
      object_attributes[:last_modified] = document.elements["CopyObjectResult/LastModified"].text
      object_attributes
    end

    def parse_error(xml)
      document = rexml_document(xml)
      code = document.elements["Error/Code"].text
      message = document.elements["Error/Message"].text
      [code, message]
    end

    def parse_is_truncated(xml)
      rexml_document(xml).elements["ListBucketResult/IsTruncated"].text == 'true'
    end
  end
end
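
Finally, a small sketch exercising Parser#parse_error on a hand-written error document (the XML below is illustrative, not a captured AWS response); it assumes require "s3" has loaded REXML along with the module.

require "s3"

# Mix the parser into a throwaway class so its instance methods can be called.
class ErrorParser
  include S3::Parser
end

xml = <<-XML
<Error>
  <Code>NoSuchKey</Code>
  <Message>The specified key does not exist.</Message>
</Error>
XML

code, message = ErrorParser.new.parse_error(xml)
puts "#{code}: #{message}"   # => NoSuchKey: The specified key does not exist.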