aliyun-oss-ruby-sdk 0.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +95 -0
- data/README.md +423 -0
- data/examples/aliyun/oss/bucket.rb +144 -0
- data/examples/aliyun/oss/callback.rb +61 -0
- data/examples/aliyun/oss/object.rb +182 -0
- data/examples/aliyun/oss/resumable_download.rb +42 -0
- data/examples/aliyun/oss/resumable_upload.rb +49 -0
- data/examples/aliyun/oss/streaming.rb +124 -0
- data/examples/aliyun/oss/using_sts.rb +48 -0
- data/examples/aliyun/sts/assume_role.rb +59 -0
- data/lib/aliyun_sdk/common.rb +6 -0
- data/lib/aliyun_sdk/common/exception.rb +18 -0
- data/lib/aliyun_sdk/common/logging.rb +46 -0
- data/lib/aliyun_sdk/common/struct.rb +56 -0
- data/lib/aliyun_sdk/oss.rb +16 -0
- data/lib/aliyun_sdk/oss/bucket.rb +661 -0
- data/lib/aliyun_sdk/oss/client.rb +106 -0
- data/lib/aliyun_sdk/oss/config.rb +39 -0
- data/lib/aliyun_sdk/oss/download.rb +255 -0
- data/lib/aliyun_sdk/oss/exception.rb +108 -0
- data/lib/aliyun_sdk/oss/http.rb +338 -0
- data/lib/aliyun_sdk/oss/iterator.rb +92 -0
- data/lib/aliyun_sdk/oss/multipart.rb +74 -0
- data/lib/aliyun_sdk/oss/object.rb +15 -0
- data/lib/aliyun_sdk/oss/protocol.rb +1499 -0
- data/lib/aliyun_sdk/oss/struct.rb +208 -0
- data/lib/aliyun_sdk/oss/upload.rb +238 -0
- data/lib/aliyun_sdk/oss/util.rb +89 -0
- data/lib/aliyun_sdk/sts.rb +9 -0
- data/lib/aliyun_sdk/sts/client.rb +38 -0
- data/lib/aliyun_sdk/sts/config.rb +22 -0
- data/lib/aliyun_sdk/sts/exception.rb +53 -0
- data/lib/aliyun_sdk/sts/protocol.rb +130 -0
- data/lib/aliyun_sdk/sts/struct.rb +64 -0
- data/lib/aliyun_sdk/sts/util.rb +48 -0
- data/lib/aliyun_sdk/version.rb +7 -0
- data/spec/aliyun/oss/bucket_spec.rb +597 -0
- data/spec/aliyun/oss/client/bucket_spec.rb +554 -0
- data/spec/aliyun/oss/client/client_spec.rb +297 -0
- data/spec/aliyun/oss/client/resumable_download_spec.rb +220 -0
- data/spec/aliyun/oss/client/resumable_upload_spec.rb +413 -0
- data/spec/aliyun/oss/http_spec.rb +83 -0
- data/spec/aliyun/oss/multipart_spec.rb +686 -0
- data/spec/aliyun/oss/object_spec.rb +785 -0
- data/spec/aliyun/oss/service_spec.rb +142 -0
- data/spec/aliyun/oss/util_spec.rb +50 -0
- data/spec/aliyun/sts/client_spec.rb +150 -0
- data/spec/aliyun/sts/util_spec.rb +39 -0
- data/tests/config.rb +31 -0
- data/tests/test_content_encoding.rb +54 -0
- data/tests/test_content_type.rb +95 -0
- data/tests/test_custom_headers.rb +70 -0
- data/tests/test_encoding.rb +77 -0
- data/tests/test_large_file.rb +66 -0
- data/tests/test_multipart.rb +97 -0
- data/tests/test_object_acl.rb +49 -0
- data/tests/test_object_key.rb +68 -0
- data/tests/test_object_url.rb +69 -0
- data/tests/test_resumable.rb +40 -0
- metadata +240 -0
@@ -0,0 +1,15 @@
|
|
1
|
+
# -*- encoding: utf-8 -*-

module AliyunSDK
  module OSS

    ##
    # Object represents a single object stored in OSS.
    #
    class Object < Common::Struct::Base

      attrs :key, :type, :size, :etag, :metas, :last_modified, :headers

    end # Object
  end # OSS
end # Aliyun
|
@@ -0,0 +1,1499 @@
|
|
1
|
+
# -*- encoding: utf-8 -*-
|
2
|
+
|
3
|
+
require 'rest-client'
|
4
|
+
require 'nokogiri'
|
5
|
+
require 'time'
|
6
|
+
|
7
|
+
module AliyunSDK
|
8
|
+
module OSS
|
9
|
+
|
10
|
+
##
|
11
|
+
# Protocol implement the OSS Open API which is low-level. User
|
12
|
+
# should refer to {OSS::Client} for normal use.
|
13
|
+
#
|
14
|
+
class Protocol
|
15
|
+
|
16
|
+
STREAM_CHUNK_SIZE = 16 * 1024
|
17
|
+
CALLBACK_HEADER = 'x-oss-callback'
|
18
|
+
|
19
|
+
include Common::Logging
|
20
|
+
|
21
|
+
# Build a protocol endpoint from the client configuration.
# @param config [Config] the client configuration
def initialize(config)
  @http = HTTP.new(config)
  @config = config
end
|
25
|
+
|
26
|
+
# List all the buckets.
|
27
|
+
# @param opts [Hash] options
|
28
|
+
# @option opts [String] :prefix return only those buckets
|
29
|
+
# prefixed with it if specified
|
30
|
+
# @option opts [String] :marker return buckets after where it
|
31
|
+
# indicates (exclusively). All buckets are sorted by name
|
32
|
+
# alphabetically
|
33
|
+
# @option opts [Integer] :limit return only the first N
|
34
|
+
# buckets if specified
|
35
|
+
# @return [Array<Bucket>, Hash] the returned buckets and a
|
36
|
+
# hash including the next tokens, which includes:
|
37
|
+
# * :prefix [String] the prefix used
|
38
|
+
# * :delimiter [String] the delimiter used
|
39
|
+
# * :marker [String] the marker used
|
40
|
+
# * :limit [Integer] the limit used
|
41
|
+
# * :next_marker [String] marker to continue list buckets
|
42
|
+
# * :truncated [Boolean] whether there are more buckets to
|
43
|
+
# be returned
|
44
|
+
def list_buckets(opts = {})
  logger.info("Begin list buckets, options: #{opts}")

  # Only send query parameters the caller actually provided.
  query = {
    'prefix' => opts[:prefix],
    'marker' => opts[:marker],
    'max-keys' => opts[:limit]
  }.reject { |_, v| v.nil? }

  response = @http.get({}, {:query => query})
  doc = parse_xml(response.body)

  buckets = doc.css("Buckets Bucket").map do |node|
    Bucket.new(
      {
        :name => get_node_text(node, "Name"),
        :location => get_node_text(node, "Location"),
        :creation_time =>
          get_node_text(node, "CreationDate") { |t| Time.parse(t) }
      }, self
    )
  end

  # Collect pagination tokens present in the response body.
  tag_map = {
    :prefix => 'Prefix',
    :limit => 'MaxKeys',
    :marker => 'Marker',
    :next_marker => 'NextMarker',
    :truncated => 'IsTruncated'
  }
  more = tag_map.each_with_object({}) do |(key, tag), acc|
    text = get_node_text(doc.root, tag)
    acc[key] = text unless text.nil?
  end

  update_if_exists(
    more, {
      :limit => ->(x) { x.to_i },
      :truncated => ->(x) { x.to_bool }
    }
  )

  logger.info("Done list buckets, buckets: #{buckets}, more: #{more}")

  [buckets, more]
end
|
89
|
+
|
90
|
+
# Create a bucket
|
91
|
+
# @param name [String] the bucket name
|
92
|
+
# @param opts [Hash] options
|
93
|
+
# @option opts [String] :location the region where the bucket
|
94
|
+
# is located
|
95
|
+
# @example
|
96
|
+
# oss-cn-hangzhou
|
97
|
+
def create_bucket(name, opts = {})
  logger.info("Begin create bucket, name: #{name}, opts: #{opts}")

  location = opts[:location]
  # Only send a request body when a location constraint is given.
  body =
    if location
      Nokogiri::XML::Builder.new do |xml|
        xml.CreateBucketConfiguration {
          xml.LocationConstraint location
        }
      end.to_xml
    end

  @http.put({:bucket => name}, {:body => body})

  logger.info("Done create bucket")
end
|
115
|
+
|
116
|
+
# Put bucket acl
|
117
|
+
# @param name [String] the bucket name
|
118
|
+
# @param acl [String] the bucket acl
|
119
|
+
# @see OSS::ACL
|
120
|
+
def put_bucket_acl(name, acl)
  logger.info("Begin put bucket acl, name: #{name}, acl: #{acl}")

  # The ACL travels in a header; the body is intentionally empty.
  @http.put(
    {:bucket => name, :sub_res => {'acl' => nil}},
    {:headers => {'x-oss-acl' => acl}, :body => nil})

  logger.info("Done put bucket acl")
end
|
131
|
+
|
132
|
+
# Get bucket acl
|
133
|
+
# @param name [String] the bucket name
|
134
|
+
# @return [String] the acl of this bucket
|
135
|
+
def get_bucket_acl(name)
  logger.info("Begin get bucket acl, name: #{name}")

  response = @http.get({:bucket => name, :sub_res => {'acl' => nil}})
  doc = parse_xml(response.body)
  acl = get_node_text(doc.at_css("AccessControlList"), 'Grant')

  logger.info("Done get bucket acl")

  acl
end
|
147
|
+
|
148
|
+
# Put bucket logging settings
|
149
|
+
# @param name [String] the bucket name
|
150
|
+
# @param logging [BucketLogging] logging options
|
151
|
+
def put_bucket_logging(name, logging)
  logger.info("Begin put bucket logging, "\
              "name: #{name}, logging: #{logging}")

  # A target bucket is mandatory when logging is turned on.
  if logging.enabled? && !logging.target_bucket
    fail ClientError,
         "Must specify target bucket when enabling bucket logging."
  end

  xml_body = Nokogiri::XML::Builder.new do |xml|
    xml.BucketLoggingStatus {
      if logging.enabled?
        xml.LoggingEnabled {
          xml.TargetBucket logging.target_bucket
          xml.TargetPrefix logging.target_prefix if logging.target_prefix
        }
      end
    }
  end.to_xml

  @http.put(
    {:bucket => name, :sub_res => {'logging' => nil}},
    {:body => xml_body})

  logger.info("Done put bucket logging")
end
|
178
|
+
|
179
|
+
# Get bucket logging settings
|
180
|
+
# @param name [String] the bucket name
|
181
|
+
# @return [BucketLogging] logging options of this bucket
|
182
|
+
def get_bucket_logging(name)
  logger.info("Begin get bucket logging, name: #{name}")

  response = @http.get({:bucket => name, :sub_res => {'logging' => nil}})
  doc = parse_xml(response.body)

  # The presence of a target bucket means logging is enabled.
  enabled_node = doc.at_css("LoggingEnabled")
  opts = {
    :enable => false,
    :target_bucket => get_node_text(enabled_node, 'TargetBucket'),
    :target_prefix => get_node_text(enabled_node, 'TargetPrefix')
  }
  opts[:enable] = true if opts[:target_bucket]

  logger.info("Done get bucket logging")

  BucketLogging.new(opts)
end
|
202
|
+
|
203
|
+
# Delete bucket logging settings, a.k.a. disable bucket logging
|
204
|
+
# @param name [String] the bucket name
|
205
|
+
def delete_bucket_logging(name)
  logger.info("Begin delete bucket logging, name: #{name}")

  @http.delete({:bucket => name, :sub_res => {'logging' => nil}})

  logger.info("Done delete bucket logging")
end
|
213
|
+
|
214
|
+
# Put bucket website settings
|
215
|
+
# @param name [String] the bucket name
|
216
|
+
# @param website [BucketWebsite] the bucket website options
|
217
|
+
def put_bucket_website(name, website)
  logger.info("Begin put bucket website, "\
              "name: #{name}, website: #{website}")

  # The index document is required by the OSS API.
  unless website.index
    fail ClientError, "Must specify index to put bucket website."
  end

  xml_body = Nokogiri::XML::Builder.new do |xml|
    xml.WebsiteConfiguration {
      xml.IndexDocument {
        xml.Suffix website.index
      }
      if website.error
        xml.ErrorDocument {
          xml.Key website.error
        }
      end
    }
  end.to_xml

  @http.put(
    {:bucket => name, :sub_res => {'website' => nil}},
    {:body => xml_body})

  logger.info("Done put bucket website")
end
|
245
|
+
|
246
|
+
# Get bucket website settings
|
247
|
+
# @param name [String] the bucket name
|
248
|
+
# @return [BucketWebsite] the bucket website options
|
249
|
+
def get_bucket_website(name)
  logger.info("Begin get bucket website, name: #{name}")

  response = @http.get({:bucket => name, :sub_res => {'website' => nil}})
  doc = parse_xml(response.body)

  opts = {
    :enable => true,
    :index => get_node_text(doc.at_css('IndexDocument'), 'Suffix'),
    :error => get_node_text(doc.at_css('ErrorDocument'), 'Key')
  }

  logger.info("Done get bucket website")

  BucketWebsite.new(opts)
end
|
266
|
+
|
267
|
+
# Delete bucket website settings
|
268
|
+
# @param name [String] the bucket name
|
269
|
+
def delete_bucket_website(name)
  logger.info("Begin delete bucket website, name: #{name}")

  @http.delete({:bucket => name, :sub_res => {'website' => nil}})

  logger.info("Done delete bucket website")
end
|
277
|
+
|
278
|
+
# Put bucket referer
# @param name [String] the bucket name
# @param referer [BucketReferer] the bucket referer options
def put_bucket_referer(name, referer)
  logger.info("Begin put bucket referer, "\
              "name: #{name}, referer: #{referer}")

  body = Nokogiri::XML::Builder.new do |xml|
    xml.RefererConfiguration {
      xml.AllowEmptyReferer referer.allow_empty?
      xml.RefererList {
        # `||` instead of `or`: `or` binds looser than `=`/method
        # args and is a well-known precedence trap in expressions.
        (referer.whitelist || []).each do |r|
          xml.Referer r
        end
      }
    }
  end.to_xml

  @http.put(
    {:bucket => name, :sub_res => {'referer' => nil}},
    {:body => body})

  logger.info("Done put bucket referer")
end
|
303
|
+
|
304
|
+
# Get bucket referer
|
305
|
+
# @param name [String] the bucket name
|
306
|
+
# @return [BucketReferer] the bucket referer options
|
307
|
+
def get_bucket_referer(name)
  logger.info("Begin get bucket referer, name: #{name}")

  response = @http.get({:bucket => name, :sub_res => {'referer' => nil}})
  doc = parse_xml(response.body)

  opts = {
    :allow_empty =>
      get_node_text(doc.root, 'AllowEmptyReferer', &:to_bool),
    :whitelist => doc.css("RefererList Referer").map(&:text)
  }

  logger.info("Done get bucket referer")

  BucketReferer.new(opts)
end
|
324
|
+
|
325
|
+
# Put bucket lifecycle settings
# @param name [String] the bucket name
# @param rules [Array<OSS::LifeCycleRule>] the
#  lifecycle rules
# @see OSS::LifeCycleRule
def put_bucket_lifecycle(name, rules)
  logger.info("Begin put bucket lifecycle, name: #{name}, rules: "\
              "#{rules.map { |r| r.to_s }}")

  body = Nokogiri::XML::Builder.new do |xml|
    xml.LifecycleConfiguration {
      rules.each do |r|
        xml.Rule {
          xml.ID r.id if r.id
          xml.Status r.enabled? ? 'Enabled' : 'Disabled'

          xml.Prefix r.prefix
          xml.Expiration {
            if r.expiry.is_a?(Date)
              # OSS expects the millisecond ISO-8601 form.
              xml.Date Time.utc(
                r.expiry.year, r.expiry.month, r.expiry.day)
                .iso8601.sub('Z', '.000Z')
            elsif r.expiry.is_a?(Integer)
              # Integer instead of Fixnum: Fixnum is deprecated
              # since Ruby 2.4 and removed in Ruby 3.2.
              xml.Days r.expiry
            else
              fail ClientError, "Expiry must be a Date or Integer."
            end
          }
        }
      end
    }
  end.to_xml

  @http.put(
    {:bucket => name, :sub_res => {'lifecycle' => nil}},
    {:body => body})

  logger.info("Done put bucket lifecycle")
end
|
365
|
+
|
366
|
+
# Get bucket lifecycle settings
|
367
|
+
# @param name [String] the bucket name
|
368
|
+
# @return [Array<OSS::LifeCycleRule>] the
|
369
|
+
# lifecycle rules. See {OSS::LifeCycleRule}
|
370
|
+
def get_bucket_lifecycle(name)
  logger.info("Begin get bucket lifecycle, name: #{name}")

  response = @http.get({:bucket => name, :sub_res => {'lifecycle' => nil}})
  doc = parse_xml(response.body)

  rules = doc.css("Rule").map do |node|
    days_node = node.at_css("Expiration Days")
    date_node = node.at_css("Expiration Date")

    # A rule must carry exactly one of Days / Date.
    if (days_node && date_node) || (!days_node && !date_node)
      fail ClientError, "We can only have one of Date and Days for expiry."
    end

    LifeCycleRule.new(
      :id => get_node_text(node, 'ID'),
      :prefix => get_node_text(node, 'Prefix'),
      :enable => get_node_text(node, 'Status') { |x| x == 'Enabled' },
      :expiry => days_node ? days_node.text.to_i : Date.parse(date_node.text)
    )
  end
  logger.info("Done get bucket lifecycle")

  rules
end
|
396
|
+
|
397
|
+
# Delete *all* lifecycle rules on the bucket
|
398
|
+
# @note this will delete all lifecycle rules
|
399
|
+
# @param name [String] the bucket name
|
400
|
+
def delete_bucket_lifecycle(name)
  logger.info("Begin delete bucket lifecycle, name: #{name}")

  @http.delete({:bucket => name, :sub_res => {'lifecycle' => nil}})

  logger.info("Done delete bucket lifecycle")
end
|
408
|
+
|
409
|
+
# Set bucket CORS(Cross-Origin Resource Sharing) rules
# @param name [String] the bucket name
# @param rules [Array<OSS::CORSRule>] the CORS
#  rules
# @see OSS::CORSRule
def set_bucket_cors(name, rules)
  logger.info("Begin set bucket cors, bucket: #{name}, rules: "\
              "#{rules.map { |r| r.to_s }.join(';')}")

  body = Nokogiri::XML::Builder.new do |xml|
    xml.CORSConfiguration {
      rules.each do |r|
        xml.CORSRule {
          r.allowed_origins.each { |x| xml.AllowedOrigin x }
          r.allowed_methods.each { |x| xml.AllowedMethod x }
          r.allowed_headers.each { |x| xml.AllowedHeader x }
          r.expose_headers.each { |x| xml.ExposeHeader x }
          xml.MaxAgeSeconds r.max_age_seconds if r.max_age_seconds
        }
      end
    }
  end.to_xml

  @http.put(
    {:bucket => name, :sub_res => {'cors' => nil}},
    {:body => body})

  # Fixed copy-paste bug: previously logged
  # "Done delete bucket lifecycle" here.
  logger.info("Done set bucket cors")
end
|
439
|
+
|
440
|
+
# Get bucket CORS rules
# @param name [String] the bucket name
# @return [Array<OSS::CORSRule>] the CORS rules
def get_bucket_cors(name)
  logger.info("Begin get bucket cors, bucket: #{name}")

  r = @http.get({:bucket => name, :sub_res => {'cors' => nil}})
  doc = parse_xml(r.body)

  # Use map's return value directly instead of accumulating into a
  # pre-declared array via `<<` inside a discarded map.
  rules = doc.css("CORSRule").map do |n|
    CORSRule.new(
      :allowed_origins => n.css("AllowedOrigin").map(&:text),
      :allowed_methods => n.css("AllowedMethod").map(&:text),
      :allowed_headers => n.css("AllowedHeader").map(&:text),
      :expose_headers => n.css("ExposeHeader").map(&:text),
      :max_age_seconds => get_node_text(n, 'MaxAgeSeconds', &:to_i))
  end

  logger.info("Done get bucket cors")

  rules
end
|
471
|
+
|
472
|
+
# Delete all bucket CORS rules
|
473
|
+
# @note this will delete all CORS rules of this bucket
|
474
|
+
# @param name [String] the bucket name
|
475
|
+
def delete_bucket_cors(name)
  logger.info("Begin delete bucket cors, bucket: #{name}")

  @http.delete({:bucket => name, :sub_res => {'cors' => nil}})

  logger.info("Done delete bucket cors")
end
|
484
|
+
|
485
|
+
# Delete a bucket
|
486
|
+
# @param name [String] the bucket name
|
487
|
+
# @note it will fail if the bucket is not empty (it contains
|
488
|
+
# objects)
|
489
|
+
def delete_bucket(name)
  logger.info("Begin delete bucket: #{name}")

  @http.delete({:bucket => name})

  logger.info("Done delete bucket")
end
|
496
|
+
|
497
|
+
# Put an object to the specified bucket, a block is required
|
498
|
+
# to provide the object data.
|
499
|
+
# @param bucket_name [String] the bucket name
|
500
|
+
# @param object_name [String] the object name
|
501
|
+
# @param opts [Hash] Options
|
502
|
+
# @option opts [String] :acl specify the object's ACL. See
|
503
|
+
# {OSS::ACL}
|
504
|
+
# @option opts [String] :content_type the HTTP Content-Type
|
505
|
+
# for the file, if not specified client will try to determine
|
506
|
+
# the type itself and fall back to HTTP::DEFAULT_CONTENT_TYPE
|
507
|
+
# if it fails to do so
|
508
|
+
# @option opts [Hash<Symbol, String>] :metas key-value pairs
|
509
|
+
# that serve as the object meta which will be stored together
|
510
|
+
# with the object
|
511
|
+
# @option opts [Callback] :callback the HTTP callback performed
|
512
|
+
# by OSS after `put_object` succeeds
|
513
|
+
# @option opts [Hash] :headers custom HTTP headers, case
|
514
|
+
# insensitive. Headers specified here will overwrite `:metas`
|
515
|
+
# and `:content_type`
|
516
|
+
# @yield [HTTP::StreamWriter] a stream writer is
|
517
|
+
# yielded to the caller to which it can write chunks of data
|
518
|
+
# streamingly
|
519
|
+
# @example
|
520
|
+
# chunk = get_chunk
|
521
|
+
# put_object('bucket', 'object') { |sw| sw.write(chunk) }
|
522
|
+
def put_object(bucket_name, object_name, opts = {}, &block)
  logger.debug("Begin put object, bucket: #{bucket_name}, object: "\
               "#{object_name}, options: #{opts}")

  headers = {'content-type' => opts[:content_type]}
  headers['x-oss-object-acl'] = opts[:acl] if opts.key?(:acl)
  # User metas become x-oss-meta-* headers.
  to_lower_case(opts[:metas] || {}).each do |k, v|
    headers["x-oss-meta-#{k.to_s}"] = v.to_s
  end

  # Custom headers win over :metas and :content_type.
  headers.merge!(to_lower_case(opts[:headers])) if opts.key?(:headers)

  headers[CALLBACK_HEADER] = opts[:callback].serialize if opts.key?(:callback)

  r = @http.put(
    {:bucket => bucket_name, :object => object_name},
    {:headers => headers, :body => HTTP::StreamPayload.new(&block)})

  # 203 means the object was stored but the callback failed.
  if r.code == 203
    e = CallbackError.new(r)
    logger.error(e.to_s)
    raise e
  end

  logger.debug('Done put object')
end
|
549
|
+
|
550
|
+
# Append to an object of a bucket. Create an "Appendable
|
551
|
+
# Object" if the object does not exist. A block is required to
|
552
|
+
# provide the appending data.
|
553
|
+
# @param bucket_name [String] the bucket name
|
554
|
+
# @param object_name [String] the object name
|
555
|
+
# @param position [Integer] the position to append
|
556
|
+
# @param opts [Hash] Options
|
557
|
+
# @option opts [String] :acl specify the object's ACL. See
|
558
|
+
# {OSS::ACL}
|
559
|
+
# @option opts [String] :content_type the HTTP Content-Type
|
560
|
+
# for the file, if not specified client will try to determine
|
561
|
+
# the type itself and fall back to HTTP::DEFAULT_CONTENT_TYPE
|
562
|
+
# if it fails to do so
|
563
|
+
# @option opts [Hash<Symbol, String>] :metas key-value pairs
|
564
|
+
# that serve as the object meta which will be stored together
|
565
|
+
# with the object
|
566
|
+
# @option opts [Hash] :headers custom HTTP headers, case
|
567
|
+
# insensitive. Headers specified here will overwrite `:metas`
|
568
|
+
# and `:content_type`
|
569
|
+
# @return [Integer] next position to append
|
570
|
+
# @yield [HTTP::StreamWriter] a stream writer is
|
571
|
+
# yielded to the caller to which it can write chunks of data
|
572
|
+
# streamingly
|
573
|
+
# @note
|
574
|
+
# 1. Can not append to a "Normal Object"
|
575
|
+
# 2. The position must equal to the object's size before append
|
576
|
+
# 3. The :content_type is only used when the object is created
|
577
|
+
def append_object(bucket_name, object_name, position, opts = {}, &block)
  logger.debug("Begin append object, bucket: #{bucket_name}, object: "\
               "#{object_name}, position: #{position}, options: #{opts}")

  headers = {'content-type' => opts[:content_type]}
  headers['x-oss-object-acl'] = opts[:acl] if opts.key?(:acl)
  to_lower_case(opts[:metas] || {}).each do |k, v|
    headers["x-oss-meta-#{k.to_s}"] = v.to_s
  end
  headers.merge!(to_lower_case(opts[:headers])) if opts.key?(:headers)

  r = @http.post(
    {:bucket => bucket_name, :object => object_name,
     :sub_res => {'append' => nil, 'position' => position}},
    {:headers => headers, :body => HTTP::StreamPayload.new(&block)})

  logger.debug('Done append object')

  # Next append position, or -1 when the header is absent.
  wrap(r.headers[:x_oss_next_append_position], &:to_i) || -1
end
|
597
|
+
|
598
|
+
# List objects in a bucket.
|
599
|
+
# @param bucket_name [String] the bucket name
|
600
|
+
# @param opts [Hash] options
|
601
|
+
# @option opts [String] :prefix return only those objects
|
602
|
+
# prefixed with it if specified
|
603
|
+
# @option opts [String] :marker return objects after where it
|
604
|
+
# indicates (exclusively). All buckets are sorted by name
|
605
|
+
# alphabetically
|
606
|
+
# @option opts [Integer] :limit return only the first N
|
607
|
+
# objects if specified
|
608
|
+
# @option opts [String] :delimiter the delimiter to get common
|
609
|
+
# prefixes of all objects
|
610
|
+
# @option opts [String] :encoding the encoding of object key
|
611
|
+
# in the response body. Only {OSS::KeyEncoding::URL} is
|
612
|
+
# supported now.
|
613
|
+
# @example
|
614
|
+
# Assume we have the following objects:
|
615
|
+
# /foo/bar/obj1
|
616
|
+
# /foo/bar/obj2
|
617
|
+
# ...
|
618
|
+
# /foo/bar/obj9999999
|
619
|
+
# /foo/xxx/
|
620
|
+
# use 'foo/' as the prefix, '/' as the delimiter, the common
|
621
|
+
# prefixes we get are: '/foo/bar/', '/foo/xxx/'. They are
|
622
|
+
# coincidentally the sub-directories under '/foo/'. Using
|
623
|
+
# delimiter we avoid list all the objects whose number may be
|
624
|
+
# large.
|
625
|
+
# @return [Array<Objects>, Hash] the returned object and a
|
626
|
+
# hash including the next tokens, which includes:
|
627
|
+
# * :common_prefixes [String] the common prefixes returned
|
628
|
+
# * :prefix [String] the prefix used
|
629
|
+
# * :delimiter [String] the delimiter used
|
630
|
+
# * :marker [String] the marker used
|
631
|
+
# * :limit [Integer] the limit used
|
632
|
+
# * :next_marker [String] marker to continue list objects
|
633
|
+
# * :truncated [Boolean] whether there are more objects to
|
634
|
+
# be returned
|
635
|
+
def list_objects(bucket_name, opts = {})
  logger.debug("Begin list object, bucket: #{bucket_name}, options: #{opts}")

  query = {
    'prefix' => opts[:prefix],
    'delimiter' => opts[:delimiter],
    'marker' => opts[:marker],
    'max-keys' => opts[:limit],
    'encoding-type' => opts[:encoding]
  }.reject { |_, v| v.nil? }

  r = @http.get({:bucket => bucket_name}, {:query => query})

  doc = parse_xml(r.body)
  # Keys may come back url-encoded; decode per EncodingType.
  encoding = get_node_text(doc.root, 'EncodingType')
  objects = doc.css("Contents").map do |node|
    Object.new(
      :key => get_node_text(node, "Key") { |x| decode_key(x, encoding) },
      :type => get_node_text(node, "Type"),
      :size => get_node_text(node, "Size", &:to_i),
      :etag => get_node_text(node, "ETag"),
      :last_modified =>
        get_node_text(node, "LastModified") { |x| Time.parse(x) }
    )
  end

  tag_map = {
    :prefix => 'Prefix',
    :delimiter => 'Delimiter',
    :limit => 'MaxKeys',
    :marker => 'Marker',
    :next_marker => 'NextMarker',
    :truncated => 'IsTruncated',
    :encoding => 'EncodingType'
  }
  more = tag_map.each_with_object({}) do |(key, tag), acc|
    text = get_node_text(doc.root, tag)
    acc[key] = text unless text.nil?
  end

  update_if_exists(
    more, {
      :limit => ->(x) { x.to_i },
      :truncated => ->(x) { x.to_bool },
      :delimiter => ->(x) { decode_key(x, encoding) },
      :marker => ->(x) { decode_key(x, encoding) },
      :next_marker => ->(x) { decode_key(x, encoding) }
    }
  )

  common_prefixes = doc.css("CommonPrefixes Prefix").map do |node|
    decode_key(node.text, encoding)
  end
  more[:common_prefixes] = common_prefixes unless common_prefixes.empty?

  logger.debug("Done list object. objects: #{objects}, more: #{more}")

  [objects, more]
end
|
694
|
+
|
695
|
+
# Get an object from the bucket. A block is required to handle
|
696
|
+
# the object data chunks.
|
697
|
+
# @note User can get the whole object or only part of it by specify
|
698
|
+
# the bytes range;
|
699
|
+
# @note User can specify conditions to get the object like:
|
700
|
+
# if-modified-since, if-unmodified-since, if-match-etag,
|
701
|
+
# if-unmatch-etag. If the object to get fails to meet the
|
702
|
+
# conditions, it will not be returned;
|
703
|
+
# @note User can indicate the server to rewrite the response headers
|
704
|
+
# such as content-type, content-encoding when get the object
|
705
|
+
# by specify the :rewrite options. The specified headers will
|
706
|
+
# be returned instead of the original property of the object.
|
707
|
+
# @param bucket_name [String] the bucket name
|
708
|
+
# @param object_name [String] the object name
|
709
|
+
# @param opts [Hash] options
|
710
|
+
# @option opts [Array<Integer>] :range bytes range to get from
|
711
|
+
# the object, in the format: xx-yy
|
712
|
+
# @option opts [Hash] :condition preconditions to get the object
|
713
|
+
# * :if_modified_since (Time) get the object if its modified
|
714
|
+
# time is later than specified
|
715
|
+
# * :if_unmodified_since (Time) get the object if its
|
716
|
+
# unmodified time if earlier than specified
|
717
|
+
# * :if_match_etag (String) get the object if its etag match
|
718
|
+
# specified
|
719
|
+
# * :if_unmatch_etag (String) get the object if its etag
|
720
|
+
# doesn't match specified
|
721
|
+
# @option opts [Hash] :headers custom HTTP headers, case
|
722
|
+
# insensitive. Headers specified here will overwrite `:condition`
|
723
|
+
# and `:range`
|
724
|
+
# @option opts [Hash] :rewrite response headers to rewrite
|
725
|
+
# * :content_type (String) the Content-Type header
|
726
|
+
# * :content_language (String) the Content-Language header
|
727
|
+
# * :expires (Time) the Expires header
|
728
|
+
# * :cache_control (String) the Cache-Control header
|
729
|
+
# * :content_disposition (String) the Content-Disposition header
|
730
|
+
# * :content_encoding (String) the Content-Encoding header
|
731
|
+
# @return [OSS::Object] The object meta
|
732
|
+
# @yield [String] it gives the data chunks of the object to
|
733
|
+
# the block
|
734
|
+
# Fetch an object from the bucket, optionally streaming its content
# to the given block chunk by chunk. Supported options (:range,
# :condition, :headers, :rewrite) are documented in the YARD comment
# above.
# @return [OSS::Object] the object meta
def get_object(bucket_name, object_name, opts = {}, &block)
  logger.debug("Begin get object, bucket: #{bucket_name}, "\
               "object: #{object_name}")

  byte_range = opts[:range]
  preconditions = opts[:condition]

  headers = {}
  headers['range'] = get_bytes_range(byte_range) if byte_range
  headers.merge!(get_conditions(preconditions)) if preconditions
  headers.merge!(to_lower_case(opts[:headers])) if opts.key?(:headers)

  # Response-header rewrites are carried as sub-resources of the GET.
  sub_res = {}
  rewrites = opts[:rewrite]
  if rewrites
    [:content_type,
     :content_language,
     :cache_control,
     :content_disposition,
     :content_encoding].each do |field|
      next unless rewrites.key?(field)
      sub_res["response-#{field.to_s.sub('_', '-')}"] = rewrites[field]
    end
    if rewrites.key?(:expires)
      sub_res["response-expires"] = rewrites[:expires].httpdate
    end
  end

  r = @http.get(
    {:bucket => bucket_name, :object => object_name,
     :sub_res => sub_res},
    {:headers => headers}
  ) { |chunk| yield chunk if block_given? }

  resp_headers = r.headers
  meta_prefix = 'x_oss_meta_'
  # Pick out the user meta entries from the response headers.
  metas = resp_headers.each_with_object({}) do |(k, v), acc|
    acc[k.to_s.sub(meta_prefix, '')] = v.to_s if k.to_s.start_with?(meta_prefix)
  end

  obj = Object.new(
    :key => object_name,
    :type => resp_headers[:x_oss_object_type],
    :size => wrap(resp_headers[:content_length], &:to_i),
    :etag => resp_headers[:etag],
    :metas => metas,
    :last_modified =>
      wrap(resp_headers[:last_modified]) { |x| Time.parse(x) },
    :headers => resp_headers)

  logger.debug("Done get object")

  obj
end
|
787
|
+
|
788
|
+
# Get the object meta rather than the whole object.
|
789
|
+
# @note User can specify conditions to get the object like:
|
790
|
+
# if-modified-since, if-unmodified-since, if-match-etag,
|
791
|
+
# if-unmatch-etag. If the object to get fails to meet the
|
792
|
+
# conditions, it will not be returned.
|
793
|
+
#
|
794
|
+
# @param bucket_name [String] the bucket name
|
795
|
+
# @param object_name [String] the object name
|
796
|
+
# @param opts [Hash] options
|
797
|
+
# @option opts [Hash] :condition preconditions to get the
|
798
|
+
# object meta. The same as #get_object
|
799
|
+
# @return [OSS::Object] The object meta
|
800
|
+
# Fetch only the meta of an object via a HEAD request; the object
# content itself is not transferred. Preconditions may be supplied
# via opts[:condition] (same shape as #get_object).
# @return [OSS::Object] the object meta
def get_object_meta(bucket_name, object_name, opts = {})
  logger.debug("Begin get object meta, bucket: #{bucket_name}, "\
               "object: #{object_name}, options: #{opts}")

  preconditions = opts[:condition]
  headers = {}
  headers.merge!(get_conditions(preconditions)) if preconditions

  r = @http.head(
    {:bucket => bucket_name, :object => object_name},
    {:headers => headers})

  resp_headers = r.headers
  meta_prefix = 'x_oss_meta_'
  # Pick out the user meta entries from the response headers.
  metas = resp_headers.each_with_object({}) do |(k, v), acc|
    acc[k.to_s.sub(meta_prefix, '')] = v.to_s if k.to_s.start_with?(meta_prefix)
  end

  obj = Object.new(
    :key => object_name,
    :type => resp_headers[:x_oss_object_type],
    :size => wrap(resp_headers[:content_length], &:to_i),
    :etag => resp_headers[:etag],
    :metas => metas,
    :last_modified =>
      wrap(resp_headers[:last_modified]) { |x| Time.parse(x) },
    :headers => resp_headers)

  logger.debug("Done get object meta")

  obj
end
|
830
|
+
|
831
|
+
# Copy an object in the bucket. The source object and the dest
|
832
|
+
# object may be from different buckets of the same region.
|
833
|
+
# @param bucket_name [String] the bucket name
|
834
|
+
# @param src_object_name [String] the source object name
|
835
|
+
# @param dst_object_name [String] the dest object name
|
836
|
+
# @param opts [Hash] options
|
837
|
+
# @option opts [String] :src_bucket specify the source object's
|
838
|
+
# bucket. It MUST be in the same region as the dest bucket. It
|
839
|
+
# defaults to dest bucket if not specified.
|
840
|
+
# @option opts [String] :acl specify the dest object's
|
841
|
+
# ACL. See {OSS::ACL}
|
842
|
+
# @option opts [String] :meta_directive specify what to do
|
843
|
+
# with the object's meta: copy or replace. See
|
844
|
+
# {OSS::MetaDirective}
|
845
|
+
# @option opts [String] :content_type the HTTP Content-Type
|
846
|
+
# for the file, if not specified client will try to determine
|
847
|
+
# the type itself and fall back to HTTP::DEFAULT_CONTENT_TYPE
|
848
|
+
# if it fails to do so
|
849
|
+
# @option opts [Hash<Symbol, String>] :metas key-value pairs
|
850
|
+
# that serve as the object meta which will be stored together
|
851
|
+
# with the object
|
852
|
+
# @option opts [Hash] :condition preconditions to get the
|
853
|
+
# object. See #get_object
|
854
|
+
# @return [Hash] the copy result
|
855
|
+
# * :etag [String] the etag of the dest object
|
856
|
+
# * :last_modified [Time] the last modification time of the
|
857
|
+
# dest object
|
858
|
+
# Copy an object, possibly across buckets in the same region.
# Options: :src_bucket, :acl, :meta_directive, :content_type,
# :metas, :condition — see the YARD comment above.
# @return [Hash] :etag and :last_modified of the dest object
def copy_object(bucket_name, src_object_name, dst_object_name, opts = {})
  logger.debug("Begin copy object, bucket: #{bucket_name}, "\
               "source object: #{src_object_name}, dest object: "\
               "#{dst_object_name}, options: #{opts}")

  source_bucket = opts[:src_bucket] || bucket_name
  headers = {
    'x-oss-copy-source' =>
      @http.get_resource_path(source_bucket, src_object_name),
    'content-type' => opts[:content_type]
  }

  (opts[:metas] || {}).each do |k, v|
    headers["x-oss-meta-#{k}"] = v.to_s
  end

  # Optional per-copy directives become dedicated request headers.
  headers['x-oss-object-acl'] = opts[:acl] if opts[:acl]
  headers['x-oss-metadata-directive'] = opts[:meta_directive] if opts[:meta_directive]

  headers.merge!(get_copy_conditions(opts[:condition])) if opts[:condition]

  r = @http.put(
    {:bucket => bucket_name, :object => dst_object_name},
    {:headers => headers})

  doc = parse_xml(r.body)
  result = {}
  modified_at = get_node_text(doc.root, 'LastModified') { |x| Time.parse(x) }
  result[:last_modified] = modified_at unless modified_at.nil?
  etag = get_node_text(doc.root, 'ETag')
  result[:etag] = etag unless etag.nil?

  logger.debug("Done copy object")

  result
end
|
894
|
+
|
895
|
+
# Delete an object from the bucket
|
896
|
+
# @param bucket_name [String] the bucket name
|
897
|
+
# @param object_name [String] the object name
|
898
|
+
# Remove a single object from the bucket.
# @param bucket_name [String] the bucket name
# @param object_name [String] the object name
def delete_object(bucket_name, object_name)
  logger.debug("Begin delete object, bucket: #{bucket_name}, "\
               "object: #{object_name}")

  resources = {:bucket => bucket_name, :object => object_name}
  @http.delete(resources)

  logger.debug("Done delete object")
end
|
906
|
+
|
907
|
+
# Batch delete objects
|
908
|
+
# @param bucket_name [String] the bucket name
|
909
|
+
# @param object_names [Enumerator<String>] the object names
|
910
|
+
# @param opts [Hash] options
|
911
|
+
# @option opts [Boolean] :quiet indicates whether the server
|
912
|
+
# should return the delete result of the objects
|
913
|
+
# @option opts [String] :encoding the encoding type for
|
914
|
+
# object key in the response body, only
|
915
|
+
# {OSS::KeyEncoding::URL} is supported now
|
916
|
+
# @return [Array<String>] object names that have been
|
917
|
+
# successfully deleted or empty if :quiet is true
|
918
|
+
# Batch delete objects in a single request.
# @param bucket_name [String] the bucket name
# @param object_names [Enumerator<String>] the object names
# @param opts [Hash] options: :quiet suppresses the per-object result,
#   :encoding selects the key encoding of the response body
# @return [Array<String>] names successfully deleted (empty if :quiet)
def batch_delete_objects(bucket_name, object_names, opts = {})
  logger.debug("Begin batch delete object, bucket: #{bucket_name}, "\
               "objects: #{object_names}, options: #{opts}")

  sub_res = {'delete' => nil}

  # Object keys may contain invisible chars that corrupt libxml, so
  # the XML body is constructed manually. String.new keeps the buffer
  # mutable even under a frozen-string-literal pragma.
  body = String.new('<?xml version="1.0"?>')
  body << '<Delete>'
  body << '<Quiet>' << (opts[:quiet] ? true : false).to_s << '</Quiet>'
  object_names.each do |k|
    body << '<Object><Key>' << CGI.escapeHTML(k) << '</Key></Object>'
  end
  body << '</Delete>'

  query = {}
  query['encoding-type'] = opts[:encoding] if opts[:encoding]

  r = @http.post(
    {:bucket => bucket_name, :sub_res => sub_res},
    {:query => query, :body => body})

  deleted = []
  unless opts[:quiet]
    doc = parse_xml(r.body)
    encoding = get_node_text(doc.root, 'EncodingType')
    doc.css("Deleted").each do |n|
      deleted << get_node_text(n, 'Key') { |x| decode_key(x, encoding) }
    end
  end

  # FIX: previously logged "Done delete object" (copied from
  # #delete_object); log the operation actually performed.
  logger.debug("Done batch delete objects")

  deleted
end
|
954
|
+
|
955
|
+
# Put object acl
|
956
|
+
# @param bucket_name [String] the bucket name
|
957
|
+
# @param object_name [String] the object name
|
958
|
+
# @param acl [String] the object's ACL. See {OSS::ACL}
|
959
|
+
# Set the ACL of a single object.
# @param bucket_name [String] the bucket name
# @param object_name [String] the object name
# @param acl [String] the object's ACL. See {OSS::ACL}
def put_object_acl(bucket_name, object_name, acl)
  logger.debug("Begin update object acl, bucket: #{bucket_name}, "\
               "object: #{object_name}, acl: #{acl}")

  @http.put(
    {:bucket => bucket_name, :object => object_name,
     :sub_res => {'acl' => nil}},
    {:headers => {'x-oss-object-acl' => acl}})

  logger.debug("Done update object acl")
end
|
972
|
+
|
973
|
+
# Get object acl
|
974
|
+
# @param bucket_name [String] the bucket name
|
975
|
+
# @param object_name [String] the object name
|
976
|
+
# @return [String] the object's acl. See {OSS::ACL}
|
977
|
+
# Read the ACL of a single object.
# @param bucket_name [String] the bucket name
# @param object_name [String] the object name
# @return [String] the object's acl. See {OSS::ACL}
def get_object_acl(bucket_name, object_name)
  logger.debug("Begin get object acl, bucket: #{bucket_name}, "\
               "object: #{object_name}")

  r = @http.get(
    {bucket: bucket_name, object: object_name, sub_res: {'acl' => nil}})

  # The grant value lives under <AccessControlList><Grant>.
  doc = parse_xml(r.body)
  grant = get_node_text(doc.at_css("AccessControlList"), 'Grant')

  logger.debug("Done get object acl")

  grant
end
|
992
|
+
|
993
|
+
# Get object CORS rule
|
994
|
+
# @note this is usually used by browser to make a "preflight"
|
995
|
+
# @param bucket_name [String] the bucket name
|
996
|
+
# @param object_name [String] the object name
|
997
|
+
# @param origin [String] the Origin of the request
|
998
|
+
# @param method [String] the method to request access:
|
999
|
+
# Access-Control-Request-Method
|
1000
|
+
# @param headers [Array<String>] the headers to request access:
|
1001
|
+
# Access-Control-Request-Headers
|
1002
|
+
# @return [CORSRule] the CORS rule of the object
|
1003
|
+
# Perform a CORS preflight (OPTIONS) against an object and return
# the matching CORS rule from the response headers.
# @return [CORSRule] the CORS rule of the object
def get_object_cors(bucket_name, object_name, origin, method, headers = [])
  logger.debug("Begin get object cors, bucket: #{bucket_name}, object: "\
               "#{object_name}, origin: #{origin}, method: #{method}, "\
               "headers: #{headers.join(',')}")

  preflight_headers = {
    'origin' => origin,
    'access-control-request-method' => method,
    'access-control-request-headers' => headers.join(',')
  }

  r = @http.options(
    {:bucket => bucket_name, :object => object_name},
    {:headers => preflight_headers})

  logger.debug("Done get object cors")

  resp = r.headers
  CORSRule.new(
    :allowed_origins => resp[:access_control_allow_origin],
    :allowed_methods => resp[:access_control_allow_methods],
    :allowed_headers => resp[:access_control_allow_headers],
    :expose_headers => resp[:access_control_expose_headers],
    :max_age_seconds => resp[:access_control_max_age]
  )
end
|
1028
|
+
|
1029
|
+
##
|
1030
|
+
# Multipart uploading
|
1031
|
+
#
|
1032
|
+
|
1033
|
+
# Initiate a multipart uploading transaction
|
1034
|
+
# @param bucket_name [String] the bucket name
|
1035
|
+
# @param object_name [String] the object name
|
1036
|
+
# @param opts [Hash] options
|
1037
|
+
# @option opts [String] :content_type the HTTP Content-Type
|
1038
|
+
# for the file, if not specified client will try to determine
|
1039
|
+
# the type itself and fall back to HTTP::DEFAULT_CONTENT_TYPE
|
1040
|
+
# if it fails to do so
|
1041
|
+
# @option opts [Hash<Symbol, String>] :metas key-value pairs
|
1042
|
+
# that serve as the object meta which will be stored together
|
1043
|
+
# with the object
|
1044
|
+
# @option opts [Hash] :headers custom HTTP headers, case
|
1045
|
+
# insensitive. Headers specified here will overwrite `:metas`
|
1046
|
+
# and `:content_type`
|
1047
|
+
# @return [String] the upload id
|
1048
|
+
# Start a multipart upload transaction and return its upload id.
# Options: :content_type, :metas, :headers — see the YARD comment above.
# @return [String] the upload id
def initiate_multipart_upload(bucket_name, object_name, opts = {})
  logger.info("Begin initiate multipart upload, bucket: "\
              "#{bucket_name}, object: #{object_name}, options: #{opts}")

  headers = {'content-type' => opts[:content_type]}
  to_lower_case(opts[:metas] || {}).each do |k, v|
    headers["x-oss-meta-#{k}"] = v.to_s
  end

  # Custom headers win over :metas and :content_type.
  headers.merge!(to_lower_case(opts[:headers])) if opts.key?(:headers)

  r = @http.post(
    {:bucket => bucket_name, :object => object_name,
     :sub_res => {'uploads' => nil}},
    {:headers => headers})

  txn_id = get_node_text(parse_xml(r.body).root, 'UploadId')

  logger.info("Done initiate multipart upload: #{txn_id}.")

  txn_id
end
|
1071
|
+
|
1072
|
+
# Upload a part in a multipart uploading transaction.
|
1073
|
+
# @param bucket_name [String] the bucket name
|
1074
|
+
# @param object_name [String] the object name
|
1075
|
+
# @param txn_id [String] the upload id
|
1076
|
+
# @param part_no [Integer] the part number
|
1077
|
+
# @yield [HTTP::StreamWriter] a stream writer is
|
1078
|
+
# yielded to the caller to which it can write chunks of data
|
1079
|
+
# streamingly
|
1080
|
+
# Upload one part of a multipart transaction; the block streams the
# part's data through an HTTP::StreamWriter.
# @return [Multipart::Part] the uploaded part (number + etag)
def upload_part(bucket_name, object_name, txn_id, part_no, &block)
  logger.debug("Begin upload part, bucket: #{bucket_name}, object: "\
               "#{object_name}, txn id: #{txn_id}, part No: #{part_no}")

  resp = @http.put(
    {:bucket => bucket_name, :object => object_name,
     :sub_res => {'partNumber' => part_no, 'uploadId' => txn_id}},
    {:body => HTTP::StreamPayload.new(&block)})

  logger.debug("Done upload part")

  Multipart::Part.new(:number => part_no, :etag => resp.headers[:etag])
end
|
1093
|
+
|
1094
|
+
# Upload a part in a multipart uploading transaction by copying
|
1095
|
+
# from an existent object as the part's content. It may copy
|
1096
|
+
# only part of the object by specifying the bytes range to read.
|
1097
|
+
# @param bucket_name [String] the bucket name
|
1098
|
+
# @param object_name [String] the object name
|
1099
|
+
# @param txn_id [String] the upload id
|
1100
|
+
# @param part_no [Integer] the part number
|
1101
|
+
# @param source_object [String] the source object name to copy from
|
1102
|
+
# @param opts [Hash] options
|
1103
|
+
# @option opts [String] :src_bucket specify the source object's
|
1104
|
+
# bucket. It MUST be in the same region as the dest bucket. It
|
1105
|
+
# defaults to dest bucket if not specified.
|
1106
|
+
# @option opts [Array<Integer>] :range the bytes range to
|
1107
|
+
# copy, in the format: [begin(inclusive), end(exclusive)]
|
1108
|
+
# @option opts [Hash] :condition preconditions to copy the
|
1109
|
+
# object. See #get_object
|
1110
|
+
# Upload a part by copying (part of) an existing object as its content.
# @param opts [Hash] options: :src_bucket, :range ([begin, end)
#   exclusive), :condition (see #get_object)
# @return [Multipart::Part] the uploaded part (number + etag)
def upload_part_by_copy(
  bucket_name, object_name, txn_id, part_no, source_object, opts = {})
  # FIX: the debug message previously ran "...#{source_object}txn id:"
  # together — a ", " separator was missing between the fragments.
  logger.debug("Begin upload part by copy, bucket: #{bucket_name}, "\
               "object: #{object_name}, source object: #{source_object}, "\
               "txn id: #{txn_id}, part No: #{part_no}, options: #{opts}")

  range = opts[:range]
  conditions = opts[:condition]

  src_bucket = opts[:src_bucket] || bucket_name
  headers = {
    'x-oss-copy-source' =>
      @http.get_resource_path(src_bucket, source_object)
  }
  # get_bytes_range fully validates the range (Array of 2 Integers)
  # and raises ClientError with the same message the redundant
  # pre-check here used to raise, so the pre-check was removed.
  headers['range'] = get_bytes_range(range) if range
  headers.merge!(get_copy_conditions(conditions)) if conditions

  sub_res = {'partNumber' => part_no, 'uploadId' => txn_id}

  r = @http.put(
    {:bucket => bucket_name, :object => object_name, :sub_res => sub_res},
    {:headers => headers})

  logger.debug("Done upload part by copy: #{source_object}.")

  Multipart::Part.new(:number => part_no, :etag => r.headers[:etag])
end
|
1141
|
+
|
1142
|
+
# Complete a multipart uploading transaction
|
1143
|
+
# @param bucket_name [String] the bucket name
|
1144
|
+
# @param object_name [String] the object name
|
1145
|
+
# @param txn_id [String] the upload id
|
1146
|
+
# @param parts [Array<Multipart::Part>] all the parts in this
|
1147
|
+
# transaction
|
1148
|
+
# @param callback [Callback] the HTTP callback performed by OSS
|
1149
|
+
# after this operation succeeds
|
1150
|
+
# Complete a multipart uploading transaction.
# @param bucket_name [String] the bucket name
# @param object_name [String] the object name
# @param txn_id [String] the upload id
# @param parts [Array<Multipart::Part>] all the parts in this transaction
# @param callback [Callback] the HTTP callback performed by OSS
#   after this operation succeeds
# @raise [CallbackError] when OSS reports the callback failed (HTTP 203)
def complete_multipart_upload(
  bucket_name, object_name, txn_id, parts, callback = nil)
  logger.debug("Begin complete multipart upload, "\
               "txn id: #{txn_id}, parts: #{parts.map(&:to_s)}")

  sub_res = {'uploadId' => txn_id}
  headers = {}
  # The callback, if given, is serialized into a request header that
  # instructs OSS to call back after the upload completes.
  headers[CALLBACK_HEADER] = callback.serialize if callback

  # The request body lists every part number with its ETag.
  body = Nokogiri::XML::Builder.new do |xml|
    xml.CompleteMultipartUpload {
      parts.each do |p|
        xml.Part {
          xml.PartNumber p.number
          xml.ETag p.etag
        }
      end
    }
  end.to_xml

  r = @http.post(
    {:bucket => bucket_name, :object => object_name, :sub_res => sub_res},
    {:headers => headers, :body => body})

  # OSS returns 203 when the upload itself succeeded but the callback
  # failed; surface that as a CallbackError to the caller.
  if r.code == 203
    e = CallbackError.new(r)
    logger.error(e.to_s)
    raise e
  end

  logger.debug("Done complete multipart upload: #{txn_id}.")
end
|
1182
|
+
|
1183
|
+
# Abort a multipart uploading transaction
|
1184
|
+
# @note All the parts are discarded after abort. For some parts
|
1185
|
+
# being uploaded while the abort happens, they may not be
|
1186
|
+
# discarded. Call abort_multipart_upload several times for this
|
1187
|
+
# situation.
|
1188
|
+
# @param bucket_name [String] the bucket name
|
1189
|
+
# @param object_name [String] the object name
|
1190
|
+
# @param txn_id [String] the upload id
|
1191
|
+
# Abort a multipart transaction, discarding all uploaded parts.
# Parts still in flight when the abort happens may survive; call
# this method again for that situation.
def abort_multipart_upload(bucket_name, object_name, txn_id)
  logger.debug("Begin abort multipart upload, txn id: #{txn_id}")

  @http.delete(
    {:bucket => bucket_name, :object => object_name,
     :sub_res => {'uploadId' => txn_id}})

  logger.debug("Done abort multipart: #{txn_id}.")
end
|
1201
|
+
|
1202
|
+
# Get a list of all the on-going multipart uploading
|
1203
|
+
# transactions. That is: those started and not aborted.
|
1204
|
+
# @param bucket_name [String] the bucket name
|
1205
|
+
# @param opts [Hash] options:
|
1206
|
+
# @option opts [String] :id_marker return only those
|
1207
|
+
# transactions with txn id after :id_marker
|
1208
|
+
# @option opts [String] :key_marker the object key marker for
|
1209
|
+
# a multipart upload transaction.
|
1210
|
+
# 1. if +:id_marker+ is not set, return only those
|
1211
|
+
# transactions with object key *after* +:key_marker+;
|
1212
|
+
# 2. if +:id_marker+ is set, return only those transactions
|
1213
|
+
# with object key *equals* +:key_marker+ and txn id after
|
1214
|
+
# +:id_marker+
|
1215
|
+
# @option opts [String] :prefix the prefix of the object key
|
1216
|
+
# for a multipart upload transaction. if set only return
|
1217
|
+
# those transactions with the object key prefixed with it
|
1218
|
+
# @option opts [String] :encoding the encoding of object key
|
1219
|
+
# in the response body. Only {OSS::KeyEncoding::URL} is
|
1220
|
+
# supported now.
|
1221
|
+
# @return [Array<Multipart::Transaction>, Hash]
|
1222
|
+
# the returned transactions and a hash including next tokens,
|
1223
|
+
# which includes:
|
1224
|
+
# * :prefix [String] the prefix used
|
1225
|
+
# * :limit [Integer] the limit used
|
1226
|
+
# * :id_marker [String] the upload id marker used
|
1227
|
+
# * :next_id_marker [String] upload id marker to continue list
|
1228
|
+
# multipart transactions
|
1229
|
+
# * :key_marker [String] the object key marker used
|
1230
|
+
# * :next_key_marker [String] object key marker to continue
|
1231
|
+
# list multipart transactions
|
1232
|
+
# * :truncated [Boolean] whether there are more transactions
|
1233
|
+
# to be returned
|
1234
|
+
# * :encoding [String] the object key encoding used
|
1235
|
+
# List on-going multipart upload transactions (started, not aborted).
# See the YARD comment above for the supported options and the shape
# of the returned [transactions, more-tokens] pair.
def list_multipart_uploads(bucket_name, opts = {})
  logger.debug("Begin list multipart uploads, "\
               "bucket: #{bucket_name}, opts: #{opts}")

  sub_res = {'uploads' => nil}
  # Map caller options onto the service's query parameters, dropping
  # the ones that were not provided.
  params = {
    'prefix' => opts[:prefix],
    'upload-id-marker' => opts[:id_marker],
    'key-marker' => opts[:key_marker],
    'max-uploads' => opts[:limit],
    'encoding-type' => opts[:encoding]
  }.reject { |_, v| v.nil? }

  r = @http.get(
    {:bucket => bucket_name, :sub_res => sub_res},
    {:query => params})

  doc = parse_xml(r.body)
  # Object keys in the response may be URL-encoded; remember the
  # encoding so keys and markers can be decoded below.
  encoding = get_node_text(doc.root, 'EncodingType')
  txns = doc.css("Upload").map do |node|
    Multipart::Transaction.new(
      :id => get_node_text(node, "UploadId"),
      :object => get_node_text(node, "Key") { |x| decode_key(x, encoding) },
      :bucket => bucket_name,
      :creation_time =>
        get_node_text(node, "Initiated") { |t| Time.parse(t) }
    )
  end || []

  # Collect whichever pagination tokens are present in the response.
  more = {
    :prefix => 'Prefix',
    :limit => 'MaxUploads',
    :id_marker => 'UploadIdMarker',
    :next_id_marker => 'NextUploadIdMarker',
    :key_marker => 'KeyMarker',
    :next_key_marker => 'NextKeyMarker',
    :truncated => 'IsTruncated',
    :encoding => 'EncodingType'
  }.reduce({}) { |h, (k, v)|
    value = get_node_text(doc.root, v)
    value.nil?? h : h.merge(k => value)
  }

  # Convert string tokens to their natural types and decode the
  # key markers with the response's key encoding.
  update_if_exists(
    more, {
      :limit => ->(x) { x.to_i },
      :truncated => ->(x) { x.to_bool },
      :key_marker => ->(x) { decode_key(x, encoding) },
      :next_key_marker => ->(x) { decode_key(x, encoding) }
    }
  )

  logger.debug("Done list multipart transactions")

  [txns, more]
end
|
1291
|
+
|
1292
|
+
# Get a list of parts that are successfully uploaded in a
|
1293
|
+
# transaction.
|
1294
|
+
# @param txn_id [String] the upload id
|
1295
|
+
# @param opts [Hash] options:
|
1296
|
+
# @option opts [Integer] :marker the part number marker after
|
1297
|
+
# which to return parts
|
1298
|
+
# @option opts [Integer] :limit max number parts to return
|
1299
|
+
# @return [Array<Multipart::Part>, Hash] the returned parts and
|
1300
|
+
# a hash including next tokens, which includes:
|
1301
|
+
# * :marker [Integer] the marker used
|
1302
|
+
# * :limit [Integer] the limit used
|
1303
|
+
# * :next_marker [Integer] marker to continue list parts
|
1304
|
+
# * :truncated [Boolean] whether there are more parts to be
|
1305
|
+
# returned
|
1306
|
+
# List the parts already uploaded in a multipart transaction.
# Options: :marker, :limit, :encoding — see the YARD comment above.
# @return [Array<Multipart::Part>, Hash] parts plus pagination tokens
def list_parts(bucket_name, object_name, txn_id, opts = {})
  logger.debug("Begin list parts, bucket: #{bucket_name}, object: "\
               "#{object_name}, txn id: #{txn_id}, options: #{opts}")

  sub_res = {'uploadId' => txn_id}
  params = {}
  params['part-number-marker'] = opts[:marker] unless opts[:marker].nil?
  params['max-parts'] = opts[:limit] unless opts[:limit].nil?
  params['encoding-type'] = opts[:encoding] unless opts[:encoding].nil?

  r = @http.get(
    {:bucket => bucket_name, :object => object_name, :sub_res => sub_res},
    {:query => params})

  doc = parse_xml(r.body)
  parts = doc.css("Part").map do |node|
    Multipart::Part.new(
      :number => get_node_text(node, 'PartNumber', &:to_i),
      :etag => get_node_text(node, 'ETag'),
      :size => get_node_text(node, 'Size', &:to_i),
      :last_modified =>
        get_node_text(node, 'LastModified') { |x| Time.parse(x) })
  end || []

  # Collect whichever pagination tokens are present in the response.
  more = {}
  {
    :limit => 'MaxParts',
    :marker => 'PartNumberMarker',
    :next_marker => 'NextPartNumberMarker',
    :truncated => 'IsTruncated',
    :encoding => 'EncodingType'
  }.each do |key, tag|
    value = get_node_text(doc.root, tag)
    more[key] = value unless value.nil?
  end

  # Convert string tokens to their natural types.
  update_if_exists(
    more, {
      :limit => ->(x) { x.to_i },
      :truncated => ->(x) { x.to_bool }
    }
  )

  logger.debug("Done list parts, parts: #{parts}, more: #{more}")

  [parts, more]
end
|
1353
|
+
|
1354
|
+
# Get bucket/object url
|
1355
|
+
# @param [String] bucket the bucket name
|
1356
|
+
# @param [String] object the object name
|
1357
|
+
# @return [String] url for the bucket/object
|
1358
|
+
# Get bucket/object url.
# @param [String] bucket the bucket name
# @param [String] object the object name, or nil for the bucket url
# @return [String] url for the bucket/object
def get_request_url(bucket, object = nil)
  # Delegates to the HTTP layer, which knows the configured endpoint.
  @http.get_request_url(bucket, object)
end
|
1361
|
+
|
1362
|
+
# Get user's access key id
|
1363
|
+
# @return [String] the access key id
|
1364
|
+
# Get user's access key id from the client configuration.
# @return [String] the access key id
def get_access_key_id
  @config.access_key_id
end
|
1367
|
+
|
1368
|
+
# Get user's STS token
|
1369
|
+
# @return [String] the STS token
|
1370
|
+
# Get user's STS token from the client configuration.
# @return [String] the STS token (may be nil when STS is not used)
def get_sts_token
  @config.sts_token
end
|
1373
|
+
|
1374
|
+
# Sign a string using the stored access key secret
|
1375
|
+
# @param [String] string_to_sign the string to sign
|
1376
|
+
# @return [String] the signature
|
1377
|
+
# Sign a string using the stored access key secret.
# @param [String] string_to_sign the string to sign
# @return [String] the signature
def sign(string_to_sign)
  # The actual HMAC computation lives in Util.
  Util.sign(@config.access_key_secret, string_to_sign)
end
|
1380
|
+
|
1381
|
+
private
|
1382
|
+
|
1383
|
+
# Parse body content to xml document
|
1384
|
+
# @param content [String] the xml content
|
1385
|
+
# @return [Nokogiri::XML::Document] the parsed document
|
1386
|
+
# Parse an XML string into a document, stripping blank text nodes so
# element lookups are not polluted by whitespace.
# @param content [String] the xml content
# @return [Nokogiri::XML::Document] the parsed document
def parse_xml(content)
  Nokogiri::XML(content) do |cfg|
    cfg.options |= Nokogiri::XML::ParseOptions::NOBLANKS
  end
end
|
1393
|
+
|
1394
|
+
# Get the text of a xml node
|
1395
|
+
# @param node [Nokogiri::XML::Node] the xml node
|
1396
|
+
# @param tag [String] the node tag
|
1397
|
+
# @yield [String] the node text is given to the block
|
1398
|
+
# Extract the text of the first child of +node+ matching +tag+.
# Returns nil when the node or the matching child is missing. When a
# block is given and text was found, the block's result is returned.
def get_node_text(node, tag, &block)
  child = node && node.at_css(tag)
  value = child && child.text
  block && value ? block.call(value) : value
end
|
1403
|
+
|
1404
|
+
# Decode object key using encoding. If encoding is nil it
|
1405
|
+
# returns the key directly.
|
1406
|
+
# @param key [String] the object key
|
1407
|
+
# @param encoding [String] the encoding used
|
1408
|
+
# @return [String] the decoded key
|
1409
|
+
# Decode an object key using the given encoding; a nil encoding
# returns the key verbatim.
# @param key [String] the object key
# @param encoding [String, nil] the encoding used
# @return [String] the decoded key
# @raise [ClientError] when the encoding is not a supported one
def decode_key(key, encoding)
  return key unless encoding

  fail ClientError, "Unsupported key encoding: #{encoding}" unless
    KeyEncoding.include?(encoding)

  # URL is the only encoding actually handled today.
  CGI.unescape(key) if encoding == KeyEncoding::URL
end
|
1420
|
+
|
1421
|
+
# Transform x if x is not nil
|
1422
|
+
# @param x [Object] the object to transform
|
1423
|
+
# @yield [Object] the object if given to the block
|
1424
|
+
# @return [Object] the transformed object
|
1425
|
+
# Apply the block to +x+ when x is truthy; otherwise return nil.
# @param x [Object] the object to transform
# @return [Object, nil] the transformed object, or nil
def wrap(x, &block)
  x ? block.call(x) : nil
end
|
1428
|
+
|
1429
|
+
# Get conditions for HTTP headers
|
1430
|
+
# @param conditions [Hash] the conditions
|
1431
|
+
# @return [Hash] conditions for HTTP headers
|
1432
|
+
# Map precondition options onto their HTTP request headers.
# Time-valued conditions are rendered in HTTP-date format; etag
# conditions are passed through as-is.
# @param conditions [Hash] the conditions
# @return [Hash] conditions for HTTP headers
def get_conditions(conditions)
  headers = {}
  {
    :if_modified_since => 'if-modified-since',
    :if_unmodified_since => 'if-unmodified-since'
  }.each do |opt, header|
    headers[header] = conditions[opt].httpdate if conditions.key?(opt)
  end
  {
    :if_match_etag => 'if-match',
    :if_unmatch_etag => 'if-none-match'
  }.each do |opt, header|
    headers[header] = conditions[opt] if conditions.key?(opt)
  end
  headers
end
|
1447
|
+
|
1448
|
+
# Get copy conditions for HTTP headers
|
1449
|
+
# @param conditions [Hash] the conditions
|
1450
|
+
# @return [Hash] copy conditions for HTTP headers
|
1451
|
+
# Map precondition options onto the x-oss-copy-source-* HTTP headers
# used by copy operations. Time-valued conditions are rendered in
# HTTP-date format; etag conditions are passed through as-is.
# @param conditions [Hash] the conditions
# @return [Hash] copy conditions for HTTP headers
def get_copy_conditions(conditions)
  headers = {}
  {
    :if_modified_since => 'x-oss-copy-source-if-modified-since',
    :if_unmodified_since => 'x-oss-copy-source-if-unmodified-since'
  }.each do |opt, header|
    headers[header] = conditions[opt].httpdate if conditions.key?(opt)
  end
  {
    :if_match_etag => 'x-oss-copy-source-if-match',
    :if_unmatch_etag => 'x-oss-copy-source-if-none-match'
  }.each do |opt, header|
    headers[header] = conditions[opt] if conditions.key?(opt)
  end
  headers
end
|
1466
|
+
|
1467
|
+
# Get bytes range
|
1468
|
+
# @param range [Array<Integer>] range
|
1469
|
+
# @return [String] bytes range for HTTP headers
|
1470
|
+
# Build the HTTP Range header value from a byte range.
# @param range [Array<Integer>] [begin(inclusive), end(exclusive)]
# @return [String] e.g. "bytes=0-9" for [0, 10]
# @raise [ClientError] when range is not an array of 2 Integers
def get_bytes_range(range)
  # FIX: check against Integer instead of Fixnum — Fixnum was
  # deprecated in Ruby 2.4 and removed in Ruby 3.2, where the old
  # code raised NameError on any valid range.
  if range &&
     (!range.is_a?(Array) || range.size != 2 ||
      !range.at(0).is_a?(Integer) || !range.at(1).is_a?(Integer))
    fail ClientError, "Range must be an array containing 2 Integers."
  end

  # HTTP byte ranges are inclusive at both ends while the input's end
  # is exclusive — hence the minus one.
  "bytes=#{range.at(0)}-#{range.at(1) - 1}"
end
|
1479
|
+
|
1480
|
+
# Update values for keys that exist in hash
|
1481
|
+
# @param hash [Hash] the hash to be updated
|
1482
|
+
# @param kv [Hash] keys & blocks to updated
|
1483
|
+
# Transform in place the values of +hash+ for the keys present in it,
# using the corresponding callable from +kv+.
# @param hash [Hash] the hash to be updated
# @param kv [Hash] keys mapped to transformation callables
def update_if_exists(hash, kv)
  kv.each do |key, transform|
    next unless hash.key?(key)
    hash[key] = transform.call(hash[key])
  end
end
|
1486
|
+
|
1487
|
+
# Convert hash keys to lower case Non-Recursively
|
1488
|
+
# @param hash [Hash] the hash to be converted
|
1489
|
+
# @return [Hash] hash with lower case keys
|
1490
|
+
# Produce a new hash whose keys are the stringified, down-cased keys
# of +hash+ (non-recursive); values are kept as-is.
# @param hash [Hash] the hash to be converted
# @return [Hash] hash with lower case string keys
def to_lower_case(hash)
  hash.each_with_object({}) do |(key, value), lowered|
    lowered[key.to_s.downcase] = value
  end
end
|
1496
|
+
|
1497
|
+
end # Protocol
|
1498
|
+
end # OSS
|
1499
|
+
end # Aliyun
|