aliyun-sdk 0.1.3 → 0.1.4
- checksums.yaml +4 -4
- data/CHANGELOG.md +21 -0
- data/examples/aliyun/oss/bucket.rb +144 -0
- data/examples/aliyun/oss/object.rb +182 -0
- data/examples/aliyun/oss/resumable_download.rb +40 -0
- data/examples/aliyun/oss/resumable_upload.rb +46 -0
- data/examples/aliyun/oss/streaming.rb +124 -0
- data/lib/aliyun/oss/bucket.rb +78 -23
- data/lib/aliyun/oss/client.rb +2 -0
- data/lib/aliyun/oss/download.rb +62 -27
- data/lib/aliyun/oss/http.rb +6 -2
- data/lib/aliyun/oss/iterator.rb +18 -0
- data/lib/aliyun/oss/logging.rb +5 -2
- data/lib/aliyun/oss/multipart.rb +3 -2
- data/lib/aliyun/oss/object.rb +1 -1
- data/lib/aliyun/oss/protocol.rb +17 -21
- data/lib/aliyun/oss/upload.rb +58 -19
- data/lib/aliyun/oss/util.rb +1 -0
- data/lib/aliyun/oss/version.rb +1 -1
- data/spec/aliyun/oss/client/bucket_spec.rb +88 -2
- data/spec/aliyun/oss/client/resumable_download_spec.rb +8 -5
- data/spec/aliyun/oss/client/resumable_upload_spec.rb +7 -4
- data/spec/aliyun/oss/multipart_spec.rb +0 -10
- data/spec/aliyun/oss/object_spec.rb +9 -10
- data/tests/test_content_type.rb +100 -0
- data/tests/test_large_file.rb +66 -0
- data/tests/test_multipart.rb +105 -0
- data/tests/test_object_key.rb +71 -0
- data/tests/test_resumable.rb +41 -0
- metadata +19 -2
data/lib/aliyun/oss/http.rb
CHANGED
@@ -198,8 +198,12 @@ module Aliyun
       headers['Date'] = Time.now.httpdate
       headers['Content-Type'] ||= DEFAULT_CONTENT_TYPE
 
-      if body = http_options[:body]
-        headers['Content-MD5'] = Util.get_content_md5(body)
+      if body = http_options[:body]
+        if body.respond_to?(:read)
+          headers['Transfer-Encoding'] = 'chunked'
+        else
+          headers['Content-MD5'] = Util.get_content_md5(body)
+        end
       end
 
       res = {
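
The change above picks chunked transfer encoding for streaming (IO-like) bodies, which cannot be hashed up front, and keeps the Content-MD5 integrity header for in-memory strings. A minimal standalone sketch of that decision logic (the `build_headers` helper is illustrative, not the gem's API; `Base64`/`Digest` stand in for `Util.get_content_md5`):

```ruby
require 'time'
require 'stringio'
require 'digest/md5'
require 'base64'

# Pick headers the way the diff above does: streaming bodies are sent
# chunked, plain strings get a Content-MD5 header.
def build_headers(body)
  headers = { 'Date' => Time.now.httpdate,
              'Content-Type' => 'application/octet-stream' }
  if body
    if body.respond_to?(:read)
      headers['Transfer-Encoding'] = 'chunked'
    else
      headers['Content-MD5'] = Base64.strict_encode64(Digest::MD5.digest(body))
    end
  end
  headers
end

p build_headers('hello world')           # includes Content-MD5
p build_headers(StringIO.new('stream'))  # includes Transfer-Encoding: chunked
```
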
data/lib/aliyun/oss/iterator.rb
CHANGED
@@ -69,6 +69,24 @@ module Aliyun
         end
       end # Objects
 
+      ##
+      # Uploads iterator
+      #
+      class Uploads < Base
+        def initialize(protocol, bucket_name, opts = {})
+          super(protocol, opts)
+          @bucket = bucket_name
+        end
+
+        def fetch(more)
+          @results, cont = @protocol.list_multipart_uploads(@bucket, more)
+          @results = cont[:common_prefixes] + @results if cont[:common_prefixes]
+          @more[:id_marker] = cont[:next_id_marker]
+          @more[:key_marker] = cont[:next_key_marker]
+          @more[:truncated] = cont[:truncated] || false
+        end
+      end # Objects
+
     end # Iterator
   end # OSS
 end # Aliyun
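
The new `Uploads` iterator keeps fetching pages until the server reports the listing is no longer truncated. A rough sketch of that pagination loop, using a fake protocol object in place of `Protocol#list_multipart_uploads` (the class and data below are placeholders, not the gem's internals):

```ruby
# Placeholder protocol that returns two "pages" of upload transactions,
# mimicking list_multipart_uploads(bucket, opts) => [results, more].
class FakeProtocol
  PAGES = [
    [%w[txn-1 txn-2], { next_id_marker: 'txn-2', truncated: true }],
    [%w[txn-3],       { next_id_marker: 'txn-3', truncated: false }]
  ]

  def list_multipart_uploads(_bucket, opts)
    PAGES[opts[:id_marker] ? 1 : 0]
  end
end

# Drive the same fetch-until-not-truncated loop the iterator implements.
protocol = FakeProtocol.new
more = { truncated: true }
all = []
while more[:truncated]
  results, cont = protocol.list_multipart_uploads('my-bucket', more)
  all.concat(results)
  more = { id_marker:  cont[:next_id_marker],
           key_marker: cont[:next_key_marker],
           truncated:  cont[:truncated] || false }
end

p all  # => ["txn-1", "txn-2", "txn-3"]
```
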
data/lib/aliyun/oss/logging.rb
CHANGED
@@ -12,6 +12,8 @@ module Aliyun
     module Logging
 
       DEFAULT_LOG_FILE = "./oss_sdk.log"
+      MAX_NUM_LOG = 100
+      ROTATE_SIZE = 10 * 1024 * 1024
 
       # level = Logger::DEBUG | Logger::INFO | Logger::ERROR | Logger::FATAL
       def self.set_log_level(level)
@@ -20,7 +22,7 @@ module Aliyun
 
       # Set the file that logs are written to
       def self.set_log_file(file)
-
+        @log_file = file
       end
 
       # Get the logger
@@ -32,7 +34,8 @@ module Aliyun
 
       def self.logger
         unless @logger
-          @logger = Logger.new(
+          @logger = Logger.new(
+            @log_file ||= DEFAULT_LOG_FILE, MAX_NUM_LOG, ROTATE_SIZE)
           @logger.level = Logger::INFO
         end
         @logger
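
`MAX_NUM_LOG` and `ROTATE_SIZE` are passed straight to Ruby's stdlib `Logger`, which rotates the file once it exceeds the given size and keeps at most that many old files. For reference, this is standard `Logger` behavior:

```ruby
require 'logger'

MAX_NUM_LOG = 100                # keep up to 100 rotated log files
ROTATE_SIZE = 10 * 1024 * 1024   # rotate once the file reaches 10 MB

# Same constructor call the diff above makes inside Logging.logger.
logger = Logger.new('./oss_sdk.log', MAX_NUM_LOG, ROTATE_SIZE)
logger.level = Logger::INFO
logger.info('logging with size-based rotation enabled')
```
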
data/lib/aliyun/oss/multipart.rb
CHANGED
@@ -1,6 +1,7 @@
 # -*- encoding: utf-8 -*-
 
 require 'json'
+require 'digest/md5'
 
 module Aliyun
   module OSS
@@ -22,8 +23,8 @@ module Aliyun
       private
       # Persist transaction states to file
       def write_checkpoint(states, file)
-
-        File.open(file, 'w'){ |f| f.write(states.to_json) }
+        md5 = Util.get_content_md5(states.to_json)
+        File.open(file, 'w') { |f| f.write(states.merge(md5: md5).to_json) }
       end
 
       # Load transaction states from file
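
The checkpoint is now written together with an MD5 of its own content, so a corrupted or hand-edited file can be detected when the transaction is rebuilt. A standalone sketch of that write/verify round trip (the method names follow the diff; `Digest::MD5` stands in for `Util.get_content_md5`, and the verification step is an assumption about how the `md5` field would be consumed):

```ruby
require 'json'
require 'digest/md5'

# Write the states plus an MD5 of their JSON form, as in the diff above.
def write_checkpoint(states, file)
  md5 = Digest::MD5.hexdigest(states.to_json)
  File.open(file, 'w') { |f| f.write(states.merge(md5: md5).to_json) }
end

# Load the states and verify the embedded MD5 (assumed usage).
def load_checkpoint(file)
  states = JSON.parse(File.read(file), symbolize_names: true)
  md5 = states.delete(:md5)
  raise 'checkpoint is corrupted' unless
    md5 == Digest::MD5.hexdigest(states.to_json)
  states
end

write_checkpoint({ id: 'txn-1', parts: [] }, '/tmp/upload.cpt')
p load_checkpoint('/tmp/upload.cpt')  # => {:id=>"txn-1", :parts=>[]}
```
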
data/lib/aliyun/oss/object.rb
CHANGED
data/lib/aliyun/oss/protocol.rb
CHANGED
@@ -745,7 +745,8 @@ module Aliyun
           :size => wrap(h[:content_length], &:to_i),
           :etag => h[:etag],
           :metas => metas,
-          :last_modified => wrap(h[:last_modified]) { |x| Time.parse(x) })
+          :last_modified => wrap(h[:last_modified]) { |x| Time.parse(x) },
+          :content_type => h[:content_type])
 
       logger.debug("Done get object")
 
@@ -786,7 +787,8 @@ module Aliyun
           :size => wrap(h[:content_length], &:to_i),
           :etag => h[:etag],
           :metas => metas,
-          :last_modified => wrap(h[:last_modified]) { |x| Time.parse(x) })
+          :last_modified => wrap(h[:last_modified]) { |x| Time.parse(x) },
+          :content_type => h[:content_type])
 
       logger.debug("Done get object meta")
 
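
The two hunks above add `:content_type` to the attribute hash built from the response headers in `get_object` and `get_object_meta`. A tiny illustration of that header-to-attribute mapping (a simplified standalone sketch, not the gem's parsing code):

```ruby
require 'time'

# Simplified version of the header-to-attribute mapping, now carrying
# the Content-Type through alongside size, etag and last_modified.
def object_meta_from_headers(h)
  {
    size:          h[:content_length] && h[:content_length].to_i,
    etag:          h[:etag],
    last_modified: h[:last_modified] && Time.parse(h[:last_modified]),
    content_type:  h[:content_type]
  }
end

headers = {
  content_length: '1024',
  etag: '"abc123"',
  last_modified: 'Mon, 02 Nov 2015 08:00:00 GMT',
  content_type: 'image/png'
}
p object_meta_from_headers(headers)[:content_type]  # => "image/png"
```
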
@@ -871,7 +873,7 @@ module Aliyun
       # @param opts [Hash] options
       # @option opts [Boolean] :quiet indicates whether the server
       #  should return the delete result of the objects
-      # @option opts [String] :encoding
+      # @option opts [String] :encoding the encoding type for
       #  object key in the response body, only
       #  {OSS::KeyEncoding::URL} is supported now
       # @return [Array<String>] object names that have been
@@ -881,16 +883,16 @@ module Aliyun
             "objects: #{object_names}, options: #{opts}")
 
       sub_res = {'delete' => nil}
-
-
-
-
-
-
-
-
-
-
+
+      # It may have invisible chars in object key which will corrupt
+      # libxml. So we're constructing xml body manually here.
+      body = '<?xml version="1.0"?>'
+      body << '<Delete>'
+      body << '<Quiet>' << (opts[:quiet]? true : false).to_s << '</Quiet>'
+      object_names.each { |k|
+        body << '<Object><Key>' << k << '</Key></Object>'
+      }
+      body << '</Delete>'
 
       query = {}
       query['encoding-type'] = opts[:encoding] if opts[:encoding]
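
The Delete request body is now assembled by plain string concatenation instead of an XML builder, because object keys may contain invisible/control characters that libxml rejects. A standalone sketch of the same construction (simplified; the helper name is illustrative and no additional escaping is shown):

```ruby
# Build the <Delete> body by hand, mirroring the change above: keys with
# control characters would break an XML builder, so concatenate strings.
def delete_objects_body(object_names, quiet: false)
  body = '<?xml version="1.0"?>'
  body << '<Delete>'
  body << '<Quiet>' << quiet.to_s << '</Quiet>'
  object_names.each { |k| body << '<Object><Key>' << k << '</Key></Object>' }
  body << '</Delete>'
end

puts delete_objects_body(["obj-1", "obj-\u0001weird"], quiet: true)
```
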
@@ -1144,8 +1146,8 @@ module Aliyun
       # transactions. That is: thoses started and not aborted.
       # @param bucket_name [String] the bucket name
       # @param opts [Hash] options:
-      # @option opts [String] :id_marker return only thoese
-      #  txn id after :id_marker
+      # @option opts [String] :id_marker return only thoese
+      #  transactions with txn id after :id_marker
       # @option opts [String] :key_marker the object key marker for
       #  a multipart upload transaction.
       #  1. if +:id_marker+ is not set, return only those
@@ -1156,8 +1158,6 @@ module Aliyun
       # @option opts [String] :prefix the prefix of the object key
       #  for a multipart upload transaction. if set only return
       #  those transactions with the object key prefixed with it
-      # @option opts [String] :delimiter the delimiter for the
-      #  object key for a multipart upload transaction.
       # @option opts [String] :encoding the encoding of object key
       #  in the response body. Only {OSS::KeyEncoding::URL} is
       #  supported now.
@@ -1165,7 +1165,6 @@ module Aliyun
       #  the returned transactions and a hash including next tokens,
       #  which includes:
       #  * :prefix [String] the prefix used
-      #  * :delimiter [String] the delimiter used
       #  * :limit [Integer] the limit used
       #  * :id_marker [String] the upload id marker used
       #  * :next_id_marker [String] upload id marker to continue list
@@ -1183,7 +1182,6 @@ module Aliyun
       sub_res = {'uploads' => nil}
       params = {
         'prefix' => opts[:prefix],
-        'delimiter' => opts[:delimiter],
         'upload-id-marker' => opts[:id_marker],
         'key-marker' => opts[:key_marker],
         'max-uploads' => opts[:limit],
@@ -1210,7 +1208,6 @@ module Aliyun
 
       more = {
         :prefix => 'Prefix',
-        :delimiter => 'Delimiter',
         :limit => 'MaxUploads',
         :id_marker => 'UploadIdMarker',
         :next_id_marker => 'NextUploadIdMarker',
@@ -1227,7 +1224,6 @@ module Aliyun
       more, {
         :limit => ->(x) { x.to_i },
         :truncated => ->(x) { x.to_bool },
-        :delimiter => ->(x) { decode_key(x, encoding) },
         :key_marker => ->(x) { decode_key(x, encoding) },
         :next_key_marker => ->(x) { decode_key(x, encoding) }
       }
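
After these hunks the uploads listing no longer sends or parses a `delimiter`; the request carries only prefix, markers, limit and encoding. A small sketch of the resulting query construction (parameter names follow the diff; the helper itself is illustrative):

```ruby
# Map caller options to the query parameters sent with ?uploads,
# mirroring the params hash above (note: no 'delimiter' anymore).
def uploads_query(opts)
  {
    'prefix'           => opts[:prefix],
    'upload-id-marker' => opts[:id_marker],
    'key-marker'       => opts[:key_marker],
    'max-uploads'      => opts[:limit],
    'encoding-type'    => opts[:encoding]
  }.reject { |_, v| v.nil? }
end

p uploads_query(prefix: 'backup/', limit: 100)
# => {"prefix"=>"backup/", "max-uploads"=>100}
```
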
data/lib/aliyun/oss/upload.rb
CHANGED
@@ -9,16 +9,22 @@ module Aliyun
     class Upload < Transaction
       PART_SIZE = 4 * 1024 * 1024
       READ_SIZE = 16 * 1024
+      NUM_THREAD = 10
 
       def initialize(protocol, opts)
         args = opts.dup
         @protocol = protocol
         @progress = args.delete(:progress)
         @file = args.delete(:file)
-        @
+        @cpt_file = args.delete(:cpt_file)
+        super(args)
+
         @file_meta = {}
+        @num_threads = options[:threads] || NUM_THREAD
+        @all_mutex = Mutex.new
         @parts = []
-
+        @todo_mutex = Mutex.new
+        @todo_parts = []
       end
 
       # Run the upload transaction, which includes 3 stages:
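
`initialize` now pulls the checkpoint path out of `:cpt_file`, defaults the thread count from `:threads`, and sets up the mutexes and queues used by the worker threads later on. A condensed standalone sketch of that option handling (plain Ruby, not the gem's class hierarchy; `UploadStateSketch` is a made-up name):

```ruby
NUM_THREAD = 10

# Condensed stand-in for Upload#initialize's option handling above.
class UploadStateSketch
  attr_reader :cpt_file, :num_threads, :parts, :todo_parts

  def initialize(opts = {})
    args         = opts.dup
    @file        = args.delete(:file)
    @cpt_file    = args.delete(:cpt_file)
    @num_threads = args[:threads] || NUM_THREAD
    @all_mutex   = Mutex.new   # guards @parts (all known parts)
    @parts       = []
    @todo_mutex  = Mutex.new   # guards @todo_parts (work queue)
    @todo_parts  = []
  end
end

s = UploadStateSketch.new(file: '/tmp/big', cpt_file: '/tmp/big.cpt', threads: 4)
p [s.cpt_file, s.num_threads]  # => ["/tmp/big.cpt", 4]
```
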
@@ -27,8 +33,9 @@ module Aliyun
       # * 2. upload each unfinished part
       # * 3. commit the multipart upload transaction
       def run
-        logger.info("Begin upload, file: #{@file},
-                    "#{@
+        logger.info("Begin upload, file: #{@file}, "\
+                    "checkpoint file: #{@cpt_file}, "\
+                    "threads: #{@num_threads}")
 
         # Rebuild transaction states from checkpoint file
         # Or initiate new transaction states
@@ -38,7 +45,17 @@ module Aliyun
         divide_parts if @parts.empty?
 
         # Upload each part
-        @parts.reject { |p| p[:done] }
+        @todo_parts = @parts.reject { |p| p[:done] }
+
+        (1..@num_threads).map {
+          Thread.new {
+            loop {
+              p = sync_get_todo_part
+              break unless p
+              upload_part(p)
+            }
+          }
+        }.map(&:join)
 
         # Commit the multipart upload transaction
         commit
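
The upload now runs a fixed pool of threads, each repeatedly popping the next unfinished part from a shared queue until it is empty. The same pattern in isolation (a sketch: `upload_part` is replaced by a stub, and a `Queue` collects results just for the demo):

```ruby
NUM_THREADS = 4
todo_mutex  = Mutex.new
todo_parts  = (1..10).map { |n| { number: n, done: false } }
results     = Queue.new   # thread-safe collector standing in for real uploads

# Stub standing in for the real upload_part(p) network call.
upload_part = ->(part) { results << part[:number] }

threads = (1..NUM_THREADS).map do
  Thread.new do
    loop do
      part = todo_mutex.synchronize { todo_parts.shift }  # sync_get_todo_part
      break unless part
      upload_part.call(part)
    end
  end
end
threads.each(&:join)

p results.size  # => 10, every part handled exactly once
```
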
@@ -62,24 +79,26 @@ module Aliyun
       #   :md5 => 'states_md5'
       # }
       def checkpoint
-        logger.debug("Begin make checkpoint, disable_cpt:
+        logger.debug("Begin make checkpoint, disable_cpt: "\
+                     "#{options[:disable_cpt] == true}")
 
         ensure_file_not_changed
 
+        parts = sync_get_all_parts
         states = {
           :id => id,
           :file => @file,
           :file_meta => @file_meta,
-          :parts =>
+          :parts => parts
         }
 
         # report progress
         if @progress
-          done =
-          @progress.call(done.to_f /
+          done = parts.count { |p| p[:done] }
+          @progress.call(done.to_f / parts.size) if done > 0
         end
 
-        write_checkpoint(states, @
+        write_checkpoint(states, @cpt_file) unless options[:disable_cpt]
 
         logger.debug("Done make checkpoint, states: #{states}")
       end
@@ -91,20 +110,24 @@ module Aliyun
       def commit
         logger.info("Begin commit transaction, id: #{id}")
 
-        parts =
+        parts = sync_get_all_parts.map{ |p|
+          Part.new(:number => p[:number], :etag => p[:etag])
+        }
         @protocol.complete_multipart_upload(bucket, object, id, parts)
 
-        File.delete(@
+        File.delete(@cpt_file) unless options[:disable_cpt]
 
         logger.info("Done commit transaction, id: #{id}")
       end
 
       # Rebuild the states of the transaction from checkpoint file
       def rebuild
-        logger.info("Begin rebuild transaction, checkpoint: #{@
+        logger.info("Begin rebuild transaction, checkpoint: #{@cpt_file}")
 
-        if File.exists?(@
-
+        if options[:disable_cpt] || !File.exists?(@cpt_file)
+          initiate
+        else
+          states = load_checkpoint(@cpt_file)
 
           if states[:file_md5] != @file_meta[:md5]
             fail FileInconsistentError.new("The file to upload is changed.")
@@ -113,8 +136,6 @@ module Aliyun
           @id = states[:id]
           @file_meta = states[:file_meta]
           @parts = states[:parts]
-        else
-          initiate
         end
 
         logger.info("Done rebuild transaction, states: #{states}")
@@ -151,8 +172,8 @@ module Aliyun
           end
         end
       end
-
-      p
+
+      sync_update_part(p.merge(done: true, etag: result.etag))
 
       checkpoint
 
@@ -180,6 +201,24 @@ module Aliyun
         logger.info("Done divide parts, parts: #{@parts}")
       end
 
+      def sync_get_todo_part
+        @todo_mutex.synchronize {
+          @todo_parts.shift
+        }
+      end
+
+      def sync_update_part(p)
+        @all_mutex.synchronize {
+          @parts[p[:number] - 1] = p
+        }
+      end
+
+      def sync_get_all_parts
+        @all_mutex.synchronize {
+          @parts.dup
+        }
+      end
+
       # Ensure file not changed during uploading
       def ensure_file_not_changed
         return if File.mtime(@file) == @file_meta[:mtime]
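
`rebuild` now initiates a fresh transaction when checkpointing is disabled or the checkpoint file is missing, and otherwise restores state from the file. A compact sketch of that branch (the two helpers below are stand-ins for the transaction's `initiate`/`load_checkpoint`, and the file paths are placeholders):

```ruby
require 'json'

# Stand-ins for the transaction's initiate / load_checkpoint steps.
def initiate
  { id: 'new-txn', parts: [] }
end

def load_checkpoint(file)
  JSON.parse(File.read(file), symbolize_names: true)
end

# Mirrors the rebuild branch above: skip the checkpoint when disabled
# or missing, otherwise resume from it.
def rebuild(cpt_file, disable_cpt: false)
  if disable_cpt || !File.exist?(cpt_file)
    initiate
  else
    load_checkpoint(cpt_file)
  end
end

p rebuild('/tmp/definitely-missing.cpt')           # => {:id=>"new-txn", :parts=>[]}
p rebuild('/tmp/whatever.cpt', disable_cpt: true)  # => {:id=>"new-txn", :parts=>[]}
```
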
data/lib/aliyun/oss/util.rb
CHANGED
data/lib/aliyun/oss/version.rb
CHANGED
data/spec/aliyun/oss/client/bucket_spec.rb
CHANGED
@@ -62,6 +62,33 @@ module Aliyun
       end.to_xml
     end
 
+    def mock_uploads(txns, more = {})
+      Nokogiri::XML::Builder.new do |xml|
+        xml.ListMultipartUploadsResult {
+          {
+            :prefix => 'Prefix',
+            :delimiter => 'Delimiter',
+            :limit => 'MaxUploads',
+            :key_marker => 'KeyMarker',
+            :id_marker => 'UploadIdMarker',
+            :next_key_marker => 'NextKeyMarker',
+            :next_id_marker => 'NextUploadIdMarker',
+            :truncated => 'IsTruncated',
+            :encoding => 'EncodingType'
+          }.map do |k, v|
+            xml.send(v, more[k]) if more[k] != nil
+          end
+
+          txns.each do |t|
+            xml.Upload {
+              xml.Key t.object
+              xml.UploadId t.id
+            }
+          end
+        }
+      end.to_xml
+    end
+
     def mock_acl(acl)
       Nokogiri::XML::Builder.new do |xml|
         xml.AccessControlPolicy {
@@ -133,7 +160,8 @@ module Aliyun
     it "should list objects" do
       query_1 = {
         :prefix => 'list-',
-        :delimiter => '-'
+        :delimiter => '-',
+        'encoding-type' => 'url'
       }
       return_obj_1 = (1..5).map{ |i| Object.new(
         :key => "obj-#{i}",
@@ -148,7 +176,8 @@ module Aliyun
       query_2 = {
         :prefix => 'list-',
         :delimiter => '-',
-        :marker => 'foo'
+        :marker => 'foo',
+        'encoding-type' => 'url'
       }
       return_obj_2 = (6..8).map{ |i| Object.new(
         :key => "obj-#{i}",
@@ -337,6 +366,63 @@ module Aliyun
       end
     end # object operations
 
+    context "multipart operations" do
+      it "should list uploads" do
+        query_1 = {
+          :prefix => 'list-',
+          'encoding-type' => 'url',
+          'uploads' => ''
+        }
+        return_up_1 = (1..5).map{ |i| Multipart::Transaction.new(
+          :id => "txn-#{i}",
+          :object => "my-object",
+          :bucket => @bucket_name
+        )}
+        return_more_1 = {
+          :next_id_marker => "txn-5",
+          :truncated => true
+        }
+
+        query_2 = {
+          :prefix => 'list-',
+          'upload-id-marker' => 'txn-5',
+          'encoding-type' => 'url',
+          'uploads' => ''
+        }
+        return_up_2 = (6..8).map{ |i| Multipart::Transaction.new(
+          :id => "txn-#{i}",
+          :object => "my-object",
+          :bucket => @bucket_name
+        )}
+        return_more_2 = {
+          :next_id_marker => 'txn-8',
+          :truncated => false,
+        }
+
+        stub_request(:get, bucket_url)
+          .with(:query => query_1)
+          .to_return(:body => mock_uploads(return_up_1, return_more_1))
+
+        stub_request(:get, bucket_url)
+          .with(:query => query_2)
+          .to_return(:body => mock_uploads(return_up_2, return_more_2))
+
+        txns = @bucket.list_uploads(prefix: 'list-').to_a
+
+        expect(WebMock).to have_requested(:get, bucket_url)
+          .with(:query => query_1).times(1)
+        expect(WebMock).to have_requested(:get, bucket_url)
+          .with(:query => query_2).times(1)
+
+        all_txns = (1..8).map{ |i| Multipart::Transaction.new(
+          :id => "txn-#{i}",
+          :object => "my-object",
+          :bucket => @bucket_name
+        )}
+        expect(txns.map(&:to_s)).to match_array(all_txns.map(&:to_s))
+      end
+    end # multipart operations
+
   end # Bucket
 end # OSS
 end # Aliyun
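
The new spec exercises `Bucket#list_uploads` end to end: two stubbed pages are fetched transparently while the caller just enumerates. Outside of tests, usage would look roughly like this (assuming a configured `bucket`, as in the gem's examples directory; the `object`/`id` accessors follow the spec above):

```ruby
# Assuming `bucket` is an Aliyun::OSS::Bucket from a configured client.
# Pagination (upload-id-marker / key-marker) is handled by the iterator.
bucket.list_uploads(prefix: 'backup/').each do |txn|
  puts "unfinished upload: object=#{txn.object} id=#{txn.id}"
end
```
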