baidubce-sdk 0.9.0
- checksums.yaml +7 -0
- data/.gitignore +12 -0
- data/.rspec +2 -0
- data/.travis.yml +5 -0
- data/CODE_OF_CONDUCT.md +74 -0
- data/Gemfile +6 -0
- data/LICENSE +177 -0
- data/README.md +1266 -0
- data/Rakefile +6 -0
- data/baidubce-sdk.gemspec +31 -0
- data/bin/console +14 -0
- data/bin/setup +8 -0
- data/lib/baidubce/auth/bce_credentials.rb +31 -0
- data/lib/baidubce/auth/bce_v1_signer.rb +76 -0
- data/lib/baidubce/bce_base_client.rb +52 -0
- data/lib/baidubce/bce_client_configuration.rb +47 -0
- data/lib/baidubce/bce_constants.rb +20 -0
- data/lib/baidubce/exception.rb +34 -0
- data/lib/baidubce/http/base_http_client.rb +259 -0
- data/lib/baidubce/http/http_constants.rb +102 -0
- data/lib/baidubce/retry_policy.rb +87 -0
- data/lib/baidubce/services/bos/bos_client.rb +461 -0
- data/lib/baidubce/services/bos/bos_constants.rb +25 -0
- data/lib/baidubce/services/sts/sts_client.rb +38 -0
- data/lib/baidubce/utils/log.rb +51 -0
- data/lib/baidubce/utils/utils.rb +124 -0
- data/lib/baidubce/version.rb +7 -0
- data/samples/baidubce/bos_sample.rb +376 -0
- data/samples/baidubce/sts_sample.rb +82 -0
- metadata +174 -0
data/lib/baidubce/http/http_constants.rb
@@ -0,0 +1,102 @@
+# Copyright 2017 Baidu, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+# except in compliance with the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the
+# License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+# either express or implied. See the License for the specific language governing permissions
+# and limitations under the License.
+
+# This module defines string constants for HTTP.
+
+module Baidubce
+    module Http
+
+        # HTTP Content Types
+        JSON_TYPE = 'application/json; charset=utf-8'
+        OCTET_STREAM_TYPE = 'application/octet-stream'
+
+        # HTTP Methods
+        GET = 'GET'
+        PUT = 'PUT'
+        POST = 'POST'
+        DELETE = 'DELETE'
+        HEAD = 'HEAD'
+
+        # HTTP Headers
+        AUTHORIZATION = "Authorization"
+        CACHE_CONTROL = "Cache-Control"
+        CONTENT_DISPOSITION = "Content-Disposition"
+        CONTENT_ENCODING = "Content-Encoding"
+        CONTENT_LENGTH = "Content-Length"
+        CONTENT_MD5 = "Content-MD5"
+        CONTENT_RANGE = "Content-Range"
+        CONTENT_TYPE = "Content-Type"
+        DATE = "Date"
+        ETAG = "ETag"
+        EXPIRES = "Expires"
+        HOST = "Host"
+        LAST_MODIFIED = "Last-Modified"
+        RANGE = "Range"
+        SERVER = "Server"
+        USER_AGENT = "User-Agent"
+
+        # BCE Common HTTP Headers
+        BCE_PREFIX = "x-bce-"
+        BCE_ACL = "x-bce-acl"
+        BCE_CONTENT_SHA256 = "x-bce-content-sha256"
+        BCE_CONTENT_CRC32 = "x-bce-content-crc32"
+        BCE_COPY_METADATA_DIRECTIVE = "x-bce-metadata-directive"
+        BCE_COPY_SOURCE = "x-bce-copy-source"
+        BCE_COPY_SOURCE_IF_MATCH = "x-bce-copy-source-if-match"
+        BCE_COPY_SOURCE_IF_MODIFIED_SINCE = "x-bce-copy-source-if-modified-since"
+        BCE_COPY_SOURCE_IF_NONE_MATCH = "x-bce-copy-source-if-none-match"
+        BCE_COPY_SOURCE_IF_UNMODIFIED_SINCE = "x-bce-copy-source-if-unmodified-since"
+        BCE_COPY_SOURCE_RANGE = "x-bce-copy-source-range"
+        BCE_DATE = "x-bce-date"
+        BCE_USER_METADATA_PREFIX = "x-bce-meta-"
+        BCE_REQUEST_ID = "x-bce-request-id"
+
+        # BOS HTTP Headers
+        BOS_DEBUG_ID = "x-bce-bos-debug-id"
+        BOS_STORAGE_CLASS = "x-bce-storage-class"
+
+        # STS HTTP Headers
+        STS_SECURITY_TOKEN = "x-bce-security-token"
+    end
+end
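The constants above are ordinary Ruby string constants, so any class in the gem can refer to them by short name after mixing in the module. Below is a minimal, hypothetical sketch of that pattern (MyClient is not part of the SDK; the snippet only assumes the gem's lib directory is on the load path):

require 'time'
require 'baidubce/http/http_constants'

# Hypothetical class: including Baidubce::Http makes the header and
# method-name constants available without the module prefix.
class MyClient
    include Baidubce::Http

    def default_headers
        {
            CONTENT_TYPE => JSON_TYPE,            # "Content-Type" => "application/json; charset=utf-8"
            BCE_DATE     => Time.now.utc.iso8601  # "x-bce-date"   => current UTC timestamp
        }
    end
end

puts MyClient.new.default_headers.inspect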
data/lib/baidubce/retry_policy.rb
@@ -0,0 +1,87 @@
+# Copyright 2017 Baidu, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+# except in compliance with the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the
+# License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+# either express or implied. See the License for the specific language governing permissions
+# and limitations under the License.
+
+# This module defines a retry policy for BCE.
+
+require_relative 'utils/log'
+
+module Baidubce
+
+    # A policy that never retries.
+    class NoRetryPolicy
+
+        # Always returns false.
+        def should_retry(http_code, retries_attempted)
+            false
+        end
+
+        # Always returns 0.
+        def get_delay_before_next_retry_in_millis(retries_attempted)
+            0
+        end
+
+    end
+
+    # A policy that retries with an exponential back-off strategy.
+    # This policy will keep retrying until the maximum number of retries is reached. The delay
+    # will be a fixed interval for the first retry, 2 * interval for the second, 4 * interval for
+    # the third, and so on. In general, the delay will be 2^number_of_retries_attempted * interval.
+    # When a maximum delay is specified, the delay will never exceed this limit.
+    class BackOffRetryPolicy
+
+        include Log
+
+        attr_accessor :max_error_retry, :max_delay_in_millis, :base_interval_in_millis
+
+        def initialize(max_error_retry=3,
+                       max_delay_in_millis=20 * 1000,
+                       base_interval_in_millis=300)
+
+            max_error_retry_msg = "max_error_retry should be a non-negative integer."
+            max_delay_in_millis_msg = "max_delay_in_millis should be a non-negative integer."
+            raise BceClientException.new(max_error_retry_msg) if max_error_retry < 0
+            raise BceClientException.new(max_delay_in_millis_msg) if max_delay_in_millis < 0
+            @max_error_retry = max_error_retry
+            @max_delay_in_millis = max_delay_in_millis
+            @base_interval_in_millis = base_interval_in_millis
+        end
+
+        # Returns true if the http client should retry the request.
+        def should_retry(http_code, retries_attempted)
+
+            # Stop retrying when the maximum number of retries is reached.
+            return false if retries_attempted >= @max_error_retry
+            return true if http_code.nil?
+
+            # Only retry on a subset of service exceptions.
+            if http_code == 408
+                logger.debug('Retry for request timeout.')
+                return true
+            end
+            if http_code >= 500 && http_code != 501
+                logger.debug('Retry for server error.')
+                return true
+            end
+            return false
+        end
+
+        # Returns the delay time in milliseconds before the next retry.
+        def get_delay_before_next_retry_in_millis(retries_attempted)
+            return 0 if retries_attempted < 0
+            delay_in_millis = (1 << retries_attempted) * @base_interval_in_millis
+            return @max_delay_in_millis if delay_in_millis > @max_delay_in_millis
+            return delay_in_millis
+        end
+    end
+
+end
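To see the back-off schedule above in action, the short sketch below instantiates the default policy and prints the computed delays. It assumes only that the gem's lib directory is on the load path; with the defaults base_interval_in_millis = 300 and max_delay_in_millis = 20000, the delays grow 300, 600, 1200, ... ms and are capped at 20000 ms:

require 'baidubce/retry_policy'

policy = Baidubce::BackOffRetryPolicy.new  # max_error_retry=3, max_delay=20s, base_interval=300ms

# A nil http_code (e.g. a connection failure) is retried, 501 never is,
# and nothing is retried once retries_attempted reaches max_error_retry.
puts policy.should_retry(nil, 0)   # => true
puts policy.should_retry(501, 0)   # => false
puts policy.should_retry(503, 3)   # => false (max_error_retry reached)

# Exponential delays: 2^retries_attempted * base_interval, capped at max_delay_in_millis.
(0..7).each do |n|
    puts "retry #{n}: #{policy.get_delay_before_next_retry_in_millis(n)} ms"
end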
data/lib/baidubce/services/bos/bos_client.rb
@@ -0,0 +1,461 @@
+# Copyright 2017 Baidu, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+# except in compliance with the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the
+# License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+# either express or implied. See the License for the specific language governing permissions
+# and limitations under the License.
+
+# This module provides a client class for BOS.
+
+require 'mimemagic'
+
+require_relative '../../bce_base_client'
+require_relative 'bos_constants'
+
+module Baidubce
+    module Services
+
+        class BosClient < BceBaseClient
+
+            # List buckets of the user.
+            # Returns all buckets owned by the user.
+            def list_buckets()
+                send_request(GET)
+            end
+
+            # Create a bucket with the specified name.
+            def create_bucket(bucket_name)
+                send_request(PUT, bucket_name)
+            end
+
+            # Delete the bucket with the specified name.
+            def delete_bucket(bucket_name)
+                send_request(DELETE, bucket_name)
+            end
+
+            # Check whether a bucket with the specified name exists.
+            def does_bucket_exist(bucket_name)
+                begin
+                    send_request(HEAD, bucket_name)
+                rescue BceServerException => e
+                    return false if e.status_code == 404
+                    return true if e.status_code == 403
+                end
+                true
+            end
+
+            # Get the region the bucket is located in.
+            # Returns the region of the bucket (bj/gz/sz).
+            def get_bucket_location(bucket_name)
+                params = { location: "" }
+                resp = send_request(GET, bucket_name, params)
+                resp['locationConstraint']
+            end
+
+            # Get the Access Control Level of a bucket.
+            def get_bucket_acl(bucket_name)
+                params = { acl: "" }
+                send_request(GET, bucket_name, params)
+            end
+
+            # Set the Access Control Level of a bucket by body.
+            def set_bucket_acl(bucket_name, acl)
+                params = { acl: "" }
+                headers = { CONTENT_TYPE => JSON_TYPE }
+                body = { accessControlList: acl }.to_json
+                send_request(PUT, bucket_name, params, "", headers, body)
+            end
+
+            # Set the Access Control Level of a bucket by headers.
+            def set_bucket_canned_acl(bucket_name, canned_acl)
+                params = { acl: "" }
+                headers = { BCE_ACL => canned_acl }
+                send_request(PUT, bucket_name, params, "", headers)
+            end
+
+            # Put Bucket Lifecycle.
+            def put_bucket_lifecycle(bucket_name, rules)
+                params = { lifecycle: "" }
+                headers = { CONTENT_TYPE => JSON_TYPE }
+                body = { rule: rules }.to_json
+                send_request(PUT, bucket_name, params, "", headers, body)
+            end
+
+            # Get Bucket Lifecycle.
+            def get_bucket_lifecycle(bucket_name)
+                params = { lifecycle: "" }
+                send_request(GET, bucket_name, params)
+            end
+
+            # Delete Bucket Lifecycle.
+            def delete_bucket_lifecycle(bucket_name)
+                params = { lifecycle: "" }
+                send_request(DELETE, bucket_name, params)
+            end
+
+            # Put Bucket Storageclass.
+            def put_bucket_storageclass(bucket_name, storage_class)
+                params = { storageClass: "" }
+                headers = { CONTENT_TYPE => JSON_TYPE }
+                body = { storageClass: storage_class }.to_json
+                send_request(PUT, bucket_name, params, "", headers, body)
+            end
+
+            # Get Bucket Storageclass.
+            def get_bucket_storageclass(bucket_name)
+                params = { storageClass: "" }
+                resp = send_request(GET, bucket_name, params)
+                resp['storageClass']
+            end
+
+            # Put Bucket Cors.
+            def put_bucket_cors(bucket_name, cors_configuration)
+                params = { cors: "" }
+                headers = { CONTENT_TYPE => JSON_TYPE }
+                body = { corsConfiguration: cors_configuration }.to_json
+                send_request(PUT, bucket_name, params, "", headers, body)
+            end
+
+            # Get Bucket Cors.
+            def get_bucket_cors(bucket_name)
+                params = { cors: "" }
+                send_request(GET, bucket_name, params)
+            end
+
+            # Delete Bucket Cors.
+            def delete_bucket_cors(bucket_name)
+                params = { cors: "" }
+                send_request(DELETE, bucket_name, params)
+            end
+
+            # Put Bucket Logging.
+            def put_bucket_logging(source_bucket, target_bucket, target_prefix="")
+                params = { logging: "" }
+                headers = { CONTENT_TYPE => JSON_TYPE }
+                body = { targetBucket: target_bucket, targetPrefix: target_prefix }.to_json
+                send_request(PUT, source_bucket, params, "", headers, body)
+            end
+
+            # Get Bucket Logging.
+            def get_bucket_logging(bucket_name)
+                params = { logging: "" }
+                send_request(GET, bucket_name, params)
+            end
+
+            # Delete Bucket Logging.
+            def delete_bucket_logging(bucket_name)
+                params = { logging: "" }
+                send_request(DELETE, bucket_name, params)
+            end
+
+            # List object information of a bucket.
+            def list_objects(bucket_name, options={})
+                params = { maxKeys: 1000 }
+                params.merge! options
+                send_request(GET, bucket_name, params)
+            end
+
+            def get_object(bucket_name, key, range, save_path=nil, return_body=true)
+                headers = range.nil? ? {} : get_range_header_dict(range)
+                send_request(GET, bucket_name, {}, key, headers, "", save_path, return_body)
+            end
+
+            # Get the content of an object as a string.
+            def get_object_as_string(bucket_name, key, range=nil)
+                get_object(bucket_name, key, range)
+            end
+
+            # Get the content of an object and write it to a file.
+            def get_object_to_file(bucket_name, key, save_path, range=nil)
+                get_object(bucket_name, key, range, save_path, false)
+            end
+
+            # Put an appendable object to BOS or add content to an appendable object.
+            def append_object(bucket_name, key, data, offset, content_md5, content_length, options={})
+                if content_length > MAX_APPEND_OBJECT_LENGTH
+                    raise BceClientException.new("Object length should be less than #{MAX_APPEND_OBJECT_LENGTH}. Use multi-part upload instead.")
+                end
+                params = { append: "" }
+                params[:offset] = offset unless offset.nil?
+                headers = {
+                    CONTENT_MD5 => content_md5,
+                    CONTENT_LENGTH => content_length,
+                }
+                headers.merge! options
+                populate_headers_with_user_metadata(headers) unless headers['user-metadata'].nil?
+                send_request(POST, bucket_name, params, key, headers, data)
+            end
+
+            # Create an appendable object and put the content of a string to the object,
+            # or add the content of a string to an appendable object.
+            def append_object_from_string(bucket_name, key, data, options={})
+                data_md5 = Digest::MD5.base64digest(data)
+                append_object(bucket_name, key, data, options['offset'], data_md5, data.bytesize, options)
+            end
+
+            # Put an object to BOS.
+            def put_object(bucket_name, key, data, content_md5, content_length, options, &block)
+                if content_length > MAX_PUT_OBJECT_LENGTH
+                    raise BceClientException.new("Object length should be less than #{MAX_PUT_OBJECT_LENGTH}. Use multi-part upload instead.")
+                end
+
+                headers = {
+                    CONTENT_MD5 => content_md5,
+                    CONTENT_LENGTH => content_length,
+                }
+                headers.merge! options
+                headers[CONTENT_TYPE] = OCTET_STREAM_TYPE if headers[CONTENT_TYPE].nil?
+                populate_headers_with_user_metadata(headers) unless headers['user-metadata'].nil?
+                send_request(PUT, bucket_name, {}, key, headers, data, &block)
+            end
+
+            # Create an object and put the content of a string to the object.
+            def put_object_from_string(bucket_name, key, data, options={})
+                data_md5 = Digest::MD5.base64digest(data)
+                put_object(bucket_name, key, data, data_md5, data.length, options)
+            end
+
+            # Put an object and upload the content of a file to the object.
+            def put_object_from_file(bucket_name, key, file_name, options={})
+                mime = MimeMagic.by_path(file_name)
+                options[CONTENT_TYPE] = mime.type if options[CONTENT_TYPE].nil? && !mime.nil?
+                buf_size = @config.recv_buf_size
+                if options[CONTENT_LENGTH].nil?
+                    data = File.open(file_name, "rb")
+                    data_md5 = Utils.get_md5_from_file(file_name, data.size, buf_size)
+                    put_object(bucket_name, key, data, data_md5, data.size, options)
+                else
+                    left_size = options[CONTENT_LENGTH]
+                    data_md5 = Utils.get_md5_from_file(file_name, left_size, buf_size)
+                    put_object(bucket_name, key, "", data_md5, left_size, options) do |buf_writer|
+                        File.open(file_name, "rb") do |part_fp|
+                            bytes_to_read = left_size > buf_size ? buf_size : left_size
+                            until left_size <= 0
+                                buf_writer << part_fp.read(bytes_to_read)
+                                left_size -= bytes_to_read
+                            end
+                        end
+                    end
+                end
+            end
+
+            # Get an authorization url with an expire time.
+            def generate_pre_signed_url(bucket_name, key, options={})
+                headers = options['headers'].nil? ? {} : options['headers']
+                params = options['params'].nil? ? {} : options['params']
+
+                path = Utils.append_uri("/", key)
+                url, headers[HOST] = Utils.parse_url_host(@config)
+                url.insert(url.index('/') + 2, bucket_name + '.')
+                headers[HOST] = bucket_name + '.' + headers[HOST]
+                params[AUTHORIZATION.downcase] = @signer.sign(@config.credentials,
+                                                              GET,
+                                                              path,
+                                                              headers,
+                                                              params,
+                                                              options['timestamp'],
+                                                              options['expiration_in_seconds'] || 1800,
+                                                              options['headers_to_sign'])
+                url += Utils.url_encode_except_slash(path)
+                query_str = Utils.get_canonical_querystring(params, false)
+                url += "?#{query_str}" unless query_str.to_s.empty?
+                url
+            end
+
+            # Get the metadata of an object.
+            def get_object_meta_data(bucket_name, key)
+                send_request(HEAD, bucket_name, {}, key)
+            end
+
+            # Copy one object to another object.
+            def copy_object(source_bucket_name, source_key, target_bucket_name, target_key, options={})
+                headers = options
+                headers[BCE_COPY_SOURCE_IF_MATCH] = headers['etag'] unless headers['etag'].nil?
+                if headers['user-metadata'].nil?
+                    headers[BCE_COPY_METADATA_DIRECTIVE] = 'copy'
+                else
+                    headers[BCE_COPY_METADATA_DIRECTIVE] = 'replace'
+                    populate_headers_with_user_metadata(headers)
+                end
+
+                headers[BCE_COPY_SOURCE] =
+                    Utils.url_encode_except_slash("/#{source_bucket_name}/#{source_key}")
+
+                send_request(PUT, target_bucket_name, {}, target_key, headers)
+            end
+
+            # Delete an object.
+            def delete_object(bucket_name, key)
+                send_request(DELETE, bucket_name, {}, key)
+            end
+
+            # Delete multiple objects.
+            def delete_multiple_objects(bucket_name, key_list)
+                params = { delete: "" }
+                key_arr = []
+                key_list.each { |item| key_arr << { key: item } }
+                body = { objects: key_arr }.to_json
+                ret = send_request(POST, bucket_name, params, "", {}, body, nil, true)
+                return ret.empty? ? {} : JSON.parse(ret)
+            end
+
+            # Initiate a multipart upload.
+            def initiate_multipart_upload(bucket_name, key, options={})
+                params = { uploads: "" }
+                send_request(POST, bucket_name, params, key, options)
+            end
+
+            # Upload a part.
+            def upload_part(bucket_name, key, upload_id, part_number, part_size, options={}, &block)
+                headers = options
+                params = { partNumber: part_number, uploadId: upload_id }
+                if part_number < MIN_PART_NUMBER || part_number > MAX_PART_NUMBER
+                    raise BceClientException.new(sprintf("Invalid part_number %d. The valid range is from %d to %d.",
+                                                         part_number, MIN_PART_NUMBER, MAX_PART_NUMBER))
+                end
+                if part_size > MAX_PUT_OBJECT_LENGTH
+                    raise BceClientException.new(sprintf("Single part length should be less than %d.",
+                                                         MAX_PUT_OBJECT_LENGTH))
+                end
+
+                headers[CONTENT_LENGTH] = part_size
+                headers[CONTENT_TYPE] = OCTET_STREAM_TYPE
+
+                send_request(POST, bucket_name, params, key, headers, &block)
+            end
+
+            # Upload a part from a file.
+            def upload_part_from_file(bucket_name, key, upload_id, part_number,
+                                      part_size, file_name, offset=0, options={})
+
+                left_size = part_size
+                buf_size = @config.send_buf_size
+                upload_part(bucket_name, key, upload_id, part_number, part_size, options) do |buf_writer|
+                    File.open(file_name, "rb") do |part_fp|
+                        part_fp.seek(offset)
+                        bytes_to_read = left_size > buf_size ? buf_size : left_size
+                        until left_size <= 0
+                            buf_writer << part_fp.read(bytes_to_read)
+                            left_size -= bytes_to_read
+                        end
+                    end
+                end
+            end
+
+            # Copy a part.
+            def upload_part_copy(source_bucket_name, source_key, target_bucket_name, target_key, upload_id,
+                                 part_number, part_size, offset, options={})
+                headers = options
+                params = { partNumber: part_number, uploadId: upload_id }
+
+                populate_headers_with_user_metadata(headers) unless headers['user-metadata'].nil?
+                headers[BCE_COPY_SOURCE_IF_MATCH] = headers['etag'] unless headers['etag'].nil?
+                headers[BCE_COPY_SOURCE_RANGE] = sprintf("bytes=%d-%d", offset, offset + part_size - 1)
+                headers[BCE_COPY_SOURCE] =
+                    Utils.url_encode_except_slash("/#{source_bucket_name}/#{source_key}")
+                send_request(PUT, target_bucket_name, params, target_key, headers)
+            end
+
+            # After all parts are uploaded, complete the multipart upload.
+            def complete_multipart_upload(bucket_name, key, upload_id, part_list, options={})
+                headers = options
+                params = { uploadId: upload_id }
+
+                populate_headers_with_user_metadata(headers) unless headers['user-metadata'].nil?
+                part_list.each { |part| part['eTag'].gsub!("\"", "") }
+                body = { parts: part_list }.to_json
+                send_request(POST, bucket_name, params, key, headers, body)
+            end
+
+            # Abort a multipart upload that is still in progress.
+            def abort_multipart_upload(bucket_name, key, upload_id)
+                params = { uploadId: upload_id }
+                send_request(DELETE, bucket_name, params, key)
+            end
+
+            # List all parts that have been uploaded successfully.
+            def list_parts(bucket_name, key, upload_id, options={})
+                params = { uploadId: upload_id }
+                params.merge! options
+                send_request(GET, bucket_name, params, key)
+            end
+
+            # List all multipart upload tasks that have not been finished (initiated with
+            # initiate_multipart_upload but neither completed nor aborted).
+            def list_multipart_uploads(bucket_name, options={})
+                params = { uploads: "" }
+                params.merge! options
+                send_request(GET, bucket_name, params)
+            end
+
+            # Get the object acl.
+            def get_object_acl(bucket_name, key)
+                params = { acl: "" }
+                send_request(GET, bucket_name, params, key)
+            end
+
+            # Set the object acl by body.
+            def set_object_acl(bucket_name, key, acl)
+                params = { acl: "" }
+                headers = { CONTENT_TYPE => JSON_TYPE }
+                body = { accessControlList: acl }.to_json
+                send_request(PUT, bucket_name, params, key, headers, body)
+            end
+
+            # Set the object acl by headers.
+            def set_object_canned_acl(bucket_name, key, canned_acl={})
+                params = { acl: "" }
+                send_request(PUT, bucket_name, params, key, canned_acl)
+            end
+
+            # Delete the object acl.
+            def delete_object_acl(bucket_name, key)
+                params = { acl: "" }
+                send_request(DELETE, bucket_name, params, key)
+            end
+
+            def send_request(http_method, bucket_name="", params={}, key="", headers={}, body="", save_path=nil, return_body=false, &block)
+                path = Utils.append_uri("/", bucket_name, key)
+                body, headers = @http_client.send_request(@config, @signer, http_method, path, params, headers, body, save_path, &block)
+                # Generate the result from headers and body.
+                Utils.generate_response(headers, body, return_body)
+            end
+
+            def get_range_header_dict(range)
+                raise BceClientException.new("range type should be an array") unless range.is_a? Array
+                raise BceClientException.new("range should have a length of 2") unless range.length == 2
+                raise BceClientException.new("all range elements should be integers") unless range.all? { |i| i.is_a?(Integer) }
+                { RANGE => "bytes=#{range[0]}-#{range[1]}" }
+            end
+
+            def populate_headers_with_user_metadata(headers)
+                meta_size = 0
+                user_metadata = headers['user-metadata']
+                raise BceClientException.new("user_metadata should be of type hash.") unless user_metadata.is_a? Hash
+
+                user_metadata.each do |k, v|
+                    k = k.encode(DEFAULT_ENCODING)
+                    v = v.encode(DEFAULT_ENCODING)
+                    normalized_key = BCE_USER_METADATA_PREFIX + k
+                    headers[normalized_key] = v
+                    meta_size += normalized_key.length
+                    meta_size += v.length
+                end
+
+                if meta_size > MAX_USER_METADATA_SIZE
+                    raise BceClientException.new("Metadata size should not be greater than #{MAX_USER_METADATA_SIZE}")
+                end
+                headers.delete('user-metadata')
+            end
+
+        end
+    end
+end
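To show how the client above is typically wired together, here is a small, hypothetical usage sketch in the spirit of samples/baidubce/bos_sample.rb. The BceCredentials and BceClientConfiguration constructor signatures and the endpoint URL are assumptions (those files are listed above but their contents are not part of this diff), so treat it as an illustration rather than the gem's documented API:

require 'baidubce/services/bos/bos_client'

# Assumed constructor signatures -- check bce_credentials.rb and
# bce_client_configuration.rb in the installed gem for the real ones.
credentials = Baidubce::Auth::BceCredentials.new("your-access-key-id", "your-secret-access-key")
conf = Baidubce::BceClientConfiguration.new(credentials, "http://bj.bcebos.com")
client = Baidubce::Services::BosClient.new(conf)

bucket = "my-demo-bucket"
client.create_bucket(bucket) unless client.does_bucket_exist(bucket)

# Upload a small string object, read it back, then build a pre-signed GET URL.
client.put_object_from_string(bucket, "greeting.txt", "hello bos")
puts client.get_object_as_string(bucket, "greeting.txt")
puts client.generate_pre_signed_url(bucket, "greeting.txt",
                                    'expiration_in_seconds' => 300)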