fog-aliyun 0.3.18 → 0.3.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +7 -1
  3. data/fog-aliyun.gemspec +2 -2
  4. data/lib/fog/aliyun/models/storage/directories.rb +30 -53
  5. data/lib/fog/aliyun/models/storage/directory.rb +96 -17
  6. data/lib/fog/aliyun/models/storage/file.rb +127 -126
  7. data/lib/fog/aliyun/models/storage/files.rb +48 -127
  8. data/lib/fog/aliyun/requests/storage/abort_multipart_upload.rb +22 -0
  9. data/lib/fog/aliyun/requests/storage/complete_multipart_upload.rb +21 -0
  10. data/lib/fog/aliyun/requests/storage/copy_object.rb +14 -18
  11. data/lib/fog/aliyun/requests/storage/delete_bucket.rb +3 -9
  12. data/lib/fog/aliyun/requests/storage/delete_multiple_objects.rb +20 -0
  13. data/lib/fog/aliyun/requests/storage/delete_object.rb +10 -11
  14. data/lib/fog/aliyun/requests/storage/get_bucket.rb +22 -99
  15. data/lib/fog/aliyun/requests/storage/get_bucket_location.rb +33 -0
  16. data/lib/fog/aliyun/requests/storage/get_object.rb +20 -12
  17. data/lib/fog/aliyun/requests/storage/get_object_acl.rb +30 -0
  18. data/lib/fog/aliyun/requests/storage/get_object_http_url.rb +6 -9
  19. data/lib/fog/aliyun/requests/storage/get_object_https_url.rb +6 -9
  20. data/lib/fog/aliyun/requests/storage/get_service.rb +13 -0
  21. data/lib/fog/aliyun/requests/storage/head_object.rb +25 -13
  22. data/lib/fog/aliyun/requests/storage/initiate_multipart_upload.rb +19 -0
  23. data/lib/fog/aliyun/requests/storage/list_buckets.rb +5 -25
  24. data/lib/fog/aliyun/requests/storage/list_objects.rb +10 -62
  25. data/lib/fog/aliyun/requests/storage/put_bucket.rb +2 -8
  26. data/lib/fog/aliyun/requests/storage/put_object.rb +16 -122
  27. data/lib/fog/aliyun/requests/storage/upload_part.rb +24 -0
  28. data/lib/fog/aliyun/storage.rb +20 -4
  29. data/lib/fog/aliyun/version.rb +1 -1
  30. metadata +14 -10
  31. data/lib/fog/aliyun/requests/storage/delete_container.rb +0 -30
  32. data/lib/fog/aliyun/requests/storage/get_container.rb +0 -57
  33. data/lib/fog/aliyun/requests/storage/get_containers.rb +0 -61
  34. data/lib/fog/aliyun/requests/storage/put_container.rb +0 -29
data/lib/fog/aliyun/models/storage/files.rb
@@ -20,39 +20,13 @@ module Fog
 
  model Fog::Aliyun::Storage::File
 
- # check_directory_key have two functions:
- # 1. trim the directory_key suffix '/'
- # 2. checking whether the directory_key is a bucket.
- # If so, it will return directly to avoid to create a new redundant folder named with directory_key.
- # This point will be applied to multi-bucket and make bucket as a directory scenario.
- def check_directory_key(directory_key)
- bucket_name = nil
- if directory_key.is_a? Array
- directory_key = directory_key[0]
- end
- if directory_key != ''
- # trim the suffix '/'
- directory_key = directory_key.chomp('/')
- # The bucket name can not contain '/', so if directory_key, return directory.
- if directory_key.include? '/'
- directory_key
- elsif service.bucket_exists?(directory_key)
- bucket_name = directory_key
- directory_key = ''
- else
- directory_key
- end
- end
- return bucket_name, directory_key
- end
-
  def all(options = {})
  requires :directory
  options = {
- 'delimiter' => delimiter,
- 'marker' => marker,
- 'max-keys' => max_keys.to_i,
- 'prefix' => prefix
+ 'delimiter': delimiter,
+ 'marker': marker,
+ 'max-keys': max_keys.to_i,
+ 'prefix': prefix
  }.merge!(options)
  options = options.reject {|key,value| value.nil? || value.to_s.empty?}
  merge_attributes(options)
@@ -68,7 +42,8 @@ module Fog
  end
  end
 
- alias each_file_this_page each
+ alias_method :each_file_this_page, :each
+
  def each
  if !block_given?
  self
@@ -76,7 +51,7 @@ module Fog
  subset = dup.all
 
  subset.each_file_this_page { |f| yield f }
- while subset.length == (subset.limit || 10_000)
+ while subset.is_truncated
  subset = subset.all(marker: subset.last.key)
  subset.each_file_this_page { |f| yield f }
  end
@@ -87,123 +62,69 @@ module Fog
 
  def get(key, options = {}, &block)
  requires :directory
- bucket_name, directory_key = check_directory_key(directory.key)
- object = if directory_key == ''
- key
- else
- directory_key + '/' + key
- end
  begin
- data = service.get_object(object, options.merge({bucket: bucket_name}), &block)
- headers = data.headers
- lastModified = headers[:last_modified]
- last_modified = (Time.parse(lastModified).localtime if !lastModified.nil? && lastModified != '')
-
- date = headers[:date]
- date = (Time.parse(date).localtime if !date.nil? && date != '')
- file_data = {
- body: data.body,
- content_length: headers[:content_length].to_i,
- key: key,
- last_modified: last_modified,
- content_type: headers[:content_type],
- etag: headers[:etag],
- date: date,
- connection: headers[:connection],
- accept_ranges: headers[:accept_ranges],
- server: headers[:server],
- object_type: headers[:x_oss_object_type]
- }
-
+ data = service.get_object(directory.key, key, options, &block)
+ normalize_headers(data)
+ file_data = data.headers.merge({
+ :body => data.body,
+ :key => key
+ })
  new(file_data)
- rescue AliyunOssSdk::ServerError => error
- case error.error_code
- when %r{NoSuchKey},%r{SymlinkTargetNotExist}
- nil
- else
- raise(error)
+ rescue Exception => error
+ case error.http_code.to_i
+ when 404
+ nil
+ else
+ raise(error)
  end
  end
  end
 
- def get_url(key)
+ # @param options[Hash] No need to use
+ def get_url(key, options = {})
  requires :directory
- bucket_name, directory_key = check_directory_key(directory.key)
- object = if directory_key == ''
- key
- else
- directory_key + '/' + key
- end
- service.get_object_http_url_public(object, 3600, bucket: bucket_name)
+ service.get_object_http_url_public(directory.key, key, 3600)
  end
 
+ # @param options[Hash] No need to use
  def get_http_url(key, expires, options = {})
  requires :directory
- bucket_name, directory_key = check_directory_key(directory.key)
- object = if directory_key == ''
- key
- else
- directory_key + '/' + key
- end
- expires = expires.nil? ? 0 : expires.to_i
- service.get_object_http_url_public(object, expires, options.merge(bucket: bucket_name))
+ service.get_object_http_url_public(directory.key, key, expires)
  end
 
+ # @param options[Hash] No need to use
  def get_https_url(key, expires, options = {})
  requires :directory
- bucket_name, directory_key = check_directory_key(directory.key)
- object = if directory_key == ''
- key
- else
- directory_key + '/' + key
- end
- expires = expires.nil? ? 0 : expires.to_i
- service.get_object_https_url_public(object, expires, options.merge(bucket: bucket_name))
+ service.get_object_https_url_public(directory.key, key, expires)
  end
 
- def head(key, _options = {})
+ def head(key, options = {})
  requires :directory
- bucket_name, directory_key = check_directory_key(directory.key)
- object = if directory_key == ''
- key
- else
- directory_key + '/' + key
- end
- data = service.head_object(object, bucket: bucket_name).data
- return nil if data[:status] == 404
- lastModified = data[:headers]['Last-Modified']
- last_modified = (Time.parse(lastModified).localtime if !lastModified.nil? && lastModified != '')
-
- date = data[:headers]['Date']
- date = (Time.parse(date).localtime if !date.nil? && date != '')
-
- file_data = {
- content_length: data[:headers]['Content-Length'].to_i,
- key: key,
- last_modified: last_modified,
- content_type: data[:headers]['Content-Type'],
- etag: data[:headers]['ETag'],
- date: date,
- connection: data[:headers]['Connection'],
- accept_ranges: data[:headers]['Accept-Ranges'],
- server: data[:headers]['Server'],
- object_type: data[:headers]['x-oss-object-type']
- }
- new(file_data)
- rescue Fog::Aliyun::Storage::NotFound
- nil
+ begin
+ data = service.head_object(directory.key, key, options)
+ normalize_headers(data)
+ file_data = data.headers.merge({
+ :key => key
+ })
+ new(file_data)
+ rescue Exception => error
+ case error.http_code.to_i
+ when 404
+ nil
+ else
+ raise(error)
+ end
+ end
  end
 
  def new(attributes = {})
  requires :directory
- # Sometimes, the v will be a Array, like "Prefix"=>[{}], "Marker"=>[xxxx], "MaxKeys"=>["100"], "IsTruncated"=>["false"]
- # and there needs to parse them
- for k, v in attributes
- if !v.nil? && (v.is_a? Array) && (v.size > 0)
- attributes[k] = v[0]
- end
- end
- super({ directory: directory }.merge!(attributes))
+ super({ :directory => directory }.merge!(attributes))
+ end
+
+ def normalize_headers(data)
+ data.headers[:last_modified] = Time.parse(data.headers[:last_modified])
+ data.headers[:etag] = data.headers[:etag].gsub('"','')
  end
  end
  end
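
The rewritten each above pages through listings with the service-reported is_truncated flag instead of guessing from limit, and get/head now return nil on a 404. A minimal sketch of driving this model layer, assuming storage is an already-configured Fog::Storage connection for the aliyun provider and the bucket and key names are placeholders:

  # Enumerate every object in the bucket; pages are fetched lazily and
  # iteration continues while the listing reports is_truncated.
  dir = storage.directories.get('my-bucket')
  dir.files.each { |file| puts file.key }

  # Fetch a single object; per the rewritten #get above, a missing key (404) returns nil.
  file = dir.files.get('logs/2020-01-01.txt')
  puts file.body if file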
data/lib/fog/aliyun/requests/storage/abort_multipart_upload.rb
@@ -0,0 +1,22 @@
+
+ module Fog
+ module Aliyun
+ class Storage
+ class Real
+ #
+ # Abort a multipart upload
+ #
+ # @param [String] bucket_name Name of bucket to abort multipart upload on
+ # @param [String] object_name Name of object to abort multipart upload on
+ # @param [String] upload_id Id of upload to add part to
+ #
+ # @see https://help.aliyun.com/document_detail/31996.html
+ #
+ def abort_multipart_upload(bucket_name, object_name, upload_id)
+ @oss_protocol.abort_multipart_upload(bucket_name, object_name, upload_id)
+ end
+ end
+
+ end
+ end
+ end
data/lib/fog/aliyun/requests/storage/complete_multipart_upload.rb
@@ -0,0 +1,21 @@
+
+ module Fog
+ module Aliyun
+ class Storage
+ class Real
+ # Complete a multipart upload
+ #
+ # @param [String] bucket_name Name of bucket to complete multipart upload for
+ # @param [String] object_name Name of object to complete multipart upload for
+ # @param [String] upload_id Id of upload to add part to
+ # @param [Array] parts Array of etag and number as Strings for parts
+ #
+ # @see https://help.aliyun.com/document_detail/31995.html
+ #
+ def complete_multipart_upload(bucket_name, object_name, upload_id, parts)
+ @oss_protocol.complete_multipart_upload(bucket_name, object_name, upload_id, parts)
+ end
+ end
+ end
+ end
+ end
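
Together with initiate_multipart_upload and upload_part (also added in this release but not shown in this excerpt), these two requests cover the whole OSS multipart flow. A sketch of how they might be strung together; the signatures and return values of initiate_multipart_upload and upload_part are assumptions here, the bucket/object/file names are placeholders, and storage is the configured connection from the earlier sketch:

  bucket = 'my-bucket'            # placeholder
  object = 'backups/large.bin'    # placeholder

  # Assumed: initiate_multipart_upload returns the upload id for the new upload.
  upload_id = storage.initiate_multipart_upload(bucket, object)

  begin
    parts = []
    part_number = 1
    File.open('large.bin', 'rb') do |io|
      while (chunk = io.read(5 * 1024 * 1024))   # 5 MB parts
        # Assumed signature: upload_part(bucket, object, upload_id, part_number, data) -> etag.
        parts << storage.upload_part(bucket, object, upload_id, part_number, chunk)
        part_number += 1
      end
    end
    # complete_multipart_upload is shown above; the exact part descriptor format
    # ("Array of etag and number as Strings") is taken from its @param doc and assumed here.
    storage.complete_multipart_upload(bucket, object, upload_id, parts)
  rescue StandardError
    # abort_multipart_upload is shown above; it discards the uploaded parts on failure.
    storage.abort_multipart_upload(bucket, object, upload_id)
    raise
  end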
data/lib/fog/aliyun/requests/storage/copy_object.rb
@@ -7,25 +7,21 @@ module Fog
  # Copy object
  #
  # ==== Parameters
- # * source_bucket<~String> - Name of source bucket
- # * source_object<~String> - Name of source object
- # * target_bucket<~String> - Name of bucket to create copy in
- # * target_object<~String> - Name for new copy of object
+ # * source_bucket_name<~String> - Name of source bucket
+ # * source_object_name<~String> - Name of source object
+ # * target_bucket_name<~String> - Name of bucket to create copy in
+ # * target_object_name<~String> - Name for new copy of object
  # * options<~Hash> - Additional headers options={}
- def copy_object(source_bucket, source_object, target_bucket, target_object, options = {})
- options = options.reject { |_key, value| value.nil? }
- bucket = options[:bucket]
- bucket ||= @aliyun_oss_bucket
- source_bucket ||= bucket
- target_bucket ||= bucket
- headers = { 'x-oss-copy-source' => "/#{source_bucket}/#{source_object}" }
- resource = target_bucket + '/' + target_object
- request(expects: [200, 203],
- headers: headers,
- method: 'PUT',
- path: target_object,
- bucket: target_bucket,
- resource: resource)
+ def copy_object(source_bucket_name, source_object_name, target_bucket_name, target_object_name, options = {})
+ headers = { 'x-oss-copy-source' => "/#{source_bucket_name}#{object_to_path(source_object_name)}" }.merge!(options)
+ resources = {
+ :bucket => target_bucket_name,
+ :object => target_object_name
+ }
+ http_options = {
+ :headers => headers
+ }
+ @oss_http.put(resources, http_options)
  end
  end
  end
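
copy_object now takes explicit source and target bucket names instead of falling back to a configured default bucket, and extra request headers ride along in options. A small call sketch with placeholder names (storage as in the earlier sketches; the x-oss-metadata-directive header is standard OSS but optional here):

  # Copy reports/2020.csv from source-bucket into archive-bucket under the same key.
  storage.copy_object('source-bucket', 'reports/2020.csv',
                      'archive-bucket', 'reports/2020.csv',
                      'x-oss-metadata-directive' => 'COPY')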
data/lib/fog/aliyun/requests/storage/delete_bucket.rb
@@ -7,16 +7,10 @@ module Fog
  # Delete an existing bucket
  #
  # ==== Parameters
- # * bucket<~String> - Name of bucket to delete
+ # * bucket_name<~String> - Name of bucket to delete
  #
- def delete_bucket(bucket)
- resource = bucket + '/'
- request(
- expects: 204,
- method: 'DELETE',
- bucket: bucket,
- resource: resource
- )
+ def delete_bucket(bucket_name)
+ @oss_protocol.delete_bucket(bucket_name)
  end
  end
  end
data/lib/fog/aliyun/requests/storage/delete_multiple_objects.rb
@@ -0,0 +1,20 @@
+ module Fog
+ module Aliyun
+ class Storage
+ class Real
+
+ # Delete multiple objects from OSS
+ #
+ # @param bucket_name [String] Name of bucket containing object to delete
+ # @param object_names [Array] Array of object names to delete
+ #
+ # @see https://help.aliyun.com/document_detail/31983.html
+
+ def delete_multiple_objects(bucket_name, object_names, options = {})
+ bucket = @oss_client.get_bucket(bucket_name)
+ bucket.batch_delete_objects(object_names, options)
+ end
+ end
+ end
+ end
+ end
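
delete_multiple_objects hands the key list to the SDK's batch delete, so a whole set of objects goes in one request. A sketch with placeholder names, using storage as in the earlier sketches:

  # Remove several keys from one bucket in a single batch request.
  keys = ['tmp/a.log', 'tmp/b.log', 'tmp/c.log']
  storage.delete_multiple_objects('my-bucket', keys)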
data/lib/fog/aliyun/requests/storage/delete_object.rb
@@ -7,18 +7,17 @@ module Fog
  # Delete an existing object
  #
  # ==== Parameters
- # * object<~String> - Name of object to delete
+ # * bucket_name<~String> - Name of bucket to delete
+ # * object_name<~String> - Name of object to delete
  #
- def delete_object(object, options = {})
- bucket_name = options[:bucket]
- bucket_name ||= @aliyun_oss_bucket
- bucket = @oss_client.get_bucket(bucket_name)
- bucket.delete_object(object)
- end
-
- def abort_multipart_upload(bucket_name, object, upload_id)
- bucket = @oss_client.get_bucket(bucket_name)
- bucket.abort_upload(upload_id, object)
+ def delete_object(bucket_name, object_name, options = {})
+ # TODO Support versionId
+ # if version_id = options.delete('versionId')
+ # query = {'versionId' => version_id}
+ # else
+ # query = {}
+ # end
+ @oss_http.delete({:bucket => bucket_name, :object => object_name}, {:headers => options})
  end
  end
  end
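
Note the signature change: the bucket is no longer taken from options[:bucket] or the configured default bucket, it is now the first positional argument. A short migration sketch with placeholder names:

  # 0.3.18 style: bucket came from options or the default bucket
  #   storage.delete_object('reports/2020.csv', bucket: 'my-bucket')
  # 0.3.19 style: the bucket name is passed explicitly
  storage.delete_object('my-bucket', 'reports/2020.csv')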
data/lib/fog/aliyun/requests/storage/get_bucket.rb
@@ -8,121 +8,44 @@ module Fog
  @oss_client.bucket_exists?(bucket_name)
  end
 
- def get_bucket(bucket, options = {})
- prefix = options['prefix']
- marker = options['marker']
+ def get_bucket(bucket_name, options = {})
+ unless bucket_name
+ raise ArgumentError.new('bucket_name is required')
+ end
+
  # Set the GetBucket max limitation to 1000
- maxKeys = options['max-keys'] || 1000
+ maxKeys = options[:max_keys] || 1000
  maxKeys = maxKeys.to_i
- maxKeys = [maxKeys, 1000].min.to_s
- delimiter = options['delimiter']
- path = ''
- if prefix
- path += '/?prefix=' + prefix
- path += '&marker=' + marker if marker
- path += '&max-keys=' + maxKeys if maxKeys
- path += '&delimiter=' + delimiter if delimiter
+ maxKeys = [maxKeys, 1000].min
 
- elsif marker
- path += '/?marker=' + marker
- path += '&max-keys=' + maxKeys if maxKeys
- path += '&delimiter=' + delimiter if delimiter
-
- elsif maxKeys
- path += '/?max-keys=' + maxKeys
- path += '&delimiter=' + delimiter if delimiter
- elsif delimiter
- path += '/?delimiter=' + delimiter
- end
+ options[:limit] = maxKeys
+ options.delete(:max_keys)
 
- resource = bucket + '/'
- ret = request(
- expects: [200, 203, 404],
- method: 'GET',
- bucket: bucket,
- resource: resource,
- path: path
- )
- xml = ret.data[:body]
- XmlSimple.xml_in(xml)
+ @oss_protocol.list_objects(bucket_name, options)
  end
 
- def get_bucket_acl(bucket)
- attribute = '?acl'
- resource = bucket + '/' + attribute
- ret = request(
- expects: [200, 203],
- method: 'GET',
- path: attribute,
- bucket: bucket,
- resource: resource
- )
- XmlSimple.xml_in(ret.data[:body])['AccessControlList'][0]['Grant'][0]
+ def get_bucket_acl(bucket_name)
+ @oss_protocol.get_bucket_acl(bucket_name)
  end
 
- def get_bucket_CORSRules(bucket)
- attribute = '?cors'
- resource = bucket + '/' + attribute
- ret = request(
- expects: [200, 203, 404],
- method: 'GET',
- path: attribute,
- bucket: bucket,
- resource: resource
- )
- XmlSimple.xml_in(ret.data[:body])['CORSRule'][0] if ret.data[:status] != 404
+ def get_bucket_CORSRules(bucket_name)
+ @oss_protocol.get_bucket_cors(bucket_name)
  end
 
- def get_bucket_lifecycle(bucket)
- attribute = '?lifecycle'
- resource = bucket + '/' + attribute
- ret = request(
- expects: [200, 203, 404],
- method: 'GET',
- path: attribute,
- bucket: bucket,
- resource: resource
- )
- XmlSimple.xml_in(ret.data[:body])['Rule'][0] if ret.data[:status] != 404
+ def get_bucket_lifecycle(bucket_name)
+ @oss_protocol.get_bucket_lifecycle(bucket_name)
  end
 
- def get_bucket_logging(bucket)
- attribute = '?logging'
- resource = bucket + '/' + attribute
- ret = request(
- expects: [200, 203],
- method: 'GET',
- path: attribute,
- bucket: bucket,
- resource: resource
- )
- XmlSimple.xml_in(ret.data[:body])['LoggingEnabled'][0]['TargetPrefix']
+ def get_bucket_logging(bucket_name)
+ @oss_protocol.get_bucket_logging(bucket_name)
  end
 
- def get_bucket_referer(bucket)
- attribute = '?referer'
- resource = bucket + '/' + attribute
- ret = request(
- expects: [200, 203],
- method: 'GET',
- path: attribute,
- bucket: bucket,
- resource: resource
- )
- XmlSimple.xml_in(ret.data[:body])
+ def get_bucket_referer(bucket_name)
+ @oss_protocol.get_bucket_referer(bucket_name)
  end
 
- def get_bucket_website(bucket)
- attribute = '?website'
- resource = bucket + '/' + attribute
- ret = request(
- expects: [200, 203, 404],
- method: 'GET',
- path: attribute,
- bucket: bucket,
- resource: resource
- )
- XmlSimple.xml_in(ret.data[:body]) if ret.data[:status] != 404
+ def get_bucket_website(bucket_name)
+ @oss_protocol.get_bucket_website(bucket_name)
  end
  end
  end
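
get_bucket now requires a bucket name, caps the page size at 1000, maps :max_keys onto the SDK's :limit, and returns the SDK listing from list_objects rather than a parsed XML hash. A listing sketch with placeholder names (storage as in the earlier sketches; passing :prefix and :marker straight through to the SDK is assumed here):

  # List at most 100 keys under the logs/ prefix.
  listing = storage.get_bucket('my-bucket', max_keys: 100, prefix: 'logs/')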