aliyun-sdk 0.1.3 → 0.1.4

@@ -198,8 +198,12 @@ module Aliyun
   headers['Date'] = Time.now.httpdate
   headers['Content-Type'] ||= DEFAULT_CONTENT_TYPE

-  if body = http_options[:body] and body.respond_to?(:read)
-    headers['Transfer-Encoding'] = 'chunked'
+  if body = http_options[:body]
+    if body.respond_to?(:read)
+      headers['Transfer-Encoding'] = 'chunked'
+    else
+      headers['Content-MD5'] = Util.get_content_md5(body)
+    end
   end

   res = {
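The new branch distinguishes streaming bodies (anything that responds to read, which keep the chunked transfer encoding) from in-memory string bodies, which now get a Content-MD5 integrity header. A minimal sketch of producing such a header value, assuming Util.get_content_md5 returns the base64-encoded MD5 digest that the Content-MD5 header expects (the helper name comes from the diff; the digest logic shown here is an assumption):

    require 'digest/md5'

    body = '{"hello":"oss"}'
    # Base64-encoded MD5 of the request body, suitable for headers['Content-MD5']
    content_md5 = Digest::MD5.base64digest(body)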
@@ -69,6 +69,24 @@ module Aliyun
   end
   end # Objects

+  ##
+  # Uploads iterator
+  #
+  class Uploads < Base
+    def initialize(protocol, bucket_name, opts = {})
+      super(protocol, opts)
+      @bucket = bucket_name
+    end
+
+    def fetch(more)
+      @results, cont = @protocol.list_multipart_uploads(@bucket, more)
+      @results = cont[:common_prefixes] + @results if cont[:common_prefixes]
+      @more[:id_marker] = cont[:next_id_marker]
+      @more[:key_marker] = cont[:next_key_marker]
+      @more[:truncated] = cont[:truncated] || false
+    end
+  end # Uploads
+
   end # Iterator
   end # OSS
   end # Aliyun
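The new iterator mirrors the existing Objects iterator: each fetch pulls one page from list_multipart_uploads and records the next id/key markers so iteration can resume where the previous page stopped. A rough usage sketch, based on the Bucket#list_uploads call exercised by the new spec at the end of this diff (the bucket variable and the prefix value are illustrative):

    # Enumerate unfinished multipart upload transactions under a prefix.
    bucket.list_uploads(prefix: 'my-dir/').each do |txn|
      puts "#{txn.object}: #{txn.id}"
    end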
@@ -12,6 +12,8 @@ module Aliyun
   module Logging

   DEFAULT_LOG_FILE = "./oss_sdk.log"
+  MAX_NUM_LOG = 100
+  ROTATE_SIZE = 10 * 1024 * 1024

   # level = Logger::DEBUG | Logger::INFO | Logger::ERROR | Logger::FATAL
   def self.set_log_level(level)
@@ -20,7 +22,7 @@ module Aliyun

   # Set the file that log output is written to
   def self.set_log_file(file)
-    @@log_file = file
+    @log_file = file
   end

   # Get the logger
@@ -32,7 +34,8 @@ module Aliyun

   def self.logger
     unless @logger
-      @logger = Logger.new(@@log_file ||= DEFAULT_LOG_FILE)
+      @logger = Logger.new(
+        @log_file ||= DEFAULT_LOG_FILE, MAX_NUM_LOG, ROTATE_SIZE)
       @logger.level = Logger::INFO
     end
     @logger
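Passing MAX_NUM_LOG and ROTATE_SIZE to Logger.new switches on the rotation built into Ruby's standard Logger (signature Logger.new(logdev, shift_age, shift_size)): up to 100 rotated files are kept, each rolled over once it reaches 10 MiB. The same behaviour in isolation:

    require 'logger'

    # Keep at most 100 old log files, rotating the current one at 10 MiB.
    logger = Logger.new('./oss_sdk.log', 100, 10 * 1024 * 1024)
    logger.level = Logger::INFO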
@@ -1,6 +1,7 @@
   # -*- encoding: utf-8 -*-

   require 'json'
+ require 'digest/md5'

   module Aliyun
   module OSS
@@ -22,8 +23,8 @@ module Aliyun
   private
   # Persist transaction states to file
   def write_checkpoint(states, file)
-    states[:md5] = Util.get_content_md5(states.to_json)
-    File.open(file, 'w'){ |f| f.write(states.to_json) }
+    md5 = Util.get_content_md5(states.to_json)
+    File.open(file, 'w') { |f| f.write(states.merge(md5: md5).to_json) }
   end

   # Load transaction states from file
@@ -8,7 +8,7 @@ module Aliyun
   #
   class Object < Struct::Base

-    attrs :key, :type, :size, :etag, :metas, :last_modified
+    attrs :key, :type, :size, :etag, :metas, :last_modified, :content_type

   end # Object
   end # OSS
@@ -745,7 +745,8 @@ module Aliyun
   :size => wrap(h[:content_length], &:to_i),
   :etag => h[:etag],
   :metas => metas,
-  :last_modified => wrap(h[:last_modified]) { |x| Time.parse(x) })
+  :last_modified => wrap(h[:last_modified]) { |x| Time.parse(x) },
+  :content_type => h[:content_type])

   logger.debug("Done get object")

@@ -786,7 +787,8 @@ module Aliyun
   :size => wrap(h[:content_length], &:to_i),
   :etag => h[:etag],
   :metas => metas,
-  :last_modified => wrap(h[:last_modified]) { |x| Time.parse(x) })
+  :last_modified => wrap(h[:last_modified]) { |x| Time.parse(x) },
+  :content_type => h[:content_type])

   logger.debug("Done get object meta")

@@ -871,7 +873,7 @@ module Aliyun
   # @param opts [Hash] options
   # @option opts [Boolean] :quiet indicates whether the server
   #   should return the delete result of the objects
-  # @option opts [String] :encoding-type the encoding type for
+  # @option opts [String] :encoding the encoding type for
   #   object key in the response body, only
   #   {OSS::KeyEncoding::URL} is supported now
   # @return [Array<String>] object names that have been
@@ -881,16 +883,16 @@ module Aliyun
   "objects: #{object_names}, options: #{opts}")

   sub_res = {'delete' => nil}
-  body = Nokogiri::XML::Builder.new do |xml|
-    xml.Delete {
-      xml.Quiet opts[:quiet]? true : false
-      object_names.each do |o|
-        xml.Object {
-          xml.Key o
-        }
-      end
-    }
-  end.to_xml
+
+  # It may have invisible chars in object key which will corrupt
+  # libxml. So we're constructing xml body manually here.
+  body = '<?xml version="1.0"?>'
+  body << '<Delete>'
+  body << '<Quiet>' << (opts[:quiet]? true : false).to_s << '</Quiet>'
+  object_names.each { |k|
+    body << '<Object><Key>' << k << '</Key></Object>'
+  }
+  body << '</Delete>'

   query = {}
   query['encoding-type'] = opts[:encoding] if opts[:encoding]
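For reference, the hand-built request body for deleting the keys "foo" and "bar" with :quiet left unset comes out as a single line:

    <?xml version="1.0"?><Delete><Quiet>false</Quiet><Object><Key>foo</Key></Object><Object><Key>bar</Key></Object></Delete>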
@@ -1144,8 +1146,8 @@ module Aliyun
   # transactions. That is: those started and not aborted.
   # @param bucket_name [String] the bucket name
   # @param opts [Hash] options:
-  # @option opts [String] :id_marker return only those transactions with
-  #   txn id after :id_marker
+  # @option opts [String] :id_marker return only those
+  #   transactions with txn id after :id_marker
   # @option opts [String] :key_marker the object key marker for
   #   a multipart upload transaction.
   #   1. if +:id_marker+ is not set, return only those
@@ -1156,8 +1158,6 @@ module Aliyun
   # @option opts [String] :prefix the prefix of the object key
   #   for a multipart upload transaction. if set only return
   #   those transactions with the object key prefixed with it
-  # @option opts [String] :delimiter the delimiter for the
-  #   object key for a multipart upload transaction.
   # @option opts [String] :encoding the encoding of object key
   #   in the response body. Only {OSS::KeyEncoding::URL} is
   #   supported now.
@@ -1165,7 +1165,6 @@ module Aliyun
   # the returned transactions and a hash including next tokens,
   # which includes:
   # * :prefix [String] the prefix used
-  # * :delimiter [String] the delimiter used
   # * :limit [Integer] the limit used
   # * :id_marker [String] the upload id marker used
   # * :next_id_marker [String] upload id marker to continue list
@@ -1183,7 +1182,6 @@ module Aliyun
   sub_res = {'uploads' => nil}
   params = {
     'prefix' => opts[:prefix],
-    'delimiter' => opts[:delimiter],
     'upload-id-marker' => opts[:id_marker],
     'key-marker' => opts[:key_marker],
     'max-uploads' => opts[:limit],
@@ -1210,7 +1208,6 @@ module Aliyun

   more = {
     :prefix => 'Prefix',
-    :delimiter => 'Delimiter',
     :limit => 'MaxUploads',
     :id_marker => 'UploadIdMarker',
     :next_id_marker => 'NextUploadIdMarker',
@@ -1227,7 +1224,6 @@ module Aliyun
   more, {
     :limit => ->(x) { x.to_i },
     :truncated => ->(x) { x.to_bool },
-    :delimiter => ->(x) { decode_key(x, encoding) },
     :key_marker => ->(x) { decode_key(x, encoding) },
     :next_key_marker => ->(x) { decode_key(x, encoding) }
   }
@@ -9,16 +9,22 @@ module Aliyun
   class Upload < Transaction
     PART_SIZE = 4 * 1024 * 1024
     READ_SIZE = 16 * 1024
+    NUM_THREAD = 10

     def initialize(protocol, opts)
       args = opts.dup
       @protocol = protocol
       @progress = args.delete(:progress)
       @file = args.delete(:file)
-      @checkpoint_file = args.delete(:cpt_file)
+      @cpt_file = args.delete(:cpt_file)
+      super(args)
+
       @file_meta = {}
+      @num_threads = options[:threads] || NUM_THREAD
+      @all_mutex = Mutex.new
       @parts = []
-      super(args)
+      @todo_mutex = Mutex.new
+      @todo_parts = []
     end

     # Run the upload transaction, which includes 3 stages:
@@ -27,8 +33,9 @@ module Aliyun
     # * 2. upload each unfinished part
     # * 3. commit the multipart upload transaction
     def run
-      logger.info("Begin upload, file: #{@file}, checkpoint file: " \
-                  "#{@checkpoint_file}")
+      logger.info("Begin upload, file: #{@file}, "\
+                  "checkpoint file: #{@cpt_file}, "\
+                  "threads: #{@num_threads}")

       # Rebuild transaction states from checkpoint file
       # Or initiate new transaction states
@@ -38,7 +45,17 @@ module Aliyun
       divide_parts if @parts.empty?

       # Upload each part
-      @parts.reject { |p| p[:done] }.each { |p| upload_part(p) }
+      @todo_parts = @parts.reject { |p| p[:done] }
+
+      (1..@num_threads).map {
+        Thread.new {
+          loop {
+            p = sync_get_todo_part
+            break unless p
+            upload_part(p)
+          }
+        }
+      }.map(&:join)

       # Commit the multipart upload transaction
       commit
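The serial per-part loop is replaced by a small worker pool: @num_threads threads repeatedly take the next pending part from a mutex-guarded to-do list (sync_get_todo_part, defined later in this diff) and exit once the list is empty. The same pattern in isolation, with placeholder work items:

    # N worker threads drain a shared, mutex-protected to-do list.
    todo  = (1..8).to_a
    mutex = Mutex.new

    workers = Array.new(4) do
      Thread.new do
        loop do
          item = mutex.synchronize { todo.shift }
          break unless item
          # ... real code would call upload_part(item) here ...
        end
      end
    end
    workers.each(&:join)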
@@ -62,24 +79,26 @@ module Aliyun
     #   :md5 => 'states_md5'
     # }
     def checkpoint
-      logger.debug("Begin make checkpoint, disable_cpt: #{options[:disable_cpt]}")
+      logger.debug("Begin make checkpoint, disable_cpt: "\
+                   "#{options[:disable_cpt] == true}")

       ensure_file_not_changed

+      parts = sync_get_all_parts
       states = {
         :id => id,
         :file => @file,
         :file_meta => @file_meta,
-        :parts => @parts
+        :parts => parts
       }

       # report progress
       if @progress
-        done = @parts.count { |p| p[:done] }
-        @progress.call(done.to_f / @parts.size) if done > 0
+        done = parts.count { |p| p[:done] }
+        @progress.call(done.to_f / parts.size) if done > 0
       end

-      write_checkpoint(states, @checkpoint_file) unless options[:disable_cpt]
+      write_checkpoint(states, @cpt_file) unless options[:disable_cpt]

       logger.debug("Done make checkpoint, states: #{states}")
     end
@@ -91,20 +110,24 @@ module Aliyun
     def commit
       logger.info("Begin commit transaction, id: #{id}")

-      parts = @parts.map{ |p| Part.new(:number => p[:number], :etag => p[:etag])}
+      parts = sync_get_all_parts.map{ |p|
+        Part.new(:number => p[:number], :etag => p[:etag])
+      }
       @protocol.complete_multipart_upload(bucket, object, id, parts)

-      File.delete(@checkpoint_file) unless options[:disable_cpt]
+      File.delete(@cpt_file) unless options[:disable_cpt]

       logger.info("Done commit transaction, id: #{id}")
     end

     # Rebuild the states of the transaction from checkpoint file
     def rebuild
-      logger.info("Begin rebuild transaction, checkpoint: #{@checkpoint_file}")
+      logger.info("Begin rebuild transaction, checkpoint: #{@cpt_file}")

-      if File.exists?(@checkpoint_file) and not options[:disable_cpt]
-        states = load_checkpoint(@checkpoint_file)
+      if options[:disable_cpt] || !File.exists?(@cpt_file)
+        initiate
+      else
+        states = load_checkpoint(@cpt_file)

         if states[:file_md5] != @file_meta[:md5]
           fail FileInconsistentError.new("The file to upload is changed.")
@@ -113,8 +136,6 @@ module Aliyun
         @id = states[:id]
         @file_meta = states[:file_meta]
         @parts = states[:parts]
-      else
-        initiate
       end

       logger.info("Done rebuild transaction, states: #{states}")
@@ -151,8 +172,8 @@ module Aliyun
          end
        end
      end
-      p[:done] = true
-      p[:etag] = result.etag
+
+      sync_update_part(p.merge(done: true, etag: result.etag))

       checkpoint

@@ -180,6 +201,24 @@ module Aliyun
       logger.info("Done divide parts, parts: #{@parts}")
     end

+    def sync_get_todo_part
+      @todo_mutex.synchronize {
+        @todo_parts.shift
+      }
+    end
+
+    def sync_update_part(p)
+      @all_mutex.synchronize {
+        @parts[p[:number] - 1] = p
+      }
+    end
+
+    def sync_get_all_parts
+      @all_mutex.synchronize {
+        @parts.dup
+      }
+    end
+
     # Ensure file not changed during uploading
     def ensure_file_not_changed
       return if File.mtime(@file) == @file_meta[:mtime]
@@ -3,6 +3,7 @@
   require 'time'
   require 'base64'
   require 'openssl'
+ require 'digest/md5'

   module Aliyun
   module OSS
@@ -3,7 +3,7 @@
   module Aliyun
   module OSS

-  VERSION = "0.1.3"
+  VERSION = "0.1.4"

   end # OSS
   end # Aliyun
@@ -62,6 +62,33 @@ module Aliyun
     end.to_xml
   end

+  def mock_uploads(txns, more = {})
+    Nokogiri::XML::Builder.new do |xml|
+      xml.ListMultipartUploadsResult {
+        {
+          :prefix => 'Prefix',
+          :delimiter => 'Delimiter',
+          :limit => 'MaxUploads',
+          :key_marker => 'KeyMarker',
+          :id_marker => 'UploadIdMarker',
+          :next_key_marker => 'NextKeyMarker',
+          :next_id_marker => 'NextUploadIdMarker',
+          :truncated => 'IsTruncated',
+          :encoding => 'EncodingType'
+        }.map do |k, v|
+          xml.send(v, more[k]) if more[k] != nil
+        end
+
+        txns.each do |t|
+          xml.Upload {
+            xml.Key t.object
+            xml.UploadId t.id
+          }
+        end
+      }
+    end.to_xml
+  end
+
   def mock_acl(acl)
     Nokogiri::XML::Builder.new do |xml|
       xml.AccessControlPolicy {
@@ -133,7 +160,8 @@ module Aliyun
   it "should list objects" do
     query_1 = {
       :prefix => 'list-',
-      :delimiter => '-'
+      :delimiter => '-',
+      'encoding-type' => 'url'
     }
     return_obj_1 = (1..5).map{ |i| Object.new(
       :key => "obj-#{i}",
@@ -148,7 +176,8 @@ module Aliyun
     query_2 = {
       :prefix => 'list-',
       :delimiter => '-',
-      :marker => 'foo'
+      :marker => 'foo',
+      'encoding-type' => 'url'
     }
     return_obj_2 = (6..8).map{ |i| Object.new(
       :key => "obj-#{i}",
@@ -337,6 +366,63 @@ module Aliyun
     end
   end # object operations

+  context "multipart operations" do
+    it "should list uploads" do
+      query_1 = {
+        :prefix => 'list-',
+        'encoding-type' => 'url',
+        'uploads' => ''
+      }
+      return_up_1 = (1..5).map{ |i| Multipart::Transaction.new(
+        :id => "txn-#{i}",
+        :object => "my-object",
+        :bucket => @bucket_name
+      )}
+      return_more_1 = {
+        :next_id_marker => "txn-5",
+        :truncated => true
+      }
+
+      query_2 = {
+        :prefix => 'list-',
+        'upload-id-marker' => 'txn-5',
+        'encoding-type' => 'url',
+        'uploads' => ''
+      }
+      return_up_2 = (6..8).map{ |i| Multipart::Transaction.new(
+        :id => "txn-#{i}",
+        :object => "my-object",
+        :bucket => @bucket_name
+      )}
+      return_more_2 = {
+        :next_id_marker => 'txn-8',
+        :truncated => false,
+      }
+
+      stub_request(:get, bucket_url)
+        .with(:query => query_1)
+        .to_return(:body => mock_uploads(return_up_1, return_more_1))
+
+      stub_request(:get, bucket_url)
+        .with(:query => query_2)
+        .to_return(:body => mock_uploads(return_up_2, return_more_2))
+
+      txns = @bucket.list_uploads(prefix: 'list-').to_a
+
+      expect(WebMock).to have_requested(:get, bucket_url)
+        .with(:query => query_1).times(1)
+      expect(WebMock).to have_requested(:get, bucket_url)
+        .with(:query => query_2).times(1)
+
+      all_txns = (1..8).map{ |i| Multipart::Transaction.new(
+        :id => "txn-#{i}",
+        :object => "my-object",
+        :bucket => @bucket_name
+      )}
+      expect(txns.map(&:to_s)).to match_array(all_txns.map(&:to_s))
+    end
+  end # multipart operations
+
   end # Bucket
   end # OSS
   end # Aliyun