s3sync 2.0.2 → 2.0.4

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 8672f843e8dfe49d8332c6ed8212679926efdca1
- data.tar.gz: 3142cb4b5b510800de393dd1a42033805f251eca
+ metadata.gz: 1647d25f01e1d5890e51aaddee4f23008ccc2b19
+ data.tar.gz: 2afdf32ee7d8cc6c15cde88e75462ecb57278141
  SHA512:
- metadata.gz: a937fce93d46e724f586fce116a922bf0c6e993a893054b596963f68ac4c8480ecab166097862232c6df771aece1760c9e03dd462fac43bb34d52d953d1b1933
- data.tar.gz: a17295b31fbc18d2d5e9a5476d0ba1bf3c54509bfd1c4425aea8d910bca15b4c84e2b1fe657d4ee447310750166d6d33d2871d7a24cdd88db1cf157b8271f105
+ metadata.gz: d027fe656ebfccf76cb80c61b1d7ee9926868a8a425c37c502ad88b408ac2743962840b0435d2f698ef990b0a5b78032bd0cada915384f97205104b26444359c
+ data.tar.gz: 2b5d672c278b9527f9821c6159145ef392b1a47b7d0e66c2467c58c21dc1d16a8f42939fc8cbc02a572229d10584e91645663352d05bf87088f27c0aa3576dbb
@@ -25,7 +25,7 @@
  require 's3sync/version'
  require 's3sync/exceptions'
  require 's3sync/sync'
- require 'aws/s3'
+ require 'aws-sdk'
  require 'cmdparse'


@@ -63,9 +63,9 @@ module S3Sync
  u.join ''
  end

- def execute(args)
+ def execute(*args)
  # Connecting to amazon
- s3 = AWS::S3.new
+ s3 = Aws::S3::Resource.new(client: Aws::S3::Client.new)

  # From the command line
  key, file = args
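As a reference for the new constructor call above, here is a minimal sketch of how an aws-sdk v2 client and resource can be wired together with explicit region and credentials; the environment variable names and the example bucket are placeholders, not part of this diff:

    require 'aws-sdk'

    # Placeholder configuration; aws-sdk v2 can also pick these up automatically.
    client = Aws::S3::Client.new(
      region: ENV['AWS_REGION'],                      # e.g. "us-east-1"
      access_key_id: ENV['AWS_ACCESS_KEY_ID'],
      secret_access_key: ENV['AWS_SECRET_ACCESS_KEY']
    )
    s3 = Aws::S3::Resource.new(client: client)
    s3.bucket('example-bucket')   # => Aws::S3::Bucket (no API call is made yet)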
@@ -78,13 +78,13 @@ module S3Sync
  # of the common errors here, saving duplications in each command;
  begin
  run s3, bucket, key, file, args
- rescue AWS::S3::Errors::AccessDenied
- raise FailureFeedback.new("Access Denied")
- rescue AWS::S3::Errors::NoSuchBucket
+ rescue Aws::S3::Errors::SignatureDoesNotMatch, Aws::S3::Errors::InvalidAccessKeyId => e
+ raise FailureFeedback.new("Access Denied: #{e.message}")
+ rescue Aws::S3::Errors::NoSuchBucket
  raise FailureFeedback.new("There's no bucket named `#{bucket}'")
- rescue AWS::S3::Errors::NoSuchKey
+ rescue Aws::S3::Errors::NoSuchKey
  raise FailureFeedback.new("There's no key named `#{key}' in the bucket `#{bucket}'")
- rescue AWS::S3::Errors::Base => exc
+ rescue Aws::S3::Errors::ServiceError => exc
  raise FailureFeedback.new("Error: `#{exc.message}'")
  end
  end
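In aws-sdk v2 the operation-specific classes such as Aws::S3::Errors::NoSuchBucket and NoSuchKey are generated subclasses of Aws::S3::Errors::ServiceError, so the trailing rescue above acts as a catch-all. A small illustration (the bucket name is made up):

    begin
      Aws::S3::Client.new.head_bucket(bucket: 'a-bucket-that-does-not-exist')
    rescue Aws::S3::Errors::NoSuchBucket, Aws::S3::Errors::NotFound => e
      puts "missing bucket: #{e.message}"
    rescue Aws::S3::Errors::ServiceError => e
      # Any other S3 failure (denied access, bad credentials, throttling, ...).
      puts "#{e.class}: #{e.message}"
    end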
@@ -101,7 +101,7 @@ module S3Sync

  class ListBuckets < BaseCmd
  def initialize
- super 'listbuckets', false, false, false
+ super 'listbuckets', takes_commands: false #, false, false

  @short_desc = "List all available buckets for your user"
  end
@@ -117,11 +117,11 @@ module S3Sync
  attr_accessor :acl

  def initialize
- super 'createbucket', false, false
+ super 'createbucket', takes_commands: false #, false

  @short_desc = "Create a new bucket under your user account"

- self.options = CmdParse::OptionParserWrapper.new do |opt|
+ self.options do |opt|
  parse_acl(opt)
  end
  end
@@ -136,8 +136,8 @@ module S3Sync
  params.merge!({:acl => @acl})
  end

- s3.buckets.create bucket, params
- rescue AWS::S3::Errors::BucketAlreadyExists
+ s3.client.create_bucket(params.merge(bucket: bucket))
+ rescue Aws::S3::Errors::BucketAlreadyExists
  raise FailureFeedback.new("Bucket `#{bucket}' already exists")
  end
  end
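For context, a sketch of the v2 bucket-creation call used above, with a canned ACL; the bucket name and ACL value are placeholders:

    s3 = Aws::S3::Resource.new
    begin
      # :acl accepts canned ACLs such as "private" or "public-read".
      s3.client.create_bucket(bucket: 'example-new-bucket', acl: 'private')
    rescue Aws::S3::Errors::BucketAlreadyExists, Aws::S3::Errors::BucketAlreadyOwnedByYou
      warn 'bucket already exists'
    end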
@@ -147,13 +147,13 @@ module S3Sync
  attr_accessor :force

  def initialize
- super 'deletebucket', false, false
+ super 'deletebucket', takes_commands: false #, false

  @short_desc = "Remove a bucket from your account"

  @force = false

- self.options = CmdParse::OptionParserWrapper.new do |opt|
+ self.options do |opt|
  opt.on("-f", "--force", "Clean the bucket then deletes it") {|f|
  @force = f
  }
@@ -164,7 +164,7 @@ module S3Sync
  raise WrongUsage.new(nil, "You need to inform a bucket") if not bucket

  # Getting the bucket
- bucket_obj = s3.buckets[bucket]
+ bucket_obj = s3.bucket(bucket)

  # Do not kill buckets with content unless explicitly asked
  if not @force and bucket_obj.objects.count > 0
@@ -179,7 +179,7 @@ module S3Sync
  attr_accessor :max_entries

  def initialize
- super 'list', false, false
+ super 'list', takes_commands: false #, false

  @short_desc = "List items filed under a given bucket"

@@ -189,7 +189,7 @@ module S3Sync

  @has_prefix = true

- self.options = CmdParse::OptionParserWrapper.new do |opt|
+ self.options do |opt|
  opt.on("-m", "--max-entries=NUM", "Limit the number of entries to output") {|m|
  @max_entries = m
  }
@@ -203,7 +203,7 @@ module S3Sync
  def run s3, bucket, key, file, args
  raise WrongUsage.new(nil, "You need to inform a bucket") if not bucket

- collection = s3.buckets[bucket].objects.with_prefix(key || "")
+ collection = s3.bucket(bucket).objects( prefix: (key || "") )

  if @max_entries > 0
  collection = collection.page(:per_page => @max_entries)
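Bucket#objects(prefix: ...) returns a lazily paginated collection in aws-sdk v2; capping the result is normally done with Collection#limit, as in this sketch (bucket, prefix, and the limit of 50 are placeholders):

    s3 = Aws::S3::Resource.new
    s3.bucket('example-bucket').objects(prefix: 'backups/').limit(50).each do |summary|
      puts "#{summary.key} (#{summary.size} bytes)"
    end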
@@ -223,7 +223,7 @@ module S3Sync

  class Delete < BaseCmd
  def initialize
- super 'delete', false, false
+ super 'delete', takes_commands: false #, false

  @short_desc = "Delete a key from a bucket"

@@ -233,7 +233,7 @@ module S3Sync
  def run s3, bucket, key, file, args
  raise WrongUsage.new(nil, "You need to inform a bucket") if not bucket
  raise WrongUsage.new(nil, "You need to inform a key") if not key
- s3.buckets[bucket].objects[key].delete
+ s3.buckets(bucket).object(key).delete
  end
  end
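For comparison, a single-key delete against the v2 resource interface, where Aws::S3::Resource#bucket (singular) takes the bucket name; both names here are placeholders:

    s3 = Aws::S3::Resource.new
    s3.bucket('example-bucket').object('path/to/key').delete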

@@ -242,7 +242,7 @@ module S3Sync
  attr_accessor :secure

  def initialize
- super 'url', false, false
+ super 'url', takes_commands: false #, false

  @short_desc = "Generates public urls or authenticated endpoints for the object"
  @description = "Notice that --method and --public are mutually exclusive"
@@ -252,7 +252,7 @@ module S3Sync
  @expires_in = false
  @has_prefix = 'required'

- self.options = CmdParse::OptionParserWrapper.new do |opt|
+ self.options do |opt|
  opt.on("-m", "--method=METHOD", "Options: #{AVAILABLE_METHODS.join ', '}") {|m|
  @method = m
  }
@@ -295,17 +295,17 @@ module S3Sync
  opts.merge!({:secure => @secure})

  if @public
- puts s3.buckets[bucket].objects[key].public_url(opts).to_s
+ puts s3.bucket(bucket).object(key).public_url(opts).to_s
  else
  opts.merge!({:expires => @expires_in}) if @expires_in
- puts s3.buckets[bucket].objects[key].url_for(method.to_sym, opts).to_s
+ puts s3.bucket(bucket).object(key).presigned_url(method.to_sym, opts).to_s
  end
  end
  end

  class Put < BaseCmd
  def initialize
- super 'put', false, false
+ super 'put', takes_commands: false #, false

  @short_desc = 'Upload a file to a bucket under a certain prefix'
  @has_prefix = true
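A sketch of the two URL styles in the v2 SDK assumed by the change above; bucket, key, and the one-hour expiry are placeholders:

    obj = Aws::S3::Resource.new.bucket('example-bucket').object('reports/2016.csv')

    obj.public_url                              # unsigned URL; only useful if the object is public
    obj.presigned_url(:get, expires_in: 3600)   # signed GET link, valid for one hour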
@@ -320,13 +320,13 @@ module S3Sync
  raise WrongUsage.new(nil, "You need to inform a file") if not file

  name = S3Sync.safe_join [key, File.basename(file)]
- s3.buckets[bucket].objects[name].write Pathname.new(file)
+ s3.bucket(bucket).object(name).upload_file(Pathname.new(file))
  end
  end

  class Get < BaseCmd
  def initialize
- super 'get', false, false
+ super 'get', takes_commands: false #, false
  @short_desc = "Retrieve an object and save to the specified file"
  @has_prefix = 'required'
  end
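Object#upload_file comes from the aws-sdk resource helpers and switches to multipart uploads for large files automatically; a minimal sketch (paths and names are placeholders):

    obj = Aws::S3::Resource.new.bucket('example-bucket').object('backups/db.dump')
    obj.upload_file('/tmp/db.dump', acl: 'private')   # accepts a String path or a Pathname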
@@ -345,7 +345,7 @@ module S3Sync
  path = File.absolute_path file
  path = S3Sync.safe_join [path, File.basename(key)] if File.directory? path
  File.open(path, 'wb') do |f|
- s3.buckets[bucket].objects[key].read do |chunk| f.write(chunk) end
+ s3.bucket(bucket).object(key).get do |chunk| f.write(chunk) end
  end
  end
  end
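Whether Object#get streams chunks to a block depends on the SDK version in use; the download form that is unambiguous in aws-sdk v2 writes the body straight to disk via :response_target, sketched here with placeholder names:

    obj = Aws::S3::Resource.new.bucket('example-bucket').object('backups/db.dump')
    obj.get(response_target: '/tmp/db.dump')   # streams the body directly to the file

    # The same thing on the low-level client:
    Aws::S3::Client.new.get_object(
      bucket: 'example-bucket',
      key: 'backups/db.dump',
      response_target: '/tmp/db.dump'
    )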
@@ -359,7 +359,7 @@ module S3Sync
  attr_accessor :acl

  def initialize
- super 'sync', false, false
+ super 'sync', takes_commands: false #, false

  @short_desc = "Synchronize an S3 and a local folder"
  @s3 = nil
@@ -368,7 +368,7 @@ module S3Sync
  @dry_run = false
  @verbose = false

- self.options = CmdParse::OptionParserWrapper.new do |opt|
+ self.options do |opt|
  opt.on("-x EXPR", "--exclude=EXPR", "Skip copying files that matches this pattern. (Ruby REs)") {|v|
  @exclude = v
  }
@@ -425,22 +425,16 @@ END
  end

  def run conf
- cmd = CmdParse::CommandParser.new true
- cmd.program_name = File.basename $0
- cmd.program_version = S3Sync::VERSION
+ cmd = CmdParse::CommandParser.new handle_exceptions: true
+ cmd.main_options do |opt|
+ opt.program_name = File.basename $0
+ opt.version = S3Sync::VERSION
+ end

- cmd.options = CmdParse::OptionParserWrapper.new do |opt|
+ cmd.global_options do |opt|
  opt.separator "Global options:"
  end

- cmd.main_command.short_desc = 'Tool belt for managing your S3 buckets'
- cmd.main_command.description =<<END.strip
- S3Sync provides a list of commands that will allow you to manage your content
- stored in S3 buckets. To learn about each feature, please use the `help`
- command:
-
- $ #{File.basename $0} help sync"
- END
  # Commands used more often
  cmd.add_command List.new
  cmd.add_command Delete.new
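The rewrite above follows the cmdparse 3 API, where the program name and version move into main_options and shared switches into global_options; a self-contained sketch mirroring it (the program name, version, and help-command wiring are illustrative):

    require 'cmdparse'

    parser = CmdParse::CommandParser.new(handle_exceptions: true)
    parser.main_options do |opt|
      opt.program_name = 'example'
      opt.version = '1.0.0'
    end
    parser.global_options do |opt|
      opt.separator 'Global options:'
    end
    parser.add_command(CmdParse::HelpCommand.new, default: true)
    parser.parse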
@@ -43,7 +43,7 @@ module S3Sync

  class Config < Hash

- REQUIRED_VARS = [:AWS_ACCESS_KEY_ID, :AWS_SECRET_ACCESS_KEY]
+ REQUIRED_VARS = [:AWS_ACCESS_KEY_ID, :AWS_SECRET_ACCESS_KEY, :AWS_REGION]

  CONFIG_PATHS = ["#{ENV['S3SYNC_PATH']}", "#{ENV['HOME']}/.s3sync.yml", "/etc/s3sync.yml"]
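The extra AWS_REGION requirement reflects the fact that aws-sdk v2 will not build a client without a region. One way to see all three variables being consumed (the values are placeholders):

    # Placeholder values; real ones come from your AWS account.
    ENV['AWS_ACCESS_KEY_ID']     ||= 'AKIAEXAMPLE'
    ENV['AWS_SECRET_ACCESS_KEY'] ||= 'example-secret'
    ENV['AWS_REGION']            ||= 'us-east-1'

    # aws-sdk v2 reads these environment variables when a client is built.
    Aws::S3::Client.new.config.region   # => "us-east-1"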

@@ -172,6 +172,7 @@ module S3Sync
  end

  def initialize args, source, destination
+ # @args.s3 should be of type Aws::S3::Resource
  @args = args
  @source = source
  @destination = destination
@@ -302,11 +303,14 @@ module S3Sync
  dir += '/' if not dir.empty? and not dir.end_with?('/')

  nodes = {}
- @args.s3.buckets[location.bucket].objects.with_prefix(dir || "").to_a.collect do |obj|
+
+ @args.s3.bucket(location.bucket).objects(prefix: (dir || "")).to_a.collect do |obj|
  # etag comes back with quotes (obj.etag.inspcet # => "\"abc...def\""
  small_comparator = lambda { obj.etag[/[a-z0-9]+/] }
  node = Node.new(location.path, obj.key, obj.content_length, small_comparator)
- nodes[node.path] = node
+ # The key is relative path from dir.
+ key = node.path[(dir || "").length,node.path.length - 1]
+ nodes[key] = node
  end
  return nodes
  end
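The added slice indexes each node by its key relative to the listed prefix; a tiny illustration with made-up strings (an equivalent, arguably clearer form uses String#sub):

    dir = 'photos/2016/'
    key = 'photos/2016/may/trip.jpg'

    key[dir.length, key.length - 1]          # => "may/trip.jpg"
    key.sub(/\A#{Regexp.escape(dir)}/, '')   # => "may/trip.jpg"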
@@ -326,12 +330,12 @@ module S3Sync
  def upload_files remote, list
  list.each do |e|
  if @args.verbose
- puts " + #{e.full} => #{remote}#{e.path}"
+ puts " + #{e.full} => #{remote.path}#{e.path}"
  end

  unless @args.dry_run
  remote_path = "#{remote.path}#{e.path}"
- @args.s3.buckets[remote.bucket].objects[remote_path].write Pathname.new(e.full), :acl => @args.acl
+ @args.s3.bucket(remote.bucket).object(remote_path).upload_file(Pathname.new(e.full), acl: @args.acl)
  end
  end
  end
@@ -344,7 +348,9 @@ module S3Sync
  end

  unless @args.dry_run
- @args.s3.buckets[remote.bucket].objects.delete_if { |obj| list.map(&:path).include? obj.key }
+ list.map(&:path).each do |object_key|
+ @args.s3.bucket(remote.bucket).object(object_key).delete
+ end
  end
  end
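Deleting one key per request is the most direct translation of the old collection call; for large syncs the v2 client can also remove up to 1,000 keys per request, sketched here with placeholder names:

    keys = ['old/a.txt', 'old/b.txt']
    Aws::S3::Client.new.delete_objects(
      bucket: 'example-bucket',
      delete: { objects: keys.map { |k| { key: k } } }
    )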

@@ -354,11 +360,11 @@ module S3Sync
  path = File.join destination.path, e.path

  if @args.verbose
- puts " + #{source}#{e.path} => #{path}"
+ puts " + #{source.bucket}:#{e.path} => #{path}"
  end

  unless @args.dry_run
- obj = @args.s3.buckets[source.bucket].objects[e.path]
+ obj = @args.s3.bucket(source.bucket).object(e.path)

  # Making sure this new file will have a safe shelter
  FileUtils.mkdir_p File.dirname(path)
@@ -371,14 +377,10 @@ module S3Sync
  else
  # Downloading and saving the files
  File.open(path, 'wb') do |file|
- begin
- obj.read do |chunk|
- file.write chunk
- end
- rescue AWS::Core::Http::NetHttpHandler::TruncatedBodyError => e
- $stderr.puts "WARNING: (retryable) TruncatedBodyError occured, retrying in a second #{file.basename}"
- sleep 1
- retry
+ # By default Aws::S3::Client will retry 3 times if there is a network error.
+ # To increase this number or disable it, set :retry_limit when instantiating the S3 client.
+ obj.get do |chunk|
+ file.write chunk
  end
  end
  end
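The removed hand-rolled retry loop leans on the SDK's built-in retries instead, as the new comment notes; tuning that behaviour happens when the client is constructed (the limit of 5 is arbitrary):

    # Retry transient network/server errors up to 5 times instead of the default 3.
    client = Aws::S3::Client.new(retry_limit: 5)
    s3 = Aws::S3::Resource.new(client: client)

    # retry_limit: 0 disables the automatic retries entirely.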
@@ -23,5 +23,5 @@
  # THE SOFTWARE.

  module S3Sync
- VERSION = "2.0.2"
+ VERSION = "2.0.4"
  end
metadata CHANGED
@@ -1,43 +1,43 @@
  --- !ruby/object:Gem::Specification
  name: s3sync
  version: !ruby/object:Gem::Version
- version: 2.0.2
+ version: 2.0.4
  platform: ruby
  authors:
  - Lincoln de Sousa
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-03-21 00:00:00.000000000 Z
+ date: 2016-08-14 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: aws-sdk
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: '0'
+ version: 2.3.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: '0'
+ version: 2.3.0
  - !ruby/object:Gem::Dependency
  name: cmdparse
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: '0'
+ version: '3.0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: '0'
+ version: '3.0'
  - !ruby/object:Gem::Dependency
  name: simplecov
  requirement: !ruby/object:Gem::Requirement
@@ -134,9 +134,9 @@ require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: '0'
+ version: '2'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -144,7 +144,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.2.0
+ rubygems_version: 2.5.1
  signing_key:
  specification_version: 4
  summary: s3sync is a library that aggregates a good range of features for managing