s3sync 1.2.5 → 2.0.0
- data/bin/s3sync +67 -726
- data/lib/s3sync.rb +2 -0
- data/lib/s3sync/cli.rb +475 -0
- data/lib/s3sync/config.rb +98 -0
- data/lib/s3sync/exceptions.rb +55 -0
- data/lib/s3sync/sync.rb +371 -0
- data/lib/s3sync/util.rb +29 -0
- data/lib/s3sync/version.rb +27 -0
- metadata +177 -54
- data/CHANGELOG +0 -175
- data/README +0 -401
- data/README_s3cmd +0 -172
- data/Rakefile +0 -35
- data/bin/s3cmd +0 -245
- data/lib/HTTPStreaming.rb +0 -103
- data/lib/S3.rb +0 -707
- data/lib/S3_s3sync_mod.rb +0 -143
- data/lib/S3encoder.rb +0 -50
- data/lib/s3config.rb +0 -27
- data/lib/s3try.rb +0 -161
- data/lib/thread_generator.rb +0 -383
- data/lib/version.rb +0 -9
- data/setup.rb +0 -1585
data/lib/s3sync/config.rb
ADDED
@@ -0,0 +1,98 @@
# s3sync - Tool belt for managing your S3 buckets
#
# The MIT License (MIT)
#
# Copyright (c) 2013 Lincoln de Sousa <lincoln@clarete.li>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

# Part of this software was inspired by the original s3sync, so here's their
# copyright notice:

# This software code is made available "AS IS" without warranties of any
# kind. You may copy, display, modify and redistribute the software
# code either by itself or as incorporated into your code; provided that
# you do not remove any proprietary notices. Your use of this software
# code is at your own risk and you waive any claim against the author
# with respect to your use of this software code.
# (c) 2007 alastair brunton
#
# modified to search out the yaml in several places, thanks wkharold.

require 'yaml'
require 's3sync/exceptions'


module S3Sync

  class Config < Hash

    REQUIRED_VARS = [:AWS_ACCESS_KEY_ID, :AWS_SECRET_ACCESS_KEY]

    CONFIG_PATHS = ["#{ENV['S3SYNC_PATH']}", "#{ENV['HOME']}/.s3sync.yml", "/etc/s3sync.yml"]

    def read_from_file
      paths_checked = []

      CONFIG_PATHS.each do |path|

        # Filtering some garbage
        next if path.nil? or path.strip.empty?

        # Feeding the user feedback in case of failure
        paths_checked << path

        # Time for the dirty work, let's parse the config file and feed our
        # internal hash
        if File.exists? path
          config = YAML.load_file path
          config.each_pair do |key, value|
            self[key.upcase.to_sym] = value
          end
          return
        end
      end

      return paths_checked
    end

    def read_from_env
      REQUIRED_VARS.each do |v|
        self[v] = ENV[v.to_s] unless ENV[v.to_s].nil?
      end
    end

    def read
      # Reading from file and then trying from env
      paths_checked = read_from_file
      read_from_env

      # Checking which variables we have
      not_found = []

      REQUIRED_VARS.each {|v|
        not_found << v if self[v].nil?
      }

      # Cleaning possibly empty env var from CONFIG_PATH
      paths = (paths_checked || CONFIG_PATHS).select {|e| !e.empty?}
      raise NoConfigFound.new(not_found, paths) if not_found.count > 0
    end
  end
end
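Config is a Hash subclass: `read` first walks CONFIG_PATHS looking for a YAML file, then falls back to the AWS_* environment variables, and raises NoConfigFound when required keys are still missing. A minimal usage sketch; the driver below is hypothetical, only Config, read, NoConfigFound and its attributes come from the diff:

require 's3sync/config'

config = S3Sync::Config.new
begin
  config.read  # file first, then AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY from ENV
  puts "Loaded credentials for #{config[:AWS_ACCESS_KEY_ID]}"
rescue S3Sync::NoConfigFound => e
  warn "Missing #{e.missing_vars.join(', ')} (checked: #{e.paths_checked.join(', ')})"
  exit 1
end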
data/lib/s3sync/exceptions.rb
ADDED
@@ -0,0 +1,55 @@
# s3sync - Tool belt for managing your S3 buckets
#
# The MIT License (MIT)
#
# Copyright (c) 2013 Lincoln de Sousa <lincoln@clarete.li>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

module S3Sync

  class SyncException < StandardError
  end

  class NoConfigFound < SyncException

    attr_accessor :missing_vars
    attr_accessor :paths_checked

    def initialize missing_vars, paths_checked
      @missing_vars = missing_vars
      @paths_checked = paths_checked
    end
  end

  class WrongUsage < SyncException

    attr_accessor :error_code
    attr_accessor :msg

    def initialize(error_code, msg)
      @error_code = error_code || 1
      @msg = msg
    end
  end

  class FailureFeedback < SyncException
  end

end
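Every error type above inherits from SyncException, so a front end can catch them in one place; WrongUsage additionally carries an exit status. A hypothetical top-level handler, where run_requested_command is a placeholder and not part of the package:

require 's3sync/exceptions'

begin
  run_requested_command          # placeholder for the real CLI dispatch
rescue S3Sync::WrongUsage => e
  warn e.msg if e.msg
  exit e.error_code              # error_code defaults to 1 when none is given
rescue S3Sync::FailureFeedback => e
  warn e.message
  exit 1
end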
data/lib/s3sync/sync.rb
ADDED
@@ -0,0 +1,371 @@
# s3sync - Tool belt for managing your S3 buckets
#
# The MIT License (MIT)
#
# Copyright (c) 2013 Lincoln de Sousa <lincoln@clarete.li>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

# Part of this software was inspired by the original s3sync, so here's their
# copyright notice:

# (c) 2007 s3sync.net
#
# This software code is made available "AS IS" without warranties of any
# kind. You may copy, display, modify and redistribute the software
# code either by itself or as incorporated into your code; provided that
# you do not remove any proprietary notices. Your use of this software
# code is at your own risk and you waive any claim against the author
# with respect to your use of this software code.

require 'find'
require 'fileutils'
require 's3sync/util'

module S3Sync

  class Location
    attr_accessor :path
    attr_accessor :bucket

    def initialize path, bucket=nil
      raise RuntimeError if path.nil?
      @path = path
      @bucket = bucket || nil
    end

    def to_s
      out = []
      out << @bucket unless @bucket.nil?
      out << @path
      out.join ':'
    end

    def local?
      @bucket.nil?
    end

    def == other
      @path == other.path and @bucket == other.bucket
    end

    alias eql? ==
  end

  class Node
    include Comparable

    attr_accessor :base
    attr_accessor :path
    attr_accessor :size

    def initialize base, path, size
      @base = base
      @path = path
      @size = size
    end

    def full
      S3Sync.safe_join [@base, @path]
    end

    def == other
      full == other.full and @size == other.size
    end

    def <=> other
      if self.size < other.size
        -1
      elsif self.size > other.size
        1
      else
        0
      end
    end

    alias eql? ==
  end

  class LocalDirectory
    attr_accessor :source

    def initialize source
      @source = source
    end

    def list_files
      nodes = {}
      Find.find(@source) do |file|
        begin
          st = File.stat file # Might fail
          raise if not st.readable? # We're not interested in things we can't read
        rescue
          $stderr.puts "WARNING: Skipping unreadable file #{file}"
          Find.prune
        end

        # We don't support following symlinks for now, we don't need to follow
        # folders and I don't think we care about any other thing, right?
        next unless st.file?

        # We only need the relative path here
        file_name = file.gsub(/^#{@source}\/?/, '').squeeze('/')
        node = Node.new(@source.squeeze('/'), file_name, st.size)
        nodes[node.path] = node
      end

      return nodes
    end
  end

  class SyncCommand

    def self.cmp hash1, hash2
      same, to_add_to_2 = [], []

      hash1.each do |key, value|
        value2 = hash2.delete key
        if value2.nil?
          to_add_to_2 << value
        elsif value2.size == value.size
          same << value
        else
          to_add_to_2 << value
        end
      end

      to_remove_from_2 = hash2.values

      [same, to_add_to_2, to_remove_from_2]
    end

    def initialize args, source, destination
      @args = args
      @source = source
      @destination = destination
    end

    def run
      # Reading the source and destination using our helper method
      if (source, destination, bucket = self.class.parse_params [@source, @destination]).nil?
        raise WrongUsage.new(nil, 'Need a source and a destination')
      end

      # Getting the trees
      source_tree, destination_tree = read_trees source, destination

      # Getting the list of resources to be exchanged between the two peers
      _, to_add, to_remove = self.class.cmp source_tree, destination_tree

      # Removing the items matching the exclude pattern if requested
      to_add.select! { |e|
        begin
          (e.path =~ /#{@args.exclude}/).nil?
        rescue RegexpError => exc
          raise WrongUsage.new nil, exc.message
        end
      } if @args.exclude

      # Calling the methods that perform the actual IO
      if source.local?
        upload_files destination, to_add
        remove_files destination, to_remove unless @args.keep
      else
        download_files destination, source, to_add
        remove_local_files destination, source, to_remove unless @args.keep
      end
    end

    def self.parse_params args
      # Reading the arbitrary parameters from the command line and getting
      # modifiable copies to parse
      source, destination = args; return nil if source.nil? or destination.nil?

      # Sync from one s3 to another is currently not supported
      if remote_prefix? source and remote_prefix? destination
        raise WrongUsage.new(nil, 'Both arguments can\'t be on S3')
      end

      # C'mon, there's rsync out there
      if !remote_prefix? source and !remote_prefix? destination
        raise WrongUsage.new(nil, 'One argument must be on S3')
      end

      source, destination = process_destination source, destination
      return [Location.new(*source), Location.new(*destination)]
    end

    def self.remote_prefix?(prefix)
      # allow for dos-like things e.g. C:\ to be treated as local even with
      # colon.
      prefix.include? ':' and not prefix.match '^[A-Za-z]:[\\\\/]'
    end

    def self.process_file_destination source, destination, file=""
      if not file.empty?
        sub = (remote_prefix? source) ? source.split(":")[1] : source
        file = file.gsub(/^#{sub}/, '')
      end

      # no slash on end of source means we need to append the last src dir to
      # dst prefix testing for empty isn't good enough here.. needs to be
      # "empty apart from potentially having 'bucket:'"
      if source =~ %r{/$}
        if remote_prefix? destination and destination.end_with? ':'
          S3Sync.safe_join [destination, file]
        else
          File.join [destination, file]
        end
      else
        if remote_prefix? source
          _, name = source.split ":"
          File.join [destination, File.basename(name || ""), file]
        else
          source = /^\/?(.*)/.match(source)[1]

          # Corner case: the root of the remote path is empty, we don't want to
          # add an unnecessary slash here.
          if destination.end_with? ':'
            File.join [destination + source, file]
          else
            File.join [destination, source, file]
          end
        end
      end
    end

    def self.process_destination source, destination
      source, destination = source.dup, destination.dup

      # don't repeat slashes
      source.squeeze! '/'
      destination.squeeze! '/'

      # Making sure that local paths won't break our stuff later
      source.gsub!(/^\.\//, '')
      destination.gsub!(/^\.\//, '')

      # Parsing the final destination
      destination = process_file_destination source, destination, ""

      # here's where we find out what direction we're going
      source_is_s3 = remote_prefix? source

      # canonicalize the S3 stuff
      remote_prefix = source_is_s3 ? source : destination
      bucket, remote_prefix = remote_prefix.split ":"

      remote_prefix ||= ""

      # Just making sure we preserve the direction
      if source_is_s3
        [[remote_prefix, bucket], destination]
      else
        [source, [remote_prefix, bucket]]
      end
    end

    def read_tree_remote location
      dir = location.path
      dir += '/' if not dir.empty? and not dir.end_with?('/')

      nodes = {}
      @args.s3.buckets[location.bucket].objects.with_prefix(dir || "").to_a.collect do |obj|
        node = Node.new(location.path, obj.key, obj.content_length)
        nodes[node.path] = node
      end
      return nodes
    end

    def read_trees source, destination
      if source.local?
        source_tree = LocalDirectory.new(source.path).list_files
        destination_tree = read_tree_remote destination
      else
        source_tree = read_tree_remote source
        destination_tree = LocalDirectory.new(destination.path).list_files
      end

      [source_tree, destination_tree]
    end

    def upload_files remote, list
      list.each do |e|
        if @args.verbose
          puts " + #{e.full} => #{remote}#{e.path}"
        end

        unless @args.dry_run
          remote_path = "#{remote.path}#{e.path}"
          @args.s3.buckets[remote.bucket].objects[remote_path].write Pathname.new(e.full), :acl => @args.acl
        end
      end
    end

    def remove_files remote, list
      if @args.verbose
        list.each {|e|
          puts " - #{remote}#{e.path}"
        }
      end

      unless @args.dry_run
        @args.s3.buckets[remote.bucket].objects.delete_if { |obj| list.map(&:path).include? obj.key }
      end
    end

    def download_files destination, source, list
      list.each {|e|
        path = File.join destination.path, e.path

        if @args.verbose
          puts " + #{source}#{e.path} => #{path}"
        end

        unless @args.dry_run
          obj = @args.s3.buckets[source.bucket].objects[e.path]

          # Making sure this new file will have a safe shelter
          FileUtils.mkdir_p File.dirname(path)

          # Downloading and saving the files
          File.open(path, 'wb') do |file|
            obj.read do |chunk|
              file.write chunk
            end
          end
        end
      }
    end

    def remove_local_files destination, source, list
      list.each {|e|
        path = File.join destination.path, e.path

        if @args.verbose
          puts " * #{e.path} => #{path}"
        end

        unless @args.dry_run
          FileUtils.rm_rf path
        end
      }
    end
  end
end
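SyncCommand only needs the two location strings (the S3 side written as bucket:prefix) plus an options object responding to s3, exclude, keep, dry_run, verbose and acl. A rough sketch of driving it outside the bundled CLI, assuming the aws-sdk v1 AWS::S3 client that the buckets[...].objects calls above appear to target; everything here other than SyncCommand itself is illustrative:

require 'ostruct'
require 'pathname'   # upload_files wraps local paths in Pathname
require 'aws-sdk'    # assumption: the v1 SDK providing AWS::S3
require 's3sync/sync'

# Hypothetical stand-in for the parsed command line arguments.
options = OpenStruct.new(
  :s3      => AWS::S3.new,   # credentials taken from the environment
  :exclude => nil,           # optional regexp source used to skip files
  :keep    => false,         # when true, never delete remote leftovers
  :dry_run => false,
  :verbose => true,
  :acl     => :private
)

# Upload ./site to the "backups" bucket under the "site/" prefix.
S3Sync::SyncCommand.new(options, './site/', 'backups:site/').run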