cprobert-s3sync 1.3.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/Rakefile ADDED
@@ -0,0 +1,21 @@
+ require 'rubygems'
+ require 'rake'
+
+ begin
+   require 'jeweler'
+   Jeweler::Tasks.new do |gem|
+     gem.name = "cprobert-s3sync"
+     gem.summary = %Q{Fork of s3sync to be compatible with ruby 1.9.}
+     gem.email = "cprobert@gmail.com"
+     gem.homepage = "http://s3sync.net"
+     gem.authors = ["cprobert"]
+     gem.files.include FileList.new('lib/**/*.rb', "bin/*",
+       "History.txt", "PostInstall.txt", "VERSION", "README.rdoc", "Rakefile")
+
+     gem.executables = ["s3sync", "s3cmd"]
+     # gem is a Gem::Specification... see http://www.rubygems.org/read/chapter/20 for additional settings
+   end
+
+ rescue LoadError
+   puts "Jeweler (or a dependency) not available. Install it with: sudo gem install jeweler"
+ end
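
Jeweler generates the actual gemspec from the block above at rake time. For readers unfamiliar with jeweler, a rough hand-written equivalent is sketched below; the field values come from the block and from data/VERSION, while the file name and the Dir globs are assumptions:

    # cprobert-s3sync.gemspec -- hypothetical hand-rolled equivalent of the Jeweler block
    Gem::Specification.new do |s|
      s.name        = "cprobert-s3sync"
      s.version     = "1.3.6"   # jeweler reads this from the VERSION file below
      s.summary     = "Fork of s3sync to be compatible with ruby 1.9."
      s.email       = "cprobert@gmail.com"
      s.homepage    = "http://s3sync.net"
      s.authors     = ["cprobert"]
      s.executables = ["s3sync", "s3cmd"]
      s.files       = Dir["lib/**/*.rb"] + Dir["bin/*"] +
                      ["History.txt", "PostInstall.txt", "VERSION", "README.rdoc", "Rakefile"]
    end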
data/VERSION ADDED
@@ -0,0 +1 @@
+ 1.3.6
data/bin/s3cmd ADDED
@@ -0,0 +1,338 @@
+ #!/usr/bin/env ruby
+ # This software code is made available "AS IS" without warranties of any
+ # kind. You may copy, display, modify and redistribute the software
+ # code either by itself or as incorporated into your code; provided that
+ # you do not remove any proprietary notices. Your use of this software
+ # code is at your own risk and you waive any claim against the author
+ # with respect to your use of this software code.
+ # (c) 2007 s3sync.net
+ #
+
+ module S3sync
+
+   # always look "here" for include files (thanks aktxyz)
+   # $LOAD_PATH << File.join(File.expand_path(File.dirname(__FILE__)), "../lib/s3sync/")
+   $LOAD_PATH << File.join(File.expand_path(File.dirname(__FILE__)), "../lib/")
+
+   require 's3sync'
+   require 'getoptlong'
+
+
+   def S3sync.s3cmdMain
+     # ---------- OPTIONS PROCESSING ---------- #
+
+     $S3syncOptions = Hash.new
+     optionsParser = GetoptLong.new(
+       [ '--help',       '-h', GetoptLong::NO_ARGUMENT ],
+       [ '--ssl',        '-s', GetoptLong::NO_ARGUMENT ],
+       [ '--verbose',    '-v', GetoptLong::NO_ARGUMENT ],
+       [ '--dryrun',     '-n', GetoptLong::NO_ARGUMENT ],
+       [ '--debug',      '-d', GetoptLong::NO_ARGUMENT ],
+       [ '--progress',         GetoptLong::NO_ARGUMENT ],
+       [ '--expires-in',       GetoptLong::REQUIRED_ARGUMENT ]
+     )
+
+     def S3sync.s3cmdUsage(message = nil)
+       $stderr.puts message if message
+       name = $0.split('/').last
+       $stderr.puts <<"ENDUSAGE"
+ #{name} [options] <command> [arg(s)]\t\tversion #{S3sync::VERSION}
+ --help -h --verbose -v --dryrun -n
+ --ssl -s --debug -d --progress
+ --expires-in=( <# of seconds> | [#d|#h|#m|#s] )
+
+ Commands:
+ #{name} listbuckets [headers]
+ #{name} createbucket <bucket> [constraint (e.g. EU)]
+ #{name} deletebucket <bucket> [headers]
+ #{name} list <bucket>[:prefix] [max/page] [delimiter] [headers]
+ #{name} location <bucket> [headers]
+ #{name} delete <bucket>:key [headers]
+ #{name} deleteall <bucket>[:prefix] [headers]
+ #{name} get|put <bucket>:key <file> [headers]
+ #{name} copy <bucket>:key <bucket>:key [headers]
+ #{name} copyall <bucket>:key <bucket>:key [headers]
+ #{name} headers <bucket>:key [headers]
+ ENDUSAGE
+       exit
+     end #usage
+
+     begin
+       optionsParser.each {|opt, arg| $S3syncOptions[opt] = (arg || true)}
+     rescue StandardError
+       s3cmdUsage # the parser already printed an error message
+     end
+     s3cmdUsage if $S3syncOptions['--help']
+     $S3syncOptions['--verbose'] = true if $S3syncOptions['--dryrun'] or
+                                           $S3syncOptions['--debug'] or
+                                           $S3syncOptions['--progress']
+
+     # change from "" to true to appease s3 port chooser
+     $S3syncOptions['--ssl'] = true if $S3syncOptions['--ssl']
+
+     if $S3syncOptions['--expires-in'] =~ /d|h|m|s/
+       e = $S3syncOptions['--expires-in']
+       days    = (e =~ /(\d+)d/)? (/(\d+)d/.match(e))[1].to_i : 0
+       hours   = (e =~ /(\d+)h/)? (/(\d+)h/.match(e))[1].to_i : 0
+       minutes = (e =~ /(\d+)m/)? (/(\d+)m/.match(e))[1].to_i : 0
+       seconds = (e =~ /(\d+)s/)? (/(\d+)s/.match(e))[1].to_i : 0
+       $S3syncOptions['--expires-in'] = seconds + 60 * ( minutes + 60 * ( hours + 24 * ( days ) ) )
+     end
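
The duration arithmetic above nests the units (seconds inside minutes inside hours inside days). A standalone sketch of the same conversion, with a hypothetical function name, for readers who want to check it:

    # expires_in_seconds: standalone restatement of the conversion above (name hypothetical)
    def expires_in_seconds(e)
      return e.to_i unless e =~ /d|h|m|s/
      days    = e[/(\d+)d/, 1].to_i   # String#[] returns nil on no match; nil.to_i is 0
      hours   = e[/(\d+)h/, 1].to_i
      minutes = e[/(\d+)m/, 1].to_i
      seconds = e[/(\d+)s/, 1].to_i
      seconds + 60 * (minutes + 60 * (hours + 24 * days))
    end

    expires_in_seconds("7d")     # => 604800
    expires_in_seconds("1d12h")  # => 129600
    expires_in_seconds("3600")   # => 3600 (plain seconds pass straight through)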
+
+     # ---------- CONNECT ---------- #
+     S3sync::s3trySetup
+
+
+     # ---------- COMMAND PROCESSING ---------- #
+     command, path, file = ARGV
+
+     s3cmdUsage("You didn't set up your environment variables; see README.txt") if not($AWS_ACCESS_KEY_ID and $AWS_SECRET_ACCESS_KEY)
+     s3cmdUsage("Need a command (etc)") if not command
+
+     path = '' unless path
+     path = path.dup # modifiable
+     path += ':' unless path.match(':')
+     bucket = (/^(.*?):/.match(path))[1]
+     path.replace((/:(.*)$/.match(path))[1])
+
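
The two regexes above split a bucket:key argument at the first colon: the non-greedy group takes the bucket, the greedy tail keeps the rest (including any later colons). A quick illustration with hypothetical variable names:

    arg = "mybucket:some/key:with-colon".dup
    arg += ':' unless arg.match(':')       # a bare bucket name becomes "mybucket:"
    bucket = /^(.*?):/.match(arg)[1]       # => "mybucket" (split at the FIRST colon)
    key    = /:(.*)$/.match(arg)[1]        # => "some/key:with-colon"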
+     case command
+
+     when "delete"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       s3cmdUsage("Need a key") if path == ''
+       headers = hashPairs(ARGV[2...ARGV.length])
+       $stderr.puts "delete #{bucket}:#{path} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       S3try(:delete, bucket, path) unless $S3syncOptions['--dryrun']
+
+     when "deleteall"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       headers = hashPairs(ARGV[2...ARGV.length])
+       $stderr.puts "delete ALL entries in #{bucket}:#{path} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       more = true
+       marker = nil
+       while more do
+         res = s3cmdList(bucket, path, nil, nil, marker)
+         res.entries.each do |item|
+           # the s3 commands (with my modified UTF-8 conversion) expect native char encoding input
+           key = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
+           $stderr.puts "delete #{bucket}:#{key} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+           S3try(:delete, bucket, key) unless $S3syncOptions['--dryrun']
+         end
+
+         more = res.properties.is_truncated
+         marker = (res.properties.next_marker)? res.properties.next_marker : ((res.entries.length > 0) ? res.entries.last.key : nil)
+         # get this into local charset; when we pass it to s3 that is what's expected
+         marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join if marker
+       end
+
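
deleteall, list, copyall, and headers all reuse the same pagination idiom: loop while the listing is truncated, resuming from a marker. The pattern, distilled and simulated locally (Page and list_page are hypothetical stand-ins for the s3cmdList result):

    keys = ("a".."e").to_a
    Page = Struct.new(:entries, :truncated)
    list_page = lambda do |marker|               # stands in for s3cmdList
      start = marker ? keys.index(marker) + 1 : 0
      Page.new(keys[start, 2], start + 2 < keys.length)  # pretend S3 pages 2 keys at a time
    end

    marker = nil
    loop do
      page = list_page.call(marker)
      page.entries.each { |k| puts "process #{k}" }
      break unless page.truncated                # S3 says there is no next page
      marker = page.entries.last                 # resume after the last key seen
    end

The real loop prefers the response's explicit next_marker when present and only falls back to the last key, as seen above.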
+     when "list"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       max, delim = ARGV[2..3]
+       headers = hashPairs(ARGV[4...ARGV.length])
+       $stderr.puts "list #{bucket}:#{path} #{max} #{delim} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       puts "--------------------"
+
+       more = true
+       marker = nil
+       while more do
+         res = s3cmdList(bucket, path, max, delim, marker, headers)
+         if delim
+           res.common_prefix_entries.each do |item|
+             puts "dir: " + Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.prefix).join
+           end
+           puts "--------------------"
+         end
+         res.entries.each do |item|
+           puts Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
+         end
+         if res.properties.is_truncated
+           printf "More? Y/n: "
+           more = (STDIN.gets.match('^[Yy]?$'))
+           marker = (res.properties.next_marker)? res.properties.next_marker : ((res.entries.length > 0) ? res.entries.last.key : nil)
+           # get this into local charset; when we pass it to s3 that is what's expected
+           marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join if marker
+         else
+           more = false
+         end
+       end # more
+
+     when "listbuckets"
+       headers = hashPairs(ARGV[1...ARGV.length])
+       $stderr.puts "list all buckets #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       if $S3syncOptions['--expires-in']
+         $stdout.puts S3url(:list_all_my_buckets, headers)
+       else
+         res = S3try(:list_all_my_buckets, headers)
+         res.entries.each do |item|
+           puts item.name
+         end
+       end
+
+     when "createbucket"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       lc = ''
+       if(ARGV.length > 2)
+         lc = '<CreateBucketConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01"><LocationConstraint>' + ARGV[2] + '</LocationConstraint></CreateBucketConfiguration>'
+       end
+       $stderr.puts "create bucket #{bucket} #{lc}" if $S3syncOptions['--verbose']
+       S3try(:create_bucket, bucket, lc) unless $S3syncOptions['--dryrun']
+
+     when "deletebucket"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       headers = hashPairs(ARGV[2...ARGV.length])
+       $stderr.puts "delete bucket #{bucket} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       S3try(:delete_bucket, bucket, headers) unless $S3syncOptions['--dryrun']
+
+     when "location"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       headers = hashPairs(ARGV[2...ARGV.length])
+       query = Hash.new
+       query['location'] = 'location'
+       $stderr.puts "location request bucket #{bucket} #{query.inspect} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       S3try(:get_query_stream, bucket, '', query, headers, $stdout) unless $S3syncOptions['--dryrun']
+
+     when "get"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       s3cmdUsage("Need a key") if path == ''
+       s3cmdUsage("Need a file") if file == ''
+       headers = hashPairs(ARGV[3...ARGV.length])
+       $stderr.puts "get from key #{bucket}:#{path} into #{file} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       unless $S3syncOptions['--dryrun']
+         if $S3syncOptions['--expires-in']
+           $stdout.puts S3url(:get, bucket, path, headers)
+         else
+           outStream = File.open(file, 'wb')
+           outStream = ProgressStream.new(outStream) if $S3syncOptions['--progress']
+           S3try(:get_stream, bucket, path, headers, outStream)
+           outStream.close
+         end
+       end
+
+     when "put"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       s3cmdUsage("Need a key") if path == ''
+       s3cmdUsage("Need a file") if file == ''
+       headers = hashPairs(ARGV[3...ARGV.length])
+       stream = File.open(file, 'rb')
+       stream = ProgressStream.new(stream, File.stat(file).size) if $S3syncOptions['--progress']
+       s3o = S3::S3Object.new(stream, {}) # support meta later?
+       headers['Content-Length'] = FileTest.size(file).to_s
+       $stderr.puts "put to key #{bucket}:#{path} from #{file} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       S3try(:put, bucket, path, s3o, headers) unless $S3syncOptions['--dryrun']
+       stream.close
+
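
Each command above collects trailing header arguments through hashPairs, which ships in lib/s3sync.rb and is not part of this diff. A plausible minimal reconstruction, offered purely as a reading aid and assuming "Header-Name: value" style arguments:

    # hypothetical reconstruction of hashPairs (the real one lives in lib/s3sync.rb)
    def hashPairs(args)
      ret = Hash.new
      (args || []).each do |pair|
        name, value = pair.split(':', 2)   # split at the first colon only
        ret[name.strip] = value.to_s.strip
      end
      ret                                  # always a Hash, so headers.length is safe
    end

    hashPairs(["Content-Type: image/jpeg"])  # => {"Content-Type" => "image/jpeg"}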
+
+     when "copy"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       s3cmdUsage("Need a key") if path == ''
+       s3cmdUsage("Need a file") if file == ''
+
+       file = file.dup # modifiable
+       file += ':' unless file.match(':')
+       dest_bucket = (/^(.*?):/.match(file))[1]
+       file.replace((/:(.*)$/.match(file))[1])
+
+       headers = hashPairs(ARGV[3...ARGV.length])
+       if headers.length > 0
+         headers["x-amz-metadata-directive"] = "REPLACE"
+       else
+         headers["x-amz-metadata-directive"] = "COPY"
+       end
+       headers["x-amz-copy-source"] = "/#{bucket}/#{URI.escape(path)}"
+
+
+       $stderr.puts "copy #{bucket}:#{path} to #{dest_bucket}/#{file} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       S3try(:put, dest_bucket, file, nil, headers) unless $S3syncOptions['--dryrun']
+
+     when "copyall"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       s3cmdUsage("Need a path") if path == ''
+
+       file = file.dup # modifiable
+       file += ':' unless file.match(':')
+       dest_bucket = (/^(.*?):/.match(file))[1]
+       dest_key = file.replace((/:(.*)$/.match(file))[1])
+
+       src_path = path
+
+       headers = hashPairs(ARGV[2...ARGV.length])
+
+       $stderr.puts "copy #{bucket}:#{path} to #{dest_bucket}:#{dest_key} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       more = true
+       marker = nil
+       while more do
+         res = s3cmdList(bucket, path, nil, nil, marker)
+         res.entries.each do |item|
+           # the s3 commands (with my modified UTF-8 conversion) expect native char encoding input
+           path = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
+
+           file = path.gsub(src_path, dest_key)
+
+           headers = hashPairs(ARGV[3...ARGV.length])
+           if headers.length > 0
+             headers["x-amz-metadata-directive"] = "REPLACE"
+           else
+             headers["x-amz-metadata-directive"] = "COPY"
+           end
+           headers["x-amz-copy-source"] = "/#{bucket}/#{URI.escape(path)}"
+
+           $stderr.puts "copy #{bucket}:#{path} to #{dest_bucket}/#{file} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+           S3try(:put, dest_bucket, file, nil, headers) unless $S3syncOptions['--dryrun']
+
+         end
+
+         more = res.properties.is_truncated
+         marker = (res.properties.next_marker)? res.properties.next_marker : ((res.entries.length > 0) ? res.entries.last.key : nil)
+         # get this into local charset; when we pass it to s3 that is what's expected
+         marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join if marker
+       end
+
+     when "headers"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       s3cmdUsage("Need a path") if path == ''
+
+       headers = hashPairs(ARGV[2...ARGV.length])
+
+       $stderr.puts "apply headers to ALL entries in #{bucket}:#{path} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       more = true
+       marker = nil
+       while more do
+         res = s3cmdList(bucket, path, nil, nil, marker)
+         res.entries.each do |item|
+           # the s3 commands (with my modified UTF-8 conversion) expect native char encoding input
+           key = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
+
+           tmpHeaders = headers.merge({
+             "x-amz-copy-source" => "/#{bucket}/#{URI.escape(key)}",
+             "x-amz-metadata-directive" => "REPLACE"
+           })
+
+           #
+           # grab the current content-type unless it's been specified explicitly
+           #
+           if not tmpHeaders.key?("content-type")
+             currentHeaders = S3try(:head, bucket, key).object.metadata
+             tmpHeaders["content-type"] = currentHeaders["content-type"]
+           end
+
+           $stderr.puts "apply headers to #{bucket}:#{key} #{tmpHeaders.inspect}" if $S3syncOptions['--verbose']
+           S3try(:put, bucket, key, nil, tmpHeaders) unless $S3syncOptions['--dryrun']
+         end
+
+         more = res.properties.is_truncated
+         marker = (res.properties.next_marker)? res.properties.next_marker : ((res.entries.length > 0) ? res.entries.last.key : nil)
+         # get this into local charset; when we pass it to s3 that is what's expected
+         marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join if marker
+       end
+
+
+     else
+       s3cmdUsage
+     end
+
+   end #main
+
+ end #module
+
+
+ S3sync::s3cmdMain #go!
data/bin/s3sync ADDED
@@ -0,0 +1,739 @@
+ #!/usr/bin/env ruby
+ # This software code is made available "AS IS" without warranties of any
+ # kind. You may copy, display, modify and redistribute the software
+ # code either by itself or as incorporated into your code; provided that
+ # you do not remove any proprietary notices. Your use of this software
+ # code is at your own risk and you waive any claim against the author
+ # with respect to your use of this software code.
+ # (c) 2007 s3sync.net
+ #
+
+ module S3sync
+
+   $S3SYNC_MIME_TYPES_FILE = (ENV["S3SYNC_MIME_TYPES_FILE"] or '/etc/mime.types')
+   $S3SYNC_VERSION = '1.3.1'
+
+   # always look "here" for include files (thanks aktxyz)
+   $LOAD_PATH << File.join(File.expand_path(File.dirname(__FILE__)), "../lib/")
+
+   require 'getoptlong'
+   # require 'generator' # http://www.ruby-doc.org/stdlib/libdoc/generator/rdoc/classes/Generator.html
+   unless defined?(Enumerator)
+     require "enumerator"
+     Enumerator = Enumerable::Enumerator
+   end
+   # require 's3sync/thread_generator' # memory doesn't leak with this one, at least nothing near as bad
+   require 'digest/md5'
+   require 'tempfile'
+
+   require 's3sync'
+
+   $S3syncDirString = '{E40327BF-517A-46e8-A6C3-AF51BC263F59}'
+   $S3syncDirTag = 'd66759af42f282e1ba19144df2d405d0'
+   $S3syncDirFile = Tempfile.new("s3sync")
+   $S3syncDirFile.puts $S3syncDirString
+   $S3syncDirFile.close # not final; we need this file again to 'put' directory nodes
+
+   if $S3SYNC_MIME_TYPES_FILE and FileTest.exist?($S3SYNC_MIME_TYPES_FILE)
+     File.open($S3SYNC_MIME_TYPES_FILE, 'r') do |f|
+       $mimeTypes = {}
+       f.each_line do |l|
+         if l =~ /^(\w\S+)\s+(\S.*)$/
+           type = $1
+           exts = $2.split
+           exts.each do |e|
+             $mimeTypes[e.to_s] = type.to_s
+           end
+         end
+       end
+     end
+   end
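
The block above builds a reverse map from file extension to MIME type out of an /etc/mime.types-style file. The same parse, reduced to a self-contained sketch with two sample lines inlined:

    sample = ["image/jpeg jpeg jpg jpe\n", "text/plain asc txt text\n"]

    mime_types = {}
    sample.each do |l|
      next unless l =~ /^(\w\S+)\s+(\S.*)$/   # keep only "type ext1 ext2 ..." lines
      type, exts = $1, $2.split
      exts.each { |e| mime_types[e] = type }  # invert the line: extension => type
    end
    mime_types["jpg"]   # => "image/jpeg"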
+
+   def S3sync.main
+     # ---------- OPTIONS PROCESSING ---------- #
+
+     $S3syncOptions = Hash.new
+     optionsParser = GetoptLong.new(
+       [ '--help',          '-h', GetoptLong::NO_ARGUMENT ],
+       [ '--ssl',           '-s', GetoptLong::NO_ARGUMENT ],
+       [ '--recursive',     '-r', GetoptLong::NO_ARGUMENT ],
+       [ '--public-read',   '-p', GetoptLong::NO_ARGUMENT ],
+       [ '--delete',              GetoptLong::NO_ARGUMENT ],
+       [ '--verbose',       '-v', GetoptLong::NO_ARGUMENT ],
+       [ '--dryrun',        '-n', GetoptLong::NO_ARGUMENT ],
+       [ '--debug',         '-d', GetoptLong::NO_ARGUMENT ],
+       [ '--memory',        '-m', GetoptLong::NO_ARGUMENT ],
+       [ '--progress',            GetoptLong::NO_ARGUMENT ],
+       [ '--expires',             GetoptLong::REQUIRED_ARGUMENT ],
+       [ '--cache-control',       GetoptLong::REQUIRED_ARGUMENT ],
+       [ '--exclude',             GetoptLong::REQUIRED_ARGUMENT ],
+       [ '--gzip',                GetoptLong::REQUIRED_ARGUMENT ],
+       [ '--key',           '-k', GetoptLong::REQUIRED_ARGUMENT ],
+       [ '--secret',              GetoptLong::REQUIRED_ARGUMENT ],
+       [ '--make-dirs',           GetoptLong::NO_ARGUMENT ],
+       [ '--no-md5',              GetoptLong::NO_ARGUMENT ]
+     )
+
+     def S3sync.usage(message = nil)
+       $stderr.puts message if message
+       name = $0.split('/').last
+       $stderr.puts <<-ENDUSAGE
+ #{name} [options] <source> <destination>\t\tversion #{$S3SYNC_VERSION}
+ --help -h --verbose -v --dryrun -n
+ --ssl -s --recursive -r --delete
+ --public-read -p --expires="<exp>" --cache-control="<cc>"
+ --exclude="<regexp>" --progress --debug -d
+ --key -k --secret --make-dirs
+ --no-md5 --gzip
+ One of <source> or <destination> must be of S3 format, the other a local path.
+ Reminders:
+ * An S3 formatted item with bucket 'mybucket' and prefix 'mypre' looks like:
+ mybucket:mypre/some/key/name
+ * Local paths should always use forward slashes '/' even on Windows
+ * Whether you use a trailing slash on the source path makes a difference.
+ * For examples see README.
+ ENDUSAGE
+       exit
+     end #usage
+
+     begin
+       optionsParser.each {|opt, arg| $S3syncOptions[opt] = (arg || true)}
+     rescue StandardError
+       usage # the parser already printed an error message
+     end
+     usage if $S3syncOptions['--help']
+     $S3syncOptions['--verbose'] = true if $S3syncOptions['--dryrun'] or $S3syncOptions['--debug'] or $S3syncOptions['--progress']
+     $S3syncOptions['--ssl'] = true if $S3syncOptions['--ssl'] # change from "" to true to appease s3 port chooser
+
+     if $S3syncOptions['--key']
+       $AWS_ACCESS_KEY_ID = $S3syncOptions['--key']
+     end
+
+     if $S3syncOptions['--secret']
+       $AWS_SECRET_ACCESS_KEY = $S3syncOptions['--secret']
+     end
+
+     # ---------- CONNECT ---------- #
+     S3sync::s3trySetup
+
+     # ---------- PREFIX PROCESSING ---------- #
+     def S3sync.s3Prefix?(pre)
+       # allow for dos-like things e.g. C:\ to be treated as local even with colon
+       pre.include?(':') and not pre.match('^[A-Za-z]:[\\\\/]')
+     end
+     sourcePrefix, destinationPrefix = ARGV
+     usage("You didn't set up your environment variables; see README.txt") if not($AWS_ACCESS_KEY_ID and $AWS_SECRET_ACCESS_KEY)
+     usage('Need a source and a destination') if sourcePrefix == nil or destinationPrefix == nil
+     usage('Both arguments can\'t be on S3') if s3Prefix?(sourcePrefix) and s3Prefix?(destinationPrefix)
+     usage('One argument must be on S3') if !s3Prefix?(sourcePrefix) and !s3Prefix?(destinationPrefix)
+
+     # so we can modify them
+     sourcePrefix, destinationPrefix = sourcePrefix.dup, destinationPrefix.dup
+
+     # handle trailing slash for source properly
+     if(sourcePrefix !~ %r{/$})
+       # no slash on end of source means we need to append the last src dir to dst prefix
+       # testing for empty isn't good enough here.. needs to be "empty apart from potentially having 'bucket:'"
+       slash = (destinationPrefix.empty? or destinationPrefix.match(%r{:$}))? "" : "/"
+       # not good enough.. sometimes this coughs up the bucket as a prefix destinationPrefix.replace(destinationPrefix + slash + sourcePrefix.split(/(?:\/|:)/).last)
+       # take everything at the end after a slash or colon
+       destinationPrefix.replace(destinationPrefix + slash + %r{([^/:]*)$}.match(sourcePrefix)[1])
+     end
+     # no trailing slash on dest, ever.
+     destinationPrefix.sub!(%r{/$}, "")
+
+     # don't repeat slashes
+     sourcePrefix.squeeze!('/')
+     destinationPrefix.squeeze!('/')
+
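
s3Prefix? treats anything containing a colon as an S3 argument, except Windows drive paths. A few illustrative probes of the same test (method renamed for the sketch):

    def s3_prefix?(pre)  # same expression as S3sync.s3Prefix? above
      pre.include?(':') and not pre.match('^[A-Za-z]:[\\\\/]')
    end

    s3_prefix?("mybucket:mypre/key")  # => true   (colon, not a drive letter)
    s3_prefix?("C:\\Users\\me")       # => false  (dos-style drive path)
    s3_prefix?("C:/Users/me")         # => false  (drive path with forward slash)
    s3_prefix?("./local/dir")         # => false  (no colon at all)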
+     # here's where we find out what direction we're going
+     sourceIsS3 = s3Prefix?(sourcePrefix)
+     # alias these variables to the other strings (in ruby = does not make copies of strings)
+     s3Prefix = sourceIsS3 ? sourcePrefix : destinationPrefix
+     localPrefix = sourceIsS3 ? destinationPrefix : sourcePrefix
+
+     # canonicalize the S3 stuff
+     s3Bucket = (/^(.*?):/.match(s3Prefix))[1]
+     s3Prefix.replace((/:(.*)$/.match(s3Prefix))[1])
+     debug("s3Prefix #{s3Prefix}")
+     $S3SyncOriginalS3Prefix = s3Prefix.dup
+
+     # canonicalize the local stuff
+     # but that can kill a trailing slash, which we need to preserve long enough to know whether we mean "the dir" or "its contents"
+     # it will get re-stripped by the local generator after expressing this knowledge
+     localTrailingSlash = localPrefix.match(%r{/$})
+     localPrefix.replace(File.expand_path(localPrefix))
+     localPrefix += '/' if localTrailingSlash
+     debug("localPrefix #{localPrefix}")
+     # used for exclusion parsing
+     $S3SyncOriginalLocalPrefix = localPrefix.dup
+
+     # exclude preparation
+     # we don't want to build then throw away this regexp for each node in the universe; do it once globally
+     $S3SyncExclude = Regexp.new($S3syncOptions['--exclude']) if $S3syncOptions['--exclude']
+
+     # ---------- GENERATORS ---------- #
+
+     # a generator that will return the files/dirs of the local tree one by one
+     # sorted and decorated for easy comparison with the S3 tree
+     localTree = Enumerator.new do |g|
+       def S3sync.localTreeRecurse(g, prefix, path)
+         debug("localTreeRecurse #{prefix} #{path}")
+         #if $S3syncOptions['--memory']
+         #  $stderr.puts "Starting local recurse"
+         #  stats = ostats stats
+         #end
+         d = nil
+         begin
+           slash = prefix.empty? ? "" : "/"
+           d = Dir.new(prefix + slash + path)
+         rescue Errno::ENOENT
+           # ok the dir doesn't exist at all (this only really occurs for the root i.e. first dir)
+           return nil
+         rescue Errno::EACCES
+           # vista won't even let us touch some stuff in our own profile
+           return nil
+         end
+         # do some pre-processing
+         # the following sleight of hand is to make the recursion match the way s3 sorts
+         # take for example the directory 'foo' and the file 'foo.bar'
+         # when we encounter the dir we would want to recurse into it
+         # but S3 would just say 'period < slash' and sort 'foo.bar' between the dir node
+         # and the contents in that 'dir'
+         #
+         # so the solution is to not recurse into the directory until the point where
+         # it would come up "next" in the S3 list
+         # We have to do these hoops on the local side, because we have very little control
+         # over how S3 will return its results
+         toAdd = Array.new
+         d.each do |name|
+           slash = path.empty? ? "" : "/"
+           partialPath = path + slash + name
+           slash = prefix.empty? ? "" : "/"
+           fullPath = prefix + slash + partialPath
+           if name == "." or name == ".."
+             # skip
+           else
+             # add a dir node if appropriate
+             debug("Test #{fullPath}")
+             if ((not FileTest.symlink?(fullPath)) and FileTest.directory?(fullPath)) and $S3syncOptions['--recursive']
+               debug("Adding it as a dir node")
+               toAdd.push(name + '/') # always trail slash here for sorting purposes (removed below with rindex test)
+             end
+           end
+         end
+         dItems = d.collect.to_a + toAdd
+         d.close
+         d = toAdd = nil
+         dItems.sort! # aws says we will get alpha sorted results but ruby doesn't
+         dItems.each do |name|
+           isDirNode = false
+           if name.rindex('/') == name.length-1
+             name = name.slice(0...name.length-1)
+             isDirNode = true
+             debug("#{name} is a dir node")
+           end
+           slash = path.empty? ? "" : "/"
+           partialPath = path + slash + name
+           slash = prefix.empty? ? "" : "/"
+           fullPath = prefix + slash + partialPath
+           excludePath = fullPath.slice($S3SyncOriginalLocalPrefix.length...fullPath.length)
+           if name == "." or name == ".."
+             # skip
+           elsif $S3SyncExclude and $S3SyncExclude.match(excludePath)
+             debug("skipping local item #{excludePath} because of --exclude")
+           elsif isDirNode
+             localTreeRecurse(g, prefix, partialPath)
+           else
+             # a normal looking node we should try to process
+             debug("local item #{fullPath}")
+             g.yield(LocalNode.new(prefix, partialPath))
+           end
+         end
+         #if $S3syncOptions['--memory']
+         #  $stderr.puts "Ending local recurse"
+         #  stats = ostats stats
+         #end
+       end
+       # a bit of a special case for local, since "foo/" and "foo" are essentially treated the same by file systems
+       # so we need to think harder about what the user really meant in the command line.
+       localPrefixTrim = localPrefix
+       if localPrefix !~ %r{/$}
+         # no trailing slash, so yield the root itself first, then recurse if appropriate
+         # gork this is still not quite good enough.. if local is the dest then we don't know whether s3 will have a root dir node yielded a priori, so we can't know whether to do this. only matters for --erase though
+         g.yield(LocalNode.new(localPrefixTrim, "")) # technically we should check this for exclusion, but excluding the root node is kind of senseless.. and that would be a pain to set up here
+         localTreeRecurse(g, localPrefixTrim, "") if $S3syncOptions['--recursive']
+       else
+         # trailing slash, so ignore the root itself, and just go into the first level
+         localPrefixTrim.sub!(%r{/$}, "") # strip the slash because of how we do local node slash accounting in the recurse above
+         localTreeRecurse(g, localPrefixTrim, "")
+       end
+     end
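
Both trees are wrapped in Enumerator.new so the comparator below can pull nodes one at a time instead of materializing either listing in memory. The producer/consumer pattern in miniature:

    numbers = Enumerator.new do |g|
      3.times do |i|
        puts "producing #{i}"   # runs only when the consumer asks for a value
        g.yield(i)
      end
    end

    numbers.next  # prints "producing 0", returns 0
    numbers.next  # prints "producing 1", returns 1
    # when the block is exhausted, Enumerator#next raises StopIteration,
    # which is why the comparator below wraps each .next in "rescue nil"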
+
+     # a generator that will return the nodes in the S3 tree one by one
+     # sorted and decorated for easy comparison with the local tree
+     s3Tree = Enumerator.new do |g|
+       def S3sync.s3TreeRecurse(g, bucket, prefix, path)
+         if $S3syncOptions['--memory']
+           $stderr.puts "Starting S3 recurse"
+           GC.start
+           stats = ostats stats
+         end
+         $stderr.puts "s3TreeRecurse #{bucket} #{prefix} #{path}" if $S3syncOptions['--debug']
+         nextPage = true
+         marker = ''
+         while nextPage do
+           fullPrefix = prefix + path
+           debug("nextPage: #{marker}") if marker != ''
+           options = {}
+           options['prefix'] = fullPrefix # start at the right depth
+           options['delimiter'] = '/' # only one dir at a time please
+           options['max-keys'] = '200' # use manageable chunks
+           options['marker'] = marker unless marker == ''
+           d = S3sync.S3try(:list_bucket, bucket, options)
+           $stderr.puts "S3 ERROR: #{d.http_response}" unless d.http_response.is_a? Net::HTTPSuccess
+           # the 'directories' and leaf nodes are in two separate collections
+           # because a dir will never have the same name as a node, we can just shove them together and sort
+           # it's important to evaluate them alphabetically for efficient comparison to the local tree
+           tItems = d.entries + d.common_prefix_entries
+           tItems.sort! do |a,b|
+             aName = a.respond_to?('key') ? a.key : a.prefix
+             bName = b.respond_to?('key') ? b.key : b.prefix
+             # the full path will be returned; it's efficient to ignore the part we know will be in common
+             aName.slice(fullPrefix.length..aName.length) <=> bName.slice(fullPrefix.length..bName.length)
+           end
+           # get rid of the big s3 objects asap, just save light-weight nodes and strings
+           items = tItems.collect do |item|
+             if item.respond_to?('key')
+               key = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
+               Node.new(key, item.size, item.etag, item.last_modified)
+             else
+               Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.prefix).join
+             end
+           end
+           nextPage = d.properties.is_truncated
+           marker = (d.properties.next_marker)? d.properties.next_marker : ((d.entries.length > 0)? d.entries.last.key : '')
+           # get this into native char set (because when we feed it back to s3 that's what it will expect)
+           marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join
+           tItems = nil
+           d = nil # get rid of this before recursing; it's big
+           item = nil
+           GC.start # not sure but I think yielding before doing this is causing evil closure bloat
+           items.each do |item|
+             if not (item.kind_of? String)
+               # this is an item
+               excludePath = item.name.slice($S3SyncOriginalS3Prefix.length...item.name.length)
+               if $S3SyncExclude and $S3SyncExclude.match(excludePath)
+                 debug("skipping S3 item #{excludePath} due to --exclude")
+               else
+                 debug("S3 item #{item.name}")
+                 g.yield(S3Node.new(bucket, prefix, item))
+               end
+             else
+               # it's a prefix (i.e. there are sub keys)
+               partialPath = item.slice(prefix.length..item.length) # will have trailing slash
+               excludePath = item.slice($S3SyncOriginalS3Prefix.length...item.length)
+               # recurse
+               if $S3SyncExclude and $S3SyncExclude.match(excludePath)
+                 debug("skipping prefix #{excludePath} due to --exclude")
+               else
+                 debug("prefix found: #{partialPath}")
+                 s3TreeRecurse(g, bucket, prefix, partialPath) if $S3syncOptions['--recursive']
+               end
+             end
+           end
+           items = nil
+         end # of while nextPage
+         if $S3syncOptions['--memory']
+           $stderr.puts "Ending S3 recurse"
+           GC.start
+           stats = ostats stats
+         end
+       end
+       # this will yield the root node first and then recurse
+       s3TreeRecurse(g, s3Bucket, s3Prefix, "")
+     end
+
+     # alias the tree objects so we don't care below which direction the transfer is going
+     if sourceIsS3
+       sourceTree, destinationTree = s3Tree, localTree
+     else
+       sourceTree, destinationTree = localTree, s3Tree
+     end
+
+     # ---------- COMPARATOR ---------- #
+
+     # run the comparison engine and act according to what we find for each check
+     nodesToDelete = Array.new # a stack. have to delete in reverse order of normal create/update processing
+
+     sourceNode = sourceTree.next rescue nil
+     destinationNode = destinationTree.next rescue nil
+     while sourceNode or destinationNode do
+       debug("source: #{sourceNode.name}") if sourceNode
+       debug("dest: #{destinationNode.name}") if destinationNode
+       if (!destinationNode) or (sourceNode and (sourceNode.name < destinationNode.name))
+         dNode =
+           if sourceNode.kind_of? LocalNode
+             S3Node.new(s3Bucket, s3Prefix, sourceNode.name)
+           else
+             LocalNode.new(localPrefix, sourceNode.name)
+           end
+         puts "Create node #{sourceNode.name}" if $S3syncOptions['--verbose']
+         dNode.updateFrom(sourceNode) unless $S3syncOptions['--dryrun']
+         sourceNode = sourceNode.nil? ? nil : sourceTree.next rescue nil
+       elsif (!sourceNode) or (destinationNode and (sourceNode.name > destinationNode.name))
+         $stderr.puts "Source does not have #{destinationNode.name}" if $S3syncOptions['--debug']
+         if $S3syncOptions['--delete']
+           if destinationNode.directory?
+             # have to wait
+             nodesToDelete.push(destinationNode)
+           else
+             puts "Remove node #{destinationNode.name}" if $S3syncOptions['--verbose']
+             destinationNode.delete unless $S3syncOptions['--dryrun']
+           end
+         end
+         destinationNode = destinationNode.nil? ? nil : destinationTree.next rescue nil
+       elsif sourceNode.name == destinationNode.name
+         if (sourceNode.size != destinationNode.size) or (($S3syncOptions['--no-md5'])? (sourceNode.date > destinationNode.date) : (sourceNode.tag != destinationNode.tag))
+           puts "Update node #{sourceNode.name}" if $S3syncOptions['--verbose']
+           destinationNode.updateFrom(sourceNode) unless $S3syncOptions['--dryrun']
+         elsif $S3syncOptions['--debug']
+           $stderr.puts "Node #{sourceNode.name} unchanged"
+         end
+         sourceNode = sourceNode.nil? ? nil : sourceTree.next rescue nil
+         destinationNode = destinationNode.nil? ? nil : destinationTree.next rescue nil
+       end
+     end
+
+     # get rid of the (now empty, except for other directories) directories
+     nodesToDelete.reverse_each do |node|
+       puts "Remove node #{node.name}" if $S3syncOptions['--verbose']
+       node.delete unless $S3syncOptions['--dryrun']
+     end
+   end #main
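
The comparator is a classic merge of two alphabetically sorted streams: advance whichever side is behind, and compare only when the names line up. Stripped of the S3 details, the control flow looks like this (sample data hypothetical):

    src = %w[a b d].each    # stands in for sourceTree
    dst = %w[b c d].each    # stands in for destinationTree
    s = src.next rescue nil
    d = dst.next rescue nil
    while s or d
      if !d or (s and s < d)       # source-only name: create it at the destination
        puts "create #{s}"
        s = src.next rescue nil
      elsif !s or (d and s > d)    # destination-only name: remove it (with --delete)
        puts "remove #{d}"
        d = dst.next rescue nil
      else                         # same name: update only if size/md5 differ
        puts "compare #{s}"
        s = src.next rescue nil
        d = dst.next rescue nil
      end
    end
    # => create a, compare b, remove c, compare d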
+
+
+   # ---------- NODE ---------- #
+   class Node
+     attr_reader :name
+     attr_reader :size
+     attr_reader :tag
+     attr_reader :date
+     def initialize(name = '', size = 0, tag = '', date = Time.now.utc)
+       @name = name
+       @size = size
+       @tag = tag
+       @date = date
+     end
+     def directory?()
+       @tag == $S3syncDirTag and @size == $S3syncDirString.length
+     end
+   end
+
+   # ---------- S3Node ---------- #
+   class S3Node < Node
+     @path = nil
+     @bucket = nil
+     @result = nil
+     def initialize(bucket, prefix, itemOrName)
+       @bucket = bucket
+       if itemOrName.kind_of? String
+         @name = itemOrName
+         @name.sub!(%r{/$}, "") # don't create directories with a slash on the end
+         # 6/2007. the prefix can be filled but the name empty, in the case of s3sync -r somedir somebucket:
+         if (not prefix.empty? and @name.empty?)
+           @name = prefix
+           itemOrName = prefix
+           prefix = ""
+         end
+         slash = prefix.empty? ? "" : "/"
+         @path = prefix + slash + itemOrName
+       else
+         @name = (itemOrName.name.slice((prefix.length)..itemOrName.name.length) or '')
+         # depending whether the prefix is / tailed, the name might need trimming
+         @name.sub!(%r{^/}, "") # get rid of leading slash in name if there (from above simplistic split)
+         @name.sub!(%r{/$}, "") # don't create directories with a slash on the end
+         @path = itemOrName.name
+         @path.sub!(%r{/$}, "") # don't create directories with a slash on the end
+         @size = itemOrName.size
+         @tag = itemOrName.tag.gsub(/"/, '')
+         @date = Time.xmlschema(itemOrName.date)
+       end
+       debug("s3 node object init. Name:#{@name} Path:#{@path} Size:#{@size} Tag:#{@tag} Date:#{@date}")
+     end
+     # get this item from s3 into the provided stream
+     # S3 pushes to the local item, due to how http streaming is implemented
+     def to_stream(s)
+       @result = S3sync.S3try(:get_stream, @bucket, @path, {}, s)
+     end
+     def symlink?()
+       unless @result
+         @result = S3sync.S3try(:head, @bucket, @path)
+       end
+       debug("symlink value is: #{@result.object.metadata['symlink']}")
+       @result.object.metadata['symlink'] == 'true'
+     end
+     def owner
+       unless @result
+         @result = S3sync.S3try(:head, @bucket, @path)
+       end
+       debug("Owner of this s3 node is #{@result.object.metadata['owner']}")
+       @result.object.metadata['owner'].to_i # if not there, will be nil => 0 which == root so good default
+     end
+     def group
+       unless @result
+         @result = S3sync.S3try(:head, @bucket, @path)
+       end
+       @result.object.metadata['group'].to_i # 0 default ok
+     end
+     def permissions
+       g = @result.object.metadata['permissions']
+       g ? g.to_i : 600 # default to owner only
+     end
+     def updateFrom(fromNode)
+       if fromNode.respond_to?(:stream)
+         meta = Hash.new
+         meta['owner'] = fromNode.owner.to_s
+         meta['group'] = fromNode.group.to_s
+         meta['permissions'] = fromNode.permissions.to_s
+         meta['symlink'] = 'true' if fromNode.symlink?
+         begin
+           theStream = fromNode.stream
+           theStream = ProgressStream.new(theStream, fromNode.size) if $S3syncOptions['--progress']
+
+           s3o = S3::S3Object.new(theStream, meta)
+           debug(@path)
+           headers = {'Content-Length' => (fromNode.size.respond_to?(:nonzero?) ? fromNode.size.to_s : '0')}
+           headers['x-amz-acl'] = 'public-read' if $S3syncOptions['--public-read']
+           headers['Expires'] = $S3syncOptions['--expires'] if $S3syncOptions['--expires']
+           headers['Cache-Control'] = $S3syncOptions['--cache-control'] if $S3syncOptions['--cache-control']
+           fType = @path.split('.').last
+           if ($S3syncOptions['--gzip'] || "gz").split(",").include? fType
+             headers['Content-Encoding'] = "gzip"
+             fType = @path.split('.')[-2]
+           end
+           debug("File extension: #{fType}")
+           if defined?($mimeTypes) and fType != '' and (mType = $mimeTypes[fType]) and mType != ''
+             debug("Mime type: #{mType}")
+             headers['Content-Type'] = mType
+           end
+           @result = S3sync.S3try(:put, @bucket, @path, s3o, headers)
+           theStream.close if (theStream and not theStream.closed?)
+         rescue NoMethodError
+           # when --progress is used and we can't get the stream object, it doesn't report as nil
+           # so the above .closed? test will break
+           $stderr.puts "Skipping #{@path}: #{$!}"
+         rescue SystemCallError
+           theStream.close if (theStream and not theStream.closed?)
+           $stderr.puts "Skipping #{@path}: #{$!}"
+         end
+       else
+         raise "Node provided as update source doesn't support :stream"
+       end
+     end
+     def delete
+       @result = S3sync.S3try(:delete, @bucket, @path)
+     end
+   end
+
+   # ---------- LocalNode ---------- #
+
+   class LocalNode < Node
+     @path = nil
+     def initialize(prefix, partialPath)
+       slash = prefix.empty? ? "" : "/"
+       @path = prefix + slash + partialPath
+       # slash isn't at the front of this any more @name = (partialPath.slice(1..partialPath.length) or '')
+       @name = partialPath or ''
+       if FileTest.symlink?(@path)
+         # this could use the 'file' case below, but why create an extra temp file
+         linkData = File.readlink(@path)
+         $stderr.puts "link to: #{linkData}" if $S3syncOptions['--debug']
+         @size = linkData.length
+         unless $S3syncOptions['--no-md5']
+           md5 = Digest::MD5.new()
+           md5 << linkData
+           @tag = md5.hexdigest
+         end
+         @date = File.lstat(@path).mtime.utc
+       elsif FileTest.file?(@path)
+         @size = FileTest.size(@path)
+         data = nil
+         begin
+           unless $S3syncOptions['--no-md5']
+             data = self.stream
+             md5 = Digest::MD5.new()
+             while !data.eof?
+               md5 << data.read(2048) # stream so it's not taking all memory
+             end
+             data.close
+             @tag = md5.hexdigest
+           end
+           @date = File.stat(@path).mtime.utc
+         rescue SystemCallError
+           # well, we're not going to have an md5, that's for sure
+           @tag = nil
+         end
+       elsif FileTest.directory?(@path)
+         # all s3 directories are dummy nodes that contain the same directory string
+         # so for easy comparison, set our size and tag thusly
+         @size = $S3syncDirString.length
+         @tag = $S3syncDirTag
+         @date = File.stat(@path).mtime.utc
+       end
+       debug("local node object init. Name:#{@name} Path:#{@path} Size:#{@size} Tag:#{@tag} Date:#{@date}")
+     end
+     # return a stream that will read the contents of the local item
+     # local gets pulled by the S3Node update fn, due to how http streaming is implemented
+     def stream
+       begin
+         # 1.0.8 switch order of these tests because a symlinked file will say yes to 'file?'
+         if FileTest.symlink?(@path) or FileTest.directory?(@path)
+           tf = Tempfile.new('s3sync')
+           if FileTest.symlink?(@path)
+             tf.printf('%s', File.readlink(@path))
+           elsif FileTest.directory?(@path)
+             tf.printf('%s', $S3syncDirString)
+           end
+           tf.close
+           tf.open
+           tf
+         elsif FileTest.file?(@path)
+           File.open(@path, 'rb')
+         end
+       rescue SystemCallError
+         $stderr.puts "Could not read #{@path}: #{$!}"
+         raise
+       end
+     end
+     def stat
+       FileTest.symlink?(@path) ? File.lstat(@path) : File.stat(@path)
+     end
+     def exist?
+       FileTest.exist?(@path) or FileTest.symlink?(@path)
+     end
+     def owner
+       self.exist? ? self.stat().uid : 0
+     end
+     def group
+       self.exist? ? self.stat().gid : 0
+     end
+     def permissions
+       self.exist? ? self.stat().mode : 600
+     end
+     def updateFrom(fromNode)
+       if fromNode.respond_to?(:to_stream)
+         fName = @path + '.s3syncTemp'
+         # handle the case where the user wants us to create dirs that don't exist in S3
+         if $S3syncOptions['--make-dirs']
+           # ensure target's path exists
+           dirs = @path.split('/')
+           # but the last one is a file name
+           dirs.pop()
+           current = ''
+           dirs.each do |dir|
+             current << dir << '/'
+             begin
+               Dir.mkdir(current) unless FileTest.exist?(current)
+             rescue SystemCallError
+               $stderr.puts "Could not mkdir #{current}: #{$!}"
+             end
+           end
+         end
+         unless fromNode.directory?
+           f = File.open(fName, 'wb')
+           f = ProgressStream.new(f, fromNode.size) if $S3syncOptions['--progress']
+
+           fromNode.to_stream(f)
+           f.close
+         end
+         # get original item out of the way
+         File.unlink(@path) if File.exist?(@path)
+         if fromNode.symlink?
+           linkTo = ''
+           File.open(fName, 'rb') {|f| linkTo = f.read}
+           debug("#{@path} will be a symlink to #{linkTo}")
+           begin
+             File.symlink(linkTo, @path)
+           rescue NotImplementedError
+             # windows doesn't do symlinks, for example
+             # just bail
+             File.unlink(fName) if File.exist?(fName)
+             return
+           rescue SystemCallError
+             $stderr.puts "Could not write symlink #{@path}: #{$!}"
+           end
+         elsif fromNode.directory?
+           # only get here when the dir doesn't exist. else they'd compare ==
+           debug(@path)
+           begin
+             Dir.mkdir(@path) unless FileTest.exist?(@path)
+           rescue SystemCallError
+             $stderr.puts "Could not mkdir #{@path}: #{$!}"
+           end
+
+         else
+           begin
+             File.rename(fName, @path)
+           rescue SystemCallError
+             $stderr.puts "Could not write (rename) #{@path}: #{$!}"
+           end
+         end
+         # clean up if the temp file is still there (as for links)
+         File.unlink(fName) if File.exist?(fName)
+
+         # update permissions
+         linkCommand = fromNode.symlink? ? 'l' : ''
+         begin
+           File.send(linkCommand + 'chown', fromNode.owner, fromNode.group, @path)
+           File.send(linkCommand + 'chmod', fromNode.permissions, @path)
+         rescue NotImplementedError
+           # no one has lchmod, but who really cares
+         rescue SystemCallError
+           $stderr.puts "Could not change owner/permissions on #{@path}: #{$!}"
+         end
+       else
+         raise "Node provided as update source doesn't support :to_stream"
+       end
+     end
+     def symlink?()
+       FileTest.symlink?(@path)
+     end
+     def delete
+       # don't try to delete the restore root dir
+       # this is a quick fix to deal with the fact that the tree recurse has to visit the root node
+       return unless @name != ''
+       return unless FileTest.exist?(@path)
+       begin
+         if FileTest.directory?(@path)
+           Dir.rmdir(@path)
+         else
+           File.unlink(@path)
+         end
+       rescue SystemCallError
+         $stderr.puts "Could not delete #{@path}: #{$!}"
+       end
+     end
+   end
+
+ end #module
+
+ def debug(str)
+   $stderr.puts str if $S3syncOptions['--debug']
+ end
+
+ def ostats(last_stat = nil)
+   stats = Hash.new(0)
+   ObjectSpace.each_object {|o| stats[o.class] += 1}
+
+   stats.sort {|(k1,v1),(k2,v2)| v2 <=> v1}.each do |k,v|
+     $stderr.printf "%-30s %10d", k, v
+     $stderr.printf " delta %10d", (v - last_stat[k]) if last_stat
+     $stderr.puts
+   end
+
+   stats
+ end
+
+ # go!
+ S3sync::main