aproxacs-s3sync 1.3.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries. Every file below is marked ADDED, so this diff amounts to the complete contents of the aproxacs-s3sync 1.3.3 release.
data/Rakefile ADDED
@@ -0,0 +1,21 @@
+ require 'rubygems'
+ require 'rake'
+
+ begin
+   require 'jeweler'
+   Jeweler::Tasks.new do |gem|
+     gem.name = "aproxacs-s3sync"
+     gem.summary = %Q{Fork of s3sync to be compatible with ruby 1.9.}
+     gem.email = "aproxacs@gmail.com"
+     gem.homepage = "http://s3sync.net"
+     gem.authors = ["aproxacs"]
+     gem.files.include FileList.new('lib/**/*.rb', "bin/*",
+       "History.txt", "PostInstall.txt", "VERSION", "README.rdoc", "Rakefile")
+
+     gem.executables = ["s3sync", "s3cmd"]
+     # gem is a Gem::Specification... see http://www.rubygems.org/read/chapter/20 for additional settings
+   end
+
+ rescue LoadError
+   puts "Jeweler (or a dependency) not available. Install it with: sudo gem install jeweler"
+ end
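The Rakefile above only wires up jeweler's packaging tasks. For anyone rebuilding the gem from a source checkout, the flow would be roughly the following (a sketch assuming jeweler's standard rake tasks; the exact task set depends on the installed jeweler version, and these commands are not part of the packaged files):

    sudo gem install jeweler       # as suggested by the rescue message above
    rake gemspec                   # regenerate the .gemspec from the Jeweler::Tasks block
    rake build                     # build pkg/aproxacs-s3sync-1.3.3.gem
    gem install pkg/aproxacs-s3sync-1.3.3.gem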
data/VERSION ADDED
@@ -0,0 +1 @@
+ 1.3.3
data/bin/s3cmd ADDED
@@ -0,0 +1,338 @@
+ #!/usr/bin/env ruby
+ # This software code is made available "AS IS" without warranties of any
+ # kind. You may copy, display, modify and redistribute the software
+ # code either by itself or as incorporated into your code; provided that
+ # you do not remove any proprietary notices. Your use of this software
+ # code is at your own risk and you waive any claim against the author
+ # with respect to your use of this software code.
+ # (c) 2007 s3sync.net
+ #
+
+ module S3sync
+
+   # always look "here" for include files (thanks aktxyz)
+   # $LOAD_PATH << File.join(File.expand_path(File.dirname(__FILE__)), "../lib/s3sync/")
+   $LOAD_PATH << File.join(File.expand_path(File.dirname(__FILE__)), "../lib/")
+
+   require 's3sync'
+   require 'getoptlong'
+
+
+   def S3sync.s3cmdMain
+     # ---------- OPTIONS PROCESSING ---------- #
+
+     $S3syncOptions = Hash.new
+     optionsParser = GetoptLong.new(
+       [ '--help',       '-h', GetoptLong::NO_ARGUMENT ],
+       [ '--ssl',        '-s', GetoptLong::NO_ARGUMENT ],
+       [ '--verbose',    '-v', GetoptLong::NO_ARGUMENT ],
+       [ '--dryrun',     '-n', GetoptLong::NO_ARGUMENT ],
+       [ '--debug',      '-d', GetoptLong::NO_ARGUMENT ],
+       [ '--progress',         GetoptLong::NO_ARGUMENT ],
+       [ '--expires-in',       GetoptLong::REQUIRED_ARGUMENT ]
+     )
+
+     def S3sync.s3cmdUsage(message = nil)
+       $stderr.puts message if message
+       name = $0.split('/').last
+       $stderr.puts <<"ENDUSAGE"
+ #{name} [options] <command> [arg(s)]\t\tversion #{S3sync::VERSION}
+   --help    -h        --verbose   -v     --dryrun    -n
+   --ssl     -s        --debug     -d     --progress
+   --expires-in=( <# of seconds> | [#d|#h|#m|#s] )
+
+ Commands:
+   #{name} listbuckets [headers]
+   #{name} createbucket <bucket> [constraint (i.e. EU)]
+   #{name} deletebucket <bucket> [headers]
+   #{name} list <bucket>[:prefix] [max/page] [delimiter] [headers]
+   #{name} location <bucket> [headers]
+   #{name} delete <bucket>:key [headers]
+   #{name} deleteall <bucket>[:prefix] [headers]
+   #{name} get|put <bucket>:key <file> [headers]
+   #{name} copy <bucket>:key <bucket>:key [headers]
+   #{name} copyall <bucket>:key <bucket>:key [headers]
+   #{name} headers <bucket>:key [headers]
+ ENDUSAGE
+       exit
+     end #usage
+
+     begin
+       optionsParser.each {|opt, arg| $S3syncOptions[opt] = (arg || true)}
+     rescue StandardError
+       s3cmdUsage # the parser already printed an error message
+     end
+     s3cmdUsage if $S3syncOptions['--help']
+     $S3syncOptions['--verbose'] = true if $S3syncOptions['--dryrun'] or
+                                           $S3syncOptions['--debug'] or
+                                           $S3syncOptions['--progress']
+
+     # change from "" to true to appease s3 port chooser
+     $S3syncOptions['--ssl'] = true if $S3syncOptions['--ssl']
+
+     if $S3syncOptions['--expires-in'] =~ /d|h|m|s/
+       e = $S3syncOptions['--expires-in']
+       days    = (e =~ /(\d+)d/)? (/(\d+)d/.match(e))[1].to_i : 0
+       hours   = (e =~ /(\d+)h/)? (/(\d+)h/.match(e))[1].to_i : 0
+       minutes = (e =~ /(\d+)m/)? (/(\d+)m/.match(e))[1].to_i : 0
+       seconds = (e =~ /(\d+)s/)? (/(\d+)s/.match(e))[1].to_i : 0
+       $S3syncOptions['--expires-in'] = seconds + 60 * ( minutes + 60 * ( hours + 24 * ( days ) ) )
+     end
+
+     # ---------- CONNECT ---------- #
+     S3sync::s3trySetup
+
+
+     # ---------- COMMAND PROCESSING ---------- #
+     command, path, file = ARGV
+
+     s3cmdUsage("You didn't set up your environment variables; see README.txt") if not($AWS_ACCESS_KEY_ID and $AWS_SECRET_ACCESS_KEY)
+     s3cmdUsage("Need a command (etc)") if not command
+
+     path = '' unless path
+     path = path.dup # modifiable
+     path += ':' unless path.match(':')
+     bucket = (/^(.*?):/.match(path))[1]
+     path.replace((/:(.*)$/.match(path))[1])
+
+     case command
+
+     when "delete"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       s3cmdUsage("Need a key") if path == ''
+       headers = hashPairs(ARGV[2...ARGV.length])
+       $stderr.puts "delete #{bucket}:#{path} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       S3try(:delete, bucket, path) unless $S3syncOptions['--dryrun']
+
+     when "deleteall"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       headers = hashPairs(ARGV[2...ARGV.length])
+       $stderr.puts "delete ALL entries in #{bucket}:#{path} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       more = true
+       marker = nil
+       while more do
+         res = s3cmdList(bucket, path, nil, nil, marker)
+         res.entries.each do |item|
+           # the s3 commands (with my modified UTF-8 conversion) expect native char encoding input
+           key = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
+           $stderr.puts "delete #{bucket}:#{key} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+           S3try(:delete, bucket, key) unless $S3syncOptions['--dryrun']
+         end
+
+         more = res.properties.is_truncated
+         marker = (res.properties.next_marker)? res.properties.next_marker : ((res.entries.length > 0) ? res.entries.last.key : nil)
+         # get this into local charset; when we pass it to s3 that is what's expected
+         marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join if marker
+       end
+
+     when "list"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       max, delim = ARGV[2..3]
+       headers = hashPairs(ARGV[4...ARGV.length])
+       $stderr.puts "list #{bucket}:#{path} #{max} #{delim} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       puts "--------------------"
+
+       more = true
+       marker = nil
+       while more do
+         res = s3cmdList(bucket, path, max, delim, marker, headers)
+         if delim
+           res.common_prefix_entries.each do |item|
+             puts "dir: " + Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.prefix).join
+           end
+           puts "--------------------"
+         end
+         res.entries.each do |item|
+           puts Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
+         end
+         if res.properties.is_truncated
+           printf "More? Y/n: "
+           more = (STDIN.gets.match('^[Yy]?$'))
+           marker = (res.properties.next_marker)? res.properties.next_marker : ((res.entries.length > 0) ? res.entries.last.key : nil)
+           # get this into local charset; when we pass it to s3 that is what's expected
+           marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join if marker
+         else
+           more = false
+         end
+       end # more
+
+     when "listbuckets"
+       headers = hashPairs(ARGV[1...ARGV.length])
+       $stderr.puts "list all buckets #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       if $S3syncOptions['--expires-in']
+         $stdout.puts S3url(:list_all_my_buckets, headers)
+       else
+         res = S3try(:list_all_my_buckets, headers)
+         res.entries.each do |item|
+           puts item.name
+         end
+       end
+
+     when "createbucket"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       lc = ''
+       if(ARGV.length > 2)
+         lc = '<CreateBucketConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><LocationConstraint>' + ARGV[2] + '</LocationConstraint></CreateBucketConfiguration>'
+       end
+       $stderr.puts "create bucket #{bucket} #{lc}" if $S3syncOptions['--verbose']
+       S3try(:create_bucket, bucket, lc) unless $S3syncOptions['--dryrun']
+
+     when "deletebucket"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       headers = hashPairs(ARGV[2...ARGV.length])
+       $stderr.puts "delete bucket #{bucket} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       S3try(:delete_bucket, bucket, headers) unless $S3syncOptions['--dryrun']
+
+     when "location"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       headers = hashPairs(ARGV[2...ARGV.length])
+       query = Hash.new
+       query['location'] = 'location'
+       $stderr.puts "location request bucket #{bucket} #{query.inspect} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       S3try(:get_query_stream, bucket, '', query, headers, $stdout) unless $S3syncOptions['--dryrun']
+
+     when "get"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       s3cmdUsage("Need a key") if path == ''
+       s3cmdUsage("Need a file") if file.nil? or file == ''
+       headers = hashPairs(ARGV[3...ARGV.length])
+       $stderr.puts "get from key #{bucket}:#{path} into #{file} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       unless $S3syncOptions['--dryrun']
+         if $S3syncOptions['--expires-in']
+           $stdout.puts S3url(:get, bucket, path, headers)
+         else
+           outStream = File.open(file, 'wb')
+           outStream = ProgressStream.new(outStream) if $S3syncOptions['--progress']
+           S3try(:get_stream, bucket, path, headers, outStream)
+           outStream.close
+         end
+       end
+
+     when "put"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       s3cmdUsage("Need a key") if path == ''
+       s3cmdUsage("Need a file") if file.nil? or file == ''
+       headers = hashPairs(ARGV[3...ARGV.length])
+       stream = File.open(file, 'rb')
+       stream = ProgressStream.new(stream, File.stat(file).size) if $S3syncOptions['--progress']
+       s3o = S3::S3Object.new(stream, {}) # support meta later?
+       headers['Content-Length'] = FileTest.size(file).to_s
+       $stderr.puts "put to key #{bucket}:#{path} from #{file} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       S3try(:put, bucket, path, s3o, headers) unless $S3syncOptions['--dryrun']
+       stream.close
+
+
+     when "copy"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       s3cmdUsage("Need a key") if path == ''
+       s3cmdUsage("Need a file") if file.nil? or file == ''
+
+       file = file.dup # modifiable
+       file += ':' unless file.match(':')
+       dest_bucket = (/^(.*?):/.match(file))[1]
+       file.replace((/:(.*)$/.match(file))[1])
+
+       headers = hashPairs(ARGV[3...ARGV.length])
+       if headers.length > 0
+         headers["x-amz-metadata-directive"] = "REPLACE"
+       else
+         headers["x-amz-metadata-directive"] = "COPY"
+       end
+       headers["x-amz-copy-source"] = "/#{bucket}/#{URI.escape(path)}"
+
+
+       $stderr.puts "copy #{bucket}:#{path} to #{dest_bucket}/#{file} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       S3try(:put, dest_bucket, file, nil, headers) unless $S3syncOptions['--dryrun']
+
+     when "copyall"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       s3cmdUsage("Need a path") if path == ''
+
+       file = file.dup # modifiable
+       file += ':' unless file.match(':')
+       dest_bucket = (/^(.*?):/.match(file))[1]
+       dest_key = file.replace((/:(.*)$/.match(file))[1])
+
+       src_path = path
+
+       headers = hashPairs(ARGV[3...ARGV.length])
+
+       $stderr.puts "copy #{bucket}:#{path} to #{dest_bucket}:#{dest_key} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       more = true
+       marker = nil
+       while more do
+         res = s3cmdList(bucket, path, nil, nil, marker)
+         res.entries.each do |item|
+           # the s3 commands (with my modified UTF-8 conversion) expect native char encoding input
+           path = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
+
+           file = path.gsub(src_path, dest_key)
+
+           headers = hashPairs(ARGV[3...ARGV.length])
+           if headers.length > 0
+             headers["x-amz-metadata-directive"] = "REPLACE"
+           else
+             headers["x-amz-metadata-directive"] = "COPY"
+           end
+           headers["x-amz-copy-source"] = "/#{bucket}/#{URI.escape(path)}"
+
+           $stderr.puts "copy #{bucket}:#{path} to #{dest_bucket}/#{file} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+           S3try(:put, dest_bucket, file, nil, headers) unless $S3syncOptions['--dryrun']
+
+         end
+
+         more = res.properties.is_truncated
+         marker = (res.properties.next_marker)? res.properties.next_marker : ((res.entries.length > 0) ? res.entries.last.key : nil)
+         # get this into local charset; when we pass it to s3 that is what's expected
+         marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join if marker
+       end
+
+     when "headers"
+       s3cmdUsage("Need a bucket") if bucket == ''
+       s3cmdUsage("Need a path") if path == ''
+
+       headers = hashPairs(ARGV[2...ARGV.length])
+
+       $stderr.puts "apply headers to ALL entries in #{bucket}:#{path} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+       more = true
+       marker = nil
+       while more do
+         res = s3cmdList(bucket, path, nil, nil, marker)
+         res.entries.each do |item|
+           # the s3 commands (with my modified UTF-8 conversion) expect native char encoding input
+           key = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
+
+           tmpHeaders = headers.merge({
+             "x-amz-copy-source" => "/#{bucket}/#{URI.escape(key)}",
+             "x-amz-metadata-directive" => "REPLACE"
+           })
+
+           #
+           # grab the current content-type unless it's been specified explicitly
+           #
+           if not tmpHeaders.key?("content-type")
+             currentHeaders = S3try(:head, bucket, key).object.metadata
+             tmpHeaders["content-type"] = currentHeaders["content-type"]
+           end
+
+           $stderr.puts "apply headers to #{bucket}:#{key} #{tmpHeaders.inspect}" if $S3syncOptions['--verbose']
+           S3try(:put, bucket, key, nil, tmpHeaders) unless $S3syncOptions['--dryrun']
+         end
+
+         more = res.properties.is_truncated
+         marker = (res.properties.next_marker)? res.properties.next_marker : ((res.entries.length > 0) ? res.entries.last.key : nil)
+         # get this into local charset; when we pass it to s3 that is what's expected
+         marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join if marker
+       end
+
+
+     else
+       s3cmdUsage
+     end
+
+   end #main
+
+ end #module
+
+
+ S3sync::s3cmdMain #go!
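For orientation, a few illustrative s3cmd invocations following the usage text above (these are sketches: the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables must already be set, and trailing header arguments are assumed to be name:value pairs as parsed by the hashPairs helper that the s3sync library supplies):

    s3cmd listbuckets
    s3cmd createbucket mybucket EU
    s3cmd list mybucket:some/prefix 200 /
    s3cmd put mybucket:backups/db.tgz db.tgz Content-Type:application/x-gzip
    s3cmd get mybucket:backups/db.tgz db.tgz
    s3cmd --expires-in=1d get mybucket:backups/db.tgz db.tgz

The last form downloads nothing: when --expires-in is set, the "get" branch prints a pre-signed URL instead, and "1d" is converted by the option-processing block above to 1 * 24 * 60 * 60 = 86400 seconds.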
data/bin/s3sync ADDED
@@ -0,0 +1,734 @@
+ #!/usr/bin/env ruby
+ # This software code is made available "AS IS" without warranties of any
+ # kind. You may copy, display, modify and redistribute the software
+ # code either by itself or as incorporated into your code; provided that
+ # you do not remove any proprietary notices. Your use of this software
+ # code is at your own risk and you waive any claim against the author
+ # with respect to your use of this software code.
+ # (c) 2007 s3sync.net
+ #
+
+ module S3sync
+
+   $S3SYNC_MIME_TYPES_FILE = (ENV["S3SYNC_MIME_TYPES_FILE"] or '/etc/mime.types')
+   $S3SYNC_VERSION = '1.3.3'
+
+   # always look "here" for include files (thanks aktxyz)
+   $LOAD_PATH << File.join(File.expand_path(File.dirname(__FILE__)), "../lib/")
+
+   require 'getoptlong'
+   # require 'generator' # http://www.ruby-doc.org/stdlib/libdoc/generator/rdoc/classes/Generator.html
+   unless defined?(Enumerator)
+     require "enumerator"
+     Enumerator = Enumerable::Enumerator
+   end
+   # require 's3sync/thread_generator' # memory doesn't leak with this one, at least nothing near as bad
+   require 'digest/md5'
+   require 'tempfile'
+
+   require 's3sync'
+
+   $S3syncDirString = '{E40327BF-517A-46e8-A6C3-AF51BC263F59}'
+   $S3syncDirTag = 'd66759af42f282e1ba19144df2d405d0'
+   $S3syncDirFile = Tempfile.new("s3sync")
+   $S3syncDirFile.puts $S3syncDirString
+   $S3syncDirFile.close # not final; we need this file again to 'put' directory nodes
+
+   if $S3SYNC_MIME_TYPES_FILE and FileTest.exist?($S3SYNC_MIME_TYPES_FILE)
+     File.open($S3SYNC_MIME_TYPES_FILE, 'r') do |f|
+       $mimeTypes = {}
+       f.each_line do |l|
+         if l =~ /^(\w\S+)\s+(\S.*)$/
+           type = $1
+           exts = $2.split
+           exts.each do |e|
+             $mimeTypes[e.to_s] = type.to_s
+           end
+         end
+       end
+     end
+   end
+
+   def S3sync.main
+     # ---------- OPTIONS PROCESSING ---------- #
+
+     $S3syncOptions = Hash.new
+     optionsParser = GetoptLong.new(
+       [ '--help',          '-h', GetoptLong::NO_ARGUMENT ],
+       [ '--ssl',           '-s', GetoptLong::NO_ARGUMENT ],
+       [ '--recursive',     '-r', GetoptLong::NO_ARGUMENT ],
+       [ '--public-read',   '-p', GetoptLong::NO_ARGUMENT ],
+       [ '--delete',              GetoptLong::NO_ARGUMENT ],
+       [ '--verbose',       '-v', GetoptLong::NO_ARGUMENT ],
+       [ '--dryrun',        '-n', GetoptLong::NO_ARGUMENT ],
+       [ '--debug',         '-d', GetoptLong::NO_ARGUMENT ],
+       [ '--memory',        '-m', GetoptLong::NO_ARGUMENT ],
+       [ '--progress',            GetoptLong::NO_ARGUMENT ],
+       [ '--expires',             GetoptLong::REQUIRED_ARGUMENT ],
+       [ '--cache-control',       GetoptLong::REQUIRED_ARGUMENT ],
+       [ '--exclude',             GetoptLong::REQUIRED_ARGUMENT ],
+       [ '--key',           '-k', GetoptLong::REQUIRED_ARGUMENT ],
+       [ '--secret',              GetoptLong::REQUIRED_ARGUMENT ],
+       [ '--make-dirs',           GetoptLong::NO_ARGUMENT ],
+       [ '--no-md5',              GetoptLong::NO_ARGUMENT ]
+     )
+
+     def S3sync.usage(message = nil)
+       $stderr.puts message if message
+       name = $0.split('/').last
+       $stderr.puts <<-ENDUSAGE
+ #{name} [options] <source> <destination>\t\tversion #{$S3SYNC_VERSION}
+   --help    -h        --verbose     -v     --dryrun    -n
+   --ssl     -s        --recursive   -r     --delete
+   --public-read -p    --expires="<exp>"    --cache-control="<cc>"
+   --exclude="<regexp>"    --progress       --debug     -d
+   --key     -k        --secret             --make-dirs
+   --no-md5
+ One of <source> or <destination> must be of S3 format, the other a local path.
+ Reminders:
+ * An S3 formatted item with bucket 'mybucket' and prefix 'mypre' looks like:
+     mybucket:mypre/some/key/name
+ * Local paths should always use forward slashes '/' even on Windows
+ * Whether you use a trailing slash on the source path makes a difference.
+ * For examples see README.
+       ENDUSAGE
+       exit
+     end #usage
+
+     begin
+       optionsParser.each {|opt, arg| $S3syncOptions[opt] = (arg || true)}
+     rescue StandardError
+       usage # the parser already printed an error message
+     end
+     usage if $S3syncOptions['--help']
+     $S3syncOptions['--verbose'] = true if $S3syncOptions['--dryrun'] or $S3syncOptions['--debug'] or $S3syncOptions['--progress']
+     $S3syncOptions['--ssl'] = true if $S3syncOptions['--ssl'] # change from "" to true to appease s3 port chooser
+
+     if $S3syncOptions['--key']
+       $AWS_ACCESS_KEY_ID = $S3syncOptions['--key']
+     end
+
+     if $S3syncOptions['--secret']
+       $AWS_SECRET_ACCESS_KEY = $S3syncOptions['--secret']
+     end
+
+     # ---------- CONNECT ---------- #
+     S3sync::s3trySetup
+
+     # ---------- PREFIX PROCESSING ---------- #
+     def S3sync.s3Prefix?(pre)
+       # allow for dos-like things e.g. C:\ to be treated as local even with colon
+       pre.include?(':') and not pre.match('^[A-Za-z]:[\\\\/]')
+     end
+     sourcePrefix, destinationPrefix = ARGV
+     usage("You didn't set up your environment variables; see README.txt") if not($AWS_ACCESS_KEY_ID and $AWS_SECRET_ACCESS_KEY)
+     usage('Need a source and a destination') if sourcePrefix == nil or destinationPrefix == nil
+     usage('Both arguments can\'t be on S3') if s3Prefix?(sourcePrefix) and s3Prefix?(destinationPrefix)
+     usage('One argument must be on S3') if !s3Prefix?(sourcePrefix) and !s3Prefix?(destinationPrefix)
+
+     # so we can modify them
+     sourcePrefix, destinationPrefix = sourcePrefix.dup, destinationPrefix.dup
+
+     # handle trailing slash for source properly
+     if(sourcePrefix !~ %r{/$})
+       # no slash on end of source means we need to append the last src dir to dst prefix
+       # testing for empty isn't good enough here.. needs to be "empty apart from potentially having 'bucket:'"
+       slash = (destinationPrefix.empty? or destinationPrefix.match(%r{:$}))? "" : "/"
+       # not good enough.. sometimes this coughs up the bucket as a prefix destinationPrefix.replace(destinationPrefix + slash + sourcePrefix.split(/(?:\/|:)/).last)
+       # take everything at the end after a slash or colon
+       destinationPrefix.replace(destinationPrefix + slash + %r{([^/:]*)$}.match(sourcePrefix)[1])
+     end
+     # no trailing slash on dest, ever.
+     destinationPrefix.sub!(%r{/$}, "")
+
+     # don't repeat slashes
+     sourcePrefix.squeeze!('/')
+     destinationPrefix.squeeze!('/')
+
+     # here's where we find out what direction we're going
+     sourceIsS3 = s3Prefix?(sourcePrefix)
+     # alias these variables to the other strings (in ruby = does not make copies of strings)
+     s3Prefix = sourceIsS3 ? sourcePrefix : destinationPrefix
+     localPrefix = sourceIsS3 ? destinationPrefix : sourcePrefix
+
+     # canonicalize the S3 stuff
+     s3Bucket = (/^(.*?):/.match(s3Prefix))[1]
+     s3Prefix.replace((/:(.*)$/.match(s3Prefix))[1])
+     debug("s3Prefix #{s3Prefix}")
+     $S3SyncOriginalS3Prefix = s3Prefix.dup
+
+     # canonicalize the local stuff
+     # but that can kill a trailing slash, which we need to preserve long enough to know whether we mean "the dir" or "its contents"
+     # it will get re-stripped by the local generator after expressing this knowledge
+     localTrailingSlash = localPrefix.match(%r{/$})
+     localPrefix.replace(File.expand_path(localPrefix))
+     localPrefix += '/' if localTrailingSlash
+     debug("localPrefix #{localPrefix}")
+     # used for exclusion parsing
+     $S3SyncOriginalLocalPrefix = localPrefix.dup
+
+     # exclude preparation
+     # we don't want to build then throw away this regexp for each node in the universe; do it once globally
+     $S3SyncExclude = Regexp.new($S3syncOptions['--exclude']) if $S3syncOptions['--exclude']
+
+     # ---------- GENERATORS ---------- #
+
+     # a generator that will return the files/dirs of the local tree one by one
+     # sorted and decorated for easy comparison with the S3 tree
+     localTree = Enumerator.new do |g|
+       def S3sync.localTreeRecurse(g, prefix, path)
+         debug("localTreeRecurse #{prefix} #{path}")
+         #if $S3syncOptions['--memory']
+         #  $stderr.puts "Starting local recurse"
+         #  stats = ostats stats
+         #end
+         d = nil
+         begin
+           slash = prefix.empty? ? "" : "/"
+           d = Dir.new(prefix + slash + path)
+         rescue Errno::ENOENT
+           # ok the dir doesn't exist at all (this only really occurs for the root i.e. first dir)
+           return nil
+         rescue Errno::EACCES
+           # vista won't even let us touch some stuff in our own profile
+           return nil
+         end
+         # do some pre-processing
+         # the following sleight of hand is to make the recursion match the way s3 sorts
+         # take for example the directory 'foo' and the file 'foo.bar'
+         # when we encounter the dir we would want to recurse into it
+         # but S3 would just say 'period < slash' and sort 'foo.bar' between the dir node
+         # and the contents in that 'dir'
+         #
+         # so the solution is to not recurse into the directory until the point where
+         # it would come up "next" in the S3 list
+         # We have to do these hoops on the local side, because we have very little control
+         # over how S3 will return its results
+         toAdd = Array.new
+         d.each do |name|
+           slash = path.empty? ? "" : "/"
+           partialPath = path + slash + name
+           slash = prefix.empty? ? "" : "/"
+           fullPath = prefix + slash + partialPath
+           if name == "." or name == ".."
+             # skip
+           else
+             # add a dir node if appropriate
+             debug("Test #{fullPath}")
+             if ((not FileTest.symlink?(fullPath)) and FileTest.directory?(fullPath)) and $S3syncOptions['--recursive']
+               debug("Adding it as a dir node")
+               toAdd.push(name + '/') # always trail slash here for sorting purposes (removed below with rindex test)
+             end
+           end
+         end
+         dItems = d.collect.to_a + toAdd
+         d.close
+         d = toAdd = nil
+         dItems.sort! # aws says we will get alpha sorted results but ruby doesn't
+         dItems.each do |name|
+           isDirNode = false
+           if name.rindex('/') == name.length-1
+             name = name.slice(0...name.length-1)
+             isDirNode = true
+             debug("#{name} is a dir node")
+           end
+           slash = path.empty? ? "" : "/"
+           partialPath = path + slash + name
+           slash = prefix.empty? ? "" : "/"
+           fullPath = prefix + slash + partialPath
+           excludePath = fullPath.slice($S3SyncOriginalLocalPrefix.length...fullPath.length)
+           if name == "." or name == ".."
+             # skip
+           elsif $S3SyncExclude and $S3SyncExclude.match(excludePath)
+             debug("skipping local item #{excludePath} because of --exclude")
+           elsif isDirNode
+             localTreeRecurse(g, prefix, partialPath)
+           else
+             # a normal looking node we should try to process
+             debug("local item #{fullPath}")
+             g.yield(LocalNode.new(prefix, partialPath))
+           end
+         end
+         #if $S3syncOptions['--memory']
+         #  $stderr.puts "Ending local recurse"
+         #  stats = ostats stats
+         #end
+       end
+       # a bit of a special case for local, since "foo/" and "foo" are essentially treated the same by file systems
+       # so we need to think harder about what the user really meant in the command line.
+       localPrefixTrim = localPrefix
+       if localPrefix !~ %r{/$}
+         # no trailing slash, so yield the root itself first, then recurse if appropriate
+         # gork this is still not quite good enough.. if local is the dest then we don't know whether s3 will have a root dir node yielded a priori, so we can't know whether to do this. only matters for --erase though
+         g.yield(LocalNode.new(localPrefixTrim, "")) # technically we should check this for exclusion, but excluding the root node is kind of senseless.. and that would be a pain to set up here
+         localTreeRecurse(g, localPrefixTrim, "") if $S3syncOptions['--recursive']
+       else
+         # trailing slash, so ignore the root itself, and just go into the first level
+         localPrefixTrim.sub!(%r{/$}, "") # strip the slash because of how we do local node slash accounting in the recurse above
+         localTreeRecurse(g, localPrefixTrim, "")
+       end
+     end
+
+     # a generator that will return the nodes in the S3 tree one by one
+     # sorted and decorated for easy comparison with the local tree
+     s3Tree = Enumerator.new do |g|
+       def S3sync.s3TreeRecurse(g, bucket, prefix, path)
+         if $S3syncOptions['--memory']
+           $stderr.puts "Starting S3 recurse"
+           GC.start
+           stats = ostats stats
+         end
+         $stderr.puts "s3TreeRecurse #{bucket} #{prefix} #{path}" if $S3syncOptions['--debug']
+         nextPage = true
+         marker = ''
+         while nextPage do
+           fullPrefix = prefix + path
+           debug("nextPage: #{marker}") if marker != ''
+           options = {}
+           options['prefix'] = fullPrefix # start at the right depth
+           options['delimiter'] = '/' # only one dir at a time please
+           options['max-keys'] = '200' # use manageable chunks
+           options['marker'] = marker unless marker == ''
+           d = S3sync.S3try(:list_bucket, bucket, options)
+           $stderr.puts "S3 ERROR: #{d.http_response}" unless d.http_response.is_a? Net::HTTPSuccess
+           # the 'directories' and leaf nodes are in two separate collections
+           # because a dir will never have the same name as a node, we can just shove them together and sort
+           # it's important to evaluate them alphabetically for efficient comparison to the local tree
+           tItems = d.entries + d.common_prefix_entries
+           tItems.sort! do |a,b|
+             aName = a.respond_to?('key') ? a.key : a.prefix
+             bName = b.respond_to?('key') ? b.key : b.prefix
+             # the full path will be returned, efficient to ignore the part we know will be in common
+             aName.slice(fullPrefix.length..aName.length) <=> bName.slice(fullPrefix.length..bName.length)
+           end
+           # get rid of the big s3 objects asap, just save light-weight nodes and strings
+           items = tItems.collect do |item|
+             if item.respond_to?('key')
+               key = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
+               Node.new(key, item.size, item.etag, item.last_modified)
+             else
+               Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.prefix).join
+             end
+           end
+           nextPage = d.properties.is_truncated
+           marker = (d.properties.next_marker)? d.properties.next_marker : ((d.entries.length > 0)? d.entries.last.key : '')
+           # get this into native char set (because when we feed it back to s3 that's what it will expect)
+           marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join
+           tItems = nil
+           d = nil # get rid of this before recursing; it's big
+           item = nil
+           GC.start # not sure but I think yielding before doing this is causing evil closure bloat
+           items.each do |item|
+             if not (item.kind_of? String)
+               # this is an item
+               excludePath = item.name.slice($S3SyncOriginalS3Prefix.length...item.name.length)
+               if $S3SyncExclude and $S3SyncExclude.match(excludePath)
+                 debug("skipping S3 item #{excludePath} due to --exclude")
+               else
+                 debug("S3 item #{item.name}")
+                 g.yield(S3Node.new(bucket, prefix, item))
+               end
+             else
+               # it's a prefix (i.e. there are sub keys)
+               partialPath = item.slice(prefix.length..item.length) # will have trailing slash
+               excludePath = item.slice($S3SyncOriginalS3Prefix.length...item.length)
+               # recurse
+               if $S3SyncExclude and $S3SyncExclude.match(excludePath)
+                 debug("skipping prefix #{excludePath} due to --exclude")
+               else
+                 debug("prefix found: #{partialPath}")
+                 s3TreeRecurse(g, bucket, prefix, partialPath) if $S3syncOptions['--recursive']
+               end
+             end
+           end
+           items = nil
+         end # of while nextPage
+         if $S3syncOptions['--memory']
+           $stderr.puts "Ending S3 recurse"
+           GC.start
+           stats = ostats stats
+         end
+       end
+       # this will yield the root node first and then recurse
+       s3TreeRecurse(g, s3Bucket, s3Prefix, "")
+     end
+
+     # alias the tree objects so we don't care below which direction the transfer is going
+     if sourceIsS3
+       sourceTree, destinationTree = s3Tree, localTree
+     else
+       sourceTree, destinationTree = localTree, s3Tree
+     end
+
+     # ---------- COMPARATOR ---------- #
+
+     # run the comparison engine and act according to what we find for each check
+     nodesToDelete = Array.new # a stack. have to delete in reverse order of normal create/update processing
+
+     sourceNode = sourceTree.next rescue nil
+     destinationNode = destinationTree.next rescue nil
+     while sourceNode or destinationNode do
+       debug("source: #{sourceNode.name}") if sourceNode
+       debug("dest: #{destinationNode.name}") if destinationNode
+       if (!destinationNode) or (sourceNode and (sourceNode.name < destinationNode.name))
+         dNode =
+           if sourceNode.kind_of? LocalNode
+             S3Node.new(s3Bucket, s3Prefix, sourceNode.name)
+           else
+             LocalNode.new(localPrefix, sourceNode.name)
+           end
+         puts "Create node #{sourceNode.name}" if $S3syncOptions['--verbose']
+         dNode.updateFrom(sourceNode) unless $S3syncOptions['--dryrun']
+         sourceNode = sourceNode.nil? ? nil : sourceTree.next rescue nil
+       elsif (!sourceNode) or (destinationNode and (sourceNode.name > destinationNode.name))
+         $stderr.puts "Source does not have #{destinationNode.name}" if $S3syncOptions['--debug']
+         if $S3syncOptions['--delete']
+           if destinationNode.directory?
+             # have to wait
+             nodesToDelete.push(destinationNode)
+           else
+             puts "Remove node #{destinationNode.name}" if $S3syncOptions['--verbose']
+             destinationNode.delete unless $S3syncOptions['--dryrun']
+           end
+         end
+         destinationNode = destinationNode.nil? ? nil : destinationTree.next rescue nil
+       elsif sourceNode.name == destinationNode.name
+         if (sourceNode.size != destinationNode.size) or (($S3syncOptions['--no-md5'])? (sourceNode.date > destinationNode.date) : (sourceNode.tag != destinationNode.tag))
+           puts "Update node #{sourceNode.name}" if $S3syncOptions['--verbose']
+           destinationNode.updateFrom(sourceNode) unless $S3syncOptions['--dryrun']
+         elsif $S3syncOptions['--debug']
+           $stderr.puts "Node #{sourceNode.name} unchanged"
+         end
+         sourceNode = sourceNode.nil? ? nil : sourceTree.next rescue nil
+         destinationNode = destinationNode.nil? ? nil : destinationTree.next rescue nil
+       end
+     end
+
+     # get rid of the (now empty, except for other directories) directories
+     nodesToDelete.reverse_each do |node|
+       puts "Remove node #{node.name}" if $S3syncOptions['--verbose']
+       node.delete unless $S3syncOptions['--dryrun']
+     end
+   end #main
+
+
+   # ---------- NODE ---------- #
+   class Node
+     attr_reader :name
+     attr_reader :size
+     attr_reader :tag
+     attr_reader :date
+     def initialize(name = '', size = 0, tag = '', date = Time.now.utc)
+       @name = name
+       @size = size
+       @tag = tag
+       @date = date
+     end
+     def directory?()
+       @tag == $S3syncDirTag and @size == $S3syncDirString.length
+     end
+   end
+
+   # ---------- S3Node ---------- #
+   class S3Node < Node
+     @path = nil
+     @bucket = nil
+     @result = nil
+     def initialize(bucket, prefix, itemOrName)
+       @bucket = bucket
+       if itemOrName.kind_of? String
+         @name = itemOrName
+         @name.sub!(%r{/$}, "") # don't create directories with a slash on the end
+         # 6/2007. the prefix can be filled but the name empty, in the case of s3sync -r somedir somebucket:
+         if (not prefix.empty? and @name.empty?)
+           @name = prefix
+           itemOrName = prefix
+           prefix = ""
+         end
+         slash = prefix.empty? ? "" : "/"
+         @path = prefix + slash + itemOrName
+       else
+         @name = (itemOrName.name.slice((prefix.length)..itemOrName.name.length) or '')
+         # depending whether the prefix is / tailed, the name might need trimming
+         @name.sub!(%r{^/}, "") # get rid of leading slash in name if there (from above simplistic split)
+         @name.sub!(%r{/$}, "") # don't create directories with a slash on the end
+         @path = itemOrName.name
+         @path.sub!(%r{/$}, "") # don't create directories with a slash on the end
+         @size = itemOrName.size
+         @tag = itemOrName.tag.gsub(/"/, '')
+         @date = Time.xmlschema(itemOrName.date)
+       end
+       debug("s3 node object init. Name:#{@name} Path:#{@path} Size:#{@size} Tag:#{@tag} Date:#{@date}")
+     end
+     # get this item from s3 into the provided stream
+     # S3 pushes to the local item, due to how http streaming is implemented
+     def to_stream(s)
+       @result = S3sync.S3try(:get_stream, @bucket, @path, {}, s)
+     end
+     def symlink?()
+       unless @result
+         @result = S3sync.S3try(:head, @bucket, @path)
+       end
+       debug("symlink value is: #{@result.object.metadata['symlink']}")
+       @result.object.metadata['symlink'] == 'true'
+     end
+     def owner
+       unless @result
+         @result = S3sync.S3try(:head, @bucket, @path)
+       end
+       debug("Owner of this s3 node is #{@result.object.metadata['owner']}")
+       @result.object.metadata['owner'].to_i # if not there, will be nil => 0 which == root so good default
+     end
+     def group
+       unless @result
+         @result = S3sync.S3try(:head, @bucket, @path)
+       end
+       @result.object.metadata['group'].to_i # 0 default ok
+     end
+     def permissions
+       g = @result.object.metadata['permissions']
+       g ? g.to_i : 600 # default to owner only
+     end
+     def updateFrom(fromNode)
+       if fromNode.respond_to?(:stream)
+         meta = Hash.new
+         meta['owner'] = fromNode.owner.to_s
+         meta['group'] = fromNode.group.to_s
+         meta['permissions'] = fromNode.permissions.to_s
+         meta['symlink'] = 'true' if fromNode.symlink?
+         begin
+           theStream = fromNode.stream
+           theStream = ProgressStream.new(theStream, fromNode.size) if $S3syncOptions['--progress']
+
+           s3o = S3::S3Object.new(theStream, meta)
+           debug(@path)
+           headers = {'Content-Length' => (fromNode.size.respond_to?(:nonzero?) ? fromNode.size.to_s : '0')}
+           headers['x-amz-acl'] = 'public-read' if $S3syncOptions['--public-read']
+           headers['Expires'] = $S3syncOptions['--expires'] if $S3syncOptions['--expires']
+           headers['Cache-Control'] = $S3syncOptions['--cache-control'] if $S3syncOptions['--cache-control']
+           fType = @path.split('.').last
+           debug("File extension: #{fType}")
+           if defined?($mimeTypes) and fType != '' and (mType = $mimeTypes[fType]) and mType != ''
+             debug("Mime type: #{mType}")
+             headers['Content-Type'] = mType
+           end
+           @result = S3sync.S3try(:put, @bucket, @path, s3o, headers)
+           theStream.close if (theStream and not theStream.closed?)
+         rescue NoMethodError
+           # when --progress is used and we can't get the stream object, it doesn't report as null
+           # so the above .closed? test will break
+           $stderr.puts "Skipping #{@path}: #{$!}"
+         rescue SystemCallError
+           theStream.close if (theStream and not theStream.closed?)
+           $stderr.puts "Skipping #{@path}: #{$!}"
+         end
+       else
+         raise "Node provided as update source doesn't support :stream"
+       end
+     end
+     def delete
+       @result = S3sync.S3try(:delete, @bucket, @path)
+     end
+   end
+
+   # ---------- LocalNode ---------- #
+
+   class LocalNode < Node
+     @path = nil
+     def initialize(prefix, partialPath)
+       slash = prefix.empty? ? "" : "/"
+       @path = prefix + slash + partialPath
+       # slash isn't at the front of this any more @name = (partialPath.slice(1..partialPath.length) or '')
+       @name = partialPath or ''
+       if FileTest.symlink?(@path)
+         # this could use the 'file' case below, but why create an extra temp file
+         linkData = File.readlink(@path)
+         $stderr.puts "link to: #{linkData}" if $S3syncOptions['--debug']
+         @size = linkData.length
+         unless $S3syncOptions['--no-md5']
+           md5 = Digest::MD5.new()
+           md5 << linkData
+           @tag = md5.hexdigest
+         end
+         @date = File.lstat(@path).mtime.utc
+       elsif FileTest.file?(@path)
+         @size = FileTest.size(@path)
+         data = nil
+         begin
+           unless $S3syncOptions['--no-md5']
+             data = self.stream
+             md5 = Digest::MD5.new()
+             while !data.eof?
+               md5 << data.read(2048) # stream so it's not taking all memory
+             end
+             data.close
+             @tag = md5.hexdigest
+           end
+           @date = File.stat(@path).mtime.utc
+         rescue SystemCallError
+           # well we're not going to have an md5 that's for sure
+           @tag = nil
+         end
+       elsif FileTest.directory?(@path)
+         # all s3 directories are dummy nodes that contain the same directory string
+         # so for easy comparison, set our size and tag thusly
+         @size = $S3syncDirString.length
+         @tag = $S3syncDirTag
+         @date = File.stat(@path).mtime.utc
+       end
+       debug("local node object init. Name:#{@name} Path:#{@path} Size:#{@size} Tag:#{@tag} Date:#{@date}")
+     end
+     # return a stream that will read the contents of the local item
+     # local gets pulled by the S3Node update fn, due to how http streaming is implemented
+     def stream
+       begin
+         # 1.0.8 switch order of these tests because a symlinked file will say yes to 'file?'
+         if FileTest.symlink?(@path) or FileTest.directory?(@path)
+           tf = Tempfile.new('s3sync')
+           if FileTest.symlink?(@path)
+             tf.printf('%s', File.readlink(@path))
+           elsif FileTest.directory?(@path)
+             tf.printf('%s', $S3syncDirString)
+           end
+           tf.close
+           tf.open
+           tf
+         elsif FileTest.file?(@path)
+           File.open(@path, 'rb')
+         end
+       rescue SystemCallError
+         $stderr.puts "Could not read #{@path}: #{$!}"
+         raise
+       end
+     end
+     def stat
+       FileTest.symlink?(@path) ? File.lstat(@path) : File.stat(@path)
+     end
+     def exist?
+       FileTest.exist?(@path) or FileTest.symlink?(@path)
+     end
+     def owner
+       self.exist? ? self.stat().uid : 0
+     end
+     def group
+       self.exist? ? self.stat().gid : 0
+     end
+     def permissions
+       self.exist? ? self.stat().mode : 600
+     end
+     def updateFrom(fromNode)
+       if fromNode.respond_to?(:to_stream)
+         fName = @path + '.s3syncTemp'
+         # handle the case where the user wants us to create dirs that don't exist in S3
+         if $S3syncOptions['--make-dirs']
+           # ensure target's path exists
+           dirs = @path.split('/')
+           # but the last one is a file name
+           dirs.pop()
+           current = ''
+           dirs.each do |dir|
+             current << dir << '/'
+             begin
+               Dir.mkdir(current) unless FileTest.exist?(current)
+             rescue SystemCallError
+               $stderr.puts "Could not mkdir #{current}: #{$!}"
+             end
+           end
+         end
+         unless fromNode.directory?
+           f = File.open(fName, 'wb')
+           f = ProgressStream.new(f, fromNode.size) if $S3syncOptions['--progress']
+
+           fromNode.to_stream(f)
+           f.close
+         end
+         # get original item out of the way
+         File.unlink(@path) if File.exist?(@path)
+         if fromNode.symlink?
+           linkTo = ''
+           File.open(fName, 'rb'){|f| linkTo = f.read}
+           debug("#{@path} will be a symlink to #{linkTo}")
+           begin
+             File.symlink(linkTo, @path)
+           rescue NotImplementedError
+             # windows doesn't do symlinks, for example
+             # just bail
+             File.unlink(fName) if File.exist?(fName)
+             return
+           rescue SystemCallError
+             $stderr.puts "Could not write symlink #{@path}: #{$!}"
+           end
+         elsif fromNode.directory?
+           # only get here when the dir doesn't exist. else they'd compare ==
+           debug(@path)
+           begin
+             Dir.mkdir(@path) unless FileTest.exist?(@path)
+           rescue SystemCallError
+             $stderr.puts "Could not mkdir #{@path}: #{$!}"
+           end
+
+         else
+           begin
+             File.rename(fName, @path)
+           rescue SystemCallError
+             $stderr.puts "Could not write (rename) #{@path}: #{$!}"
+           end
+         end
+         # clean up if the temp file is still there (as for links)
+         File.unlink(fName) if File.exist?(fName)
+
+         # update permissions
+         linkCommand = fromNode.symlink? ? 'l' : ''
+         begin
+           File.send(linkCommand + 'chown', fromNode.owner, fromNode.group, @path)
+           File.send(linkCommand + 'chmod', fromNode.permissions, @path)
+         rescue NotImplementedError
+           # no one has lchmod, but who really cares
+         rescue SystemCallError
+           $stderr.puts "Could not change owner/permissions on #{@path}: #{$!}"
+         end
+       else
+         raise "Node provided as update source doesn't support :to_stream"
+       end
+     end
+     def symlink?()
+       FileTest.symlink?(@path)
+     end
+     def delete
+       # don't try to delete the restore root dir
+       # this is a quick fix to deal with the fact that the tree recurse has to visit the root node
+       return unless @name != ''
+       return unless FileTest.exist?(@path)
+       begin
+         if FileTest.directory?(@path)
+           Dir.rmdir(@path)
+         else
+           File.unlink(@path)
+         end
+       rescue SystemCallError
+         $stderr.puts "Could not delete #{@path}: #{$!}"
+       end
+     end
+   end
+
+ end #module
+
+ def debug(str)
+   $stderr.puts str if $S3syncOptions['--debug']
+ end
+
+ def ostats(last_stat = nil)
+   stats = Hash.new(0)
+   ObjectSpace.each_object {|o| stats[o.class] += 1}
+
+   stats.sort {|(k1,v1),(k2,v2)| v2 <=> v1}.each do |k,v|
+     $stderr.printf "%-30s %10d", k, v
+     $stderr.printf " delta %10d", (v - last_stat[k]) if last_stat
+     $stderr.puts
+   end
+
+   stats
+ end
+
+ # go!
+ S3sync::main
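Typical s3sync invocations, following the reminders in the usage text (illustrative only; the bucket and paths are made up):

    # mirror the contents of /var/www/ under the 'docroot' prefix
    s3sync -r -v --public-read /var/www/ mybucket:docroot
    # same tree, but without the trailing slash on the source the last
    # path component is appended, so it lands under docroot/www instead
    s3sync -r /var/www mybucket:docroot
    # restore direction: S3 is the source, a local path is the destination
    s3sync -r --make-dirs mybucket:docroot /var/restore/

Note that the comparator in S3sync.main walks both trees in a single pass as two sorted streams, which is why both generators go to such lengths to yield names in S3's sort order.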