s3sync-cf 0.0.0

data/bin/s3cmd ADDED
@@ -0,0 +1,338 @@
+ #!/usr/bin/env ruby
+ # This software code is made available "AS IS" without warranties of any
+ # kind. You may copy, display, modify and redistribute the software
+ # code either by itself or as incorporated into your code; provided that
+ # you do not remove any proprietary notices. Your use of this software
+ # code is at your own risk and you waive any claim against the author
+ # with respect to your use of this software code.
+ # (c) 2007 s3sync.net
+ #
+
+ module S3sync
+
+ # always look "here" for include files (thanks aktxyz)
+ # $LOAD_PATH << File.join(File.expand_path(File.dirname(__FILE__)), "../lib/s3sync/")
+ $LOAD_PATH << File.join(File.expand_path(File.dirname(__FILE__)), "../lib/")
+
+ require 's3sync'
+ require 'getoptlong'
+
+
+ def S3sync.s3cmdMain
+ # ---------- OPTIONS PROCESSING ---------- #
+
+ $S3syncOptions = Hash.new
+ optionsParser = GetoptLong.new(
+ [ '--help', '-h', GetoptLong::NO_ARGUMENT ],
+ [ '--ssl', '-s', GetoptLong::NO_ARGUMENT ],
+ [ '--verbose', '-v', GetoptLong::NO_ARGUMENT ],
+ [ '--dryrun', '-n', GetoptLong::NO_ARGUMENT ],
+ [ '--debug', '-d', GetoptLong::NO_ARGUMENT ],
+ [ '--progress', GetoptLong::NO_ARGUMENT ],
+ [ '--expires-in', GetoptLong::REQUIRED_ARGUMENT ]
+ )
+
+ def S3sync.s3cmdUsage(message = nil)
+ $stderr.puts message if message
+ name = $0.split('/').last
+ $stderr.puts <<"ENDUSAGE"
+ #{name} [options] <command> [arg(s)]\t\tversion #{S3sync::VERSION}
+ --help -h --verbose -v --dryrun -n
+ --ssl -s --debug -d --progress
+ --expires-in=( <# of seconds> | [#d|#h|#m|#s] )
+
+ Commands:
+ #{name} listbuckets [headers]
+ #{name} createbucket <bucket> [constraint (i.e. EU)]
+ #{name} deletebucket <bucket> [headers]
+ #{name} list <bucket>[:prefix] [max/page] [delimiter] [headers]
+ #{name} location <bucket> [headers]
+ #{name} delete <bucket>:key [headers]
+ #{name} deleteall <bucket>[:prefix] [headers]
+ #{name} get|put <bucket>:key <file> [headers]
+ #{name} copy <bucket>:key <bucket>:key [headers]
+ #{name} copyall <bucket>:key <bucket>:key [headers]
+ #{name} headers <bucket>:key [headers]
+ ENDUSAGE
+ exit
+ end #usage
+
+ begin
+ optionsParser.each {|opt, arg| $S3syncOptions[opt] = (arg || true)}
+ rescue StandardError
+ s3cmdUsage # the parser already printed an error message
+ end
+ s3cmdUsage if $S3syncOptions['--help']
+ $S3syncOptions['--verbose'] = true if $S3syncOptions['--dryrun'] or
+ $S3syncOptions['--debug'] or
+ $S3syncOptions['--progress']
+
+ # change from "" to true to appease s3 port chooser
+ $S3syncOptions['--ssl'] = true if $S3syncOptions['--ssl']
+
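+ # A relative --expires-in value such as "1d2h30m" is converted to seconds below
+ # (1d2h30m => 60*(30 + 60*(2 + 24*1)) = 95400); a plain number of seconds is left as given.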
+ if $S3syncOptions['--expires-in'] =~ /d|h|m|s/
+ e = $S3syncOptions['--expires-in']
+ days = (e =~ /(\d+)d/)? (/(\d+)d/.match(e))[1].to_i : 0
+ hours = (e =~ /(\d+)h/)? (/(\d+)h/.match(e))[1].to_i : 0
+ minutes = (e =~ /(\d+)m/)? (/(\d+)m/.match(e))[1].to_i : 0
+ seconds = (e =~ /(\d+)s/)? (/(\d+)s/.match(e))[1].to_i : 0
+ $S3syncOptions['--expires-in'] = seconds + 60 * ( minutes + 60 * ( hours + 24 * ( days ) ) )
+ end
+
+ # ---------- CONNECT ---------- #
+ S3sync::s3trySetup
+
+
+ # ---------- COMMAND PROCESSING ---------- #
+ command, path, file = ARGV
+
+ s3cmdUsage("You didn't set up your environment variables; see README.txt") if not($AWS_ACCESS_KEY_ID and $AWS_SECRET_ACCESS_KEY)
+ s3cmdUsage("Need a command (etc)") if not command
+
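+ # Split the remote spec into bucket and key: "mybucket:some/key" yields bucket "mybucket"
+ # and path "some/key"; a bare "mybucket" leaves path empty.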
+ path = '' unless path
+ path = path.dup # modifiable
+ path += ':' unless path.match(':')
+ bucket = (/^(.*?):/.match(path))[1]
+ path.replace((/:(.*)$/.match(path))[1])
+
+ case command
+
+ when "delete"
+ s3cmdUsage("Need a bucket") if bucket == ''
+ s3cmdUsage("Need a key") if path == ''
+ headers = hashPairs(ARGV[2...ARGV.length])
+ $stderr.puts "delete #{bucket}:#{path} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+ S3try(:delete, bucket, path) unless $S3syncOptions['--dryrun']
+
+ when "deleteall"
+ s3cmdUsage("Need a bucket") if bucket == ''
+ headers = hashPairs(ARGV[2...ARGV.length])
+ $stderr.puts "delete ALL entries in #{bucket}:#{path} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+ more = true
+ marker = nil
+ while more do
+ res = s3cmdList(bucket, path, nil, nil, marker)
+ res.entries.each do |item|
+ # the s3 commands (with my modified UTF-8 conversion) expect native char encoding input
+ key = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
+ $stderr.puts "delete #{bucket}:#{key} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+ S3try(:delete, bucket, key) unless $S3syncOptions['--dryrun']
+ end
+
+ more = res.properties.is_truncated
+ marker = (res.properties.next_marker)? res.properties.next_marker : ((res.entries.length > 0) ? res.entries.last.key : nil)
+ # get this into local charset; when we pass it to s3 that is what's expected
+ marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join if marker
+ end
+
+ when "list"
+ s3cmdUsage("Need a bucket") if bucket == ''
+ max, delim = ARGV[2..3]
+ headers = hashPairs(ARGV[4...ARGV.length])
+ $stderr.puts "list #{bucket}:#{path} #{max} #{delim} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+ puts "--------------------"
+
+ more = true
+ marker = nil
+ while more do
+ res = s3cmdList(bucket, path, max, delim, marker, headers)
+ if delim
+ res.common_prefix_entries.each do |item|
+ puts "dir: " + Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.prefix).join
+ end
+ puts "--------------------"
+ end
+ res.entries.each do |item|
+ puts Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
+ end
+ if res.properties.is_truncated
+ printf "More? Y/n: "
+ more = (STDIN.gets.match('^[Yy]?$'))
+ marker = (res.properties.next_marker)? res.properties.next_marker : ((res.entries.length > 0) ? res.entries.last.key : nil)
+ # get this into local charset; when we pass it to s3 that is what's expected
+ marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join if marker
+ else
+ more = false
+ end
+ end # more
+
+ when "listbuckets"
+ headers = hashPairs(ARGV[1...ARGV.length])
+ $stderr.puts "list all buckets #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+ if $S3syncOptions['--expires-in']
+ $stdout.puts S3url(:list_all_my_buckets, headers)
+ else
+ res = S3try(:list_all_my_buckets, headers)
+ res.entries.each do |item|
+ puts item.name
+ end
+ end
+
+ when "createbucket"
+ s3cmdUsage("Need a bucket") if bucket == ''
+ lc = ''
+ if(ARGV.length > 2)
+ lc = '<CreateBucketConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><LocationConstraint>' + ARGV[2] + '</LocationConstraint></CreateBucketConfiguration>'
+ end
+ $stderr.puts "create bucket #{bucket} #{lc}" if $S3syncOptions['--verbose']
+ S3try(:create_bucket, bucket, lc) unless $S3syncOptions['--dryrun']
+
+ when "deletebucket"
+ s3cmdUsage("Need a bucket") if bucket == ''
+ headers = hashPairs(ARGV[2...ARGV.length])
+ $stderr.puts "delete bucket #{bucket} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+ S3try(:delete_bucket, bucket, headers) unless $S3syncOptions['--dryrun']
+
+ when "location"
+ s3cmdUsage("Need a bucket") if bucket == ''
+ headers = hashPairs(ARGV[2...ARGV.length])
+ query = Hash.new
+ query['location'] = 'location'
+ $stderr.puts "location request bucket #{bucket} #{query.inspect} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+ S3try(:get_query_stream, bucket, '', query, headers, $stdout) unless $S3syncOptions['--dryrun']
+
+ when "get"
+ s3cmdUsage("Need a bucket") if bucket == ''
+ s3cmdUsage("Need a key") if path == ''
+ s3cmdUsage("Need a file") if file == ''
+ headers = hashPairs(ARGV[3...ARGV.length])
+ $stderr.puts "get from key #{bucket}:#{path} into #{file} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+ unless $S3syncOptions['--dryrun']
+ if $S3syncOptions['--expires-in']
+ $stdout.puts S3url(:get, bucket, path, headers)
+ else
+ outStream = File.open(file, 'wb')
+ outStream = ProgressStream.new(outStream) if $S3syncOptions['--progress']
+ S3try(:get_stream, bucket, path, headers, outStream)
+ outStream.close
+ end
+ end
+
+ when "put"
+ s3cmdUsage("Need a bucket") if bucket == ''
+ s3cmdUsage("Need a key") if path == ''
+ s3cmdUsage("Need a file") if file == ''
+ headers = hashPairs(ARGV[3...ARGV.length])
+ stream = File.open(file, 'rb')
+ stream = ProgressStream.new(stream, File.stat(file).size) if $S3syncOptions['--progress']
+ s3o = S3::S3Object.new(stream, {}) # support meta later?
+ headers['Content-Length'] = FileTest.size(file).to_s
+ $stderr.puts "put to key #{bucket}:#{path} from #{file} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+ S3try(:put, bucket, path, s3o, headers) unless $S3syncOptions['--dryrun']
+ stream.close
+
+
+ when "copy"
+ s3cmdUsage("Need a bucket") if bucket == ''
+ s3cmdUsage("Need a key") if path == ''
+ s3cmdUsage("Need a file") if file == ''
+
+ file = file.dup # modifiable
+ file += ':' unless file.match(':')
+ dest_bucket = (/^(.*?):/.match(file))[1]
+ file.replace((/:(.*)$/.match(file))[1])
+
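+ # S3 performs the copy server-side via x-amz-copy-source; metadata-directive REPLACE applies
+ # the headers given on the command line, while COPY carries the source object's metadata over.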
+ headers = hashPairs(ARGV[3...ARGV.length])
+ if headers.length > 0
+ headers["x-amz-metadata-directive"] = "REPLACE"
+ else
+ headers["x-amz-metadata-directive"] = "COPY"
+ end
+ headers["x-amz-copy-source"] = "/#{bucket}/#{URI.escape(path)}"
+
+
+ $stderr.puts "copy #{bucket}:#{path} to #{dest_bucket}/#{file} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+ S3try(:put, dest_bucket, file, nil, headers) unless $S3syncOptions['--dryrun']
+
+ when "copyall"
+ s3cmdUsage("Need a bucket") if bucket == ''
+ s3cmdUsage("Need a path") if path == ''
+
+ file = file.dup # modifiable
+ file += ':' unless file.match(':')
+ dest_bucket = (/^(.*?):/.match(file))[1]
+ dest_key = file.replace((/:(.*)$/.match(file))[1])
+
+ src_path = path
+
+ headers = hashPairs(ARGV[2...ARGV.length])
+
+ $stderr.puts "copy #{bucket}:#{path} to #{dest_bucket}:#{dest_key} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+ more = true
+ marker = nil
+ while more do
+ res = s3cmdList(bucket, path, nil, nil, marker)
+ res.entries.each do |item|
+ # the s3 commands (with my modified UTF-8 conversion) expect native char encoding input
+ path = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
+
+ file = path.gsub(src_path, dest_key)
+
+ headers = hashPairs(ARGV[3...ARGV.length])
+ if headers.length > 0
+ headers["x-amz-metadata-directive"] = "REPLACE"
+ else
+ headers["x-amz-metadata-directive"] = "COPY"
+ end
+ headers["x-amz-copy-source"] = "/#{bucket}/#{URI.escape(path)}"
+
+ $stderr.puts "copy #{bucket}:#{path} to #{dest_bucket}/#{file} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+ S3try(:put, dest_bucket, file, nil, headers) unless $S3syncOptions['--dryrun']
+
+ end
+
+ more = res.properties.is_truncated
+ marker = (res.properties.next_marker)? res.properties.next_marker : ((res.entries.length > 0) ? res.entries.last.key : nil)
+ # get this into local charset; when we pass it to s3 that is what's expected
+ marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join if marker
+ end
+
+ when "headers"
+ s3cmdUsage("Need a bucket") if bucket == ''
+ s3cmdUsage("Need a path") if path == ''
+
+ headers = hashPairs(ARGV[2...ARGV.length])
+
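+ # S3 has no in-place metadata update, so each matching object is copied onto itself with
+ # x-amz-metadata-directive REPLACE to apply the new headers.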
+ $stderr.puts "apply headers to ALL entries in #{bucket}:#{path} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
+ more = true
+ marker = nil
+ while more do
+ res = s3cmdList(bucket, path, nil, nil, marker)
+ res.entries.each do |item|
+ # the s3 commands (with my modified UTF-8 conversion) expect native char encoding input
+ key = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
+
+ tmpHeaders = headers.merge({
+ "x-amz-copy-source" => "/#{bucket}/#{URI.escape(key)}",
+ "x-amz-metadata-directive" => "REPLACE"
+ })
+
+ #
+ # grab the current content-type unless it's been specified explicitly
+ #
+ if not tmpHeaders.key?("content-type")
+ currentHeaders = S3try(:head, bucket, key).object.metadata
+ tmpHeaders["content-type"] = currentHeaders["content-type"]
+ end
+
+ $stderr.puts "apply headers to #{bucket}:#{key} #{tmpHeaders.inspect}" if $S3syncOptions['--verbose']
+ S3try(:put, bucket, key, nil, tmpHeaders) unless $S3syncOptions['--dryrun']
+ end
+
+ more = res.properties.is_truncated
+ marker = (res.properties.next_marker)? res.properties.next_marker : ((res.entries.length > 0) ? res.entries.last.key : nil)
+ # get this into local charset; when we pass it to s3 that is what's expected
+ marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join if marker
+ end
+
+
+ else
+ s3cmdUsage
+ end
+
+ end #main
+
+ end #module
+
+
+ S3sync::s3cmdMain #go!
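
For reference, with the AWS credential environment variables described in README.txt set, typical invocations follow the usage text above (bucket and key names here are hypothetical):

  s3cmd listbuckets
  s3cmd createbucket mybucket EU
  s3cmd put mybucket:backup/archive.tgz archive.tgz
  s3cmd get mybucket:backup/archive.tgz restored.tgz
  s3cmd list mybucket:backup/ 100 /
  s3cmd deleteall mybucket:backup/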