s3cp 0.2.2 → 0.2.3

History.txt CHANGED
@@ -1,4 +1,10 @@
- === 0.2.3 / (Pending)
+ === 0.2.4 / (Pending)
+
+ === 0.2.3 / (2012-02-24)
+
+ * Added: --include and --exclude REGEX support for s3cp
+
+ * Added: --sync mode for s3cp
 
 === 0.2.2 / (2012-02-23)
 
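The two changelog entries above correspond to the new command-line flags (-i/--include REGEX, -x/--exclude REGEX, and --sync) registered in the lib/s3cp/s3cp.rb changes further down. A minimal usage sketch; the bucket name and local paths are made up for illustration, and the include/exclude filters only apply together with -r, as enforced by the new option checks:

    # copy only .log files, skipping anything under tmp/, recursively
    s3cp -r --include '\.log$' --exclude '/tmp/' /var/log s3://my-bucket/logs/

    # re-run an upload, skipping files whose local MD5 already matches the S3 ETag
    s3cp -r --sync /var/log s3://my-bucket/logs/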
lib/s3cp/#Untitled-7# ADDED
@@ -0,0 +1,288 @@
+ diff --git a/History.txt b/History.txt
+ index 8de2725..37eaec8 100644
+ --- a/History.txt
+ +++ b/History.txt
+ @@ -1,5 +1,9 @@
+ === 0.2.3 / (Pending)
+
+ +* Added: --include and --exclude REGEX support for s3cp
+ +
+ +* Added: --sync mode for s3cp
+ +
+ === 0.2.2 / (2012-02-23)
+
+ * Added: Progress bars during upload/download if $stdout.isatty
+ diff --git a/lib/s3cp/s3cp.rb b/lib/s3cp/s3cp.rb
+ index 0fabcdd..a1423f5 100644
+ --- a/lib/s3cp/s3cp.rb
+ +++ b/lib/s3cp/s3cp.rb
+ @@ -35,6 +35,9 @@ options[:overwrite] = ENV["S3CP_RETRIES"] ? (ENV["S3CP_OVERWRITE"] =~ /y|y
+ options[:checksum] = ENV["S3CP_CHECKSUM"] ? (ENV["S3CP_CHECKSUM"] =~ /y|yes|true|1|^\s*$/i ? true : false) : true
+ options[:retries] = ENV["S3CP_RETRIES"] ? ENV["S3CP_RETRIES"].to_i : 5
+ options[:retry_delay] = ENV["S3CP_RETRY_DELAY"] ? ENV["S3CP_RETRY_DELAY"].to_i : 1
+ +options[:include_regex] = []
+ +options[:exclude_regex] = []
+ +options[:sync] = false
+
+ op = OptionParser.new do |opts|
+ opts.banner = <<-BANNER
+ @@ -67,6 +70,10 @@ op = OptionParser.new do |opts|
+ options[:overwrite] = false
+ end
+
+ + opts.on("--sync", "Sync mode: use checksum to determine if files need copying.") do
+ + options[:sync] = true
+ + end
+ +
+ opts.on("--max-attempts N", "Number of attempts to upload/download until checksum matches (default #{options[:retries]})") do |attempts|
+ options[:retries] = attempts.to_i
+ end
+ @@ -90,6 +97,14 @@ op = OptionParser.new do |opts|
+ opts.separator " AMZ headers: \'x-amz-acl: public-read\'"
+ opts.separator ""
+
+ + opts.on("-i REGEX", "--include REGEX", "Copy only files matching the following regular expression.") do |regex|
+ + options[:include_regex] << regex
+ + end
+ +
+ + opts.on("-x REGEX", "--exclude REGEX", "Do not copy files matching provided regular expression.") do |regex|
+ + options[:exclude_regex] << regex
+ + end
+ +
+ opts.on("--verbose", "Verbose mode") do
+ options[:verbose] = true
+ end
+ @@ -110,6 +125,16 @@ if ARGV.size < 2
+ exit
+ end
+
+ +if options[:include_regex].any? && !options[:recursive]
+ + puts "-i (--include regex) option requires -r (recursive) option."
+ + exit(1)
+ +end
+ +
+ +if options[:exclude_regex].any? && !options[:recursive]
+ + puts "-x (--exclude regex) option requires -r (recursive) option."
+ + exit(1)
+ +end
+ +
+ destination = ARGV.last
+ sources = ARGV[0..-2]
+
+ @@ -144,6 +169,16 @@ end
+ @bucket = $1
+ @prefix = $2
+
+ +@includes = options[:include_regex].map { |s| Regexp.new(s) }
+ +@excludes = options[:exclude_regex].map { |s| Regexp.new(s) }
+ +
+ +def match(path)
+ + matching = true
+ + return false if @includes.any? && !@includes.any? { |regex| regex.match(path) }
+ + return false if @excludes.any? && @excludes.any? { |regex| regex.match(path) }
+ + true
+ +end
+ +
+ @s3 = S3CP.connect()
+
+ def direction(from, to)
+ @@ -217,68 +252,90 @@ end
+
+ def local_to_s3(bucket_to, key, file, options = {})
+ log(with_headers("Copy #{file} to s3://#{bucket_to}/#{key}"))
+ - if options[:checksum]
+ - expected_md5 = md5(file)
+ +
+ + expected_md5 = if options[:checksum] || options[:sync]
+ + md5(file)
+ end
+ - retries = 0
+ - begin
+ - if retries == options[:retries]
+ - fail "Unable to upload to s3://#{bucket_to}/#{key} after #{retries} attempts."
+ - end
+ - if retries > 0
+ - STDERR.puts "Warning: failed checksum for s3://#{bucket_to}/#{bucket_to}. Retrying #{options[:retries] - retries} more time(s)."
+ - sleep options[:retry_delay]
+ +
+ + actual_md5 = if options[:sync]
+ + md5 = s3_checksum(bucket_to, key)
+ + case md5
+ + when :not_found
+ + nil
+ + when :invalid
+ + STDERR.puts "Warning: invalid MD5 checksum in metadata; file will be force-copied."
+ + nil
+ + else
+ + md5
+ end
+ + end
+
+ - f = File.open(file)
+ + if actual_md5.nil? || (options[:sync] && expected_md5 != actual_md5)
+ + retries = 0
+ begin
+ - if $stdout.isatty
+ - f = Proxy.new(f)
+ - progress_bar = ProgressBar.new(File.basename(file), File.size(file)).tap do |p|
+ - p.file_transfer_mode
+ - end
+ - class << f
+ - attr_accessor :progress_bar
+ - def read(length, buffer=nil)
+ - begin
+ - result = @target.read(length, buffer)
+ - @progress_bar.inc result.length if result
+ - result
+ - rescue => e
+ - puts e
+ - raise e
+ + if retries == options[:retries]
+ + fail "Unable to upload to s3://#{bucket_to}/#{key} after #{retries} attempts."
+ + end
+ + if retries > 0
+ + STDERR.puts "Warning: failed checksum for s3://#{bucket_to}/#{bucket_to}. Retrying #{options[:retries] - retries} more time(s)."
+ + sleep options[:retry_delay]
+ + end
+ +
+ + f = File.open(file)
+ + begin
+ + if $stdout.isatty
+ + f = Proxy.new(f)
+ + progress_bar = ProgressBar.new(File.basename(file), File.size(file)).tap do |p|
+ + p.file_transfer_mode
+ + end
+ + class << f
+ + attr_accessor :progress_bar
+ + def read(length, buffer=nil)
+ + begin
+ + result = @target.read(length, buffer)
+ + @progress_bar.inc result.length if result
+ + result
+ + rescue => e
+ + puts e
+ + raise e
+ + end
+ end
+ end
+ + f.progress_bar = progress_bar
+ + else
+ + progress_bar = nil
+ end
+ - f.progress_bar = progress_bar
+ - else
+ - progress_bar = nil
+ - end
+
+ - meta = @s3.interface.put(bucket_to, key, f, @headers)
+ - progress_bar.finish if progress_bar
+ + meta = @s3.interface.put(bucket_to, key, f, @headers)
+ + progress_bar.finish if progress_bar
+
+ - if options[:checksum]
+ - metadata = @s3.interface.head(bucket_to, key)
+ - actual_md5 = metadata["etag"] or fail "Unable to get etag/md5 for #{bucket_to}:#{key}"
+ - actual_md5 = actual_md5.sub(/^"/, "").sub(/"$/, "") # strip beginning and trailing quotes
+ - if actual_md5 =~ /-/
+ - STDERR.puts "Warning: invalid MD5 checksum in metadata; skipped checksum verification."
+ - actual_md5 = nil
+ + if options[:checksum]
+ + actual_md5 = s3_checksum(bucket_to, key)
+ + unless actual_md5.is_a? String
+ + STDERR.puts "Warning: invalid MD5 checksum in metadata; skipped checksum verification."
+ + actual_md5 = nil
+ + end
+ end
+ + rescue => e
+ + raise e unless options[:checksum]
+ + STDERR.puts e
+ + ensure
+ + f.close()
+ end
+ - rescue => e
+ - raise e unless options[:checksum]
+ - STDERR.puts e
+ - ensure
+ - f.close()
+ - end
+ - retries += 1
+ + retries += 1
+ end until options[:checksum] == false || actual_md5.nil? || expected_md5 == actual_md5
+ + else
+ + log "Already synchronized."
+ + end
+ end
+
+ def s3_to_local(bucket_from, key_from, dest, options = {})
+ log("Copy s3://#{bucket_from}/#{key_from} to #{dest}")
+ +
+ + actual_md5 = if options[:sync] && File.exist?(dest)
+ + md5(dest)
+ + end
+ +
+ retries = 0
+ begin
+ if retries == options[:retries]
+ @@ -292,34 +349,38 @@ def s3_to_local(bucket_from, key_from, dest, options = {})
+
+ f = File.new(dest, "wb")
+ begin
+ - size = nil
+ - if options[:checksum]
+ - metadata = @s3.interface.head(bucket_from, key_from)
+ - expected_md5 = metadata["etag"] or fail "Unable to get etag/md5 for #{bucket_from}:#{key_from}"
+ - expected_md5 = expected_md5.sub(/^"/, "").sub(/"$/, "") # strip beginning and trailing quotes
+ - if expected_md5 =~ /-/
+ + expected_md5 = if options[:checksum] || options[:sync]
+ + md5 = s3_checksum(bucket_from, key_from)
+ + if options[:sync] && !md5.is_a?(String)
+ + STDERR.puts "Warning: invalid MD5 checksum in metadata; file will be force-copied."
+ + nil
+ + elsif !md5.is_a? String
+ STDERR.puts "Warning: invalid MD5 checksum in metadata; skipped checksum verification."
+ - expected_md5 = nil
+ + nil
+ + else
+ + md5
+ end
+ - size = metadata["content-length"].to_i
+ - elsif $stdout.isatty
+ - metadata = @s3.interface.head(bucket_from, key_from)
+ - size = metadata["content-length"].to_i
+ end
+ - begin
+ - progress_bar = if size
+ - ProgressBar.new(File.basename(key_from), size).tap do |p|
+ - p.file_transfer_mode
+ +
+ + if (expected_md5 == nil) || (options[:sync] && expected_md5 != actual_md5)
+ + begin
+ + progress_bar = if $stdout.isatty
+ + size = s3_size(bucket_from, key_from)
+ + ProgressBar.new(File.basename(key_from), size).tap do |p|
+ + p.file_transfer_mode
+ + end
+ end
+ + @s3.interface.get(bucket_from, key_from) do |chunk|
+ + f.write(chunk)
+ + progress_bar.inc chunk.size if progress_bar
+ + end
+ + progress_bar.finish
+ + rescue => e
+ + progress_bar.halt if progress_bar
+ + raise e
+ end
+ - @s3.interface.get(bucket_from, key_from) do |chunk|
+ - f.write(chunk)
+ - progress_bar.inc chunk.size if progress_bar
+ - end
+ - progress_bar.finish
+ - rescue => e
+ - progress_bar.halt if progress_bar
+ - raise e
+ + else
+ + log("Already synchronized")
+ end
+ rescue => e
+ raise e unless options[:checksum]
+
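The 288-line file added above is lib/s3cp/#Untitled-7#, which appears to be an editor scratch copy of the patch that was accidentally packaged with the gem; the applied lib/s3cp/s3cp.rb diff follows next. One behavior visible in both copies is that the new filters refuse to run non-recursively; a quick sketch with a hypothetical bucket name:

    s3cp --include '\.csv$' data s3://my-bucket/data/
    # prints: -i (--include regex) option requires -r (recursive) option.
    # and exits with status 1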
lib/s3cp/s3cp.rb CHANGED
@@ -35,6 +35,9 @@ options[:overwrite] = ENV["S3CP_RETRIES"] ? (ENV["S3CP_OVERWRITE"] =~ /y|y
 options[:checksum] = ENV["S3CP_CHECKSUM"] ? (ENV["S3CP_CHECKSUM"] =~ /y|yes|true|1|^\s*$/i ? true : false) : true
 options[:retries] = ENV["S3CP_RETRIES"] ? ENV["S3CP_RETRIES"].to_i : 5
 options[:retry_delay] = ENV["S3CP_RETRY_DELAY"] ? ENV["S3CP_RETRY_DELAY"].to_i : 1
+ options[:include_regex] = []
+ options[:exclude_regex] = []
+ options[:sync] = false
 
 op = OptionParser.new do |opts|
 opts.banner = <<-BANNER
@@ -67,6 +70,10 @@ op = OptionParser.new do |opts|
 options[:overwrite] = false
 end
 
+ opts.on("--sync", "Sync mode: use checksum to determine if files need copying.") do
+ options[:sync] = true
+ end
+
 opts.on("--max-attempts N", "Number of attempts to upload/download until checksum matches (default #{options[:retries]})") do |attempts|
 options[:retries] = attempts.to_i
 end
@@ -90,6 +97,14 @@ op = OptionParser.new do |opts|
 opts.separator " AMZ headers: \'x-amz-acl: public-read\'"
 opts.separator ""
 
+ opts.on("-i REGEX", "--include REGEX", "Copy only files matching the following regular expression.") do |regex|
+ options[:include_regex] << regex
+ end
+
+ opts.on("-x REGEX", "--exclude REGEX", "Do not copy files matching provided regular expression.") do |regex|
+ options[:exclude_regex] << regex
+ end
+
 opts.on("--verbose", "Verbose mode") do
 options[:verbose] = true
 end
@@ -110,6 +125,16 @@ if ARGV.size < 2
 exit
 end
 
+ if options[:include_regex].any? && !options[:recursive]
+ puts "-i (--include regex) option requires -r (recursive) option."
+ exit(1)
+ end
+
+ if options[:exclude_regex].any? && !options[:recursive]
+ puts "-x (--exclude regex) option requires -r (recursive) option."
+ exit(1)
+ end
+
 destination = ARGV.last
 sources = ARGV[0..-2]
 
@@ -144,6 +169,16 @@ end
 @bucket = $1
 @prefix = $2
 
+ @includes = options[:include_regex].map { |s| Regexp.new(s) }
+ @excludes = options[:exclude_regex].map { |s| Regexp.new(s) }
+
+ def match(path)
+ matching = true
+ return false if @includes.any? && !@includes.any? { |regex| regex.match(path) }
+ return false if @excludes.any? && @excludes.any? { |regex| regex.match(path) }
+ true
+ end
+
 @s3 = S3CP.connect()
 
 def direction(from, to)
@@ -217,68 +252,86 @@ end
 
 def local_to_s3(bucket_to, key, file, options = {})
 log(with_headers("Copy #{file} to s3://#{bucket_to}/#{key}"))
- if options[:checksum]
- expected_md5 = md5(file)
+
+ expected_md5 = if options[:checksum] || options[:sync]
+ md5(file)
 end
- retries = 0
- begin
- if retries == options[:retries]
- fail "Unable to upload to s3://#{bucket_to}/#{key} after #{retries} attempts."
- end
- if retries > 0
- STDERR.puts "Warning: failed checksum for s3://#{bucket_to}/#{bucket_to}. Retrying #{options[:retries] - retries} more time(s)."
- sleep options[:retry_delay]
+
+ actual_md5 = if options[:sync]
+ md5 = s3_checksum(bucket_to, key)
+ case md5
+ when :not_found
+ nil
+ when :invalid
+ STDERR.puts "Warning: invalid MD5 checksum in metadata; file will be force-copied."
+ nil
+ else
+ md5
 end
+ end
 
- f = File.open(file)
+ if actual_md5.nil? || (options[:sync] && expected_md5 != actual_md5)
+ retries = 0
 begin
- if $stdout.isatty
- f = Proxy.new(f)
- progress_bar = ProgressBar.new(File.basename(file), File.size(file)).tap do |p|
- p.file_transfer_mode
- end
- class << f
- attr_accessor :progress_bar
- def read(length, buffer=nil)
- begin
- result = @target.read(length, buffer)
- @progress_bar.inc result.length if result
- result
- rescue => e
- puts e
- raise e
+ if retries == options[:retries]
+ fail "Unable to upload to s3://#{bucket_to}/#{key} after #{retries} attempts."
+ end
+ if retries > 0
+ STDERR.puts "Warning: failed checksum for s3://#{bucket_to}/#{bucket_to}. Retrying #{options[:retries] - retries} more time(s)."
+ sleep options[:retry_delay]
+ end
+
+ f = File.open(file)
+ begin
+ if $stdout.isatty
+ f = Proxy.new(f)
+ progress_bar = ProgressBar.new(File.basename(file), File.size(file)).tap do |p|
+ p.file_transfer_mode
+ end
+ class << f
+ attr_accessor :progress_bar
+ def read(length, buffer=nil)
+ begin
+ result = @target.read(length, buffer)
+ @progress_bar.inc result.length if result
+ result
+ rescue => e
+ puts e
+ raise e
+ end
 end
 end
+ f.progress_bar = progress_bar
+ else
+ progress_bar = nil
 end
- f.progress_bar = progress_bar
- else
- progress_bar = nil
- end
 
- meta = @s3.interface.put(bucket_to, key, f, @headers)
- progress_bar.finish if progress_bar
+ meta = @s3.interface.put(bucket_to, key, f, @headers)
+ progress_bar.finish if progress_bar
 
- if options[:checksum]
- metadata = @s3.interface.head(bucket_to, key)
- actual_md5 = metadata["etag"] or fail "Unable to get etag/md5 for #{bucket_to}:#{key}"
- actual_md5 = actual_md5.sub(/^"/, "").sub(/"$/, "") # strip beginning and trailing quotes
- if actual_md5 =~ /-/
- STDERR.puts "Warning: invalid MD5 checksum in metadata; skipped checksum verification."
- actual_md5 = nil
+ if options[:checksum]
+ actual_md5 = s3_checksum(bucket_to, key)
+ unless actual_md5.is_a? String
+ STDERR.puts "Warning: invalid MD5 checksum in metadata; skipped checksum verification."
+ actual_md5 = nil
+ end
 end
+ rescue => e
+ raise e unless options[:checksum]
+ STDERR.puts e
+ ensure
+ f.close()
 end
- rescue => e
- raise e unless options[:checksum]
- STDERR.puts e
- ensure
- f.close()
- end
- retries += 1
+ retries += 1
 end until options[:checksum] == false || actual_md5.nil? || expected_md5 == actual_md5
+ else
+ log "Already synchronized."
+ end
 end
 
 def s3_to_local(bucket_from, key_from, dest, options = {})
 log("Copy s3://#{bucket_from}/#{key_from} to #{dest}")
+
 retries = 0
 begin
 if retries == options[:retries]
@@ -289,43 +342,51 @@ def s3_to_local(bucket_from, key_from, dest, options = {})
 STDERR.puts "Warning: failed checksum for s3://#{bucket_from}/#{key_from}. Retrying #{options[:retries] - retries} more time(s)."
 sleep options[:retry_delay]
 end
-
- f = File.new(dest, "wb")
 begin
- size = nil
- if options[:checksum]
- metadata = @s3.interface.head(bucket_from, key_from)
- expected_md5 = metadata["etag"] or fail "Unable to get etag/md5 for #{bucket_from}:#{key_from}"
- expected_md5 = expected_md5.sub(/^"/, "").sub(/"$/, "") # strip beginning and trailing quotes
- if expected_md5 =~ /-/
+ expected_md5 = if options[:checksum] || options[:sync]
+ md5 = s3_checksum(bucket_from, key_from)
+ if options[:sync] && !md5.is_a?(String)
+ STDERR.puts "Warning: invalid MD5 checksum in metadata; file will be force-copied."
+ nil
+ elsif !md5.is_a? String
 STDERR.puts "Warning: invalid MD5 checksum in metadata; skipped checksum verification."
- expected_md5 = nil
+ nil
+ else
+ md5
 end
- size = metadata["content-length"].to_i
- elsif $stdout.isatty
- metadata = @s3.interface.head(bucket_from, key_from)
- size = metadata["content-length"].to_i
 end
- begin
- progress_bar = if size
- ProgressBar.new(File.basename(key_from), size).tap do |p|
- p.file_transfer_mode
+
+ actual_md5 = if options[:sync] && File.exist?(dest)
+ md5(dest)
+ end
+
+ if !options[:sync] || (expected_md5 != actual_md5)
+ f = File.new(dest, "wb")
+ begin
+ progress_bar = if $stdout.isatty
+ size = s3_size(bucket_from, key_from)
+ ProgressBar.new(File.basename(key_from), size).tap do |p|
+ p.file_transfer_mode
+ end
 end
+ @s3.interface.get(bucket_from, key_from) do |chunk|
+ f.write(chunk)
+ progress_bar.inc chunk.size if progress_bar
+ end
+ progress_bar.finish
+ rescue => e
+ progress_bar.halt if progress_bar
+ raise e
+ ensure
+ f.close()
 end
- @s3.interface.get(bucket_from, key_from) do |chunk|
- f.write(chunk)
- progress_bar.inc chunk.size if progress_bar
- end
- progress_bar.finish
- rescue => e
- progress_bar.halt if progress_bar
- raise e
+ else
+ log("Already synchronized")
+ return
 end
 rescue => e
 raise e unless options[:checksum]
 STDERR.puts e
- ensure
- f.close()
 end
 retries += 1
 end until options[:checksum] == false || expected_md5.nil? || md5(dest) == expected_md5
@@ -337,14 +398,33 @@ def s3_exist?(bucket, key)
 (metadata != nil)
 end
 
+ def s3_checksum(bucket, key)
+ begin
+ metadata = @s3.interface.head(bucket, key)
+ return :not_found unless metadata
+ rescue => e
+ return :not_found if e.is_a?(RightAws::AwsError) && e.http_code == "404"
+ raise e
+ end
+
+ md5 = metadata["etag"] or fail "Unable to get etag/md5 for #{bucket_to}:#{key}"
+ return :invalid unless md5
+
+ md5 = md5.sub(/^"/, "").sub(/"$/, "") # strip beginning and trailing quotes
+ return :invalid if md5 =~ /-/
+
+ md5
+ end
+
+ def s3_size(bucket, key)
+ metadata = @s3.interface.head(bucket, key)
+ metadata["content-length"].to_i
+ end
+
 def copy(from, to, options)
 bucket_from, key_from = S3CP.bucket_and_key(from)
 bucket_to, key_to = S3CP.bucket_and_key(to)
 
- #puts "bucket_from #{bucket_from.inspect} key_from #{key_from.inspect}"
- #puts "bucket_to #{bucket_to.inspect} key_from #{key_to.inspect}"
- #puts "direction #{direction(from, to)}"
-
 case direction(from, to)
 when :s3_to_s3
 if options[:recursive]
@@ -353,8 +433,10 @@ def copy(from, to, options)
 page[:contents].each { |entry| keys << entry[:key] }
 end
 keys.each do |key|
- dest = no_slash(key_to) + '/' + relative(key_from, key)
- s3_to_s3(bucket_from, key, bucket_to, dest) unless !options[:overwrite] && s3_exist?(bucket_to, dest)
+ if match(key)
+ dest = no_slash(key_to) + '/' + relative(key_from, key)
+ s3_to_s3(bucket_from, key, bucket_to, dest) unless !options[:overwrite] && s3_exist?(bucket_to, dest)
+ end
 end
 else
 s3_to_s3(bucket_from, key_from, bucket_to, key_to) unless !options[:overwrite] && s3_exist?(bucket_to, key_to)
@@ -363,9 +445,10 @@ def copy(from, to, options)
 if options[:recursive]
 files = Dir[from + "/**/*"]
 files.each do |f|
- f = File.expand_path(f)
- key = no_slash(key_to) + '/' + relative(from, f)
- local_to_s3(bucket_to, key, f, options) unless !options[:overwrite] && s3_exist?(bucket_to, key)
+ if File.file?(f) && match(f)
+ key = no_slash(key_to) + '/' + relative(from, f)
+ local_to_s3(bucket_to, key, File.expand_path(f), options) unless !options[:overwrite] && s3_exist?(bucket_to, key)
+ end
 end
 else
 local_to_s3(bucket_to, key_to, File.expand_path(from), options) unless !options[:overwrite] && s3_exist?(bucket_to, key_to)
@@ -377,12 +460,14 @@ def copy(from, to, options)
 page[:contents].each { |entry| keys << entry[:key] }
 end
 keys.each do |key|
- dest = File.expand_path(to) + '/' + relative(key_from, key)
- dest = File.join(dest, File.basename(key)) if File.directory?(dest)
- dir = File.dirname(dest)
- FileUtils.mkdir_p dir unless File.exist? dir
- fail "Destination path is not a directory: #{dir}" unless File.directory?(dir)
- s3_to_local(bucket_from, key, dest, options) unless !options[:overwrite] && File.exist?(dest)
+ if match(key)
+ dest = File.expand_path(to) + '/' + relative(key_from, key)
+ dest = File.join(dest, File.basename(key)) if File.directory?(dest)
+ dir = File.dirname(dest)
+ FileUtils.mkdir_p dir unless File.exist? dir
+ fail "Destination path is not a directory: #{dir}" unless File.directory?(dir)
+ s3_to_local(bucket_from, key, dest, options) unless !options[:overwrite] && File.exist?(dest)
+ end
 end
 else
 dest = File.expand_path(to)
@@ -390,6 +475,9 @@ def copy(from, to, options)
 s3_to_local(bucket_from, key_from, dest, options) unless !options[:overwrite] && File.exist?(dest)
 end
 when :local_to_local
+ if options[:include_regex].any? || options[:exclude_regex].any?
+ fail "Include/exclude not supported on local-to-local copies"
+ end
 if options[:recursive]
 FileUtils.cp_r from, to
 else
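To summarize the sync-related changes above: s3_checksum returns the object's ETag with surrounding quotes stripped, :not_found for a missing key, or :invalid when the ETag contains a dash (as multipart-upload ETags do), in which case the file is force-copied; when the local MD5 matches the remote checksum, the copy is skipped and "Already synchronized" is logged. A hedged usage sketch with a made-up bucket (the hunk that follows only bumps the gem's VERSION constant from 0.2.2 to 0.2.3):

    # repeatable recursive upload; unchanged files are detected via MD5/ETag and skipped
    s3cp -r --sync reports s3://my-bucket/reports/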
@@ -16,5 +16,5 @@
 # the License.
 
 module S3CP
- VERSION = "0.2.2"
+ VERSION = "0.2.3"
 end
metadata CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: s3cp
 version: !ruby/object:Gem::Version
- hash: 19
+ hash: 17
 prerelease:
 segments:
 - 0
 - 2
- - 2
- version: 0.2.2
+ - 3
+ version: 0.2.3
 platform: ruby
 authors:
 - Alex Boisvert
@@ -15,7 +15,7 @@ autorequire:
 bindir: bin
 cert_chain: []
 
- date: 2012-02-23 00:00:00 Z
+ date: 2012-02-24 00:00:00 Z
 dependencies:
 - !ruby/object:Gem::Dependency
 prerelease: false
@@ -154,6 +154,7 @@ files:
 - lib/s3cp/utils.rb
 - lib/s3cp/s3mod.rb
 - lib/s3cp/s3cat.rb
+ - lib/s3cp/#Untitled-7#
 - lib/s3cp/completion.rb
 - History.txt
 - README.md