s3cp 0.1.1 → 0.1.2
- data/History.txt +11 -0
- data/lib/s3cp/s3cp.rb +93 -34
- data/lib/s3cp/s3ls.rb +1 -1
- data/lib/s3cp/version.rb +1 -1
- metadata +4 -4
data/History.txt
CHANGED
@@ -1,3 +1,14 @@
+=== 0.1.3 / (Pending)
+
+=== 0.1.2 / 2011-09-29
+
+* s3ls now displays entries with s3://@bucket/ prefix (Josh Carver)
+* s3cp now supports passing headers (--headers) (Donnie Flood)
+
+=== 0.1.1 / 2011-09-22
+
+* Added s3mod command (Josh Carver)
+
 === 0.1 / 2011-04-05
 
 * First release
data/lib/s3cp/s3cp.rb
CHANGED
@@ -10,11 +10,35 @@ require 's3cp/utils'
 
 # Parse arguments
 options = {}
-options[:verbose] =
+options[:verbose] = $stdout.isatty ? true : false
+options[:headers] = []
 
 op = OptionParser.new do |opts|
-  opts.banner =
+  opts.banner = <<-BANNER
+    s3cp supports 4 copying use cases:
+      1. Copy from local machine to S3
+      2. Copy from S3 to local machine
+      3. Copy from S3 to S3
+      4. Copy from local machine to another path on local machine (for completeness)
+
+    Local to S3:
+      s3cp LOCAL_PATH S3_PATH
+
+    S3 to Local:
+      s3cp S3_PATH LOCAL_PATH
+
+    S3 to S3:
+      s3cp S3_PATH S3_PATH2
+
+    Local to Local:
+      s3cp LOCAL_PATH LOCAL_PATH2
+
+  BANNER
   opts.separator ''
+
+  opts.on("-h", '--headers \'Header1: Header1Value\',\'Header2: Header2Value\'', Array, "Headers to set on the item in S3. This can include http headers like \'Content-Type: image/jpg\' or AMZ headers like: \'x-amz-acl: public-read\'" ) do |h|
+    options[:headers] = h
+  end
 
   opts.on("-r", "Recursive mode") do
     options[:recursive] = true
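
The new --headers flag relies on OptionParser's built-in Array coercion to split a comma-separated argument into one string per header. A minimal standalone sketch, not part of the gem; the H1,H2 placeholder and the header values are made up for illustration:

    require 'optparse'

    options = { :headers => [] }
    OptionParser.new do |opts|
      # Same idea as the -h/--headers switch added above; Array splits the raw value on commas.
      opts.on("-h", "--headers H1,H2", Array, "Headers to set on the item in S3") do |h|
        options[:headers] = h
      end
    end.parse!(["--headers", "Content-Type: image/jpg,x-amz-acl: public-read"])

    p options[:headers]
    # => ["Content-Type: image/jpg", "x-amz-acl: public-read"]

In the real command this array is later turned into a hash by the headers_array_to_hash helper added further below.
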
@@ -52,6 +76,10 @@ if options[:debug]
   puts "Options: \n#{options.inspect}"
 end
 
+if options[:verbose]
+  @verbose = true
+end
+
 @bucket = $1
 @prefix = $2
 
@@ -78,6 +106,63 @@ def relative(base, path)
   no_slash(path[base.length..-1])
 end
 
+def log(msg)
+  puts msg if @verbose
+end
+
+def headers_array_to_hash(header_array)
+  headers = {}
+  header_array.each do |header|
+    header_parts = header.split(": ", 2)
+    if header_parts.size == 2
+      headers[header_parts[0]] = header_parts[1]
+    else
+      log("Header ignored because of error splitting [#{header}]. Expected colon delimiter; e.g. Header: Value")
+    end
+  end
+  headers
+end
+@headers = headers_array_to_hash(options[:headers])
+
+def with_headers(msg)
+  unless @headers.empty?
+    msg += " with headers:"
+    msg += @headers.collect{|k,v| "'#{k}: #{v}'"}.join(", ")
+  end
+  msg
+end
+
+def s3_to_s3(bucket_from, key, bucket_to, dest)
+  log(with_headers("Copy s3://#{bucket_from}/#{key} to s3://#{bucket_to}/#{dest}"))
+  if @headers.empty?
+    @s3.interface.copy(bucket_from, key, bucket_to, dest)
+  else
+    @s3.interface.copy(bucket_from, key, bucket_to, dest, :copy, @headers)
+  end
+end
+
+def local_to_s3(bucket_to, key, file)
+  log(with_headers("Copy #{file} to s3://#{bucket_to}/#{key}"))
+  f = File.open(file)
+  begin
+    @s3.interface.put(bucket_to, key, f, @headers)
+  ensure
+    f.close()
+  end
+end
+
+def s3_to_local(bucket_from, key_from, dest)
+  log("Copy s3://#{bucket_from}/#{key_from} to #{dest}")
+  f = File.new(dest, File::CREAT|File::RDWR)
+  begin
+    @s3.interface.get(bucket_from, key_from) do |chunk|
+      f.write(chunk)
+    end
+  ensure
+    f.close()
+  end
+end
+
 def copy(from, to, options)
   bucket_from, key_from = S3CP.bucket_and_key(from)
   bucket_to, key_to = S3CP.bucket_and_key(to)
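
To see what the new helpers do with that array, here is the headers_array_to_hash function from the hunk above run standalone against some invented header strings. The real code routes the warning through log(), which only prints in verbose mode; plain puts is used here so the sketch runs on its own:

    def headers_array_to_hash(header_array)
      headers = {}
      header_array.each do |header|
        header_parts = header.split(": ", 2)   # split on the first ": " only
        if header_parts.size == 2
          headers[header_parts[0]] = header_parts[1]
        else
          puts "Header ignored because of error splitting [#{header}]. Expected colon delimiter; e.g. Header: Value"
        end
      end
      headers
    end

    p headers_array_to_hash(["Content-Type: image/jpg", "x-amz-acl: public-read", "bogus"])
    # Prints the "Header ignored" warning for "bogus", then:
    # {"Content-Type"=>"image/jpg", "x-amz-acl"=>"public-read"}

The resulting hash is then passed by s3_to_s3 as the extra headers argument to the S3 copy call (with the :copy directive), and by local_to_s3 to put; s3_to_local does not use it.
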
@@ -95,12 +180,10 @@ def copy(from, to, options)
       end
       keys.each do |key|
         dest = no_slash(key_to) + '/' + relative(key_from, key)
-
-        @s3.interface.copy(bucket_from, key, bucket_to, dest)
+        s3_to_s3(bucket_from, key, bucket_to, dest)
       end
     else
-
-      @s3.interface.copy(bucket_from, key_from, bucket_to, key_to)
+      s3_to_s3(bucket_from, key_from, bucket_to, key_to)
     end
   when :local_to_s3
     if options[:recursive]
@@ -108,18 +191,10 @@ def copy(from, to, options)
       files.each do |f|
         f = File.expand_path(f)
         key = no_slash(key_to) + '/' + relative(from, f)
-
-        @s3.interface.put(bucket_to, key, File.open(f))
+        local_to_s3(bucket_to, key, f)
       end
     else
-
-      puts "Copy #{f} to s3://#{bucket_to}/#{key_to}"
-      f = File.open(f)
-      begin
-        @s3.interface.put(bucket_to, key_to, f)
-      ensure
-        f.close()
-      end
+      local_to_s3(bucket_to, key_to, File.expand_path(from))
     end
   when :s3_to_local
     if options[:recursive]
@@ -130,31 +205,15 @@ def copy(from, to, options)
       keys.each do |key|
         dest = File.expand_path(to) + '/' + relative(key_from, key)
         dest = File.join(dest, File.basename(key)) if File.directory?(dest)
-        puts "Copy s3://#{bucket_from}/#{key} to #{dest}"
         dir = File.dirname(dest)
         FileUtils.mkdir_p dir unless File.exist? dir
         fail "Destination path is not a directory: #{dir}" unless File.directory?(dir)
-
-        begin
-          @s3.interface.get(bucket_from, key) do |chunk|
-            f.write(chunk)
-          end
-        ensure
-          f.close()
-        end
+        s3_to_local(bucket_from, key, dest)
       end
     else
       dest = File.expand_path(to)
       dest = File.join(dest, File.basename(key_from)) if File.directory?(dest)
-
-      f = File.new(dest, File::CREAT|File::RDWR)
-      begin
-        @s3.interface.get(bucket_from, key_from) do |chunk|
-          f.write(chunk)
-        end
-      ensure
-        f.close()
-      end
+      s3_to_local(bucket_from, key_from, dest)
     end
   when :local_to_local
     if options[:recursive]
data/lib/s3cp/s3ls.rb
CHANGED
@@ -64,7 +64,7 @@ end
 rows = 0
 @s3.interface.incrementally_list_bucket(@bucket, :prefix => @key) do |page|
   page[:contents].each do |entry|
-    key = entry[:key]
+    key = "s3://#{@bucket}/#{entry[:key]}"
     last_modified = DateTime.parse(entry[:last_modified])
     if options[:long_format]
       puts "#{last_modified.strftime(options[:date_format])} #{key}"
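
A small sketch of what this change means for long-format output, using an invented bucket and entry, and a literal strftime pattern standing in for options[:date_format]:

    require 'date'

    @bucket = "mybucket"                                       # invented example bucket
    entry   = { :key => "logs/app.log",
                :last_modified => "2011-09-29T12:00:00.000Z" } # shape of a listing entry

    key = "s3://#{@bucket}/#{entry[:key]}"                     # was: key = entry[:key]
    last_modified = DateTime.parse(entry[:last_modified])
    puts "#{last_modified.strftime('%x %X')} #{key}"
    # => 09/29/11 12:00:00 s3://mybucket/logs/app.log
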
data/lib/s3cp/version.rb
CHANGED
metadata
CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: s3cp
 version: !ruby/object:Gem::Version
-  hash: 
+  hash: 31
   prerelease: 
   segments: 
   - 0
   - 1
-  - 
-  version: 0.1.
+  - 2
+  version: 0.1.2
 platform: ruby
 authors: 
 - Alex Boisvert
@@ -15,7 +15,7 @@ autorequire:
 bindir: bin
 cert_chain: []
 
-date: 2011-09-
+date: 2011-09-29 00:00:00 Z
 dependencies: 
 - !ruby/object:Gem::Dependency 
   version_requirements: &id001 !ruby/object:Gem::Requirement