mgreenly-s3sync 1.2.4
Sign up to get free protection for your applications and to get access to all the features.
- data/README.rdoc +11 -0
- data/Rakefile +35 -0
- data/bin/s3cmd +3 -0
- data/bin/s3sync +3 -0
- data/data/original_readme.txt +387 -0
- data/data/original_s3cmd.txt +172 -0
- data/data/s3config.yml.example +3 -0
- data/lib/s3sync/HTTPStreaming.rb +103 -0
- data/lib/s3sync/S3.rb +707 -0
- data/lib/s3sync/S3_s3sync_mod.rb +143 -0
- data/lib/s3sync/S3encoder.rb +50 -0
- data/lib/s3sync/s3cmd.rb +245 -0
- data/lib/s3sync/s3config.rb +27 -0
- data/lib/s3sync/s3sync.rb +724 -0
- data/lib/s3sync/s3try.rb +161 -0
- data/lib/s3sync/thread_generator.rb +383 -0
- data/s3sync.gemspec +34 -0
- metadata +78 -0
@@ -0,0 +1,143 @@
|
|
1
|
+
# This software code is made available "AS IS" without warranties of any
|
2
|
+
# kind. You may copy, display, modify and redistribute the software
|
3
|
+
# code either by itself or as incorporated into your code; provided that
|
4
|
+
# you do not remove any proprietary notices. Your use of this software
|
5
|
+
# code is at your own risk and you waive any claim against Amazon
|
6
|
+
# Digital Services, Inc. or its affiliates with respect to your use of
|
7
|
+
# this software code. (c) 2006 Amazon Digital Services, Inc. or its
|
8
|
+
# affiliates.
|
9
|
+
#
|
10
|
+
# This software code is made available "AS IS" without warranties of any
|
11
|
+
# kind. You may copy, display, modify and redistribute the software
|
12
|
+
# code either by itself or as incorporated into your code; provided that
|
13
|
+
# you do not remove any proprietary notices. Your use of this software
|
14
|
+
# code is at your own risk and you waive any claim against the author
|
15
|
+
# with respect to your use of this software code.
|
16
|
+
# (c) 2007 s3sync.net
|
17
|
+
#
|
18
|
+
require 'S3'
|
19
|
+
require 'HTTPStreaming'
|
20
|
+
|
21
|
+
# The purpose of this file is to overlay the S3 library from AWS
|
22
|
+
# to add some functionality
|
23
|
+
# (without changing the file itself or requiring a specific version)
|
24
|
+
# It still isn't perfectly robust, i.e. if radical changes are made
|
25
|
+
# to the underlying lib this stuff will need updating.
|
26
|
+
|
27
|
+
module S3
|
28
|
+
class AWSAuthConnection
|
29
|
+
|
30
|
+
# Build and start an HTTP(S) connection for the given bucket/host,
# honoring the configured calling format and optional proxy settings.
# Returns the started Net::HTTP object.
def make_http(bucket='', host='', proxy_host=nil, proxy_port=nil, proxy_user=nil, proxy_pass=nil)
  # choose the server name according to the calling format
  server =
    if host != ''
      host
    elsif bucket.empty?
      # a bucketless request (i.e. list all buckets) reverts to the
      # regular domain, since vanity domains make no sense for it
      @server
    elsif @calling_format == CallingFormat::SUBDOMAIN
      "#{bucket}.#{@server}"
    elsif @calling_format == CallingFormat::VANITY
      bucket
    else
      @server
    end

  # Net::HTTP::Proxy automatically degrades to a direct connection
  # when proxy_host is nil
  http = Net::HTTP::Proxy(proxy_host, proxy_port, proxy_user, proxy_pass).new(server, @port)
  http.use_ssl = @is_secure
  http.verify_mode = @verify_mode
  http.ca_file = @ca_file
  http.ca_path = @ca_path
  http.start
  http
end
|
58
|
+
|
59
|
+
# add support for streaming the response body to an IO stream
|
60
|
+
alias __make_request__ make_request
|
61
|
+
# Issue a signed S3 request over the shared persistent connection in
# $S3syncHttp, optionally streaming the response body to streamOut.
# Replaces the stock AWS-library version (aliased as __make_request__).
def make_request(method, bucket='', key='', path_args={}, headers={}, data='', metadata={}, streamOut=nil)
  # REGULAR calling format carries the bucket in the path; the other
  # formats put it in the host name (see make_http)
  path = ''
  path << "/#{bucket}" if (not bucket.empty?) and (@calling_format == CallingFormat::REGULAR)
  # the slash after the bucket is always added; the key follows when non-empty
  path << "/#{key}"
  # the '?' is unconditional: signature and credentials follow the path args
  path << '?'
  path << S3.path_args_hash_to_string(path_args)

  req = method_to_request_class(method).new("#{path}")

  set_headers(req, headers)
  set_headers(req, metadata, METADATA_PREFIX)
  set_headers(req, {'Connection' => 'keep-alive', 'Keep-Alive' => '300'})
  set_aws_auth_header(req, @aws_access_key_id, @aws_secret_access_key, bucket, key, path_args)

  http = $S3syncHttp

  # only send a body for verbs that permit one
  body = req.request_body_permitted? ? data : nil
  http.request(req, body, streamOut)
end
|
93
|
+
|
94
|
+
# a "get" operation that sends the body to an IO stream
|
95
|
+
# A "get" operation whose response body is written to an IO stream.
def get_stream(bucket, key, headers={}, streamOut=nil)
  GetResponse.new(make_request('GET', bucket, CGI::escape(key), {}, headers, '', {}, streamOut))
end
|
98
|
+
|
99
|
+
# a "get" operation that sends the body to an IO stream
|
100
|
+
# A "get" operation with extra query (path) arguments whose response
# body is written to an IO stream.
def get_query_stream(bucket, key, path_args={}, headers={}, streamOut=nil)
  GetResponse.new(make_request('GET', bucket, CGI::escape(key), path_args, headers, '', {}, streamOut))
end
|
103
|
+
|
104
|
+
# HEAD an object (or, with the default nil key, the bucket itself).
# FIX: the original passed the nil default straight into CGI::escape,
# which raises TypeError; key.to_s makes the documented default usable
# while leaving string keys untouched.
def head(bucket, key=nil, headers={})
  return GetResponse.new(make_request('HEAD', bucket, CGI::escape(key.to_s), {}, headers, '', {}))
end
|
107
|
+
# Replace the stock create_bucket so the PUT flows through our modified
# make_request (undef first so the redefinition is clean).
undef create_bucket
def create_bucket(bucket, object)
  object = S3Object.new(object) unless object.instance_of? S3Object
  resp = make_request('PUT', bucket, '', {}, {}, object.data, object.metadata)
  Response.new(resp)
end
|
114
|
+
# no, because internally the library does not support the header,wait,body paradigm, so this is useless
|
115
|
+
#alias __put__ put
|
116
|
+
#def put(bucket, key, object, headers={})
|
117
|
+
# headers['Expect'] = "100-continue"
|
118
|
+
# __put__(bucket, key, object, headers)
|
119
|
+
#end
|
120
|
+
|
121
|
+
|
122
|
+
# allow ssl validation
|
123
|
+
attr_accessor :verify_mode
|
124
|
+
attr_accessor :ca_path
|
125
|
+
attr_accessor :ca_file
|
126
|
+
|
127
|
+
end
|
128
|
+
module CallingFormat
  # Map a configuration string ('REGULAR', 'SUBDOMAIN', 'VANITY') to the
  # corresponding CallingFormat constant; raises for anything else.
  def CallingFormat.string_to_format(s)
    case s
    when 'REGULAR'   then CallingFormat::REGULAR
    when 'SUBDOMAIN' then CallingFormat::SUBDOMAIN
    when 'VANITY'    then CallingFormat::VANITY
    else raise "Unsupported calling format #{s}"
    end
  end
end
|
142
|
+
|
143
|
+
end
|
@@ -0,0 +1,50 @@
|
|
1
|
+
# This software code is made available "AS IS" without warranties of any
|
2
|
+
# kind. You may copy, display, modify and redistribute the software
|
3
|
+
# code either by itself or as incorporated into your code; provided that
|
4
|
+
# you do not remove any proprietary notices. Your use of this software
|
5
|
+
# code is at your own risk and you waive any claim against the author
|
6
|
+
# with respect to your use of this software code.
|
7
|
+
# (c) 2007 s3sync.net
|
8
|
+
#
|
9
|
+
|
10
|
+
# The purpose of this file is to overlay the cgi class
|
11
|
+
# to add some functionality
|
12
|
+
# (without changing the file itself or requiring a specific version)
|
13
|
+
# It still isn't perfectly robust, i.e. if radical changes are made
|
14
|
+
# to the underlying lib this stuff will need updating.
|
15
|
+
|
16
|
+
require 'cgi'
|
17
|
+
require 'iconv' # for UTF-8 conversion
|
18
|
+
|
19
|
+
# thanks to http://www.redhillconsulting.com.au/blogs/simon/archives/000326.html
|
20
|
+
# Mixed into CGI (see the include at the bottom of this file) to wrap
# CGI.escape with three optional behaviors, toggled via the attr_writers
# below: leave slashes unescaped, emit %20 instead of '+', and convert
# the input to UTF-8 first (via Iconv, from @nativeCharacterEncoding).
# Hook technique per redhillconsulting blog post.
module S3ExtendCGI
  def self.included(base)
    base.extend(ClassMethods)
    base.class_eval do
      class << self
        # keep the original escape reachable; guard against double-aliasing
        alias_method :S3Extend_escape_orig, :escape unless method_defined?(:S3Extend_escape_orig)
        alias_method :escape, :S3Extend_escape
      end
    end
  end
  module ClassMethods
    @@exemptSlashesInEscape = false
    attr_writer :exemptSlashesInEscape
    @@usePercent20InEscape = false
    attr_writer :usePercent20InEscape
    @@nativeCharacterEncoding = "ISO-8859-1"
    attr_writer :nativeCharacterEncoding
    @@useUTF8InEscape = false
    attr_writer :useUTF8InEscape

    # The escape wrapper itself; installed over CGI.escape by included().
    def S3Extend_escape(string)
      text = string
      text = Iconv.iconv("UTF-8", @nativeCharacterEncoding, string).join if @useUTF8InEscape
      escaped = S3Extend_escape_orig(text)
      escaped.gsub!(/%2f/i, "/") if @exemptSlashesInEscape
      escaped.gsub!("+", "%20") if @usePercent20InEscape
      escaped
    end
  end
end
|
50
|
+
CGI.send(:include, S3ExtendCGI)
|
data/lib/s3sync/s3cmd.rb
ADDED
@@ -0,0 +1,245 @@
|
|
1
|
+
#!/usr/bin/env ruby
|
2
|
+
# This software code is made available "AS IS" without warranties of any
|
3
|
+
# kind. You may copy, display, modify and redistribute the software
|
4
|
+
# code either by itself or as incorporated into your code; provided that
|
5
|
+
# you do not remove any proprietary notices. Your use of this software
|
6
|
+
# code is at your own risk and you waive any claim against the author
|
7
|
+
# with respect to your use of this software code.
|
8
|
+
# (c) 2007 s3sync.net
|
9
|
+
#
|
10
|
+
|
11
|
+
module S3sync
|
12
|
+
|
13
|
+
# always look "here" for include files (thanks aktxyz)
|
14
|
+
$LOAD_PATH << File.expand_path(File.dirname(__FILE__))
|
15
|
+
|
16
|
+
require 's3try'
|
17
|
+
|
18
|
+
$S3CMD_VERSION = '1.2.4'
|
19
|
+
|
20
|
+
require 'getoptlong'
|
21
|
+
|
22
|
+
# after other mods, so we don't overwrite yaml vals with defaults
|
23
|
+
require 's3config'
|
24
|
+
include S3Config
|
25
|
+
|
26
|
+
def S3sync.s3cmdMain
|
27
|
+
# ---------- OPTIONS PROCESSING ---------- #
|
28
|
+
|
29
|
+
$S3syncOptions = Hash.new
|
30
|
+
optionsParser = GetoptLong.new(
|
31
|
+
[ '--help', '-h', GetoptLong::NO_ARGUMENT ],
|
32
|
+
[ '--ssl', '-s', GetoptLong::NO_ARGUMENT ],
|
33
|
+
[ '--verbose', '-v', GetoptLong::NO_ARGUMENT ],
|
34
|
+
[ '--dryrun', '-n', GetoptLong::NO_ARGUMENT ],
|
35
|
+
[ '--debug', '-d', GetoptLong::NO_ARGUMENT ],
|
36
|
+
[ '--progress', GetoptLong::NO_ARGUMENT ],
|
37
|
+
[ '--expires-in', GetoptLong::REQUIRED_ARGUMENT ]
|
38
|
+
)
|
39
|
+
|
40
|
+
def S3sync.s3cmdUsage(message = nil)
|
41
|
+
$stderr.puts message if message
|
42
|
+
name = $0.split('/').last
|
43
|
+
$stderr.puts <<"ENDUSAGE"
|
44
|
+
#{name} [options] <command> [arg(s)]\t\tversion #{$S3CMD_VERSION}
|
45
|
+
--help -h --verbose -v --dryrun -n
|
46
|
+
--ssl -s --debug -d --progress
|
47
|
+
--expires-in=( <# of seconds> | [#d|#h|#m|#s] )
|
48
|
+
|
49
|
+
Commands:
|
50
|
+
#{name} listbuckets [headers]
|
51
|
+
#{name} createbucket <bucket> [constraint (i.e. EU)]
|
52
|
+
#{name} deletebucket <bucket> [headers]
|
53
|
+
#{name} list <bucket>[:prefix] [max/page] [delimiter] [headers]
|
54
|
+
#{name} location <bucket> [headers]
|
55
|
+
#{name} delete <bucket>:key [headers]
|
56
|
+
#{name} deleteall <bucket>[:prefix] [headers]
|
57
|
+
#{name} get|put <bucket>:key <file> [headers]
|
58
|
+
ENDUSAGE
|
59
|
+
exit
|
60
|
+
end #usage
|
61
|
+
|
62
|
+
begin
|
63
|
+
optionsParser.each {|opt, arg| $S3syncOptions[opt] = (arg || true)}
|
64
|
+
rescue StandardError
|
65
|
+
s3cmdUsage # the parser already printed an error message
|
66
|
+
end
|
67
|
+
s3cmdUsage if $S3syncOptions['--help']
|
68
|
+
$S3syncOptions['--verbose'] = true if $S3syncOptions['--dryrun'] or $S3syncOptions['--debug'] or $S3syncOptions['--progress']
|
69
|
+
$S3syncOptions['--ssl'] = true if $S3syncOptions['--ssl'] # change from "" to true to appease s3 port chooser
|
70
|
+
|
71
|
+
if $S3syncOptions['--expires-in'] =~ /d|h|m|s/
|
72
|
+
e = $S3syncOptions['--expires-in']
|
73
|
+
days = (e =~ /(\d+)d/)? (/(\d+)d/.match(e))[1].to_i : 0
|
74
|
+
hours = (e =~ /(\d+)h/)? (/(\d+)h/.match(e))[1].to_i : 0
|
75
|
+
minutes = (e =~ /(\d+)m/)? (/(\d+)m/.match(e))[1].to_i : 0
|
76
|
+
seconds = (e =~ /(\d+)s/)? (/(\d+)s/.match(e))[1].to_i : 0
|
77
|
+
$S3syncOptions['--expires-in'] = seconds + 60 * ( minutes + 60 * ( hours + 24 * ( days ) ) )
|
78
|
+
end
|
79
|
+
|
80
|
+
# ---------- CONNECT ---------- #
|
81
|
+
S3sync::s3trySetup
|
82
|
+
# ---------- COMMAND PROCESSING ---------- #
|
83
|
+
|
84
|
+
command, path, file = ARGV
|
85
|
+
|
86
|
+
s3cmdUsage("You didn't set up your environment variables; see README.txt") if not($AWS_ACCESS_KEY_ID and $AWS_SECRET_ACCESS_KEY)
|
87
|
+
s3cmdUsage("Need a command (etc)") if not command
|
88
|
+
|
89
|
+
path = '' unless path
|
90
|
+
path = path.dup # modifiable
|
91
|
+
path += ':' unless path.match(':')
|
92
|
+
bucket = (/^(.*?):/.match(path))[1]
|
93
|
+
path.replace((/:(.*)$/.match(path))[1])
|
94
|
+
|
95
|
+
case command
|
96
|
+
when "delete"
|
97
|
+
s3cmdUsage("Need a bucket") if bucket == ''
|
98
|
+
s3cmdUsage("Need a key") if path == ''
|
99
|
+
headers = hashPairs(ARGV[2...ARGV.length])
|
100
|
+
$stderr.puts "delete #{bucket}:#{path} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
|
101
|
+
S3try(:delete, bucket, path) unless $S3syncOptions['--dryrun']
|
102
|
+
when "deleteall"
|
103
|
+
s3cmdUsage("Need a bucket") if bucket == ''
|
104
|
+
headers = hashPairs(ARGV[2...ARGV.length])
|
105
|
+
$stderr.puts "delete ALL entries in #{bucket}:#{path} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
|
106
|
+
more = true
|
107
|
+
marker = nil
|
108
|
+
while more do
|
109
|
+
res = s3cmdList(bucket, path, nil, nil, marker)
|
110
|
+
res.entries.each do |item|
|
111
|
+
# the s3 commands (with my modified UTF-8 conversion) expect native char encoding input
|
112
|
+
key = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
|
113
|
+
$stderr.puts "delete #{bucket}:#{key} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
|
114
|
+
S3try(:delete, bucket, key) unless $S3syncOptions['--dryrun']
|
115
|
+
end
|
116
|
+
more = res.properties.is_truncated
|
117
|
+
marker = (res.properties.next_marker)? res.properties.next_marker : ((res.entries.length > 0) ? res.entries.last.key : nil)
|
118
|
+
# get this into local charset; when we pass it to s3 that is what's expected
|
119
|
+
marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join if marker
|
120
|
+
end
|
121
|
+
when "list"
|
122
|
+
s3cmdUsage("Need a bucket") if bucket == ''
|
123
|
+
max, delim = ARGV[2..3]
|
124
|
+
headers = hashPairs(ARGV[4...ARGV.length])
|
125
|
+
$stderr.puts "list #{bucket}:#{path} #{max} #{delim} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
|
126
|
+
puts "--------------------"
|
127
|
+
|
128
|
+
more = true
|
129
|
+
marker = nil
|
130
|
+
while more do
|
131
|
+
res = s3cmdList(bucket, path, max, delim, marker, headers)
|
132
|
+
if delim
|
133
|
+
res.common_prefix_entries.each do |item|
|
134
|
+
|
135
|
+
puts "dir: " + Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.prefix).join
|
136
|
+
end
|
137
|
+
puts "--------------------"
|
138
|
+
end
|
139
|
+
res.entries.each do |item|
|
140
|
+
puts Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", item.key).join
|
141
|
+
end
|
142
|
+
if res.properties.is_truncated
|
143
|
+
printf "More? Y/n: "
|
144
|
+
more = (STDIN.gets.match('^[Yy]?$'))
|
145
|
+
marker = (res.properties.next_marker)? res.properties.next_marker : ((res.entries.length > 0) ? res.entries.last.key : nil)
|
146
|
+
# get this into local charset; when we pass it to s3 that is what's expected
|
147
|
+
marker = Iconv.iconv($S3SYNC_NATIVE_CHARSET, "UTF-8", marker).join if marker
|
148
|
+
|
149
|
+
else
|
150
|
+
more = false
|
151
|
+
end
|
152
|
+
end # more
|
153
|
+
when "listbuckets"
|
154
|
+
headers = hashPairs(ARGV[1...ARGV.length])
|
155
|
+
$stderr.puts "list all buckets #{headers.inspect if headers}" if $S3syncOptions['--verbose']
|
156
|
+
if $S3syncOptions['--expires-in']
|
157
|
+
$stdout.puts S3url(:list_all_my_buckets, headers)
|
158
|
+
else
|
159
|
+
res = S3try(:list_all_my_buckets, headers)
|
160
|
+
res.entries.each do |item|
|
161
|
+
puts item.name
|
162
|
+
end
|
163
|
+
end
|
164
|
+
when "createbucket"
|
165
|
+
s3cmdUsage("Need a bucket") if bucket == ''
|
166
|
+
lc = ''
|
167
|
+
if(ARGV.length > 2)
|
168
|
+
lc = '<CreateBucketConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01"><LocationConstraint>' + ARGV[2] + '</LocationConstraint></CreateBucketConfiguration>'
|
169
|
+
end
|
170
|
+
$stderr.puts "create bucket #{bucket} #{lc}" if $S3syncOptions['--verbose']
|
171
|
+
S3try(:create_bucket, bucket, lc) unless $S3syncOptions['--dryrun']
|
172
|
+
when "deletebucket"
|
173
|
+
s3cmdUsage("Need a bucket") if bucket == ''
|
174
|
+
headers = hashPairs(ARGV[2...ARGV.length])
|
175
|
+
$stderr.puts "delete bucket #{bucket} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
|
176
|
+
S3try(:delete_bucket, bucket, headers) unless $S3syncOptions['--dryrun']
|
177
|
+
when "location"
|
178
|
+
s3cmdUsage("Need a bucket") if bucket == ''
|
179
|
+
headers = hashPairs(ARGV[2...ARGV.length])
|
180
|
+
query = Hash.new
|
181
|
+
query['location'] = 'location'
|
182
|
+
$stderr.puts "location request bucket #{bucket} #{query.inspect} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
|
183
|
+
S3try(:get_query_stream, bucket, '', query, headers, $stdout) unless $S3syncOptions['--dryrun']
|
184
|
+
when "get"
|
185
|
+
s3cmdUsage("Need a bucket") if bucket == ''
|
186
|
+
s3cmdUsage("Need a key") if path == ''
|
187
|
+
s3cmdUsage("Need a file") if file == ''
|
188
|
+
headers = hashPairs(ARGV[3...ARGV.length])
|
189
|
+
$stderr.puts "get from key #{bucket}:#{path} into #{file} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
|
190
|
+
unless $S3syncOptions['--dryrun']
|
191
|
+
if $S3syncOptions['--expires-in']
|
192
|
+
$stdout.puts S3url(:get, bucket, path, headers)
|
193
|
+
else
|
194
|
+
outStream = File.open(file, 'wb')
|
195
|
+
outStream = ProgressStream.new(outStream) if $S3syncOptions['--progress']
|
196
|
+
S3try(:get_stream, bucket, path, headers, outStream)
|
197
|
+
outStream.close
|
198
|
+
end
|
199
|
+
end
|
200
|
+
when "put"
|
201
|
+
s3cmdUsage("Need a bucket") if bucket == ''
|
202
|
+
s3cmdUsage("Need a key") if path == ''
|
203
|
+
s3cmdUsage("Need a file") if file == ''
|
204
|
+
headers = hashPairs(ARGV[3...ARGV.length])
|
205
|
+
stream = File.open(file, 'rb')
|
206
|
+
stream = ProgressStream.new(stream, File.stat(file).size) if $S3syncOptions['--progress']
|
207
|
+
s3o = S3::S3Object.new(stream, {}) # support meta later?
|
208
|
+
headers['Content-Length'] = FileTest.size(file).to_s
|
209
|
+
$stderr.puts "put to key #{bucket}:#{path} from #{file} #{headers.inspect if headers}" if $S3syncOptions['--verbose']
|
210
|
+
S3try(:put, bucket, path, s3o, headers) unless $S3syncOptions['--dryrun']
|
211
|
+
stream.close
|
212
|
+
else
|
213
|
+
s3cmdUsage
|
214
|
+
end
|
215
|
+
|
216
|
+
end #main
|
217
|
+
# List one page of a bucket's keys.
#   bucket:  bucket name
#   path:    key prefix (start at the right depth)
#   max:     page size; defaults to 100
#   delim:   delimiter for common-prefix ("directory") grouping
#   marker:  resume point returned by the previous page
#   headers: extra HTTP headers
# Returns the S3try(:list_bucket, ...) response.
def S3sync.s3cmdList(bucket, path, max=nil, delim=nil, marker=nil, headers={})
  debug(max)
  options = Hash.new
  options['prefix'] = path # start at the right depth
  # FIX: always supply the option as a string -- the original fell back to
  # the Integer 100 while the other branch used max.to_s
  options['max-keys'] = max ? max.to_s : '100'
  options['delimiter'] = delim if delim
  options['marker'] = marker if marker
  S3try(:list_bucket, bucket, options, headers)
end
|
226
|
+
|
227
|
+
# Turn an array of "name:value" strings into a hash of pairs.
# A nil array yields an empty hash.
def S3sync.hashPairs(ar)
  pairs = Hash.new
  (ar || []).each do |entry|
    name = (/^(.*?):/.match(entry))[1]
    value = (/^.*?:(.*)$/.match(entry))[1]
    pairs[name] = value
  end
  pairs
end
|
237
|
+
end #module
|
238
|
+
|
239
|
+
|
240
|
+
|
241
|
+
# Emit str to stderr, but only when the --debug option is active.
def debug(str)
  return unless $S3syncOptions['--debug']
  $stderr.puts str
end
|
244
|
+
|
245
|
+
S3sync::s3cmdMain #go!
|
@@ -0,0 +1,27 @@
|
|
1
|
+
#!/usr/bin/ruby
|
2
|
+
# This software code is made available "AS IS" without warranties of any
|
3
|
+
# kind. You may copy, display, modify and redistribute the software
|
4
|
+
# code either by itself or as incorporated into your code; provided that
|
5
|
+
# you do not remove any proprietary notices. Your use of this software
|
6
|
+
# code is at your own risk and you waive any claim against the author
|
7
|
+
# with respect to your use of this software code.
|
8
|
+
# (c) 2007 alastair brunton
|
9
|
+
#
|
10
|
+
# modified to search out the yaml in several places, thanks wkharold.
|
11
|
+
require 'yaml'
|
12
|
+
|
13
|
+
module S3Config

  # Search $S3CONF, ~/.s3conf, then /etc/s3conf for s3config.yml and
  # promote each key/value pair to a like-named upper-case global
  # (e.g. aws_access_key_id -> $AWS_ACCESS_KEY_ID).
  confpath = ["#{ENV['S3CONF']}", "#{ENV['HOME']}/.s3conf", "/etc/s3conf"]

  confpath.each do |path|
    # FIX: File.exist? -- File.exists? was deprecated and removed in Ruby 3.2
    if File.exist?(path) and File.directory?(path) and File.exist?("#{path}/s3config.yml")
      config = YAML.load_file("#{path}/s3config.yml")
      config.each_pair do |key, value|
        name = key.to_s.upcase
        # FIX: only well-formed names become globals, and the value is passed
        # through the binding instead of being interpolated into eval'd code
        # (the original broke on quotes and allowed code injection via yaml)
        next unless name =~ /\A[A-Z_][A-Z0-9_]*\z/
        eval("$#{name} = value.to_s", binding)
      end
      break
    end
  end

end
|