s3ranger 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +17 -0
- data/Gemfile +4 -0
- data/Gemfile.lock +49 -0
- data/LICENSE.txt +22 -0
- data/README.md +29 -0
- data/README.txt +406 -0
- data/README_s3cmd.txt +172 -0
- data/Rakefile +1 -0
- data/bin/s3ranger +105 -0
- data/lib/s3ranger/cmd.rb +112 -0
- data/lib/s3ranger/commands.rb +114 -0
- data/lib/s3ranger/config.rb +43 -0
- data/lib/s3ranger/exceptions.rb +38 -0
- data/lib/s3ranger/sync.rb +338 -0
- data/lib/s3ranger/util.rb +5 -0
- data/lib/s3ranger/version.rb +3 -0
- data/lib/s3ranger.rb +2 -0
- data/s3config.yml.example +3 -0
- data/s3ranger.gemspec +31 -0
- data/spec/fixtures/nodes/1.txt +1 -0
- data/spec/local_source_spec.rb +55 -0
- data/spec/main_spec.rb +141 -0
- data/spec/spec_helper.rb +25 -0
- metadata +177 -0
|
@@ -0,0 +1,338 @@
|
|
|
1
|
+
# (c) 2013 Lincoln de Sousa <lincoln@clarete.li>
|
|
2
|
+
# (c) 2007 s3sync.net
|
|
3
|
+
#
|
|
4
|
+
# This software code is made available "AS IS" without warranties of any
|
|
5
|
+
# kind. You may copy, display, modify and redistribute the software
|
|
6
|
+
# code either by itself or as incorporated into your code; provided that
|
|
7
|
+
# you do not remove any proprietary notices. Your use of this software
|
|
8
|
+
# code is at your own risk and you waive any claim against the author
|
|
9
|
+
# with respect to your use of this software code.
|
|
10
|
+
|
|
11
|
+
require 's3ranger/util'
|
|
12
|
+
require 'fileutils'
|
|
13
|
+
|
|
14
|
+
module S3Ranger
|
|
15
|
+
|
|
16
|
+
# Represents one endpoint of a sync operation: either a path in the
# local file system (bucket is nil) or a path inside an S3 bucket.
class Location
  attr_accessor :path
  attr_accessor :bucket

  # path   - String location path (required; raises RuntimeError when nil)
  # bucket - String S3 bucket name, or nil for local paths
  def initialize path, bucket=nil
    raise RuntimeError if path.nil?
    @path = path
    # The original `bucket || nil` was redundant; a nil bucket already
    # marks the location as local.
    @bucket = bucket
  end

  # "bucket:path" for remote locations, plain "path" for local ones
  def to_s
    out = []
    out << @bucket unless @bucket.nil?
    out << @path
    out.join ':'
  end

  # True when this location lives in the local file system
  def local?
    @bucket.nil?
  end

  # Locations are equal when both path and bucket match
  def == other
    @path == other.path and @bucket == other.bucket
  end

  alias eql? ==
end
|
|
43
|
+
|
|
44
|
+
# A single file entry inside either a local or a remote tree.  Nodes
# are ordered by size (via Comparable) and considered equal when both
# their full path and their size match.
class Node
  include Comparable

  attr_accessor :base
  attr_accessor :path
  attr_accessor :size

  # base - parent directory (or key prefix) of the entry
  # path - name of the entry relative to base
  # size - content length in bytes
  def initialize base, path, size
    @base, @path = [base, path].map { |p| p.squeeze '/' }
    @size = size
  end

  # Full path of the entry: base joined with the relative path
  def full
    S3Ranger.safe_join [@base, @path]
  end

  # Equality requires both the same full path and the same size
  def == other
    full == other.full and @size == other.size
  end

  # Ordering is by size alone; backs Comparable's <, >, between?, etc.
  def <=> other
    @size <=> other.size
  end

  alias eql? ==
end
|
|
77
|
+
|
|
78
|
+
# Reads a local directory tree and describes every regular file in it
# as a Node, so it can be diffed against a remote S3 listing.
class LocalDirectory
  attr_accessor :source

  # source - String path of the directory to scan
  def initialize source
    @source = source
  end

  # Walks @source recursively and returns an Array of Node objects,
  # one per regular file; directories themselves are skipped.
  def list_files
    Dir["#{@source}/**/*"].collect { |file|
      unless File.directory? file
        file = Pathname.new(file).cleanpath.to_s
        # Regexp.escape prevents metacharacters in the source path
        # (dots, brackets, '+', ...) from corrupting the prefix strip;
        # the unescaped interpolation was a latent bug.
        file_name = file.gsub(/^#{Regexp.escape(@source)}\/?/, '')
        Node.new @source, file_name, File.stat(file).size
      end
    }.compact
  end
end
|
|
95
|
+
|
|
96
|
+
# Implements the `sync` command: compares a local directory tree with
# the contents of an S3 bucket prefix and uploads, downloads, or
# deletes files so the destination mirrors the source.
class SyncCommand

  # Compares two lists of Nodes keyed by their relative path.
  # Returns [same, to_add_to_2, to_remove_from_2]:
  #  * same             - nodes present in both lists with equal sizes
  #  * to_add_to_2      - nodes missing from list2, or whose size
  #                       differs (need to be copied over)
  #  * to_remove_from_2 - nodes present only in list2
  def SyncCommand.cmp list1, list2
    l1 = {}; list1.each {|e| l1[e.path] = e}
    l2 = {}; list2.each {|e| l2[e.path] = e}

    same, to_add_to_2, to_remove_from_2 = [], [], []

    l1.each do |key, value|
      value2 = l2.delete key
      if value2.nil?
        to_add_to_2 << value
      elsif value2.size == value.size
        same << value
      else
        to_add_to_2 << value
      end
    end

    # Whatever is left in l2 exists only on the destination side
    to_remove_from_2 = l2.values

    [same, to_add_to_2, to_remove_from_2]
  end

  # args        - Hash with :options (CLI flags) and :s3 (AWS::S3 handle)
  # source      - source location string as typed by the user
  # destination - destination location string as typed by the user
  def initialize args, source, destination
    @args = args
    @source = source
    @destination = destination
  end

  # Entry point: parses the locations, diffs the two trees and
  # performs the uploads/downloads/removals.  Honors the --exclude,
  # --keep, --dry-run and --verbose options.
  def run
    # Reading the source and destination using our helper method
    if (source, destination, bucket = SyncCommand.parse_params [@source, @destination]).nil?
      raise WrongUsage.new(nil, 'Need a source and a destination')
    end

    # Getting the trees
    source_tree, destination_tree = read_trees source, destination

    # Getting the list of resources to be exchanged between the two peers
    _, to_add, to_remove = SyncCommand.cmp source_tree, destination_tree

    # Removing the items matching the exclude pattern if requested
    to_add.select! { |e|
      begin
        (e.path =~ /#{@args[:options]["--exclude"]}/).nil?
      rescue RegexpError => exc
        raise WrongUsage.new nil, exc.message
      end
    } if @args[:options]["--exclude"]

    # Calling the methods that perform the actual IO
    if source.local?
      upload_files destination, to_add
      remove_files destination, to_remove unless @args[:options]["--keep"]
    else
      download_files destination, source, to_add
      remove_local_files destination, source, to_remove unless @args[:options]["--keep"]
    end
  end

  # Validates and normalizes the two positional CLI arguments.
  # Returns nil when either is missing, raises WrongUsage when both or
  # neither refer to S3, otherwise returns a pair of Location objects.
  def SyncCommand.parse_params args
    # Reading the arbitrary parameters from the command line and getting
    # modifiable copies to parse
    source, destination = args; return nil if source.nil? or destination.nil?

    # Sync from one s3 to another is currently not supported
    if SyncCommand.remote_prefix? source and SyncCommand.remote_prefix? destination
      raise WrongUsage.new(nil, 'Both arguments can\'t be on S3')
    end

    # C'mon, there's rsync out there
    if !SyncCommand.remote_prefix? source and !SyncCommand.remote_prefix? destination
      raise WrongUsage.new(nil, 'One argument must be on S3')
    end

    source, destination = SyncCommand.process_destination source, destination
    return [Location.new(*source), Location.new(*destination)]
  end

  # True when the given string looks like a remote "bucket:path" prefix.
  def SyncCommand.remote_prefix?(prefix)
    # allow for dos-like things e.g. C:\ to be treated as local even with
    # colon.
    prefix.include? ':' and not prefix.match '^[A-Za-z]:[\\\\/]'
  end

  # Computes the final destination path for one file, with rsync-like
  # semantics: a trailing slash on the source means "copy the
  # contents", no trailing slash means "copy the directory itself".
  def SyncCommand.process_file_destination source, destination, file=""
    if not file.empty?
      # Strip the source prefix from the file's own path.
      # NOTE(review): `sub` is interpolated into the regex unescaped;
      # a source containing regex metacharacters may misbehave — confirm.
      sub = (remote_prefix? source) ? source.split(":")[1] : source
      file = file.gsub /^#{sub}/, ''
    end

    # no slash on end of source means we need to append the last src dir to
    # dst prefix testing for empty isn't good enough here.. needs to be
    # "empty apart from potentially having 'bucket:'"
    if source =~ %r{/$}
      File.join [destination, file]
    else
      if remote_prefix? source
        _, name = source.split ":"
        File.join [destination, File.basename(name || ""), file]
      else
        source = /^\/?(.*)/.match(source)[1]

        # Corner case: the root of the remote path is empty, we don't want to
        # add an unnecessary slash here.
        if destination.end_with? ':'
          File.join [destination + source, file]
        else
          File.join [destination, source, file]
        end
      end
    end
  end

  # Normalizes both paths (squeezes repeated slashes, drops leading
  # "./"), computes the effective destination, and splits the remote
  # side into a [path, bucket] pair suitable for Location.new.
  # Returns [source, destination] preserving the sync direction.
  def SyncCommand.process_destination source, destination
    source, destination = source.dup, destination.dup

    # don't repeat slashes
    source.squeeze! '/'
    destination.squeeze! '/'

    # Making sure that local paths won't break our stuff later
    source.gsub! /^\.\//, ''
    destination.gsub! /^\.\//, ''

    # Parsing the final destination
    destination = SyncCommand.process_file_destination source, destination, ""

    # here's where we find out what direction we're going
    source_is_s3 = remote_prefix? source

    # alias these variables to the other strings (in ruby = does not make
    # copies of strings)
    remote_prefix = source_is_s3 ? source : destination
    local_prefix = source_is_s3 ? destination : source

    # canonicalize the S3 stuff
    bucket, remote_prefix = remote_prefix.split ":"
    remote_prefix ||= ""

    # Just making sure we preserve the direction
    if source_is_s3
      [[remote_prefix, bucket], destination]
    else
      [source, [remote_prefix, bucket]]
    end
  end

  # Lists the remote objects under `location` as Node instances,
  # ensuring a trailing slash on the key prefix.  Raises
  # FailureFeedback on a missing bucket/key or when access is denied.
  def read_tree_remote location
    begin
      dir = location.path
      dir += '/' if not (dir.empty? or dir.end_with? '/')
      @args[:s3].buckets[location.bucket].objects.with_prefix(dir || "").to_a.collect {|obj|
        Node.new location.path, obj.key, obj.content_length
      }
    rescue AWS::S3::Errors::NoSuchBucket
      raise FailureFeedback.new("There's no bucket named `#{location.bucket}'")
    rescue AWS::S3::Errors::NoSuchKey
      raise FailureFeedback.new("There's no key named `#{location.path}' in the bucket `#{location.bucket}'")
    rescue AWS::S3::Errors::AccessDenied
      raise FailureFeedback.new("Access denied")
    end
  end

  # Builds [source_tree, destination_tree], reading whichever side is
  # local from disk and the other side from S3.
  def read_trees source, destination
    if source.local?
      source_tree = LocalDirectory.new(source.path).list_files
      destination_tree = read_tree_remote destination
    else
      source_tree = read_tree_remote source
      destination_tree = LocalDirectory.new(destination.path).list_files
    end

    [source_tree, destination_tree]
  end

  # Uploads each listed node to the remote bucket.  Prints the plan
  # when --dry-run or --verbose is set; skips the write on --dry-run.
  def upload_files remote, list
    list.each do |e|
      if @args[:options]["--dry-run"] or @args[:options]["--verbose"]
        puts " + #{e.full} => #{remote}#{e.path}"
      end

      unless @args[:options]["--dry-run"]
        # NOTE(review): checks and writes e.path (the relative path),
        # not e.full; this presumably relies on the working directory
        # matching the source base — verify against the caller.
        if File.file? e.path
          @args[:s3].buckets[remote.bucket].objects[e.path].write Pathname.new e.path
        end
      end
    end
  end

  # Deletes remote objects corresponding to the given node list.
  def remove_files remote, list

    if @args[:options]["--dry-run"] or @args[:options]["--verbose"]
      list.each {|e|
        puts " - #{remote}#{e.path}"
      }
    end

    unless @args[:options]["--dry-run"]
      # NOTE(review): `list` holds Nodes while obj.key is a String, so
      # include? compares a Node with a String via Node#== — confirm
      # this predicate ever matches.
      @args[:s3].buckets[remote.bucket].objects.delete_if { |obj| list.include? obj.key }
    end
  end

  # Downloads each listed node from S3 into the local destination,
  # creating intermediate directories as needed.
  def download_files destination, source, list
    list.each {|e|
      path = File.join destination.path, e.path

      if @args[:options]["--dry-run"] or @args[:options]["--verbose"]
        puts " + #{source}#{e.path} => #{path}"
      end

      unless @args[:options]["--dry-run"]
        obj = @args[:s3].buckets[source.bucket].objects[e.path]

        # Making sure this new file will have a safe shelter
        FileUtils.mkdir_p File.dirname(path)

        # Downloading and saving the files
        File.open(path, 'wb') do |file|
          obj.read do |chunk|
            file.write chunk
          end
        end
      end
    }
  end

  # Removes local files corresponding to nodes that were deleted on
  # the remote side.  Skipped entirely under --dry-run.
  def remove_local_files destination, source, list
    list.each {|e|
      path = File.join destination.path, e.path

      if @args[:options]["--dry-run"] or @args[:options]["--verbose"]
        puts " * #{e.path} => #{path}"
      end

      unless @args[:options]["--dry-run"]
        FileUtils.rm_rf path
      end
    }
  end
end
|
|
338
|
+
end
|
data/lib/s3ranger.rb
ADDED
data/s3ranger.gemspec
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
# -*- mode: ruby; coding: utf-8; -*-
# Gem packaging description for s3ranger.  Reads the version constant
# from lib/s3ranger/version.rb, so ./lib must be on the load path first.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 's3ranger/version'

Gem::Specification.new do |spec|
  spec.name = "s3ranger"
  spec.version = S3Ranger::VERSION
  spec.authors = ["Lincoln de Sousa"]
  spec.email = ["lincoln@comum.org"]
  spec.description = 'Tool belt for managing your S3 buckets'
  spec.summary = 's3ranger is a library that aggregates a good range of features for managing your Amazon S3 buckets. It also provides basic interactive client'

  spec.homepage = "http://github.com/clarete/s3ranger"
  spec.license = "MIT"

  # File list comes from git; executables are everything under bin/,
  # tests everything under test/, spec/ or features/.
  spec.files = `git ls-files`.split($/)
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  # Library requirements
  spec.add_dependency "aws-sdk"

  # Development requirements
  spec.add_development_dependency "debugger"
  spec.add_development_dependency "simplecov"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
end
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
s3ranger
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
# Specs for S3Ranger::LocalDirectory (lib/s3ranger/sync.rb), covering
# local file listing.  Uses the legacy RSpec `should` syntax and the
# fixture helpers defined in spec_helper.rb.
require 'spec_helper.rb'
require 's3ranger/sync'

include S3Ranger


describe "Local file system IO" do

  it "should list local files" do

    # Given that I have remote source and a local destination with a couple
    # files
    source = "mybucket:path"
    destination = directory "directory2"
    file destination, "file1.txt", "First file"
    file destination, "file2.txt", "Second file"

    # When I create a new local directory based on that path
    local = LocalDirectory.new destination

    # Then I see that the directory nodes contain both their parent paths and
    # their names (sizes are the byte lengths of the contents above)
    local.list_files.should be_eql [
      Node.new(fixture("directory2"), "file1.txt", 10),
      Node.new(fixture("directory2"), "file2.txt", 11),
    ]

    # Cleaning up the fixture directory created for this example
    rm destination
  end

  it "should skip local folders while listing files" do
    # Given that I have remote source and a local destination with files
    source = "mybucket:path"
    destination = directory "directory2"
    file destination, "file1.txt", "First file"
    file destination, "file2.txt", "Second file"

    # And with a sub-directory
    subdir = directory "directory2/subd"
    file subdir, "sub1.txt", "Sub content"

    # When I create a new local directory based on that path
    local = LocalDirectory.new destination

    # Then I see that the directory nodes contain both their parent paths and
    # their names; the subdirectory itself is skipped but its file shows
    # up with its relative path
    local.list_files.should be_eql [
      Node.new(fixture("directory2"), "file1.txt", 10),
      Node.new(fixture("directory2"), "file2.txt", 11),
      Node.new(fixture("directory2"), "subd/sub1.txt", 11),
    ]

    # Cleaning up the fixture directory created for this example
    rm destination
  end
end
|
data/spec/main_spec.rb
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
# Specs for the command-line parsing and list comparison logic in
# S3Ranger::SyncCommand (lib/s3ranger/sync.rb).  Uses the legacy RSpec
# `should` expectation syntax.
require 'spec_helper.rb'
require 's3ranger/cmd'
require 's3ranger/config'
require 's3ranger/commands'
require 's3ranger/sync'

include S3Ranger

describe "Parsing command line arguments" do

  describe "Processing the final destination based on how the user expressed the source" do

    it "Put the local etc directory itself into S3" do
      source = "/etc"
      destination = "mybucket:pre"

      # This will yield S3 keys named pre/etc/...
      SyncCommand.process_destination(source, destination).should be_eql ["/etc", ["pre/etc/", "mybucket"]]
    end

    it "Put the contents of the local /etc dir into S3, rename dir" do
      source = "/etc/"
      destination = "mybucket:pre/etcbackup"

      # This will yield S3 keys named pre/etcbackup/...
      SyncCommand.process_destination(source, destination).should be_eql ["/etc/", ["pre/etcbackup/", "mybucket"]]
    end

    it "Put contents of S3 \"directory\" etc into local dir" do
      source = "mybucket:pre/etc/"
      destination = "/root/etcrestore"

      # This will yield local files at /root/etcrestore/...
      SyncCommand.process_destination(source, destination).should be_eql [["pre/etc/", "mybucket"], "/root/etcrestore/"]
    end

    it "Put the contents of S3 \"directory\" etc into a local dir named etc" do
      source = "mybucket:pre/etc"
      destination = "/root"

      # This will yield local files at /root/etc/...
      SyncCommand.process_destination(source, destination).should be_eql [["pre/etc", "mybucket"], "/root/etc/"]
    end

    it "Put S3 nodes under the key pre/etc/ to the local dir etcrestore" do
      source = "mybucket:pre/etc/"
      destination = "/root/etcrestore"

      # This will yield local files at /root/etcrestore/...
      SyncCommand.process_destination(source, destination).should be_eql [["pre/etc/", "mybucket"], "/root/etcrestore/"]
    end

    it "Put S3 nodes under an empty key (root) to the local dir /tmp/lib" do
      source = "mybucket:"
      destination = "/tmp/lib"

      # This will yield local files at /tmp/lib/...
      SyncCommand.process_destination(source, destination).should be_eql [["", "mybucket"], "/tmp/lib/"]
    end
  end

  it "Should calculate the right destination for each path" do
    file = "pre/etc/sub/path/blah.txt" # This is how it comes from s3
    source = "mybucket:pre/etc/"
    destination = "/root/etcrestore"

    SyncCommand.process_file_destination(source, destination, file).should be_eql "/root/etcrestore/sub/path/blah.txt"
  end

  it "Put S3 files under an empty key (root) to the local dir /tmp/lib" do
    source = "mybucket:"
    destination = "/tmp/lib"
    file = "myfile.rb"

    # This will yield local files at /tmp/lib/...
    SyncCommand.process_file_destination(source, destination, file).should be_eql "/tmp/lib/myfile.rb"
  end

  it "Returning locations based on the parsed destination" do
    source = "/etc"
    destination = "mybucket:pre"

    # When I parse the above arguments using the SyncCommand
    src_location, dst_location = SyncCommand.parse_params [source, destination]

    # Then I see I got the locations with the right params
    src_location.should be_eql S3Ranger::Location.new("/etc")
    dst_location.should be_eql S3Ranger::Location.new("pre/etc/", "mybucket")
  end

  it "Location should be parsed when it is remote with no path" do
    source = "/etc"
    destination = "mybucket:"

    # When I parse the above arguments using the SyncCommand
    src_location, dst_location = SyncCommand.parse_params [source, destination]

    # Then I see I got the locations with the right params
    src_location.should be_eql S3Ranger::Location.new("/etc")
    dst_location.should be_eql S3Ranger::Location.new("etc/", "mybucket")
  end

  it "should be possible to detect if a location is remote" do
    SyncCommand.remote_prefix?("bucket:prefix").should be_true
    SyncCommand.remote_prefix?("path").should be_false
    SyncCommand.remote_prefix?("C://blah").should be_false # We support windows, LOL
  end
end

describe "Comparing file lists" do

  it "should be possible to describe nodes with their paths and size" do

    # Full test
    node = Node.new "path//to", "file1", 10
    node.path.should be_eql "file1"
    node.full.should be_eql "path/to/file1"
    node.size.should be_eql 10

    # Alternative constructor scenarios
    node = Node.new "", "file1", 10
    node.path.should be_eql "file1"
  end

  it "should be possible to compare two lists of files" do

    # Given that I have two lists of Nodes to compare
    list1 = [Node.new("", "file1", 10), Node.new("", "file2", 12), Node.new("", "file3", 12)]
    list2 = [Node.new("", "file1", 10), Node.new("", "file2", 22), Node.new("", "file4", 22),]

    # When I compare those two file lists
    same_in_both, to_be_added_to_list2, to_be_removed_from_list2 = SyncCommand.cmp list1, list2

    # Then I see that the three lists that I requested were returned with the
    # right content
    same_in_both.should == [Node.new("", "file1", 10)] # Just testing our == operator
    same_in_both.should be_eql [Node.new("", "file1", 10)]
    to_be_added_to_list2.should be_eql [Node.new("", "file2", 12), Node.new("", "file3", 12)]
    to_be_removed_from_list2.should be_eql [Node.new("", "file4", 22)]
  end
end
|
data/spec/spec_helper.rb
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
# Shared helpers for the spec suite: fixture path resolution and
# small file-system utilities, with coverage reporting enabled.
require 'fileutils'
require 'simplecov'
SimpleCov.start

# Absolute path of an entry under spec/fixtures
def fixture *args
  File.join File.dirname(__FILE__), "fixtures", *args
end

# Create (if needed) a fixture directory and return its absolute path
def directory path
  fixture(path).tap { |dir| FileUtils.mkdir_p dir }
end

# Create a file whose path components are every argument but the last
# and whose content is the last argument; returns the path written.
def file *args
  *parts, content = args
  path = File.join(*parts)
  directory File.dirname(path)
  File.open(path, 'w') { |f| f.write content }
  path
end

# Remove a path recursively; missing entries are ignored
def rm path
  FileUtils.rm_rf path
end
|