s3ranger 0.3.0 → 0.3.1

Sign up to get free protection for your applications and to get access to all the features.
data/lib/s3ranger/cli.rb CHANGED
@@ -62,6 +62,15 @@ module S3Ranger
62
62
 
63
63
  u.join ''
64
64
  end
65
+
66
+ protected
67
+
68
+ def parse_acl(opt)
69
+ @acl = nil
70
+ opt.on("-a", "--acl=ACL", "Options: #{AVAILABLE_ACLS.join ', '}") {|acl|
71
+ @acl = acl.to_sym
72
+ }
73
+ end
65
74
  end
66
75
 
67
76
  class ListBuckets < BaseCmd
@@ -86,12 +95,8 @@ module S3Ranger
86
95
 
87
96
  @short_desc = "Create a new bucket under your user account"
88
97
 
89
- @acl = nil
90
-
91
98
  self.options = CmdParse::OptionParserWrapper.new do |opt|
92
- opt.on("-a", "--acl=ACL", "Options: #{AVAILABLE_ACLS.join ', '}") {|acl|
93
- @acl = acl.to_sym
94
- }
99
+ parse_acl(opt)
95
100
  end
96
101
  end
97
102
 
@@ -106,7 +111,7 @@ module S3Ranger
106
111
  end
107
112
 
108
113
  s3.buckets.create bucket, params
109
- rescue AWS::S3::Errors::BucketAlreadyExists => exc
114
+ rescue AWS::S3::Errors::BucketAlreadyExists
110
115
  raise FailureFeedback.new("Bucket `#{bucket}' already exists")
111
116
  end
112
117
  end
@@ -175,7 +180,7 @@ module S3Ranger
175
180
  collection = s3.buckets[bucket].objects.with_prefix(key || "")
176
181
 
177
182
  if @max_entries > 0
178
- collection = collection.page(:per_page => max = @max_entries)
183
+ collection = collection.page(:per_page => @max_entries)
179
184
  end
180
185
 
181
186
  collection.each {|object|
@@ -236,7 +241,7 @@ module S3Ranger
236
241
 
237
242
  opt.on("--expires-in=EXPR", "How long the link takes to expire. Format: <# of seconds> | [#d|#h|#m|#s]") { |expr|
238
243
  val = 0
239
- expr.scan /(\d+\w)/ do |track|
244
+ expr.scan(/(\d+\w)/) do |track|
240
245
  _, num, what = /(\d+)(\w)/.match(track[0]).to_a
241
246
  num = num.to_i
242
247
 
@@ -264,10 +269,10 @@ module S3Ranger
264
269
  opts.merge!({:secure => @secure})
265
270
 
266
271
  if @public
267
- puts (s3.buckets[bucket].objects[key].public_url opts).to_s
272
+ puts s3.buckets[bucket].objects[key].public_url(opts).to_s
268
273
  else
269
274
  opts.merge!({:expires => @expires_in}) if @expires_in
270
- puts (s3.buckets[bucket].objects[key].url_for method.to_sym, opts).to_s
275
+ puts s3.buckets[bucket].objects[key].url_for(method.to_sym, opts).to_s
271
276
  end
272
277
  end
273
278
  end
@@ -317,6 +322,7 @@ module S3Ranger
317
322
  attr_accessor :keep
318
323
  attr_accessor :dry_run
319
324
  attr_accessor :verbose
325
+ attr_accessor :acl
320
326
 
321
327
  def initialize
322
328
  super 'sync', false, false
@@ -337,6 +343,8 @@ module S3Ranger
337
343
  @keep = true
338
344
  }
339
345
 
346
+ parse_acl(opt)
347
+
340
348
  opt.on("-d", "--dry-run", "Do not download or exclude anything, just show what was planned. Implies `verbose`.") {
341
349
  @dry_run = true
342
350
  @verbose = true
data/lib/s3ranger/sync.rb CHANGED
@@ -121,11 +121,11 @@ module S3Ranger
121
121
 
122
122
  class SyncCommand
123
123
 
124
- def SyncCommand.cmp list1, list2
124
+ def self.cmp list1, list2
125
125
  l1 = {}; list1.each {|e| l1[e.path] = e}
126
126
  l2 = {}; list2.each {|e| l2[e.path] = e}
127
127
 
128
- same, to_add_to_2, to_remove_from_2 = [], [], []
128
+ same, to_add_to_2 = [], []
129
129
 
130
130
  l1.each do |key, value|
131
131
  value2 = l2.delete key
@@ -151,7 +151,7 @@ module S3Ranger
151
151
 
152
152
  def run
153
153
  # Reading the source and destination using our helper method
154
- if (source, destination, bucket = SyncCommand.parse_params [@source, @destination]).nil?
154
+ if (source, destination, bucket = self.class.parse_params [@source, @destination]).nil?
155
155
  raise WrongUsage.new(nil, 'Need a source and a destination')
156
156
  end
157
157
 
@@ -159,7 +159,7 @@ module S3Ranger
159
159
  source_tree, destination_tree = read_trees source, destination
160
160
 
161
161
  # Getting the list of resources to be exchanged between the two peers
162
- _, to_add, to_remove = SyncCommand.cmp source_tree, destination_tree
162
+ _, to_add, to_remove = self.class.cmp source_tree, destination_tree
163
163
 
164
164
  # Removing the items matching the exclude pattern if requested
165
165
  to_add.select! { |e|
@@ -180,35 +180,35 @@ module S3Ranger
180
180
  end
181
181
  end
182
182
 
183
- def SyncCommand.parse_params args
183
+ def self.parse_params args
184
184
  # Reading the arbitrary parameters from the command line and getting
185
185
  # modifiable copies to parse
186
186
  source, destination = args; return nil if source.nil? or destination.nil?
187
187
 
188
188
  # Sync from one s3 to another is currently not supported
189
- if SyncCommand.remote_prefix? source and SyncCommand.remote_prefix? destination
189
+ if remote_prefix? source and remote_prefix? destination
190
190
  raise WrongUsage.new(nil, 'Both arguments can\'t be on S3')
191
191
  end
192
192
 
193
193
  # C'mon, there's rsync out there
194
- if !SyncCommand.remote_prefix? source and !SyncCommand.remote_prefix? destination
194
+ if !remote_prefix? source and !remote_prefix? destination
195
195
  raise WrongUsage.new(nil, 'One argument must be on S3')
196
196
  end
197
197
 
198
- source, destination = SyncCommand.process_destination source, destination
198
+ source, destination = process_destination source, destination
199
199
  return [Location.new(*source), Location.new(*destination)]
200
200
  end
201
201
 
202
- def SyncCommand.remote_prefix?(prefix)
202
+ def self.remote_prefix?(prefix)
203
203
  # allow for dos-like things e.g. C:\ to be treated as local even with
204
204
  # colon.
205
205
  prefix.include? ':' and not prefix.match '^[A-Za-z]:[\\\\/]'
206
206
  end
207
207
 
208
- def SyncCommand.process_file_destination source, destination, file=""
208
+ def self.process_file_destination source, destination, file=""
209
209
  if not file.empty?
210
210
  sub = (remote_prefix? source) ? source.split(":")[1] : source
211
- file = file.gsub /^#{sub}/, ''
211
+ file = file.gsub(/^#{sub}/, '')
212
212
  end
213
213
 
214
214
  # no slash on end of source means we need to append the last src dir to
@@ -234,7 +234,7 @@ module S3Ranger
234
234
  end
235
235
  end
236
236
 
237
- def SyncCommand.process_destination source, destination
237
+ def self.process_destination source, destination
238
238
  source, destination = source.dup, destination.dup
239
239
 
240
240
  # don't repeat slashes
@@ -242,21 +242,17 @@ module S3Ranger
242
242
  destination.squeeze! '/'
243
243
 
244
244
  # Making sure that local paths won't break our stuff later
245
- source.gsub! /^\.\//, ''
246
- destination.gsub! /^\.\//, ''
245
+ source.gsub!(/^\.\//, '')
246
+ destination.gsub!(/^\.\//, '')
247
247
 
248
248
  # Parsing the final destination
249
- destination = SyncCommand.process_file_destination source, destination, ""
249
+ destination = process_file_destination source, destination, ""
250
250
 
251
251
  # here's where we find out what direction we're going
252
252
  source_is_s3 = remote_prefix? source
253
253
 
254
- # alias these variables to the other strings (in ruby = does not make
255
- # copies of strings)
256
- remote_prefix = source_is_s3 ? source : destination
257
- local_prefix = source_is_s3 ? destination : source
258
-
259
254
  # canonicalize the S3 stuff
255
+ remote_prefix = source_is_s3 ? source : destination
260
256
  bucket, remote_prefix = remote_prefix.split ":"
261
257
  remote_prefix ||= ""
262
258
 
@@ -269,19 +265,11 @@ module S3Ranger
269
265
  end
270
266
 
271
267
  def read_tree_remote location
272
- begin
273
- dir = location.path
274
- dir += '/' if not (dir.empty? or dir.end_with? '/')
275
- @args.s3.buckets[location.bucket].objects.with_prefix(dir || "").to_a.collect {|obj|
276
- Node.new location.path, obj.key, obj.content_length
277
- }
278
- rescue AWS::S3::Errors::NoSuchBucket
279
- raise FailureFeedback.new("There's no bucket named `#{location.bucket}'")
280
- rescue AWS::S3::Errors::NoSuchKey
281
- raise FailureFeedback.new("There's no key named `#{location.path}' in the bucket `#{location.bucket}'")
282
- rescue AWS::S3::Errors::AccessDenied
283
- raise FailureFeedback.new("Access denied")
284
- end
268
+ dir = location.path
269
 + dir += '/' unless dir.empty? or dir.end_with?('/')
270
+ @args.s3.buckets[location.bucket].objects.with_prefix(dir || "").to_a.collect {|obj|
271
+ Node.new location.path, obj.key, obj.content_length
272
+ }
285
273
  end
286
274
 
287
275
  def read_trees source, destination
@@ -304,7 +292,7 @@ module S3Ranger
304
292
 
305
293
  unless @args.dry_run
306
294
  if File.file? e.path
307
- @args.s3.buckets[remote.bucket].objects[e.path].write Pathname.new e.path
295
+ @args.s3.buckets[remote.bucket].objects[e.path].write Pathname.new(e.path), :acl => @args.acl
308
296
  end
309
297
  end
310
298
  end
data/lib/s3ranger/util.rb CHANGED
@@ -24,6 +24,6 @@
24
24
 
25
25
  module S3Ranger
26
26
  def S3Ranger.safe_join(parts)
27
- File.join *(parts.select {|v| !v.nil? && !v.empty? })
27
+ File.join(*(parts.select {|v| !v.nil? && !v.empty? }))
28
28
  end
29
29
  end
@@ -23,5 +23,5 @@
23
23
  # THE SOFTWARE.
24
24
 
25
25
  module S3Ranger
26
- VERSION = "0.3.0"
26
+ VERSION = "0.3.1"
27
27
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: s3ranger
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.3.0
4
+ version: 0.3.1
5
5
  prerelease:
6
6
  platform: ruby
7
7
  authors:
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2013-08-30 00:00:00.000000000 Z
12
+ date: 2013-09-24 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: aws-sdk
@@ -123,6 +123,22 @@ dependencies:
123
123
  - - ! '>='
124
124
  - !ruby/object:Gem::Version
125
125
  version: '0'
126
+ - !ruby/object:Gem::Dependency
127
+ name: bump
128
+ requirement: !ruby/object:Gem::Requirement
129
+ none: false
130
+ requirements:
131
+ - - ! '>='
132
+ - !ruby/object:Gem::Version
133
+ version: '0'
134
+ type: :development
135
+ prerelease: false
136
+ version_requirements: !ruby/object:Gem::Requirement
137
+ none: false
138
+ requirements:
139
+ - - ! '>='
140
+ - !ruby/object:Gem::Version
141
+ version: '0'
126
142
  description: Tool belt for managing your S3 buckets
127
143
  email:
128
144
  - lincoln@comum.org
@@ -131,15 +147,7 @@ executables:
131
147
  extensions: []
132
148
  extra_rdoc_files: []
133
149
  files:
134
- - .gitignore
135
- - Gemfile
136
- - Gemfile.lock
137
- - LICENSE.txt
138
- - README.md
139
- - Rakefile
140
150
  - bin/s3ranger
141
- - doc/old/README.txt
142
- - doc/old/README_s3cmd.txt
143
151
  - lib/s3ranger.rb
144
152
  - lib/s3ranger/cli.rb
145
153
  - lib/s3ranger/config.rb
@@ -147,13 +155,7 @@ files:
147
155
  - lib/s3ranger/sync.rb
148
156
  - lib/s3ranger/util.rb
149
157
  - lib/s3ranger/version.rb
150
- - s3ranger.gemspec
151
- - s3ranger.yml.example
152
- - spec/fixtures/nodes/1.txt
153
- - spec/local_source_spec.rb
154
- - spec/main_spec.rb
155
- - spec/spec_helper.rb
156
- homepage: http://github.com/clarete/s3ranger
158
+ homepage: https://github.com/clarete/s3ranger
157
159
  licenses:
158
160
  - MIT
159
161
  post_install_message:
@@ -168,7 +170,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
168
170
  version: '0'
169
171
  segments:
170
172
  - 0
171
- hash: 1697485139363659678
173
+ hash: -3161220676864238924
172
174
  required_rubygems_version: !ruby/object:Gem::Requirement
173
175
  none: false
174
176
  requirements:
@@ -177,7 +179,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
177
179
  version: '0'
178
180
  segments:
179
181
  - 0
180
- hash: 1697485139363659678
182
+ hash: -3161220676864238924
181
183
  requirements: []
182
184
  rubyforge_project:
183
185
  rubygems_version: 1.8.24
@@ -185,8 +187,4 @@ signing_key:
185
187
  specification_version: 3
186
188
  summary: s3ranger is a library that aggregates a good range of features for managing
187
189
  your Amazon S3 buckets. It also provides basic interactive client
188
- test_files:
189
- - spec/fixtures/nodes/1.txt
190
- - spec/local_source_spec.rb
191
- - spec/main_spec.rb
192
- - spec/spec_helper.rb
190
+ test_files: []
data/.gitignore DELETED
@@ -1,17 +0,0 @@
1
- *.gem
2
- *.rbc
3
- .bundle
4
- .config
5
- .yardoc
6
- Gemfile.lock
7
- InstalledFiles
8
- _yardoc
9
- coverage
10
- doc/
11
- lib/bundler/man
12
- pkg
13
- rdoc
14
- spec/reports
15
- test/tmp
16
- test/version_tmp
17
- tmp
data/Gemfile DELETED
@@ -1,4 +0,0 @@
1
- source 'https://rubygems.org'
2
-
3
- # Specify your gem's dependencies in s3ranger.gemspec
4
- gemspec
data/Gemfile.lock DELETED
@@ -1,51 +0,0 @@
1
- PATH
2
- remote: .
3
- specs:
4
- s3ranger (0.3.0)
5
- aws-sdk
6
- cmdparse
7
-
8
- GEM
9
- remote: https://rubygems.org/
10
- specs:
11
- aws-sdk (1.16.0)
12
- json (~> 1.4)
13
- nokogiri (< 1.6.0)
14
- uuidtools (~> 2.1)
15
- cmdparse (2.0.5)
16
- columnize (0.3.6)
17
- debugger (1.6.1)
18
- columnize (>= 0.3.1)
19
- debugger-linecache (~> 1.2.0)
20
- debugger-ruby_core_source (~> 1.2.3)
21
- debugger-linecache (1.2.0)
22
- debugger-ruby_core_source (1.2.3)
23
- diff-lcs (1.2.4)
24
- json (1.8.0)
25
- multi_json (1.7.9)
26
- nokogiri (1.5.10)
27
- rake (10.1.0)
28
- rspec (2.14.1)
29
- rspec-core (~> 2.14.0)
30
- rspec-expectations (~> 2.14.0)
31
- rspec-mocks (~> 2.14.0)
32
- rspec-core (2.14.5)
33
- rspec-expectations (2.14.2)
34
- diff-lcs (>= 1.1.3, < 2.0)
35
- rspec-mocks (2.14.3)
36
- simplecov (0.7.1)
37
- multi_json (~> 1.0)
38
- simplecov-html (~> 0.7.1)
39
- simplecov-html (0.7.1)
40
- uuidtools (2.1.4)
41
-
42
- PLATFORMS
43
- ruby
44
-
45
- DEPENDENCIES
46
- bundler (~> 1.3)
47
- debugger
48
- rake
49
- rspec
50
- s3ranger!
51
- simplecov
data/LICENSE.txt DELETED
@@ -1,22 +0,0 @@
1
- Copyright (c) 2013 Lincoln de Sousa <lincoln@clarete.li>
2
-
3
- MIT License
4
-
5
- Permission is hereby granted, free of charge, to any person obtaining
6
- a copy of this software and associated documentation files (the
7
- "Software"), to deal in the Software without restriction, including
8
- without limitation the rights to use, copy, modify, merge, publish,
9
- distribute, sublicense, and/or sell copies of the Software, and to
10
- permit persons to whom the Software is furnished to do so, subject to
11
- the following conditions:
12
-
13
- The above copyright notice and this permission notice shall be
14
- included in all copies or substantial portions of the Software.
15
-
16
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17
- EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18
- MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
19
- NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
20
- LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
21
- OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
22
- WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md DELETED
@@ -1,29 +0,0 @@
1
- # S3Ranger
2
-
3
- TODO: Write a gem description
4
-
5
- ## Installation
6
-
7
- Add this line to your application's Gemfile:
8
-
9
- gem 's3ranger'
10
-
11
- And then execute:
12
-
13
- $ bundle
14
-
15
- Or install it yourself as:
16
-
17
- $ gem install s3ranger
18
-
19
- ## Usage
20
-
21
- TODO: Write usage instructions here
22
-
23
- ## Contributing
24
-
25
- 1. Fork it
26
- 2. Create your feature branch (`git checkout -b my-new-feature`)
27
- 3. Commit your changes (`git commit -am 'Add some feature'`)
28
- 4. Push to the branch (`git push origin my-new-feature`)
29
- 5. Create new Pull Request
data/Rakefile DELETED
@@ -1 +0,0 @@
1
- require "bundler/gem_tasks"
data/doc/old/README.txt DELETED
@@ -1,406 +0,0 @@
1
- Welcome to s3ranger.rb
2
- --------------------
3
- Home page, wiki, forum, bug reports, etc: http://s3sync.net
4
-
5
- This is a ruby program that easily transfers directories between a local
6
- directory and an S3 bucket:prefix. It behaves somewhat, but not precisely, like
7
- the rsync program. In particular, it shares rsync's peculiar behavior that
8
- trailing slashes on the source side are meaningful. See examples below.
9
-
10
- One benefit over some other comparable tools is that s3ranger goes out of its way
11
- to mirror the directory structure on S3. Meaning you don't *need* to use s3ranger
12
- later in order to view your files on S3. You can just as easily use an S3
13
- shell, a web browser (if you used the --public-read option), etc. Note that
14
- s3ranger is NOT necessarily going to be able to read files you uploaded via some
15
- other tool. This includes things uploaded with the old perl version! For best
16
- results, start fresh!
17
-
18
- s3ranger runs happily on linux, probably other *ix, and also Windows (except that
19
- symlinks and permissions management features don't do anything on Windows). If
20
- you get it running somewhere interesting let me know (see below)
21
-
22
- s3ranger is free, and license terms are included in all the source files. If you
23
- decide to make it better, or find bugs, please let me know.
24
-
25
- The original inspiration for this tool is the perl script by the same name which
26
- was made by Thorsten von Eicken (and later updated by me). This ruby program
27
- does not share any components or logic from that utility; the only relation is
28
- that it performs a similar task.
29
-
30
-
31
- Examples:
32
- ---------
33
- (using S3 bucket 'mybucket' and prefix 'pre')
34
- Put the local etc directory itself into S3
35
- s3ranger.rb -r /etc mybucket:pre
36
- (This will yield S3 keys named pre/etc/...)
37
- Put the contents of the local /etc dir into S3, rename dir:
38
- s3ranger.rb -r /etc/ mybucket:pre/etcbackup
39
- (This will yield S3 keys named pre/etcbackup/...)
40
- Put contents of S3 "directory" etc into local dir
41
- s3ranger.rb -r mybucket:pre/etc/ /root/etcrestore
42
- (This will yield local files at /root/etcrestore/...)
43
- Put the contents of S3 "directory" etc into a local dir named etc
44
- s3ranger.rb -r mybucket:pre/etc /root
45
- (This will yield local files at /root/etc/...)
46
- Put S3 nodes under the key pre/etc/ to the local dir etcrestore
47
- **and create local dirs even if S3 side lacks dir nodes**
48
- s3ranger.rb -r --make-dirs mybucket:pre/etc/ /root/etcrestore
49
- (This will yield local files at /root/etcrestore/...)
50
-
51
-
52
- Prerequisites:
53
- --------------
54
- You need a functioning Ruby (>=1.8.4) installation, as well as the OpenSSL ruby
55
- library (which may or may not come with your ruby).
56
-
57
- How you get these items working on your system is really not any of my
58
- business, but you might find the following things helpful. If you're using
59
- Windows, the ruby site has a useful "one click installer" (although it takes
60
- more clicks than that, really). On debian (and ubuntu, and other debian-like
61
- things), there are apt packages available for ruby and the open ssl lib.
62
-
63
-
64
- Your environment:
65
- -----------------
66
- s3ranger needs to know several interesting values to work right. It looks for
67
- them in the following environment variables -or- a s3config.yml file.
68
- In the yml case, the names need to be lowercase (see example file).
69
- Furthermore, the yml is searched for in the following locations, in order:
70
- $S3CONF/s3config.yml
71
- $HOME/.s3conf/s3config.yml
72
- /etc/s3conf/s3config.yml
73
-
74
- Required:
75
- AWS_ACCESS_KEY_ID
76
- AWS_SECRET_ACCESS_KEY
77
-
78
- If you don't know what these are, then s3ranger is probably not the
79
- right tool for you to be starting out with.
80
- Optional:
81
- AWS_S3_HOST - I don't see why the default would ever be wrong
82
- HTTP_PROXY_HOST,HTTP_PROXY_PORT,HTTP_PROXY_USER,HTTP_PROXY_PASSWORD - proxy
83
- SSL_CERT_DIR - Where your Cert Authority keys live; for verification
84
- SSL_CERT_FILE - If you have just one PEM file for CA verification
85
- S3SYNC_RETRIES - How many HTTP errors to tolerate before exiting
86
- S3SYNC_WAITONERROR - How many seconds to wait after an http error
87
- S3SYNC_MIME_TYPES_FILE - Where is your mime.types file
88
- S3SYNC_NATIVE_CHARSET - For example Windows-1252. Defaults to ISO-8859-1.
89
- AWS_CALLING_FORMAT - Defaults to REGULAR
90
- REGULAR # http://s3.amazonaws.com/bucket/key
91
- SUBDOMAIN # http://bucket.s3.amazonaws.com/key
92
- VANITY # http://<vanity_domain>/key
93
-
94
- Important: For EU-located buckets you should set the calling format to SUBDOMAIN
95
- Important: For US buckets with CAPS or other weird traits set the calling format
96
- to REGULAR
97
-
98
- I use "envdir" from the daemontools package to set up my env
99
- variables easily: http://cr.yp.to/daemontools/envdir.html
100
- For example:
101
- envdir /root/s3ranger/env /root/s3ranger/s3ranger.rb -etc etc etc
102
- I know there are other similar tools out there as well.
103
-
104
- You can also just call it in a shell script where you have exported the vars
105
- first such as:
106
- #!/bin/bash
107
- export AWS_ACCESS_KEY_ID=valueGoesHere
108
- ...
109
- s3ranger.rb -etc etc etc
110
-
111
- But by far the easiest (and newest) way to set this up is to put the name:value
112
- pairs in a file named s3config.yml and let the yaml parser pick them up. There
113
- is an .example file shipped with the tar.gz to show what a yaml file looks like.
114
- Thanks to Alastair Brunton for this addition.
115
-
116
- You can also use some combination of .yaml and environment variables, if you
117
- want. Go nuts.
118
-
119
-
120
- Management tasks
121
- ----------------
122
- For low-level S3 operations not encapsulated by the sync paradigm, try the
123
- companion utility s3cmd.rb. See README_s3cmd.txt.
124
-
125
-
126
- About single files
127
- ------------------
128
- s3ranger lacks the special case code that would be needed in order to handle a
129
- source/dest that's a single file. This isn't one of the supported use cases so
130
- don't expect it to work. You can use the companion utility s3cmd.rb for single
131
- get/puts.
132
-
133
-
134
- About Directories, the bane of any S3 sync-er
135
- ---------------------------------------------
136
- In S3 there's no actual concept of folders, just keys and nodes. So, every tool
137
- uses its own proprietary way of storing dir info (my scheme being the best
138
- naturally) and in general the methods are not compatible.
139
-
140
- If you populate S3 by some means *other than* s3ranger and then try to use s3ranger
141
- to "get" the S3 stuff to a local filesystem, you will want to use the
142
- --make-dirs option. This causes the local dirs to be created even if there is no
143
- s3ranger-compatible directory node info stored on the S3 side. In other words,
144
- local folders are conjured into existence whenever they are needed to make the
145
- "get" succeed.
146
-
147
-
148
- About MD5 hashes
149
- ----------------
150
- s3ranger's normal operation is to compare the file size and MD5 hash of each item
151
- to decide whether it needs syncing. On the S3 side, these hashes are stored and
152
- returned to us as the "ETag" of each item when the bucket is listed, so it's
153
- very easy. On the local side, the MD5 must be calculated by pushing every byte
154
- in the file through the MD5 algorithm. This is CPU and IO intensive!
155
-
156
- Thus you can specify the option --no-md5. This will compare the upload time on
157
- S3 to the "last modified" time on the local item, and not do md5 calculations
158
- locally at all. This might cause more transfers than are absolutely necessary.
159
- For example if the file is "touched" to a newer modified date, but its contents
160
- didn't change. Conversely if a file's contents are modified but the date is not
161
- updated, then the sync will pass over it. Lastly, if your clock is very
162
- different from the one on the S3 servers, then you may see unanticipated
163
- behavior.
164
-
165
-
166
- A word on SSL_CERT_DIR:
167
- -----------------------
168
- On my debian install I didn't find any root authority public keys. I installed
169
- some by running this shell archive:
170
- http://mirbsd.mirsolutions.de/cvs.cgi/src/etc/ssl.certs.shar
171
- (You have to click download, and then run it wherever you want the certs to be
172
- placed). I do not in any way assert that these certificates are good,
173
- comprehensive, moral, noble, or otherwise correct. But I am using them.
174
-
175
- If you don't set up a cert dir, and try to use ssl, then you'll 1) get an ugly
176
- warning message slapped down by ruby, and 2) not have any protection AT ALL from
177
- malicious servers posing as s3.amazonaws.com. Seriously... you want to get
178
- this right if you're going to have any sensitive data being tossed around.
179
- --
180
- There is a debian package ca-certificates; this is what I'm using now.
181
- apt-get install ca-certificates
182
- and then use:
183
- SSL_CERT_DIR=/etc/ssl/certs
184
-
185
- You used to be able to use just one certificate, but recently AWS has started
186
- using more than one CA.
187
-
188
-
189
- Getting started:
190
- ----------------
191
- Invoke by typing s3ranger.rb and you should get a nice usage screen.
192
- Options can be specified in short or long form (except --delete, which has no
193
- short form)
194
-
195
- ALWAYS TEST NEW COMMANDS using --dryrun(-n) if you want to see what will be
196
- affected before actually doing it. ESPECIALLY if you use --delete. Otherwise, do
197
- not be surprised if you misplace a '/' or two and end up deleting all your
198
- precious, precious files.
199
-
200
- If you use the --public-read(-p) option, items sent to S3 will be ACL'd so that
201
- anonymous web users can download them, given the correct URL. This could be
202
- useful if you intend to publish directories of information for others to see.
203
- For example, I use s3ranger to publish itself to its home on S3 via the following
204
- command: s3ranger.rb -v -p publish/ ServEdge_pub:s3ranger Where the files live in a
205
- local folder called "publish" and I wish them to be copied to the URL:
206
- http://s3.amazonaws.com/ServEdge_pub/s3ranger/... If you use --ssl(-s) then your
207
- connections with S3 will be encrypted. Otherwise your data will be sent in clear
208
- form, i.e. easy to intercept by malicious parties.
209
-
210
- If you want to prune items from the destination side which are not found on the
211
- source side, you can use --delete. Always test this with -n first to make sure
212
- the command line you specify is not going to do something terrible to your
213
- cherished and irreplaceable data.
214
-
215
-
216
- Updates and other discussion:
217
- -----------------------------
218
- The latest version of s3ranger should normally be at:
219
- http://s3.amazonaws.com/ServEdge_pub/s3ranger/s3ranger.tar.gz
220
- and the Amazon S3 forums probably have a few threads going on it at any given
221
- time. I may not always see things posted to the threads, so if you want you can
222
- contact me at gbs-s3@10forward.com too.
223
-
224
-
225
- Change Log:
226
- -----------
227
-
228
- 2006-09-29:
229
- Added support for --expires and --cache-control. Eg:
230
- --expires="Thu, 01 Dec 2007 16:00:00 GMT"
231
- --cache-control="no-cache"
232
-
233
- Thanks to Charles for pointing out the need for this, and supplying a patch
234
- proving that it would be trivial to add =) Apologies for not including the short
235
- form (-e) for the expires. I have a rule that options taking arguments should
236
- use the long form.
237
- ----------
238
-
239
- 2006-10-04
240
- Several minor debugs and edge cases.
241
- Fixed a bug where retries didn't rewind the stream to start over.
242
- ----------
243
-
244
- 2006-10-12
245
- Version 1.0.5
246
- Finally figured out and fixed bug of trying to follow local symlink-to-directory.
247
- Fixed a really nasty sorting discrepancy that caused problems when files started
248
- with the same name as a directory.
249
- Retry on connection-reset on the S3 side.
250
- Skip files that we can't read instead of dying.
251
- ----------
252
-
253
- 2006-10-12
254
- Version 1.0.6
255
- Some GC voodoo to try and keep a handle on the memory footprint a little better.
256
- There is still room for improvement here.
257
- ----------
258
-
259
- 2006-10-13
260
- Version 1.0.7
261
- Fixed symlink dirs being stored to S3 as real dirs (and failing with 400)
262
- Added a retry catch for connection timeout error.
263
- (Hopefully) caught a bug that expected every S3 listing to contain results
264
- ----------
265
-
266
- 2006-10-14
267
- Version 1.0.8
268
- Was testing for file? before symlink? in localnode.stream. This meant that for
269
- symlink files it was trying to shove the real file contents into the symlink
270
- body on s3.
271
- ----------
272
-
273
- 2006-10-14
274
- Version 1.0.9
275
- Woops, I was using "max-entries" for some reason but the proper header is
276
- "max-keys". Not a big deal.
277
- Broke out the S3try stuff into a separate file so I could re-use it for s3cmd.rb
278
- ----------
279
-
280
- 2006-10-16
281
- Added a couple debug lines; not even enough to call it a version revision.
282
- ----------
283
-
284
- 2006-10-25
285
- Version 1.0.10
286
- UTF-8 fixes.
287
- Catching a couple more retry-able errors in s3try (instead of aborting the
288
- program).
289
- ----------
290
-
291
- 2006-10-26
292
- Version 1.0.11
293
- Revamped some details of the generators and comparator so that directories are
294
- handled in a more exact and uniform fashion across local and S3.
295
- ----------
296
-
297
- 2006-11-28
298
- Version 1.0.12
299
- Added a couple more error catches to s3try.
300
- ----------
301
-
302
- 2007-01-08
303
- Version 1.0.13
304
- Numerous small changes to slash and path handling, in order to catch several
305
- cases where "root" directory nodes were not being created on S3.
306
- This makes restores work a lot more intuitively in many cases.
307
- ----------
308
-
309
- 2007-01-25
310
- Version 1.0.14
311
- Peter Fales' marker fix.
312
- Also, markers should be decoded into native charset (because that's what s3
313
- expects to see).
314
- ----------
315
-
316
- 2007-02-19
317
- Version 1.1.0
318
- *WARNING* Lots of path-handling changes. *PLEASE* test safely before you just
319
- swap this in for your working 1.0.x version.
320
-
321
- - Adding --exclude (and there was much rejoicing).
322
- - Found Yet Another Leading Slash Bug with respect to local nodes. It was always
323
- "recursing" into the first folder even if there was no trailing slash and -r
324
- wasn't specified. What it should have done in this case is simply create a node
325
- for the directory itself, then stop (not check the dir's contents).
326
- - Local node canonicalization was (potentially) stripping the trailing slash,
327
- which we need in order to make some decisios in the local generator.
328
- - Fixed problem where it would prepend a "/" to s3 key names even with blank
329
- prefix.
330
- - Fixed S3->local when there's no "/" in the source so it doesn't try to create
331
- a folder with the bucket name.
332
- - Updated s3try and s3_s3ranger_mod to allow SSL_CERT_FILE
333
- ----------
334
-
335
- 2007-02-22
336
- Version 1.1.1
337
- Fixed dumb regression bug caused by the S3->local bucket name fix in 1.1.0
338
- ----------
339
-
340
- 2007-02-25
341
- Version 1.1.2
342
- Added --progress
343
- ----------
344
-
345
- 2007-06-02
346
- Version 1.1.3
347
- IMPORTANT!
348
- Pursuant to http://s3sync.net/forum/index.php?topic=49.0 , the tar.gz now
349
- expands into its own sub-directory named "s3ranger" instead of dumping all the
350
- files into the current directory.
351
-
352
- In the case of commands of the form:
353
- s3ranger -r somedir somebucket:
354
- The root directory node in s3 was being stored as "somedir/" instead of "somedir"
355
- which caused restores to mess up when you say:
356
- s3ranger -r somebucket: restoredir
357
- The fix to this, by coincidence, actually makes s3fox work even *less* well with
358
- s3ranger. I really need to build my own xul+javascript s3 GUI some day.
359
-
360
- Also fixed some of the NoMethodError stuff for when --progress is used
361
- and caught Errno::ETIMEDOUT
362
- ----------
363
-
364
- 2007-07-12
365
- Version 1.1.4
366
- Added Alastair Brunton's yaml config code.
367
- ----------
368
-
369
- 2007-11-17
370
- Version 1.2.1
371
- Compatibility for S3 API revisions.
372
- When retries are exhausted, emit an error.
373
- Don't ever try to delete the 'root' local dir.
374
- ----------
375
-
376
- 2007-11-20
377
- Version 1.2.2
378
- Handle EU bucket 307 redirects (in s3try.rb)
379
- --make-dirs added
380
- ----------
381
-
382
- 2007-11-20
383
- Version 1.2.3
384
- Fix SSL verification settings that broke in new S3 API.
385
- ----------
386
-
387
- 2008-01-06
388
- Version 1.2.4
389
- Run from any dir (search "here" for includes).
390
- Search out s3config.yml in some likely places.
391
- Reset connection (properly) on retry-able non-50x errors.
392
- Fix calling format bug preventing it from working from yml.
393
- Added http proxy support.
394
- ----------
395
-
396
- 2008-05-11
397
- Version 1.2.5
398
- Added option --no-md5
399
- ----------
400
-
401
- 2008-06-16
402
- Version 1.2.6
403
- Catch connect errors and retry.
404
- ----------
405
-
406
- FNORD
@@ -1,172 +0,0 @@
1
- Welcome to s3cmd.rb
2
- -------------------
3
- This is a ruby program that wraps S3 operations into a simple command-line tool.
4
- It is inspired by things like rsh3ll, #sh3ll, etc., but shares no code from
5
- them. It's meant as a companion utility to s3ranger.rb but could be used on its
6
- own (provided you have read the other readme file and know how to use s3ranger in
7
- theory).
8
-
9
- I made this even though lots of other "shell"s exist, because I wanted a
10
- single-operation utility, instead of a shell "environment". This lends itself
11
- more to scripting, etc. Also the delete operation on rsh3ll seems to be broken
12
- at the moment? =(
13
-
14
- Users not yet familiar with s3ranger should read about that first, since s3cmd and
15
- s3ranger share a tremendous amount of conventions and syntax. Particularly you
16
- have to set up environment variables prior to calling s3cmd, and s3cmd also uses
17
- the "bucket:key" syntax popularized by s3ranger. Many of the options are the same
18
- too. Really, go read the other readme first if you haven't used s3ranger yet.
19
- Otherwise you will become confused. It's OK, I'll wait.
20
-
21
- ....
22
-
23
- In general, s3ranger and s3cmd complement each other. s3ranger is useful to perform
24
- serious synchronization operations, and s3cmd allows you to do simple things
25
- such as bucket management, listing, transferring single files, and the like.
26
-
27
- Here is the usage, with examples to follow.
28
-
29
- s3cmd.rb [options] <command> [arg(s)] version 1.0.0
30
- --help -h --verbose -v --dryrun -n
31
- --ssl -s --debug -d
32
-
33
- Commands:
34
- s3cmd.rb listbuckets [headers]
35
- s3cmd.rb createbucket|deletebucket <bucket> [headers]
36
- s3cmd.rb list <bucket>[:prefix] [max/page] [delimiter] [headers]
37
- s3cmd.rb delete <bucket>:key [headers]
38
- s3cmd.rb deleteall <bucket>[:prefix] [headers]
39
- s3cmd.rb get|put <bucket>:key <file> [headers]
40
-
41
-
42
- A note about [headers]
43
- ----------------------
44
- For some S3 operations, such as "put", you might want to specify certain headers
45
- to the request such as Cache-Control, Expires, x-amz-acl, etc. Rather than
46
- supporting a load of separate command-line options for these, I just allow
47
- header specification. So to upload a file with public-read access you could
48
- say:
49
- s3cmd.rb put MyBucket:TheFile.txt x-amz-acl:public-read
50
-
51
- If you don't need to add any particular headers then you can just ignore this
52
- whole [headers] thing and pretend it's not there. This is somewhat of an
53
- advanced option.
54
-
55
-
56
- Examples
57
- --------
58
- List all the buckets your account owns:
59
- s3cmd.rb listbuckets
60
-
61
- Create a new bucket:
62
- s3cmd.rb createbucket BucketName
63
-
64
- Create a new bucket in the EU:
65
- s3cmd.rb createbucket BucketName EU
66
-
67
- Find out the location constraint of a bucket:
68
- s3cmd.rb location BucketName
69
-
70
- Delete an old bucket you don't want any more:
71
- s3cmd.rb deletebucket BucketName
72
-
73
- Find out what's in a bucket, 10 lines at a time:
74
- s3cmd.rb list BucketName 10
75
-
76
- Only look in a particular prefix:
77
- s3cmd.rb list BucketName:startsWithThis
78
-
79
- Look in the virtual "directory" named foo;
80
- lists sub-"directories" and keys that are at this level.
81
- Note that if you specify a delimiter you must specify a max before it.
82
- (until I make the options parsing smarter)
83
- s3cmd.rb list BucketName:foo/ 10 /
84
-
85
- Delete a key:
86
- s3cmd.rb delete BucketName:AKey
87
-
88
- Delete all keys that match (like a combo between list and delete):
89
- s3cmd.rb deleteall BucketName:SomePrefix
90
-
91
- Only pretend you're going to delete all keys that match, but list them:
92
- s3cmd.rb --dryrun deleteall BucketName:SomePrefix
93
-
94
- Delete all keys in a bucket (leaving the bucket):
95
- s3cmd.rb deleteall BucketName
96
-
97
- Get a file from S3 and store it to a local file
98
- s3cmd.rb get BucketName:TheFileOnS3.txt ALocalFile.txt
99
-
100
- Put a local file up to S3
101
- Note we don't automatically set mime type, etc.
102
- NOTE that the order of the options doesn't change. S3 stays first!
103
- s3cmd.rb put BucketName:TheFileOnS3.txt ALocalFile.txt
104
-
105
-
106
- Change Log:
107
- -----------
108
- 2006-10-14:
109
- Created.
110
- -----------
111
-
112
- 2006-10-16
113
- Version 1.0.1
114
- Force content length to a string value since some Ruby versions don't convert it correctly.
115
- -----------
116
-
117
- 2006-10-25
118
- UTF-8 fixes.
119
- -----------
120
-
121
- 2006-11-28
122
- Version 1.0.3
123
- Added a couple more error catches to s3try.
124
- ----------
125
-
126
- 2007-01-25
127
- Version 1.0.4
128
- Peter Fales' marker fix.
129
- Also, markers should be decoded into native charset (because that's what s3
130
- expects to see).
131
- ----------
132
-
133
- 2007-02-19
134
- - Updated s3try and s3_s3ranger_mod to allow SSL_CERT_FILE
135
- ----------
136
-
137
- 2007-02-25
138
- Added --progress
139
- ----------
140
-
141
- 2007-07-12
142
- Version 1.0.6
143
- Added Alastair Brunton's yaml config code.
144
- ----------
145
-
146
- 2007-11-17
147
- Version 1.2.1
148
- Compatibility for S3 API revisions.
149
- When retries are exhausted, emit an error.
150
- ----------
151
-
152
- 2007-11-20
153
- Version 1.2.2
154
- Handle EU bucket 307 redirects (in s3try.rb)
155
- ----------
156
-
157
- 2007-11-20
158
- Version 1.2.3
159
- Fix SSL verification settings that broke in new S3 API.
160
- ----------
161
-
162
- 2008-01-06
163
- Version 1.2.4
164
- Run from any dir (search "here" for includes).
165
- Search out s3config.yml in some likely places.
166
- Reset connection (properly) on retry-able non-50x errors.
167
- Fix calling format bug preventing it from working from yml.
168
- Added http proxy support.
169
- ----------
170
-
171
-
172
- FNORD
data/s3ranger.gemspec DELETED
@@ -1,32 +0,0 @@
1
- # -*- mode: ruby; coding: utf-8; -*-
2
- lib = File.expand_path('../lib', __FILE__)
3
- $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
4
- require 's3ranger/version'
5
-
6
- Gem::Specification.new do |spec|
7
- spec.name = "s3ranger"
8
- spec.version = S3Ranger::VERSION
9
- spec.authors = ["Lincoln de Sousa"]
10
- spec.email = ["lincoln@comum.org"]
11
- spec.description = 'Tool belt for managing your S3 buckets'
12
- spec.summary = 's3ranger is a library that aggregates a good range of features for managing your Amazon S3 buckets. It also provides basic interactive client'
13
-
14
- spec.homepage = "http://github.com/clarete/s3ranger"
15
- spec.license = "MIT"
16
-
17
- spec.files = `git ls-files`.split($/)
18
- spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
19
- spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
20
- spec.require_paths = ["lib"]
21
-
22
- # Library requirements
23
- spec.add_dependency "aws-sdk"
24
- spec.add_dependency "cmdparse"
25
-
26
- # Development requirements
27
- spec.add_development_dependency "debugger"
28
- spec.add_development_dependency "simplecov"
29
- spec.add_development_dependency "rspec"
30
- spec.add_development_dependency "bundler", "~> 1.3"
31
- spec.add_development_dependency "rake"
32
- end
data/s3ranger.yml.example DELETED
@@ -1,2 +0,0 @@
1
- AWS_ACCESS_KEY_ID: 11111111111111111111111
2
- AWS_SECRET_ACCESS_KEY: 222222222222222222222
@@ -1 +0,0 @@
1
- s3ranger
@@ -1,55 +0,0 @@
1
- require 'spec_helper.rb'
2
- require 's3ranger/sync'
3
-
4
- include S3Ranger
5
-
6
-
7
- describe "Local file system IO" do
8
-
9
- it "should list local files" do
10
-
11
- # Given that I have remote source and a local destination with a couple
12
- # files
13
- source = "mybucket:path"
14
- destination = directory "directory2"
15
- file destination, "file1.txt", "First file"
16
- file destination, "file2.txt", "Second file"
17
-
18
- # When I create a new local directory based on that path
19
- local = LocalDirectory.new destination
20
-
21
- # Then I see that the directory nodes contain both their parent paths and
22
- # their names
23
- local.list_files.should be_eql [
24
- Node.new(fixture("directory2"), "file1.txt", 10),
25
- Node.new(fixture("directory2"), "file2.txt", 11),
26
- ]
27
-
28
- rm destination
29
- end
30
-
31
- it "should skip local folders while listing files" do
32
- # Given that I have remote source and a local destination with files
33
- source = "mybucket:path"
34
- destination = directory "directory2"
35
- file destination, "file1.txt", "First file"
36
- file destination, "file2.txt", "Second file"
37
-
38
- # And with a sub-directory
39
- subdir = directory "directory2/subd"
40
- file subdir, "sub1.txt", "Sub content"
41
-
42
- # When I create a new local directory based on that path
43
- local = LocalDirectory.new destination
44
-
45
- # Then I see that the directory nodes contain both their parent paths and
46
- # their names
47
- local.list_files.should be_eql [
48
- Node.new(fixture("directory2"), "file1.txt", 10),
49
- Node.new(fixture("directory2"), "file2.txt", 11),
50
- Node.new(fixture("directory2"), "subd/sub1.txt", 11),
51
- ]
52
-
53
- rm destination
54
- end
55
- end
data/spec/main_spec.rb DELETED
@@ -1,140 +0,0 @@
1
- require 'spec_helper.rb'
2
- require 's3ranger/cli'
3
- require 's3ranger/config'
4
- require 's3ranger/sync'
5
-
6
- include S3Ranger
7
-
8
- describe "Parsing command line arguments" do
9
-
10
- describe "Processing the final destination based on how the user expressed the source" do
11
-
12
- it "Put the local etc directory itself into S3" do
13
- source = "/etc"
14
- destination = "mybucket:pre"
15
-
16
- # This will yield S3 keys named pre/etc/...
17
- SyncCommand.process_destination(source, destination).should be_eql ["/etc", ["pre/etc/", "mybucket"]]
18
- end
19
-
20
- it "Put the contents of the local /etc dir into S3, rename dir" do
21
- source = "/etc/"
22
- destination = "mybucket:pre/etcbackup"
23
-
24
- # This will yield S3 keys named pre/etcbackup/...
25
- SyncCommand.process_destination(source, destination).should be_eql ["/etc/", ["pre/etcbackup/", "mybucket"]]
26
- end
27
-
28
- it "Put contents of S3 \"directory\" etc into local dir" do
29
- source = "mybucket:pre/etc/"
30
- destination = "/root/etcrestore"
31
-
32
- # This will yield local files at /root/etcrestore/...
33
- SyncCommand.process_destination(source, destination).should be_eql [["pre/etc/", "mybucket"], "/root/etcrestore/"]
34
- end
35
-
36
- it "Put the contents of S3 \"directory\" etc into a local dir named etc" do
37
- source = "mybucket:pre/etc"
38
- destination = "/root"
39
-
40
- # This will yield local files at /root/etc/...
41
- SyncCommand.process_destination(source, destination).should be_eql [["pre/etc", "mybucket"], "/root/etc/"]
42
- end
43
-
44
- it "Put S3 nodes under the key pre/etc/ to the local dir etcrestore" do
45
- source = "mybucket:pre/etc/"
46
- destination = "/root/etcrestore"
47
-
48
- # This will yield local files at /root/etcrestore/...
49
- SyncCommand.process_destination(source, destination).should be_eql [["pre/etc/", "mybucket"], "/root/etcrestore/"]
50
- end
51
-
52
- it "Put S3 nodes under an empty key (root) to the local dir /tmp/lib" do
53
- source = "mybucket:"
54
- destination = "/tmp/lib"
55
-
56
- # This will yield local files at /root/etcrestore/...
57
- SyncCommand.process_destination(source, destination).should be_eql [["", "mybucket"], "/tmp/lib/"]
58
- end
59
- end
60
-
61
- it "Should calculate the right destination for each path" do
62
- file = "pre/etc/sub/path/blah.txt" # This is how it comes from s3
63
- source = "mybucket:pre/etc/"
64
- destination = "/root/etcrestore"
65
-
66
- SyncCommand.process_file_destination(source, destination, file).should be_eql "/root/etcrestore/sub/path/blah.txt"
67
- end
68
-
69
- it "Put S3 files under an empty key (root) to the local dir /tmp/lib" do
70
- source = "mybucket:"
71
- destination = "/tmp/lib"
72
- file = "myfile.rb"
73
-
74
- # This will yield local files at /tmp/lib/...
75
- SyncCommand.process_file_destination(source, destination, file).should be_eql "/tmp/lib/myfile.rb"
76
- end
77
-
78
- it "Returning locations based on the parsed destination" do
79
- source = "/etc"
80
- destination = "mybucket:pre"
81
-
82
- # When I parse the above arguments using the SyncCommand
83
- src_location, dst_location = SyncCommand.parse_params [source, destination]
84
-
85
- # Then I see I got the locations with the right params
86
- src_location.should be_eql S3Ranger::Location.new("/etc")
87
- dst_location.should be_eql S3Ranger::Location.new("pre/etc/", "mybucket")
88
- end
89
-
90
- it "Location should be parsed when it is remote with no path" do
91
- source = "/etc"
92
- destination = "mybucket:"
93
-
94
- # When I parse the above arguments using the SyncCommand
95
- src_location, dst_location = SyncCommand.parse_params [source, destination]
96
-
97
- # Then I see I got the locations with the right params
98
- src_location.should be_eql S3Ranger::Location.new("/etc")
99
- dst_location.should be_eql S3Ranger::Location.new("etc/", "mybucket")
100
- end
101
-
102
- it "should be possible to detect if a location is remote" do
103
- SyncCommand.remote_prefix?("bucket:prefix").should be_true
104
- SyncCommand.remote_prefix?("path").should be_false
105
- SyncCommand.remote_prefix?("C://blah").should be_false # We support windows, LOL
106
- end
107
- end
108
-
109
- describe "Comparing file lists" do
110
-
111
- it "should be possible to describe nodes with their paths and size" do
112
-
113
- # Full test
114
- node = Node.new "path//to", "file1", 10
115
- node.path.should be_eql "file1"
116
- node.full.should be_eql "path/to/file1"
117
- node.size.should be_eql 10
118
-
119
- # Alternative constructor scenarios
120
- node = Node.new "", "file1", 10
121
- node.path.should be_eql "file1"
122
- end
123
-
124
- it "should be possible to compare two lists of files" do
125
-
126
- # Given that I have two lists of Nodes to compare
127
- list1 = [Node.new("", "file1", 10), Node.new("", "file2", 12), Node.new("", "file3", 12)]
128
- list2 = [Node.new("", "file1", 10), Node.new("", "file2", 22), Node.new("", "file4", 22),]
129
-
130
- # When I compare those two file lists
131
- same_in_both, to_be_added_to_list2, to_be_removed_from_list2 = SyncCommand.cmp list1, list2
132
-
133
- # Then I see that the three lists that I requested were returned with the
134
- # right content
135
- same_in_both.should == [Node.new("", "file1", 10)] # Just testing our == operator
136
- same_in_both.should be_eql [Node.new("", "file1", 10)]
137
- to_be_added_to_list2.should be_eql [Node.new("", "file2", 12), Node.new("", "file3", 12)]
138
- to_be_removed_from_list2.should be_eql [Node.new("", "file4", 22)]
139
- end
140
- end
data/spec/spec_helper.rb DELETED
@@ -1,25 +0,0 @@
1
- require 'fileutils'
2
- require 'simplecov'
3
- SimpleCov.start
4
-
5
- def fixture *args
6
- File.join File.dirname(__FILE__), "fixtures", *args
7
- end
8
-
9
- def directory path
10
- full = fixture(path)
11
- FileUtils::mkdir_p full
12
- return full
13
- end
14
-
15
- def file *args
16
- file = File.join *args[0..-2]
17
- directory File.dirname(file)
18
- File.open(file, 'w') {|f| f.write args[-1] }
19
- return file
20
- end
21
-
22
-
23
- def rm path
24
- FileUtils::rm_rf path
25
- end