cloudsync 0.1.0
- data/.gitignore +23 -0
- data/LICENSE +20 -0
- data/README.rdoc +17 -0
- data/Rakefile +55 -0
- data/VERSION +1 -0
- data/bin/cloudsync +92 -0
- data/cloudsync.gemspec +70 -0
- data/lib/cloudsync.rb +12 -0
- data/lib/cloudsync/backend/base.rb +112 -0
- data/lib/cloudsync/backend/cloudfiles.rb +128 -0
- data/lib/cloudsync/backend/s3.rb +117 -0
- data/lib/cloudsync/backend/sftp.rb +132 -0
- data/lib/cloudsync/datetime/datetime.rb +17 -0
- data/lib/cloudsync/file.rb +75 -0
- data/lib/cloudsync/sync_manager.rb +138 -0
- data/lib/cloudsync/version.rb +3 -0
- data/test/helper.rb +10 -0
- data/test/test_cloudsync.rb +7 -0
- metadata +127 -0
data/.gitignore
ADDED
data/LICENSE
ADDED
@@ -0,0 +1,20 @@
Copyright (c) 2009 Cory Forsyth

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.rdoc
ADDED
@@ -0,0 +1,17 @@
= cloudsync

Description goes here.

== Note on Patches/Pull Requests

* Fork the project.
* Make your feature addition or bug fix.
* Add tests for it. This is important so I don't break it in a
  future version unintentionally.
* Commit, do not mess with rakefile, version, or history.
  (if you want to have your own version, that is fine but bump version in a commit by itself I can ignore when I pull)
* Send me a pull request. Bonus points for topic branches.

== Copyright

Copyright (c) 2010 Cory Forsyth. See LICENSE for details.
data/Rakefile
ADDED
@@ -0,0 +1,55 @@
require 'rubygems'
require 'rake'

begin
  require 'jeweler'
  Jeweler::Tasks.new do |gem|
    gem.name = "cloudsync"
    gem.summary = %Q{Sync files between various clouds or sftp servers.}
    gem.description = %Q{Sync files between various clouds or sftp servers. Available backends are S3, CloudFiles, and SFTP servers. Can sync, mirror, and prune.}
    gem.email = "cory.forsyth@gmail.com"
    gem.homepage = "http://github.com/megaphone/cloudsync"
    gem.authors = ["Cory Forsyth"]
    gem.add_dependency "right_aws", ">= 0"
    gem.add_dependency "cloudfiles", ">= 0"
    gem.add_dependency "commander", ">= 0"
    # gem is a Gem::Specification... see http://www.rubygems.org/read/chapter/20 for additional settings
  end
  Jeweler::GemcutterTasks.new
rescue LoadError
  puts "Jeweler (or a dependency) not available. Install it with: gem install jeweler"
end

require 'rake/testtask'
Rake::TestTask.new(:test) do |test|
  test.libs << 'lib' << 'test'
  test.pattern = 'test/**/test_*.rb'
  test.verbose = true
end

begin
  require 'rcov/rcovtask'
  Rcov::RcovTask.new do |test|
    test.libs << 'test'
    test.pattern = 'test/**/test_*.rb'
    test.verbose = true
  end
rescue LoadError
  task :rcov do
    abort "RCov is not available. In order to run rcov, you must: sudo gem install spicycode-rcov"
  end
end

task :test => :check_dependencies

task :default => :test

require 'rake/rdoctask'
Rake::RDocTask.new do |rdoc|
  version = File.exist?('VERSION') ? File.read('VERSION') : ""

  rdoc.rdoc_dir = 'rdoc'
  rdoc.title = "cloudsync #{version}"
  rdoc.rdoc_files.include('README*')
  rdoc.rdoc_files.include('lib/**/*.rb')
end
data/VERSION
ADDED
@@ -0,0 +1 @@
0.1.0
data/bin/cloudsync
ADDED
@@ -0,0 +1,92 @@
#!/usr/bin/env ruby

require 'rubygems'
require "commander/import"
require "cloudsync"

program :version, Cloudsync::VERSION
program :description, "Sync between various backends (S3, Cloudfiles, SFTP)"

def add_sync_options(c)
  c.option "--from from_backend", String, "From Backend"
  c.option "--to to_backend", String, "To Backend"
  c.option "--dry-run", "Dry run?"
  c.option "--log LOGFILE", String, "Log file"
  c.option "-a", "Auto mode -- skip command-line confirmations"
end

def confirm_proceed(msg)
  exit unless agree(msg)
end

command :sync do |c|
  c.syntax = "cloudsync sync --from from_backend --to to_backend [--dry-run]"
  c.description = "Copies all files on from_backend to to_backend."
  add_sync_options(c)
  c.action do |args, options|
    options.default :dry_run => false

    from_backend = options.from.to_sym
    to_backend   = options.to.to_sym

    sync_manager = Cloudsync::SyncManager.new \
      :from     => from_backend,
      :to       => to_backend,
      :dry_run  => options.dry_run,
      :log_file => options.log

    unless options.a
      confirm_proceed("Preparing to sync from #{sync_manager.from_backend} to #{sync_manager.to_backend}. Dry-run: #{!!sync_manager.dry_run?}. Ok to proceed?")
    end

    sync_manager.sync!
  end
end

command :mirror do |c|
  c.syntax = "cloudsync mirror --from from_backend --to to_backend [--dry-run]"
  c.description = "Syncs and then prunes all files on from_backend to to_backend."
  add_sync_options(c)
  c.action do |args, options|
    options.default :dry_run => false

    from_backend = options.from.to_sym
    to_backend   = options.to.to_sym

    sync_manager = Cloudsync::SyncManager.new \
      :from     => from_backend,
      :to       => to_backend,
      :dry_run  => options.dry_run,
      :log_file => options.log

    unless options.a
      confirm_proceed("Preparing to mirror from #{sync_manager.from_backend} to #{sync_manager.to_backend}. Dry-run: #{!!sync_manager.dry_run?}. Ok to proceed?")
    end

    sync_manager.mirror!
  end
end

command :prune do |c|
  c.syntax = "cloudsync prune --from from_backend --to to_backend [--dry-run]"
  c.description = "Removes all files on to_backend that don't exist on from_backend."
  add_sync_options(c)
  c.action do |args, options|
    options.default :dry_run => false

    from_backend = options.from.to_sym
    to_backend   = options.to.to_sym

    sync_manager = Cloudsync::SyncManager.new \
      :from     => from_backend,
      :to       => to_backend,
      :dry_run  => options.dry_run,
      :log_file => options.log

    unless options.a
      confirm_proceed("Preparing to prune from #{sync_manager.from_backend} to #{sync_manager.to_backend}. Dry-run: #{!!sync_manager.dry_run?}. Ok to proceed?")
    end

    sync_manager.prune!
  end
end
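The executable resolves the --from/--to names against a cloudsync.yml config (see SyncManager#configs below). A usage sketch, assuming two hypothetical backend names (prod_s3 and backup_cf) defined in ~/.cloudsync.yml:

  # Preview a sync without copying anything:
  $ cloudsync sync --from prod_s3 --to backup_cf --dry-run

  # Mirror (sync, then prune) non-interactively, logging to a custom file:
  $ cloudsync mirror --from prod_s3 --to backup_cf -a --log /var/log/cloudsync.log

Note that --dry-run still performs the file-listing calls against both backends; only uploads, downloads, and deletes are skipped.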
data/cloudsync.gemspec
ADDED
@@ -0,0 +1,70 @@
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-

Gem::Specification.new do |s|
  s.name = %q{cloudsync}
  s.version = "0.1.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Cory Forsyth"]
  s.date = %q{2010-10-12}
  s.default_executable = %q{cloudsync}
  s.description = %q{Sync files between various clouds or sftp servers. Available backends are S3, CloudFiles, and SFTP servers. Can sync, mirror, and prune.}
  s.email = %q{cory.forsyth@gmail.com}
  s.executables = ["cloudsync"]
  s.extra_rdoc_files = [
    "LICENSE",
    "README.rdoc"
  ]
  s.files = [
    ".gitignore",
    "LICENSE",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "bin/cloudsync",
    "cloudsync.gemspec",
    "lib/cloudsync.rb",
    "lib/cloudsync/backend/base.rb",
    "lib/cloudsync/backend/cloudfiles.rb",
    "lib/cloudsync/backend/s3.rb",
    "lib/cloudsync/backend/sftp.rb",
    "lib/cloudsync/datetime/datetime.rb",
    "lib/cloudsync/file.rb",
    "lib/cloudsync/sync_manager.rb",
    "lib/cloudsync/version.rb",
    "test/helper.rb",
    "test/test_cloudsync.rb"
  ]
  s.homepage = %q{http://github.com/megaphone/cloudsync}
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.7}
  s.summary = %q{Sync files between various clouds or sftp servers.}
  s.test_files = [
    "test/helper.rb",
    "test/test_cloudsync.rb"
  ]

  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<right_aws>, [">= 0"])
      s.add_runtime_dependency(%q<cloudfiles>, [">= 0"])
      s.add_runtime_dependency(%q<commander>, [">= 0"])
    else
      s.add_dependency(%q<right_aws>, [">= 0"])
      s.add_dependency(%q<cloudfiles>, [">= 0"])
      s.add_dependency(%q<commander>, [">= 0"])
    end
  else
    s.add_dependency(%q<right_aws>, [">= 0"])
    s.add_dependency(%q<cloudfiles>, [">= 0"])
    s.add_dependency(%q<commander>, [">= 0"])
  end
end
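Because jeweler owns this file, changes belong in the Rakefile's Jeweler::Tasks block, after which the gemspec is regenerated. A typical workflow, assuming jeweler's standard rake tasks:

  $ rake gemspec   # regenerate cloudsync.gemspec from the Rakefile
  $ rake build     # build pkg/cloudsync-0.1.0.gem
  $ rake install   # build and install the gem locally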
data/lib/cloudsync.rb
ADDED
@@ -0,0 +1,12 @@
$:.unshift(File.dirname(__FILE__))

require "cloudsync/sync_manager"
require "cloudsync/version"
require "cloudsync/file"
require "cloudsync/backend/base"
require "cloudsync/backend/cloudfiles"
require "cloudsync/backend/s3"
require "cloudsync/backend/sftp"

# monkeypatches
require "cloudsync/datetime/datetime"
data/lib/cloudsync/backend/base.rb
ADDED
@@ -0,0 +1,112 @@
require 'tempfile'

module Cloudsync
  module Backend
    class Base
      attr_accessor :store, :sync_manager, :name, :prefix

      def initialize(opts = {})
        @sync_manager = opts[:sync_manager]
        @name = opts[:name]
        @backend_type = opts[:backend] || self.class.to_s.split("::").last
      end

      def upload_prefix
        {:bucket => @bucket, :prefix => @prefix}
      end

      def upload_prefix_path
        if @bucket && @prefix
          "#{@bucket}/#{@prefix}"
        end
      end

      # copy
      def copy(file, to_backend)
        start_copy = Time.now
        $LOGGER.info("Copying file #{file} from #{self} to #{to_backend}")
        tempfile = download(file)
        if tempfile
          to_backend.put(file, tempfile.path)

          $LOGGER.debug("Finished copying #{file} from #{self} to #{to_backend} (#{Time.now - start_copy}s)")
          tempfile.unlink
        else
          $LOGGER.info("Failed to download #{file}")
        end
      end

      def to_s
        "#{@name}[:#{@backend_type}/#{upload_prefix_path}]"
      end

      # needs_update?
      def needs_update?(file, file_list=[])
        $LOGGER.debug("Checking if #{file} needs update")

        local_backend_file = find_file_from_list_or_store(file, file_list)

        if local_backend_file.nil?
          $LOGGER.debug("File doesn't exist at #{self} (#{file})")
          return true
        end

        if file.e_tag == local_backend_file.e_tag
          $LOGGER.debug("Etags match for #{file}")
          return false
        else
          $LOGGER.debug(["Etags don't match for #{file}.",
                         "#{file.backend}: #{file.e_tag}",
                         "#{self}: #{local_backend_file.e_tag}"].join(" "))
          return true
        end
      end

      # download
      def download(file)
        raise NotImplementedError
      end

      # put
      def put(file, local_filepath)
        raise NotImplementedError
      end

      # delete
      def delete(file, delete_bucket_if_empty=true)
        raise NotImplementedError
      end

      # all_files
      def all_files
        raise NotImplementedError
      end

      def files_to_sync(upload_prefix={})
        all_files
      end

      # find_file_from_list_or_store
      def find_file_from_list_or_store(file, file_list=[])
        get_file_from_list(file, file_list) || get_file_from_store(file)
      end

      private

      def dry_run?
        return false unless @sync_manager
        @sync_manager.dry_run?
      end

      # get_file_from_store
      def get_file_from_store(file)
        raise NotImplementedError
      end

      # get_file_from_list
      def get_file_from_list(file, file_list)
        file_list.detect {|f| f.full_name == file.full_name}
      end
    end
  end
end
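Base is the backend contract: a subclass overrides download, put, delete, all_files, and get_file_from_store, and inherits copy, needs_update?, and the etag comparison for free. A partial sketch of a hypothetical local-disk backend built on that contract (class name and the :root/:bucket options are illustrative, not part of the gem; listing methods are omitted):

  require "fileutils"
  require "digest/md5"

  module Cloudsync
    module Backend
      class LocalDisk < Base
        def initialize(opts={})
          @root   = opts[:root]    # illustrative: directory backing the store
          @bucket = opts[:bucket]  # pseudo-bucket label, as Sftp derives from :upload_prefix
          super
        end

        # Copy the stored file into the Tempfile the sync pipeline expects.
        def download(file)
          tempfile = file.tempfile
          ::FileUtils.cp(::File.join(@root, file.path), tempfile.path) unless dry_run?
          tempfile.close
          tempfile
        end

        def put(file, local_filepath)
          return if dry_run?
          dest = ::File.join(@root, file.upload_path)
          ::FileUtils.mkdir_p(::File.dirname(dest))
          ::FileUtils.cp(local_filepath, dest)
        end

        def delete(file, delete_bucket_if_empty=true)
          ::File.delete(::File.join(@root, file.path)) unless dry_run?
        end

        private

        # needs_update? compares e_tags, so report an MD5 like the shipped backends do.
        def get_file_from_store(file)
          path = ::File.join(@root, file.path)
          return nil unless ::File.file?(path)
          Cloudsync::File.new \
            :bucket  => @bucket,
            :path    => file.path,
            :size    => ::File.size(path),
            :e_tag   => Digest::MD5.file(path).hexdigest,
            :backend => to_s
        end
      end
    end
  end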
data/lib/cloudsync/backend/cloudfiles.rb
ADDED
@@ -0,0 +1,128 @@
require "cloudfiles"

module Cloudsync
  module Backend
    class CloudFiles < Base
      def initialize(opts={})
        @store = ::CloudFiles::Connection.new \
          :username => opts[:username],
          :api_key => opts[:password]
        super
      end

      def download(file)
        start_time = Time.now
        $LOGGER.info("Downloading file #{file}")

        tempfile = file.tempfile

        if !dry_run?
          if obj = get_obj_from_store(file)
            obj.save_to_filename(tempfile.path)
            tempfile.close
          else
            $LOGGER.error("Error downloading file #{file}")
            tempfile.unlink and return nil
          end
        end

        $LOGGER.debug("Finished downloading file #{file} from #{self} (#{Time.now - start_time})")
        tempfile
      end

      # Put the contents of the path #local_file_path# into
      # the Cloudsync::File object #file#
      def put(file, local_file_path)
        start_time = Time.now
        $LOGGER.info("Putting #{file} to #{self} (#{file.full_upload_path}).")
        return if dry_run?

        get_or_create_obj_from_store(file).
          load_from_filename(local_file_path)
        $LOGGER.debug("Finished putting #{file} to #{self} (#{Time.now - start_time}s)")
      end

      def files_to_sync(upload_prefix={})
        $LOGGER.info("Getting files to sync [#{self}]")

        containers_to_sync(upload_prefix).inject([]) do |files, container|
          container = get_or_create_container(container)
          objects_from_container(container, upload_prefix).each do |path, hash|
            files << Cloudsync::File.from_cf_info(container, path, hash, self.to_s)
          end
          files
        end
      end

      def delete(file, delete_container_if_empty=true)
        $LOGGER.info("Deleting file #{file}")
        return if dry_run?

        container = @store.container(file.container)

        container.delete_object(file.path)

        if delete_container_if_empty
          container.refresh
          if container.empty?
            $LOGGER.debug("Deleting empty container '#{container.name}'")
            @store.delete_container(container.name)
          end
        end

      rescue NoSuchContainerException, NoSuchObjectException => e
        $LOGGER.error("Failed to delete file #{file}")
      end

      private

      def get_or_create_container(container_name)
        if @store.container_exists?(container_name)
          container = @store.container(container_name)
        else
          container = @store.create_container(container_name)
        end
      end

      def containers_to_sync(upload_prefix)
        upload_prefix[:bucket] ? [upload_prefix[:bucket]] : @store.containers
      end

      def objects_from_container(container, upload_prefix)
        objects = []
        if upload_prefix[:prefix]
          container.objects_detail(:path => upload_prefix[:prefix]).collect do |path, hash|
            if hash[:content_type] == "application/directory"
              objects += objects_from_container(container, :prefix => path)
            else
              objects << [path, hash]
            end
          end
        else
          objects = container.objects_detail
        end
        objects
      end

      def get_obj_from_store(file)
        @store.container(file.bucket).object(file.upload_path)
      rescue NoSuchContainerException, NoSuchObjectException => e
        nil
      end

      def get_file_from_store(file)
        Cloudsync::File.from_cf_obj( get_obj_from_store(file), self.to_s )
      end

      def get_or_create_obj_from_store(file)
        container = get_or_create_container(file.container)

        if container.object_exists?(file.upload_path)
          container.object(file.upload_path)
        else
          container.create_object(file.upload_path, true)
        end
      end
    end
  end
end
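One Cloud Files wrinkle worth noting: when a prefix is configured, objects_from_container recurses through pseudo-directories, since Cloud Files models folders as objects served with content type application/directory. An illustrative walk of a listing under :prefix => "assets", keeping only leaf objects:

  assets/img          -> application/directory, recurse with :prefix => "assets/img"
  assets/img/logo.png -> regular object, collected as [path, hash]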
data/lib/cloudsync/backend/s3.rb
ADDED
@@ -0,0 +1,117 @@
require "right_aws"

module Cloudsync
  module Backend
    class S3 < Base
      def initialize(opts={})
        @store = RightAws::S3.new(opts[:username],
                                  opts[:password])
        super
      end

      def put(file, local_filepath)
        start_time = Time.now
        $LOGGER.info("Putting #{file} to #{self} (#{file.full_upload_path}).")
        return if dry_run?

        # Forces creation of the bucket if necessary
        get_or_create_obj_from_store(file)

        local_file = ::File.open(local_filepath)
        @store.interface.put(file.bucket, file.upload_path, local_file)
        local_file.close

        $LOGGER.debug("Finished putting #{file} to #{self} (#{Time.now - start_time})")
      end

      def download(file)
        start_time = Time.now
        $LOGGER.info("Downloading file #{file}")

        tempfile = file.tempfile

        if !dry_run?
          @store.interface.get(file.bucket, file.path) do |chunk|
            tempfile.write chunk
          end
        end

        tempfile.close

        $LOGGER.debug("Finished downloading file #{file} from #{self} (#{Time.now - start_time})")

        tempfile
      rescue RightAws::AwsError => e
        $LOGGER.error("Caught error: #{e} (#{file})")
        if e.message =~ /NoSuchKey/
          tempfile.unlink and return nil
        else
          raise
        end
      end

      def delete(file, delete_bucket_if_empty=true)
        $LOGGER.info("Deleting #{file}")
        return if dry_run?

        get_obj_from_store(file).delete

        if bucket = @store.bucket(file.bucket)
          bucket.key(file.path).delete

          if delete_bucket_if_empty && bucket.keys.empty?
            $LOGGER.debug("Deleting empty bucket '#{bucket.name}'")
            bucket.delete
          end
        end
      rescue RightAws::AwsError => e
        $LOGGER.error("Caught error: #{e} trying to delete #{file}")
      end

      def files_to_sync(upload_prefix={})
        $LOGGER.info("Getting files to sync [#{self}]")

        buckets_to_sync(upload_prefix).inject([]) do |files, bucket|
          objects_from_bucket(bucket, upload_prefix).collect do |key|
            files << Cloudsync::File.from_s3_obj(key, self.to_s)
          end
          files
        end
      end

      private

      def buckets_to_sync(upload_prefix)
        if upload_prefix[:bucket]
          [@store.bucket(upload_prefix[:bucket], true)]
        else
          @store.buckets
        end
      end

      def objects_from_bucket(bucket, upload_prefix)
        if upload_prefix[:prefix]
          bucket.keys(:prefix => upload_prefix[:prefix])
        else
          bucket.keys
        end
      end

      # Convenience to grab a single file
      def get_file_from_store(file)
        Cloudsync::File.from_s3_obj( get_obj_from_store(file), self.to_s )
      end

      def get_or_create_obj_from_store(file)
        @store.bucket(file.bucket, true).key(file.upload_path)
      end

      def get_obj_from_store(file)
        if bucket = @store.bucket(file.bucket)
          key = bucket.key(file.upload_path)
          return key if key.exists?
        end
      end
    end
  end
end
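The backends can also be driven by hand, outside SyncManager. A minimal sketch with placeholder credentials; note that the backends log through $LOGGER, which SyncManager normally creates, so set one up first:

  require "rubygems"
  require "cloudsync"
  require "logger"

  $LOGGER = Logger.new(STDOUT)

  s3 = Cloudsync::Backend::S3.new \
    :name     => :my_s3,    # hypothetical backend name
    :backend  => :s3,
    :username => "AWS_ACCESS_KEY_ID",
    :password => "AWS_SECRET_ACCESS_KEY"

  # List one bucket, as files_to_sync does during a sync:
  s3.files_to_sync(:bucket => "my-bucket").each do |f|
    puts "#{f.full_name} (#{f.e_tag})"
  end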
data/lib/cloudsync/backend/sftp.rb
ADDED
@@ -0,0 +1,132 @@
require 'net/ssh'
require 'net/sftp'

module Cloudsync::Backend
  class Sftp < Base
    attr_accessor :host, :username, :password

    def initialize(options = {})
      @host      = options[:host]
      @base_path = options[:base_path]
      @username  = options[:username]
      @password  = options[:password]
      prefix_parts = options[:upload_prefix].split("/")

      @bucket = prefix_parts.shift
      @prefix = prefix_parts.join("/")

      super
    end

    # download
    def download(file)
      $LOGGER.info("Downloading #{file}")
      tempfile = file.tempfile

      if !dry_run?
        Net::SSH.start(@host, @username, :password => @password) do |ssh|
          ssh.sftp.connect do |sftp|
            begin
              sftp.download!(absolute_path(file.path), tempfile)
            rescue RuntimeError => e
              if e.message =~ /permission denied/
                tempfile.close
                return tempfile
              else
                raise
              end
            end
          end
        end
      end
      tempfile.close
      tempfile
    end

    # put
    def put(file, local_filepath)
      $LOGGER.info("Putting #{file} to #{self}")
      return if dry_run?

      Net::SSH.start(@host, @username, :password => @password) do |ssh|
        ssh.sftp.connect do |sftp|
          sftp.upload!(local_filepath, absolute_path(file.path))
        end
      end
    end

    # delete
    def delete(file, delete_bucket_if_empty=true)
      $LOGGER.info("Deleting #{file}")
      return if dry_run?

      Net::SSH.start(@host, @username, :password => @password) do |ssh|
        ssh.sftp.connect do |sftp|
          sftp.remove!(absolute_path(file.path))
        end
      end
    end

    def files_to_sync(upload_prefix={})
      $LOGGER.info("Getting files to sync [#{self}]")
      files = []
      Net::SSH.start(@host, @username, :password => @password) do |ssh|
        ssh.sftp.connect do |sftp|
          filepaths = sftp.dir.glob(@base_path, "**/**").collect {|entry| entry.name}

          files = filepaths.collect do |filepath|
            attrs = sftp.stat!(absolute_path(filepath))
            next unless attrs.file?

            e_tag = ssh.exec!("md5sum #{absolute_path(filepath)}").split(" ").first
            Cloudsync::File.new \
              :bucket        => @bucket,
              :path          => filepath,
              :size          => attrs.size,
              :last_modified => attrs.mtime,
              :e_tag         => e_tag,
              :backend       => self.to_s
          end.compact
        end
      end
      files
    end

    def absolute_path(path)
      @base_path + "/" + path
    end

    private

    # get_file_from_store
    def get_file_from_store(file)
      local_filepath = file.path.sub(/^#{@prefix}\/?/,"")

      $LOGGER.debug("Looking for local filepath: #{local_filepath}")
      $LOGGER.debug("Abs filepath: #{absolute_path(local_filepath)}")

      sftp_file = nil
      Net::SSH.start(@host, @username, :password => @password) do |ssh|
        ssh.sftp.connect do |sftp|
          begin
            attrs = sftp.stat!(absolute_path(local_filepath))
          rescue Net::SFTP::StatusException => e
            break if e.message =~ /no such file/
            raise
          end
          break unless attrs.file?

          e_tag = ssh.exec!("md5sum #{absolute_path(local_filepath)}").split(" ").first
          sftp_file = Cloudsync::File.new \
            :bucket        => @bucket,
            :path          => local_filepath,
            :size          => attrs.size,
            :last_modified => attrs.mtime,
            :e_tag         => e_tag,
            :backend       => self.to_s
        end
      end
      sftp_file
    end
  end
end
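Unlike the cloud backends, the SFTP backend takes its scope from configuration: :base_path anchors every remote operation, and :upload_prefix supplies the pseudo bucket/prefix that S3 and Cloud Files get from their stores. Etags come from running md5sum on the remote host, which is what lets needs_update? compare an SFTP file against an S3 or Cloud Files checksum. A sketch of the option hash as the initializer consumes it (all values are placeholders):

  sftp = Cloudsync::Backend::Sftp.new \
    :name          => :my_sftp,           # hypothetical backend name
    :backend       => :sftp,
    :host          => "sftp.example.com",
    :username      => "deploy",
    :password      => "secret",
    :base_path     => "/var/www/assets",  # prepended to every remote path
    :upload_prefix => "my-bucket/assets"  # first segment -> @bucket, rest -> @prefix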
data/lib/cloudsync/datetime/datetime.rb
ADDED
@@ -0,0 +1,17 @@
class DateTime
  def to_gm_time
    to_time(new_offset, :gm)
  end

  def to_local_time
    to_time(new_offset(DateTime.now.offset-offset), :local)
  end

  private
  def to_time(dest, method)
    #Convert a fraction of a day to a number of microseconds
    usec = (dest.sec_fraction * 60 * 60 * 24 * (10**6)).to_i
    Time.send(method, dest.year, dest.month, dest.day, dest.hour, dest.min,
      dest.sec, usec)
  end
end
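This monkeypatch exists for the Cloud Files backend: objects_detail reports :last_modified as a DateTime, and File.from_cf_info calls to_gm_time on it to get a comparable Unix timestamp. A quick check of the conversion (Ruby 1.8 semantics, where sec_fraction is a fraction of a day):

  require "date"

  dt = DateTime.parse("2010-10-12T15:30:00-04:00")
  dt.to_gm_time       # => Tue Oct 12 19:30:00 UTC 2010
  dt.to_gm_time.to_i  # seconds since the epoch, as stored in Cloudsync::File#last_modified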
data/lib/cloudsync/file.rb
ADDED
@@ -0,0 +1,75 @@
module Cloudsync
  class File
    attr_accessor :bucket, :path, :size, :last_modified, :e_tag, :backend
    alias_method :container, :bucket
    alias_method :container=, :bucket=

    def initialize(options={})
      @bucket        = options[:bucket]
      @path          = options[:path]
      @size          = options[:size]
      @last_modified = options[:last_modified]
      @e_tag         = options[:e_tag]
      @backend       = options[:backend]
    end

    def self.from_s3_obj(obj, backend=nil)
      return nil if obj.nil?
      new({
        :bucket        => obj.bucket.name,
        :path          => obj.name,
        :size          => obj.size,
        :last_modified => obj.last_modified.to_i,
        :e_tag         => obj.e_tag.gsub('"',''),
        :backend       => backend})
    end

    def self.from_cf_info(container, path, hash, backend)
      new({ :bucket        => container.name,
            :path          => path,
            :size          => hash[:bytes],
            :last_modified => hash[:last_modified].to_gm_time.to_i,
            :e_tag         => hash[:hash],
            :backend       => backend })
    end

    def self.from_cf_obj(obj, backend=nil)
      return nil if obj.nil?
      new({
        :bucket        => obj.container.name,
        :path          => obj.name,
        :size          => obj.bytes.to_i,
        :last_modified => obj.last_modified.to_i,
        :e_tag         => obj.etag,
        :backend       => backend})
    end

    def to_s
      "#{path}"
    end

    def unique_filename
      [bucket,e_tag,path].join.gsub(/[^a-zA-Z\-_0-9]/,'')
    end

    def full_name
      [bucket,path].join("/")
    end

    def upload_path
      if @prefix
        @prefix + "/" + @path
      else
        @path
      end
    end

    def full_upload_path
      [bucket, upload_path].join("/")
    end

    def tempfile
      Tempfile.new(unique_filename)
    end
  end
end
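Cloudsync::File is the common currency between backends: identity is full_name (bucket/path) and change detection is e_tag, regardless of which store a record came from. A small sketch:

  file = Cloudsync::File.new \
    :bucket => "my-bucket",
    :path   => "img/logo.png",
    :size   => 4096,
    :e_tag  => "d41d8cd98f00b204e9800998ecf8427e"

  file.full_name         # => "my-bucket/img/logo.png"
  file.full_upload_path  # => "my-bucket/img/logo.png"
                         # (upload_path falls back to path; @prefix is never assigned in this class)
  file.unique_filename   # strips non-word characters for a safe Tempfile basename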
data/lib/cloudsync/sync_manager.rb
ADDED
@@ -0,0 +1,138 @@
module Cloudsync
  class SyncManager
    attr_accessor :from_backend, :to_backend, :dry_run

    def initialize(opts={})
      @from_backend = get_backend opts[:from]
      @to_backend   = get_backend opts[:to]

      if @from_backend == @to_backend
        raise ArgumentError, "The from_backend can't be the same as the to_backend."
      end

      @dry_run = opts[:dry_run]

      log_file = opts[:log_file] || "cloudsync.log"
      log_file = ::File.expand_path(log_file)
      $LOGGER = Logger.new(log_file)
    end

    def sync!
      sync(:sync)
    end

    def sync_all!
      sync(:sync_all)
    end

    def mirror!
      $LOGGER.info("Mirror from #{from_backend} to #{to_backend} started at #{mirror_start = Time.now}. Dry-run? #{!!dry_run?}")
      sync!
      prune!
      $LOGGER.info("Mirror from #{from_backend} to #{to_backend} finished at #{Time.now}. Took #{Time.now - mirror_start}s")
    end

    def dry_run?
      @dry_run
    end

    def prune!
      prune
    end

    private

    def get_backend(backend_name)
      opts = configs[backend_name].merge(:name => backend_name, :sync_manager => self)

      case opts[:backend]
      when :s3
        Cloudsync::Backend::S3.new(opts)
      when :cloudfiles
        Cloudsync::Backend::CloudFiles.new(opts)
      when :sftp
        Cloudsync::Backend::Sftp.new(opts)
      end
    end

    def configs
      @configs ||= begin
        if ::File.exists?( path = ::File.expand_path("~/.cloudsync.yml") )
          YAML::load_file(path)
        elsif ::File.exists?( path = ::File.expand_path("cloudsync.yml") )
          YAML::load_file(path)
        else
          raise "Couldn't find cloudsync.yml file!"
        end
      end
    end

    def prune
      file_stats = {:removed => [], :skipped => []}

      $LOGGER.info("Prune from #{from_backend} to #{to_backend} started at #{prune_start = Time.now}. Dry-run? #{!!dry_run?}")

      from_backend_files = [] # from_backend.files_to_sync(to_backend.upload_prefix)
      to_backend_files   = to_backend.files_to_sync(from_backend.upload_prefix)
      total_files = to_backend_files.size
      last_decile_complete = 0

      to_backend_files.each_with_index do |file, index|
        $LOGGER.debug("Checking if file #{file} exists on [#{from_backend}]")
        if found_file = from_backend.find_file_from_list_or_store(file, from_backend_files)
          $LOGGER.debug("Keeping file #{file} because it was found on #{from_backend}.")
          file_stats[:skipped] << file
        else
          $LOGGER.debug("Removing #{file} because it doesn't exist on #{from_backend}.")
          file_stats[:removed] << file

          to_backend.delete(file)
        end

        if decile_complete(index, total_files) != last_decile_complete
          last_decile_complete = decile_complete(index, total_files)
          $LOGGER.info("Prune: Completed #{index} files. #{last_decile_complete * 10}% complete")
        end
      end

      $LOGGER.info(["Prune from #{from_backend} to #{to_backend} finished at #{Time.now}, took #{Time.now - prune_start}s.",
                    "Skipped #{file_stats[:skipped].size} files.",
                    "Removed #{file_stats[:removed].size} files"].join(" "))
      file_stats
    end

    def sync(mode)
      file_stats = {:copied => [], :skipped => []}
      $LOGGER.info("Sync from #{from_backend} to #{to_backend} started at #{sync_start = Time.now}. Mode: #{mode}. Dry-run? #{!!dry_run?}")

      from_backend_files = from_backend.files_to_sync(to_backend.upload_prefix)
      to_backend_files   = to_backend.files_to_sync(from_backend.upload_prefix)
      total_files = from_backend_files.size
      last_decile_complete = 0

      from_backend_files.each_with_index do |file, index|
        if (mode == :sync_all || to_backend.needs_update?(file, to_backend_files))
          file_stats[:copied] << file
          from_backend.copy(file, to_backend)
        else
          file_stats[:skipped] << file
          $LOGGER.debug("Skipping up-to-date file #{file}")
        end

        if decile_complete(index, total_files) != last_decile_complete
          last_decile_complete = decile_complete(index, total_files)
          $LOGGER.info("Sync from #{from_backend} to #{to_backend}: Completed #{index} files. #{last_decile_complete * 10}% complete")
        end
      end

      $LOGGER.debug(["Sync from #{from_backend} to #{to_backend} finished at #{Time.now}, took #{Time.now - sync_start}s.",
                     "Copied #{file_stats[:copied].size} files.",
                     "Skipped #{file_stats[:skipped].size} files."].join(" "))
      file_stats
    end

    def decile_complete(index, total_files)
      (index * 100 / total_files) / 10
    end
  end
end
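SyncManager resolves the symbols passed as :from/:to against ~/.cloudsync.yml (falling back to ./cloudsync.yml), so the config's top-level keys must be the backend names, and, since the lookup uses symbols, symbol keys. A sketch of a matching config (backend names and credentials are placeholders):

  :prod_s3:
    :backend: :s3
    :username: AWS_ACCESS_KEY_ID
    :password: AWS_SECRET_ACCESS_KEY
  :backup_sftp:
    :backend: :sftp
    :host: sftp.example.com
    :username: deploy
    :password: secret
    :base_path: /var/backups/assets
    :upload_prefix: my-bucket/assets

With that in place the manager can be driven from Ruby as well as from bin/cloudsync:

  manager = Cloudsync::SyncManager.new \
    :from     => :prod_s3,
    :to       => :backup_sftp,
    :dry_run  => true,
    :log_file => "cloudsync.log"

  manager.sync!   # copy missing/changed files (etag comparison)
  manager.prune!  # delete files on :backup_sftp that are absent from :prod_s3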
data/test/helper.rb
ADDED
metadata
ADDED
@@ -0,0 +1,127 @@
--- !ruby/object:Gem::Specification
name: cloudsync
version: !ruby/object:Gem::Version
  hash: 27
  prerelease: false
  segments:
  - 0
  - 1
  - 0
  version: 0.1.0
platform: ruby
authors:
- Cory Forsyth
autorequire:
bindir: bin
cert_chain: []

date: 2010-10-12 00:00:00 -04:00
default_executable: cloudsync
dependencies:
- !ruby/object:Gem::Dependency
  name: right_aws
  prerelease: false
  requirement: &id001 !ruby/object:Gem::Requirement
    none: false
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        hash: 3
        segments:
        - 0
        version: "0"
  type: :runtime
  version_requirements: *id001
- !ruby/object:Gem::Dependency
  name: cloudfiles
  prerelease: false
  requirement: &id002 !ruby/object:Gem::Requirement
    none: false
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        hash: 3
        segments:
        - 0
        version: "0"
  type: :runtime
  version_requirements: *id002
- !ruby/object:Gem::Dependency
  name: commander
  prerelease: false
  requirement: &id003 !ruby/object:Gem::Requirement
    none: false
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        hash: 3
        segments:
        - 0
        version: "0"
  type: :runtime
  version_requirements: *id003
description: Sync files between various clouds or sftp servers. Available backends are S3, CloudFiles, and SFTP servers. Can sync, mirror, and prune.
email: cory.forsyth@gmail.com
executables:
- cloudsync
extensions: []

extra_rdoc_files:
- LICENSE
- README.rdoc
files:
- .gitignore
- LICENSE
- README.rdoc
- Rakefile
- VERSION
- bin/cloudsync
- cloudsync.gemspec
- lib/cloudsync.rb
- lib/cloudsync/backend/base.rb
- lib/cloudsync/backend/cloudfiles.rb
- lib/cloudsync/backend/s3.rb
- lib/cloudsync/backend/sftp.rb
- lib/cloudsync/datetime/datetime.rb
- lib/cloudsync/file.rb
- lib/cloudsync/sync_manager.rb
- lib/cloudsync/version.rb
- test/helper.rb
- test/test_cloudsync.rb
has_rdoc: true
homepage: http://github.com/megaphone/cloudsync
licenses: []

post_install_message:
rdoc_options:
- --charset=UTF-8
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      hash: 3
      segments:
      - 0
      version: "0"
required_rubygems_version: !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      hash: 3
      segments:
      - 0
      version: "0"
requirements: []

rubyforge_project:
rubygems_version: 1.3.7
signing_key:
specification_version: 3
summary: Sync files between various clouds or sftp servers.
test_files:
- test/helper.rb
- test/test_cloudsync.rb