nfm-backup 4.0.1a
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE.md +24 -0
- data/README.md +20 -0
- data/bin/backup +5 -0
- data/lib/backup.rb +133 -0
- data/lib/backup/archive.rb +170 -0
- data/lib/backup/binder.rb +22 -0
- data/lib/backup/cleaner.rb +116 -0
- data/lib/backup/cli.rb +364 -0
- data/lib/backup/cloud_io/base.rb +41 -0
- data/lib/backup/cloud_io/cloud_files.rb +298 -0
- data/lib/backup/cloud_io/s3.rb +260 -0
- data/lib/backup/compressor/base.rb +35 -0
- data/lib/backup/compressor/bzip2.rb +39 -0
- data/lib/backup/compressor/custom.rb +53 -0
- data/lib/backup/compressor/gzip.rb +74 -0
- data/lib/backup/config.rb +119 -0
- data/lib/backup/config/dsl.rb +102 -0
- data/lib/backup/config/helpers.rb +143 -0
- data/lib/backup/database/base.rb +85 -0
- data/lib/backup/database/mongodb.rb +186 -0
- data/lib/backup/database/mysql.rb +123 -0
- data/lib/backup/database/postgresql.rb +133 -0
- data/lib/backup/database/redis.rb +179 -0
- data/lib/backup/database/riak.rb +82 -0
- data/lib/backup/encryptor/base.rb +29 -0
- data/lib/backup/encryptor/gpg.rb +747 -0
- data/lib/backup/encryptor/open_ssl.rb +72 -0
- data/lib/backup/errors.rb +58 -0
- data/lib/backup/logger.rb +199 -0
- data/lib/backup/logger/console.rb +51 -0
- data/lib/backup/logger/fog_adapter.rb +29 -0
- data/lib/backup/logger/logfile.rb +119 -0
- data/lib/backup/logger/syslog.rb +116 -0
- data/lib/backup/model.rb +454 -0
- data/lib/backup/notifier/base.rb +98 -0
- data/lib/backup/notifier/campfire.rb +69 -0
- data/lib/backup/notifier/hipchat.rb +93 -0
- data/lib/backup/notifier/http_post.rb +122 -0
- data/lib/backup/notifier/mail.rb +238 -0
- data/lib/backup/notifier/nagios.rb +69 -0
- data/lib/backup/notifier/prowl.rb +69 -0
- data/lib/backup/notifier/pushover.rb +80 -0
- data/lib/backup/notifier/slack.rb +149 -0
- data/lib/backup/notifier/twitter.rb +65 -0
- data/lib/backup/package.rb +51 -0
- data/lib/backup/packager.rb +101 -0
- data/lib/backup/pipeline.rb +124 -0
- data/lib/backup/splitter.rb +76 -0
- data/lib/backup/storage/base.rb +57 -0
- data/lib/backup/storage/cloud_files.rb +158 -0
- data/lib/backup/storage/cycler.rb +65 -0
- data/lib/backup/storage/dropbox.rb +236 -0
- data/lib/backup/storage/ftp.rb +98 -0
- data/lib/backup/storage/local.rb +64 -0
- data/lib/backup/storage/ninefold.rb +74 -0
- data/lib/backup/storage/rsync.rb +248 -0
- data/lib/backup/storage/s3.rb +154 -0
- data/lib/backup/storage/scp.rb +67 -0
- data/lib/backup/storage/sftp.rb +82 -0
- data/lib/backup/syncer/base.rb +70 -0
- data/lib/backup/syncer/cloud/base.rb +179 -0
- data/lib/backup/syncer/cloud/cloud_files.rb +83 -0
- data/lib/backup/syncer/cloud/local_file.rb +100 -0
- data/lib/backup/syncer/cloud/s3.rb +110 -0
- data/lib/backup/syncer/rsync/base.rb +48 -0
- data/lib/backup/syncer/rsync/local.rb +31 -0
- data/lib/backup/syncer/rsync/pull.rb +51 -0
- data/lib/backup/syncer/rsync/push.rb +205 -0
- data/lib/backup/template.rb +46 -0
- data/lib/backup/utilities.rb +221 -0
- data/lib/backup/version.rb +5 -0
- data/templates/cli/archive +28 -0
- data/templates/cli/compressor/bzip2 +4 -0
- data/templates/cli/compressor/custom +7 -0
- data/templates/cli/compressor/gzip +4 -0
- data/templates/cli/config +123 -0
- data/templates/cli/databases/mongodb +15 -0
- data/templates/cli/databases/mysql +18 -0
- data/templates/cli/databases/postgresql +16 -0
- data/templates/cli/databases/redis +16 -0
- data/templates/cli/databases/riak +17 -0
- data/templates/cli/encryptor/gpg +27 -0
- data/templates/cli/encryptor/openssl +9 -0
- data/templates/cli/model +26 -0
- data/templates/cli/notifiers/campfire +12 -0
- data/templates/cli/notifiers/hipchat +15 -0
- data/templates/cli/notifiers/http_post +32 -0
- data/templates/cli/notifiers/mail +21 -0
- data/templates/cli/notifiers/nagios +13 -0
- data/templates/cli/notifiers/prowl +11 -0
- data/templates/cli/notifiers/pushover +11 -0
- data/templates/cli/notifiers/twitter +13 -0
- data/templates/cli/splitter +7 -0
- data/templates/cli/storages/cloud_files +11 -0
- data/templates/cli/storages/dropbox +19 -0
- data/templates/cli/storages/ftp +12 -0
- data/templates/cli/storages/local +7 -0
- data/templates/cli/storages/ninefold +9 -0
- data/templates/cli/storages/rsync +17 -0
- data/templates/cli/storages/s3 +14 -0
- data/templates/cli/storages/scp +14 -0
- data/templates/cli/storages/sftp +14 -0
- data/templates/cli/syncers/cloud_files +22 -0
- data/templates/cli/syncers/rsync_local +20 -0
- data/templates/cli/syncers/rsync_pull +28 -0
- data/templates/cli/syncers/rsync_push +28 -0
- data/templates/cli/syncers/s3 +27 -0
- data/templates/general/links +3 -0
- data/templates/general/version.erb +2 -0
- data/templates/notifier/mail/failure.erb +16 -0
- data/templates/notifier/mail/success.erb +16 -0
- data/templates/notifier/mail/warning.erb +16 -0
- data/templates/storage/dropbox/authorization_url.erb +6 -0
- data/templates/storage/dropbox/authorized.erb +4 -0
- data/templates/storage/dropbox/cache_file_written.erb +10 -0
- metadata +688 -0
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
# encoding: utf-8
require 'net/scp'

module Backup
  module Storage
    # Stores the backup package files on a remote host over SSH,
    # transferring them with SCP.
    class SCP < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Server credentials
      attr_accessor :username, :password, :ssh_options

      ##
      # Server IP Address and SCP port
      attr_accessor :ip, :port

      def initialize(model, storage_id = nil)
        super

        @port ||= 22
        @path ||= 'backups'
        @ssh_options ||= {}
        path.sub!(/^~\//, '')
      end

      private

      # Opens an SSH session to #ip and yields it to the given block.
      # :password and :port may be overridden through #ssh_options.
      def connection
        options = { :password => password, :port => port }.merge(ssh_options)
        Net::SSH.start(ip, username, options) do |ssh|
          yield ssh
        end
      end

      # Creates the remote directory, then uploads each file in the
      # package via SCP.
      def transfer!
        connection do |ssh|
          ssh.exec!("mkdir -p '#{ remote_path }'")

          package.filenames.each do |name|
            source      = File.join(Config.tmp_path, name)
            destination = File.join(remote_path, name)
            Logger.info "Storing '#{ ip }:#{ destination }'..."
            ssh.scp.upload!(source, destination)
          end
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{ package.time }..."

        errors = []
        connection do |ssh|
          ssh.exec!("rm -r '#{ remote_path_for(package) }'") do |ch, stream, data|
            errors << data if stream == :stderr
          end
        end
        unless errors.empty?
          raise Error, "Net::SSH reported the following errors:\n" +
              errors.join("\n")
        end
      end

    end
  end
end
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
# encoding: utf-8
require 'net/sftp'

module Backup
  module Storage
    # Stores the backup package files on a remote host over SFTP.
    class SFTP < Base
      include Storage::Cycler

      ##
      # Server credentials
      attr_accessor :username, :password, :ssh_options

      ##
      # Server IP Address and SFTP port
      attr_accessor :ip, :port

      def initialize(model, storage_id = nil)
        super

        @ssh_options ||= {}
        @port ||= 22
        @path ||= 'backups'
        path.sub!(/^~\//, '')
      end

      private

      # Opens an SFTP session to #ip and yields it to the given block.
      # :password and :port may be overridden through #ssh_options.
      def connection
        options = { :password => password, :port => port }.merge(ssh_options)
        Net::SFTP.start(ip, username, options) do |sftp|
          yield sftp
        end
      end

      # Ensures the remote directory exists, then uploads each file
      # in the package.
      def transfer!
        connection do |sftp|
          create_remote_path(sftp)

          package.filenames.each do |name|
            source      = File.join(Config.tmp_path, name)
            destination = File.join(remote_path, name)
            Logger.info "Storing '#{ ip }:#{ destination }'..."
            sftp.upload!(source, destination)
          end
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{ package.time }..."

        remote_path = remote_path_for(package)
        connection do |sftp|
          package.filenames.each do |name|
            sftp.remove!(File.join(remote_path, name))
          end

          sftp.rmdir!(remote_path)
        end
      end

      ##
      # Creates (if they don't exist yet) all the directories on the
      # remote server needed to upload the backup files. Net::SFTP can
      # not create a directory whose parent does not yet exist, so the
      # path is built up one segment at a time. Net::SFTP raises a
      # StatusException when the directory being created already
      # exists, so that is simply ignored.
      def create_remote_path(sftp)
        segments = []
        remote_path.split('/').each do |segment|
          segments << segment
          begin
            sftp.mkdir!(segments.join('/'))
          rescue Net::SFTP::StatusException
          end
        end
      end

    end
  end
end
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
# encoding: utf-8

module Backup
  module Syncer
    # Common behavior for all Syncers: the destination path, the
    # mirror flag, the directory/exclude DSL and logging helpers.
    class Base
      include Utilities::Helpers
      include Config::Helpers

      ##
      # Path to store the synced files/directories to
      attr_accessor :path

      ##
      # Flag for mirroring the files/directories
      attr_accessor :mirror

      ##
      # Optional user-defined identifier to differentiate multiple syncers
      # defined within a single backup model. Currently this is only used
      # in the log messages.
      attr_reader :syncer_id

      attr_reader :excludes

      def initialize(syncer_id = nil)
        @syncer_id = syncer_id

        load_defaults!

        @mirror ||= false
        @directories = []
        @excludes = []
      end

      ##
      # Syntactic sugar for the DSL for adding directories.
      # With no block, returns the directories added so far.
      def directories(&block)
        return @directories unless block_given?
        instance_eval(&block)
      end

      # Adds a directory to be synced.
      def add(path)
        directories << path
      end

      # For Cloud Syncers, +pattern+ can be a string (with shell-style
      # wildcards) or a regex.
      # For RSync, each +pattern+ will be passed to rsync's --exclude option.
      def exclude(pattern)
        excludes << pattern
      end

      private

      # Class name (minus the Backup:: prefix), plus the #syncer_id
      # when one was given. Used for log messages.
      def syncer_name
        @syncer_name ||= self.class.to_s.sub('Backup::', '') +
            (syncer_id ? " (#{ syncer_id })" : '')
      end

      # Logs the start/finish of a sync run.
      def log!(action)
        msg =
          case action
          when :started  then 'Started...'
          when :finished then 'Finished!'
          end
        Logger.info "#{ syncer_name } #{ msg }"
      end

    end
  end
end
|
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
# encoding: utf-8

module Backup
  module Syncer
    module Cloud
      class Error < Backup::Error; end

      # Base class for cloud syncers (S3, CloudFiles). Compares local
      # files against the remote store by MD5 and uploads changed files,
      # optionally in multiple threads. Subclasses must provide
      # #cloud_io and #get_remote_files.
      class Base < Syncer::Base
        # Guards the shared counters when uploads run in threads.
        MUTEX = Mutex.new

        ##
        # Number of threads to use for concurrency.
        #
        # Default: 0 (no concurrency)
        attr_accessor :thread_count

        ##
        # Number of times to retry failed operations.
        #
        # Default: 10
        attr_accessor :max_retries

        ##
        # Time in seconds to pause before each retry.
        #
        # Default: 30
        attr_accessor :retry_waitsec

        def initialize(syncer_id = nil, &block)
          super
          instance_eval(&block) if block_given?

          @thread_count ||= 0
          @max_retries ||= 10
          @retry_waitsec ||= 30

          @path ||= 'backups'
          # Remote paths must not begin with '/'.
          @path = path.sub(/^\//, '')
        end

        # Syncs each configured directory, then reports counts for
        # transferred, orphaned/deleted, unchanged and skipped files.
        def perform!
          log!(:started)
          @transfer_count = 0
          @unchanged_count = 0
          @skipped_count = 0
          # Use a thread-safe Queue for orphans when running threaded.
          @orphans = thread_count > 0 ? Queue.new : []

          directories.each {|dir| sync_directory(dir) }
          orphans_result = process_orphans

          Logger.info "\nSummary:"
          Logger.info "\s\sTransferred Files: #{ @transfer_count }"
          Logger.info "\s\s#{ orphans_result }"
          Logger.info "\s\sUnchanged Files: #{ @unchanged_count }"
          Logger.warn "\s\sSkipped Files: #{ @skipped_count }" if @skipped_count > 0
          log!(:finished)
        end

        private

        # Diffs the local directory against the remote store and syncs
        # every path present on either side.
        def sync_directory(dir)
          # Files are stored under path/<basename of the local dir>.
          remote_base = path.empty? ? File.basename(dir) :
                                      File.join(path, File.basename(dir))
          Logger.info "Gathering remote data for '#{ remote_base }'..."
          remote_files = get_remote_files(remote_base)

          Logger.info("Gathering local data for '#{ File.expand_path(dir) }'...")
          local_files = LocalFile.find(dir, excludes)

          # Union of both sides, so orphans (remote-only) are visited too.
          relative_paths = (local_files.keys | remote_files.keys).sort
          if relative_paths.empty?
            Logger.info 'No local or remote files found'
          else
            Logger.info 'Syncing...'
            sync_block = Proc.new do |relative_path|
              local_file = local_files[relative_path]
              remote_md5 = remote_files[relative_path]
              remote_path = File.join(remote_base, relative_path)
              sync_file(local_file, remote_path, remote_md5)
            end

            if thread_count > 0
              sync_in_threads(relative_paths, sync_block)
            else
              relative_paths.each(&sync_block)
            end
          end
        end

        # Runs sync_block over the paths using a pool of worker threads
        # that drain a shared queue.
        def sync_in_threads(relative_paths, sync_block)
          queue = Queue.new
          queue << relative_paths.shift until relative_paths.empty?
          num_threads = [thread_count, queue.size].min
          Logger.info "\s\sUsing #{ num_threads } Threads"
          threads = num_threads.times.map do
            Thread.new do
              loop do
                # Non-blocking shift; returns nil when the queue is empty.
                path = queue.shift(true) rescue nil
                path ? sync_block.call(path) : break
              end
            end
          end

          # abort if any thread raises an exception
          # (a nil Thread#status means the thread died with an exception)
          while threads.any?(&:alive?)
            if threads.any? {|thr| thr.status.nil? }
              threads.each(&:kill)
              Thread.pass while threads.any?(&:alive?)
              break
            end
            sleep num_threads * 0.1
          end
          # Joining re-raises the first dead thread's exception here.
          threads.each(&:join)
        end

        # If an exception is raised in multiple threads, only the exception
        # raised in the first thread that Thread#join is called on will be
        # handled. So all exceptions are logged first with their details,
        # then a generic exception is raised.
        def sync_file(local_file, remote_path, remote_md5)
          if local_file && File.exist?(local_file.path)
            if local_file.md5 == remote_md5
              MUTEX.synchronize { @unchanged_count += 1 }
            else
              Logger.info("\s\s[transferring] '#{ remote_path }'")
              begin
                cloud_io.upload(local_file.path, remote_path)
                MUTEX.synchronize { @transfer_count += 1 }
              rescue CloudIO::FileSizeError => err
                # File too large for the provider; skip it with a warning.
                MUTEX.synchronize { @skipped_count += 1 }
                Logger.warn Error.wrap(err, "Skipping '#{ remote_path }'")
              rescue => err
                Logger.error(err)
                raise Error, <<-EOS
                  Syncer Failed!
                  See the Retry [info] and [error] messages (if any)
                  for details on each failed operation.
                EOS
              end
            end
          elsif remote_md5
            # Remote file with no local counterpart.
            @orphans << remote_path
          end
        end

        # Deletes orphaned remote files when #mirror is set, otherwise
        # just logs them. Returns the summary line for perform!.
        def process_orphans
          if @orphans.empty?
            return mirror ? 'Deleted Files: 0' : 'Orphaned Files: 0'
          end

          # Drain the thread-safe Queue into a plain Array.
          if @orphans.is_a?(Queue)
            @orphans = @orphans.size.times.map { @orphans.shift }
          end

          if mirror
            Logger.info @orphans.map {|path|
              "\s\s[removing] '#{ path }'"
            }.join("\n")

            begin
              cloud_io.delete(@orphans)
              "Deleted Files: #{ @orphans.count }"
            rescue => err
              Logger.warn Error.wrap(err, 'Delete Operation Failed')
              "Attempted to Delete: #{ @orphans.count } " +
              "(See log messages for actual results)"
            end
          else
            Logger.info @orphans.map {|path|
              "\s\s[orphaned] '#{ path }'"
            }.join("\n")
            "Orphaned Files: #{ @orphans.count }"
          end
        end

      end
    end
  end
end
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
# encoding: utf-8
require 'backup/cloud_io/cloud_files'

module Backup
  module Syncer
    module Cloud
      # Syncs local directories to a Rackspace CloudFiles container.
      class CloudFiles < Base
        class Error < Backup::Error; end

        ##
        # Rackspace CloudFiles Credentials
        attr_accessor :username, :api_key

        ##
        # Rackspace CloudFiles Container
        attr_accessor :container

        ##
        # Rackspace AuthURL (optional)
        attr_accessor :auth_url

        ##
        # Rackspace Region (optional)
        attr_accessor :region

        ##
        # Rackspace Service Net
        # (LAN-based transfers to avoid charges and improve performance)
        attr_accessor :servicenet

        ##
        # Additional options to pass along to fog.
        # e.g. Fog::Storage.new({ :provider => 'Rackspace' }.merge(fog_options))
        attr_accessor :fog_options

        def initialize(syncer_id = nil)
          super

          @servicenet ||= false

          check_configuration
        end

        private

        # Memoized CloudIO client configured from this syncer's settings.
        def cloud_io
          @cloud_io ||= CloudIO::CloudFiles.new(
            :username           => username,
            :api_key            => api_key,
            :auth_url           => auth_url,
            :region             => region,
            :servicenet         => servicenet,
            :container          => container,
            :max_retries        => max_retries,
            :retry_waitsec      => retry_waitsec,
            # Syncer can not use SLOs.
            :segments_container => nil,
            :segment_size       => 0,
            :fog_options        => fog_options
          )
        end

        # Returns a hash mapping each object's path (relative to
        # +remote_base+) to its MD5 hash.
        def get_remote_files(remote_base)
          cloud_io.objects(remote_base).each_with_object({}) do |object, files|
            relative_path = object.name.sub(remote_base + '/', '')
            files[relative_path] = object.hash
          end
        end

        # Raises an Error unless all required settings are present.
        def check_configuration
          required = %w{ username api_key container }
          return unless required.map {|name| send(name) }.any?(&:nil?)

          raise Error, <<-EOS
            Configuration Error
            #{ required.map {|name| "##{ name }"}.join(', ') } are all required
          EOS
        end

      end # class CloudFiles < Base
    end # module Cloud
  end
end
|