backup 4.4.1 → 5.0.0.beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/LICENSE +19 -0
- data/README.md +1 -1
- data/lib/backup.rb +74 -78
- data/lib/backup/archive.rb +31 -32
- data/lib/backup/binder.rb +2 -6
- data/lib/backup/cleaner.rb +14 -18
- data/lib/backup/cli.rb +104 -108
- data/lib/backup/cloud_io/base.rb +4 -7
- data/lib/backup/cloud_io/cloud_files.rb +60 -62
- data/lib/backup/cloud_io/s3.rb +69 -76
- data/lib/backup/compressor/base.rb +4 -7
- data/lib/backup/compressor/bzip2.rb +3 -7
- data/lib/backup/compressor/custom.rb +2 -6
- data/lib/backup/compressor/gzip.rb +16 -17
- data/lib/backup/config.rb +17 -18
- data/lib/backup/config/dsl.rb +16 -17
- data/lib/backup/config/helpers.rb +10 -16
- data/lib/backup/database/base.rb +22 -21
- data/lib/backup/database/mongodb.rb +36 -37
- data/lib/backup/database/mysql.rb +40 -41
- data/lib/backup/database/openldap.rb +8 -10
- data/lib/backup/database/postgresql.rb +29 -30
- data/lib/backup/database/redis.rb +27 -30
- data/lib/backup/database/riak.rb +15 -18
- data/lib/backup/database/sqlite.rb +4 -6
- data/lib/backup/encryptor/base.rb +2 -4
- data/lib/backup/encryptor/gpg.rb +49 -59
- data/lib/backup/encryptor/open_ssl.rb +11 -14
- data/lib/backup/errors.rb +7 -12
- data/lib/backup/logger.rb +16 -18
- data/lib/backup/logger/console.rb +5 -8
- data/lib/backup/logger/fog_adapter.rb +2 -6
- data/lib/backup/logger/logfile.rb +10 -12
- data/lib/backup/logger/syslog.rb +2 -4
- data/lib/backup/model.rb +75 -40
- data/lib/backup/notifier/base.rb +24 -26
- data/lib/backup/notifier/campfire.rb +9 -11
- data/lib/backup/notifier/command.rb +0 -3
- data/lib/backup/notifier/datadog.rb +9 -12
- data/lib/backup/notifier/flowdock.rb +13 -17
- data/lib/backup/notifier/hipchat.rb +11 -13
- data/lib/backup/notifier/http_post.rb +11 -14
- data/lib/backup/notifier/mail.rb +44 -47
- data/lib/backup/notifier/nagios.rb +5 -9
- data/lib/backup/notifier/pagerduty.rb +10 -12
- data/lib/backup/notifier/prowl.rb +15 -15
- data/lib/backup/notifier/pushover.rb +7 -10
- data/lib/backup/notifier/ses.rb +34 -16
- data/lib/backup/notifier/slack.rb +39 -40
- data/lib/backup/notifier/twitter.rb +2 -5
- data/lib/backup/notifier/zabbix.rb +11 -14
- data/lib/backup/package.rb +5 -9
- data/lib/backup/packager.rb +16 -17
- data/lib/backup/pipeline.rb +17 -21
- data/lib/backup/splitter.rb +8 -11
- data/lib/backup/storage/base.rb +5 -8
- data/lib/backup/storage/cloud_files.rb +21 -23
- data/lib/backup/storage/cycler.rb +10 -15
- data/lib/backup/storage/dropbox.rb +15 -21
- data/lib/backup/storage/ftp.rb +8 -10
- data/lib/backup/storage/local.rb +5 -8
- data/lib/backup/storage/qiniu.rb +8 -8
- data/lib/backup/storage/rsync.rb +24 -26
- data/lib/backup/storage/s3.rb +27 -28
- data/lib/backup/storage/scp.rb +10 -12
- data/lib/backup/storage/sftp.rb +10 -12
- data/lib/backup/syncer/base.rb +5 -8
- data/lib/backup/syncer/cloud/base.rb +27 -30
- data/lib/backup/syncer/cloud/cloud_files.rb +16 -18
- data/lib/backup/syncer/cloud/local_file.rb +5 -8
- data/lib/backup/syncer/cloud/s3.rb +23 -24
- data/lib/backup/syncer/rsync/base.rb +6 -10
- data/lib/backup/syncer/rsync/local.rb +1 -5
- data/lib/backup/syncer/rsync/pull.rb +6 -10
- data/lib/backup/syncer/rsync/push.rb +18 -22
- data/lib/backup/template.rb +9 -14
- data/lib/backup/utilities.rb +82 -69
- data/lib/backup/version.rb +1 -3
- metadata +100 -660
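
Taken together, the rendered diffs below show that this beta is largely a mechanical style migration: `# encoding: utf-8` magic comments are dropped (UTF-8 has been the default since Ruby 2.0), single-quoted strings become double-quoted, hash rockets become Ruby 1.9+ keyword syntax, `%w{ }` word arrays become `%w[ ]`, and block braces gain inner spacing. A quick, runnable sketch of why rewrites of this shape are behavior-preserving (illustrative values, not taken from the gem):

# Each pair below is semantically identical, so diffs of this shape
# change style without changing behavior.
'backups' == "backups"                          #=> true  (quoting)
{ :port => 22 } == { port: 22 }                 #=> true  (hash syntax)
%w{ username api_key } == %w[username api_key]  #=> true  (word arrays)
[1, 2].map {|i| i } == [1, 2].map { |i| i }     #=> true  (block spacing)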
data/lib/backup/storage/sftp.rb
CHANGED
@@ -1,5 +1,4 @@
-# encoding: utf-8
-require 'net/sftp'
+require "net/sftp"
 
 module Backup
   module Storage
@@ -19,16 +18,16 @@ module Backup
 
         @ssh_options ||= {}
         @port ||= 22
-        @path ||= 'backups'
-        path.sub!(/^~\//, '')
+        @path ||= "backups"
+        path.sub!(/^~\//, "")
       end
 
       private
 
       def connection
         Net::SFTP.start(
-          ip, username, { :password => password, :port => port }.merge(ssh_options)
-        ) {|sftp| yield sftp }
+          ip, username, { password: password, port: port }.merge(ssh_options)
+        ) { |sftp| yield sftp }
       end
 
       def transfer!
@@ -38,7 +37,7 @@ module Backup
         package.filenames.each do |filename|
           src = File.join(Config.tmp_path, filename)
           dest = File.join(remote_path, filename)
-          Logger.info "Storing '#{ ip }:#{ dest }'..."
+          Logger.info "Storing '#{ip}:#{dest}'..."
           sftp.upload!(src, dest)
         end
       end
@@ -47,7 +46,7 @@ module Backup
       # Called by the Cycler.
       # Any error raised will be logged as a warning.
       def remove!(package)
-        Logger.info "Removing backup package dated #{ package.time }..."
+        Logger.info "Removing backup package dated #{package.time}..."
 
         remote_path = remote_path_for(package)
         connection do |sftp|
@@ -68,15 +67,14 @@ module Backup
       # Net::SFTP raises an exception when the directory it's trying to create
       # already exists, so we have rescue it
       def create_remote_path(sftp)
-        path_parts = Array.new
-        remote_path.split('/').each do |path_part|
+        path_parts = []
+        remote_path.split("/").each do |path_part|
           path_parts << path_part
           begin
-            sftp.mkdir!(path_parts.join('/'))
+            sftp.mkdir!(path_parts.join("/"))
           rescue Net::SFTP::StatusException; end
         end
       end
-
     end
   end
 end
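
The only structural change in #connection above is hash syntax: `password: password, port: port` is Ruby 1.9+ shorthand for the old `:password => password, :port => port`, so the options hash passed to Net::SFTP.start is unchanged. A standalone sketch with hypothetical values:

# Hypothetical values; shows the rewritten literal merges with
# ssh_options exactly as the 4.4.1 version did.
password = "secret"
port = 22
ssh_options = { compression: true }

old_style = { :password => password, :port => port }.merge(ssh_options)
new_style = { password: password, port: port }.merge(ssh_options)
old_style == new_style #=> true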
data/lib/backup/syncer/base.rb
CHANGED
@@ -1,5 +1,3 @@
-# encoding: utf-8
-
 module Backup
   module Syncer
     class Base
@@ -53,18 +51,17 @@ module Backup
       private
 
       def syncer_name
-        @syncer_name ||= self.class.to_s.sub('Backup::', '') +
-            (syncer_id ? " (#{ syncer_id })" : '')
+        @syncer_name ||= self.class.to_s.sub("Backup::", "") +
+          (syncer_id ? " (#{syncer_id})" : "")
       end
 
       def log!(action)
         msg = case action
-              when :started then 'Started...'
-              when :finished then 'Finished!'
+              when :started then "Started..."
+              when :finished then "Finished!"
               end
-        Logger.info "#{ syncer_name } #{ msg }"
+        Logger.info "#{syncer_name} #{msg}"
       end
-
     end
   end
 end
data/lib/backup/syncer/cloud/base.rb
CHANGED
@@ -1,5 +1,3 @@
-# encoding: utf-8
-
 module Backup
   module Syncer
     module Cloud
@@ -34,8 +32,8 @@ module Backup
         @max_retries ||= 10
         @retry_waitsec ||= 30
 
-        @path ||= 'backups'
-        @path = path.sub(/^\//, '')
+        @path ||= "backups"
+        @path = path.sub(/^\//, "")
       end
 
       def perform!
@@ -45,14 +43,14 @@ module Backup
         @skipped_count = 0
         @orphans = thread_count > 0 ? Queue.new : []
 
-        directories.each {|dir| sync_directory(dir) }
+        directories.each { |dir| sync_directory(dir) }
         orphans_result = process_orphans
 
         Logger.info "\nSummary:"
-        Logger.info "\s\sTransferred Files: #{ @transfer_count }"
-        Logger.info "\s\s#{ orphans_result }"
-        Logger.info "\s\sUnchanged Files: #{ @unchanged_count }"
-        Logger.warn "\s\sSkipped Files: #{ @skipped_count }" if @skipped_count > 0
+        Logger.info "\s\sTransferred Files: #{@transfer_count}"
+        Logger.info "\s\s#{orphans_result}"
+        Logger.info "\s\sUnchanged Files: #{@unchanged_count}"
+        Logger.warn "\s\sSkipped Files: #{@skipped_count}" if @skipped_count > 0
         log!(:finished)
       end
 
@@ -61,18 +59,18 @@ module Backup
       def sync_directory(dir)
         remote_base = path.empty? ? File.basename(dir) :
                                     File.join(path, File.basename(dir))
-        Logger.info "Gathering remote data for '#{ remote_base }'..."
+        Logger.info "Gathering remote data for '#{remote_base}'..."
         remote_files = get_remote_files(remote_base)
 
-        Logger.info("Gathering local data for '#{ File.expand_path(dir) }'...")
+        Logger.info("Gathering local data for '#{File.expand_path(dir)}'...")
         local_files = LocalFile.find(dir, excludes)
 
         relative_paths = (local_files.keys | remote_files.keys).sort
         if relative_paths.empty?
-          Logger.info 'No local or remote files found'
+          Logger.info "No local or remote files found"
         else
-          Logger.info 'Syncing...'
-          sync_block = Proc.new do |relative_path|
+          Logger.info "Syncing..."
+          sync_block = proc do |relative_path|
             local_file = local_files[relative_path]
             remote_md5 = remote_files[relative_path]
             remote_path = File.join(remote_base, relative_path)
@@ -91,8 +89,8 @@ module Backup
         queue = Queue.new
         queue << relative_paths.shift until relative_paths.empty?
         num_threads = [thread_count, queue.size].min
-        Logger.info "\s\sUsing #{ num_threads } Threads"
-        threads = num_threads.times.map do
+        Logger.info "\s\sUsing #{num_threads} Threads"
+        threads = Array.new(num_threads) do
           Thread.new do
             loop do
               path = queue.shift(true) rescue nil
@@ -103,7 +101,7 @@ module Backup
 
         # abort if any thread raises an exception
         while threads.any?(&:alive?)
-          if threads.any? {|thr| thr.status.nil? }
+          if threads.any? { |thr| thr.status.nil? }
            threads.each(&:kill)
            Thread.pass while threads.any?(&:alive?)
            break
@@ -122,13 +120,13 @@ module Backup
         if local_file.md5 == remote_md5
           MUTEX.synchronize { @unchanged_count += 1 }
         else
-          Logger.info("\s\s[transferring] '#{ remote_path }'")
+          Logger.info("\s\s[transferring] '#{remote_path}'")
           begin
             cloud_io.upload(local_file.path, remote_path)
             MUTEX.synchronize { @transfer_count += 1 }
           rescue CloudIO::FileSizeError => err
             MUTEX.synchronize { @skipped_count += 1 }
-            Logger.warn Error.wrap(err, "Skipping '#{ remote_path }'")
+            Logger.warn Error.wrap(err, "Skipping '#{remote_path}'")
           rescue => err
             Logger.error(err)
             raise Error, <<-EOS
@@ -145,34 +143,33 @@ module Backup
 
       def process_orphans
         if @orphans.empty?
-          return mirror ? 'Deleted Files: 0' : 'Orphaned Files: 0'
+          return mirror ? "Deleted Files: 0" : "Orphaned Files: 0"
         end
 
         if @orphans.is_a?(Queue)
-          @orphans = @orphans.size.times.map { @orphans.shift }
+          @orphans = Array.new(@orphans.size) { @orphans.shift }
         end
 
         if mirror
-          Logger.info @orphans.map {|path|
-            "\s\s[removing] '#{ path }'"
+          Logger.info @orphans.map { |path|
+            "\s\s[removing] '#{path}'"
           }.join("\n")
 
           begin
             cloud_io.delete(@orphans)
-            "Deleted Files: #{ @orphans.count }"
+            "Deleted Files: #{@orphans.count}"
           rescue => err
-            Logger.warn Error.wrap(err, 'Delete Operation Failed')
-            "Attempted to Delete: #{ @orphans.count } " +
+            Logger.warn Error.wrap(err, "Delete Operation Failed")
+            "Attempted to Delete: #{@orphans.count} " \
             "(See log messages for actual results)"
           end
         else
-          Logger.info @orphans.map {|path|
-            "\s\s[orphaned] '#{ path }'"
+          Logger.info @orphans.map { |path|
+            "\s\s[orphaned] '#{path}'"
           }.join("\n")
-          "Orphaned Files: #{ @orphans.count }"
+          "Orphaned Files: #{@orphans.count}"
         end
       end
-
     end
   end
 end
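
Two changes in this file go beyond quoting: `proc do ... end` replaces `Proc.new do ... end` (equivalent here, both create non-lambda Procs), and `Array.new(@orphans.size) { @orphans.shift }` replaces the old `times.map` idiom for draining the thread-safe Queue of orphaned paths into a plain Array that can be mapped, counted, and passed to `cloud_io.delete`. A standalone sketch of the draining idiom (names assumed; Queue is available without a require on modern MRI):

# Queue#shift blocks when the queue is empty, so the Array is sized
# from queue.size first and each element is shifted exactly once.
queue = Queue.new
%w[a b c].each { |path| queue << path }

orphans = Array.new(queue.size) { queue.shift }
orphans       #=> ["a", "b", "c"]
orphans.count #=> 3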
data/lib/backup/syncer/cloud/cloud_files.rb
CHANGED
@@ -1,5 +1,4 @@
-# encoding: utf-8
-require 'backup/cloud_io/cloud_files'
+require "backup/cloud_io/cloud_files"
 
 module Backup
   module Syncer
@@ -45,38 +44,37 @@ module Backup
 
       def cloud_io
         @cloud_io ||= CloudIO::CloudFiles.new(
-          :username           => username,
-          :api_key            => api_key,
-          :auth_url           => auth_url,
-          :region             => region,
-          :servicenet         => servicenet,
-          :container          => container,
-          :max_retries        => max_retries,
-          :retry_waitsec      => retry_waitsec,
+          username: username,
+          api_key: api_key,
+          auth_url: auth_url,
+          region: region,
+          servicenet: servicenet,
+          container: container,
+          max_retries: max_retries,
+          retry_waitsec: retry_waitsec,
           # Syncer can not use SLOs.
-          :segments_container => nil,
-          :segment_size       => 0,
-          :fog_options        => fog_options
+          segments_container: nil,
+          segment_size: 0,
+          fog_options: fog_options
        )
      end
 
      def get_remote_files(remote_base)
        hash = {}
        cloud_io.objects(remote_base).each do |object|
-          relative_path = object.name.sub(remote_base + '/', '')
+          relative_path = object.name.sub(remote_base + "/", "")
          hash[relative_path] = object.hash
        end
        hash
      end
 
      def check_configuration
-        required = %w{ username api_key container }
-        raise Error, <<-EOS if required.map {|name| send(name) }.any?(&:nil?)
+        required = %w[username api_key container]
+        raise Error, <<-EOS if required.map { |name| send(name) }.any?(&:nil?)
          Configuration Error
-          #{ required.map {|name| "##{ name }" }.join(', ') } are all required
+          #{required.map { |name| "##{name}" }.join(", ")} are all required
        EOS
      end
-
    end # class Cloudfiles < Base
  end # module Cloud
 end
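
The `check_configuration` idiom above (shared by the S3 syncer below) looks up each required accessor by name with `send` and raises when any is still nil. A self-contained sketch of the pattern (hypothetical class, with ArgumentError standing in for the gem's Error):

class Example
  attr_accessor :username, :api_key, :container

  def check_configuration
    required = %w[username api_key container]
    return unless required.map { |name| send(name) }.any?(&:nil?)
    raise ArgumentError,
      "#{required.map { |name| "##{name}" }.join(", ")} are all required"
  end
end

ex = Example.new
ex.username = "user"
ex.check_configuration # raises: #username, #api_key, #container are all required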
data/lib/backup/syncer/cloud/local_file.rb
CHANGED
@@ -1,5 +1,4 @@
-# encoding: utf-8
-require 'digest/md5'
+require "digest/md5"
 
 module Backup
   module Syncer
@@ -9,7 +8,6 @@ module Backup
       attr_accessor :md5
 
       class << self
-
         # Returns a Hash of LocalFile objects for each file within +dir+,
         # except those matching any of the +excludes+.
         # Hash keys are the file's path relative to +dir+.
@@ -17,7 +15,7 @@ module Backup
          dir = File.expand_path(dir)
          hash = {}
          find_md5(dir, excludes).each do |file|
-            hash[file.path.sub(dir + '/', '')] = file
+            hash[file.path.sub(dir + "/", "")] = file
          end
          hash
        end
@@ -27,7 +25,7 @@ module Backup
        def new(*args)
          file = super
          if file.invalid?
-            Logger.warn("\s\s[skipping] #{ file.path }\n" +
+            Logger.warn("\s\s[skipping] #{file.path}\n" \
              "\s\sPath Contains Invalid UTF-8 byte sequences")
            file = nil
          end
@@ -39,7 +37,7 @@ module Backup
        # Returns an Array of file paths and their md5 hashes.
        def find_md5(dir, excludes)
          found = []
-          (Dir.entries(dir) - %w{. ..}).map {|e| File.join(dir, e) }.each do |path|
+          (Dir.entries(dir) - %w[. ..]).map { |e| File.join(dir, e) }.each do |path|
            if File.directory?(path)
              unless exclude?(excludes, path)
                found += find_md5(path, excludes)
@@ -85,7 +83,7 @@ module Backup
      def sanitize(str)
        str.each_char.map do |char|
          begin
-            char.unpack('U')
+            char.unpack("U")
            char
          rescue
            @invalid = true
@@ -93,7 +91,6 @@ module Backup
          end
        end.join
      end
-
    end
  end
 end
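
`LocalFile` flags a path as invalid by attempting to unpack each character as UTF-8: `String#unpack("U")` raises `ArgumentError` on a malformed byte sequence, which trips the rescue above and sets `@invalid`. A quick sketch of that behavior:

"é".unpack("U")      #=> [233] (valid UTF-8 codepoint)
begin
  "\xFF".unpack("U") # invalid UTF-8 byte
rescue ArgumentError => e
  e.message          #=> "malformed UTF-8 character"
end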
data/lib/backup/syncer/cloud/s3.rb
CHANGED
@@ -1,5 +1,4 @@
-# encoding: utf-8
-require 'backup/cloud_io/s3'
+require "backup/cloud_io/s3"
 
 module Backup
   module Syncer
@@ -57,53 +56,53 @@ module Backup
 
      def cloud_io
        @cloud_io ||= CloudIO::S3.new(
-          :access_key_id     => access_key_id,
-          :secret_access_key => secret_access_key,
-          :use_iam_profile   => use_iam_profile,
-          :bucket            => bucket,
-          :region            => region,
-          :encryption        => encryption,
-          :storage_class     => storage_class,
-          :max_retries       => max_retries,
-          :retry_waitsec     => retry_waitsec,
+          access_key_id: access_key_id,
+          secret_access_key: secret_access_key,
+          use_iam_profile: use_iam_profile,
+          bucket: bucket,
+          region: region,
+          encryption: encryption,
+          storage_class: storage_class,
+          max_retries: max_retries,
+          retry_waitsec: retry_waitsec,
          # Syncer can not use multipart upload.
-          :chunk_size        => 0,
-          :fog_options       => fog_options
+          chunk_size: 0,
+          fog_options: fog_options
        )
      end
 
      def get_remote_files(remote_base)
        hash = {}
        cloud_io.objects(remote_base).each do |object|
-          relative_path = object.key.sub(remote_base + '/', '')
+          relative_path = object.key.sub(remote_base + "/", "")
          hash[relative_path] = object.etag
        end
        hash
      end
 
      def check_configuration
-        if use_iam_profile
-          required = %w{ bucket }
-        else
-          required = %w{ access_key_id secret_access_key bucket }
-        end
-        raise Error, <<-EOS if required.map {|name| send(name) }.any?(&:nil?)
+        required =
+          if use_iam_profile
+            %w[bucket]
+          else
+            %w[access_key_id secret_access_key bucket]
+          end
+        raise Error, <<-EOS if required.map { |name| send(name) }.any?(&:nil?)
          Configuration Error
-          #{ required.map {|name| "##{ name }" }.join(', ') } are all required
+          #{required.map { |name| "##{name}" }.join(", ")} are all required
        EOS
 
-        raise Error, <<-EOS if encryption && encryption.to_s.upcase != 'AES256'
+        raise Error, <<-EOS if encryption && encryption.to_s.upcase != "AES256"
          Configuration Error
          #encryption must be :aes256 or nil
        EOS
 
-        classes = ['STANDARD', 'REDUCED_REDUNDANCY']
+        classes = ["STANDARD", "REDUCED_REDUNDANCY"]
        raise Error, <<-EOS unless classes.include?(storage_class.to_s.upcase)
          Configuration Error
          #storage_class must be :standard or :reduced_redundancy
        EOS
      end
-
    end # Class S3 < Base
  end # module Cloud
 end
data/lib/backup/syncer/rsync/base.rb
CHANGED
@@ -1,10 +1,7 @@
-# encoding: utf-8
-
 module Backup
   module Syncer
     module RSync
       class Base < Syncer::Base
-
         ##
         # Additional String or Array of options for the rsync cli
         attr_accessor :additional_rsync_options
@@ -14,7 +11,7 @@ module Backup
          super
          instance_eval(&block) if block_given?
 
-          @path ||= '~/backups'
+          @path ||= "~/backups"
          @archive = @archive.nil? ? true : @archive
        end
 
@@ -24,19 +21,19 @@ module Backup
        # Common base command for Local/Push/Pull
        def rsync_command
          utility(:rsync) << archive_option << mirror_option << exclude_option <<
-            " #{ Array(additional_rsync_options).join(' ') }".rstrip
+            " #{Array(additional_rsync_options).join(" ")}".rstrip
        end
 
        def mirror_option
-          mirror ? ' --delete' : ''
+          mirror ? " --delete" : ""
        end
 
        def archive_option
-          archive ? ' --archive' : ''
+          archive ? " --archive" : ""
        end
 
        def exclude_option
-          excludes.map {|pattern| " --exclude='#{ pattern }'" }.join
+          excludes.map { |pattern| " --exclude='#{pattern}'" }.join
        end
 
        ##
@@ -45,9 +42,8 @@ module Backup
        # each path, as we don't want rsync's "trailing / on source directories"
        # behavior. This method is used by RSync::Local and RSync::Push.
        def paths_to_push
-          directories.map {|dir| "'#{ File.expand_path(dir) }'" }.join(' ')
+          directories.map { |dir| "'#{File.expand_path(dir)}'" }.join(" ")
        end
-
      end
    end
  end
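
For reference, `rsync_command` above just concatenates option fragments onto the rsync utility path. A stubbed, runnable sketch of the assembly (`utility` is replaced by a stand-in and the attribute values are assumed):

# Stand-in for the gem's utility(:rsync), which resolves the rsync binary path.
def utility(_name)
  "rsync".dup
end

archive = true
mirror = false
excludes = ["tmp/*"]
additional_rsync_options = ["--sparse"]

command = utility(:rsync) <<
  (archive ? " --archive" : "") <<
  (mirror ? " --delete" : "") <<
  excludes.map { |pattern| " --exclude='#{pattern}'" }.join <<
  " #{Array(additional_rsync_options).join(" ")}".rstrip
command #=> "rsync --archive --exclude='tmp/*' --sparse"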