backup 3.6.0 → 3.7.0
- checksums.yaml +4 -4
- data/README.md +2 -0
- data/lib/backup.rb +14 -4
- data/lib/backup/archive.rb +3 -2
- data/lib/backup/cleaner.rb +4 -2
- data/lib/backup/cli.rb +7 -5
- data/lib/backup/cloud_io/base.rb +41 -0
- data/lib/backup/cloud_io/cloud_files.rb +296 -0
- data/lib/backup/cloud_io/s3.rb +252 -0
- data/lib/backup/compressor/gzip.rb +2 -1
- data/lib/backup/config.rb +13 -5
- data/lib/backup/configuration.rb +1 -1
- data/lib/backup/configuration/helpers.rb +3 -1
- data/lib/backup/database/base.rb +3 -1
- data/lib/backup/database/mongodb.rb +2 -2
- data/lib/backup/database/mysql.rb +2 -2
- data/lib/backup/database/postgresql.rb +12 -2
- data/lib/backup/database/redis.rb +3 -2
- data/lib/backup/encryptor/gpg.rb +8 -10
- data/lib/backup/errors.rb +39 -70
- data/lib/backup/logger.rb +7 -2
- data/lib/backup/logger/fog_adapter.rb +30 -0
- data/lib/backup/model.rb +32 -14
- data/lib/backup/notifier/base.rb +4 -3
- data/lib/backup/notifier/campfire.rb +0 -1
- data/lib/backup/notifier/http_post.rb +122 -0
- data/lib/backup/notifier/mail.rb +38 -0
- data/lib/backup/notifier/nagios.rb +69 -0
- data/lib/backup/notifier/prowl.rb +0 -1
- data/lib/backup/notifier/pushover.rb +0 -1
- data/lib/backup/package.rb +5 -0
- data/lib/backup/packager.rb +3 -2
- data/lib/backup/pipeline.rb +4 -2
- data/lib/backup/storage/base.rb +2 -1
- data/lib/backup/storage/cloud_files.rb +151 -0
- data/lib/backup/storage/cycler.rb +4 -2
- data/lib/backup/storage/dropbox.rb +20 -16
- data/lib/backup/storage/ftp.rb +1 -2
- data/lib/backup/storage/local.rb +3 -3
- data/lib/backup/storage/ninefold.rb +3 -4
- data/lib/backup/storage/rsync.rb +1 -2
- data/lib/backup/storage/s3.rb +49 -158
- data/lib/backup/storage/scp.rb +3 -4
- data/lib/backup/storage/sftp.rb +1 -2
- data/lib/backup/syncer/base.rb +0 -1
- data/lib/backup/syncer/cloud/base.rb +129 -208
- data/lib/backup/syncer/cloud/cloud_files.rb +56 -41
- data/lib/backup/syncer/cloud/local_file.rb +93 -0
- data/lib/backup/syncer/cloud/s3.rb +78 -31
- data/lib/backup/syncer/rsync/base.rb +7 -0
- data/lib/backup/syncer/rsync/local.rb +0 -5
- data/lib/backup/syncer/rsync/push.rb +1 -2
- data/lib/backup/utilities.rb +18 -15
- data/lib/backup/version.rb +1 -1
- data/templates/cli/notifier/http_post +35 -0
- data/templates/cli/notifier/nagios +13 -0
- data/templates/cli/storage/cloud_files +8 -17
- data/templates/cli/storage/s3 +3 -10
- data/templates/cli/syncer/cloud_files +3 -31
- data/templates/cli/syncer/s3 +3 -27
- data/templates/notifier/mail/failure.erb +6 -1
- data/templates/notifier/mail/success.erb +6 -1
- data/templates/notifier/mail/warning.erb +6 -1
- metadata +37 -42
- data/lib/backup/storage/cloudfiles.rb +0 -68
data/lib/backup/syncer/cloud/local_file.rb
ADDED
@@ -0,0 +1,93 @@
+# encoding: utf-8
+
+module Backup
+  module Syncer
+    module Cloud
+      class LocalFile
+        attr_reader :path, :md5
+
+        class << self
+          include Utilities::Helpers
+
+          # Returns a Hash of LocalFile objects for each file within +dir+.
+          # Hash keys are the file's path relative to +dir+.
+          def find(dir)
+            dir = File.expand_path(dir)
+            hash = {}
+            find_md5(dir).each do |path, md5|
+              file = new(path, md5)
+              hash[path.sub(dir + '/', '')] = file if file
+            end
+            hash
+          end
+
+          # Return a new LocalFile object if it's valid.
+          # Otherwise, log a warning and return nil.
+          def new(*args)
+            file = super
+            if file.invalid?
+              Logger.warn("\s\s[skipping] #{ file.path }\n" +
+                  "\s\sPath Contains Invalid UTF-8 byte sequences")
+              file = nil
+            end
+            file
+          end
+
+          private
+
+          # Returns an Array of file paths and their md5 hashes.
+          #
+          # Lines output from `cmd` are formatted like:
+          # MD5(/dir/subdir/file)= 7eaabd1f53024270347800d0fdb34357
+          # However, if +dir+ is empty, the following is returned:
+          # (stdin)= d41d8cd98f00b204e9800998ecf8427e
+          # Which extracts as: ['in', 'd41d8cd98f00b204e9800998ecf8427e']
+          # I'm not sure I can rely on the fact this doesn't begin with 'MD5',
+          # so I'll reject entries with a path that doesn't start with +dir+.
+          #
+          # String#slice avoids `invalid byte sequence in UTF-8` errors
+          # that String#split would raise.
+          #
+          # Utilities#run is not used here because this would produce too much
+          # log output, and Pipeline does not support capturing output.
+          def find_md5(dir)
+            cmd = "#{ utility(:find) } -L '#{ dir }' -type f -print0 | " +
+                "#{ utility(:xargs) } -0 #{ utility(:openssl) } md5 2> /dev/null"
+            %x[#{ cmd }].lines.map do |line|
+              line.chomp!
+              entry = [line.slice(4..-36), line.slice(-32..-1)]
+              entry[0].to_s.start_with?(dir) ? entry : nil
+            end.compact
+          end
+        end
+
+        # If +path+ contains invalid UTF-8, it will be sanitized
+        # and the LocalFile object will be flagged as invalid.
+        # This is done so @file.path may be logged.
+        def initialize(path, md5)
+          @path = sanitize(path)
+          @md5 = md5
+        end
+
+        def invalid?
+          !!@invalid
+        end
+
+        private
+
+        def sanitize(str)
+          str.each_char.map do |char|
+            begin
+              char.unpack('U')
+              char
+            rescue
+              @invalid = true
+              "\xEF\xBF\xBD" # => "\uFFFD"
+            end
+          end.join
+        end
+
+      end
+    end
+  end
+end
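As a standalone illustration of the slicing in find_md5 above (not part of the gem), the fixed-width `MD5(...)= <digest>` output of `openssl md5` lets the path and digest be cut out by position:

  line = "MD5(/dir/subdir/file)= 7eaabd1f53024270347800d0fdb34357"
  path = line.slice(4..-36)   # => "/dir/subdir/file"  (skips "MD5(" and drops ")= " plus the 32-char digest)
  md5  = line.slice(-32..-1)  # => "7eaabd1f53024270347800d0fdb34357"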
data/lib/backup/syncer/cloud/s3.rb
CHANGED
@@ -1,67 +1,114 @@
 # encoding: utf-8
+require 'backup/cloud_io/s3'
 
 module Backup
   module Syncer
     module Cloud
       class S3 < Base
+        class Error < Backup::Error; end
 
         ##
         # Amazon Simple Storage Service (S3) Credentials
         attr_accessor :access_key_id, :secret_access_key
 
         ##
-        #
+        # Amazon S3 bucket name
         attr_accessor :bucket
 
         ##
-        #
+        # Region of the specified S3 bucket
         attr_accessor :region
 
         ##
-        #
+        # Encryption algorithm to use for Amazon Server-Side Encryption
         #
-        #
-        # Configuration::Syncer::Cloud::S3
-        # are set via a super() call to Cloud::Base,
-        # which in turn will invoke Syncer::Base.
+        # Supported values:
         #
-        #
-        #
-
+        # - :aes256
+        #
+        # Default: nil
+        attr_accessor :encryption
+
+        ##
+        # Storage class to use for the S3 objects uploaded
+        #
+        # Supported values:
+        #
+        # - :standard (default)
+        # - :reduced_redundancy
+        #
+        # Default: :standard
+        attr_accessor :storage_class
+
+        def initialize(syncer_id = nil)
           super
 
-
-
+          @storage_class ||= :standard
+
+          check_configuration
         end
 
         private
 
-
-
-
-
-
-
-
-
+        def cloud_io
+          @cloud_io ||= CloudIO::S3.new(
+            :access_key_id => access_key_id,
+            :secret_access_key => secret_access_key,
+            :bucket => bucket,
+            :region => region,
+            :encryption => encryption,
+            :storage_class => storage_class,
+            :max_retries => max_retries,
+            :retry_waitsec => retry_waitsec,
+            # Syncer can not use multipart upload.
+            :chunk_size => 0
           )
         end
 
-
-
-
-
-
-
-
+        def get_remote_files(remote_base)
+          hash = {}
+          cloud_io.objects(remote_base).each do |object|
+            relative_path = object.key.sub(remote_base + '/', '')
+            hash[relative_path] = object.etag
+          end
+          hash
         end
 
-
-
-
-
+        def check_configuration
+          required = %w{ access_key_id secret_access_key bucket }
+          raise Error, <<-EOS if required.map {|name| send(name) }.any?(&:nil?)
+            Configuration Error
+            #{ required.map {|name| "##{ name }"}.join(', ') } are all required
+          EOS
+
+          raise Error, <<-EOS if encryption && encryption.to_s.upcase != 'AES256'
+            Configuration Error
+            #encryption must be :aes256 or nil
+          EOS
+
+          classes = ['STANDARD', 'REDUCED_REDUNDANCY']
+          raise Error, <<-EOS unless classes.include?(storage_class.to_s.upcase)
+            Configuration Error
+            #storage_class must be :standard or :reduced_redundancy
+          EOS
         end
 
+        attr_deprecate :concurrency_type, :version => '3.7.0',
+            :message => 'Use #thread_count instead.',
+            :action => lambda {|klass, val|
+              if val == :threads
+                klass.thread_count = 2 unless klass.thread_count
+              else
+                klass.thread_count = 0
+              end
+            }
+
+        attr_deprecate :concurrency_level, :version => '3.7.0',
+            :message => 'Use #thread_count instead.',
+            :action => lambda {|klass, val|
+              klass.thread_count = val unless klass.thread_count == 0
+            }
+
       end # Class S3 < Base
     end # module Cloud
   end
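For reference, a configuration sketch using the options introduced above. The attribute names come from this diff; the surrounding model DSL and all values are illustrative placeholders:

  sync_with Cloud::S3 do |s3|
    s3.access_key_id     = 'my_access_key_id'
    s3.secret_access_key = 'my_secret_access_key'
    s3.bucket            = 'my_bucket'
    s3.region            = 'us-east-1'
    s3.path              = '/backups'
    s3.encryption        = :aes256              # Server-Side Encryption; leave nil to disable
    s3.storage_class     = :reduced_redundancy  # or :standard (the default)
    s3.thread_count      = 2                    # replaces #concurrency_type / #concurrency_level

    s3.directories do |directory|
      directory.add '/path/to/directory/to/sync'
    end
  end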
data/lib/backup/syncer/rsync/base.rb
CHANGED
@@ -9,6 +9,13 @@ module Backup
         # Additional String or Array of options for the rsync cli
         attr_accessor :additional_rsync_options
 
+        def initialize(syncer_id = nil, &block)
+          super
+          instance_eval(&block) if block_given?
+
+          @path ||= '~/backups'
+        end
+
         private
 
         ##
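With this change an RSync syncer's #path may be omitted and falls back to '~/backups'. A minimal sketch, assuming the RSync::Local syncer from the file list above and its usual mirror/directories options:

  sync_with RSync::Local do |rsync|
    # rsync.path is omitted and defaults to '~/backups'
    rsync.mirror = true
    rsync.directories do |directory|
      directory.add '/var/www'
    end
  end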
data/lib/backup/syncer/rsync/push.rb
CHANGED
@@ -101,9 +101,8 @@ module Backup
         # Flag for compressing (only compresses for the transfer)
         attr_accessor :compress
 
-        def initialize(syncer_id = nil, &block)
+        def initialize(syncer_id = nil)
           super
-          instance_eval(&block) if block_given?
 
           @mode ||= :ssh
           @port ||= mode == :rsync_daemon ? 873 : 22
data/lib/backup/utilities.rb
CHANGED
@@ -2,14 +2,17 @@
 
 module Backup
   module Utilities
+    class Error < Backup::Error; end
+
     UTILITY = {}
     NAMES = %w{
-      tar cat split find xargs sudo chown
+      tar cat split find xargs sudo chown hostname
      gzip bzip2 lzma pbzip2
      mongo mongodump mysqldump pg_dump pg_dumpall redis-cli riak-admin
      gpg openssl
      rsync ssh
      sendmail exim
+      send_nsca
     }
 
     module DSL
@@ -23,7 +26,7 @@ module Backup
           define_method name.gsub('-', '_'), lambda {|val|
             path = File.expand_path(val)
             unless File.executable?(path)
-              raise
+              raise Utilities::Error, <<-EOS
                 The path given for '#{ name }' was not found or not executable.
                 Path was: #{ path }
               EOS
@@ -58,14 +61,15 @@ module Backup
     #
     # Backup::Utilities.configure do
     #   # General Utilites
-    #   tar
+    #   tar '/path/to/tar'
     #   tar_dist :gnu # or :bsd
-    #   cat
-    #   split
-    #   find
-    #   xargs
-    #   sudo
-    #   chown
+    #   cat '/path/to/cat'
+    #   split '/path/to/split'
+    #   find '/path/to/find'
+    #   xargs '/path/to/xargs'
+    #   sudo '/path/to/sudo'
+    #   chown '/path/to/chown'
+    #   hostname '/path/to/hostname'
     #
     #   # Compressors
     #   gzip '/path/to/gzip'
@@ -93,6 +97,7 @@ module Backup
     #   # Notifiers
     #   sendmail '/path/to/sendmail'
     #   exim '/path/to/exim'
+    #   send_nsca '/path/to/send_nsca'
     # end
     #
     # These paths may be set using absolute paths, or relative to the
@@ -129,11 +134,10 @@ module Backup
    # Raises an error if utility can not be found in the system's $PATH
    def utility(name)
      name = name.to_s.strip
-      raise
-          'Utility Name Empty' if name.empty?
+      raise Error, 'Utility Name Empty' if name.empty?
 
      UTILITY[name] ||= %x[which '#{ name }' 2>/dev/null].chomp
-      raise
+      raise Error, <<-EOS if UTILITY[name].empty?
        Could not locate '#{ name }'.
        Make sure the specified utility is installed
        and available in your system's $PATH, or specify it's location
@@ -190,8 +194,7 @@ module Backup
        out, err = stdout.read.strip, stderr.read.strip
      end
    rescue Exception => e
-      raise
-          e, "Failed to execute '#{ name }'")
+      raise Error.wrap(e, "Failed to execute '#{ name }'")
    ensure
      GC.enable if RUBY_VERSION < '1.9'
    end
@@ -211,7 +214,7 @@ module Backup
 
        return out
      else
-        raise
+        raise Error, <<-EOS
          '#{ name }' failed with exit status: #{ ps.exitstatus }
          STDOUT Messages: #{ out.empty? ? 'None' : "\n#{ out }" }
          STDERR Messages: #{ err.empty? ? 'None' : "\n#{ err }" }
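Taken together, the new hostname and send_nsca entries can be pointed at custom locations the same way as the existing utilities, following the configure example documented in the diff above (paths are placeholders):

  Backup::Utilities.configure do
    hostname  '/usr/bin/hostname'
    send_nsca '/usr/sbin/send_nsca'
  end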
data/lib/backup/version.rb
CHANGED
data/templates/cli/notifier/http_post
ADDED
@@ -0,0 +1,35 @@
+  ##
+  # HttpPost [Notifier]
+  #
+  # For details, see:
+  # https://github.com/meskyanichi/backup/wiki/Notifiers
+  #
+  notify_by HttpPost do |post|
+    post.on_success = true
+    post.on_warning = true
+    post.on_failure = true
+
+    # URI to post the notification to.
+    # Port may be specified if needed.
+    # If Basic Authentication is required, supply user:pass.
+    post.uri = 'https://user:pass@your.domain.com:8443/path'
+
+    ##
+    # Optional
+    #
+    # Additional headers to send.
+    # post.headers = { 'Authentication' => 'my_auth_info' }
+    #
+    # Additional form params to post.
+    # post.params = { 'auth_token' => 'my_token' }
+    #
+    # Successful response codes. Default: 200
+    # post.success_codes = [200, 201, 204]
+    #
+    # Defaults to true on most systems.
+    # Force with +true+, disable with +false+
+    # post.ssl_verify_peer = false
+    #
+    # Supplied by default. Override with a custom 'cacert.pem' file.
+    # post.ssl_ca_file = '/my/cacert.pem'
+  end
data/templates/cli/notifier/nagios
ADDED
@@ -0,0 +1,13 @@
+  ##
+  # Nagios [Notifier]
+  #
+  notify_by Nagios do |nagios|
+    nagios.on_success = true
+    nagios.on_warning = true
+    nagios.on_failure = true
+
+    nagios.nagios_host = 'nagioshost'
+    nagios.nagios_port = 5667
+    nagios.service_name = 'My Backup'
+    nagios.service_host = 'backuphost'
+  end
data/templates/cli/storage/cloud_files
CHANGED
@@ -1,22 +1,13 @@
   ##
   # Rackspace Cloud Files [Storage]
   #
-  #
-  #
-  # - https://auth.api.rackspacecloud.com (US - Default)
-  # - https://lon.auth.api.rackspacecloud.com (UK)
-  #
-  # Servicenet:
-  #
-  # Set this to 'true' if Backup runs on a Rackspace server. It will avoid
-  # transfer charges and it's more performant.
-  #
+  # See the documentation on the Wiki for details.
+  # https://github.com/meskyanichi/backup/wiki/Storages
   store_with CloudFiles do |cf|
-    cf.api_key
-    cf.username
-    cf.container
-    cf.
-    cf.
-    cf.
-    cf.servicenet = false
+    cf.api_key = 'my_api_key'
+    cf.username = 'my_username'
+    cf.container = 'my_container'
+    cf.segments_container = 'my_segments_container' # must be different than `container`
+    cf.segment_size = 5 # MiB
+    cf.path = 'path/to/backups' # path within the container
   end