backup 3.6.0 → 3.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +2 -0
- data/lib/backup.rb +14 -4
- data/lib/backup/archive.rb +3 -2
- data/lib/backup/cleaner.rb +4 -2
- data/lib/backup/cli.rb +7 -5
- data/lib/backup/cloud_io/base.rb +41 -0
- data/lib/backup/cloud_io/cloud_files.rb +296 -0
- data/lib/backup/cloud_io/s3.rb +252 -0
- data/lib/backup/compressor/gzip.rb +2 -1
- data/lib/backup/config.rb +13 -5
- data/lib/backup/configuration.rb +1 -1
- data/lib/backup/configuration/helpers.rb +3 -1
- data/lib/backup/database/base.rb +3 -1
- data/lib/backup/database/mongodb.rb +2 -2
- data/lib/backup/database/mysql.rb +2 -2
- data/lib/backup/database/postgresql.rb +12 -2
- data/lib/backup/database/redis.rb +3 -2
- data/lib/backup/encryptor/gpg.rb +8 -10
- data/lib/backup/errors.rb +39 -70
- data/lib/backup/logger.rb +7 -2
- data/lib/backup/logger/fog_adapter.rb +30 -0
- data/lib/backup/model.rb +32 -14
- data/lib/backup/notifier/base.rb +4 -3
- data/lib/backup/notifier/campfire.rb +0 -1
- data/lib/backup/notifier/http_post.rb +122 -0
- data/lib/backup/notifier/mail.rb +38 -0
- data/lib/backup/notifier/nagios.rb +69 -0
- data/lib/backup/notifier/prowl.rb +0 -1
- data/lib/backup/notifier/pushover.rb +0 -1
- data/lib/backup/package.rb +5 -0
- data/lib/backup/packager.rb +3 -2
- data/lib/backup/pipeline.rb +4 -2
- data/lib/backup/storage/base.rb +2 -1
- data/lib/backup/storage/cloud_files.rb +151 -0
- data/lib/backup/storage/cycler.rb +4 -2
- data/lib/backup/storage/dropbox.rb +20 -16
- data/lib/backup/storage/ftp.rb +1 -2
- data/lib/backup/storage/local.rb +3 -3
- data/lib/backup/storage/ninefold.rb +3 -4
- data/lib/backup/storage/rsync.rb +1 -2
- data/lib/backup/storage/s3.rb +49 -158
- data/lib/backup/storage/scp.rb +3 -4
- data/lib/backup/storage/sftp.rb +1 -2
- data/lib/backup/syncer/base.rb +0 -1
- data/lib/backup/syncer/cloud/base.rb +129 -208
- data/lib/backup/syncer/cloud/cloud_files.rb +56 -41
- data/lib/backup/syncer/cloud/local_file.rb +93 -0
- data/lib/backup/syncer/cloud/s3.rb +78 -31
- data/lib/backup/syncer/rsync/base.rb +7 -0
- data/lib/backup/syncer/rsync/local.rb +0 -5
- data/lib/backup/syncer/rsync/push.rb +1 -2
- data/lib/backup/utilities.rb +18 -15
- data/lib/backup/version.rb +1 -1
- data/templates/cli/notifier/http_post +35 -0
- data/templates/cli/notifier/nagios +13 -0
- data/templates/cli/storage/cloud_files +8 -17
- data/templates/cli/storage/s3 +3 -10
- data/templates/cli/syncer/cloud_files +3 -31
- data/templates/cli/syncer/s3 +3 -27
- data/templates/notifier/mail/failure.erb +6 -1
- data/templates/notifier/mail/success.erb +6 -1
- data/templates/notifier/mail/warning.erb +6 -1
- metadata +37 -42
- data/lib/backup/storage/cloudfiles.rb +0 -68
data/lib/backup/storage/cycler.rb
CHANGED
@@ -3,6 +3,8 @@
 module Backup
   module Storage
     module Cycler
+      class Error < Backup::Error; end
+
       class << self

         ##
@@ -37,9 +39,9 @@ module Backup
         def remove_packages!
           @packages_to_remove.each do |pkg|
             begin
-              @storage.send(:remove!, pkg)
+              @storage.send(:remove!, pkg) unless pkg.no_cycle
             rescue => err
-              Logger.warn
+              Logger.warn Error.wrap(err, <<-EOS)
                 There was a problem removing the following package:
                 Trigger: #{pkg.trigger} :: Dated: #{pkg.time}
                 Package included the following #{ pkg.filenames.count } file(s):
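
Throughout this release the old Backup::Errors namespace is replaced by a per-class Error < Backup::Error subclass (see data/lib/backup/errors.rb, +39 -70 above), and Error.wrap is used to attach context to a rescued exception before logging or re-raising it. A minimal sketch of how such a wrap helper can behave; the names and details are illustrative, not the gem's actual implementation:

# Illustrative sketch only -- the real helper lives in data/lib/backup/errors.rb.
module Backup
  class Error < StandardError
    # Build a new Error whose message combines the given context message
    # with the class and message of the exception being wrapped.
    def self.wrap(wrapped_exception, msg = nil)
      new([msg, "Reason: #{ wrapped_exception.class }",
           wrapped_exception.message].compact.join("\n"))
    end
  end
end

begin
  raise IOError, 'disk not mounted'
rescue => err
  wrapped = Backup::Error.wrap(err, 'There was a problem removing the package')
  puts wrapped.message
  # There was a problem removing the package
  # Reason: IOError
  # disk not mounted
end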
data/lib/backup/storage/dropbox.rb
CHANGED
@@ -4,6 +4,7 @@ require 'dropbox_sdk'
 module Backup
   module Storage
     class Dropbox < Base
+      class Error < Backup::Error; end

       ##
       # Dropbox API credentials
@@ -21,23 +22,26 @@ module Backup
       attr_accessor :chunk_size

       ##
-      # Number of times to retry
-      attr_accessor :chunk_retries
+      # Number of times to retry failed operations.
+      #
+      # Default: 10
+      attr_accessor :max_retries

       ##
-      #
+      # Time in seconds to pause before each retry.
+      #
+      # Default: 30
       attr_accessor :retry_waitsec

       ##
       # Creates a new instance of the storage object
-      def initialize(model, storage_id = nil, &block)
+      def initialize(model, storage_id = nil)
         super
-        instance_eval(&block) if block_given?

         @path ||= 'backups'
         @access_type ||= :app_folder
         @chunk_size ||= 4 # MiB
-        @chunk_retries ||= 10
+        @max_retries ||= 10
         @retry_waitsec ||= 30
         path.sub!(/^\//, '')
       end
@@ -66,7 +70,7 @@ module Backup
         @connection = DropboxClient.new(session, access_type)

       rescue => err
-        raise
+        raise Error.wrap(err, 'Authorization Failed')
       end

       ##
@@ -79,7 +83,7 @@ module Backup
         Logger.info "Session data loaded from cache!"

       rescue => err
-        Logger.warn
+        Logger.warn Error.wrap(err, <<-EOS)
           Could not read session data from cache.
           Cache data might be corrupt.
         EOS
@@ -114,7 +118,7 @@ module Backup
         end

       rescue => err
-        raise
+        raise Error.wrap(err, 'Upload Failed!')
       end

       # Timeout::Error is not a StandardError under ruby-1.8.7
@@ -124,10 +128,9 @@ module Backup
         yield
       rescue StandardError, Timeout::Error => err
         retries += 1
-        raise if retries > chunk_retries
+        raise if retries > max_retries

-        Logger.info
-          wrap(err, "Retry ##{ retries } of #{ chunk_retries }.")
+        Logger.info Error.wrap(err, "Retry ##{ retries } of #{ max_retries }.")
         sleep(retry_waitsec)
         retry
       end
@@ -186,16 +189,17 @@ module Backup

         session

-      [2 lines not preserved]
-          err, 'Could not create or authenticate a new session'
-        )
+      rescue => err
+        raise Error.wrap(err, 'Could not create or authenticate a new session')
       end

       attr_deprecate :email, :version => '3.0.17'
       attr_deprecate :password, :version => '3.0.17'
       attr_deprecate :timeout, :version => '3.0.21'

+      attr_deprecate :chunk_retries, :version => '3.7.0',
+                     :message => 'Use #max_retries instead.',
+                     :action => lambda {|klass, val| klass.max_retries = val }
     end
   end
 end
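
For existing configurations, attr_deprecate keeps old chunk_retries settings working by forwarding the value to max_retries and logging a deprecation warning. An updated Dropbox storage block would look like this (credential values are placeholders; this is a usage sketch, not part of the diff):

store_with Dropbox do |db|
  db.api_key       = 'my_api_key'
  db.api_secret    = 'my_api_secret'
  db.path          = 'backups'
  db.chunk_size    = 4   # MiB
  db.max_retries   = 10  # formerly db.chunk_retries
  db.retry_waitsec = 30
end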
data/lib/backup/storage/ftp.rb
CHANGED
@@ -17,9 +17,8 @@ module Backup
       # use passive mode?
       attr_accessor :passive_mode

-      def initialize(model, storage_id = nil, &block)
+      def initialize(model, storage_id = nil)
         super
-        instance_eval(&block) if block_given?

         @port ||= 21
         @path ||= 'backups'
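
The removed instance_eval(&block) line repeats across every storage in this diff: evaluating the configuration block is now handled once, up the chain in Storage::Base (data/lib/backup/storage/base.rb, +2 -1 above), so each subclass only fills in defaults after super returns. A sketch of the pattern, with the Base internals assumed rather than copied from the gem:

# Assumed sketch of the shared pattern; not the gem's exact code.
module Backup
  module Storage
    class Base
      def initialize(model, storage_id = nil, &block)
        @model = model
        @storage_id = storage_id
        instance_eval(&block) if block_given?  # user settings applied first
      end
    end

    class FTP < Base
      attr_accessor :port, :path

      def initialize(model, storage_id = nil)
        super  # bare super also forwards the block up to Base
        @port ||= 21       # ||= so block-provided values win
        @path ||= 'backups'
      end
    end
  end
end

ftp = Backup::Storage::FTP.new(:my_model) {|f| f.port = 2121 }
ftp.port  # => 2121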
data/lib/backup/storage/local.rb
CHANGED
@@ -3,10 +3,10 @@
 module Backup
   module Storage
     class Local < Base
+      class Error < Backup::Error; end

-      def initialize(model, storage_id = nil, &block)
+      def initialize(model, storage_id = nil)
         super
-        instance_eval(&block) if block_given?

         @path ||= '~/backups'
       end
@@ -47,7 +47,7 @@ module Backup
         if self == model.storages.last
           true
         else
-          Logger.warn
+          Logger.warn Error.new(<<-EOS)
             Local File Copy Warning!
             The final backup file(s) for '#{ model.label }' (#{ model.trigger })
             will be *copied* to '#{ remote_path }'
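
The warning above exists because only the last storage defined for a model can safely move the package files out of the tmp directory; a Local storage defined earlier must copy them, temporarily doubling disk usage. In model terms (a sketch; the second storage is only for illustration):

Backup::Model.new(:my_backup, 'My Backup') do
  # Local is not last here, so its files are *copied* and the
  # warning is logged; defining Local last avoids the extra copy.
  store_with Local do |local|
    local.path = '~/backups/'
  end
  store_with SFTP do |server|
    server.ip   = 'backup.example.com'  # placeholder
    server.path = '~/backups/'
  end
end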
data/lib/backup/storage/ninefold.rb
CHANGED
@@ -4,14 +4,14 @@ require 'fog'
 module Backup
   module Storage
     class Ninefold < Base
+      class Error < Backup::Error; end

       ##
       # Ninefold Credentials
       attr_accessor :storage_token, :storage_secret

-      def initialize(model, storage_id = nil, &block)
+      def initialize(model, storage_id = nil)
         super
-        instance_eval(&block) if block_given?

         @path ||= 'backups'
         path.sub!(/^\//, '')
@@ -58,8 +58,7 @@ module Backup
         remote_path = remote_path_for(package)
         directory = directory_for(remote_path)

-        raise
-          "Directory at '#{ remote_path }' not found" unless directory
+        raise Error, "Directory at '#{ remote_path }' not found" unless directory

         package.filenames.each do |filename|
           file = directory.files.get(filename)
data/lib/backup/storage/rsync.rb
CHANGED
@@ -132,9 +132,8 @@ module Backup
       # will also store the files directly in the +path+ given.
       attr_accessor :path

-      def initialize(model, storage_id = nil, &block)
+      def initialize(model, storage_id = nil)
         super
-        instance_eval(&block) if block_given?

         @mode ||= :ssh
         @port ||= mode == :rsync_daemon ? 873 : 22
data/lib/backup/storage/s3.rb
CHANGED
@@ -1,11 +1,10 @@
 # encoding: utf-8
-require 'fog'
-require 'base64'
-require 'digest/md5'
+require 'backup/cloud_io/s3'

 module Backup
   module Storage
     class S3 < Base
+      class Error < Backup::Error; end

       ##
       # Amazon Simple Storage Service (S3) Credentials
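
The fog, base64 and digest/md5 requires move behind the new CloudIO layer (data/lib/backup/cloud_io/base.rb and cloud_io/s3.rb in the file list above), which centralizes connection handling and retries for both storages and syncers. A rough sketch of what the shared retry helper looks like, modeled on the with_retries code removed further down rather than on the actual CloudIO source:

# Sketch only; the real implementation is in data/lib/backup/cloud_io/base.rb.
# Assumes max_retries / retry_waitsec readers and the gem's Error and Logger.
def with_retries(operation)
  retries = 0
  begin
    yield
  rescue => err
    retries += 1
    raise Error.wrap(err, operation) if retries > max_retries
    Logger.info Error.wrap(err, "Retry ##{ retries } of #{ max_retries }.")
    sleep(retry_waitsec)
    retry
  end
end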
@@ -20,32 +19,19 @@ module Backup
       attr_accessor :region

       ##
-      #
+      # Multipart chunk size, specified in MiB.
       #
-      # Each
-      # will be uploaded using
+      # Each package file larger than +chunk_size+
+      # will be uploaded using S3 Multipart Upload.
       #
-      #
-      #
-      #
-      # Minimum allowed: 5 (but may be disabled with 0)
+      # Minimum: 5 (but may be disabled with 0)
+      # Maximum: 5120
       # Default: 5
       attr_accessor :chunk_size

       ##
       # Number of times to retry failed operations.
       #
-      # The retry count is reset when the failing operation succeeds,
-      # so each operation that fails will be retried this number of times.
-      # Once a single failed operation exceeds +max_retries+, the entire
-      # storage operation will fail.
-      #
-      # Operations that may fail and be retried include:
-      # - Multipart initiation requests.
-      # - Each multipart upload of +chunk_size+. (retries the chunk)
-      # - Multipart upload completion requests.
-      # - Each file uploaded not using multipart upload. (retries the file)
-      #
       # Default: 10
       attr_accessor :max_retries
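
Since chunk_size is given in MiB, whether a package file uses multipart upload reduces to a size comparison:

chunk_size  = 5                    # MiB (the default; 0 disables multipart)
chunk_bytes = chunk_size * 1024**2
file_size   = 8 * 1024**2          # an 8 MiB package file

uses_multipart = chunk_size > 0 && file_size > chunk_bytes  # => true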
@@ -62,8 +48,6 @@ module Backup
       #
       # - :aes256
       #
-      # @see http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingServerSideEncryption.html
-      #
       # Default: nil
       attr_accessor :encryption
@@ -75,14 +59,11 @@ module Backup
       # - :standard (default)
       # - :reduced_redundancy
       #
-      # @see http://docs.aws.amazon.com/AmazonS3/latest/dev/SetStoClsOfObjUploaded.html
-      #
       # Default: :standard
       attr_accessor :storage_class

-      def initialize(model, storage_id = nil, &block)
+      def initialize(model, storage_id = nil)
         super
-        instance_eval(&block) if block_given?

         @chunk_size ||= 5 # MiB
         @max_retries ||= 10
@@ -90,21 +71,24 @@ module Backup
         @path ||= 'backups'
         @storage_class ||= :standard
         path.sub!(/^\//, '')
+
+        check_configuration
       end

       private

-      def connection
-        @connection ||= Fog::Storage.new(
-          [9 lines not preserved]
+      def cloud_io
+        @cloud_io ||= CloudIO::S3.new(
+          :access_key_id     => access_key_id,
+          :secret_access_key => secret_access_key,
+          :region            => region,
+          :bucket            => bucket,
+          :encryption        => encryption,
+          :storage_class     => storage_class,
+          :max_retries       => max_retries,
+          :retry_waitsec     => retry_waitsec,
+          :chunk_size        => chunk_size
+        )
       end

       def transfer!
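
How CloudIO::S3#upload chooses between a single PUT and a multipart upload is not shown in this diff; based on the old Uploader logic removed below, a plausible outline is:

# Plausible outline only -- see data/lib/backup/cloud_io/s3.rb for the real code.
# The helper names mirror the old Uploader methods removed below.
def upload(src, dest)
  chunk_bytes = chunk_size * 1024**2
  if chunk_size > 0 && File.size(src) > chunk_bytes
    upload_id = initiate_multipart(dest)
    parts = upload_parts(src, dest, upload_id)
    complete_multipart(dest, upload_id, parts)
  else
    put_object(src, dest)
  end
end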
@@ -112,7 +96,7 @@ module Backup
           src = File.join(Config.tmp_path, filename)
           dest = File.join(remote_path, filename)
           Logger.info "Storing '#{ bucket }/#{ dest }'..."
-          [1 line not preserved]
+          cloud_io.upload(src, dest)
         end
       end
@@ -122,129 +106,36 @@ module Backup
         Logger.info "Removing backup package dated #{ package.time }..."

         remote_path = remote_path_for(package)
-        [1 line not preserved]
-        keys = resp.body['Contents'].map {|entry| entry['Key'] }
+        objects = cloud_io.objects(remote_path)

-        raise
-          "Package at '#{ remote_path }' not found" if keys.empty?
+        raise Error, "Package at '#{ remote_path }' not found" if objects.empty?

-        [1 line not preserved]
+        cloud_io.delete(objects)
       end

-      [23 lines not preserved]
-          else
-            upload
-          end
-        rescue => err
-          raise error_with(err, 'Upload Failed!')
-        end
-
-        private
-
-        def upload
-          md5 = Base64.encode64(Digest::MD5.file(src).digest).chomp
-          options = headers.merge('Content-MD5' => md5)
-          with_retries do
-            File.open(src, 'r') do |file|
-              connection.put_object(bucket, dest, file, options)
-            end
-          end
-        end
-
-        def initiate_multipart
-          with_retries do
-            resp = connection.initiate_multipart_upload(bucket, dest, headers)
-            @upload_id = resp.body['UploadId']
-          end
-        end
-
-        def upload_parts
-          File.open(src, 'r') do |file|
-            part_number = 0
-            while data = file.read(chunk_size)
-              part_number += 1
-              md5 = Base64.encode64(Digest::MD5.digest(data)).chomp
-              with_retries do
-                resp = connection.upload_part(
-                  bucket, dest, upload_id, part_number, data,
-                  { 'Content-MD5' => md5 }
-                )
-                parts << resp.headers['ETag']
-              end
-            end
-          end
-        end
-
-        def headers
-          headers = {}
-
-          val = encryption.to_s.upcase
-          headers.merge!(
-            { 'x-amz-server-side-encryption' => val }
-          ) unless val.empty?
-
-          val = storage_class.to_s.upcase
-          headers.merge!(
-            { 'x-amz-storage-class' => val }
-          ) unless val.empty? || val == 'STANDARD'
-
-          headers
-        end
-
-        def complete_multipart
-          with_retries do
-            connection.complete_multipart_upload(bucket, dest, upload_id, parts)
-          end
-        end
-
-        def with_retries
-          retries = 0
-          begin
-            yield
-          rescue => err
-            retries += 1
-            raise if retries > max_retries
-
-            Logger.info error_with(err, "Retry ##{ retries } of #{ max_retries }.")
-            sleep(retry_waitsec)
-            retry
-          end
-        end
-
-        def error_with(err, msg)
-          if err.is_a? Excon::Errors::HTTPStatusError
-            Errors::Storage::S3::UploaderError.new(<<-EOS)
-              #{ msg }
-              Reason: #{ err.class }
-              response => #{ err.response.inspect }
-            EOS
-          else
-            Errors::Storage::S3::UploaderError.wrap(err, msg)
-          end
-        end
-      end # class Uploader
+      def check_configuration
+        required = %w{ access_key_id secret_access_key bucket }
+        raise Error, <<-EOS if required.map {|name| send(name) }.any?(&:nil?)
+          Configuration Error
+          #{ required.map {|name| "##{ name }"}.join(', ') } are all required
+        EOS
+
+        raise Error, <<-EOS if chunk_size > 0 && !chunk_size.between?(5, 5120)
+          Configuration Error
+          #chunk_size must be between 5 and 5120 (or 0 to disable multipart)
+        EOS
+
+        raise Error, <<-EOS if encryption && encryption.to_s.upcase != 'AES256'
+          Configuration Error
+          #encryption must be :aes256 or nil
+        EOS
+
+        classes = ['STANDARD', 'REDUCED_REDUNDANCY']
+        raise Error, <<-EOS unless classes.include?(storage_class.to_s.upcase)
+          Configuration Error
+          #storage_class must be :standard or :reduced_redundancy
+        EOS
+      end

     end
   end
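
With check_configuration, invalid settings now fail fast when the model is loaded instead of surfacing mid-transfer. A valid 3.7.0 S3 storage block for reference (credentials are placeholders; this is a usage sketch, not part of the diff):

store_with S3 do |s3|
  s3.access_key_id     = 'my_access_key_id'
  s3.secret_access_key = 'my_secret_access_key'
  s3.region            = 'us-east-1'
  s3.bucket            = 'bucket-name'
  s3.path              = 'path/to/backups'
  s3.chunk_size        = 5          # MiB; must be 5..5120, or 0 to disable
  s3.encryption        = :aes256    # or nil
  s3.storage_class     = :standard  # or :reduced_redundancy
end

# e.g. s3.chunk_size = 3 would now raise an error like:
#   Storage::S3::Error: Configuration Error
#   #chunk_size must be between 5 and 5120 (or 0 to disable multipart)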