backup 3.3.2 → 3.4.0
- checksums.yaml +4 -4
- data/lib/backup/cleaner.rb +22 -29
- data/lib/backup/database/base.rb +5 -1
- data/lib/backup/model.rb +4 -4
- data/lib/backup/package.rb +3 -4
- data/lib/backup/splitter.rb +12 -14
- data/lib/backup/storage/base.rb +25 -53
- data/lib/backup/storage/cloudfiles.rb +18 -38
- data/lib/backup/storage/cycler.rb +6 -6
- data/lib/backup/storage/dropbox.rb +41 -55
- data/lib/backup/storage/ftp.rb +17 -37
- data/lib/backup/storage/local.rb +23 -42
- data/lib/backup/storage/ninefold.rb +28 -69
- data/lib/backup/storage/rsync.rb +18 -24
- data/lib/backup/storage/s3.rb +166 -48
- data/lib/backup/storage/scp.rb +13 -41
- data/lib/backup/storage/sftp.rb +16 -37
- data/lib/backup/version.rb +1 -1
- data/templates/general/links +3 -11
- metadata +3 -3
data/lib/backup/storage/cycler.rb
CHANGED

@@ -10,8 +10,9 @@ module Backup
         # to the given +storage+ and Package#trigger (Model#trigger).
         # Then, calls the +storage+ to remove the files for any older
         # packages that were removed from the YAML storage file.
-        def cycle!(storage
-          @storage
+        def cycle!(storage)
+          @storage = storage
+          @package = storage.package
           @storage_file = storage_file
 
           update_storage_file!
@@ -52,10 +53,9 @@ module Backup
         # Return full path to the YAML data file,
         # based on the current values of @storage and @package
         def storage_file
-
-
-
-          File.join(Config.data_path, @package.trigger, "#{filename}.yml")
+          filename = @storage.class.to_s.split('::').last
+          filename << "-#{ @storage.storage_id }" if @storage.storage_id
+          File.join(Config.data_path, @package.trigger, "#{ filename }.yml")
         end
 
         ##
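For illustration, the data file that cycle! maintains is now named after the storage class plus any storage_id. Tracing the new storage_file logic by hand, with a hypothetical S3 storage given the id :offsite on a model triggered as :my_backup (all three values are invented for this example):

    filename = Backup::Storage::S3.to_s.split('::').last   # => "S3"
    filename << "-offsite"                                  # storage_id was set
    File.join(Backup::Config.data_path, 'my_backup', "#{ filename }.yml")
    # => "<data_path>/my_backup/S3-offsite.yml"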
data/lib/backup/storage/dropbox.rb
CHANGED

@@ -1,7 +1,5 @@
 # encoding: utf-8
 
-##
-# Only load the Dropbox gem when the Backup::Storage::Dropbox class is loaded
 Backup::Dependency.load('dropbox-sdk')
 
 module Backup
@@ -19,10 +17,6 @@ module Backup
       #   :dropbox (full access)
       attr_accessor :access_type
 
-      ##
-      # Path to where the backups will be stored
-      attr_accessor :path
-
       ##
       # Chunk size, specified in MiB, for the ChunkedUploader.
       attr_accessor :chunk_size
@@ -35,23 +29,18 @@ module Backup
       # Seconds to wait between chunk retries.
       attr_accessor :retry_waitsec
 
-      attr_deprecate :email, :version => '3.0.17'
-      attr_deprecate :password, :version => '3.0.17'
-
-      attr_deprecate :timeout, :version => '3.0.21'
-
       ##
       # Creates a new instance of the storage object
       def initialize(model, storage_id = nil, &block)
-        super
+        super
+        instance_eval(&block) if block_given?
 
         @path ||= 'backups'
         @access_type ||= :app_folder
         @chunk_size ||= 4 # MiB
         @chunk_retries ||= 10
         @retry_waitsec ||= 30
-
-        instance_eval(&block) if block_given?
+        path.sub!(/^\//, '')
       end
 
       private
@@ -85,7 +74,7 @@ module Backup
       # Attempt to load a cached session
       def cached_session
         session = false
-        if
+        if File.exist?(cached_file)
           begin
             session = DropboxSession.deserialize(File.read(cached_file))
             Logger.info "Session data loaded from cache!"
@@ -105,65 +94,58 @@ module Backup
       # Each chunk will be retried +chunk_retries+ times, pausing +retry_waitsec+
       # between retries, if errors occur.
       def transfer!
-
+        package.filenames.each do |filename|
+          src = File.join(Config.tmp_path, filename)
+          dest = File.join(remote_path, filename)
+          Logger.info "Storing '#{ dest }'..."
 
-
-
-
-        uploader, retries = nil, 0
-        File.open(File.join(local_path, local_file), 'r') do |file|
+          uploader = nil
+          File.open(src, 'r') do |file|
             uploader = connection.get_chunked_uploader(file, file.stat.size)
             while uploader.offset < uploader.total_size
-
+              with_retries do
                 uploader.upload(1024**2 * chunk_size)
-              retries = 0
-            # Timeout::Error is not a StandardError under ruby-1.8.7
-            rescue StandardError, Timeout::Error => err
-              retries += 1
-              if retries <= chunk_retries
-                Logger.info "Chunk retry #{ retries } of #{ chunk_retries }."
-                sleep(retry_waitsec)
-                retry
-              end
-              raise Errors::Storage::Dropbox::TransferError.
-                  wrap(err, 'Dropbox upload failed!')
               end
             end
           end
 
-
+          with_retries do
+            uploader.finish(dest)
+          end
         end
-      end
 
-
-
-
-      # and a warning will be logged, containing the error message.
-      def remove!(package)
-        remote_path = remote_path_for(package)
+      rescue => err
+        raise Errors::Storage::Dropbox::TransferError.wrap(err, 'Upload Failed!')
+      end
 
-
-
-
+      # Timeout::Error is not a StandardError under ruby-1.8.7
+      def with_retries
+        retries = 0
+        begin
+          yield
+        rescue StandardError, Timeout::Error => err
+          retries += 1
+          raise if retries > chunk_retries
+
+          Logger.info Errors::Storage::Dropbox::TransferError.
+              wrap(err, "Retry ##{ retries } of #{ chunk_retries }.")
+          sleep(retry_waitsec)
+          retry
         end
-
+      end
 
-
+      # Called by the Cycler.
+      # Any error raised will be logged as a warning.
+      def remove!(package)
+        Logger.info "Removing backup package dated #{ package.time }..."
+
+        connection.file_delete(remote_path_for(package))
       end
 
-      ##
-      # Returns the path to the cached file
       def cached_file
         File.join(Config.cache_path, api_key + api_secret)
       end
 
-      ##
-      # Checks to see if the cache file exists
-      def cache_exists?
-        File.exist?(cached_file)
-      end
-
       ##
       # Serializes and writes the Dropbox session to a cache file
       def write_cache!(session)
@@ -211,6 +193,10 @@ module Backup
         )
       end
 
+      attr_deprecate :email, :version => '3.0.17'
+      attr_deprecate :password, :version => '3.0.17'
+      attr_deprecate :timeout, :version => '3.0.21'
+
     end
   end
 end
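For context, a configuration sketch of the refactored Dropbox storage, assuming the standard Backup model DSL; the trigger, label and credential values are placeholders, and only settings visible in this diff are shown. Note that path now comes from Storage::Base, and email/password/timeout survive only as attr_deprecate stubs at the bottom of the class.

    Backup::Model.new(:my_backup, 'My Backup') do
      store_with Dropbox do |db|
        db.api_key       = 'my_api_key'     # placeholder
        db.api_secret    = 'my_api_secret'  # placeholder
        db.access_type   = :app_folder      # or :dropbox for full access
        db.path          = 'backups'        # a leading '/' is stripped
        db.chunk_size    = 4                # MiB per ChunkedUploader request
        db.chunk_retries = 10               # attempts made by with_retries
        db.retry_waitsec = 30               # pause between those attempts
      end
    end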
data/lib/backup/storage/ftp.rb
CHANGED
@@ -1,7 +1,5 @@
 # encoding: utf-8
 
-##
-# Only load the Net::FTP library/gem when the Backup::Storage::FTP class is loaded
 require 'net/ftp'
 
 module Backup
@@ -16,26 +14,18 @@ module Backup
       # Server IP Address and FTP port
       attr_accessor :ip, :port
 
-      ##
-      # Path to store backups to
-      attr_accessor :path
-
       ##
       # use passive mode?
       attr_accessor :passive_mode
 
-      ##
-      # Creates a new instance of the storage object
       def initialize(model, storage_id = nil, &block)
-        super
+        super
+        instance_eval(&block) if block_given?
 
         @port ||= 21
         @path ||= 'backups'
         @passive_mode ||= false
-
-        instance_eval(&block) if block_given?
-
-        @path = path.sub(/^\~\//, '')
+        path.sub!(/^~\//, '')
       end
 
       private
@@ -58,38 +48,28 @@ module Backup
         end
       end
 
-      ##
-      # Transfers the archived file to the specified remote server
       def transfer!
-        remote_path = remote_path_for(@package)
-
         connection do |ftp|
-          create_remote_path(
-
-
-
-
-
-
-            File.join(remote_path, remote_file)
-          )
+          create_remote_path(ftp)
+
+          package.filenames.each do |filename|
+            src = File.join(Config.tmp_path, filename)
+            dest = File.join(remote_path, filename)
+            Logger.info "Storing '#{ ip }:#{ dest }'..."
+            ftp.put(src, dest)
           end
         end
       end
 
-
-      #
-      # Any error raised will be rescued during Cycling
-      # and a warning will be logged, containing the error message.
+      # Called by the Cycler.
+      # Any error raised will be logged as a warning.
      def remove!(package)
-
+        Logger.info "Removing backup package dated #{ package.time }..."
 
+        remote_path = remote_path_for(package)
        connection do |ftp|
-
-
-          "'#{ local_file }' from '#{ ip }'."
-
-          ftp.delete(File.join(remote_path, remote_file))
+          package.filenames.each do |filename|
+            ftp.delete(File.join(remote_path, filename))
          end
 
          ftp.rmdir(remote_path)
@@ -104,7 +84,7 @@ module Backup
       # '/') and loop through that to create the directories one by one.
       # Net::FTP raises an exception when the directory it's trying to create
       # already exists, so we have rescue it
-      def create_remote_path(
+      def create_remote_path(ftp)
        path_parts = Array.new
        remote_path.split('/').each do |path_part|
          path_parts << path_part
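The create_remote_path comment above describes creating the remote directory tree one segment at a time, because Net::FTP has no recursive mkdir. A standalone sketch of that pattern (the method name and the rescued error class are assumptions here, not taken from this diff):

    require 'net/ftp'

    # Create each segment of 'a/b/c' in turn, ignoring "already exists" errors.
    def create_each_segment(ftp, remote_path)
      path_parts = []
      remote_path.split('/').each do |part|
        path_parts << part
        begin
          ftp.mkdir(path_parts.join('/'))
        rescue Net::FTPPermError
          # the directory most likely exists already; continue with the next part
        end
      end
    end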
data/lib/backup/storage/local.rb
CHANGED
@@ -4,76 +4,57 @@ module Backup
   module Storage
     class Local < Base
 
-      ##
-      # Path where the backup will be stored.
-      attr_accessor :path
-
-      ##
-      # Creates a new instance of the storage object
       def initialize(model, storage_id = nil, &block)
-        super
-
-        @path ||= File.join(
-          File.expand_path(ENV['HOME'] || ''),
-          'backups'
-        )
-
+        super
         instance_eval(&block) if block_given?
 
-        @path
+        @path ||= '~/backups'
       end
 
       private
 
-      ##
-      # Transfers the archived file to the specified path
       def transfer!
-        remote_path = remote_path_for(@package)
         FileUtils.mkdir_p(remote_path)
 
-
-
+        transfer_method = package_movable? ? :mv : :cp
+        package.filenames.each do |filename|
+          src = File.join(Config.tmp_path, filename)
+          dest = File.join(remote_path, filename)
+          Logger.info "Storing '#{ dest }'..."
 
-
-          dst_path = File.join(remote_path, remote_file)
-          FileUtils.send(transfer_method, src_path, dst_path)
+          FileUtils.send(transfer_method, src, dest)
         end
       end
 
-
-      #
-      # Any error raised will be rescued during Cycling
-      # and a warning will be logged, containing the error message.
+      # Called by the Cycler.
+      # Any error raised will be logged as a warning.
       def remove!(package)
-
+        Logger.info "Removing backup package dated #{ package.time }..."
 
-
-
-        messages << "#{storage_name} started removing '#{ local_file }'."
-      end
-      Logger.info messages.join("\n")
+        FileUtils.rm_r(remote_path_for(package))
+      end
 
-
+      # expanded since this is a local path
+      def remote_path(pkg = package)
+        File.expand_path(super)
       end
+      alias :remote_path_for :remote_path
 
       ##
-      # Set and return the transfer method.
       # If this Local Storage is not the last Storage for the Model,
       # force the transfer to use a *copy* operation and issue a warning.
-      def
-
-
-        if self == @model.storages.last
-          @transfer_method = :mv
+      def package_movable?
+        if self == model.storages.last
+          true
         else
           Logger.warn Errors::Storage::Local::TransferError.new(<<-EOS)
             Local File Copy Warning!
-            The final backup file(s) for '#{
-            will be *copied* to '#{
+            The final backup file(s) for '#{ model.label }' (#{ model.trigger })
+            will be *copied* to '#{ remote_path }'
            To avoid this, when using more than one Storage, the 'Local' Storage
            should be added *last* so the files may be *moved* to their destination.
           EOS
-
+          false
         end
       end
 
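A configuration sketch of the behaviour package_movable? enforces, assuming the standard Backup model DSL (the trigger, label and the other storage's settings are placeholders): when the Local storage is added last, the package files are moved into place; otherwise they are copied and the warning above is logged.

    Backup::Model.new(:my_backup, 'My Backup') do
      store_with SFTP do |sftp|
        # remote settings omitted in this sketch
      end

      # Added last, so package_movable? returns true and the files are *moved*.
      store_with Local do |local|
        local.path = '~/backups'   # the new default shown in this diff
        local.keep = 5
      end
    end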
data/lib/backup/storage/ninefold.rb
CHANGED

@@ -1,7 +1,5 @@
 # encoding: utf-8
 
-##
-# Only load the Fog gem when the Backup::Storage::Ninefold class is loaded
 Backup::Dependency.load('fog')
 
 module Backup
@@ -12,39 +10,36 @@ module Backup
       # Ninefold Credentials
       attr_accessor :storage_token, :storage_secret
 
-      ##
-      # Ninefold directory path
-      attr_accessor :path
-
-      ##
-      # Creates a new instance of the storage object
       def initialize(model, storage_id = nil, &block)
-        super
+        super
+        instance_eval(&block) if block_given?
 
         @path ||= 'backups'
-
-        instance_eval(&block) if block_given?
+        path.sub!(/^\//, '')
       end
 
-
       private
 
-      ##
-      # This is the provider that Fog uses for the Ninefold storage
-      def provider
-        'Ninefold'
-      end
-
-      ##
-      # Establishes a connection to Amazon S3
       def connection
         @connection ||= Fog::Storage.new(
-          :provider =>
+          :provider => 'Ninefold',
           :ninefold_storage_token => storage_token,
           :ninefold_storage_secret => storage_secret
         )
       end
 
+      def transfer!
+        directory = directory_for(remote_path, true)
+        package.filenames.each do |filename|
+          src = File.join(Config.tmp_path, filename)
+          dest = File.join(remote_path, filename)
+          Logger.info "Storing '#{ dest }'..."
+          File.open(src, 'r') do |file|
+            directory.files.create(:key => filename, :body => file)
+          end
+        end
+      end
+
       ##
       # Queries the connection for the directory for the given +remote_path+
       # Returns nil if not found, or creates the directory if +create+ is true.
@@ -56,59 +51,23 @@ module Backup
         directory
       end
 
-
-
-      end
-
-      ##
-      # Transfers the archived file to the specified directory
-      def transfer!
-        remote_path = remote_path_for(@package)
-
-        directory = directory_for(remote_path, true)
-
-        files_to_transfer_for(@package) do |local_file, remote_file|
-          Logger.info "#{storage_name} started transferring '#{ local_file }'."
-
-          File.open(File.join(local_path, local_file), 'r') do |file|
-            directory.files.create(:key => remote_file, :body => file)
-          end
-        end
-      end
-
-      ##
-      # Removes the transferred archive file(s) from the storage location.
-      # Any error raised will be rescued during Cycling
-      # and a warning will be logged, containing the error message.
+      # Called by the Cycler.
+      # Any error raised will be logged as a warning.
       def remove!(package)
-
-
-        if directory = directory_for(remote_path)
-          not_found = []
-
-          transferred_files_for(package) do |local_file, remote_file|
-            Logger.info "#{storage_name} started removing " +
-                        "'#{ local_file }' from Ninefold."
+        Logger.info "Removing backup package dated #{ package.time }..."
 
-
-
-          else
-            not_found << remote_file
-          end
-        end
+        remote_path = remote_path_for(package)
+        directory = directory_for(remote_path)
 
-
+        raise Errors::Storage::Ninefold::NotFoundError,
+            "Directory at '#{ remote_path }' not found" unless directory
 
-
-
-
-            #{ not_found.join("\n") }
-          EOS
-        end
-      else
-        raise Errors::Storage::Ninefold::NotFoundError,
-            "Directory at '#{remote_path}' not found"
+        package.filenames.each do |filename|
+          file = directory.files.get(filename)
+          file.destroy if file
         end
+
+        directory.destroy
       end
 
     end
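For reference, a standalone sketch of the Fog::Storage calls the new transfer! and remove! rely on; the credentials, directory key and file name are placeholders and error handling is omitted:

    require 'fog'

    connection = Fog::Storage.new(
      :provider                => 'Ninefold',
      :ninefold_storage_token  => 'my_token',   # placeholder
      :ninefold_storage_secret => 'my_secret'   # placeholder
    )

    # Create (or fetch) a directory, upload a file into it, then clean it up.
    directory = connection.directories.create(:key => 'backups/my_backup/2013.05.01.12.00.00')
    File.open('my_backup.tar', 'r') do |file|
      directory.files.create(:key => 'my_backup.tar', :body => file)
    end

    directory.files.each(&:destroy)
    directory.destroy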