backup_checksum 3.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +7 -0
- data/.travis.yml +10 -0
- data/Gemfile +28 -0
- data/Gemfile.lock +130 -0
- data/Guardfile +21 -0
- data/LICENSE.md +24 -0
- data/README.md +476 -0
- data/backup_checksum.gemspec +32 -0
- data/bin/backup +11 -0
- data/lib/backup.rb +217 -0
- data/lib/backup/archive.rb +117 -0
- data/lib/backup/binder.rb +22 -0
- data/lib/backup/checksum/base.rb +44 -0
- data/lib/backup/checksum/shasum.rb +16 -0
- data/lib/backup/cleaner.rb +121 -0
- data/lib/backup/cli/helpers.rb +88 -0
- data/lib/backup/cli/utility.rb +247 -0
- data/lib/backup/compressor/base.rb +29 -0
- data/lib/backup/compressor/bzip2.rb +50 -0
- data/lib/backup/compressor/gzip.rb +47 -0
- data/lib/backup/compressor/lzma.rb +50 -0
- data/lib/backup/compressor/pbzip2.rb +56 -0
- data/lib/backup/config.rb +173 -0
- data/lib/backup/configuration/base.rb +15 -0
- data/lib/backup/configuration/checksum/base.rb +9 -0
- data/lib/backup/configuration/checksum/shasum.rb +9 -0
- data/lib/backup/configuration/compressor/base.rb +9 -0
- data/lib/backup/configuration/compressor/bzip2.rb +23 -0
- data/lib/backup/configuration/compressor/gzip.rb +23 -0
- data/lib/backup/configuration/compressor/lzma.rb +23 -0
- data/lib/backup/configuration/compressor/pbzip2.rb +28 -0
- data/lib/backup/configuration/database/base.rb +19 -0
- data/lib/backup/configuration/database/mongodb.rb +49 -0
- data/lib/backup/configuration/database/mysql.rb +42 -0
- data/lib/backup/configuration/database/postgresql.rb +41 -0
- data/lib/backup/configuration/database/redis.rb +39 -0
- data/lib/backup/configuration/database/riak.rb +29 -0
- data/lib/backup/configuration/encryptor/base.rb +9 -0
- data/lib/backup/configuration/encryptor/gpg.rb +17 -0
- data/lib/backup/configuration/encryptor/open_ssl.rb +32 -0
- data/lib/backup/configuration/helpers.rb +52 -0
- data/lib/backup/configuration/notifier/base.rb +28 -0
- data/lib/backup/configuration/notifier/campfire.rb +25 -0
- data/lib/backup/configuration/notifier/hipchat.rb +41 -0
- data/lib/backup/configuration/notifier/mail.rb +112 -0
- data/lib/backup/configuration/notifier/presently.rb +25 -0
- data/lib/backup/configuration/notifier/prowl.rb +23 -0
- data/lib/backup/configuration/notifier/twitter.rb +21 -0
- data/lib/backup/configuration/storage/base.rb +18 -0
- data/lib/backup/configuration/storage/cloudfiles.rb +25 -0
- data/lib/backup/configuration/storage/dropbox.rb +58 -0
- data/lib/backup/configuration/storage/ftp.rb +29 -0
- data/lib/backup/configuration/storage/local.rb +17 -0
- data/lib/backup/configuration/storage/ninefold.rb +20 -0
- data/lib/backup/configuration/storage/rsync.rb +29 -0
- data/lib/backup/configuration/storage/s3.rb +25 -0
- data/lib/backup/configuration/storage/scp.rb +25 -0
- data/lib/backup/configuration/storage/sftp.rb +25 -0
- data/lib/backup/configuration/syncer/base.rb +10 -0
- data/lib/backup/configuration/syncer/cloud.rb +23 -0
- data/lib/backup/configuration/syncer/cloud_files.rb +30 -0
- data/lib/backup/configuration/syncer/rsync/base.rb +28 -0
- data/lib/backup/configuration/syncer/rsync/local.rb +11 -0
- data/lib/backup/configuration/syncer/rsync/pull.rb +11 -0
- data/lib/backup/configuration/syncer/rsync/push.rb +31 -0
- data/lib/backup/configuration/syncer/s3.rb +23 -0
- data/lib/backup/database/base.rb +59 -0
- data/lib/backup/database/mongodb.rb +232 -0
- data/lib/backup/database/mysql.rb +163 -0
- data/lib/backup/database/postgresql.rb +146 -0
- data/lib/backup/database/redis.rb +139 -0
- data/lib/backup/database/riak.rb +69 -0
- data/lib/backup/dependency.rb +114 -0
- data/lib/backup/encryptor/base.rb +29 -0
- data/lib/backup/encryptor/gpg.rb +80 -0
- data/lib/backup/encryptor/open_ssl.rb +72 -0
- data/lib/backup/errors.rb +124 -0
- data/lib/backup/logger.rb +152 -0
- data/lib/backup/model.rb +386 -0
- data/lib/backup/notifier/base.rb +81 -0
- data/lib/backup/notifier/campfire.rb +168 -0
- data/lib/backup/notifier/hipchat.rb +99 -0
- data/lib/backup/notifier/mail.rb +206 -0
- data/lib/backup/notifier/presently.rb +88 -0
- data/lib/backup/notifier/prowl.rb +65 -0
- data/lib/backup/notifier/twitter.rb +70 -0
- data/lib/backup/package.rb +51 -0
- data/lib/backup/packager.rb +108 -0
- data/lib/backup/pipeline.rb +107 -0
- data/lib/backup/splitter.rb +75 -0
- data/lib/backup/storage/base.rb +119 -0
- data/lib/backup/storage/cloudfiles.rb +87 -0
- data/lib/backup/storage/cycler.rb +117 -0
- data/lib/backup/storage/dropbox.rb +181 -0
- data/lib/backup/storage/ftp.rb +119 -0
- data/lib/backup/storage/local.rb +82 -0
- data/lib/backup/storage/ninefold.rb +116 -0
- data/lib/backup/storage/rsync.rb +149 -0
- data/lib/backup/storage/s3.rb +94 -0
- data/lib/backup/storage/scp.rb +99 -0
- data/lib/backup/storage/sftp.rb +108 -0
- data/lib/backup/syncer/base.rb +42 -0
- data/lib/backup/syncer/cloud.rb +190 -0
- data/lib/backup/syncer/cloud_files.rb +56 -0
- data/lib/backup/syncer/rsync/base.rb +52 -0
- data/lib/backup/syncer/rsync/local.rb +53 -0
- data/lib/backup/syncer/rsync/pull.rb +38 -0
- data/lib/backup/syncer/rsync/push.rb +113 -0
- data/lib/backup/syncer/s3.rb +47 -0
- data/lib/backup/template.rb +46 -0
- data/lib/backup/version.rb +43 -0
- data/spec/archive_spec.rb +335 -0
- data/spec/cleaner_spec.rb +304 -0
- data/spec/cli/helpers_spec.rb +176 -0
- data/spec/cli/utility_spec.rb +363 -0
- data/spec/compressor/base_spec.rb +31 -0
- data/spec/compressor/bzip2_spec.rb +83 -0
- data/spec/compressor/gzip_spec.rb +83 -0
- data/spec/compressor/lzma_spec.rb +83 -0
- data/spec/compressor/pbzip2_spec.rb +124 -0
- data/spec/config_spec.rb +321 -0
- data/spec/configuration/base_spec.rb +35 -0
- data/spec/configuration/compressor/bzip2_spec.rb +29 -0
- data/spec/configuration/compressor/gzip_spec.rb +29 -0
- data/spec/configuration/compressor/lzma_spec.rb +29 -0
- data/spec/configuration/compressor/pbzip2_spec.rb +32 -0
- data/spec/configuration/database/base_spec.rb +17 -0
- data/spec/configuration/database/mongodb_spec.rb +56 -0
- data/spec/configuration/database/mysql_spec.rb +53 -0
- data/spec/configuration/database/postgresql_spec.rb +53 -0
- data/spec/configuration/database/redis_spec.rb +50 -0
- data/spec/configuration/database/riak_spec.rb +35 -0
- data/spec/configuration/encryptor/gpg_spec.rb +26 -0
- data/spec/configuration/encryptor/open_ssl_spec.rb +35 -0
- data/spec/configuration/notifier/base_spec.rb +32 -0
- data/spec/configuration/notifier/campfire_spec.rb +32 -0
- data/spec/configuration/notifier/hipchat_spec.rb +44 -0
- data/spec/configuration/notifier/mail_spec.rb +71 -0
- data/spec/configuration/notifier/presently_spec.rb +35 -0
- data/spec/configuration/notifier/prowl_spec.rb +29 -0
- data/spec/configuration/notifier/twitter_spec.rb +35 -0
- data/spec/configuration/storage/cloudfiles_spec.rb +41 -0
- data/spec/configuration/storage/dropbox_spec.rb +38 -0
- data/spec/configuration/storage/ftp_spec.rb +44 -0
- data/spec/configuration/storage/local_spec.rb +29 -0
- data/spec/configuration/storage/ninefold_spec.rb +32 -0
- data/spec/configuration/storage/rsync_spec.rb +41 -0
- data/spec/configuration/storage/s3_spec.rb +38 -0
- data/spec/configuration/storage/scp_spec.rb +41 -0
- data/spec/configuration/storage/sftp_spec.rb +41 -0
- data/spec/configuration/syncer/cloud_files_spec.rb +44 -0
- data/spec/configuration/syncer/rsync/base_spec.rb +33 -0
- data/spec/configuration/syncer/rsync/local_spec.rb +10 -0
- data/spec/configuration/syncer/rsync/pull_spec.rb +10 -0
- data/spec/configuration/syncer/rsync/push_spec.rb +43 -0
- data/spec/configuration/syncer/s3_spec.rb +38 -0
- data/spec/database/base_spec.rb +54 -0
- data/spec/database/mongodb_spec.rb +428 -0
- data/spec/database/mysql_spec.rb +335 -0
- data/spec/database/postgresql_spec.rb +278 -0
- data/spec/database/redis_spec.rb +260 -0
- data/spec/database/riak_spec.rb +108 -0
- data/spec/dependency_spec.rb +49 -0
- data/spec/encryptor/base_spec.rb +30 -0
- data/spec/encryptor/gpg_spec.rb +134 -0
- data/spec/encryptor/open_ssl_spec.rb +129 -0
- data/spec/errors_spec.rb +306 -0
- data/spec/logger_spec.rb +363 -0
- data/spec/model_spec.rb +649 -0
- data/spec/notifier/base_spec.rb +89 -0
- data/spec/notifier/campfire_spec.rb +199 -0
- data/spec/notifier/hipchat_spec.rb +188 -0
- data/spec/notifier/mail_spec.rb +280 -0
- data/spec/notifier/presently_spec.rb +181 -0
- data/spec/notifier/prowl_spec.rb +117 -0
- data/spec/notifier/twitter_spec.rb +132 -0
- data/spec/package_spec.rb +61 -0
- data/spec/packager_spec.rb +225 -0
- data/spec/pipeline_spec.rb +257 -0
- data/spec/spec_helper.rb +59 -0
- data/spec/splitter_spec.rb +120 -0
- data/spec/storage/base_spec.rb +160 -0
- data/spec/storage/cloudfiles_spec.rb +230 -0
- data/spec/storage/cycler_spec.rb +239 -0
- data/spec/storage/dropbox_spec.rb +370 -0
- data/spec/storage/ftp_spec.rb +247 -0
- data/spec/storage/local_spec.rb +235 -0
- data/spec/storage/ninefold_spec.rb +319 -0
- data/spec/storage/rsync_spec.rb +345 -0
- data/spec/storage/s3_spec.rb +221 -0
- data/spec/storage/scp_spec.rb +209 -0
- data/spec/storage/sftp_spec.rb +220 -0
- data/spec/syncer/base_spec.rb +22 -0
- data/spec/syncer/cloud_files_spec.rb +192 -0
- data/spec/syncer/rsync/base_spec.rb +118 -0
- data/spec/syncer/rsync/local_spec.rb +121 -0
- data/spec/syncer/rsync/pull_spec.rb +90 -0
- data/spec/syncer/rsync/push_spec.rb +327 -0
- data/spec/syncer/s3_spec.rb +192 -0
- data/spec/version_spec.rb +21 -0
- data/templates/cli/utility/archive +25 -0
- data/templates/cli/utility/compressor/bzip2 +7 -0
- data/templates/cli/utility/compressor/gzip +7 -0
- data/templates/cli/utility/compressor/lzma +7 -0
- data/templates/cli/utility/compressor/pbzip2 +7 -0
- data/templates/cli/utility/config +31 -0
- data/templates/cli/utility/database/mongodb +18 -0
- data/templates/cli/utility/database/mysql +21 -0
- data/templates/cli/utility/database/postgresql +17 -0
- data/templates/cli/utility/database/redis +16 -0
- data/templates/cli/utility/database/riak +11 -0
- data/templates/cli/utility/encryptor/gpg +12 -0
- data/templates/cli/utility/encryptor/openssl +9 -0
- data/templates/cli/utility/model.erb +23 -0
- data/templates/cli/utility/notifier/campfire +12 -0
- data/templates/cli/utility/notifier/hipchat +15 -0
- data/templates/cli/utility/notifier/mail +22 -0
- data/templates/cli/utility/notifier/presently +13 -0
- data/templates/cli/utility/notifier/prowl +11 -0
- data/templates/cli/utility/notifier/twitter +13 -0
- data/templates/cli/utility/splitter +7 -0
- data/templates/cli/utility/storage/cloud_files +22 -0
- data/templates/cli/utility/storage/dropbox +20 -0
- data/templates/cli/utility/storage/ftp +12 -0
- data/templates/cli/utility/storage/local +7 -0
- data/templates/cli/utility/storage/ninefold +9 -0
- data/templates/cli/utility/storage/rsync +11 -0
- data/templates/cli/utility/storage/s3 +19 -0
- data/templates/cli/utility/storage/scp +11 -0
- data/templates/cli/utility/storage/sftp +11 -0
- data/templates/cli/utility/syncer/cloud_files +48 -0
- data/templates/cli/utility/syncer/rsync_local +12 -0
- data/templates/cli/utility/syncer/rsync_pull +17 -0
- data/templates/cli/utility/syncer/rsync_push +17 -0
- data/templates/cli/utility/syncer/s3 +45 -0
- data/templates/general/links +11 -0
- data/templates/general/version.erb +2 -0
- data/templates/notifier/mail/failure.erb +9 -0
- data/templates/notifier/mail/success.erb +7 -0
- data/templates/notifier/mail/warning.erb +9 -0
- data/templates/storage/dropbox/authorization_url.erb +6 -0
- data/templates/storage/dropbox/authorized.erb +4 -0
- data/templates/storage/dropbox/cache_file_written.erb +10 -0
- metadata +311 -0
|
# encoding: utf-8

##
# Only load the Fog gem when the Backup::Storage::S3 class is loaded
Backup::Dependency.load('fog')

module Backup
  module Storage
    class S3 < Base

      ##
      # Amazon Simple Storage Service (S3) Credentials
      attr_accessor :access_key_id, :secret_access_key

      ##
      # Amazon S3 bucket name and path
      attr_accessor :bucket, :path

      ##
      # Region of the specified S3 bucket
      attr_accessor :region

      ##
      # Creates a new instance of the storage object.
      # The optional block is instance_eval'd, so the DSL can set
      # any of the accessors above.
      def initialize(model, storage_id = nil, &block)
        super(model, storage_id)

        @path ||= 'backups'

        instance_eval(&block) if block_given?
      end

      private

      ##
      # The provider name Fog uses for Amazon S3 storage.
      def provider
        'AWS'
      end

      ##
      # Memoized Fog::Storage connection to Amazon S3.
      def connection
        @connection ||= Fog::Storage.new(
          :provider              => provider,
          :aws_access_key_id     => access_key_id,
          :aws_secret_access_key => secret_access_key,
          :region                => region
        )
      end

      ##
      # S3 object keys must not begin with a '/', so strip any
      # leading slash from the path built by the base class.
      def remote_path_for(package)
        super(package).sub(/^\//, '')
      end

      ##
      # Transfers the archived file to the specified Amazon S3 bucket
      def transfer!
        remote_path = remote_path_for(@package)

        connection.sync_clock

        files_to_transfer_for(@package) do |local_file, remote_file|
          Logger.message(
            "#{storage_name} started transferring " +
            "'#{ local_file }' to bucket '#{ bucket }'."
          )

          source      = File.join(local_path, local_file)
          destination = File.join(remote_path, remote_file)

          File.open(source, 'r') do |file|
            connection.put_object(bucket, destination, file)
          end
        end
      end

      ##
      # Removes the transferred archive file(s) from the storage location.
      # Any error raised will be rescued during Cycling
      # and a warning will be logged, containing the error message.
      def remove!(package)
        remote_path = remote_path_for(package)

        connection.sync_clock

        transferred_files_for(package) do |local_file, remote_file|
          Logger.message(
            "#{storage_name} started removing " +
            "'#{ local_file }' from bucket '#{ bucket }'."
          )

          connection.delete_object(bucket, File.join(remote_path, remote_file))
        end
      end

    end
  end
end
# encoding: utf-8

##
# Only load the Net::SSH and Net::SCP library/gems
# when the Backup::Storage::SCP class is loaded
Backup::Dependency.load('net-ssh')
Backup::Dependency.load('net-scp')

module Backup
  module Storage
    class SCP < Base

      ##
      # Server credentials
      attr_accessor :username, :password

      ##
      # Server IP Address and SCP port
      attr_accessor :ip, :port

      ##
      # Path to store backups to
      attr_accessor :path

      ##
      # Creates a new instance of the storage object.
      # Defaults the port to 22 and the path to 'backups', then
      # instance_eval's the configuration block (if any). A leading
      # '~/' is stripped so the path is relative to the remote $HOME.
      def initialize(model, storage_id = nil, &block)
        super(model, storage_id)

        @port ||= 22
        @path ||= 'backups'

        instance_eval(&block) if block_given?

        @path = path.sub(/^\~\//, '')
      end

      private

      ##
      # Establishes a connection to the remote server
      # and yields the Net::SSH connection.
      # Net::SCP will use this connection to transfer backups
      def connection
        Net::SSH.start(
          ip, username, :password => password, :port => port
        ) {|ssh| yield ssh }
      end

      ##
      # Transfers the archived file to the specified remote server
      def transfer!
        remote_path = remote_path_for(@package)

        connection do |session|
          # Ensure the destination directory exists before uploading.
          session.exec!("mkdir -p '#{ remote_path }'")

          files_to_transfer_for(@package) do |local_file, remote_file|
            Logger.message(
              "#{storage_name} started transferring " +
              "'#{local_file}' to '#{ip}'."
            )

            source      = File.join(local_path, local_file)
            destination = File.join(remote_path, remote_file)
            session.scp.upload!(source, destination)
          end
        end
      end

      ##
      # Removes the transferred archive file(s) from the storage location.
      # Any error raised will be rescued during Cycling
      # and a warning will be logged, containing the error message.
      def remove!(package)
        remote_path = remote_path_for(package)

        # Log all removals up front as a single message.
        messages = []
        transferred_files_for(package) do |local_file, remote_file|
          messages << "#{storage_name} started removing " +
              "'#{local_file}' from '#{ip}'."
        end
        Logger.message messages.join("\n")

        # Remove the whole remote directory, collecting anything
        # written to stderr so failures can be reported.
        errors = []
        connection do |session|
          session.exec!("rm -r '#{remote_path}'") do |ch, stream, data|
            errors << data if stream == :stderr
          end
        end

        unless errors.empty?
          raise Errors::Storage::SCP::SSHError,
              "Net::SSH reported the following errors:\n" +
              errors.join("\n")
        end
      end

    end
  end
end
# encoding: utf-8

##
# Only load the Net::SFTP library/gem when the Backup::Storage::SFTP class is loaded
Backup::Dependency.load('net-ssh')
Backup::Dependency.load('net-sftp')

module Backup
  module Storage
    class SFTP < Base

      ##
      # Server credentials
      attr_accessor :username, :password

      ##
      # Server IP Address and SFTP port
      attr_accessor :ip, :port

      ##
      # Path to store backups to
      attr_accessor :path

      ##
      # Creates a new instance of the storage object.
      # Defaults the port to 22 and the path to 'backups', then
      # instance_eval's the configuration block (if any). A leading
      # '~/' is stripped so the path is relative to the remote $HOME.
      def initialize(model, storage_id = nil, &block)
        super(model, storage_id)

        @port ||= 22
        @path ||= 'backups'

        instance_eval(&block) if block_given?

        @path = path.sub(/^\~\//, '')
      end

      private

      ##
      # Establishes a connection to the remote server
      # and yields the Net::SFTP session.
      def connection
        Net::SFTP.start(
          ip, username,
          :password => password,
          :port     => port
        ) {|sftp| yield sftp }
      end

      ##
      # Transfers the archived file to the specified remote server
      def transfer!
        remote_path = remote_path_for(@package)

        connection do |session|
          create_remote_path(remote_path, session)

          files_to_transfer_for(@package) do |local_file, remote_file|
            Logger.message(
              "#{storage_name} started transferring " +
              "'#{ local_file }' to '#{ ip }'."
            )

            source      = File.join(local_path, local_file)
            destination = File.join(remote_path, remote_file)
            session.upload!(source, destination)
          end
        end
      end

      ##
      # Removes the transferred archive file(s) from the storage location.
      # Any error raised will be rescued during Cycling
      # and a warning will be logged, containing the error message.
      def remove!(package)
        remote_path = remote_path_for(package)

        connection do |session|
          transferred_files_for(package) do |local_file, remote_file|
            Logger.message(
              "#{storage_name} started removing " +
              "'#{ local_file }' from '#{ ip }'."
            )

            session.remove!(File.join(remote_path, remote_file))
          end

          # The directory is empty at this point, so remove it too.
          session.rmdir!(remote_path)
        end
      end

      ##
      # Creates (if they don't exist yet) all the directories on the remote
      # server in order to upload the backup file. Net::SFTP does not support
      # paths to directories that don't yet exist when creating new
      # directories. Instead, we split the parts up in to an array (for each
      # '/') and loop through that to create the directories one by one.
      # Net::SFTP raises an exception when the directory it's trying to create
      # already exists, so we have rescue it
      def create_remote_path(remote_path, sftp)
        built_path = []
        remote_path.split('/').each do |segment|
          built_path << segment
          begin
            sftp.mkdir!(built_path.join('/'))
          rescue Net::SFTP::StatusException
            # Directory already exists -- nothing to do.
          end
        end
      end

    end
  end
end
# encoding: utf-8

module Backup
  module Syncer
    class Base
      include Backup::CLI::Helpers
      include Backup::Configuration::Helpers

      ##
      # Directories to sync
      attr_accessor :directories

      ##
      # Path to store the synced files/directories to
      attr_accessor :path

      ##
      # Flag for mirroring the files/directories
      attr_accessor :mirror

      ##
      # Syntactical sugar for the DSL for adding directories.
      # Without a block, acts as the reader for @directories;
      # with a block, instance_eval's it so `add 'some/path'`
      # calls can be grouped together.
      def directories(&block)
        return @directories unless block_given?
        instance_eval(&block)
      end

      ##
      # Adds a path to the @directories array.
      # The array is lazily initialized here: Base has no initializer,
      # and not every subclass sets @directories before the DSL runs,
      # so a plain `@directories << path` could raise NoMethodError on nil.
      def add(path)
        (@directories ||= []) << path
      end

      private

      ##
      # Returns the syncer name with the 'Backup::' prefix removed,
      # e.g. 'Syncer::Cloud' -- used in log messages.
      def syncer_name
        self.class.to_s.sub('Backup::', '')
      end

    end
  end
end
# encoding: utf-8

##
# Only load the Fog gem, along with the Parallel gem, when the Backup::Syncer::Cloud class is loaded
Backup::Dependency.load('fog')
Backup::Dependency.load('parallel')

module Backup
  module Syncer
    class Cloud < Base

      ##
      # Create a Mutex to synchronize certain parts of the code
      # in order to prevent race conditions or broken STDOUT.
      MUTEX = Mutex.new

      ##
      # Concurrency setting - defaults to false, but can be set to:
      # - :threads
      # - :processes
      attr_accessor :concurrency_type

      ##
      # Concurrency level - the number of threads or processors to use. Defaults to 2.
      attr_accessor :concurrency_level

      ##
      # Instantiates a new Cloud Syncer object and sets the default
      # configuration specified in the Backup::Configuration::Syncer::S3. Then
      # it sets the object defaults if particular properties weren't set.
      # Finally it'll evaluate the users configuration file and overwrite
      # anything that's been defined.
      def initialize(&block)
        load_defaults!

        @path              ||= 'backups'
        @directories       ||= Array.new
        @mirror            ||= false
        # Conditional assignment here, matching the settings above: a plain
        # `=` would clobber any concurrency values already applied by
        # load_defaults! from the user's Configuration defaults.
        @concurrency_type  ||= false
        @concurrency_level ||= 2

        instance_eval(&block) if block_given?

        @path = path.sub(/^\//, '')
      end

      ##
      # Performs the Sync operation
      def perform!
        Logger.message("#{ syncer_name } started the syncing process:")

        directories.each do |directory|
          SyncContext.new(directory, repository_object, path).
            sync! mirror, concurrency_type, concurrency_level
        end
      end

      private

      class SyncContext
        attr_reader :directory, :bucket, :path

        ##
        # Creates a new SyncContext object which handles a single directory
        # from the Syncer::Base @directories array.
        def initialize(directory, bucket, path)
          @directory, @bucket, @path = directory, bucket, path
        end

        ##
        # Performs the sync operation using the provided techniques
        # (mirroring/concurrency). Raises a ConfigurationError for any
        # concurrency_type other than false, :threads or :processes.
        def sync!(mirror = false, concurrency_type = false, concurrency_level = 2)
          block = Proc.new { |relative_path| sync_file relative_path, mirror }

          case concurrency_type
          when FalseClass
            all_file_names.each &block
          when :threads
            Parallel.each all_file_names, :in_threads => concurrency_level, &block
          when :processes
            Parallel.each all_file_names, :in_processes => concurrency_level, &block
          else
            raise Errors::Syncer::Cloud::ConfigurationError,
                "Unknown concurrency_type setting: #{concurrency_type.inspect}"
          end
        end

        private

        ##
        # Gathers all the remote and local file names and merges them
        # together, removing duplicate keys if any, and sorts them in
        # alphabetical order.
        def all_file_names
          @all_file_names ||= (local_files.keys | remote_files.keys).sort
        end

        ##
        # Returns a Hash of local files (the keys are the filesystem paths,
        # the values are the LocalFile objects for that given file)
        def local_files
          @local_files ||= begin
            local_hashes.split("\n").collect { |line|
              LocalFile.new directory, line
            }.inject({}) { |hash, file|
              hash[file.relative_path] = file
              hash
            }
          end
        end

        ##
        # Returns a String of file paths and their md5 hashes, produced by
        # shelling out to `find | xargs openssl md5` (stderr discarded).
        def local_hashes
          MUTEX.synchronize { Logger.message("\s\sGenerating checksums for #{ directory }") }
          `find #{directory} -print0 | xargs -0 openssl md5 2> /dev/null`
        end

        ##
        # Returns a Hash of remote files (the keys are the remote paths,
        # the values are the Fog file objects for that given file)
        def remote_files
          @remote_files ||= bucket.files.to_a.select { |file|
            file.key[%r{^#{remote_base}/}]
          }.inject({}) { |hash, file|
            key = file.key.gsub(/^#{remote_base}\//,
                                "#{directory.split('/').last}/")
            hash[key] = file
            hash
          }
        end

        ##
        # Creates and returns a String that represents the base remote storage path
        def remote_base
          @remote_base ||= [path, directory.split('/').last].select { |part|
            part && part.strip.length > 0
          }.join('/')
        end

        ##
        # Performs a sync operation on a file. When mirroring is enabled
        # and a local file has been removed since the last sync, it will also
        # remove it from the remote location. It will not upload files that
        # have not changed since the last sync. Checks are done using an md5 hash.
        # If a file has changed, or has been newly added, the file will be
        # transferred/overwritten.
        def sync_file(relative_path, mirror)
          local_file  = local_files[relative_path]
          remote_file = remote_files[relative_path]

          if local_file && File.exist?(local_file.path)
            # Upload only when the remote copy is missing or its etag
            # differs from the local md5 (i.e. the content changed).
            unless remote_file && remote_file.etag == local_file.md5
              MUTEX.synchronize { Logger.message("\s\s[transferring] #{relative_path}") }
              File.open(local_file.path, 'r') do |file|
                bucket.files.create(
                  :key  => "#{path}/#{relative_path}".gsub(/^\//, ''),
                  :body => file
                )
              end
            else
              MUTEX.synchronize { Logger.message("\s\s[skipping] #{relative_path}") }
            end
          elsif remote_file && mirror
            MUTEX.synchronize { Logger.message("\s\s[removing] #{relative_path}") }
            remote_file.destroy
          end
        end
      end

      class LocalFile
        attr_reader :directory, :path, :md5

        ##
        # Creates a new LocalFile object using the given directory and a line
        # of `openssl md5` output ("MD5(<path>)= <hash>"). This object figures
        # out the path, relative_path and md5 hash for the file.
        # NOTE(review): a filename containing ')' or any line not matching the
        # pattern makes `match` return nil and raises NoMethodError on
        # `.captures` -- confirm whether such paths/lines can occur here.
        def initialize(directory, line)
          @directory = directory
          @path, @md5 = *line.chomp.match(/^MD5\(([^\)]+)\)= (\w+)$/).captures
        end

        ##
        # Returns the relative path to the file.
        def relative_path
          @relative_path ||= path.gsub %r{^#{directory}},
              directory.split('/').last
        end
      end
    end
  end
end