interu-backup 3.0.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +2 -0
- data/Gemfile +31 -0
- data/Gemfile.lock +117 -0
- data/Guardfile +17 -0
- data/LICENSE.md +24 -0
- data/README.md +332 -0
- data/backup.gemspec +31 -0
- data/bin/backup +267 -0
- data/lib/backup.rb +181 -0
- data/lib/backup/archive.rb +73 -0
- data/lib/backup/cli.rb +82 -0
- data/lib/backup/compressor/base.rb +17 -0
- data/lib/backup/compressor/bzip2.rb +64 -0
- data/lib/backup/compressor/gzip.rb +61 -0
- data/lib/backup/configuration/base.rb +15 -0
- data/lib/backup/configuration/compressor/base.rb +10 -0
- data/lib/backup/configuration/compressor/bzip2.rb +23 -0
- data/lib/backup/configuration/compressor/gzip.rb +23 -0
- data/lib/backup/configuration/database/base.rb +18 -0
- data/lib/backup/configuration/database/mongodb.rb +41 -0
- data/lib/backup/configuration/database/mysql.rb +37 -0
- data/lib/backup/configuration/database/postgresql.rb +37 -0
- data/lib/backup/configuration/database/redis.rb +35 -0
- data/lib/backup/configuration/encryptor/base.rb +10 -0
- data/lib/backup/configuration/encryptor/gpg.rb +17 -0
- data/lib/backup/configuration/encryptor/open_ssl.rb +26 -0
- data/lib/backup/configuration/helpers.rb +54 -0
- data/lib/backup/configuration/notifier/base.rb +39 -0
- data/lib/backup/configuration/notifier/campfire.rb +25 -0
- data/lib/backup/configuration/notifier/mail.rb +52 -0
- data/lib/backup/configuration/notifier/presently.rb +25 -0
- data/lib/backup/configuration/notifier/twitter.rb +21 -0
- data/lib/backup/configuration/storage/base.rb +18 -0
- data/lib/backup/configuration/storage/cloudfiles.rb +21 -0
- data/lib/backup/configuration/storage/dropbox.rb +29 -0
- data/lib/backup/configuration/storage/ftp.rb +25 -0
- data/lib/backup/configuration/storage/rsync.rb +25 -0
- data/lib/backup/configuration/storage/s3.rb +25 -0
- data/lib/backup/configuration/storage/scp.rb +25 -0
- data/lib/backup/configuration/storage/sftp.rb +25 -0
- data/lib/backup/configuration/syncer/rsync.rb +45 -0
- data/lib/backup/configuration/syncer/s3.rb +33 -0
- data/lib/backup/database/base.rb +33 -0
- data/lib/backup/database/mongodb.rb +179 -0
- data/lib/backup/database/mysql.rb +104 -0
- data/lib/backup/database/postgresql.rb +111 -0
- data/lib/backup/database/redis.rb +105 -0
- data/lib/backup/dependency.rb +96 -0
- data/lib/backup/encryptor/base.rb +17 -0
- data/lib/backup/encryptor/gpg.rb +78 -0
- data/lib/backup/encryptor/open_ssl.rb +67 -0
- data/lib/backup/exception/command_not_found.rb +8 -0
- data/lib/backup/finder.rb +39 -0
- data/lib/backup/logger.rb +102 -0
- data/lib/backup/model.rb +272 -0
- data/lib/backup/notifier/base.rb +29 -0
- data/lib/backup/notifier/binder.rb +32 -0
- data/lib/backup/notifier/campfire.rb +194 -0
- data/lib/backup/notifier/mail.rb +141 -0
- data/lib/backup/notifier/presently.rb +105 -0
- data/lib/backup/notifier/templates/notify_failure.erb +33 -0
- data/lib/backup/notifier/templates/notify_success.erb +16 -0
- data/lib/backup/notifier/twitter.rb +87 -0
- data/lib/backup/storage/base.rb +67 -0
- data/lib/backup/storage/cloudfiles.rb +95 -0
- data/lib/backup/storage/dropbox.rb +91 -0
- data/lib/backup/storage/ftp.rb +114 -0
- data/lib/backup/storage/object.rb +45 -0
- data/lib/backup/storage/rsync.rb +129 -0
- data/lib/backup/storage/s3.rb +180 -0
- data/lib/backup/storage/scp.rb +106 -0
- data/lib/backup/storage/sftp.rb +106 -0
- data/lib/backup/syncer/base.rb +10 -0
- data/lib/backup/syncer/rsync.rb +152 -0
- data/lib/backup/syncer/s3.rb +118 -0
- data/lib/backup/version.rb +43 -0
- data/lib/templates/archive +7 -0
- data/lib/templates/compressor/bzip2 +7 -0
- data/lib/templates/compressor/gzip +7 -0
- data/lib/templates/database/mongodb +14 -0
- data/lib/templates/database/mysql +14 -0
- data/lib/templates/database/postgresql +14 -0
- data/lib/templates/database/redis +13 -0
- data/lib/templates/encryptor/gpg +12 -0
- data/lib/templates/encryptor/openssl +8 -0
- data/lib/templates/notifier/campfire +11 -0
- data/lib/templates/notifier/mail +17 -0
- data/lib/templates/notifier/presently +12 -0
- data/lib/templates/notifier/twitter +12 -0
- data/lib/templates/readme +15 -0
- data/lib/templates/storage/cloudfiles +10 -0
- data/lib/templates/storage/dropbox +12 -0
- data/lib/templates/storage/ftp +11 -0
- data/lib/templates/storage/rsync +10 -0
- data/lib/templates/storage/s3 +21 -0
- data/lib/templates/storage/scp +11 -0
- data/lib/templates/storage/sftp +11 -0
- data/lib/templates/syncer/rsync +17 -0
- data/lib/templates/syncer/s3 +15 -0
- data/spec/archive_spec.rb +90 -0
- data/spec/backup_spec.rb +11 -0
- data/spec/compressor/bzip2_spec.rb +59 -0
- data/spec/compressor/gzip_spec.rb +59 -0
- data/spec/configuration/base_spec.rb +35 -0
- data/spec/configuration/compressor/gzip_spec.rb +28 -0
- data/spec/configuration/database/base_spec.rb +16 -0
- data/spec/configuration/database/mongodb_spec.rb +30 -0
- data/spec/configuration/database/mysql_spec.rb +32 -0
- data/spec/configuration/database/postgresql_spec.rb +32 -0
- data/spec/configuration/database/redis_spec.rb +30 -0
- data/spec/configuration/encryptor/gpg_spec.rb +25 -0
- data/spec/configuration/encryptor/open_ssl_spec.rb +31 -0
- data/spec/configuration/notifier/campfire_spec.rb +20 -0
- data/spec/configuration/notifier/mail_spec.rb +32 -0
- data/spec/configuration/notifier/twitter_spec.rb +22 -0
- data/spec/configuration/storage/cloudfiles_spec.rb +34 -0
- data/spec/configuration/storage/dropbox_spec.rb +43 -0
- data/spec/configuration/storage/ftp_spec.rb +40 -0
- data/spec/configuration/storage/rsync_spec.rb +37 -0
- data/spec/configuration/storage/s3_spec.rb +37 -0
- data/spec/configuration/storage/scp_spec.rb +40 -0
- data/spec/configuration/storage/sftp_spec.rb +40 -0
- data/spec/configuration/syncer/rsync_spec.rb +46 -0
- data/spec/configuration/syncer/s3_spec.rb +43 -0
- data/spec/database/base_spec.rb +30 -0
- data/spec/database/mongodb_spec.rb +181 -0
- data/spec/database/mysql_spec.rb +150 -0
- data/spec/database/postgresql_spec.rb +164 -0
- data/spec/database/redis_spec.rb +122 -0
- data/spec/encryptor/gpg_spec.rb +57 -0
- data/spec/encryptor/open_ssl_spec.rb +102 -0
- data/spec/logger_spec.rb +58 -0
- data/spec/model_spec.rb +236 -0
- data/spec/notifier/campfire_spec.rb +96 -0
- data/spec/notifier/mail_spec.rb +97 -0
- data/spec/notifier/presently_spec.rb +99 -0
- data/spec/notifier/twitter_spec.rb +86 -0
- data/spec/spec_helper.rb +25 -0
- data/spec/storage/base_spec.rb +33 -0
- data/spec/storage/cloudfiles_spec.rb +102 -0
- data/spec/storage/dropbox_spec.rb +105 -0
- data/spec/storage/ftp_spec.rb +133 -0
- data/spec/storage/object_spec.rb +74 -0
- data/spec/storage/rsync_spec.rb +131 -0
- data/spec/storage/s3_spec.rb +110 -0
- data/spec/storage/scp_spec.rb +129 -0
- data/spec/storage/sftp_spec.rb +125 -0
- data/spec/syncer/rsync_spec.rb +195 -0
- data/spec/syncer/s3_spec.rb +139 -0
- data/spec/version_spec.rb +21 -0
- metadata +231 -0
data/lib/backup/storage/object.rb

@@ -0,0 +1,45 @@
# encoding: utf-8

module Backup
  module Storage
    class Object

      ##
      # Holds the type attribute
      attr_accessor :storage_file

      ##
      # Instantiates a new Backup::Storage::Object and stores the
      # full path to the storage file (yaml) in the @storage_file attribute
      def initialize(type)
        @storage_file = File.join(DATA_PATH, TRIGGER, "#{type}.yml")
      end

      ##
      # Tries to load an existing YAML file and returns an
      # array of storage objects. If no file exists, an empty
      # array gets returned
      #
      # If a file is loaded it'll sort the array of objects by @time
      # descending. The newest backup storage object comes in Backup::Storage::Object.load[0]
      # and the oldest in Backup::Storage::Object.load[-1]
      def load
        if File.exist?(storage_file)
          YAML.load_file(storage_file).sort { |a,b| b.time <=> a.time }
        else
          []
        end
      end

      ##
      # Takes the provided objects and converts it to YAML format.
      # The YAML data gets written to the storage file
      def write(objects)
        File.open(storage_file, 'w') do |file|
          file.write(objects.to_yaml)
        end
      end

    end
  end
end
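The comments on `#load` promise newest-first ordering: the sort block compares `b.time <=> a.time`, so `load[0]` is the most recent backup and `load[-1]` the oldest. A minimal plain-Ruby sketch of that ordering (the timestamp strings below are made-up examples, not values taken from this package):

```ruby
require 'ostruct'

# Stand-ins for persisted storage objects; only the #time attribute matters here.
objects = [
  OpenStruct.new(:time => '2011.01.01.01.01.01'),
  OpenStruct.new(:time => '2011.06.15.12.00.00')
]

# Same comparison block as Backup::Storage::Object#load above.
sorted = objects.sort { |a, b| b.time <=> a.time }

puts sorted.first.time # => 2011.06.15.12.00.00 (newest ends up at index 0)
puts sorted.last.time  # => 2011.01.01.01.01.01 (oldest ends up at index -1)
```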
data/lib/backup/storage/rsync.rb

@@ -0,0 +1,129 @@
# encoding: utf-8

##
# Require the tempfile Ruby library when Backup::Storage::RSync is loaded
require 'tempfile'

##
# Only load the Net::SSH library when the Backup::Storage::RSync class is loaded
Backup::Dependency.load('net-ssh')

module Backup
  module Storage
    class RSync < Base
      include Backup::CLI

      ##
      # Server credentials
      attr_accessor :username, :password

      ##
      # Server IP Address and SSH port
      attr_accessor :ip, :port

      ##
      # Path to store backups to
      attr_accessor :path

      ##
      # Creates a new instance of the RSync storage object
      # First it sets the defaults (if any exist) and then evaluates
      # the configuration block which may overwrite these defaults
      def initialize(&block)
        load_defaults!

        @port ||= 22
        @path ||= 'backups'

        instance_eval(&block) if block_given?
        write_password_file!

        @time = TIME
        @path = path.sub(/^\~\//, '')
      end

      ##
      # This is the remote path to where the backup files will be stored
      def remote_path
        File.join(path, TRIGGER)
      end

      ##
      # Performs the backup transfer
      def perform!
        transfer!
        remove_password_file!
      end

    private

      ##
      # Establishes a connection to the remote server and returns the Net::SSH object.
      # Not doing any instance variable caching because this object gets persisted in YAML
      # format to a file and caching would cause issues. This, however, has no impact on performance since it only
      # gets invoked once per object for a #transfer! and once for a remove! Backups run in the
      # background anyway so even if it were a bit slower it shouldn't matter.
      def connection
        Net::SSH.start(ip, username, :password => @password, :port => port)
      end

      ##
      # Transfers the archived file to the specified remote server
      def transfer!
        Logger.message("#{ self.class } started transferring \"#{ remote_file }\".")
        create_remote_directories!
        run("#{ utility(:rsync) } #{ options } #{ password } '#{ File.join(local_path, local_file) }' '#{ username }@#{ ip }:#{ File.join(remote_path, remote_file[20..-1]) }'")
      end

      ##
      # Removes the transferred archive file from the server
      def remove!
        response = connection.exec!("rm #{ File.join(remote_path, remote_file) }")
        if response =~ /No such file or directory/
          Logger.warn "Could not remove file \"#{ File.join(remote_path, remote_file) }\"."
        end
      end

      ##
      # Creates (if they don't exist yet) all the directories on the remote
      # server in order to upload the backup file.
      def create_remote_directories!
        connection.exec!("mkdir -p '#{ remote_path }'")
      end

      ##
      # RSync options
      # -z = Compresses the bytes that will be transferred to reduce bandwidth usage
      # --port = the port to connect to through SSH
      # -Phv = debug options
      def options
        "-z --port='#{ port }'"
      end

      ##
      # Returns Rsync syntax for using a password file
      def password
        "--password-file='#{@password_file.path}'" unless @password.nil?
      end

      ##
      # Writes the provided password to a temporary file so that
      # the rsync utility can read the password from this file
      def write_password_file!
        unless @password.nil?
          @password_file = Tempfile.new('backup-rsync-password')
          @password_file.write(@password)
          @password_file.close
        end
      end

      ##
      # Removes the previously created @password_file
      # (temporary file containing the password)
      def remove_password_file!
        @password_file.unlink unless @password.nil?
      end

    end
  end
end
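For context, the accessors above (`username`, `password`, `ip`, `port`, `path`) are normally set from a Backup configuration file rather than by instantiating this class directly; the generated template ships in data/lib/templates/storage/rsync and the `store_with` DSL is provided by data/lib/backup/model.rb (neither is shown in this excerpt). A hedged sketch of such a configuration block, with placeholder values:

```ruby
# Sketch of a Backup v3 configuration entry using the RSync storage.
# The trigger name and all values are placeholders.
Backup::Model.new(:my_backup, 'Description for my_backup') do

  store_with RSync do |server|
    server.username = 'my_username'
    server.password = 'my_password'   # written to a Tempfile and passed via --password-file
    server.ip       = '123.45.678.90'
    server.port     = 22              # defaults to 22 when omitted
    server.path     = '~/backups/'    # a leading '~/' is stripped in #initialize
  end

end
```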
data/lib/backup/storage/s3.rb

@@ -0,0 +1,180 @@
# encoding: utf-8

##
# Only load the Fog gem when the Backup::Storage::S3 class is loaded
Backup::Dependency.load('fog')

module Backup
  module Storage
    class S3 < Base

      ##
      # Amazon Simple Storage Service (S3) Credentials
      attr_accessor :access_key_id, :secret_access_key

      ##
      # Amazon S3 bucket name and path
      attr_accessor :bucket, :path

      ##
      # Region of the specified S3 bucket
      attr_accessor :region

      ##
      # Creates a new instance of the Amazon S3 storage object
      # First it sets the defaults (if any exist) and then evaluates
      # the configuration block which may overwrite these defaults
      #
      # Currently available regions:
      #   eu-west-1, us-east-1, ap-southeast-1, us-west-1
      def initialize(&block)
        load_defaults!

        @path ||= 'backups'

        instance_eval(&block) if block_given?

        @time = TIME
      end

      ##
      # This is the remote path to where the backup files will be stored
      def remote_path
        File.join(path, TRIGGER).sub(/^\//, '')
      end

      ##
      # This is the provider that Fog uses for the S3 Storage
      def provider
        'AWS'
      end

      ##
      # Performs the backup transfer
      def perform!
        transfer!
        cycle!
      end

    private

      ##
      # Establishes a connection to Amazon S3 and returns the Fog object.
      # Not doing any instance variable caching because this object gets persisted in YAML
      # format to a file and caching would cause issues. This, however, has no impact on performance since it only
      # gets invoked once per object for a #transfer! and once for a remove! Backups run in the
      # background anyway so even if it were a bit slower it shouldn't matter.
      def connection
        Fog::Storage.new(
          :provider              => provider,
          :aws_access_key_id     => access_key_id,
          :aws_secret_access_key => secret_access_key,
          :region                => region
        )
      end

      ##
      # Transfers the archived file to the specified Amazon S3 bucket
      def transfer!
        # maximum file size 5GB
        #max_file_size = 5368709120
        max_file_size = 15728640
        # split size must be between 5MB and 5GB
        max_split_size = max_file_size - 5242880

        begin
          local_file_path = File.join(local_path, local_file)

          Logger.message("#{ self.class } started transferring \"#{ remote_file }\".")
          connection.sync_clock
          if File.stat(local_file_path).size <= max_file_size
            connection.put_object(
              bucket,
              File.join(remote_path, remote_file),
              File.open(File.join(local_path, local_file))
            )
          else
            Logger.message("#{ self.class } started multipart uploading \"#{ remote_file }\".")

            workspace_path = local_path + "/workspace"
            create_workspace(workspace_path)

            `split -b #{max_split_size} #{local_file_path} #{workspace_path}/#{local_file}.0`

            upload_id = initiate_multipart_upload
            etags = upload_part(workspace_path, upload_id)

            s3_md5 = complete_multipart_upload(etags, upload_id)
            ## please check etag
            # if it's different from local_file, try to upload again.
            # ex)
            #   require 'digest/md5'
            #   original_md5 = Digest::MD5.hexdigest(File.open(local_file_path).read)

            remove_workspace(workspace_path)
          end
        rescue Excon::Errors::NotFound => e
          raise "An error occurred while trying to transfer the backup, please make sure the bucket exists.\n #{e.inspect}"
        end
      end

      def initiate_multipart_upload
        res = connection.initiate_multipart_upload(
          bucket,
          File.join(remote_path, remote_file)
        )
        res.body['UploadId']
      end

      def upload_part workspace_path, upload_id
        etags = []
        split_files = Dir.entries(workspace_path).select{|file| file != ".." and file != "."}.sort

        split_files.each_with_index do |split_file, index|
          Logger.message("uploading #{index + 1} / #{split_files.size}")
          res = connection.upload_part(
            bucket,
            File.join(remote_path, remote_file),
            upload_id,
            index + 1,
            File.open(File.join(workspace_path, split_file))
          )
          etags << res.headers['ETag']
        end
        etags
      end

      def complete_multipart_upload etags, upload_id
        res = connection.complete_multipart_upload(
          bucket,
          File.join(remote_path, remote_file),
          upload_id,
          etags
        )
        res.body['ETag']
      end

      def create_workspace workspace_path
        Dir.mkdir(workspace_path)
      end

      def remove_workspace workspace_path
        split_files = Dir.entries(workspace_path).select{|file| file != ".." and file != "."}.sort
        split_files.each do |split_file|
          File.delete(File.join(workspace_path, split_file))
        end
        Dir.rmdir(workspace_path)
      end

      ##
      # Removes the transferred archive file from the Amazon S3 bucket
      def remove!
        begin
          connection.sync_clock
          connection.delete_object(bucket, File.join(remote_path, remote_file))
        rescue Excon::Errors::SocketError; end
      end

    end
  end
end
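Note the size thresholds driving the branch in `transfer!`: the single-request cutoff is hard-coded to 15728640 bytes (15 MiB; the commented-out 5368709120 corresponds to the 5 GB limit mentioned in the comment), and each chunk handed to `split -b` is that cutoff minus 5242880 bytes. A quick standalone check of the arithmetic:

```ruby
# Reproduces the constants computed in Backup::Storage::S3#transfer! above.
max_file_size  = 15_728_640                  # cutoff for a single put_object (15 MiB)
max_split_size = max_file_size - 5_242_880   # chunk size passed to `split -b` (10 MiB)

puts "single upload cutoff: #{max_file_size / 1_048_576} MiB"
puts "multipart chunk size: #{max_split_size / 1_048_576} MiB"
# S3 requires multipart parts (except the last) to be at least 5 MiB, which 10 MiB satisfies.
```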
data/lib/backup/storage/scp.rb

@@ -0,0 +1,106 @@
# encoding: utf-8

##
# Only load the Net::SSH and Net::SCP library/gems
# when the Backup::Storage::SCP class is loaded
Backup::Dependency.load('net-ssh')
Backup::Dependency.load('net-scp')


module Backup
  module Storage
    class SCP < Base

      ##
      # Server credentials
      attr_accessor :username, :password

      ##
      # Server IP Address and SCP port
      attr_accessor :ip, :port

      ##
      # Path to store backups to
      attr_accessor :path

      ##
      # Creates a new instance of the SCP storage object
      # First it sets the defaults (if any exist) and then evaluates
      # the configuration block which may overwrite these defaults
      def initialize(&block)
        load_defaults!

        @port ||= 22
        @path ||= 'backups'

        instance_eval(&block) if block_given?

        @time = TIME
        @path = path.sub(/^\~\//, '')
      end

      ##
      # This is the remote path to where the backup files will be stored
      def remote_path
        File.join(path, TRIGGER)
      end

      ##
      # Performs the backup transfer
      def perform!
        transfer!
        cycle!
      end

    private

      ##
      # Establishes a connection to the remote server and returns the Net::SCP object.
      # Not doing any instance variable caching because this object gets persisted in YAML
      # format to a file and caching would cause issues. This, however, has no impact on performance since it only
      # gets invoked once per object for a #transfer! and once for a remove! Backups run in the
      # background anyway so even if it were a bit slower it shouldn't matter.
      #
      # We will be using Net::SSH, and use Net::SCP through Net::SSH to transfer backups
      def connection
        Net::SSH.start(ip, username, :password => password, :port => port)
      end

      ##
      # Transfers the archived file to the specified remote server
      def transfer!
        Logger.message("#{ self.class } started transferring \"#{ remote_file }\".")
        create_remote_directories!
        connection.scp.upload!(
          File.join(local_path, local_file),
          File.join(remote_path, remote_file)
        )
      end

      ##
      # Removes the transferred archive file from the server
      def remove!
        response = connection.exec!("rm #{ File.join(remote_path, remote_file) }")
        if response =~ /No such file or directory/
          Logger.warn "Could not remove file \"#{ File.join(remote_path, remote_file) }\"."
        end
      end

      ##
      # Creates (if they don't exist yet) all the directories on the remote
      # server in order to upload the backup file. Net::SCP does not support
      # paths to directories that don't yet exist when creating new directories.
      # Instead, we split the parts up into an array (for each '/') and loop through
      # that to create the directories one by one. Net::SCP raises an exception when
      # the directory it's trying to create already exists, so we have to rescue it
      def create_remote_directories!
        path_parts = Array.new
        remote_path.split('/').each do |path_part|
          path_parts << path_part
          connection.exec!("mkdir '#{ path_parts.join('/') }'")
        end
      end

    end
  end
end
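The `create_remote_directories!` loop above builds the remote path one segment at a time, so each parent directory exists before the next `mkdir` runs. A standalone illustration of the commands it would produce for a hypothetical remote path of `backups/my_trigger` (no SSH connection involved):

```ruby
# Prints the mkdir commands create_remote_directories! would run over SSH
# for the hypothetical remote path 'backups/my_trigger'.
path_parts = []
'backups/my_trigger'.split('/').each do |path_part|
  path_parts << path_part
  puts "mkdir '#{path_parts.join('/')}'"
end
# Output:
#   mkdir 'backups'
#   mkdir 'backups/my_trigger'
```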