backup 3.0.23 → 3.0.24
- data/Gemfile.lock +42 -45
- data/Guardfile +7 -4
- data/README.md +10 -7
- data/backup.gemspec +2 -2
- data/lib/backup.rb +27 -97
- data/lib/backup/archive.rb +14 -6
- data/lib/backup/cli/helpers.rb +52 -49
- data/lib/backup/cli/utility.rb +9 -1
- data/lib/backup/compressor/base.rb +10 -4
- data/lib/backup/compressor/bzip2.rb +22 -26
- data/lib/backup/compressor/custom.rb +53 -0
- data/lib/backup/compressor/gzip.rb +22 -23
- data/lib/backup/compressor/lzma.rb +15 -13
- data/lib/backup/compressor/pbzip2.rb +20 -17
- data/lib/backup/config.rb +6 -3
- data/lib/backup/configuration.rb +33 -0
- data/lib/backup/configuration/helpers.rb +114 -28
- data/lib/backup/configuration/store.rb +24 -0
- data/lib/backup/database/base.rb +0 -6
- data/lib/backup/database/mongodb.rb +27 -11
- data/lib/backup/database/mysql.rb +19 -14
- data/lib/backup/database/postgresql.rb +16 -11
- data/lib/backup/database/redis.rb +7 -11
- data/lib/backup/database/riak.rb +3 -6
- data/lib/backup/dependency.rb +5 -11
- data/lib/backup/model.rb +14 -5
- data/lib/backup/notifier/campfire.rb +3 -16
- data/lib/backup/notifier/hipchat.rb +1 -7
- data/lib/backup/notifier/mail.rb +1 -1
- data/lib/backup/packager.rb +29 -19
- data/lib/backup/pipeline.rb +110 -0
- data/lib/backup/storage/dropbox.rb +4 -7
- data/lib/backup/syncer/base.rb +8 -4
- data/lib/backup/syncer/cloud/base.rb +247 -0
- data/lib/backup/syncer/cloud/cloud_files.rb +78 -0
- data/lib/backup/syncer/cloud/s3.rb +68 -0
- data/lib/backup/syncer/rsync/base.rb +1 -4
- data/lib/backup/syncer/rsync/local.rb +9 -5
- data/lib/backup/syncer/rsync/pull.rb +1 -1
- data/lib/backup/syncer/rsync/push.rb +10 -5
- data/lib/backup/version.rb +1 -1
- data/spec-live/.gitignore +6 -0
- data/spec-live/README +7 -0
- data/spec-live/backups/config.rb +153 -0
- data/spec-live/backups/config.yml.template +43 -0
- data/spec-live/compressor/custom_spec.rb +30 -0
- data/spec-live/compressor/gzip_spec.rb +30 -0
- data/spec-live/notifier/mail_spec.rb +85 -0
- data/spec-live/spec_helper.rb +85 -0
- data/spec-live/storage/dropbox_spec.rb +151 -0
- data/spec-live/storage/local_spec.rb +83 -0
- data/spec-live/storage/scp_spec.rb +193 -0
- data/spec-live/syncer/cloud/s3_spec.rb +124 -0
- data/spec/archive_spec.rb +86 -31
- data/spec/cleaner_spec.rb +8 -0
- data/spec/cli/helpers_spec.rb +200 -75
- data/spec/cli/utility_spec.rb +11 -3
- data/spec/compressor/base_spec.rb +31 -10
- data/spec/compressor/bzip2_spec.rb +212 -57
- data/spec/compressor/custom_spec.rb +106 -0
- data/spec/compressor/gzip_spec.rb +212 -57
- data/spec/compressor/lzma_spec.rb +75 -35
- data/spec/compressor/pbzip2_spec.rb +93 -52
- data/spec/configuration/helpers_spec.rb +406 -0
- data/spec/configuration/store_spec.rb +39 -0
- data/spec/configuration_spec.rb +62 -0
- data/spec/database/base_spec.rb +19 -10
- data/spec/database/mongodb_spec.rb +195 -70
- data/spec/database/mysql_spec.rb +183 -64
- data/spec/database/postgresql_spec.rb +167 -53
- data/spec/database/redis_spec.rb +121 -46
- data/spec/database/riak_spec.rb +96 -27
- data/spec/dependency_spec.rb +2 -0
- data/spec/encryptor/base_spec.rb +10 -0
- data/spec/encryptor/gpg_spec.rb +29 -13
- data/spec/encryptor/open_ssl_spec.rb +40 -21
- data/spec/logger_spec.rb +4 -0
- data/spec/model_spec.rb +19 -2
- data/spec/notifier/base_spec.rb +32 -17
- data/spec/notifier/campfire_spec.rb +63 -45
- data/spec/notifier/hipchat_spec.rb +79 -56
- data/spec/notifier/mail_spec.rb +82 -46
- data/spec/notifier/prowl_spec.rb +53 -32
- data/spec/notifier/twitter_spec.rb +62 -41
- data/spec/packager_spec.rb +95 -36
- data/spec/pipeline_spec.rb +259 -0
- data/spec/spec_helper.rb +6 -5
- data/spec/storage/base_spec.rb +61 -41
- data/spec/storage/cloudfiles_spec.rb +69 -45
- data/spec/storage/dropbox_spec.rb +158 -36
- data/spec/storage/ftp_spec.rb +69 -45
- data/spec/storage/local_spec.rb +47 -23
- data/spec/storage/ninefold_spec.rb +55 -31
- data/spec/storage/rsync_spec.rb +67 -50
- data/spec/storage/s3_spec.rb +65 -41
- data/spec/storage/scp_spec.rb +65 -41
- data/spec/storage/sftp_spec.rb +65 -41
- data/spec/syncer/base_spec.rb +91 -4
- data/spec/syncer/cloud/base_spec.rb +511 -0
- data/spec/syncer/cloud/cloud_files_spec.rb +181 -0
- data/spec/syncer/cloud/s3_spec.rb +174 -0
- data/spec/syncer/rsync/base_spec.rb +46 -66
- data/spec/syncer/rsync/local_spec.rb +55 -26
- data/spec/syncer/rsync/pull_spec.rb +15 -4
- data/spec/syncer/rsync/push_spec.rb +59 -52
- data/templates/cli/utility/compressor/bzip2 +1 -4
- data/templates/cli/utility/compressor/custom +11 -0
- data/templates/cli/utility/compressor/gzip +1 -4
- data/templates/cli/utility/compressor/lzma +3 -0
- data/templates/cli/utility/compressor/pbzip2 +3 -0
- data/templates/cli/utility/database/mysql +4 -1
- data/templates/cli/utility/syncer/cloud_files +17 -19
- data/templates/cli/utility/syncer/s3 +18 -20
- metadata +38 -92
- data/lib/backup/configuration/base.rb +0 -15
- data/lib/backup/configuration/compressor/base.rb +0 -9
- data/lib/backup/configuration/compressor/bzip2.rb +0 -23
- data/lib/backup/configuration/compressor/gzip.rb +0 -23
- data/lib/backup/configuration/compressor/lzma.rb +0 -23
- data/lib/backup/configuration/compressor/pbzip2.rb +0 -28
- data/lib/backup/configuration/database/base.rb +0 -19
- data/lib/backup/configuration/database/mongodb.rb +0 -49
- data/lib/backup/configuration/database/mysql.rb +0 -42
- data/lib/backup/configuration/database/postgresql.rb +0 -41
- data/lib/backup/configuration/database/redis.rb +0 -39
- data/lib/backup/configuration/database/riak.rb +0 -29
- data/lib/backup/configuration/encryptor/base.rb +0 -9
- data/lib/backup/configuration/encryptor/gpg.rb +0 -17
- data/lib/backup/configuration/encryptor/open_ssl.rb +0 -32
- data/lib/backup/configuration/notifier/base.rb +0 -28
- data/lib/backup/configuration/notifier/campfire.rb +0 -25
- data/lib/backup/configuration/notifier/hipchat.rb +0 -41
- data/lib/backup/configuration/notifier/mail.rb +0 -112
- data/lib/backup/configuration/notifier/presently.rb +0 -25
- data/lib/backup/configuration/notifier/prowl.rb +0 -23
- data/lib/backup/configuration/notifier/twitter.rb +0 -21
- data/lib/backup/configuration/storage/base.rb +0 -18
- data/lib/backup/configuration/storage/cloudfiles.rb +0 -25
- data/lib/backup/configuration/storage/dropbox.rb +0 -58
- data/lib/backup/configuration/storage/ftp.rb +0 -29
- data/lib/backup/configuration/storage/local.rb +0 -17
- data/lib/backup/configuration/storage/ninefold.rb +0 -20
- data/lib/backup/configuration/storage/rsync.rb +0 -29
- data/lib/backup/configuration/storage/s3.rb +0 -25
- data/lib/backup/configuration/storage/scp.rb +0 -25
- data/lib/backup/configuration/storage/sftp.rb +0 -25
- data/lib/backup/configuration/syncer/base.rb +0 -10
- data/lib/backup/configuration/syncer/cloud.rb +0 -23
- data/lib/backup/configuration/syncer/cloud_files.rb +0 -30
- data/lib/backup/configuration/syncer/rsync/base.rb +0 -28
- data/lib/backup/configuration/syncer/rsync/local.rb +0 -11
- data/lib/backup/configuration/syncer/rsync/pull.rb +0 -11
- data/lib/backup/configuration/syncer/rsync/push.rb +0 -31
- data/lib/backup/configuration/syncer/s3.rb +0 -23
- data/lib/backup/notifier/presently.rb +0 -88
- data/lib/backup/syncer/cloud.rb +0 -187
- data/lib/backup/syncer/cloud_files.rb +0 -56
- data/lib/backup/syncer/s3.rb +0 -47
- data/spec/configuration/base_spec.rb +0 -35
- data/spec/configuration/compressor/bzip2_spec.rb +0 -29
- data/spec/configuration/compressor/gzip_spec.rb +0 -29
- data/spec/configuration/compressor/lzma_spec.rb +0 -29
- data/spec/configuration/compressor/pbzip2_spec.rb +0 -32
- data/spec/configuration/database/base_spec.rb +0 -17
- data/spec/configuration/database/mongodb_spec.rb +0 -56
- data/spec/configuration/database/mysql_spec.rb +0 -53
- data/spec/configuration/database/postgresql_spec.rb +0 -53
- data/spec/configuration/database/redis_spec.rb +0 -50
- data/spec/configuration/database/riak_spec.rb +0 -35
- data/spec/configuration/encryptor/gpg_spec.rb +0 -26
- data/spec/configuration/encryptor/open_ssl_spec.rb +0 -35
- data/spec/configuration/notifier/base_spec.rb +0 -32
- data/spec/configuration/notifier/campfire_spec.rb +0 -32
- data/spec/configuration/notifier/hipchat_spec.rb +0 -44
- data/spec/configuration/notifier/mail_spec.rb +0 -71
- data/spec/configuration/notifier/presently_spec.rb +0 -35
- data/spec/configuration/notifier/prowl_spec.rb +0 -29
- data/spec/configuration/notifier/twitter_spec.rb +0 -35
- data/spec/configuration/storage/cloudfiles_spec.rb +0 -41
- data/spec/configuration/storage/dropbox_spec.rb +0 -38
- data/spec/configuration/storage/ftp_spec.rb +0 -44
- data/spec/configuration/storage/local_spec.rb +0 -29
- data/spec/configuration/storage/ninefold_spec.rb +0 -32
- data/spec/configuration/storage/rsync_spec.rb +0 -41
- data/spec/configuration/storage/s3_spec.rb +0 -38
- data/spec/configuration/storage/scp_spec.rb +0 -41
- data/spec/configuration/storage/sftp_spec.rb +0 -41
- data/spec/configuration/syncer/cloud_files_spec.rb +0 -44
- data/spec/configuration/syncer/rsync/base_spec.rb +0 -33
- data/spec/configuration/syncer/rsync/local_spec.rb +0 -10
- data/spec/configuration/syncer/rsync/pull_spec.rb +0 -10
- data/spec/configuration/syncer/rsync/push_spec.rb +0 -43
- data/spec/configuration/syncer/s3_spec.rb +0 -38
- data/spec/notifier/presently_spec.rb +0 -181
- data/spec/syncer/cloud_files_spec.rb +0 -192
- data/spec/syncer/s3_spec.rb +0 -192
- data/templates/cli/utility/notifier/presently +0 -13
data/lib/backup/pipeline.rb
ADDED
@@ -0,0 +1,110 @@
+# encoding: utf-8
+
+module Backup
+  class Pipeline
+    include Backup::CLI::Helpers
+
+    attr_reader :stderr, :errors
+
+    def initialize
+      @commands = []
+      @errors = []
+      @stderr = ''
+    end
+
+    ##
+    # Adds a command to be executed in the pipeline.
+    # Each command will be run in the order in which it was added,
+    # with its output being piped to the next command.
+    def <<(command)
+      @commands << command
+    end
+
+    ##
+    # Runs the command line from `#pipeline` and collects STDOUT/STDERR.
+    # STDOUT is then parsed to determine the exit status of each command.
+    # For each command with a non-zero exit status, a SystemCallError is
+    # created and added to @errors. All STDERR output is set in @stderr.
+    #
+    # Note that there is no accumulated STDOUT from the commands themselves.
+    # Also, the last command should not attempt to write to STDOUT.
+    # Any output on STDOUT from the final command will be sent to STDERR.
+    # This in itself will not cause #run to fail, but will log warnings
+    # when all commands exit with non-zero status.
+    #
+    # Use `#success?` to determine if all commands in the pipeline succeeded.
+    # If `#success?` returns `false`, use `#error_messages` to get an error report.
+    def run
+      Open4.popen4(pipeline) do |pid, stdin, stdout, stderr|
+        pipestatus = stdout.read.gsub("\n", '').split(':').sort
+        pipestatus.each do |status|
+          index, exitstatus = status.split('|').map(&:to_i)
+          if exitstatus > 0
+            command = command_name(@commands[index])
+            @errors << SystemCallError.new(
+              "'#{ command }' returned exit code: #{ exitstatus }", exitstatus
+            )
+          end
+        end
+        @stderr = stderr.read.strip
+      end
+      Logger.warn(stderr_messages) if success? && stderr_messages
+    rescue Exception => e
+      raise Errors::Pipeline::ExecutionError.wrap(e)
+    end
+
+    def success?
+      @errors.empty?
+    end
+
+    ##
+    # Returns a multi-line String, reporting all STDERR messages received
+    # from the commands in the pipeline (if any), along with the SystemCallError
+    # (Errno) message for each command which had a non-zero exit status.
+    #
+    # Each error is wrapped by Backup::Errors to provide formatting.
+    def error_messages
+      @error_messages ||= (stderr_messages || '') +
+          "The following system errors were returned:\n" +
+          @errors.map {|err| Errors::Error.wrap(err).message }.join("\n")
+    end
+
+    private
+
+    ##
+    # Each command is added as part of the pipeline, grouped with an `echo`
+    # command to pass along the command's index in @commands and its exit status.
+    # The command's STDERR is redirected to FD#4, and the `echo` command to
+    # report the "index|exit status" is redirected to FD#3.
+    # Each command's STDOUT will be connected to the STDIN of the next subshell.
+    # The entire pipeline is run within a container group, which redirects
+    # FD#3 to STDOUT and FD#4 to STDERR so these can be collected.
+    # FD#1 is redirected to STDERR so that any output from the final command
+    # on STDOUT will generate warnings, since the final command should not
+    # attempt to write to STDOUT, as this would interfere with collecting
+    # the exit statuses.
+    #
+    # There is no guarantee as to the order of this output, which is why the
+    # command's index in @commands is passed along with its exit status.
+    # And, if multiple commands output messages on STDERR, those messages
+    # may be interleaved. Interleaving of the "index|exit status" outputs
+    # should not be an issue, given the small byte size of the data being written.
+    def pipeline
+      parts = []
+      @commands.each_with_index do |command, index|
+        parts << %Q[{ #{ command } 2>&4 ; echo "#{ index }|$?:" >&3 ; }]
+      end
+      %Q[{ #{ parts.join(' | ') } } 3>&1 1>&2 4>&2]
+    end
+
+    def stderr_messages
+      @stderr_messages ||= @stderr.empty? ? false : <<-EOS.gsub(/^ +/, ' ')
+        Pipeline STDERR Messages:
+        (Note: may be interleaved if multiple commands returned error messages)
+
+        #{ @stderr }
+      EOS
+    end
+
+  end
+end
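To make the file-descriptor plumbing documented in `#pipeline` concrete, here is a small standalone Ruby sketch (not part of the gem) that assembles the same shell string for two arbitrary placeholder commands and runs it; only the "index|exitstatus:" markers written to FD#3 come back on STDOUT.

    # Standalone sketch of the shell string Pipeline#pipeline produces.
    # The two commands below are placeholders, not anything the gem runs.
    commands = ['echo hello', 'gzip -c > /dev/null']

    parts = commands.each_with_index.map do |command, index|
      %Q[{ #{ command } 2>&4 ; echo "#{ index }|$?:" >&3 ; }]
    end
    cmd = %Q[{ #{ parts.join(' | ') } } 3>&1 1>&2 4>&2]

    puts cmd
    # { { echo hello 2>&4 ; echo "0|$?:" >&3 ; } | { gzip -c > /dev/null 2>&4 ; echo "1|$?:" >&3 ; } } 3>&1 1>&2 4>&2

    # STDOUT of the group carries only the exit-status markers (order not guaranteed):
    puts `#{ cmd }`   # e.g. "0|0:\n1|0:"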
data/lib/backup/storage/dropbox.rb
CHANGED
@@ -23,13 +23,10 @@ module Backup
       # Path to where the backups will be stored
       attr_accessor :path
 
-
-
-
-
-          " is deprecated and will be removed at some point."
-        end
-      end
+      attr_deprecate :email,    :version => '3.0.17'
+      attr_deprecate :password, :version => '3.0.17'
+
+      attr_deprecate :timeout,  :version => '3.0.21'
 
       ##
       # Creates a new instance of the storage object
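The hand-rolled deprecation warnings deleted above are replaced by `attr_deprecate` declarations, which come from the reworked Configuration::Helpers (+114/-28 in the file list). As a rough illustration only, and not the gem's actual implementation, such a class-level macro could look like this:

    # Hypothetical sketch of an attr_deprecate-style macro -- the gem's real
    # version lives in lib/backup/configuration/helpers.rb and may differ.
    module Deprecations
      def attr_deprecate(name, options = {})
        define_method("#{ name }=") do |value|
          warn "##{ name }= has been deprecated as of backup v.#{ options[:version] }"
          # a real implementation could forward `value` to a replacement attribute here
        end
      end
    end

    class ExampleStorage
      extend Deprecations
      attr_deprecate :timeout, :version => '3.0.21'
    end

    ExampleStorage.new.timeout = 60   # prints a deprecation warning instead of failing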
data/lib/backup/syncer/base.rb
CHANGED
@@ -6,10 +6,6 @@ module Backup
       include Backup::CLI::Helpers
       include Backup::Configuration::Helpers
 
-      ##
-      # Directories to sync
-      attr_accessor :directories
-
       ##
       # Path to store the synced files/directories to
       attr_accessor :path
@@ -18,6 +14,14 @@ module Backup
       # Flag for mirroring the files/directories
       attr_accessor :mirror
 
+      def initialize
+        load_defaults!
+
+        @path   ||= 'backups'
+        @mirror ||= false
+        @directories = Array.new
+      end
+
       ##
       # Syntactical sugar for the DSL for adding directories
       def directories(&block)
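With `attr_accessor :directories` removed, directories are now collected through the `directories` DSL method shown above. An illustrative configuration-file snippet follows; the syncer choice, paths and options are placeholders, and the exact templates shipped with the gem may differ slightly.

    # Illustrative use of the new directories DSL from a Backup config file.
    sync_with RSync::Local do |rsync|
      rsync.path   = '~/backups'
      rsync.mirror = true

      rsync.directories do |directory|
        directory.add '/var/apps/my_app/public/uploads'
        directory.add '/var/apps/my_app/log'
      end
    end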
data/lib/backup/syncer/cloud/base.rb
ADDED
@@ -0,0 +1,247 @@
+# encoding: utf-8
+
+##
+# Only load the Fog gem, along with the Parallel gem, when the
+# Backup::Syncer::Cloud class is loaded
+Backup::Dependency.load('fog')
+Backup::Dependency.load('parallel')
+
+module Backup
+  module Syncer
+    module Cloud
+      class Base < Syncer::Base
+
+        ##
+        # Create a Mutex to synchronize certain parts of the code
+        # in order to prevent race conditions or broken STDOUT.
+        MUTEX = Mutex.new
+
+        ##
+        # Concurrency setting - defaults to false, but can be set to:
+        # - :threads
+        # - :processes
+        attr_accessor :concurrency_type
+
+        ##
+        # Concurrency level - the number of threads or processors to use.
+        # Defaults to 2.
+        attr_accessor :concurrency_level
+
+        ##
+        # Instantiates a new Cloud Syncer object for either
+        # the Cloud::S3 or Cloud::CloudFiles Syncer.
+        #
+        # Pre-configured defaults specified in either
+        # Configuration::Syncer::Cloud::S3 or
+        # Configuration::Syncer::Cloud::CloudFiles
+        # are set via a super() call to Syncer::Base.
+        #
+        # If not specified in the pre-configured defaults,
+        # the Cloud specific defaults are set here before evaluating
+        # any block provided in the user's configuration file.
+        def initialize
+          super
+
+          @concurrency_type  ||= false
+          @concurrency_level ||= 2
+        end
+
+        ##
+        # Performs the Sync operation
+        def perform!
+          Logger.message("#{ syncer_name } started the syncing process:")
+
+          @directories.each do |directory|
+            SyncContext.new(
+              File.expand_path(directory), repository_object, @path
+            ).sync! @mirror, @concurrency_type, @concurrency_level
+          end
+
+          Logger.message("#{ syncer_name } Syncing Complete!")
+        end
+
+        private
+
+        class SyncContext
+          attr_reader :directory, :bucket, :path, :remote_base
+
+          ##
+          # Creates a new SyncContext object which handles a single directory
+          # from the Syncer::Base @directories array.
+          def initialize(directory, bucket, path)
+            @directory, @bucket, @path = directory, bucket, path
+            @remote_base = File.join(path, File.basename(directory))
+          end
+
+          ##
+          # Performs the sync operation using the provided techniques
+          # (mirroring/concurrency).
+          def sync!(mirror = false, concurrency_type = false, concurrency_level = 2)
+            block = Proc.new { |relative_path| sync_file relative_path, mirror }
+
+            case concurrency_type
+            when FalseClass
+              all_file_names.each &block
+            when :threads
+              Parallel.each all_file_names,
+                  :in_threads => concurrency_level, &block
+            when :processes
+              Parallel.each all_file_names,
+                  :in_processes => concurrency_level, &block
+            else
+              raise Errors::Syncer::Cloud::ConfigurationError,
+                  "Unknown concurrency_type setting: #{ concurrency_type.inspect }"
+            end
+          end
+
+          private
+
+          ##
+          # Gathers all the relative paths to the local files
+          # and merges them with the remote file paths, removing
+          # duplicate keys if any, and sorts the list in alphabetical order.
+          def all_file_names
+            @all_file_names ||= (local_files.keys | remote_files.keys).sort
+          end
+
+          ##
+          # Returns a Hash of local files, validated to ensure the path
+          # does not contain invalid UTF-8 byte sequences.
+          # The keys are the filesystem paths, relative to @directory.
+          # The values are the LocalFile objects for that given file.
+          def local_files
+            @local_files ||= begin
+              hash = {}
+              local_hashes.lines.map do |line|
+                LocalFile.new(@directory, line)
+              end.compact.each do |file|
+                hash.merge!(file.relative_path => file)
+              end
+              hash
+            end
+          end
+
+          ##
+          # Returns a String of file paths and their md5 hashes.
+          def local_hashes
+            MUTEX.synchronize {
+              Logger.message("\s\sGenerating checksums for '#{ @directory }'")
+            }
+            `find #{ @directory } -print0 | xargs -0 openssl md5 2> /dev/null`
+          end
+
+          ##
+          # Returns a Hash of remote files
+          # The keys are the remote paths, relative to @remote_base
+          # The values are the Fog file objects for that given file
+          def remote_files
+            @remote_files ||= begin
+              hash = {}
+              @bucket.files.all(:prefix => @remote_base).each do |file|
+                hash.merge!(file.key.sub("#{ @remote_base }/", '') => file)
+              end
+              hash
+            end
+          end
+
+          ##
+          # Performs a sync operation on a file. When mirroring is enabled
+          # and a local file has been removed since the last sync, it will also
+          # remove it from the remote location. It will not upload files that
+          # have not changed since the last sync. Checks are done using an md5
+          # hash. If a file has changed, or has been newly added, the file will
+          # be transferred/overwritten.
+          def sync_file(relative_path, mirror)
+            local_file  = local_files[relative_path]
+            remote_file = remote_files[relative_path]
+            remote_path = File.join(@remote_base, relative_path)
+
+            if local_file && File.exist?(local_file.path)
+              unless remote_file && remote_file.etag == local_file.md5
+                MUTEX.synchronize {
+                  Logger.message("\s\s[transferring] '#{ remote_path }'")
+                }
+                File.open(local_file.path, 'r') do |file|
+                  @bucket.files.create(
+                    :key  => remote_path,
+                    :body => file
+                  )
+                end
+              else
+                MUTEX.synchronize {
+                  Logger.message("\s\s[skipping] '#{ remote_path }'")
+                }
+              end
+            elsif remote_file
+              if mirror
+                MUTEX.synchronize {
+                  Logger.message("\s\s[removing] '#{ remote_path }'")
+                }
+                remote_file.destroy
+              else
+                MUTEX.synchronize {
+                  Logger.message("\s\s[leaving] '#{ remote_path }'")
+                }
+              end
+            end
+          end
+        end # class SyncContext
+
+        class LocalFile
+          attr_reader :path, :relative_path, :md5
+
+          ##
+          # Return a new LocalFile object if it's valid.
+          # Otherwise, log a warning and return nil.
+          def self.new(*args)
+            local_file = super(*args)
+            if local_file.invalid?
+              MUTEX.synchronize {
+                Logger.warn(
+                  "\s\s[skipping] #{ local_file.path }\n" +
+                  "\s\sPath Contains Invalid UTF-8 byte sequences"
+                )
+              }
+              return nil
+            end
+            local_file
+          end
+
+          ##
+          # Creates a new LocalFile object using the given directory and line
+          # from the md5 hash checkup. This object figures out the path,
+          # relative_path and md5 hash for the file.
+          def initialize(directory, line)
+            @invalid = false
+            @directory = sanitize(directory)
+            @path, @md5 = sanitize(line).chomp.
+                match(/^MD5\(([^\)]+)\)= (\w+)$/).captures
+            @relative_path = @path.sub(@directory + '/', '')
+          end
+
+          def invalid?
+            @invalid
+          end
+
+          private
+
+          ##
+          # Sanitize string and replace any invalid UTF-8 characters.
+          # If replacements are made, flag the LocalFile object as invalid.
+          def sanitize(str)
+            str.each_char.map do |char|
+              begin
+                char if !!char.unpack('U')
+              rescue
+                @invalid = true
+                "\xEF\xBF\xBD" # => "\uFFFD"
+              end
+            end.join
+          end
+
+        end # class LocalFile
+
+      end # class Base < Syncer::Base
+      end # module Cloud
+  end
+end
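To clarify the checksum handling above: `local_hashes` shells out to `find | xargs openssl md5`, and `LocalFile#initialize` parses each resulting line with the regex shown in the diff. A standalone sketch with a made-up path and digest:

    # Standalone illustration (not gem code) of the "MD5(path)= digest" lines
    # that LocalFile parses. The path and digest below are invented.
    line = "MD5(/var/apps/my_app/public/uploads/logo.png)= d41d8cd98f00b204e9800998ecf8427e\n"

    path, md5 = line.chomp.match(/^MD5\(([^\)]+)\)= (\w+)$/).captures
    relative_path = path.sub('/var/apps/my_app/public/uploads' + '/', '')

    puts path           # => /var/apps/my_app/public/uploads/logo.png
    puts md5            # => d41d8cd98f00b204e9800998ecf8427e
    puts relative_path  # => logo.png

The resulting md5 is what `sync_file` compares against the remote object's etag to decide whether a file needs to be transferred.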
data/lib/backup/syncer/cloud/cloud_files.rb
ADDED
@@ -0,0 +1,78 @@
+# encoding: utf-8
+
+module Backup
+  module Syncer
+    module Cloud
+      class CloudFiles < Base
+
+        ##
+        # Rackspace CloudFiles Credentials
+        attr_accessor :api_key, :username
+
+        ##
+        # Rackspace CloudFiles Container
+        attr_accessor :container
+
+        ##
+        # Rackspace AuthURL allows you to connect
+        # to a different Rackspace datacenter
+        # - https://auth.api.rackspacecloud.com (Default: US)
+        # - https://lon.auth.api.rackspacecloud.com (UK)
+        attr_accessor :auth_url
+
+        ##
+        # Improve performance and avoid data transfer costs
+        # by setting @servicenet to `true`
+        # This only works if Backup runs on a Rackspace server
+        attr_accessor :servicenet
+
+        ##
+        # Instantiates a new Cloud::CloudFiles Syncer.
+        #
+        # Pre-configured defaults specified in
+        # Configuration::Syncer::Cloud::CloudFiles
+        # are set via a super() call to Cloud::Base,
+        # which in turn will invoke Syncer::Base.
+        #
+        # Once pre-configured defaults and Cloud specific defaults are set,
+        # the block from the user's configuration file is evaluated.
+        def initialize(&block)
+          super
+
+          instance_eval(&block) if block_given?
+          @path = path.sub(/^\//, '')
+        end
+
+        private
+
+        ##
+        # Establishes and creates a new Fog storage object for CloudFiles.
+        def connection
+          @connection ||= Fog::Storage.new(
+            :provider             => provider,
+            :rackspace_username   => username,
+            :rackspace_api_key    => api_key,
+            :rackspace_auth_url   => auth_url,
+            :rackspace_servicenet => servicenet
+          )
+        end
+
+        ##
+        # Creates a new @repository_object (container).
+        # Fetches it from Cloud Files if it already exists,
+        # otherwise it will create it first and use that instead.
+        def repository_object
+          @repository_object ||= connection.directories.get(container) ||
+              connection.directories.create(:key => container)
+        end
+
+        ##
+        # This is the provider that Fog uses for the Cloud Files
+        def provider
+          "Rackspace"
+        end
+
+      end # class Cloudfiles < Base
+      end # module Cloud
+  end
+end
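For reference, a Cloud::CloudFiles syncer would be configured roughly as below, in the shape of the updated templates/cli/utility/syncer/cloud_files from the file list; all credentials, container names and paths are placeholders.

    # Illustrative configuration for the new Cloud::CloudFiles syncer.
    sync_with Cloud::CloudFiles do |cf|
      cf.username          = 'my_username'
      cf.api_key           = 'my_api_key'
      cf.container         = 'my_container'
      cf.auth_url          = 'https://auth.api.rackspacecloud.com'
      cf.servicenet        = false
      cf.path              = '/backups'
      cf.mirror            = true
      cf.concurrency_type  = :threads
      cf.concurrency_level = 10

      cf.directories do |directory|
        directory.add '/path/to/directory/to/sync'
      end
    end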