backedup 5.0.0.beta.3
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/LICENSE +19 -0
- data/README.md +33 -0
- data/bin/backedup +5 -0
- data/bin/docker_test +24 -0
- data/lib/backup/archive.rb +169 -0
- data/lib/backup/binder.rb +18 -0
- data/lib/backup/cleaner.rb +112 -0
- data/lib/backup/cli.rb +370 -0
- data/lib/backup/cloud_io/base.rb +38 -0
- data/lib/backup/cloud_io/cloud_files.rb +296 -0
- data/lib/backup/cloud_io/gcs.rb +121 -0
- data/lib/backup/cloud_io/s3.rb +253 -0
- data/lib/backup/cloud_io/swift.rb +96 -0
- data/lib/backup/compressor/base.rb +32 -0
- data/lib/backup/compressor/bzip2.rb +35 -0
- data/lib/backup/compressor/custom.rb +49 -0
- data/lib/backup/compressor/gzip.rb +73 -0
- data/lib/backup/compressor/pbzip2.rb +45 -0
- data/lib/backup/config/dsl.rb +102 -0
- data/lib/backup/config/helpers.rb +137 -0
- data/lib/backup/config.rb +118 -0
- data/lib/backup/database/base.rb +86 -0
- data/lib/backup/database/mongodb.rb +186 -0
- data/lib/backup/database/mysql.rb +191 -0
- data/lib/backup/database/openldap.rb +93 -0
- data/lib/backup/database/postgresql.rb +164 -0
- data/lib/backup/database/redis.rb +176 -0
- data/lib/backup/database/riak.rb +79 -0
- data/lib/backup/database/sqlite.rb +55 -0
- data/lib/backup/encryptor/base.rb +27 -0
- data/lib/backup/encryptor/gpg.rb +737 -0
- data/lib/backup/encryptor/open_ssl.rb +74 -0
- data/lib/backup/errors.rb +53 -0
- data/lib/backup/logger/console.rb +48 -0
- data/lib/backup/logger/fog_adapter.rb +25 -0
- data/lib/backup/logger/logfile.rb +131 -0
- data/lib/backup/logger/syslog.rb +114 -0
- data/lib/backup/logger.rb +197 -0
- data/lib/backup/model.rb +472 -0
- data/lib/backup/notifier/base.rb +126 -0
- data/lib/backup/notifier/campfire.rb +61 -0
- data/lib/backup/notifier/command.rb +99 -0
- data/lib/backup/notifier/datadog.rb +104 -0
- data/lib/backup/notifier/flowdock.rb +99 -0
- data/lib/backup/notifier/hipchat.rb +116 -0
- data/lib/backup/notifier/http_post.rb +114 -0
- data/lib/backup/notifier/mail.rb +232 -0
- data/lib/backup/notifier/nagios.rb +65 -0
- data/lib/backup/notifier/pagerduty.rb +79 -0
- data/lib/backup/notifier/prowl.rb +68 -0
- data/lib/backup/notifier/pushover.rb +71 -0
- data/lib/backup/notifier/ses.rb +123 -0
- data/lib/backup/notifier/slack.rb +147 -0
- data/lib/backup/notifier/twitter.rb +55 -0
- data/lib/backup/notifier/zabbix.rb +60 -0
- data/lib/backup/package.rb +51 -0
- data/lib/backup/packager.rb +106 -0
- data/lib/backup/pipeline.rb +120 -0
- data/lib/backup/splitter.rb +73 -0
- data/lib/backup/storage/base.rb +66 -0
- data/lib/backup/storage/cloud_files.rb +156 -0
- data/lib/backup/storage/cycler.rb +70 -0
- data/lib/backup/storage/dropbox.rb +206 -0
- data/lib/backup/storage/ftp.rb +116 -0
- data/lib/backup/storage/gcs.rb +93 -0
- data/lib/backup/storage/local.rb +61 -0
- data/lib/backup/storage/qiniu.rb +65 -0
- data/lib/backup/storage/rsync.rb +246 -0
- data/lib/backup/storage/s3.rb +155 -0
- data/lib/backup/storage/scp.rb +65 -0
- data/lib/backup/storage/sftp.rb +80 -0
- data/lib/backup/storage/swift.rb +124 -0
- data/lib/backup/storage/webdav.rb +102 -0
- data/lib/backup/syncer/base.rb +67 -0
- data/lib/backup/syncer/cloud/base.rb +176 -0
- data/lib/backup/syncer/cloud/cloud_files.rb +81 -0
- data/lib/backup/syncer/cloud/local_file.rb +97 -0
- data/lib/backup/syncer/cloud/s3.rb +109 -0
- data/lib/backup/syncer/rsync/base.rb +50 -0
- data/lib/backup/syncer/rsync/local.rb +27 -0
- data/lib/backup/syncer/rsync/pull.rb +47 -0
- data/lib/backup/syncer/rsync/push.rb +201 -0
- data/lib/backup/template.rb +41 -0
- data/lib/backup/utilities.rb +234 -0
- data/lib/backup/version.rb +3 -0
- data/lib/backup.rb +145 -0
- data/templates/cli/archive +28 -0
- data/templates/cli/compressor/bzip2 +4 -0
- data/templates/cli/compressor/custom +7 -0
- data/templates/cli/compressor/gzip +4 -0
- data/templates/cli/config +123 -0
- data/templates/cli/databases/mongodb +15 -0
- data/templates/cli/databases/mysql +18 -0
- data/templates/cli/databases/openldap +24 -0
- data/templates/cli/databases/postgresql +16 -0
- data/templates/cli/databases/redis +16 -0
- data/templates/cli/databases/riak +17 -0
- data/templates/cli/databases/sqlite +11 -0
- data/templates/cli/encryptor/gpg +27 -0
- data/templates/cli/encryptor/openssl +9 -0
- data/templates/cli/model +26 -0
- data/templates/cli/notifier/zabbix +15 -0
- data/templates/cli/notifiers/campfire +12 -0
- data/templates/cli/notifiers/command +32 -0
- data/templates/cli/notifiers/datadog +57 -0
- data/templates/cli/notifiers/flowdock +16 -0
- data/templates/cli/notifiers/hipchat +16 -0
- data/templates/cli/notifiers/http_post +32 -0
- data/templates/cli/notifiers/mail +24 -0
- data/templates/cli/notifiers/nagios +13 -0
- data/templates/cli/notifiers/pagerduty +12 -0
- data/templates/cli/notifiers/prowl +11 -0
- data/templates/cli/notifiers/pushover +11 -0
- data/templates/cli/notifiers/ses +15 -0
- data/templates/cli/notifiers/slack +22 -0
- data/templates/cli/notifiers/twitter +13 -0
- data/templates/cli/splitter +7 -0
- data/templates/cli/storages/cloud_files +11 -0
- data/templates/cli/storages/dropbox +20 -0
- data/templates/cli/storages/ftp +13 -0
- data/templates/cli/storages/gcs +8 -0
- data/templates/cli/storages/local +8 -0
- data/templates/cli/storages/qiniu +12 -0
- data/templates/cli/storages/rsync +17 -0
- data/templates/cli/storages/s3 +16 -0
- data/templates/cli/storages/scp +15 -0
- data/templates/cli/storages/sftp +15 -0
- data/templates/cli/storages/swift +19 -0
- data/templates/cli/storages/webdav +13 -0
- data/templates/cli/syncers/cloud_files +22 -0
- data/templates/cli/syncers/rsync_local +20 -0
- data/templates/cli/syncers/rsync_pull +28 -0
- data/templates/cli/syncers/rsync_push +28 -0
- data/templates/cli/syncers/s3 +27 -0
- data/templates/general/links +3 -0
- data/templates/general/version.erb +2 -0
- data/templates/notifier/mail/failure.erb +16 -0
- data/templates/notifier/mail/success.erb +16 -0
- data/templates/notifier/mail/warning.erb +16 -0
- data/templates/storage/dropbox/authorization_url.erb +6 -0
- data/templates/storage/dropbox/authorized.erb +4 -0
- data/templates/storage/dropbox/cache_file_written.erb +10 -0
- metadata +1255 -0
@@ -0,0 +1,124 @@
|
|
1
|
+
require "backup/cloud_io/swift"

module Backup
  module Storage
    # Stores backup packages in an OpenStack Swift container via
    # CloudIO::Swift, and cycles old packages via Storage::Cycler.
    class Swift < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Swift credentials
      attr_accessor :username, :password

      ##
      # Keystone tenant name if using v2 auth
      attr_accessor :tenant_name

      ##
      # Swift container name
      attr_accessor :container

      ##
      # Swift region. It might be required for certain providers
      attr_accessor :region

      ##
      # OpenStack keystone url
      attr_accessor :auth_url

      ##
      # Number of times to retry failed operations.
      #
      # Default: 10
      attr_accessor :max_retries

      ##
      # Time in seconds to pause before each retry.
      #
      # Default: 30
      attr_accessor :retry_waitsec

      ##
      # The size of the batch operations (delete/list/etc.) in your
      # OpenStack deployment
      #
      # Default: 1000
      attr_accessor :batch_size

      ##
      # Additional options to pass along to fog.
      # e.g. Fog::Storage.new({ :provider => 'Swift' }.merge(fog_options))
      attr_accessor :fog_options

      def initialize(mode, storage_id = nil)
        super

        # Apply defaults for anything the user's model did not set.
        @max_retries   ||= 10
        @retry_waitsec ||= 30
        @path          ||= "backups"
        @batch_size    ||= 1000
        @fog_options   ||= {}

        # Swift object names must not begin with a slash.
        @path = @path.sub(%r{^\/}, "")

        check_configuration
      end

      private

      # Lazily-built CloudIO client that performs the actual transfers.
      def cloud_io
        @cloud_io ||= CloudIO::Swift.new(
          username: username,
          password: password,
          tenant_name: tenant_name,
          region: region,
          container: container,
          auth_url: auth_url,
          max_retries: max_retries,
          retry_waitsec: retry_waitsec,
          batch_size: batch_size,
          fog_options: fog_options
        )
      end

      # Uploads every file of the current package to the container.
      def transfer!
        package.filenames.each do |filename|
          src  = File.join(Config.tmp_path, filename)
          dest = File.join(remote_path, filename)
          Logger.info "Storing '#{container}/#{dest}'..."
          cloud_io.upload(src, dest)
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."

        remote_path = remote_path_for(package)
        objects = cloud_io.objects(remote_path)

        raise Error, "Package at '#{remote_path}' not found" if objects.empty?

        cloud_io.delete(objects)
      end

      # Validates the configured credentials. auth_url is checked first so
      # the /v2/ probe below never runs against nil.
      def check_configuration
        if auth_url.nil?
          raise Error, <<-EOS
            Configuration Error
            Swift auth_url is required
          EOS
        end

        required = [:username, :password, :container, :auth_url]
        # Keystone v2 additionally needs a tenant name.
        required << :tenant_name if auth_url =~ /v2/

        raise Error, <<-EOS if required.any? { |name| send(name).nil? }
          Configuration Error
          #{required.map { |name| "##{name}" }.join(", ")} are all required
        EOS
      end
    end
  end
end
|
@@ -0,0 +1,102 @@
|
|
1
|
+
# encoding: utf-8
require 'faraday'

module Backup
  module Storage
    # Stores backup packages on a WebDAV server over HTTP(S) using Faraday,
    # and cycles old packages via Storage::Cycler.
    class Webdav < Base
      include Storage::Cycler

      ##
      # Webdav credentials
      attr_accessor :username, :password

      ##
      # Server hostname and port, SSL settings
      attr_accessor :ip, :port, :use_ssl, :ssl_verify

      ##
      # configure a connection timeout
      attr_accessor :timeout

      def initialize(model, storage_id = nil)
        super

        @port ||= 80
        @use_ssl ||= false
        @ssl_verify = true if @ssl_verify.nil?
        @path ||= 'backups'
        # NOTE: `@timeout ||= nil` was removed — it was a no-op
        # (timeout stays nil unless the user sets it).
        path.sub!(/^~\//, '')
      end

      private

      ##
      # create the connection object used for initializing the requests
      #
      # Note:
      # Webdav has a special HTTP verb (MKCOL) for creating collections that
      # needs to be added to the list of allowed verbs in Faraday.
      # METHODS is a Set, so repeated .add calls are harmless no-ops.
      def connection
        if Faraday::Connection.const_defined?(:METHODS)
          Faraday::Connection::METHODS.add(:mkcol)
        end

        conn_hash = {
          url: base_url,
          request: {
            timeout: timeout,
            open_timeout: timeout
          },
          ssl: { verify: ssl_verify }
        }

        Faraday::Connection.new(conn_hash) do |builder|
          builder.request :multipart
          builder.request :basic_auth, username, password
          builder.adapter :net_http
        end
      end

      # Scheme/host/port prefix for every request.
      def base_url
        "#{(use_ssl ? 'https' : 'http')}://#{ip}:#{port}"
      end

      # Uploads every file of the current package as a chunked PUT.
      def transfer!
        create_remote_path

        package.filenames.each do |filename|
          src = File.join(Config.tmp_path, filename)
          dest = File.join(remote_path, filename)

          Logger.info "Storing '#{ ip }:#{ dest }'..."
          connection.put(dest) do |req|
            req.headers['Content-Type'] = 'octet/stream'
            req.headers['Transfer-Encoding'] = 'chunked'
            req.headers['Content-Length'] = File.size(src).to_s
            req.body = Faraday::UploadIO.new(src, 'octet/stream')
          end
        end
      end

      # MKCOL is not recursive, so create each path segment in turn
      # (existing collections simply answer 405, which is ignored here).
      def create_remote_path
        path_parts = Array.new
        remote_path.split('/').each do |path_part|
          path_parts << path_part
          connection.run_request(:mkcol, path_parts.join('/'), nil, nil)
        end
      end

      ##
      # called by the Cycler.
      def remove!(package)
        Logger.info "Removing backup package dated #{ package.time }..."
        remote_path = remote_path_for(package)
        package.filenames.each do |filename|
          connection.delete(File.join(remote_path, filename))
        end
        # Trailing slash targets the collection itself.
        connection.delete(File.join(remote_path, ''))
      end
    end
  end
end
|
@@ -0,0 +1,67 @@
|
|
1
|
+
module Backup
  module Syncer
    # Common behavior for all Syncers: the directory/exclude DSL,
    # default options and logging helpers.
    class Base
      include Utilities::Helpers
      include Config::Helpers

      ##
      # Path to store the synced files/directories to
      attr_accessor :path

      ##
      # Flag for mirroring the files/directories
      attr_accessor :mirror

      ##
      # Optional user-defined identifier to differentiate multiple syncers
      # defined within a single backup model. Currently this is only used
      # in the log messages.
      attr_reader :syncer_id

      attr_reader :excludes

      def initialize(syncer_id = nil)
        @syncer_id = syncer_id

        load_defaults!

        @mirror      ||= false
        @directories ||= []
        @excludes    ||= []
      end

      ##
      # Syntactic sugar for the DSL for adding directories
      def directories(&block)
        return @directories unless block_given?
        instance_eval(&block)
      end

      def add(path)
        directories << path
      end

      # For Cloud Syncers, +pattern+ can be a string (with shell-style
      # wildcards) or a regex.
      # For RSync, each +pattern+ will be passed to rsync's --exclude option.
      def exclude(pattern)
        excludes << pattern
      end

      private

      # "Backup::Syncer::..." minus the "Backup::" prefix, plus the
      # optional syncer_id — used only for log messages.
      def syncer_name
        @syncer_name ||= self.class.to_s.sub("Backup::", "") +
          (syncer_id ? " (#{syncer_id})" : "")
      end

      # Logs the start/finish banner for this syncer.
      def log!(action)
        msg = { started: "Started...", finished: "Finished!" }[action]
        Logger.info "#{syncer_name} #{msg}"
      end
    end
  end
end
|
@@ -0,0 +1,176 @@
|
|
1
|
+
module Backup
  module Syncer
    module Cloud
      class Error < Backup::Error; end

      # Common logic for cloud syncers (S3, CloudFiles): walks each local
      # directory, compares MD5s against the remote listing, uploads changed
      # files (optionally in threads) and deletes/reports orphans.
      # Subclasses must implement #cloud_io and #get_remote_files.
      class Base < Syncer::Base
        # Guards the shared counters when thread_count > 0.
        MUTEX = Mutex.new

        ##
        # Number of threads to use for concurrency.
        #
        # Default: 0 (no concurrency)
        attr_accessor :thread_count

        ##
        # Number of times to retry failed operations.
        #
        # Default: 10
        attr_accessor :max_retries

        ##
        # Time in seconds to pause before each retry.
        #
        # Default: 30
        attr_accessor :retry_waitsec

        def initialize(syncer_id = nil, &block)
          super
          instance_eval(&block) if block_given?

          @thread_count  ||= 0
          @max_retries   ||= 10
          @retry_waitsec ||= 30

          @path ||= "backups"
          # Remote object keys must not begin with a slash.
          @path = path.sub(/^\//, "")
        end

        # Entry point: syncs every configured directory and logs a summary.
        def perform!
          log!(:started)
          @transfer_count  = 0
          @unchanged_count = 0
          @skipped_count   = 0
          # Queue is thread-safe; a plain Array suffices single-threaded.
          @orphans = thread_count > 0 ? Queue.new : []

          directories.each { |dir| sync_directory(dir) }
          orphans_result = process_orphans

          Logger.info "\nSummary:"
          Logger.info "\s\sTransferred Files: #{@transfer_count}"
          Logger.info "\s\s#{orphans_result}"
          Logger.info "\s\sUnchanged Files: #{@unchanged_count}"
          Logger.warn "\s\sSkipped Files: #{@skipped_count}" if @skipped_count > 0
          log!(:finished)
        end

        private

        # Compares the local tree under +dir+ with the remote listing and
        # syncs each differing relative path.
        def sync_directory(dir)
          remote_base = path.empty? ? File.basename(dir) :
                                      File.join(path, File.basename(dir))
          Logger.info "Gathering remote data for '#{remote_base}'..."
          remote_files = get_remote_files(remote_base)

          Logger.info("Gathering local data for '#{File.expand_path(dir)}'...")
          local_files = LocalFile.find(dir, excludes)

          # Union of both sides: covers new, changed and orphaned files.
          relative_paths = (local_files.keys | remote_files.keys).sort
          if relative_paths.empty?
            Logger.info "No local or remote files found"
          else
            Logger.info "Syncing..."
            sync_block = proc do |relative_path|
              local_file  = local_files[relative_path]
              remote_md5  = remote_files[relative_path]
              remote_path = File.join(remote_base, relative_path)
              sync_file(local_file, remote_path, remote_md5)
            end

            if thread_count > 0
              sync_in_threads(relative_paths, sync_block)
            else
              relative_paths.each(&sync_block)
            end
          end
        end

        # Drains +relative_paths+ through a pool of worker threads,
        # aborting the pool if any worker dies.
        def sync_in_threads(relative_paths, sync_block)
          queue = Queue.new
          queue << relative_paths.shift until relative_paths.empty?
          num_threads = [thread_count, queue.size].min
          Logger.info "\s\sUsing #{num_threads} Threads"
          threads = Array.new(num_threads) do
            Thread.new do
              loop do
                # Non-blocking pop raises ThreadError when empty -> nil -> break.
                path = queue.shift(true) rescue nil
                path ? sync_block.call(path) : break
              end
            end
          end

          # abort if any thread raises an exception
          while threads.any?(&:alive?)
            if threads.any? { |thr| thr.status.nil? }
              threads.each(&:kill)
              Thread.pass while threads.any?(&:alive?)
              break
            end
            sleep num_threads * 0.1
          end
          threads.each(&:join)
        end

        # If an exception is raised in multiple threads, only the exception
        # raised in the first thread that Thread#join is called on will be
        # handled. So all exceptions are logged first with their details,
        # then a generic exception is raised.
        def sync_file(local_file, remote_path, remote_md5)
          if local_file && File.exist?(local_file.path)
            if local_file.md5 == remote_md5
              MUTEX.synchronize { @unchanged_count += 1 }
            else
              Logger.info("\s\s[transferring] '#{remote_path}'")
              begin
                cloud_io.upload(local_file.path, remote_path)
                MUTEX.synchronize { @transfer_count += 1 }
              rescue CloudIO::FileSizeError => err
                MUTEX.synchronize { @skipped_count += 1 }
                Logger.warn Error.wrap(err, "Skipping '#{remote_path}'")
              rescue => err
                Logger.error(err)
                raise Error, <<-EOS
                  Syncer Failed!
                  See the Retry [info] and [error] messages (if any)
                  for details on each failed operation.
                EOS
              end
            end
          elsif remote_md5
            # Present remotely but gone locally: an orphan.
            @orphans << remote_path
          end
        end

        # Deletes (mirror mode) or reports orphaned remote files and
        # returns the summary line for the final report.
        def process_orphans
          if @orphans.empty?
            return mirror ? "Deleted Files: 0" : "Orphaned Files: 0"
          end

          # Normalize the thread-safe Queue back into an Array.
          if @orphans.is_a?(Queue)
            @orphans = Array.new(@orphans.size) { @orphans.shift }
          end

          if mirror
            Logger.info @orphans.map { |path|
              "\s\s[removing] '#{path}'"
            }.join("\n")

            begin
              cloud_io.delete(@orphans)
              "Deleted Files: #{@orphans.count}"
            rescue => err
              Logger.warn Error.wrap(err, "Delete Operation Failed")
              "Attempted to Delete: #{@orphans.count} " \
                "(See log messages for actual results)"
            end
          else
            Logger.info @orphans.map { |path|
              "\s\s[orphaned] '#{path}'"
            }.join("\n")
            "Orphaned Files: #{@orphans.count}"
          end
        end
      end
    end
  end
end
|
@@ -0,0 +1,81 @@
|
|
1
|
+
require "backup/cloud_io/cloud_files"

module Backup
  module Syncer
    module Cloud
      # Syncs a local directory tree to a Rackspace CloudFiles container.
      class CloudFiles < Base
        class Error < Backup::Error; end

        ##
        # Rackspace CloudFiles Credentials
        attr_accessor :username, :api_key

        ##
        # Rackspace CloudFiles Container
        attr_accessor :container

        ##
        # Rackspace AuthURL (optional)
        attr_accessor :auth_url

        ##
        # Rackspace Region (optional)
        attr_accessor :region

        ##
        # Rackspace Service Net
        # (LAN-based transfers to avoid charges and improve performance)
        attr_accessor :servicenet

        ##
        # Additional options to pass along to fog.
        # e.g. Fog::Storage.new({ :provider => 'Rackspace' }.merge(fog_options))
        attr_accessor :fog_options

        def initialize(syncer_id = nil)
          super

          @servicenet ||= false

          check_configuration
        end

        private

        # Lazily-built CloudIO client used for listing and uploading.
        def cloud_io
          @cloud_io ||= CloudIO::CloudFiles.new(
            username: username,
            api_key: api_key,
            auth_url: auth_url,
            region: region,
            servicenet: servicenet,
            container: container,
            max_retries: max_retries,
            retry_waitsec: retry_waitsec,
            # Syncer can not use SLOs.
            segments_container: nil,
            segment_size: 0,
            fog_options: fog_options
          )
        end

        # Maps each remote object's path (relative to +remote_base+) to its
        # MD5 (CloudFiles exposes it as the object's #hash).
        def get_remote_files(remote_base)
          cloud_io.objects(remote_base).each_with_object({}) do |object, mapping|
            relative_path = object.name.sub(remote_base + "/", "")
            mapping[relative_path] = object.hash
          end
        end

        # Ensures all mandatory settings are present.
        def check_configuration
          required = %w[username api_key container]
          raise Error, <<-EOS if required.any? { |name| send(name).nil? }
            Configuration Error
            #{required.map { |name| "##{name}" }.join(", ")} are all required
          EOS
        end
      end # class Cloudfiles < Base
    end # module Cloud
  end
end
|
@@ -0,0 +1,97 @@
|
|
1
|
+
require "digest/md5"

module Backup
  module Syncer
    module Cloud
      # Represents one local file (path + md5) gathered for a cloud sync.
      class LocalFile
        attr_reader :path
        attr_accessor :md5

        class << self
          # Returns a Hash of LocalFile objects for each file within +dir+,
          # except those matching any of the +excludes+.
          # Hash keys are the file's path relative to +dir+.
          def find(dir, excludes = [])
            dir = File.expand_path(dir)
            find_md5(dir, excludes).each_with_object({}) do |file, mapping|
              mapping[file.path.sub(dir + "/", "")] = file
            end
          end

          # Return a new LocalFile object if it's valid.
          # Otherwise, log a warning and return nil.
          def new(*args)
            file = super
            return file unless file.invalid?

            Logger.warn("\s\s[skipping] #{file.path}\n" \
              "\s\sPath Contains Invalid UTF-8 byte sequences")
            nil
          end

          private

          # Recursively collects LocalFile objects (with md5 set) under +dir+,
          # skipping excluded directories and files.
          def find_md5(dir, excludes)
            entries = (Dir.entries(dir) - %w[. ..]).map { |e| File.join(dir, e) }
            entries.each_with_object([]) do |entry, found|
              if File.directory?(entry)
                next if exclude?(excludes, entry)
                found.concat(find_md5(entry, excludes))
              elsif File.file?(entry)
                file = new(entry)
                next if file.nil? || exclude?(excludes, file.path)
                file.md5 = Digest::MD5.file(file.path).hexdigest
                found << file
              end
            end
          end

          # Returns true if +path+ matches any of the +excludes+.
          # Note this can not be called if +path+ includes invalid UTF-8.
          def exclude?(excludes, path)
            excludes.any? do |pattern|
              case pattern
              when String then File.fnmatch?(pattern, path)
              when Regexp then pattern.match(path)
              end
            end
          end
        end

        # If +path+ contains invalid UTF-8, it will be sanitized
        # and the LocalFile object will be flagged as invalid.
        # This is done so @file.path may be logged.
        def initialize(path)
          @path = sanitize(path)
        end

        def invalid?
          !!@invalid
        end

        private

        # Replaces invalid UTF-8 byte sequences with U+FFFD and flags
        # the instance invalid (unpack("U") raises on malformed bytes).
        def sanitize(str)
          str.each_char.map do |char|
            begin
              char.unpack("U")
              char
            rescue
              @invalid = true
              "\xEF\xBF\xBD" # => "\uFFFD"
            end
          end.join
        end
      end
    end
  end
end
|