backupii 0.1.0.pre.alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE +19 -0
- data/README.md +37 -0
- data/bin/backupii +5 -0
- data/bin/docker_test +24 -0
- data/lib/backup/archive.rb +171 -0
- data/lib/backup/binder.rb +23 -0
- data/lib/backup/cleaner.rb +114 -0
- data/lib/backup/cli.rb +376 -0
- data/lib/backup/cloud_io/base.rb +40 -0
- data/lib/backup/cloud_io/cloud_files.rb +301 -0
- data/lib/backup/cloud_io/s3.rb +256 -0
- data/lib/backup/compressor/base.rb +34 -0
- data/lib/backup/compressor/bzip2.rb +37 -0
- data/lib/backup/compressor/custom.rb +51 -0
- data/lib/backup/compressor/gzip.rb +76 -0
- data/lib/backup/config/dsl.rb +103 -0
- data/lib/backup/config/helpers.rb +139 -0
- data/lib/backup/config.rb +122 -0
- data/lib/backup/database/base.rb +89 -0
- data/lib/backup/database/mongodb.rb +189 -0
- data/lib/backup/database/mysql.rb +194 -0
- data/lib/backup/database/openldap.rb +97 -0
- data/lib/backup/database/postgresql.rb +134 -0
- data/lib/backup/database/redis.rb +179 -0
- data/lib/backup/database/riak.rb +82 -0
- data/lib/backup/database/sqlite.rb +57 -0
- data/lib/backup/encryptor/base.rb +29 -0
- data/lib/backup/encryptor/gpg.rb +745 -0
- data/lib/backup/encryptor/open_ssl.rb +76 -0
- data/lib/backup/errors.rb +55 -0
- data/lib/backup/logger/console.rb +50 -0
- data/lib/backup/logger/fog_adapter.rb +27 -0
- data/lib/backup/logger/logfile.rb +134 -0
- data/lib/backup/logger/syslog.rb +116 -0
- data/lib/backup/logger.rb +199 -0
- data/lib/backup/model.rb +478 -0
- data/lib/backup/notifier/base.rb +128 -0
- data/lib/backup/notifier/campfire.rb +63 -0
- data/lib/backup/notifier/command.rb +101 -0
- data/lib/backup/notifier/datadog.rb +107 -0
- data/lib/backup/notifier/flowdock.rb +101 -0
- data/lib/backup/notifier/hipchat.rb +118 -0
- data/lib/backup/notifier/http_post.rb +116 -0
- data/lib/backup/notifier/mail.rb +235 -0
- data/lib/backup/notifier/nagios.rb +67 -0
- data/lib/backup/notifier/pagerduty.rb +82 -0
- data/lib/backup/notifier/prowl.rb +70 -0
- data/lib/backup/notifier/pushover.rb +73 -0
- data/lib/backup/notifier/ses.rb +126 -0
- data/lib/backup/notifier/slack.rb +149 -0
- data/lib/backup/notifier/twitter.rb +57 -0
- data/lib/backup/notifier/zabbix.rb +62 -0
- data/lib/backup/package.rb +53 -0
- data/lib/backup/packager.rb +108 -0
- data/lib/backup/pipeline.rb +122 -0
- data/lib/backup/splitter.rb +75 -0
- data/lib/backup/storage/base.rb +72 -0
- data/lib/backup/storage/cloud_files.rb +158 -0
- data/lib/backup/storage/cycler.rb +73 -0
- data/lib/backup/storage/dropbox.rb +208 -0
- data/lib/backup/storage/ftp.rb +118 -0
- data/lib/backup/storage/local.rb +63 -0
- data/lib/backup/storage/qiniu.rb +68 -0
- data/lib/backup/storage/rsync.rb +251 -0
- data/lib/backup/storage/s3.rb +157 -0
- data/lib/backup/storage/scp.rb +67 -0
- data/lib/backup/storage/sftp.rb +82 -0
- data/lib/backup/syncer/base.rb +70 -0
- data/lib/backup/syncer/cloud/base.rb +180 -0
- data/lib/backup/syncer/cloud/cloud_files.rb +83 -0
- data/lib/backup/syncer/cloud/local_file.rb +99 -0
- data/lib/backup/syncer/cloud/s3.rb +118 -0
- data/lib/backup/syncer/rsync/base.rb +55 -0
- data/lib/backup/syncer/rsync/local.rb +29 -0
- data/lib/backup/syncer/rsync/pull.rb +49 -0
- data/lib/backup/syncer/rsync/push.rb +206 -0
- data/lib/backup/template.rb +45 -0
- data/lib/backup/utilities.rb +235 -0
- data/lib/backup/version.rb +5 -0
- data/lib/backup.rb +141 -0
- data/templates/cli/archive +28 -0
- data/templates/cli/compressor/bzip2 +4 -0
- data/templates/cli/compressor/custom +7 -0
- data/templates/cli/compressor/gzip +4 -0
- data/templates/cli/config +123 -0
- data/templates/cli/databases/mongodb +15 -0
- data/templates/cli/databases/mysql +18 -0
- data/templates/cli/databases/openldap +24 -0
- data/templates/cli/databases/postgresql +16 -0
- data/templates/cli/databases/redis +16 -0
- data/templates/cli/databases/riak +17 -0
- data/templates/cli/databases/sqlite +11 -0
- data/templates/cli/encryptor/gpg +27 -0
- data/templates/cli/encryptor/openssl +9 -0
- data/templates/cli/model +26 -0
- data/templates/cli/notifier/zabbix +15 -0
- data/templates/cli/notifiers/campfire +12 -0
- data/templates/cli/notifiers/command +32 -0
- data/templates/cli/notifiers/datadog +57 -0
- data/templates/cli/notifiers/flowdock +16 -0
- data/templates/cli/notifiers/hipchat +16 -0
- data/templates/cli/notifiers/http_post +32 -0
- data/templates/cli/notifiers/mail +24 -0
- data/templates/cli/notifiers/nagios +13 -0
- data/templates/cli/notifiers/pagerduty +12 -0
- data/templates/cli/notifiers/prowl +11 -0
- data/templates/cli/notifiers/pushover +11 -0
- data/templates/cli/notifiers/ses +15 -0
- data/templates/cli/notifiers/slack +22 -0
- data/templates/cli/notifiers/twitter +13 -0
- data/templates/cli/splitter +7 -0
- data/templates/cli/storages/cloud_files +11 -0
- data/templates/cli/storages/dropbox +20 -0
- data/templates/cli/storages/ftp +13 -0
- data/templates/cli/storages/local +8 -0
- data/templates/cli/storages/qiniu +12 -0
- data/templates/cli/storages/rsync +17 -0
- data/templates/cli/storages/s3 +16 -0
- data/templates/cli/storages/scp +15 -0
- data/templates/cli/storages/sftp +15 -0
- data/templates/cli/syncers/cloud_files +22 -0
- data/templates/cli/syncers/rsync_local +20 -0
- data/templates/cli/syncers/rsync_pull +28 -0
- data/templates/cli/syncers/rsync_push +28 -0
- data/templates/cli/syncers/s3 +27 -0
- data/templates/general/links +3 -0
- data/templates/general/version.erb +2 -0
- data/templates/notifier/mail/failure.erb +16 -0
- data/templates/notifier/mail/success.erb +16 -0
- data/templates/notifier/mail/warning.erb +16 -0
- data/templates/storage/dropbox/authorization_url.erb +6 -0
- data/templates/storage/dropbox/authorized.erb +4 -0
- data/templates/storage/dropbox/cache_file_written.erb +10 -0
- metadata +507 -0
@@ -0,0 +1,180 @@
|
|
1
|
+
# frozen_string_literal: true

module Backup
  module Syncer
    module Cloud
      class Error < Backup::Error; end

      ##
      # Abstract base class for the cloud syncers (S3, CloudFiles).
      # For each configured local directory it compares local MD5 digests
      # against the remote store, uploads changed files (optionally from a
      # thread pool) and reports — or, when #mirror is set, deletes —
      # remote files that no longer exist locally ("orphans").
      class Base < Syncer::Base
        # Guards the shared progress counters when uploads run from
        # multiple threads.
        MUTEX = Mutex.new

        ##
        # Number of threads to use for concurrency.
        #
        # Default: 0 (no concurrency)
        attr_accessor :thread_count

        ##
        # Number of times to retry failed operations.
        #
        # Default: 10
        attr_accessor :max_retries

        ##
        # Time in seconds to pause before each retry.
        #
        # Default: 30
        attr_accessor :retry_waitsec

        # Evaluates the configuration block, then applies defaults.
        # NOTE(review): @path/#path appear to come from Syncer::Base —
        # the leading "/" is stripped so remote keys are always relative.
        def initialize(syncer_id = nil, &block)
          super
          instance_eval(&block) if block_given?

          @thread_count ||= 0
          @max_retries ||= 10
          @retry_waitsec ||= 30

          @path ||= "backups"
          @path = path.sub(%r{^/}, "")
        end

        # Syncs every configured directory and logs a summary.
        # A thread-safe Queue collects orphans when worker threads are
        # in use; a plain Array suffices for the single-threaded path.
        def perform!
          log!(:started)
          @transfer_count = 0
          @unchanged_count = 0
          @skipped_count = 0
          @orphans = thread_count > 0 ? Queue.new : []

          directories.each { |dir| sync_directory(dir) }
          orphans_result = process_orphans

          Logger.info "\nSummary:"
          Logger.info "\s\sTransferred Files: #{@transfer_count}"
          Logger.info "\s\s#{orphans_result}"
          Logger.info "\s\sUnchanged Files: #{@unchanged_count}"
          if @skipped_count > 0
            Logger.warn "\s\sSkipped Files: #{@skipped_count}"
          end
          log!(:finished)
        end

        private

        # Syncs one local directory against its remote counterpart.
        # The union of local and remote relative paths is walked so that
        # both new/changed local files and remote-only orphans are seen.
        def sync_directory(dir)
          remote_base = path.empty? ? File.basename(dir) :
            File.join(path, File.basename(dir))
          Logger.info "Gathering remote data for '#{remote_base}'..."
          remote_files = get_remote_files(remote_base)

          Logger.info("Gathering local data for '#{File.expand_path(dir)}'...")
          local_files = LocalFile.find(dir, excludes)

          relative_paths = (local_files.keys | remote_files.keys).sort
          if relative_paths.empty?
            Logger.info "No local or remote files found"
          else
            Logger.info "Syncing..."
            sync_block = proc do |relative_path|
              local_file = local_files[relative_path]
              remote_md5 = remote_files[relative_path]
              remote_path = File.join(remote_base, relative_path)
              sync_file(local_file, remote_path, remote_md5)
            end

            if thread_count > 0
              sync_in_threads(relative_paths, sync_block)
            else
              relative_paths.each(&sync_block)
            end
          end
        end

        # Runs +sync_block+ over +relative_paths+ using a pool of worker
        # threads that drain a shared Queue.
        def sync_in_threads(relative_paths, sync_block)
          queue = Queue.new
          queue << relative_paths.shift until relative_paths.empty?
          num_threads = [thread_count, queue.size].min
          Logger.info "\s\sUsing #{num_threads} Threads"
          threads = Array.new(num_threads) do
            Thread.new do
              loop do
                # Non-blocking shift raises ThreadError when the queue is
                # empty; the rescue yields nil, which ends this worker.
                path = queue.shift(true) rescue nil
                path ? sync_block.call(path) : break
              end
            end
          end

          # abort if any thread raises an exception
          # (a dead-from-exception thread has a nil status; all remaining
          # workers are killed so the failure surfaces via #join below)
          while threads.any?(&:alive?)
            if threads.any? { |thr| thr.status.nil? }
              threads.each(&:kill)
              Thread.pass while threads.any?(&:alive?)
              break
            end
            sleep num_threads * 0.1
          end
          threads.each(&:join)
        end

        # If an exception is raised in multiple threads, only the exception
        # raised in the first thread that Thread#join is called on will be
        # handled. So all exceptions are logged first with their details,
        # then a generic exception is raised.
        #
        # Uploads +local_file+ when its MD5 differs from +remote_md5+;
        # counts it as unchanged when the digests match; records the
        # remote path as an orphan when no local file exists.
        def sync_file(local_file, remote_path, remote_md5)
          if local_file && File.exist?(local_file.path)
            if local_file.md5 == remote_md5
              MUTEX.synchronize { @unchanged_count += 1 }
            else
              Logger.info("\s\s[transferring] '#{remote_path}'")
              begin
                cloud_io.upload(local_file.path, remote_path)
                MUTEX.synchronize { @transfer_count += 1 }
              rescue CloudIO::FileSizeError => err
                # Oversized files are skipped (warned), not fatal.
                MUTEX.synchronize { @skipped_count += 1 }
                Logger.warn Error.wrap(err, "Skipping '#{remote_path}'")
              rescue => err
                Logger.error(err)
                raise Error, <<-EOS
                  Syncer Failed!
                  See the Retry [info] and [error] messages (if any)
                  for details on each failed operation.
                EOS
              end
            end
          elsif remote_md5
            @orphans << remote_path
          end
        end

        # Deletes (when #mirror) or reports remote-only files collected
        # during the sync. Returns the summary line used by #perform!.
        def process_orphans
          if @orphans.empty?
            return mirror ? "Deleted Files: 0" : "Orphaned Files: 0"
          end

          # Drain the thread-safe Queue into an Array for logging/deletion.
          if @orphans.is_a?(Queue)
            @orphans = Array.new(@orphans.size) { @orphans.shift }
          end

          if mirror
            Logger.info @orphans.map { |path|
              "\s\s[removing] '#{path}'"
            }.join("\n")

            begin
              cloud_io.delete(@orphans)
              "Deleted Files: #{@orphans.count}"
            rescue => err
              Logger.warn Error.wrap(err, "Delete Operation Failed")
              "Attempted to Delete: #{@orphans.count} " \
                "(See log messages for actual results)"
            end
          else
            Logger.info @orphans.map { |path|
              "\s\s[orphaned] '#{path}'"
            }.join("\n")
            "Orphaned Files: #{@orphans.count}"
          end
        end
      end
    end
  end
end
|
@@ -0,0 +1,83 @@
|
|
1
|
+
# frozen_string_literal: true

require "backup/cloud_io/cloud_files"

module Backup
  module Syncer
    module Cloud
      ##
      # Syncer backed by Rackspace Cloud Files via CloudIO::CloudFiles.
      class CloudFiles < Base
        class Error < Backup::Error; end

        ##
        # Rackspace CloudFiles Credentials
        attr_accessor :username, :api_key

        ##
        # Rackspace CloudFiles Container
        attr_accessor :container

        ##
        # Rackspace AuthURL (optional)
        attr_accessor :auth_url

        ##
        # Rackspace Region (optional)
        attr_accessor :region

        ##
        # Rackspace Service Net
        # (LAN-based transfers to avoid charges and improve performance)
        attr_accessor :servicenet

        ##
        # Additional options to pass along to fog.
        # e.g. Fog::Storage.new({ :provider => 'Rackspace' }.merge(fog_options))
        attr_accessor :fog_options

        # Applies defaults and validates the configuration.
        def initialize(syncer_id = nil)
          super

          @servicenet = false if @servicenet.nil?

          check_configuration
        end

        private

        # Lazily-built CloudIO connection wrapper.
        def cloud_io
          @cloud_io ||= begin
            options = {
              username: username,
              api_key: api_key,
              auth_url: auth_url,
              region: region,
              servicenet: servicenet,
              container: container,
              max_retries: max_retries,
              retry_waitsec: retry_waitsec,
              # Syncer can not use SLOs.
              segments_container: nil,
              segment_size: 0,
              fog_options: fog_options
            }
            CloudIO::CloudFiles.new(**options)
          end
        end

        # Returns a Hash mapping each remote object's path (relative to
        # +remote_base+) to its stored MD5 checksum.
        def get_remote_files(remote_base)
          prefix = remote_base + "/"
          cloud_io.objects(remote_base).each_with_object({}) do |obj, files|
            files[obj.name.sub(prefix, "")] = obj.hash
          end
        end

        # Raises Error unless every required setting is present.
        def check_configuration
          required = %w[username api_key container]
          missing = required.any? { |name| send(name).nil? }
          raise Error, <<-EOS if missing
            Configuration Error
            #{required.map { |name| "##{name}" }.join(", ")} are all required
          EOS
        end
      end
    end
  end
end
|
@@ -0,0 +1,99 @@
|
|
1
|
+
# frozen_string_literal: true

require "digest/md5"

module Backup
  module Syncer
    module Cloud
      ##
      # A single local file to be synced, identified by its (sanitized)
      # absolute path and MD5 digest.
      class LocalFile
        attr_reader :path
        attr_accessor :md5

        class << self
          # Returns a Hash of LocalFile objects for each file within +dir+,
          # except those matching any of the +excludes+.
          # Hash keys are the file's path relative to +dir+.
          def find(dir, excludes = [])
            base = File.expand_path(dir)
            find_md5(base, excludes).each_with_object({}) do |file, files|
              files[file.path.sub(base + "/", "")] = file
            end
          end

          # Return a new LocalFile object if it's valid.
          # Otherwise, log a warning and return nil.
          def new(*args)
            file = super
            return file unless file.invalid?

            Logger.warn("\s\s[skipping] #{file.path}\n" \
              "\s\sPath Contains Invalid UTF-8 byte sequences")
            nil
          end

          private

          # Recursively collects LocalFile objects (with MD5 digests) for
          # every file under +dir+, honoring +excludes+.
          def find_md5(dir, excludes)
            entries = (Dir.entries(dir) - %w[. ..]).map { |e| File.join(dir, e) }
            entries.each_with_object([]) do |full_path, found|
              if File.directory?(full_path)
                next if exclude?(excludes, full_path)

                found.concat(find_md5(full_path, excludes))
              elsif File.file?(full_path)
                file = new(full_path)
                next unless file
                next if exclude?(excludes, file.path)

                file.md5 = Digest::MD5.file(file.path).hexdigest
                found << file
              end
            end
          end

          # Returns true if +path+ matches any of the +excludes+.
          # Note this can not be called if +path+ includes invalid UTF-8.
          def exclude?(excludes, path)
            excludes.any? do |ex|
              case ex
              when String then File.fnmatch?(ex, path)
              when Regexp then ex.match(path)
              end
            end
          end
        end

        # If +path+ contains invalid UTF-8, it will be sanitized
        # and the LocalFile object will be flagged as invalid.
        # This is done so @file.path may be logged.
        def initialize(path)
          @path = sanitize(path)
        end

        # True when the original path contained invalid UTF-8.
        def invalid?
          !!@invalid
        end

        private

        # Replaces each invalid UTF-8 byte with U+FFFD, flagging the
        # instance as invalid when any replacement occurs.
        def sanitize(str)
          str.each_char.map do |c|
            begin
              c.unpack("U") # raises unless +c+ is a valid UTF-8 character
              c
            rescue
              @invalid = true
              "\xEF\xBF\xBD" # => "\uFFFD"
            end
          end.join
        end
      end
    end
  end
end
|
@@ -0,0 +1,118 @@
|
|
1
|
+
# frozen_string_literal: true

require "backup/cloud_io/s3"

module Backup
  module Syncer
    module Cloud
      ##
      # Syncer backed by Amazon S3 via CloudIO::S3.
      class S3 < Base
        class Error < Backup::Error; end

        ##
        # Amazon Simple Storage Service (S3) Credentials
        attr_accessor :access_key_id, :secret_access_key, :use_iam_profile

        ##
        # Amazon S3 bucket name
        attr_accessor :bucket

        ##
        # Region of the specified S3 bucket
        attr_accessor :region

        ##
        # Encryption algorithm to use for Amazon Server-Side Encryption
        #
        # Supported values:
        #
        # - :aes256
        #
        # Default: nil
        attr_accessor :encryption

        ##
        # Storage class to use for the S3 objects uploaded
        #
        # Supported values:
        #
        # - :standard (default)
        # - :reduced_redundancy
        #
        # Default: :standard
        attr_accessor :storage_class

        ##
        # Additional options to pass along to fog.
        # e.g. Fog::Storage.new({ :provider => 'AWS' }.merge(fog_options))
        attr_accessor :fog_options

        # Applies defaults and validates the configuration.
        def initialize(syncer_id = nil)
          super

          @storage_class ||= :standard

          check_configuration
        end

        private

        # Lazily-built CloudIO connection wrapper.
        def cloud_io
          @cloud_io ||= begin
            options = {
              access_key_id: access_key_id,
              secret_access_key: secret_access_key,
              use_iam_profile: use_iam_profile,
              bucket: bucket,
              region: region,
              encryption: encryption,
              storage_class: storage_class,
              max_retries: max_retries,
              retry_waitsec: retry_waitsec,
              # Syncer can not use multipart upload.
              chunk_size: 0,
              fog_options: fog_options
            }
            CloudIO::S3.new(**options)
          end
        end

        # Returns a Hash mapping each remote object's path (relative to
        # +remote_base+) to its ETag (MD5) checksum.
        def get_remote_files(remote_base)
          prefix = remote_base + "/"
          cloud_io.objects(remote_base).each_with_object({}) do |obj, files|
            files[obj.key.sub(prefix, "")] = obj.etag
          end
        end

        # Raises Error unless credentials, bucket, encryption and
        # storage class settings form a valid combination.
        def check_configuration
          # IAM-profile auth needs no static keys; only the bucket.
          required =
            use_iam_profile ? %w[bucket] : %w[access_key_id secret_access_key bucket]

          if required.any? { |name| send(name).nil? }
            raise Error, <<-EOS
              Configuration Error
              #{required.map { |name| "##{name}" }.join(", ")} are all required
            EOS
          end

          if encryption && encryption.to_s.upcase != "AES256"
            raise Error, <<-EOS
              Configuration Error
              #encryption must be :aes256 or nil
            EOS
          end

          classes = %w[STANDARD REDUCED_REDUNDANCY]
          unless classes.include?(storage_class.to_s.upcase)
            raise Error, <<-EOS
              Configuration Error
              #storage_class must be :standard or :reduced_redundancy
            EOS
          end
        end
      end
    end
  end
end
|
@@ -0,0 +1,55 @@
|
|
1
|
+
# frozen_string_literal: true

module Backup
  module Syncer
    module RSync
      ##
      # Shared behavior for the rsync-based syncers (Local/Push/Pull):
      # builds the common rsync command line from the configured options.
      class Base < Syncer::Base
        ##
        # Additional String or Array of options for the rsync cli
        attr_accessor :additional_rsync_options

        # Whether to pass `--archive` to rsync. Default: true.
        attr_accessor :archive

        # Evaluates the configuration block, then applies defaults.
        def initialize(syncer_id = nil, &block)
          super
          instance_eval(&block) if block_given?

          @path ||= "~/backups"
          @archive = true if @archive.nil?
        end

        private

        ##
        # Common base command for Local/Push/Pull
        def rsync_command
          extra = " #{Array(additional_rsync_options).join(" ")}".rstrip
          [
            utility(:rsync),
            archive_option,
            mirror_option,
            exclude_option,
            extra
          ].join
        end

        # `--delete` makes rsync remove destination files that no longer
        # exist at the source.
        def mirror_option
          mirror ? " --delete" : ""
        end

        def archive_option
          archive ? " --archive" : ""
        end

        def exclude_option
          excludes.map { |pattern| " --exclude='#{pattern}'" }.join
        end

        ##
        # Each path is expanded, since these refer to local paths and are
        # being shell-quoted. This will also remove any trailing `/` from
        # each path, as we don't want rsync's "trailing / on source directories"
        # behavior. This method is used by RSync::Local and RSync::Push.
        def paths_to_push
          directories
            .map { |dir| "'#{File.expand_path(dir)}'" }
            .join(" ")
        end
      end
    end
  end
end
|
@@ -0,0 +1,29 @@
|
|
1
|
+
# frozen_string_literal: true

module Backup
  module Syncer
    module RSync
      ##
      # Syncs the configured directories to a local destination path
      # using a single rsync invocation.
      class Local < Base
        # Creates the destination directory and runs rsync.
        def perform!
          log!(:started)

          create_dest_path!
          command = "#{rsync_command} #{paths_to_push} '#{dest_path}'"
          run(command)

          log!(:finished)
        end

        private

        # Expand path, since this is local and shell-quoted.
        def dest_path
          @dest_path ||= File.expand_path(path)
        end

        # Ensures the destination directory exists before rsync runs.
        def create_dest_path!
          FileUtils.mkdir_p dest_path
        end
      end
    end
  end
end
|
@@ -0,0 +1,49 @@
|
|
1
|
+
# frozen_string_literal: true

module Backup
  module Syncer
    module RSync
      ##
      # Pulls the configured remote directories down into a local
      # destination path. Inherits host/password handling from Push.
      class Pull < Push
        # Writes the rsync password file, runs the pull, and always
        # removes the password file afterwards.
        def perform!
          log!(:started)
          write_password_file!

          create_dest_path!
          command = "#{rsync_command} #{host_options}#{paths_to_pull} " \
                    "'#{dest_path}'"
          run(command)

          log!(:finished)
        ensure
          remove_password_file!
        end

        private

        ##
        # Returns the syntax for pulling multiple paths from the remote host.
        # e.g.
        #   rsync -a -e "ssh -p 22" host:'path1' :'path2' '/dest'
        #   rsync -a rsync_user@host::'modname/path1' ::'modname/path2' '/dest'
        #
        # Remove any preceeding '~/', since these paths are on the remote.
        # Also remove any trailing `/`, since we don't want rsync's
        # "trailing / on source directories" behavior.
        def paths_to_pull
          sep = mode == :ssh ? ":" : "::"
          quoted = directories.map do |dir|
            trimmed = dir.sub(%r{^~/}, "").sub(%r{/$}, "")
            "#{sep}'#{trimmed}'"
          end
          quoted.join(" ").sub(%r{^#{sep}}, "")
        end

        # Expand path, since this is local and shell-quoted.
        def dest_path
          @dest_path ||= File.expand_path(path)
        end

        # Ensures the destination directory exists before rsync runs.
        def create_dest_path!
          FileUtils.mkdir_p dest_path
        end
      end
    end
  end
end
|