cm-backup 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/README.md +20 -0
- data/bin/backup +5 -0
- data/lib/backup.rb +144 -0
- data/lib/backup/archive.rb +170 -0
- data/lib/backup/binder.rb +22 -0
- data/lib/backup/cleaner.rb +116 -0
- data/lib/backup/cli.rb +374 -0
- data/lib/backup/cloud_io/base.rb +41 -0
- data/lib/backup/cloud_io/cloud_files.rb +298 -0
- data/lib/backup/cloud_io/s3.rb +260 -0
- data/lib/backup/compressor/base.rb +35 -0
- data/lib/backup/compressor/bzip2.rb +39 -0
- data/lib/backup/compressor/custom.rb +53 -0
- data/lib/backup/compressor/gzip.rb +74 -0
- data/lib/backup/config.rb +119 -0
- data/lib/backup/config/dsl.rb +103 -0
- data/lib/backup/config/helpers.rb +143 -0
- data/lib/backup/database/base.rb +85 -0
- data/lib/backup/database/mongodb.rb +187 -0
- data/lib/backup/database/mysql.rb +192 -0
- data/lib/backup/database/openldap.rb +95 -0
- data/lib/backup/database/postgresql.rb +133 -0
- data/lib/backup/database/redis.rb +179 -0
- data/lib/backup/database/riak.rb +82 -0
- data/lib/backup/database/sqlite.rb +57 -0
- data/lib/backup/encryptor/base.rb +29 -0
- data/lib/backup/encryptor/gpg.rb +747 -0
- data/lib/backup/encryptor/open_ssl.rb +77 -0
- data/lib/backup/errors.rb +58 -0
- data/lib/backup/logger.rb +199 -0
- data/lib/backup/logger/console.rb +51 -0
- data/lib/backup/logger/fog_adapter.rb +29 -0
- data/lib/backup/logger/logfile.rb +133 -0
- data/lib/backup/logger/syslog.rb +116 -0
- data/lib/backup/model.rb +479 -0
- data/lib/backup/notifier/base.rb +128 -0
- data/lib/backup/notifier/campfire.rb +63 -0
- data/lib/backup/notifier/command.rb +102 -0
- data/lib/backup/notifier/datadog.rb +107 -0
- data/lib/backup/notifier/flowdock.rb +103 -0
- data/lib/backup/notifier/hipchat.rb +118 -0
- data/lib/backup/notifier/http_post.rb +117 -0
- data/lib/backup/notifier/mail.rb +249 -0
- data/lib/backup/notifier/nagios.rb +69 -0
- data/lib/backup/notifier/pagerduty.rb +81 -0
- data/lib/backup/notifier/prowl.rb +68 -0
- data/lib/backup/notifier/pushover.rb +74 -0
- data/lib/backup/notifier/ses.rb +105 -0
- data/lib/backup/notifier/slack.rb +148 -0
- data/lib/backup/notifier/twitter.rb +58 -0
- data/lib/backup/notifier/zabbix.rb +63 -0
- data/lib/backup/package.rb +55 -0
- data/lib/backup/packager.rb +107 -0
- data/lib/backup/pipeline.rb +124 -0
- data/lib/backup/splitter.rb +76 -0
- data/lib/backup/storage/base.rb +69 -0
- data/lib/backup/storage/cloud_files.rb +158 -0
- data/lib/backup/storage/cycler.rb +75 -0
- data/lib/backup/storage/dropbox.rb +212 -0
- data/lib/backup/storage/ftp.rb +112 -0
- data/lib/backup/storage/local.rb +64 -0
- data/lib/backup/storage/qiniu.rb +65 -0
- data/lib/backup/storage/rsync.rb +248 -0
- data/lib/backup/storage/s3.rb +156 -0
- data/lib/backup/storage/scp.rb +67 -0
- data/lib/backup/storage/sftp.rb +82 -0
- data/lib/backup/syncer/base.rb +70 -0
- data/lib/backup/syncer/cloud/base.rb +179 -0
- data/lib/backup/syncer/cloud/cloud_files.rb +83 -0
- data/lib/backup/syncer/cloud/local_file.rb +100 -0
- data/lib/backup/syncer/cloud/s3.rb +110 -0
- data/lib/backup/syncer/rsync/base.rb +54 -0
- data/lib/backup/syncer/rsync/local.rb +31 -0
- data/lib/backup/syncer/rsync/pull.rb +51 -0
- data/lib/backup/syncer/rsync/push.rb +205 -0
- data/lib/backup/template.rb +46 -0
- data/lib/backup/utilities.rb +224 -0
- data/lib/backup/version.rb +5 -0
- data/templates/cli/archive +28 -0
- data/templates/cli/compressor/bzip2 +4 -0
- data/templates/cli/compressor/custom +7 -0
- data/templates/cli/compressor/gzip +4 -0
- data/templates/cli/config +123 -0
- data/templates/cli/databases/mongodb +15 -0
- data/templates/cli/databases/mysql +18 -0
- data/templates/cli/databases/openldap +24 -0
- data/templates/cli/databases/postgresql +16 -0
- data/templates/cli/databases/redis +16 -0
- data/templates/cli/databases/riak +17 -0
- data/templates/cli/databases/sqlite +11 -0
- data/templates/cli/encryptor/gpg +27 -0
- data/templates/cli/encryptor/openssl +9 -0
- data/templates/cli/model +26 -0
- data/templates/cli/notifier/zabbix +15 -0
- data/templates/cli/notifiers/campfire +12 -0
- data/templates/cli/notifiers/command +32 -0
- data/templates/cli/notifiers/datadog +57 -0
- data/templates/cli/notifiers/flowdock +16 -0
- data/templates/cli/notifiers/hipchat +16 -0
- data/templates/cli/notifiers/http_post +32 -0
- data/templates/cli/notifiers/mail +24 -0
- data/templates/cli/notifiers/nagios +13 -0
- data/templates/cli/notifiers/pagerduty +12 -0
- data/templates/cli/notifiers/prowl +11 -0
- data/templates/cli/notifiers/pushover +11 -0
- data/templates/cli/notifiers/ses +15 -0
- data/templates/cli/notifiers/slack +22 -0
- data/templates/cli/notifiers/twitter +13 -0
- data/templates/cli/splitter +7 -0
- data/templates/cli/storages/cloud_files +11 -0
- data/templates/cli/storages/dropbox +20 -0
- data/templates/cli/storages/ftp +13 -0
- data/templates/cli/storages/local +8 -0
- data/templates/cli/storages/qiniu +12 -0
- data/templates/cli/storages/rsync +17 -0
- data/templates/cli/storages/s3 +16 -0
- data/templates/cli/storages/scp +15 -0
- data/templates/cli/storages/sftp +15 -0
- data/templates/cli/syncers/cloud_files +22 -0
- data/templates/cli/syncers/rsync_local +20 -0
- data/templates/cli/syncers/rsync_pull +28 -0
- data/templates/cli/syncers/rsync_push +28 -0
- data/templates/cli/syncers/s3 +27 -0
- data/templates/general/links +3 -0
- data/templates/general/version.erb +2 -0
- data/templates/notifier/mail/failure.erb +16 -0
- data/templates/notifier/mail/success.erb +16 -0
- data/templates/notifier/mail/warning.erb +16 -0
- data/templates/storage/dropbox/authorization_url.erb +6 -0
- data/templates/storage/dropbox/authorized.erb +4 -0
- data/templates/storage/dropbox/cache_file_written.erb +10 -0
- metadata +1077 -0
@@ -0,0 +1,156 @@
|
|
1
|
+
# encoding: utf-8
require 'backup/cloud_io/s3'

module Backup
  module Storage
    # Stores the backup package files in an Amazon S3 bucket through
    # CloudIO::S3 (fog-backed). Supports multipart uploads, server-side
    # encryption and alternate storage classes.
    class S3 < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Amazon Simple Storage Service (S3) Credentials
      attr_accessor :access_key_id, :secret_access_key, :use_iam_profile

      ##
      # Amazon S3 bucket name
      attr_accessor :bucket

      ##
      # Region of the specified S3 bucket
      attr_accessor :region

      ##
      # Multipart chunk size, specified in MiB.
      #
      # Each package file larger than +chunk_size+
      # will be uploaded using S3 Multipart Upload.
      #
      # Minimum: 5 (but may be disabled with 0)
      # Maximum: 5120
      # Default: 5
      attr_accessor :chunk_size

      ##
      # Number of times to retry failed operations.
      #
      # Default: 10
      attr_accessor :max_retries

      ##
      # Time in seconds to pause before each retry.
      #
      # Default: 30
      attr_accessor :retry_waitsec

      ##
      # Encryption algorithm to use for Amazon Server-Side Encryption
      #
      # Supported values:
      #
      # - :aes256
      #
      # Default: nil
      attr_accessor :encryption

      ##
      # Storage class to use for the S3 objects uploaded
      #
      # Supported values:
      #
      # - :standard (default)
      # - :standard_ia
      # - :reduced_redundancy
      #
      # Default: :standard
      attr_accessor :storage_class

      ##
      # Additional options to pass along to fog.
      # e.g. Fog::Storage.new({ :provider => 'AWS' }.merge(fog_options))
      attr_accessor :fog_options

      def initialize(model, storage_id = nil)
        super

        # Defaults for anything the model's DSL did not configure.
        @chunk_size    ||= 5 # MiB
        @max_retries   ||= 10
        @retry_waitsec ||= 30
        @path          ||= 'backups'
        @storage_class ||= :standard

        # S3 object keys must not begin with a leading slash.
        @path = @path.sub(/^\//, '')

        check_configuration
      end

      private

      # Memoized CloudIO::S3 client built from this storage's accessors,
      # so every operation shares a single client instance.
      def cloud_io
        @cloud_io ||= CloudIO::S3.new(
          :access_key_id     => access_key_id,
          :secret_access_key => secret_access_key,
          :use_iam_profile   => use_iam_profile,
          :region            => region,
          :bucket            => bucket,
          :encryption        => encryption,
          :storage_class     => storage_class,
          :max_retries       => max_retries,
          :retry_waitsec     => retry_waitsec,
          :chunk_size        => chunk_size,
          :fog_options       => fog_options
        )
      end

      # Uploads each file of the current package from the local tmp_path
      # to its destination key beneath remote_path.
      def transfer!
        package.filenames.each do |filename|
          src = File.join(Config.tmp_path, filename)
          dest = File.join(remote_path, filename)
          Logger.info "Storing '#{ bucket }/#{ dest }'..."
          cloud_io.upload(src, dest)
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{ package.time }..."

        remote_path = remote_path_for(package)
        objects = cloud_io.objects(remote_path)

        raise Error, "Package at '#{ remote_path }' not found" if objects.empty?

        cloud_io.delete(objects)
      end

      # Validates the configured accessors, raising Error with a
      # descriptive message on the first violation found.
      def check_configuration
        required =
          if use_iam_profile
            %w{ bucket }
          else
            %w{ access_key_id secret_access_key bucket }
          end
        raise Error, <<-EOS if required.map {|name| send(name) }.any?(&:nil?)
          Configuration Error
          #{ required.map {|name| "##{ name }"}.join(', ') } are all required
        EOS

        raise Error, <<-EOS if chunk_size > 0 && !chunk_size.between?(5, 5120)
          Configuration Error
          #chunk_size must be between 5 and 5120 (or 0 to disable multipart)
        EOS

        raise Error, <<-EOS if encryption && encryption.to_s.upcase != 'AES256'
          Configuration Error
          #encryption must be :aes256 or nil
        EOS

        classes = ['STANDARD', 'STANDARD_IA', 'REDUCED_REDUNDANCY']
        raise Error, <<-EOS unless classes.include?(storage_class.to_s.upcase)
          Configuration Error
          #storage_class must be :standard or :standard_ia or :reduced_redundancy
        EOS
      end

    end
  end
end
|
@@ -0,0 +1,67 @@
|
|
1
|
+
# encoding: utf-8
require 'net/scp'

module Backup
  module Storage
    # Stores the backup package on a remote server over SSH, uploading
    # files with SCP.
    class SCP < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Server credentials
      attr_accessor :username, :password, :ssh_options

      ##
      # Server IP Address and SCP port
      attr_accessor :ip, :port

      def initialize(model, storage_id = nil)
        super

        @port        ||= 22
        @path        ||= 'backups'
        @ssh_options ||= {}
        # Strip a leading '~/': remote paths are already relative to the
        # user's home directory.
        path.sub!(/^~\//, '')
      end

      private

      # Opens an SSH session to the server and yields it; the session is
      # closed when the block returns.
      def connection
        Net::SSH.start(
          ip, username, { :password => password, :port => port }.merge(ssh_options)
        ) {|ssh| yield ssh }
      end

      # Ensures the remote directory exists, then uploads each package
      # file into it via SCP.
      def transfer!
        connection do |ssh|
          ssh.exec!("mkdir -p '#{ remote_path }'")

          package.filenames.each do |filename|
            src = File.join(Config.tmp_path, filename)
            dest = File.join(remote_path, filename)
            Logger.info "Storing '#{ ip }:#{ dest }'..."
            ssh.scp.upload!(src, dest)
          end
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{ package.time }..."

        stderr_messages = []
        connection do |ssh|
          # Collect anything written to stderr; `rm -r` reports failures
          # there rather than via an exit status we can inspect here.
          ssh.exec!("rm -r '#{ remote_path_for(package) }'") do |ch, stream, data|
            stderr_messages << data if stream == :stderr
          end
        end
        unless stderr_messages.empty?
          raise Error, "Net::SSH reported the following errors:\n" +
              stderr_messages.join("\n")
        end
      end

    end
  end
end
|
@@ -0,0 +1,82 @@
|
|
1
|
+
# encoding: utf-8
require 'net/sftp'

module Backup
  module Storage
    # Stores the backup package on a remote server using SFTP.
    class SFTP < Base
      include Storage::Cycler

      ##
      # Server credentials
      attr_accessor :username, :password, :ssh_options

      ##
      # Server IP Address and SFTP port
      attr_accessor :ip, :port

      def initialize(model, storage_id = nil)
        super

        @ssh_options ||= {}
        @port        ||= 22
        @path        ||= 'backups'
        # Strip a leading '~/': remote paths are already relative to the
        # user's home directory.
        path.sub!(/^~\//, '')
      end

      private

      # Opens an SFTP session to the server and yields it; the session
      # is closed when the block returns.
      def connection
        Net::SFTP.start(
          ip, username, { :password => password, :port => port }.merge(ssh_options)
        ) {|sftp| yield sftp }
      end

      # Builds the remote directory tree, then uploads each package file
      # into it.
      def transfer!
        connection do |sftp|
          create_remote_path(sftp)

          package.filenames.each do |filename|
            src = File.join(Config.tmp_path, filename)
            dest = File.join(remote_path, filename)
            Logger.info "Storing '#{ ip }:#{ dest }'..."
            sftp.upload!(src, dest)
          end
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{ package.time }..."

        remote_path = remote_path_for(package)
        connection do |sftp|
          package.filenames.each do |filename|
            sftp.remove!(File.join(remote_path, filename))
          end

          sftp.rmdir!(remote_path)
        end
      end

      ##
      # Creates (if they don't exist yet) all the directories on the remote
      # server in order to upload the backup file. Net::SFTP does not support
      # paths to directories that don't yet exist when creating new
      # directories. Instead, we split the parts up in to an array (for each
      # '/') and loop through that to create the directories one by one.
      # Net::SFTP raises an exception when the directory it's trying to create
      # already exists, so we have rescue it
      def create_remote_path(sftp)
        partial = []
        remote_path.split('/').each do |segment|
          partial << segment
          begin
            sftp.mkdir!(partial.join('/'))
          rescue Net::SFTP::StatusException
            # Directory already exists — safe to continue.
          end
        end
      end

    end
  end
end
|
@@ -0,0 +1,70 @@
|
|
1
|
+
# encoding: utf-8

module Backup
  module Syncer
    # Abstract base class shared by all Syncers. Holds the common DSL
    # (directories/add/exclude) and logging helpers.
    class Base
      include Utilities::Helpers
      include Config::Helpers

      ##
      # Path to store the synced files/directories to
      attr_accessor :path

      ##
      # Flag for mirroring the files/directories
      attr_accessor :mirror

      ##
      # Optional user-defined identifier to differentiate multiple syncers
      # defined within a single backup model. Currently this is only used
      # in the log messages.
      attr_reader :syncer_id

      attr_reader :excludes

      def initialize(syncer_id = nil)
        @syncer_id = syncer_id

        load_defaults!

        @mirror      ||= false
        @directories ||= []
        @excludes    ||= []
      end

      ##
      # Syntactical sugar for the DSL for adding directories
      def directories(&block)
        return @directories unless block_given?
        instance_eval(&block)
      end

      def add(path)
        directories << path
      end

      # For Cloud Syncers, +pattern+ can be a string (with shell-style
      # wildcards) or a regex.
      # For RSync, each +pattern+ will be passed to rsync's --exclude option.
      def exclude(pattern)
        excludes << pattern
      end

      private

      # Class name without the "Backup::" prefix, plus the optional
      # syncer_id — used to label log messages.
      def syncer_name
        @syncer_name ||= self.class.to_s.sub('Backup::', '') +
            (syncer_id ? " (#{ syncer_id })" : '')
      end

      # Logs a start/finish message for this syncer.
      def log!(action)
        msg = { :started => 'Started...', :finished => 'Finished!' }[action]
        Logger.info "#{ syncer_name } #{ msg }"
      end

    end
  end
end
|
@@ -0,0 +1,179 @@
|
|
1
|
+
# encoding: utf-8

module Backup
  module Syncer
    module Cloud
      class Error < Backup::Error; end

      # Shared logic for cloud syncers: compares local files against the
      # remote store's MD5s and uploads only what changed, optionally
      # with multiple worker threads. Remote files with no local
      # counterpart are treated as orphans (deleted when +mirror+ is set).
      class Base < Syncer::Base
        # Guards the shared counters when syncing with threads.
        MUTEX = Mutex.new

        ##
        # Number of threads to use for concurrency.
        #
        # Default: 0 (no concurrency)
        attr_accessor :thread_count

        ##
        # Number of times to retry failed operations.
        #
        # Default: 10
        attr_accessor :max_retries

        ##
        # Time in seconds to pause before each retry.
        #
        # Default: 30
        attr_accessor :retry_waitsec

        def initialize(syncer_id = nil, &block)
          super
          instance_eval(&block) if block_given?

          @thread_count  ||= 0
          @max_retries   ||= 10
          @retry_waitsec ||= 30

          @path ||= 'backups'
          # Remote keys must not start with a slash.
          @path = path.sub(/^\//, '')
        end

        # Syncs every configured directory, then logs a summary of
        # transferred / orphaned-or-deleted / unchanged / skipped files.
        def perform!
          log!(:started)
          @transfer_count  = 0
          @unchanged_count = 0
          @skipped_count   = 0
          # A Queue makes orphan collection thread-safe when concurrent.
          @orphans = thread_count > 0 ? Queue.new : []

          directories.each {|dir| sync_directory(dir) }
          orphans_result = process_orphans

          Logger.info "\nSummary:"
          Logger.info "\s\sTransferred Files: #{ @transfer_count }"
          Logger.info "\s\s#{ orphans_result }"
          Logger.info "\s\sUnchanged Files: #{ @unchanged_count }"
          Logger.warn "\s\sSkipped Files: #{ @skipped_count }" if @skipped_count > 0
          log!(:finished)
        end

        private

        # Diffs one local directory against its remote prefix and syncs
        # each relative path (threaded or sequential).
        def sync_directory(dir)
          remote_base = path.empty? ? File.basename(dir) :
                                      File.join(path, File.basename(dir))
          Logger.info "Gathering remote data for '#{ remote_base }'..."
          remote_files = get_remote_files(remote_base)

          Logger.info("Gathering local data for '#{ File.expand_path(dir) }'...")
          local_files = LocalFile.find(dir, excludes)

          # Union of both sides, so orphans (remote-only) are visited too.
          relative_paths = (local_files.keys | remote_files.keys).sort
          if relative_paths.empty?
            Logger.info 'No local or remote files found'
          else
            Logger.info 'Syncing...'
            sync_block = Proc.new do |relative_path|
              local_file  = local_files[relative_path]
              remote_md5  = remote_files[relative_path]
              remote_path = File.join(remote_base, relative_path)
              sync_file(local_file, remote_path, remote_md5)
            end

            if thread_count > 0
              sync_in_threads(relative_paths, sync_block)
            else
              relative_paths.each(&sync_block)
            end
          end
        end

        # Drains +relative_paths+ through a pool of worker threads, each
        # pulling paths off a shared queue and running +sync_block+.
        def sync_in_threads(relative_paths, sync_block)
          queue = Queue.new
          queue << relative_paths.shift until relative_paths.empty?
          num_threads = [thread_count, queue.size].min
          Logger.info "\s\sUsing #{ num_threads } Threads"
          threads = num_threads.times.map do
            Thread.new do
              loop do
                path = queue.shift(true) rescue nil
                path ? sync_block.call(path) : break
              end
            end
          end

          # abort if any thread raises an exception
          while threads.any?(&:alive?)
            if threads.any? {|thr| thr.status.nil? }
              threads.each(&:kill)
              Thread.pass while threads.any?(&:alive?)
              break
            end
            sleep num_threads * 0.1
          end
          threads.each(&:join)
        end

        # If an exception is raised in multiple threads, only the exception
        # raised in the first thread that Thread#join is called on will be
        # handled. So all exceptions are logged first with their details,
        # then a generic exception is raised.
        def sync_file(local_file, remote_path, remote_md5)
          if local_file && File.exist?(local_file.path)
            if local_file.md5 == remote_md5
              MUTEX.synchronize { @unchanged_count += 1 }
            else
              Logger.info("\s\s[transferring] '#{ remote_path }'")
              begin
                cloud_io.upload(local_file.path, remote_path)
                MUTEX.synchronize { @transfer_count += 1 }
              rescue CloudIO::FileSizeError => err
                MUTEX.synchronize { @skipped_count += 1 }
                Logger.warn Error.wrap(err, "Skipping '#{ remote_path }'")
              rescue => err
                Logger.error(err)
                raise Error, <<-EOS
                  Syncer Failed!
                  See the Retry [info] and [error] messages (if any)
                  for details on each failed operation.
                EOS
              end
            end
          elsif remote_md5
            # Remote file with no local counterpart.
            @orphans << remote_path
          end
        end

        # Deletes orphans when mirroring, otherwise just reports them.
        # Returns the summary line for perform!'s log output.
        def process_orphans
          if @orphans.empty?
            return mirror ? 'Deleted Files: 0' : 'Orphaned Files: 0'
          end

          # Normalize the thread-safe Queue back into an Array.
          if @orphans.is_a?(Queue)
            @orphans = @orphans.size.times.map { @orphans.shift }
          end

          if mirror
            Logger.info @orphans.map {|path|
              "\s\s[removing] '#{ path }'"
            }.join("\n")

            begin
              cloud_io.delete(@orphans)
              "Deleted Files: #{ @orphans.count }"
            rescue => err
              Logger.warn Error.wrap(err, 'Delete Operation Failed')
              "Attempted to Delete: #{ @orphans.count } " +
                  "(See log messages for actual results)"
            end
          else
            Logger.info @orphans.map {|path|
              "\s\s[orphaned] '#{ path }'"
            }.join("\n")
            "Orphaned Files: #{ @orphans.count }"
          end
        end

      end
    end
  end
end
|