backup-agoddard 3.0.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (190)
  1. data/.gitignore +8 -0
  2. data/.travis.yml +10 -0
  3. data/Gemfile +28 -0
  4. data/Guardfile +23 -0
  5. data/LICENSE.md +24 -0
  6. data/README.md +478 -0
  7. data/backup.gemspec +32 -0
  8. data/bin/backup +11 -0
  9. data/lib/backup.rb +133 -0
  10. data/lib/backup/archive.rb +117 -0
  11. data/lib/backup/binder.rb +22 -0
  12. data/lib/backup/cleaner.rb +121 -0
  13. data/lib/backup/cli/helpers.rb +93 -0
  14. data/lib/backup/cli/utility.rb +255 -0
  15. data/lib/backup/compressor/base.rb +35 -0
  16. data/lib/backup/compressor/bzip2.rb +50 -0
  17. data/lib/backup/compressor/custom.rb +53 -0
  18. data/lib/backup/compressor/gzip.rb +50 -0
  19. data/lib/backup/compressor/lzma.rb +52 -0
  20. data/lib/backup/compressor/pbzip2.rb +59 -0
  21. data/lib/backup/config.rb +174 -0
  22. data/lib/backup/configuration.rb +33 -0
  23. data/lib/backup/configuration/helpers.rb +130 -0
  24. data/lib/backup/configuration/store.rb +24 -0
  25. data/lib/backup/database/base.rb +53 -0
  26. data/lib/backup/database/mongodb.rb +230 -0
  27. data/lib/backup/database/mysql.rb +160 -0
  28. data/lib/backup/database/postgresql.rb +144 -0
  29. data/lib/backup/database/redis.rb +136 -0
  30. data/lib/backup/database/riak.rb +67 -0
  31. data/lib/backup/dependency.rb +108 -0
  32. data/lib/backup/encryptor/base.rb +29 -0
  33. data/lib/backup/encryptor/gpg.rb +760 -0
  34. data/lib/backup/encryptor/open_ssl.rb +72 -0
  35. data/lib/backup/errors.rb +124 -0
  36. data/lib/backup/hooks.rb +68 -0
  37. data/lib/backup/logger.rb +152 -0
  38. data/lib/backup/model.rb +409 -0
  39. data/lib/backup/notifier/base.rb +81 -0
  40. data/lib/backup/notifier/campfire.rb +155 -0
  41. data/lib/backup/notifier/hipchat.rb +93 -0
  42. data/lib/backup/notifier/mail.rb +206 -0
  43. data/lib/backup/notifier/prowl.rb +65 -0
  44. data/lib/backup/notifier/pushover.rb +88 -0
  45. data/lib/backup/notifier/twitter.rb +70 -0
  46. data/lib/backup/package.rb +47 -0
  47. data/lib/backup/packager.rb +100 -0
  48. data/lib/backup/pipeline.rb +110 -0
  49. data/lib/backup/splitter.rb +75 -0
  50. data/lib/backup/storage/base.rb +99 -0
  51. data/lib/backup/storage/cloudfiles.rb +87 -0
  52. data/lib/backup/storage/cycler.rb +117 -0
  53. data/lib/backup/storage/dropbox.rb +178 -0
  54. data/lib/backup/storage/ftp.rb +119 -0
  55. data/lib/backup/storage/local.rb +82 -0
  56. data/lib/backup/storage/ninefold.rb +116 -0
  57. data/lib/backup/storage/rsync.rb +149 -0
  58. data/lib/backup/storage/s3.rb +94 -0
  59. data/lib/backup/storage/scp.rb +99 -0
  60. data/lib/backup/storage/sftp.rb +108 -0
  61. data/lib/backup/syncer/base.rb +46 -0
  62. data/lib/backup/syncer/cloud/base.rb +247 -0
  63. data/lib/backup/syncer/cloud/cloud_files.rb +78 -0
  64. data/lib/backup/syncer/cloud/s3.rb +68 -0
  65. data/lib/backup/syncer/rsync/base.rb +49 -0
  66. data/lib/backup/syncer/rsync/local.rb +55 -0
  67. data/lib/backup/syncer/rsync/pull.rb +36 -0
  68. data/lib/backup/syncer/rsync/push.rb +116 -0
  69. data/lib/backup/template.rb +46 -0
  70. data/lib/backup/version.rb +43 -0
  71. data/spec-live/.gitignore +6 -0
  72. data/spec-live/README +7 -0
  73. data/spec-live/backups/config.rb +83 -0
  74. data/spec-live/backups/config.yml.template +46 -0
  75. data/spec-live/backups/models.rb +184 -0
  76. data/spec-live/compressor/custom_spec.rb +30 -0
  77. data/spec-live/compressor/gzip_spec.rb +30 -0
  78. data/spec-live/encryptor/gpg_keys.rb +239 -0
  79. data/spec-live/encryptor/gpg_spec.rb +287 -0
  80. data/spec-live/notifier/mail_spec.rb +121 -0
  81. data/spec-live/spec_helper.rb +151 -0
  82. data/spec-live/storage/dropbox_spec.rb +151 -0
  83. data/spec-live/storage/local_spec.rb +83 -0
  84. data/spec-live/storage/scp_spec.rb +193 -0
  85. data/spec-live/syncer/cloud/s3_spec.rb +124 -0
  86. data/spec/archive_spec.rb +335 -0
  87. data/spec/cleaner_spec.rb +312 -0
  88. data/spec/cli/helpers_spec.rb +301 -0
  89. data/spec/cli/utility_spec.rb +411 -0
  90. data/spec/compressor/base_spec.rb +52 -0
  91. data/spec/compressor/bzip2_spec.rb +217 -0
  92. data/spec/compressor/custom_spec.rb +106 -0
  93. data/spec/compressor/gzip_spec.rb +217 -0
  94. data/spec/compressor/lzma_spec.rb +123 -0
  95. data/spec/compressor/pbzip2_spec.rb +165 -0
  96. data/spec/config_spec.rb +321 -0
  97. data/spec/configuration/helpers_spec.rb +247 -0
  98. data/spec/configuration/store_spec.rb +39 -0
  99. data/spec/configuration_spec.rb +62 -0
  100. data/spec/database/base_spec.rb +63 -0
  101. data/spec/database/mongodb_spec.rb +510 -0
  102. data/spec/database/mysql_spec.rb +411 -0
  103. data/spec/database/postgresql_spec.rb +353 -0
  104. data/spec/database/redis_spec.rb +334 -0
  105. data/spec/database/riak_spec.rb +176 -0
  106. data/spec/dependency_spec.rb +51 -0
  107. data/spec/encryptor/base_spec.rb +40 -0
  108. data/spec/encryptor/gpg_spec.rb +909 -0
  109. data/spec/encryptor/open_ssl_spec.rb +148 -0
  110. data/spec/errors_spec.rb +306 -0
  111. data/spec/hooks_spec.rb +35 -0
  112. data/spec/logger_spec.rb +367 -0
  113. data/spec/model_spec.rb +694 -0
  114. data/spec/notifier/base_spec.rb +104 -0
  115. data/spec/notifier/campfire_spec.rb +217 -0
  116. data/spec/notifier/hipchat_spec.rb +211 -0
  117. data/spec/notifier/mail_spec.rb +316 -0
  118. data/spec/notifier/prowl_spec.rb +138 -0
  119. data/spec/notifier/pushover_spec.rb +123 -0
  120. data/spec/notifier/twitter_spec.rb +153 -0
  121. data/spec/package_spec.rb +61 -0
  122. data/spec/packager_spec.rb +213 -0
  123. data/spec/pipeline_spec.rb +259 -0
  124. data/spec/spec_helper.rb +60 -0
  125. data/spec/splitter_spec.rb +120 -0
  126. data/spec/storage/base_spec.rb +166 -0
  127. data/spec/storage/cloudfiles_spec.rb +254 -0
  128. data/spec/storage/cycler_spec.rb +247 -0
  129. data/spec/storage/dropbox_spec.rb +480 -0
  130. data/spec/storage/ftp_spec.rb +271 -0
  131. data/spec/storage/local_spec.rb +259 -0
  132. data/spec/storage/ninefold_spec.rb +343 -0
  133. data/spec/storage/rsync_spec.rb +362 -0
  134. data/spec/storage/s3_spec.rb +245 -0
  135. data/spec/storage/scp_spec.rb +233 -0
  136. data/spec/storage/sftp_spec.rb +244 -0
  137. data/spec/syncer/base_spec.rb +109 -0
  138. data/spec/syncer/cloud/base_spec.rb +515 -0
  139. data/spec/syncer/cloud/cloud_files_spec.rb +181 -0
  140. data/spec/syncer/cloud/s3_spec.rb +174 -0
  141. data/spec/syncer/rsync/base_spec.rb +98 -0
  142. data/spec/syncer/rsync/local_spec.rb +149 -0
  143. data/spec/syncer/rsync/pull_spec.rb +98 -0
  144. data/spec/syncer/rsync/push_spec.rb +333 -0
  145. data/spec/version_spec.rb +21 -0
  146. data/templates/cli/utility/archive +25 -0
  147. data/templates/cli/utility/compressor/bzip2 +4 -0
  148. data/templates/cli/utility/compressor/custom +11 -0
  149. data/templates/cli/utility/compressor/gzip +4 -0
  150. data/templates/cli/utility/compressor/lzma +10 -0
  151. data/templates/cli/utility/compressor/pbzip2 +10 -0
  152. data/templates/cli/utility/config +32 -0
  153. data/templates/cli/utility/database/mongodb +18 -0
  154. data/templates/cli/utility/database/mysql +21 -0
  155. data/templates/cli/utility/database/postgresql +17 -0
  156. data/templates/cli/utility/database/redis +16 -0
  157. data/templates/cli/utility/database/riak +11 -0
  158. data/templates/cli/utility/encryptor/gpg +27 -0
  159. data/templates/cli/utility/encryptor/openssl +9 -0
  160. data/templates/cli/utility/model.erb +23 -0
  161. data/templates/cli/utility/notifier/campfire +12 -0
  162. data/templates/cli/utility/notifier/hipchat +15 -0
  163. data/templates/cli/utility/notifier/mail +22 -0
  164. data/templates/cli/utility/notifier/prowl +11 -0
  165. data/templates/cli/utility/notifier/pushover +11 -0
  166. data/templates/cli/utility/notifier/twitter +13 -0
  167. data/templates/cli/utility/splitter +7 -0
  168. data/templates/cli/utility/storage/cloud_files +22 -0
  169. data/templates/cli/utility/storage/dropbox +20 -0
  170. data/templates/cli/utility/storage/ftp +12 -0
  171. data/templates/cli/utility/storage/local +7 -0
  172. data/templates/cli/utility/storage/ninefold +9 -0
  173. data/templates/cli/utility/storage/rsync +11 -0
  174. data/templates/cli/utility/storage/s3 +19 -0
  175. data/templates/cli/utility/storage/scp +11 -0
  176. data/templates/cli/utility/storage/sftp +11 -0
  177. data/templates/cli/utility/syncer/cloud_files +46 -0
  178. data/templates/cli/utility/syncer/rsync_local +12 -0
  179. data/templates/cli/utility/syncer/rsync_pull +17 -0
  180. data/templates/cli/utility/syncer/rsync_push +17 -0
  181. data/templates/cli/utility/syncer/s3 +43 -0
  182. data/templates/general/links +11 -0
  183. data/templates/general/version.erb +2 -0
  184. data/templates/notifier/mail/failure.erb +9 -0
  185. data/templates/notifier/mail/success.erb +7 -0
  186. data/templates/notifier/mail/warning.erb +9 -0
  187. data/templates/storage/dropbox/authorization_url.erb +6 -0
  188. data/templates/storage/dropbox/authorized.erb +4 -0
  189. data/templates/storage/dropbox/cache_file_written.erb +10 -0
  190. metadata +277 -0
# encoding: utf-8

##
# Defer loading the Fog gem until the Backup::Storage::S3 class
# itself is loaded.
Backup::Dependency.load('fog')

module Backup
  module Storage
    class S3 < Base

      ##
      # Amazon Simple Storage Service (S3) Credentials
      attr_accessor :access_key_id, :secret_access_key

      ##
      # Amazon S3 bucket name and path
      attr_accessor :bucket, :path

      ##
      # Region of the specified S3 bucket
      attr_accessor :region

      ##
      # Builds the storage object. Defaults are applied before the
      # configuration block runs, so the block may override them.
      def initialize(model, storage_id = nil, &block)
        super(model, storage_id)
        @path ||= 'backups'
        instance_eval(&block) if block_given?
      end

      private

      ##
      # Provider name Fog uses for Amazon S3
      def provider
        'AWS'
      end

      ##
      # Memoized Fog connection to Amazon S3
      def connection
        @connection ||= Fog::Storage.new(
          :provider              => provider,
          :aws_access_key_id     => access_key_id,
          :aws_secret_access_key => secret_access_key,
          :region                => region
        )
      end

      ##
      # S3 object keys must not start with a leading slash.
      def remote_path_for(package)
        super(package).sub(/^\//, '')
      end

      ##
      # Uploads each file of the package to the configured bucket.
      def transfer!
        base = remote_path_for(@package)

        connection.sync_clock

        files_to_transfer_for(@package) do |local_file, remote_file|
          Logger.message "#{storage_name} started transferring " +
              "'#{ local_file }' to bucket '#{ bucket }'."

          source      = File.join(local_path, local_file)
          destination = File.join(base, remote_file)
          File.open(source, 'r') do |file|
            connection.put_object(bucket, destination, file)
          end
        end
      end

      ##
      # Removes the transferred archive file(s) from the storage location.
      # Any error raised here is rescued during Cycling and logged as a
      # warning containing the error message.
      def remove!(package)
        base = remote_path_for(package)

        connection.sync_clock

        transferred_files_for(package) do |local_file, remote_file|
          Logger.message "#{storage_name} started removing " +
              "'#{ local_file }' from bucket '#{ bucket }'."

          connection.delete_object(bucket, File.join(base, remote_file))
        end
      end

    end
  end
end
# encoding: utf-8

##
# Defer loading the Net::SSH and Net::SCP gems until the
# Backup::Storage::SCP class itself is loaded.
Backup::Dependency.load('net-ssh')
Backup::Dependency.load('net-scp')

module Backup
  module Storage
    class SCP < Base

      ##
      # Server credentials
      attr_accessor :username, :password

      ##
      # Server IP Address and SCP port
      attr_accessor :ip, :port

      ##
      # Path to store backups to
      attr_accessor :path

      ##
      # Builds the storage object. Defaults are applied before the
      # configuration block runs; a leading '~/' is then stripped
      # from the path so it is relative to the remote home directory.
      def initialize(model, storage_id = nil, &block)
        super(model, storage_id)

        @port ||= 22
        @path ||= 'backups'

        instance_eval(&block) if block_given?

        @path = path.sub(/^\~\//, '')
      end

      private

      ##
      # Opens a Net::SSH session to the remote server and yields it.
      # Net::SCP rides on top of this SSH connection for transfers.
      def connection
        Net::SSH.start(
          ip, username, :password => password, :port => port
        ) {|ssh| yield ssh }
      end

      ##
      # Uploads each file of the package to the remote server via SCP.
      def transfer!
        base = remote_path_for(@package)

        connection do |ssh|
          ssh.exec!("mkdir -p '#{ base }'")

          files_to_transfer_for(@package) do |local_file, remote_file|
            Logger.message "#{storage_name} started transferring " +
                "'#{local_file}' to '#{ip}'."

            ssh.scp.upload!(
              File.join(local_path, local_file),
              File.join(base, remote_file)
            )
          end
        end
      end

      ##
      # Removes the transferred archive file(s) from the storage location.
      # Any error raised here is rescued during Cycling and logged as a
      # warning containing the error message.
      def remove!(package)
        base = remote_path_for(package)

        # Log every pending removal up front, in a single message.
        notices = []
        transferred_files_for(package) do |local_file, remote_file|
          notices << "#{storage_name} started removing " +
              "'#{local_file}' from '#{ip}'."
        end
        Logger.message notices.join("\n")

        # Remove the whole remote directory, capturing anything the
        # remote command writes to stderr.
        stderr_data = []
        connection do |ssh|
          ssh.exec!("rm -r '#{base}'") do |ch, stream, data|
            stderr_data << data if stream == :stderr
          end
        end

        return if stderr_data.empty?
        raise Errors::Storage::SCP::SSHError,
            "Net::SSH reported the following errors:\n" +
            stderr_data.join("\n")
      end

    end
  end
end
# encoding: utf-8

##
# Defer loading the Net::SSH and Net::SFTP gems until the
# Backup::Storage::SFTP class itself is loaded.
Backup::Dependency.load('net-ssh')
Backup::Dependency.load('net-sftp')

module Backup
  module Storage
    class SFTP < Base

      ##
      # Server credentials
      attr_accessor :username, :password

      ##
      # Server IP Address and SFTP port
      attr_accessor :ip, :port

      ##
      # Path to store backups to
      attr_accessor :path

      ##
      # Builds the storage object. Defaults are applied before the
      # configuration block runs; a leading '~/' is then stripped
      # from the path so it is relative to the remote home directory.
      def initialize(model, storage_id = nil, &block)
        super(model, storage_id)

        @port ||= 22
        @path ||= 'backups'

        instance_eval(&block) if block_given?

        @path = path.sub(/^\~\//, '')
      end

      private

      ##
      # Opens a Net::SFTP session to the remote server and yields it.
      def connection
        Net::SFTP.start(
          ip, username,
          :password => password,
          :port     => port
        ) {|sftp| yield sftp }
      end

      ##
      # Uploads each file of the package to the remote server via SFTP.
      def transfer!
        base = remote_path_for(@package)

        connection do |sftp|
          create_remote_path(base, sftp)

          files_to_transfer_for(@package) do |local_file, remote_file|
            Logger.message "#{storage_name} started transferring " +
                "'#{ local_file }' to '#{ ip }'."

            sftp.upload!(
              File.join(local_path, local_file),
              File.join(base, remote_file)
            )
          end
        end
      end

      ##
      # Removes the transferred archive file(s) from the storage location,
      # then removes the now-empty remote directory. Any error raised here
      # is rescued during Cycling and logged as a warning containing the
      # error message.
      def remove!(package)
        base = remote_path_for(package)

        connection do |sftp|
          transferred_files_for(package) do |local_file, remote_file|
            Logger.message "#{storage_name} started removing " +
                "'#{ local_file }' from '#{ ip }'."

            sftp.remove!(File.join(base, remote_file))
          end

          sftp.rmdir!(base)
        end
      end

      ##
      # Creates (if needed) every directory along the remote path.
      # Net::SFTP cannot create intermediate directories in one call, so
      # the path is built up segment by segment and mkdir! is issued for
      # each prefix. mkdir! raises Net::SFTP::StatusException when a
      # directory already exists, so that exception is swallowed.
      def create_remote_path(remote_path, sftp)
        built = []
        remote_path.split('/').each do |segment|
          built << segment
          begin
            sftp.mkdir!(built.join('/'))
          rescue Net::SFTP::StatusException
            # directory already exists - nothing to do
          end
        end
      end

    end
  end
end
# encoding: utf-8

module Backup
  module Syncer
    class Base
      include Backup::CLI::Helpers
      include Backup::Configuration::Helpers

      ##
      # Path to store the synced files/directories to
      attr_accessor :path

      ##
      # Flag for mirroring the files/directories
      attr_accessor :mirror

      ##
      # Applies pre-configured defaults, then fills in the built-in
      # defaults for anything still unset.
      def initialize
        load_defaults!

        @path   ||= 'backups'
        @mirror ||= false
        @directories = []
      end

      ##
      # Syntactic sugar for the DSL: with no block, returns the
      # collected directories; with a block, evaluates it against
      # this instance so `add` calls can register directories.
      def directories(&block)
        if block_given?
          instance_eval(&block)
        else
          @directories
        end
      end

      ##
      # Appends a path to the list of directories to sync
      def add(path)
        @directories << path
      end

      private

      ##
      # Class name without the 'Backup::' prefix, used in log output
      def syncer_name
        self.class.to_s.sub('Backup::', '')
      end

    end
  end
end
# encoding: utf-8

##
# Only load the Fog gem, along with the Parallel gem, when the
# Backup::Syncer::Cloud class is loaded
Backup::Dependency.load('fog')
Backup::Dependency.load('parallel')

module Backup
  module Syncer
    module Cloud
      class Base < Syncer::Base

        ##
        # Create a Mutex to synchronize certain parts of the code
        # in order to prevent race conditions or broken STDOUT.
        MUTEX = Mutex.new

        ##
        # Concurrency setting - defaults to false, but can be set to:
        # - :threads
        # - :processes
        attr_accessor :concurrency_type

        ##
        # Concurrency level - the number of threads or processors to use.
        # Defaults to 2.
        attr_accessor :concurrency_level

        ##
        # Instantiates a new Cloud Syncer object for either
        # the Cloud::S3 or Cloud::CloudFiles Syncer.
        #
        # Pre-configured defaults specified in either
        # Configuration::Syncer::Cloud::S3 or
        # Configuration::Syncer::Cloud::CloudFiles
        # are set via a super() call to Syncer::Base.
        #
        # If not specified in the pre-configured defaults,
        # the Cloud specific defaults are set here before evaluating
        # any block provided in the user's configuration file.
        def initialize
          super

          @concurrency_type  ||= false
          @concurrency_level ||= 2
        end

        ##
        # Performs the Sync operation: each configured directory is
        # wrapped in a SyncContext and synced against the remote
        # repository (`repository_object` is supplied by the subclass).
        def perform!
          Logger.message(
            "#{ syncer_name } started the syncing process:\n" +
            "\s\sConcurrency: #{ @concurrency_type } Level: #{ @concurrency_level }"
          )

          @directories.each do |directory|
            SyncContext.new(
              File.expand_path(directory), repository_object, @path
            ).sync! @mirror, @concurrency_type, @concurrency_level
          end

          Logger.message("#{ syncer_name } Syncing Complete!")
        end

        private

        ##
        # Handles the sync of a single directory from the
        # Syncer::Base @directories array.
        class SyncContext
          attr_reader :directory, :bucket, :path, :remote_base

          ##
          # Creates a new SyncContext object which handles a single directory
          # from the Syncer::Base @directories array.
          def initialize(directory, bucket, path)
            @directory, @bucket, @path = directory, bucket, path
            # All remote keys for this directory live under path/<basename>.
            @remote_base = File.join(path, File.basename(directory))
          end

          ##
          # Performs the sync operation using the provided techniques
          # (mirroring/concurrency). Raises a ConfigurationError for any
          # concurrency_type other than false, :threads or :processes.
          def sync!(mirror = false, concurrency_type = false, concurrency_level = 2)
            block = Proc.new { |relative_path| sync_file relative_path, mirror }

            case concurrency_type
            when FalseClass
              all_file_names.each(&block)
            when :threads
              Parallel.each all_file_names,
                  :in_threads => concurrency_level, &block
            when :processes
              Parallel.each all_file_names,
                  :in_processes => concurrency_level, &block
            else
              raise Errors::Syncer::Cloud::ConfigurationError,
                  "Unknown concurrency_type setting: #{ concurrency_type.inspect }"
            end
          end

          private

          ##
          # Gathers the relative paths of the local files, merges them
          # (as a set union) with the relative paths of the remote files,
          # and returns the result sorted in alphabetical order.
          def all_file_names
            @all_file_names ||= (local_files.keys | remote_files.keys).sort
          end

          ##
          # Returns a Hash of local files, validated to ensure the path
          # does not contain invalid UTF-8 byte sequences.
          # The keys are the filesystem paths, relative to @directory.
          # The values are the LocalFile objects for that given file.
          # (LocalFile.new returns nil for invalid paths, hence the compact.)
          def local_files
            @local_files ||= begin
              hash = {}
              local_hashes.lines.map do |line|
                LocalFile.new(@directory, line)
              end.compact.each do |file|
                hash.merge!(file.relative_path => file)
              end
              hash
            end
          end

          ##
          # Returns a String of file paths and their md5 hashes,
          # one "MD5(<path>)= <hash>" line per file, produced by
          # shelling out to find/openssl.
          def local_hashes
            Logger.message("\s\sGenerating checksums for '#{ @directory }'")
            `find '#{ @directory }' -print0 | xargs -0 openssl md5 2> /dev/null`
          end

          ##
          # Returns a Hash of remote files
          # The keys are the remote paths, relative to @remote_base
          # The values are the Fog file objects for that given file
          def remote_files
            @remote_files ||= begin
              hash = {}
              @bucket.files.all(:prefix => @remote_base).each do |file|
                hash.merge!(file.key.sub("#{ @remote_base }/", '') => file)
              end
              hash
            end
          end

          ##
          # Performs a sync operation on a file. When mirroring is enabled
          # and a local file has been removed since the last sync, it will also
          # remove it from the remote location. It will not upload files that
          # have not changed since the last sync. Checks are done using an md5
          # hash (compared against the remote file's etag). If a file has
          # changed, or has been newly added, the file will be
          # transferred/overwritten. Log output is serialized through MUTEX
          # since this may run from multiple threads/processes.
          def sync_file(relative_path, mirror)
            local_file = local_files[relative_path]
            remote_file = remote_files[relative_path]
            remote_path = File.join(@remote_base, relative_path)

            if local_file && File.exist?(local_file.path)
              unless remote_file && remote_file.etag == local_file.md5
                MUTEX.synchronize {
                  Logger.message("\s\s[transferring] '#{ remote_path }'")
                }
                File.open(local_file.path, 'r') do |file|
                  @bucket.files.create(
                    :key => remote_path,
                    :body => file
                  )
                end
              else
                MUTEX.synchronize {
                  Logger.message("\s\s[skipping] '#{ remote_path }'")
                }
              end
            elsif remote_file
              if mirror
                MUTEX.synchronize {
                  Logger.message("\s\s[removing] '#{ remote_path }'")
                }
                remote_file.destroy
              else
                MUTEX.synchronize {
                  Logger.message("\s\s[leaving] '#{ remote_path }'")
                }
              end
            end
          end
        end # class SyncContext

        ##
        # Wraps one local file: its absolute path, path relative to the
        # synced directory, and md5 checksum parsed from `openssl md5`
        # output.
        class LocalFile
          attr_reader :path, :relative_path, :md5

          ##
          # Return a new LocalFile object if it's valid.
          # Otherwise, log a warning and return nil.
          def self.new(*args)
            local_file = super(*args)
            if local_file.invalid?
              Logger.warn(
                "\s\s[skipping] #{ local_file.path }\n" +
                "\s\sPath Contains Invalid UTF-8 byte sequences"
              )
              return nil
            end
            local_file
          end

          ##
          # Creates a new LocalFile object using the given directory and line
          # from the md5 hash checkup. This object figures out the path,
          # relative_path and md5 hash for the file.
          # NOTE(review): the slice offsets assume the fixed
          # "MD5(<path>)= <32-hex>" layout of `openssl md5` output —
          # 4 leading chars before the path, 32 trailing hash chars.
          def initialize(directory, line)
            @invalid = false
            @directory = sanitize(directory)
            line = sanitize(line).chomp
            @path = line.slice(4..-36)
            @md5 = line.slice(-32..-1)
            @relative_path = @path.sub(@directory + '/', '')
          end

          # True when sanitize() had to replace invalid UTF-8 bytes.
          def invalid?
            @invalid
          end

          private

          ##
          # Sanitize string and replace any invalid UTF-8 characters.
          # If replacements are made, flag the LocalFile object as invalid.
          def sanitize(str)
            str.each_char.map do |char|
              begin
                char if !!char.unpack('U')
              rescue
                @invalid = true
                "\xEF\xBF\xBD" # => "\uFFFD"
              end
            end.join
          end

        end # class LocalFile

      end # class Base < Syncer::Base
    end # module Cloud
  end
end