backup 3.0.21 → 3.0.22

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (46)
  1. data/Gemfile.lock +3 -1
  2. data/README.md +4 -3
  3. data/lib/backup.rb +8 -4
  4. data/lib/backup/config.rb +1 -1
  5. data/lib/backup/configuration/syncer/cloud.rb +23 -0
  6. data/lib/backup/configuration/syncer/cloud_files.rb +30 -0
  7. data/lib/backup/configuration/syncer/s3.rb +5 -11
  8. data/lib/backup/dependency.rb +6 -0
  9. data/lib/backup/notifier/twitter.rb +1 -1
  10. data/lib/backup/syncer/base.rb +25 -0
  11. data/lib/backup/syncer/cloud.rb +187 -0
  12. data/lib/backup/syncer/cloud_files.rb +56 -0
  13. data/lib/backup/syncer/rsync/base.rb +0 -26
  14. data/lib/backup/syncer/s3.rb +21 -102
  15. data/lib/backup/version.rb +1 -1
  16. data/spec/cli/utility_spec.rb +2 -2
  17. data/spec/configuration/syncer/cloud_files_spec.rb +44 -0
  18. data/spec/configuration/syncer/s3_spec.rb +0 -4
  19. data/spec/notifier/twitter_spec.rb +3 -3
  20. data/spec/syncer/cloud_files_spec.rb +192 -0
  21. data/spec/syncer/s3_spec.rb +155 -191
  22. data/templates/cli/utility/archive +20 -8
  23. data/templates/cli/utility/database/mongodb +3 -3
  24. data/templates/cli/utility/database/mysql +4 -4
  25. data/templates/cli/utility/database/postgresql +4 -4
  26. data/templates/cli/utility/database/redis +1 -1
  27. data/templates/cli/utility/encryptor/openssl +2 -2
  28. data/templates/cli/utility/notifier/campfire +3 -3
  29. data/templates/cli/utility/notifier/hipchat +6 -6
  30. data/templates/cli/utility/notifier/mail +7 -7
  31. data/templates/cli/utility/notifier/presently +4 -4
  32. data/templates/cli/utility/notifier/prowl +2 -2
  33. data/templates/cli/utility/notifier/twitter +4 -4
  34. data/templates/cli/utility/storage/cloud_files +22 -0
  35. data/templates/cli/utility/storage/dropbox +15 -10
  36. data/templates/cli/utility/storage/ftp +4 -4
  37. data/templates/cli/utility/storage/local +1 -1
  38. data/templates/cli/utility/storage/ninefold +3 -3
  39. data/templates/cli/utility/storage/rsync +4 -4
  40. data/templates/cli/utility/storage/s3 +6 -6
  41. data/templates/cli/utility/storage/scp +4 -4
  42. data/templates/cli/utility/storage/sftp +4 -4
  43. data/templates/cli/utility/syncer/cloud_files +48 -0
  44. data/templates/cli/utility/syncer/s3 +31 -1
  45. metadata +69 -39
  46. data/templates/cli/utility/storage/cloudfiles +0 -12
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    backup (3.0.21)
+    backup (3.0.22)
       POpen4 (~> 0.1.4)
       thor (~> 0.14.6)
 
@@ -71,6 +71,7 @@ GEM
     net-ssh (2.1.4)
     nokogiri (1.5.0)
     open4 (1.3.0)
+    parallel (0.5.12)
     polyglot (0.3.3)
     prowler (1.3.1)
     rack (1.4.0)
@@ -120,6 +121,7 @@ DEPENDENCIES
   net-scp (~> 1.0.4)
   net-sftp (~> 2.0.5)
   net-ssh (~> 2.1.4)
+  parallel (~> 0.5.12)
   prowler (>= 1.3.1)
   rb-fsevent
   rb-inotify
data/README.md CHANGED
@@ -18,7 +18,7 @@ Drop me a message for any questions, suggestions, requests, bugs or submit them
 Installation
 ------------
 
-To get the latest stable version (3.0.20)
+To get the latest stable version
 
     gem install backup
 
@@ -94,7 +94,8 @@ Syncers
 -------
 
 - RSync (Push, Pull and Local)
-- Amazon Simple Storage Service (S3)
+- Amazon S3
+- Rackspace Cloud Files
 
 [Syncer Wiki Page](https://github.com/meskyanichi/backup/wiki/Syncers)
 
@@ -238,7 +239,7 @@ First, it will dump the two Databases (MySQL and MongoDB). The MySQL dump will b
 `sample_backup/databases/MySQL/my_sample_mysql_db.sql.gz`. The MongoDB dump will be dumped into
 `sample_backup/databases/MongoDB/`, which will then be packaged into `sample_backup/databases/MongoDB-#####.tar.gz`
 (`#####` will be a simple unique identifier, in case multiple dumps are performed.)
-Next, it will create two _tar_ Archives (user_avatars and logs). Each will be piped through the Gzip Compressor into
+Next, it will create two _tar_ Archives (user\_avatars and logs). Each will be piped through the Gzip Compressor into
 `sample_backup/archives/` as `user_archives.tar.gz` and `logs.tar.gz`.
 Finally, the `sample_backup` directory will be packaged into an uncompressed _tar_ archive, which will be piped through
 the OpenSSL Encryptor to encrypt this final package into `YYYY-MM-DD-hh-mm-ss.sample_backup.tar.enc`. This final
data/lib/backup.rb CHANGED
@@ -78,8 +78,10 @@ module Backup
   ##
   # Autoload Backup syncer files
   module Syncer
-    autoload :Base, File.join(SYNCER_PATH, 'base')
-    autoload :S3,   File.join(SYNCER_PATH, 's3')
+    autoload :Base,       File.join(SYNCER_PATH, 'base')
+    autoload :Cloud,      File.join(SYNCER_PATH, 'cloud')
+    autoload :CloudFiles, File.join(SYNCER_PATH, 'cloud_files')
+    autoload :S3,         File.join(SYNCER_PATH, 's3')
     module RSync
       autoload :Base,  File.join(SYNCER_PATH, 'rsync', 'base')
       autoload :Local, File.join(SYNCER_PATH, 'rsync', 'local')
@@ -174,8 +176,10 @@ module Backup
     end
 
     module Syncer
-      autoload :Base, File.join(CONFIGURATION_PATH, 'syncer', 'base')
-      autoload :S3,   File.join(CONFIGURATION_PATH, 'syncer', 's3')
+      autoload :Base,       File.join(CONFIGURATION_PATH, 'syncer', 'base')
+      autoload :Cloud,      File.join(CONFIGURATION_PATH, 'syncer', 'cloud')
+      autoload :CloudFiles, File.join(CONFIGURATION_PATH, 'syncer', 'cloud_files')
+      autoload :S3,         File.join(CONFIGURATION_PATH, 'syncer', 's3')
       module RSync
         autoload :Base,  File.join(CONFIGURATION_PATH, 'syncer', 'rsync', 'base')
         autoload :Local, File.join(CONFIGURATION_PATH, 'syncer', 'rsync', 'local')
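Since these are `autoload` registrations, the new Cloud/CloudFiles files are only required when their constants are first referenced, so the extra dependencies they pull in via `Backup::Dependency.load` (Fog and Parallel, see syncer/cloud.rb below) are loaded only when a configuration actually uses a cloud syncer.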
data/lib/backup/config.rb CHANGED
@@ -111,7 +111,7 @@ module Backup
       # Encryptors
       ['OpenSSL', 'GPG'],
       # Syncers
-      ['S3', { 'RSync' => ['Push', 'Pull', 'Local'] }],
+      ['Rackspace', 'S3', { 'RSync' => ['Push', 'Pull', 'Local'] }],
       # Notifiers
       ['Mail', 'Twitter', 'Campfire', 'Presently', 'Prowl', 'Hipchat']
     ]
data/lib/backup/configuration/syncer/cloud.rb ADDED
@@ -0,0 +1,23 @@
+# encoding: utf-8
+
+module Backup
+  module Configuration
+    module Syncer
+      class Cloud < Base
+        class << self
+          ##
+          # Amazon S3 bucket name and path to sync to
+          attr_accessor :bucket, :path
+
+          ##
+          # Directories to sync
+          attr_accessor :directories
+
+          ##
+          # Flag to enable mirroring
+          attr_accessor :mirror
+        end
+      end
+    end
+  end
+end
data/lib/backup/configuration/syncer/cloud_files.rb ADDED
@@ -0,0 +1,30 @@
+# encoding: utf-8
+
+module Backup
+  module Configuration
+    module Syncer
+      class CloudFiles < Cloud
+        class << self
+          ##
+          # Rackspace CloudFiles Credentials
+          attr_accessor :api_key, :username
+
+          ##
+          # Rackspace CloudFiles Container
+          attr_accessor :container
+
+          ##
+          # Rackspace AuthURL allows you to connect to a different Rackspace datacenter
+          # - https://auth.api.rackspacecloud.com (Default: US)
+          # - https://lon.auth.api.rackspacecloud.com (UK)
+          attr_accessor :auth_url
+
+          ##
+          # Improve performance and avoid data transfer costs by setting @servicenet to `true`
+          # This only works if Backup runs on a Rackspace server
+          attr_accessor :servicenet
+        end
+      end
+    end
+  end
+end
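These configuration classes feed the syncers' `load_defaults!`, so shared Rackspace settings can be declared once. A minimal sketch (assuming the gem's class-level `defaults` block as used for other Backup components; all values are placeholders):

    Backup::Configuration::Syncer::CloudFiles.defaults do |cf|
      cf.username   = 'my_username'                              # placeholder credentials
      cf.api_key    = 'my_api_key'
      cf.auth_url   = 'https://lon.auth.api.rackspacecloud.com'  # UK endpoint
      cf.servicenet = true                                       # only helps on a Rackspace server
    end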
data/lib/backup/configuration/syncer/s3.rb CHANGED
@@ -3,25 +3,19 @@
 module Backup
   module Configuration
     module Syncer
-      class S3 < Base
+      class S3 < Cloud
        class << self
-
          ##
          # Amazon Simple Storage Service (S3) Credentials
          attr_accessor :access_key_id, :secret_access_key
 
          ##
-         # Amazon S3 bucket name and path to sync to
-         attr_accessor :bucket, :path
-
-         ##
-         # Flag to enable mirroring
-         attr_accessor :mirror
+         # The S3 bucket to store files to
+         attr_accessor :bucket
 
          ##
-         # Additional options for the s3sync cli
-         attr_accessor :additional_options
-
+         # The AWS region of the specified S3 bucket
+         attr_accessor :region
        end
      end
    end
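With `S3` now inheriting from `Cloud`, the shared `path`, `directories` and `mirror` attributes come from the parent, and the dropped `additional_options` reflects the move away from the `s3sync` CLI toward Fog; `region` is the only new attribute. A sketch of global defaults under the new layout (assuming the same class-level `defaults` block; credentials are placeholders):

    Backup::Configuration::Syncer::S3.defaults do |s3|
      s3.access_key_id     = 'my_access_key_id'      # placeholder credentials
      s3.secret_access_key = 'my_secret_access_key'
      s3.region            = 'us-east-1'             # AWS region of the bucket
    end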
data/lib/backup/dependency.rb CHANGED
@@ -79,6 +79,12 @@ module Backup
        :require => 'hipchat',
        :version => '~> 0.4.1',
        :for     => 'Sending notifications to Hipchat'
+      },
+
+      'parallel' => {
+        :require => 'parallel',
+        :version => '~> 0.5.12',
+        :for     => 'Adding concurrency to Cloud-based syncers.'
      }
    }
  end
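Registering `parallel` here lets `Backup::Dependency.load('parallel')` resolve the pinned version at runtime rather than making it a hard gem dependency, so only users of the cloud syncers need it installed.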
data/lib/backup/notifier/twitter.rb CHANGED
@@ -49,7 +49,7 @@ module Backup
       when :warning then 'Warning'
       when :failure then 'Failure'
     end
-    message = "[Backup::%s] #{@model.label} (#{@model.trigger})" % name
+    message = "[Backup::%s] #{@model.label} (#{@model.trigger}) (@ #{@model.time})" % name
     send_message(message)
   end
 
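Assuming `@model.time` carries Backup's usual run timestamp, a successful run of a model labeled `Sample Backup` with trigger `sample_backup` would now tweet something like `[Backup::Success] Sample Backup (sample_backup) (@ 2012.01.15.04.30.00)`; the time suffix is the only addition.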
data/lib/backup/syncer/base.rb CHANGED
@@ -6,6 +6,31 @@ module Backup
     include Backup::CLI::Helpers
     include Backup::Configuration::Helpers
 
+    ##
+    # Directories to sync
+    attr_accessor :directories
+
+    ##
+    # Path to store the synced files/directories to
+    attr_accessor :path
+
+    ##
+    # Flag for mirroring the files/directories
+    attr_accessor :mirror
+
+    ##
+    # Syntactic sugar for the DSL for adding directories
+    def directories(&block)
+      return @directories unless block_given?
+      instance_eval(&block)
+    end
+
+    ##
+    # Adds a path to the @directories array
+    def add(path)
+      @directories << path
+    end
+
     private
 
     def syncer_name
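Here `directories` doubles as reader and DSL entry point: called without a block it returns `@directories`, while a block is `instance_eval`'d against the syncer so that `add` pushes paths onto the array. A minimal usage sketch (the `sync_with` wrapper and paths are illustrative, following the bundled syncer templates):

    sync_with S3 do |s3|
      # credentials, bucket, path, mirror ...
      s3.directories do
        add '/var/apps/my_app/public/uploads'   # illustrative paths
        add '/var/apps/my_app/log'
      end
    end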
data/lib/backup/syncer/cloud.rb ADDED
@@ -0,0 +1,187 @@
+# encoding: utf-8
+
+##
+# Only load the Fog gem, along with the Parallel gem, when the Backup::Syncer::Cloud class is loaded
+Backup::Dependency.load('fog')
+Backup::Dependency.load('parallel')
+
+module Backup
+  module Syncer
+    class Cloud < Base
+
+      ##
+      # Create a Mutex to synchronize certain parts of the code
+      # in order to prevent race conditions or broken STDOUT.
+      MUTEX = Mutex.new
+
+      ##
+      # Concurrency setting - defaults to false, but can be set to:
+      # - :threads
+      # - :processes
+      attr_accessor :concurrency_type
+
+      ##
+      # Concurrency level - the number of threads or processes to use. Defaults to 2.
+      attr_accessor :concurrency_level
+
+      ##
+      # Instantiates a new Cloud Syncer object and sets the default
+      # configuration specified in the Backup::Configuration::Syncer::S3. Then
+      # it sets the object defaults if particular properties weren't set.
+      # Finally it'll evaluate the user's configuration file and overwrite
+      # anything that's been defined.
+      def initialize(&block)
+        load_defaults!
+
+        @path              ||= 'backups'
+        @directories       ||= Array.new
+        @mirror            ||= false
+        @concurrency_type    = false
+        @concurrency_level   = 2
+
+        instance_eval(&block) if block_given?
+
+        @path = path.sub(/^\//, '')
+      end
+
+      ##
+      # Performs the Sync operation
+      def perform!
+        Logger.message("#{ self.class } started the syncing process:")
+
+        directories.each do |directory|
+          SyncContext.new(directory, repository_object, path).
+            sync! mirror, concurrency_type, concurrency_level
+        end
+      end
+
+      private
+
+      class SyncContext
+        attr_reader :directory, :bucket, :path
+
+        ##
+        # Creates a new SyncContext object which handles a single directory
+        # from the Syncer::Base @directories array.
+        def initialize(directory, bucket, path)
+          @directory, @bucket, @path = directory, bucket, path
+        end
+
+        ##
+        # Performs the sync operation using the provided techniques (mirroring/concurrency).
+        def sync!(mirror = false, concurrency_type = false, concurrency_level = 2)
+          block = Proc.new { |relative_path| sync_file relative_path, mirror }
+
+          case concurrency_type
+          when FalseClass
+            all_file_names.each &block
+          when :threads
+            Parallel.each all_file_names, :in_threads => concurrency_level, &block
+          when :processes
+            Parallel.each all_file_names, :in_processes => concurrency_level, &block
+          else
+            raise "Unknown concurrency_type setting: #{concurrency_type.inspect}"
+          end
+        end
+
+        private
+
+        ##
+        # Gathers all the remote and local file names and merges them together, removing
+        # duplicate keys if any, and sorts them in alphabetical order.
+        def all_file_names
+          @all_file_names ||= (local_files.keys | remote_files.keys).sort
+        end
+
+        ##
+        # Returns a Hash of local files (the keys are the filesystem paths,
+        # the values are the LocalFile objects for that given file)
+        def local_files
+          @local_files ||= begin
+            local_hashes.split("\n").collect { |line|
+              LocalFile.new directory, line
+            }.inject({}) { |hash, file|
+              hash[file.relative_path] = file
+              hash
+            }
+          end
+        end
+
+        ##
+        # Returns a String of file paths and their md5 hashes.
+        def local_hashes
+          MUTEX.synchronize { Logger.message("\s\sGenerating checksums for #{ directory }") }
+          `find #{directory} -print0 | xargs -0 openssl md5 2> /dev/null`
+        end
+
+        ##
+        # Returns a Hash of remote files (the keys are the remote paths,
+        # the values are the Fog file objects for that given file)
+        def remote_files
+          @remote_files ||= bucket.files.to_a.select { |file|
+            file.key[%r{^#{remote_base}/}]
+          }.inject({}) { |hash, file|
+            key = file.key.gsub(/^#{remote_base}\//,
+              "#{directory.split('/').last}/")
+            hash[key] = file
+            hash
+          }
+        end
+
+        ##
+        # Creates and returns a String that represents the base remote storage path
+        def remote_base
+          @remote_base ||= [path, directory.split('/').last].select { |part|
+            part && part.strip.length > 0
+          }.join('/')
+        end
+
+        ##
+        # Performs a sync operation on a file. When mirroring is enabled
+        # and a local file has been removed since the last sync, it will also
+        # remove it from the remote location. It will not upload files that
+        # have not changed since the last sync. Checks are done using an md5 hash.
+        # If a file has changed, or has been newly added, the file will be transferred/overwritten.
+        def sync_file(relative_path, mirror)
+          local_file  = local_files[relative_path]
+          remote_file = remote_files[relative_path]
+
+          if local_file && File.exist?(local_file.path)
+            unless remote_file && remote_file.etag == local_file.md5
+              MUTEX.synchronize { Logger.message("\s\s[transferring] #{relative_path}") }
+              bucket.files.create(
+                :key  => "#{path}/#{relative_path}".gsub(/^\//, ''),
+                :body => File.open(local_file.path)
+              )
+            else
+              MUTEX.synchronize { Logger.message("\s\s[skipping] #{relative_path}") }
+            end
+          elsif remote_file && mirror
+            MUTEX.synchronize { Logger.message("\s\s[removing] #{relative_path}") }
+            remote_file.destroy
+          end
+        end
+      end
+
+      class LocalFile
+        attr_reader :directory, :path, :md5
+
+        ##
+        # Creates a new LocalFile object using the given directory and line
+        # from the md5 hash checkup. This object figures out the path, relative_path and md5 hash
+        # for the file.
+        def initialize(directory, line)
+          @directory = directory
+          @path, @md5 = *line.chomp.match(/^MD5\(([^\)]+)\)= (\w+)$/).captures
+        end
+
+        ##
+        # Returns the relative path to the file.
+        def relative_path
+          @relative_path ||= path.gsub %r{^#{directory}},
+            directory.split('/').last
+        end
+      end
+    end
+  end
+end
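`SyncContext#sync!` dispatches on `concurrency_type`: `false` walks `all_file_names` serially, while `:threads` and `:processes` hand the same block to the Parallel gem with `concurrency_level` workers; the shared `MUTEX` keeps log lines from interleaving when workers run side by side. A hedged configuration sketch (values are illustrative):

    sync_with S3 do |s3|
      # credentials, bucket, path ...
      s3.concurrency_type  = :threads   # or :processes, or false for serial (the default)
      s3.concurrency_level = 50         # number of threads/processes to use
    end

Since the uploads are IO-bound, `:threads` is usually the lighter choice; `:processes` forks separate workers instead.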
data/lib/backup/syncer/cloud_files.rb ADDED
@@ -0,0 +1,56 @@
+# encoding: utf-8
+
+module Backup
+  module Syncer
+    class CloudFiles < Cloud
+
+      ##
+      # Rackspace CloudFiles Credentials
+      attr_accessor :api_key, :username
+
+      ##
+      # Rackspace CloudFiles Container
+      attr_accessor :container
+
+      ##
+      # Rackspace AuthURL allows you to connect to a different Rackspace datacenter
+      # - https://auth.api.rackspacecloud.com (Default: US)
+      # - https://lon.auth.api.rackspacecloud.com (UK)
+      attr_accessor :auth_url
+
+      ##
+      # Improve performance and avoid data transfer costs by setting @servicenet to `true`
+      # This only works if Backup runs on a Rackspace server
+      attr_accessor :servicenet
+
+      private
+
+      ##
+      # Establishes and creates a new Fog storage object for CloudFiles.
+      def connection
+        @connection ||= Fog::Storage.new(
+          :provider             => provider,
+          :rackspace_username   => username,
+          :rackspace_api_key    => api_key,
+          :rackspace_auth_url   => auth_url,
+          :rackspace_servicenet => servicenet
+        )
+      end
+
+      ##
+      # Creates a new @repository_object (container). Fetches it from Cloud Files
+      # if it already exists, otherwise it will create it first and then use that instead.
+      def repository_object
+        @repository_object ||= connection.directories.get(container) ||
+          connection.directories.create(:key => container)
+      end
+
+      ##
+      # This is the provider that Fog uses for the Cloud Files
+      def provider
+        "Rackspace"
+      end
+
+    end
+  end
+end
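Taken together with the new `cloud_files` syncer template (file 43 in the list above), a full Cloud Files syncer definition would look roughly like this sketch (all values are placeholders):

    sync_with CloudFiles do |cf|
      cf.username   = 'my_username'                          # placeholder credentials
      cf.api_key    = 'my_api_key'
      cf.container  = 'my_container'
      cf.auth_url   = 'https://auth.api.rackspacecloud.com'  # US endpoint (the default)
      cf.servicenet = false
      cf.path       = 'backups'
      cf.mirror     = true

      cf.directories do
        add '/var/apps/my_app/public/uploads'                # placeholder path
      end
    end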