backup 3.6.0 → 3.7.0

Files changed (65)
  1. checksums.yaml +4 -4
  2. data/README.md +2 -0
  3. data/lib/backup.rb +14 -4
  4. data/lib/backup/archive.rb +3 -2
  5. data/lib/backup/cleaner.rb +4 -2
  6. data/lib/backup/cli.rb +7 -5
  7. data/lib/backup/cloud_io/base.rb +41 -0
  8. data/lib/backup/cloud_io/cloud_files.rb +296 -0
  9. data/lib/backup/cloud_io/s3.rb +252 -0
  10. data/lib/backup/compressor/gzip.rb +2 -1
  11. data/lib/backup/config.rb +13 -5
  12. data/lib/backup/configuration.rb +1 -1
  13. data/lib/backup/configuration/helpers.rb +3 -1
  14. data/lib/backup/database/base.rb +3 -1
  15. data/lib/backup/database/mongodb.rb +2 -2
  16. data/lib/backup/database/mysql.rb +2 -2
  17. data/lib/backup/database/postgresql.rb +12 -2
  18. data/lib/backup/database/redis.rb +3 -2
  19. data/lib/backup/encryptor/gpg.rb +8 -10
  20. data/lib/backup/errors.rb +39 -70
  21. data/lib/backup/logger.rb +7 -2
  22. data/lib/backup/logger/fog_adapter.rb +30 -0
  23. data/lib/backup/model.rb +32 -14
  24. data/lib/backup/notifier/base.rb +4 -3
  25. data/lib/backup/notifier/campfire.rb +0 -1
  26. data/lib/backup/notifier/http_post.rb +122 -0
  27. data/lib/backup/notifier/mail.rb +38 -0
  28. data/lib/backup/notifier/nagios.rb +69 -0
  29. data/lib/backup/notifier/prowl.rb +0 -1
  30. data/lib/backup/notifier/pushover.rb +0 -1
  31. data/lib/backup/package.rb +5 -0
  32. data/lib/backup/packager.rb +3 -2
  33. data/lib/backup/pipeline.rb +4 -2
  34. data/lib/backup/storage/base.rb +2 -1
  35. data/lib/backup/storage/cloud_files.rb +151 -0
  36. data/lib/backup/storage/cycler.rb +4 -2
  37. data/lib/backup/storage/dropbox.rb +20 -16
  38. data/lib/backup/storage/ftp.rb +1 -2
  39. data/lib/backup/storage/local.rb +3 -3
  40. data/lib/backup/storage/ninefold.rb +3 -4
  41. data/lib/backup/storage/rsync.rb +1 -2
  42. data/lib/backup/storage/s3.rb +49 -158
  43. data/lib/backup/storage/scp.rb +3 -4
  44. data/lib/backup/storage/sftp.rb +1 -2
  45. data/lib/backup/syncer/base.rb +0 -1
  46. data/lib/backup/syncer/cloud/base.rb +129 -208
  47. data/lib/backup/syncer/cloud/cloud_files.rb +56 -41
  48. data/lib/backup/syncer/cloud/local_file.rb +93 -0
  49. data/lib/backup/syncer/cloud/s3.rb +78 -31
  50. data/lib/backup/syncer/rsync/base.rb +7 -0
  51. data/lib/backup/syncer/rsync/local.rb +0 -5
  52. data/lib/backup/syncer/rsync/push.rb +1 -2
  53. data/lib/backup/utilities.rb +18 -15
  54. data/lib/backup/version.rb +1 -1
  55. data/templates/cli/notifier/http_post +35 -0
  56. data/templates/cli/notifier/nagios +13 -0
  57. data/templates/cli/storage/cloud_files +8 -17
  58. data/templates/cli/storage/s3 +3 -10
  59. data/templates/cli/syncer/cloud_files +3 -31
  60. data/templates/cli/syncer/s3 +3 -27
  61. data/templates/notifier/mail/failure.erb +6 -1
  62. data/templates/notifier/mail/success.erb +6 -1
  63. data/templates/notifier/mail/warning.erb +6 -1
  64. metadata +37 -42
  65. data/lib/backup/storage/cloudfiles.rb +0 -68
data/lib/backup/cloud_io/s3.rb
@@ -0,0 +1,252 @@
+# encoding: utf-8
+require 'backup/cloud_io/base'
+require 'fog'
+require 'digest/md5'
+require 'base64'
+require 'stringio'
+
+module Backup
+  module CloudIO
+    class S3 < Base
+      class Error < Backup::Error; end
+
+      MAX_FILE_SIZE      = 1024**3 * 5 # 5 GiB
+      MAX_MULTIPART_SIZE = 1024**4 * 5 # 5 TiB
+
+      attr_reader :access_key_id, :secret_access_key, :region, :bucket,
+                  :chunk_size, :encryption, :storage_class
+
+      def initialize(options = {})
+        super
+
+        @access_key_id     = options[:access_key_id]
+        @secret_access_key = options[:secret_access_key]
+        @region            = options[:region]
+        @bucket            = options[:bucket]
+        @chunk_size        = options[:chunk_size]
+        @encryption        = options[:encryption]
+        @storage_class     = options[:storage_class]
+      end
+
+      # The Syncer may call this method in multiple threads.
+      # However, #objects is always called prior to multithreading.
+      def upload(src, dest)
+        file_size = File.size(src)
+        chunk_bytes = chunk_size * 1024**2
+        if chunk_bytes > 0 && file_size > chunk_bytes
+          raise FileSizeError, <<-EOS if file_size > MAX_MULTIPART_SIZE
+            File Too Large
+            File: #{ src }
+            Size: #{ file_size }
+            Max Multipart Upload Size is #{ MAX_MULTIPART_SIZE } (5 TiB)
+          EOS
+
+          chunk_bytes = adjusted_chunk_bytes(chunk_bytes, file_size)
+          upload_id = initiate_multipart(dest)
+          parts = upload_parts(src, dest, upload_id, chunk_bytes, file_size)
+          complete_multipart(dest, upload_id, parts)
+        else
+          raise FileSizeError, <<-EOS if file_size > MAX_FILE_SIZE
+            File Too Large
+            File: #{ src }
+            Size: #{ file_size }
+            Max File Size is #{ MAX_FILE_SIZE } (5 GiB)
+          EOS
+
+          put_object(src, dest)
+        end
+      end
+
+      # Returns all objects in the bucket with the given prefix.
+      #
+      # - #get_bucket returns a max of 1000 objects per request.
+      # - Returns objects in alphabetical order.
+      # - If marker is given, only objects after the marker are in the response.
+      def objects(prefix)
+        objects = []
+        resp = nil
+        prefix = prefix.chomp('/')
+        opts = { :prefix => prefix + '/' }
+
+        while resp.nil? || resp.body['IsTruncated']
+          opts.merge!(:marker => objects.last.key) unless objects.empty?
+          with_retries("GET '#{ bucket }/#{ prefix }/*'") do
+            resp = connection.get_bucket(bucket, opts)
+          end
+          resp.body['Contents'].each do |obj_data|
+            objects << Object.new(self, obj_data)
+          end
+        end
+
+        objects
+      end
+
+      # Used by Object to fetch metadata if needed.
+      def head_object(object)
+        resp = nil
+        with_retries("HEAD '#{ bucket }/#{ object.key }'") do
+          resp = connection.head_object(bucket, object.key)
+        end
+        resp
+      end
+
+      # Delete object(s) from the bucket.
+      #
+      # - Called by the Storage (with objects) and the Syncer (with keys)
+      # - Deletes 1000 objects per request.
+      # - Missing objects will be ignored.
+      def delete(objects_or_keys)
+        keys = Array(objects_or_keys).dup
+        keys.map!(&:key) if keys.first.is_a?(Object)
+
+        opts = { :quiet => true } # only report Errors in DeleteResult
+        until keys.empty?
+          _keys = keys.slice!(0, 1000)
+          with_retries('DELETE Multiple Objects') do
+            resp = connection.delete_multiple_objects(bucket, _keys, opts)
+            unless resp.body['DeleteResult'].empty?
+              errors = resp.body['DeleteResult'].map do |result|
+                error = result['Error']
+                "Failed to delete: #{ error['Key'] }\n" +
+                "Reason: #{ error['Code'] }: #{ error['Message'] }"
+              end.join("\n")
+              raise Error, "The server returned the following:\n#{ errors }"
+            end
+          end
+        end
+      end
+
+      private
+
+      def connection
+        @connection ||= begin
+          conn = Fog::Storage.new(
+            :provider              => 'AWS',
+            :aws_access_key_id     => access_key_id,
+            :aws_secret_access_key => secret_access_key,
+            :region                => region
+          )
+          conn.sync_clock
+          conn
+        end
+      end
+
+      def put_object(src, dest)
+        md5 = Base64.encode64(Digest::MD5.file(src).digest).chomp
+        options = headers.merge('Content-MD5' => md5)
+        with_retries("PUT '#{ bucket }/#{ dest }'") do
+          File.open(src, 'r') do |file|
+            connection.put_object(bucket, dest, file, options)
+          end
+        end
+      end
+
+      def initiate_multipart(dest)
+        Logger.info "\s\sInitiate Multipart '#{ bucket }/#{ dest }'"
+
+        resp = nil
+        with_retries("POST '#{ bucket }/#{ dest }' (Initiate)") do
+          resp = connection.initiate_multipart_upload(bucket, dest, headers)
+        end
+        resp.body['UploadId']
+      end
+
+      # Each part's MD5 is sent to verify the transfer.
+      # AWS will concatenate all parts into a single object
+      # once the multipart upload is completed.
+      def upload_parts(src, dest, upload_id, chunk_bytes, file_size)
+        total_parts = (file_size / chunk_bytes.to_f).ceil
+        progress = (0.1..0.9).step(0.1).map {|n| (total_parts * n).floor }
+        Logger.info "\s\sUploading #{ total_parts } Parts..."
+
+        parts = []
+        File.open(src, 'r') do |file|
+          part_number = 0
+          while data = file.read(chunk_bytes)
+            part_number += 1
+            md5 = Base64.encode64(Digest::MD5.digest(data)).chomp
+
+            with_retries("PUT '#{ bucket }/#{ dest }' Part ##{ part_number }") do
+              resp = connection.upload_part(
+                bucket, dest, upload_id, part_number, StringIO.new(data),
+                { 'Content-MD5' => md5 }
+              )
+              parts << resp.headers['ETag']
+            end
+
+            if i = progress.rindex(part_number)
+              Logger.info "\s\s...#{ i + 1 }0% Complete..."
+            end
+          end
+        end
+        parts
+      end
+
+      def complete_multipart(dest, upload_id, parts)
+        Logger.info "\s\sComplete Multipart '#{ bucket }/#{ dest }'"
+
+        with_retries("POST '#{ bucket }/#{ dest }' (Complete)") do
+          resp = connection.complete_multipart_upload(bucket, dest, upload_id, parts)
+          raise Error, <<-EOS if resp.body['Code']
+            The server returned the following error:
+            #{ resp.body['Code'] }: #{ resp.body['Message'] }
+          EOS
+        end
+      end
+
+      def headers
+        headers = {}
+
+        enc = encryption.to_s.upcase
+        headers.merge!(
+          { 'x-amz-server-side-encryption' => enc }
+        ) unless enc.empty?
+
+        sc = storage_class.to_s.upcase
+        headers.merge!(
+          { 'x-amz-storage-class' => sc }
+        ) unless sc.empty? || sc == 'STANDARD'
+
+        headers
+      end
+
+      def adjusted_chunk_bytes(chunk_bytes, file_size)
+        return chunk_bytes if file_size / chunk_bytes.to_f <= 10_000
+
+        mb = orig_mb = chunk_bytes / 1024**2
+        mb += 1 until file_size / (1024**2 * mb).to_f <= 10_000
+        Logger.warn Error.new(<<-EOS)
+          Chunk Size Adjusted
+          Your original #chunk_size of #{ orig_mb } MiB has been adjusted
+          to #{ mb } MiB in order to satisfy the limit of 10,000 chunks.
+          To enforce your chosen #chunk_size, you should use the Splitter.
+          e.g. split_into_chunks_of #{ mb * 10_000 } (#chunk_size * 10_000)
+        EOS
+        1024**2 * mb
+      end
+
+      class Object
+        attr_reader :key, :etag, :storage_class
+
+        def initialize(cloud_io, data)
+          @cloud_io = cloud_io
+          @key = data['Key']
+          @etag = data['ETag']
+          @storage_class = data['StorageClass']
+        end
+
+        # currently 'AES256' or nil
+        def encryption
+          metadata['x-amz-server-side-encryption']
+        end
+
+        private
+
+        def metadata
+          @metadata ||= @cloud_io.head_object(self).headers
+        end
+      end
+
+    end
+  end
+end
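
The CloudIO layer is new in 3.7.0; it pulls connection handling, retries and
multipart logic out of the Storage and Syncer classes. A minimal usage sketch,
assuming the :max_retries/:retry_waitsec options are consumed by CloudIO::Base
(whose diff is not shown here); the credentials, bucket and paths are
hypothetical:

    require 'backup/cloud_io/s3'

    cloud_io = Backup::CloudIO::S3.new(
      :access_key_id     => 'my_access_key',   # hypothetical credentials
      :secret_access_key => 'my_secret_key',
      :region            => 'us-east-1',
      :bucket            => 'my-backups',      # hypothetical bucket
      :chunk_size        => 5,                 # MiB; larger files are uploaded in parts
      :max_retries       => 10,                # assumed to be handled by CloudIO::Base#with_retries
      :retry_waitsec     => 30
    )

    # Files over chunk_size use multipart upload; #adjusted_chunk_bytes grows
    # the part size if the file would otherwise exceed 10,000 parts.
    cloud_io.upload('/tmp/trial_backup.tar', 'backups/trial_backup.tar')

    # #objects pages through #get_bucket 1,000 keys at a time via :marker.
    cloud_io.objects('backups').each { |obj| puts "#{ obj.key } (#{ obj.storage_class })" }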
data/lib/backup/compressor/gzip.rb
@@ -3,6 +3,7 @@
 module Backup
   module Compressor
     class Gzip < Base
+      class Error < Backup::Error; end
       extend Utilities::Helpers
 
       ##
@@ -71,7 +72,7 @@ module Backup
         if self.class.has_rsyncable?
           opts << ' --rsyncable'
         else
-          Logger.warn Errors::Compressor::Gzip::RsyncableError.new(<<-EOS)
+          Logger.warn Error.new(<<-EOS)
            'rsyncable' option ignored.
            Your system's 'gzip' does not support the `--rsyncable` option.
           EOS
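
The same refactoring is applied across the codebase (see errors.rb +39 -70 in
the file list): the deep Errors::* constant tree is replaced by one nested
Error class per component, each descending from Backup::Error. A sketch of what
this means for rescue clauses; the rescue site here is hypothetical:

    begin
      Backup::Compressor::Gzip.new.compress_with do |cmd, ext|
        puts "command: #{ cmd }, extension: #{ ext }"
      end
    rescue Backup::Error => err
      # Gzip::Error, Config::Error, Database::Error, etc. all inherit from
      # Backup::Error, so a single rescue covers every component.
      Backup::Logger.error err
    end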
data/lib/backup/config.rb
@@ -2,6 +2,8 @@
 
 module Backup
   module Config
+    class Error < Backup::Error; end
+
     DEFAULTS = {
       :config_file => 'config.rb',
       :data_path   => 'data',
@@ -10,6 +12,8 @@ module Backup
     }
 
     class << self
+      include Backup::Utilities::Helpers
+
       attr_reader :user, :root_path, :config_file,
                   :data_path, :cache_path, :tmp_path
 
@@ -28,13 +32,16 @@ module Backup
       # Tries to find and load the configuration file
       def load_config!
         unless File.exist?(@config_file)
-          raise Errors::Config::NotFoundError,
-              "Could not find configuration file: '#{@config_file}'."
+          raise Error, "Could not find configuration file: '#{@config_file}'."
         end
 
         module_eval(File.read(@config_file), @config_file)
       end
 
+      def hostname
+        @hostname ||= run(utility(:hostname))
+      end
+
       private
 
       ##
@@ -48,7 +55,7 @@ module Backup
 
         path = File.expand_path(path)
         unless File.directory?(path)
-          raise Errors::Config::NotFoundError, <<-EOS
+          raise Error, <<-EOS
             Root Path Not Found
             When specifying a --root-path, the path must exist.
             Path was: #{ path }
@@ -115,7 +122,8 @@ module Backup
           { 'RSync' => ['Push', 'Pull', 'Local'] }
         ],
         # Notifiers
-        ['Mail', 'Twitter', 'Campfire', 'Prowl', 'Hipchat', 'Pushover']
+        ['Mail', 'Twitter', 'Campfire', 'Prowl',
+         'Hipchat', 'Pushover', 'HttpPost', 'Nagios']
       ]
     )
   end
@@ -153,7 +161,7 @@ module Backup
   class << self
     def const_missing(const)
       if const.to_s == 'CONFIG_FILE'
-        Logger.warn Errors::ConfigError.new(<<-EOS)
+        Logger.warn Error.new(<<-EOS)
          Configuration File Upgrade Needed
          Your configuration file, located at #{ Config.config_file }
          needs to be upgraded for this version of Backup.
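
Config.hostname shells out once through the newly included Utilities::Helpers
and caches the result; it appears to support the new Nagios notifier, which
reports results per host. A one-line sketch (output hypothetical):

    Backup::Config.hostname  # => "db01.example.com"  (runs `hostname` once, then memoized)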
data/lib/backup/configuration.rb
@@ -14,7 +14,7 @@ module Backup
     # Pass calls on to the proper class and log a warning
     def defaults(&block)
       klass = eval(self.to_s.sub('Configuration::', ''))
-      Logger.warn Errors::ConfigurationError.new <<-EOS
+      Logger.warn Error.new(<<-EOS)
        [DEPRECATION WARNING]
        #{ self }.defaults is being deprecated.
        To set pre-configured defaults for #{ klass }, use:
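
The warning steers users away from the Configuration namespace toward calling
.defaults on the class itself. A before/after sketch; the S3 setting shown is
hypothetical:

    # deprecated form, still honored but logs the warning above:
    Backup::Configuration::Storage::S3.defaults do |s3|
      s3.access_key_id = 'my_access_key'
    end

    # 3.7.0 form:
    Backup::Storage::S3.defaults do |s3|
      s3.access_key_id = 'my_access_key'
    end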
data/lib/backup/configuration/helpers.rb
@@ -2,6 +2,8 @@
 
 module Backup
   module Configuration
+    class Error < Backup::Error; end
+
     module Helpers
 
       def self.included(klass)
@@ -37,7 +39,7 @@ module Backup
           msg = "#{ self }##{ name } has been deprecated as of " +
                 "backup v.#{ deprecation[:version] }"
           msg << "\n#{ deprecation[:message] }" if deprecation[:message]
-          Logger.warn Backup::Errors::ConfigurationError.new <<-EOS
+          Logger.warn Error.new(<<-EOS)
            [DEPRECATION WARNING]
            #{ msg }
           EOS
data/lib/backup/database/base.rb
@@ -2,6 +2,8 @@
 
 module Backup
   module Database
+    class Error < Backup::Error; end
+
     class Base
       include Backup::Utilities::Helpers
       include Backup::Configuration::Helpers
@@ -47,7 +49,7 @@ module Backup
          unless database_id
            if model.databases.select {|d| d.class == self.class }.count > 1
              sleep 1; @database_id = Time.now.to_i.to_s[-5, 5]
-             Logger.warn Errors::Database::ConfigurationError.new(<<-EOS)
+             Logger.warn Error.new(<<-EOS)
                Database Identifier Missing
                When multiple Databases are configured in a single Backup Model
                that have the same class (MySQL, PostgreSQL, etc.), the optional
data/lib/backup/database/mongodb.rb
@@ -3,6 +3,7 @@
 module Backup
   module Database
     class MongoDB < Base
+      class Error < Backup::Error; end
 
       ##
       # Name of the database that needs to get dumped
@@ -109,8 +110,7 @@ module Backup
           FileUtils.rm_rf dump_packaging_path
           log!(:finished)
         else
-          raise Errors::Database::PipelineError,
-            "#{ database_name } Dump Failed!\n" + pipeline.error_messages
+          raise Error, "Dump Failed!\n" + pipeline.error_messages
         end
       end
 
data/lib/backup/database/mysql.rb
@@ -3,6 +3,7 @@
 module Backup
   module Database
     class MySQL < Base
+      class Error < Backup::Error; end
 
       ##
       # Name of the database that needs to get dumped
@@ -66,8 +67,7 @@ module Backup
         if pipeline.success?
           log!(:finished)
         else
-          raise Errors::Database::PipelineError,
-            "#{ database_name } Dump Failed!\n" + pipeline.error_messages
+          raise Error, "Dump Failed!\n" + pipeline.error_messages
         end
       end
 
data/lib/backup/database/postgresql.rb
@@ -3,6 +3,7 @@
 module Backup
   module Database
     class PostgreSQL < Base
+      class Error < Backup::Error; end
 
       ##
       # Name of the database that needs to get dumped.
@@ -14,6 +15,10 @@ module Backup
       # Credentials for the specified database
       attr_accessor :username, :password
 
+      ##
+      # If set the pg_dump(all) command is executed as the given user
+      attr_accessor :sudo_user
+
       ##
       # Connectivity options
       attr_accessor :host, :port, :socket
@@ -64,19 +69,20 @@ module Backup
         if pipeline.success?
           log!(:finished)
         else
-          raise Errors::Database::PipelineError,
-            "#{ database_name } Dump Failed!\n" + pipeline.error_messages
+          raise Error, "Dump Failed!\n" + pipeline.error_messages
         end
       end
 
       def pgdump
         "#{ password_option }" +
+        "#{ sudo_option }" +
         "#{ utility(:pg_dump) } #{ username_option } #{ connectivity_options } " +
        "#{ user_options } #{ tables_to_dump } #{ tables_to_skip } #{ name }"
       end
 
       def pgdumpall
         "#{ password_option }" +
+        "#{ sudo_option }" +
         "#{ utility(:pg_dumpall) } #{ username_option } " +
         "#{ connectivity_options } #{ user_options }"
       end
@@ -85,6 +91,10 @@ module Backup
         "PGPASSWORD='#{ password }' " if password
       end
 
+      def sudo_option
+        "#{ utility(:sudo) } -n -u #{ sudo_user } " if sudo_user
+      end
+
       def username_option
         "--username='#{ username }'" if username
       end
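
A sketch of the new sudo_user option in a model file; the trigger, label and
database settings are hypothetical. When set, #pgdump above prefixes the dump
command with `sudo -n -u <user>`:

    Backup::Model.new(:pg_backup, 'PostgreSQL Backup') do
      database PostgreSQL do |db|
        db.name      = 'my_database'
        db.username  = 'my_username'
        db.sudo_user = 'postgres'   # runs: sudo -n -u postgres pg_dump ...
      end
    end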