backup 3.3.2 → 3.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: e889097f40e239697f1c8b2d7cec1e2783674c15
-  data.tar.gz: f70369a752f7fb511ad3e0de69b98ef0d0959a15
+  metadata.gz: 552f524acf300a8918b07f535d29b23506cb1b9d
+  data.tar.gz: 31a993d965e008dba36bdcb1319e7be4bc4ab0c2
 SHA512:
-  metadata.gz: 5f38d5b86b108d53afdbffc33c120672364960abcc54bdf3f4ccbb12d85793ef118931b30091d5774cb8516b16aedaf9eaf84fc7b220551606cb44f5ab596c53
-  data.tar.gz: 108266c5088cabe8ec5d125626daed7af0260055a11d4ea35afb7d29edf5f89666e0068415fbe936b8206263f24c0c1438078eb872a9a1af92b7e02871f158c4
+  metadata.gz: 76ce5ab435d082d9947211a394ce1985f69f965758595e89a14b0f57f001b852b6bc4d2df89437c9ee5267c3196ae7c57c7075c006a9ba0637323e7c8e986623
+  data.tar.gz: 4b5c1ad79f13079a3a009a85012c80065c76dd16f62883bfd3ffb28b86822f06759e7a5990d5646c6b71611128d53c0d61a7c008a905e5404f6d99abc6cef09d
@@ -9,22 +9,22 @@ module Backup
     # from the last time this model/trigger was run,
     # then removes the files.
     def prepare(model)
-      @model = model
-
       messages = []
-      if packaging_folder_dirty?
+
+      packaging_folder = File.join(Config.tmp_path, model.trigger)
+      if File.exist?(packaging_folder)
         messages << <<-EOS
-          The temporary backup folder still contains files!
-          '#{ File.join(Config.tmp_path, @model.trigger) }'
-          These files will now be removed.
+          The temporary packaging folder still exists!
+          '#{ packaging_folder }'
+          It will now be removed.
         EOS
-        FileUtils.rm_rf(File.join(Config.tmp_path, @model.trigger))
+        FileUtils.rm_rf(packaging_folder)
       end
 
-      package_files = tmp_path_package_files
+      package_files = package_files_for(model.trigger)
      unless package_files.empty?
-        # the chances that tmp_path would be dirty
-        # AND package files exist are practically nil
+        # the chances of the packaging folder AND
+        # the package files existing are practically nil
        messages << ('-' * 74) unless messages.empty?
 
        messages << <<-EOS
@@ -41,7 +41,7 @@ module Backup
          Cleanup Warning
          #{ messages.join("\n") }
          Please check the log for messages and/or your notifications
-          concerning this backup: '#{ @model.label } (#{ @model.trigger })'
+          concerning this backup: '#{ model.label } (#{ model.trigger })'
          The temporary files which had to be removed should not have existed.
        EOS
      end
@@ -68,21 +68,21 @@ module Backup
     # Logs warnings if any temporary files still exist
     # when errors occur during the backup
     def warnings(model)
-      @model = model
-
       messages = []
-      if packaging_folder_dirty?
+
+      packaging_folder = File.join(Config.tmp_path, model.trigger)
+      if File.exist?(packaging_folder)
         messages << <<-EOS
-          The temporary backup folder still contains files!
-          '#{ File.join(Config.tmp_path, @model.trigger) }'
+          The temporary packaging folder still exists!
+          '#{ packaging_folder }'
          This folder may contain completed Archives and/or Database backups.
        EOS
      end
 
-      package_files = tmp_path_package_files
+      package_files = package_files_for(model.trigger)
      unless package_files.empty?
-        # the chances that tmp_path would be dirty
-        # AND package files exist are practically nil
+        # the chances of the packaging folder AND
+        # the package files existing are practically nil
        messages << ('-' * 74) unless messages.empty?
 
        messages << <<-EOS
@@ -97,7 +97,7 @@ module Backup
          Cleanup Warning
          #{ messages.join("\n") }
          Make sure you check these files before the next scheduled backup for
-          '#{ @model.label } (#{ @model.trigger })'
+          '#{ model.label } (#{ model.trigger })'
          These files will be removed at that time!
        EOS
      end
@@ -105,15 +105,8 @@ module Backup
 
     private
 
-    def packaging_folder_dirty?
-      !Dir[File.join(Config.tmp_path, @model.trigger, '*')].empty?
-    end
-
-    def tmp_path_package_files
-      Dir[File.join(
-        Config.tmp_path,
-        "????.??.??.??.??.??.#{ @model.trigger }.tar{,[.-]*}"
-      )]
+    def package_files_for(trigger)
+      Dir[File.join(Config.tmp_path,"#{ trigger }.tar{,[.-]*}")]
     end
 
   end
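
Note: the two removed helpers are folded into a single glob on the trigger name. A minimal sketch of what the new checks look for, outside the gem (the '/tmp/backup' path and 'my_backup' trigger are made-up values):

    tmp_path = '/tmp/backup'   # stand-in for Backup::Config.tmp_path
    trigger  = 'my_backup'     # stand-in for model.trigger

    # prepare/warnings now test the packaging folder itself, not its contents
    packaging_folder = File.join(tmp_path, trigger)
    puts "leftover packaging folder!" if File.exist?(packaging_folder)

    # package files no longer carry a timestamp prefix, so the glob matches
    # 'my_backup.tar', 'my_backup.tar-aa', 'my_backup.tar.enc', etc.
    leftover = Dir[File.join(tmp_path, "#{ trigger }.tar{,[.-]*}")]
    puts "leftover package files: #{ leftover.inspect }" unless leftover.empty?
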
@@ -8,6 +8,10 @@ module Backup
 
     attr_reader :model, :database_id, :dump_path
 
+    ##
+    # If given, +database_id+ will be appended to the #dump_filename.
+    # This is required if multiple Databases of the same class are added to
+    # the model.
     def initialize(model, database_id = nil)
       @model = model
       @database_id = database_id.to_s.gsub(/\W/, '_') if database_id
@@ -37,7 +41,7 @@ module Backup
     # is defined, the user will be warned and one will be auto-generated.
     #
     # Model#initialize calls this method *after* all defined databases have
-    # been initialized so `backup perform --check` can report these warnings.
+    # been initialized so `backup check` can report these warnings.
     def dump_filename
       @dump_filename ||= begin
         unless database_id
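
Note: the database_id sanitization shown in the initializer above is a plain gsub; a quick illustration with a made-up id:

    database_id = 'my-app db #1'              # made-up value
    puts database_id.to_s.gsub(/\W/, '_')     #=> "my_app_db__1"
    # the sanitized id is what gets appended to the dump filename
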
@@ -80,6 +80,7 @@ module Backup
     def initialize(trigger, label, &block)
       @trigger = trigger.to_s
       @label = label.to_s
+      @package = Package.new(self)
 
       procedure_instance_variables.each do |variable|
         instance_variable_set(variable, Array.new)
@@ -230,7 +231,7 @@ module Backup
     #
     def perform!
       @started_at = Time.now
-      @time = @started_at.strftime("%Y.%m.%d.%H.%M.%S")
+      @time = package.time = @started_at.strftime("%Y.%m.%d.%H.%M.%S")
       log!(:started)
 
       prepare!
@@ -263,13 +264,12 @@ module Backup
     end
 
     ##
-    # After all the databases and archives have been dumped and sorted,
+    # After all the databases and archives have been dumped and stored,
     # these files will be bundled in to a .tar archive (uncompressed),
     # which may be optionally Encrypted and/or Split into multiple "chunks".
     # All information about this final archive is stored in the @package.
     # Once complete, the temporary folder used during packaging is removed.
     def package!
-      @package = Package.new(self)
       Packager.package!(self)
       Cleaner.remove_packaging(self)
     end
@@ -277,7 +277,7 @@ module Backup
     ##
     # Removes the final package file(s) once all configured Storages have run.
     def clean!
-      Cleaner.remove_package(@package)
+      Cleaner.remove_package(package)
     end
 
     ##
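
Note: taken together, these Model hunks create the Package in the constructor (instead of in package!) and stamp it with the timestamp when perform! starts. A simplified, self-contained sketch of that ordering; these are stand-in classes, not the gem's:

    class Package
      attr_accessor :time
      attr_reader :trigger
      def initialize(model)
        @trigger = model.trigger
      end
    end

    class Model
      attr_reader :trigger, :package, :time
      def initialize(trigger)
        @trigger = trigger.to_s
        @package = Package.new(self)   # package now exists before perform!
      end

      def perform!
        @started_at = Time.now
        # one assignment stamps both the model and its package
        @time = package.time = @started_at.strftime("%Y.%m.%d.%H.%M.%S")
      end
    end

    model = Model.new(:my_backup)
    model.perform!
    puts model.package.time            # e.g. "2013.04.16.01.00.00"
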
@@ -5,7 +5,7 @@ module Backup
 
     ##
     # The time when the backup initiated (in format: 2011.02.20.03.29.59)
-    attr_reader :time
+    attr_accessor :time
 
     ##
     # The trigger which initiated the backup process
@@ -24,11 +24,10 @@ module Backup
     attr_reader :version
 
     def initialize(model)
-      @time = model.time
       @trigger = model.trigger
       @extension = 'tar'
       @chunk_suffixes = Array.new
-      @version = Backup::Version.current
+      @version = Version.current
     end
 
     def filenames
@@ -40,7 +39,7 @@ module Backup
     end
 
     def basename
-      "#{ time }.#{ trigger }.#{ extension }"
+      "#{ trigger }.#{ extension }"
     end
 
   end
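
Note: with the timestamp removed from basename, the local package filename is now stable across runs. A before/after with illustrative values:

    time      = '2013.04.16.01.00.00'   # illustrative
    trigger   = 'my_backup'
    extension = 'tar'

    puts "#{ time }.#{ trigger }.#{ extension }"   # old: "2013.04.16.01.00.00.my_backup.tar"
    puts "#{ trigger }.#{ extension }"             # new: "my_backup.tar"
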
@@ -4,8 +4,10 @@ module Backup
   class Splitter
     include Backup::Utilities::Helpers
 
+    attr_reader :package, :chunk_size
+
     def initialize(model, chunk_size)
-      @model = model
+      @package = model.package
       @chunk_size = chunk_size
     end
 
@@ -17,8 +19,8 @@ module Backup
     # Once the packaging procedure is complete, it will return and
     # @package.chunk_suffixes will be set based on the resulting files.
     def split_with
-      before_packaging
-      yield @split_command
+      Logger.info "Splitter configured with a chunk size of #{ chunk_size }MB."
+      yield split_command
       after_packaging
     end
 
@@ -30,13 +32,9 @@ module Backup
     # written using the given `prefix`, which is the full path to the
     # final @package.basename, plus a '-' separator. This `prefix` will then
     # be suffixed using 'aa', 'ab', and so on... for each file.
-    def before_packaging
-      @package = @model.package
-      Logger.info "Splitter configured with a chunk size of " +
-          "#{ @chunk_size }MB."
-
-      @split_command = "#{ utility(:split) } -b #{ @chunk_size }m - " +
-          "'#{ File.join(Config.tmp_path, @package.basename + '-') }'"
+    def split_command
+      "#{ utility(:split) } -b #{ chunk_size }m - " +
+          "'#{ File.join(Config.tmp_path, package.basename + '-') }'"
     end
 
     ##
@@ -49,11 +47,11 @@ module Backup
       suffixes = chunk_suffixes
       if suffixes == ['aa']
         FileUtils.mv(
-          File.join(Config.tmp_path, @package.basename + '-aa'),
-          File.join(Config.tmp_path, @package.basename)
+          File.join(Config.tmp_path, package.basename + '-aa'),
+          File.join(Config.tmp_path, package.basename)
         )
       else
-        @package.chunk_suffixes = suffixes
+        package.chunk_suffixes = suffixes
       end
     end
 
@@ -68,7 +66,7 @@ module Backup
     # Returns an array of full paths to the backup chunks.
     # Chunks are sorted in alphabetical order.
     def chunks
-      Dir[File.join(Config.tmp_path, @package.basename + '-*')].sort
+      Dir[File.join(Config.tmp_path, package.basename + '-*')].sort
     end
 
   end
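
Note: split_command builds an ordinary split(1) pipeline; with made-up values for the chunk size, tmp_path and package basename it comes out roughly as follows:

    chunk_size = 250                    # MB, illustrative
    tmp_path   = '/tmp/backup'          # stand-in for Config.tmp_path
    basename   = 'my_backup.tar'        # stand-in for package.basename

    # utility(:split) resolves to the system 'split' binary
    puts "split -b #{ chunk_size }m - '#{ File.join(tmp_path, basename + '-') }'"
    #=> split -b 250m - '/tmp/backup/my_backup.tar-'
    # The tar stream is piped into this command, producing my_backup.tar-aa,
    # my_backup.tar-ab, ...; if only '-aa' is produced, after_packaging renames
    # it back to my_backup.tar.
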
@@ -5,77 +5,45 @@ module Backup
   class Base
     include Backup::Configuration::Helpers
 
+    ##
+    # Base path on the remote where backup package files will be stored.
+    attr_accessor :path
+
     ##
     # Sets the limit to how many backups to keep in the remote location.
     # If exceeded, the oldest will be removed to make room for the newest
     attr_accessor :keep
 
-    ##
-    # User-defined string used to uniquely identify multiple storages of the
-    # same type. If multiple storages of the same type are added to a single
-    # backup model, then this identifier must be set. This will be appended
-    # to the YAML storage file used for cycling backups.
-    attr_reader :storage_id
+    attr_reader :model, :package, :storage_id
 
+    ##
+    # +storage_id+ is a user-defined string used to uniquely identify
+    # multiple storages of the same type. If multiple storages of the same
+    # type are added to a single backup model, this identifier must be set.
+    # This will be appended to the YAML storage file used for cycling backups.
     def initialize(model, storage_id = nil)
       @model = model
-      @storage_id = storage_id
+      @package = model.package
+      @storage_id = storage_id.to_s.gsub(/\W/, '_') if storage_id
 
       load_defaults!
     end
 
     def perform!
-      @package = @model.package
+      Logger.info "#{ storage_name } Started..."
       transfer!
       cycle!
+      Logger.info "#{ storage_name } Finished!"
     end
 
     private
 
     ##
-    # Provider defaults to false. Overridden when using a service-based
-    # storage such as Amazon S3, Rackspace Cloud Files or Dropbox
-    def provider
-      false
-    end
-
-    ##
-    # Each subclass must define a +path+ where remote files will be stored
-    def path; end
-
-    ##
-    # Return the storage name, with optional storage_id
-    def storage_name
-      @storage_name ||= self.class.to_s.sub('Backup::', '') +
-          (storage_id ? " (#{ storage_id })" : '')
-    end
-
-    ##
-    # Returns the local path
-    # This is where any Package to be transferred is located.
-    def local_path
-      Config.tmp_path
+    # Return the remote path for the current or given package.
+    def remote_path(pkg = package)
+      File.join(path, pkg.trigger, pkg.time)
     end
-
-    ##
-    # Returns the remote path for the given Package
-    # This is where the Package will be stored, or was previously stored.
-    def remote_path_for(package)
-      File.join(path, package.trigger, package.time)
-    end
-
-    ##
-    # Yields two arguments to the given block: "local_file, remote_file"
-    # The local_file is the full file name:
-    # e.g. "2011.08.30.11.00.02.trigger.tar.enc"
-    # The remote_file is the full file name, minus the timestamp:
-    # e.g. "trigger.tar.enc"
-    def files_to_transfer_for(package)
-      package.filenames.each do |filename|
-        yield filename, filename[20..-1]
-      end
-    end
-    alias :transferred_files_for :files_to_transfer_for
+    alias :remote_path_for :remote_path
 
     ##
     # Adds the current package being stored to the YAML cycle data file
@@ -85,9 +53,13 @@ module Backup
     # containing the original error message.
     def cycle!
       return unless keep.to_i > 0
-      Logger.info "#{ storage_name }: Cycling Started..."
-      Cycler.cycle!(self, @package)
-      Logger.info "#{ storage_name }: Cycling Complete!"
+      Logger.info "Cycling Started..."
+      Cycler.cycle!(self)
+    end
+
+    def storage_name
+      @storage_name ||= self.class.to_s.sub('Backup::', '') +
+          (storage_id ? " (#{ storage_id })" : '')
     end
 
   end
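
Note: with the timestamp gone from the local filename, remote_path is what separates one run's package from the next on the remote. An illustration with made-up values:

    require 'ostruct'

    path = 'backups'                                # the storage's path setting
    pkg  = OpenStruct.new(:trigger => 'my_backup',  # stand-in for a Package
                          :time    => '2013.04.16.01.00.00')

    remote_path = File.join(path, pkg.trigger, pkg.time)
    puts remote_path                                #=> "backups/my_backup/2013.04.16.01.00.00"
    puts File.join(remote_path, 'my_backup.tar-aa')
    #=> "backups/my_backup/2013.04.16.01.00.00/my_backup.tar-aa"
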
@@ -1,7 +1,5 @@
 # encoding: utf-8
 
-##
-# Only load the Fog gem when the Backup::Storage::CloudFiles class is loaded
 Backup::Dependency.load('fog')
 
 module Backup
@@ -18,33 +16,23 @@ module Backup
     attr_accessor :servicenet
 
     ##
-    # Rackspace Cloud Files container name and path
-    attr_accessor :container, :path
+    # Rackspace Cloud Files container name
+    attr_accessor :container
 
-    ##
-    # Creates a new instance of the storage object
     def initialize(model, storage_id = nil, &block)
-      super(model, storage_id)
+      super
+      instance_eval(&block) if block_given?
 
       @servicenet ||= false
       @path ||= 'backups'
-
-      instance_eval(&block) if block_given?
+      path.sub!(/^\//, '')
     end
 
     private
 
-    ##
-    # This is the provider that Fog uses for the Cloud Files Storage
-    def provider
-      'Rackspace'
-    end
-
-    ##
-    # Establishes a connection to Rackspace Cloud Files
     def connection
       @connection ||= Fog::Storage.new(
-        :provider => provider,
+        :provider => 'Rackspace',
         :rackspace_username => username,
         :rackspace_api_key => api_key,
         :rackspace_auth_url => auth_url,
@@ -52,35 +40,27 @@ module Backup
       )
     end
 
-    ##
-    # Transfers the archived file to the specified Cloud Files container
     def transfer!
-      remote_path = remote_path_for(@package)
-
       connection.put_container(container)
 
-      files_to_transfer_for(@package) do |local_file, remote_file|
-        Logger.info "#{storage_name} started transferring '#{ local_file }'."
-
-        File.open(File.join(local_path, local_file), 'r') do |file|
-          connection.put_object(
-            container, File.join(remote_path, remote_file), file
-          )
+      package.filenames.each do |filename|
+        src = File.join(Config.tmp_path, filename)
+        dest = File.join(remote_path, filename)
+        Logger.info "Storing '#{ container }/#{ dest }'..."
+        File.open(src, 'r') do |file|
+          connection.put_object(container, dest, file)
         end
       end
     end
 
-    ##
-    # Removes the transferred archive file(s) from the storage location.
-    # Any error raised will be rescued during Cycling
-    # and a warning will be logged, containing the error message.
+    # Called by the Cycler.
+    # Any error raised will be logged as a warning.
     def remove!(package)
-      remote_path = remote_path_for(package)
+      Logger.info "Removing backup package dated #{ package.time }..."
 
-      transferred_files_for(package) do |local_file, remote_file|
-        Logger.info "#{storage_name} started removing '#{ local_file }' " +
-            "from container '#{ container }'."
-        connection.delete_object(container, File.join(remote_path, remote_file))
+      remote_path = remote_path_for(package)
+      package.filenames.each do |filename|
+        connection.delete_object(container, File.join(remote_path, filename))
       end
     end
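
Note: the rewritten transfer! uploads each package file from Config.tmp_path to '<container>/<remote_path>/<filename>'. A dry sketch of the resulting object keys (names are made up; the actual Fog upload call is commented out):

    container   = 'my_container'
    remote_path = 'backups/my_backup/2013.04.16.01.00.00'
    filenames   = ['my_backup.tar-aa', 'my_backup.tar-ab']

    filenames.each do |filename|
      dest = File.join(remote_path, filename)
      puts "Storing '#{ container }/#{ dest }'..."
      # connection.put_object(container, dest, file)  # upload happens here
    end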