backup 3.7.2 → 3.8.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 6cc0cefe15e7fb9dc06e2204ce90aad256ea474a
-  data.tar.gz: 073692acde24bfe0dc958334a3c3ecfb955c62a2
+  metadata.gz: 697c751e37f7ee0a9c4485ba7c793980a7a5b087
+  data.tar.gz: 2bc9a792c855ed04d7d8e32a95612b7510836a93
 SHA512:
-  metadata.gz: 09747bffc3eae9f9769c5d3b1d86a198ce5d6560ad34877b0f7b491fb514844751e8ce5e3b8c2b5a69786ca0904e09f1f94023ed4bf3739429a10297d9c43398
-  data.tar.gz: 0ae326031c81d8c3ad905427463018bccdee68bade8dc8c7a5398ff241fd6572dd428cba04ed47f0d09f280c8dc390c16922fafb777fd01378952ee0a3925af3
+  metadata.gz: 66fee3c688e30e8c0dca0083d3b65b989578f9cdb42b6d043e6d6880e129bfa2ee08b220ee1b778322f02d52b58ad119a33a46458ced5f4e24663122963f9a1e
+  data.tar.gz: 44d7d2382a074f6f5e83df6b360d26137c6bc2733031cb2b22ce0f5ce1d83b97712366254f248ceb6681aaf5f8d42fcd138484b670bb52acab51d5fdbfe8026b
data/README.md CHANGED
@@ -8,12 +8,12 @@ was built with modularity, extensibility and simplicity in mind.
 
 ## Installation
 
+**_Do not add `gem backup` to an application's `Gemfile`_**
+
 To install the latest version, run:
 
     $ [sudo] gem install backup
 
-_Do not add `gem backup` to an application's `Gemfile`_
-
 See [Installation](https://github.com/meskyanichi/backup/wiki/Installation) for more information about installing and
 updating your installation of Backup.
 
@@ -71,21 +71,23 @@ module Backup
       FileUtils.mkdir_p(path)
 
       pipeline = Pipeline.new
-      pipeline.add(
-        "#{ tar_command } #{ tar_options } -cPf -#{ tar_root } " +
-        "#{ paths_to_exclude } #{ paths_to_package }",
-        tar_success_codes
-      )
-
-      extension = 'tar'
-      @model.compressor.compress_with do |command, ext|
-        pipeline << command
-        extension << ext
-      end if @model.compressor
-
-      pipeline << "#{ utility(:cat) } > " +
-          "'#{ File.join(path, "#{ name }.#{ extension }") }'"
-      pipeline.run
+      with_files_from(paths_to_package) do |files_from|
+        pipeline.add(
+          "#{ tar_command } #{ tar_options } -cPf -#{ tar_root } " +
+          "#{ paths_to_exclude } #{ files_from }",
+          tar_success_codes
+        )
+
+        extension = 'tar'
+        @model.compressor.compress_with do |command, ext|
+          pipeline << command
+          extension << ext
+        end if @model.compressor
+
+        pipeline << "#{ utility(:cat) } > " +
+            "'#{ File.join(path, "#{ name }.#{ extension }") }'"
+        pipeline.run
+      end
 
       if pipeline.success?
         Logger.info "Archive '#{ name }' Complete!"
@@ -107,9 +109,16 @@ module Backup
       end
 
       def paths_to_package
-        options[:paths].map {|path|
-          "'#{ prepare_path(path) }'"
-        }.join(' ')
+        options[:paths].map {|path| prepare_path(path) }
+      end
+
+      def with_files_from(paths)
+        tmpfile = Tempfile.new('backup-archive-paths')
+        paths.each {|path| tmpfile.puts path }
+        tmpfile.close
+        yield "-T '#{ tmpfile.path }'"
+      ensure
+        tmpfile.delete
       end
 
       def paths_to_exclude
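
The new `with_files_from` helper writes the archive's path list to a temporary file and hands tar a `-T` (`--files-from`) option, so a large number of paths no longer risks overflowing the shell's argument-length limit. A minimal standalone sketch of the same idea, assuming GNU tar; the paths and the `demo.tar` name are illustrative only:

```ruby
require 'tempfile'

paths = ['/etc/hosts', '/etc/hostname']  # illustrative only

tmpfile = Tempfile.new('backup-archive-paths')
begin
  paths.each {|path| tmpfile.puts path }
  tmpfile.close
  # GNU tar reads the list of files to archive from the -T file, so the
  # command line stays short no matter how many paths are packaged.
  system("tar -cPf demo.tar -T '#{ tmpfile.path }'")
ensure
  tmpfile.delete
end
```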
@@ -13,14 +13,15 @@ module Backup
       MAX_FILE_SIZE      = 1024**3 * 5 # 5 GiB
       MAX_MULTIPART_SIZE = 1024**4 * 5 # 5 TiB
 
-      attr_reader :access_key_id, :secret_access_key, :region, :bucket,
-                  :chunk_size, :encryption, :storage_class
+      attr_reader :access_key_id, :secret_access_key, :use_iam_profile,
+                  :region, :bucket, :chunk_size, :encryption, :storage_class
 
       def initialize(options = {})
         super
 
         @access_key_id     = options[:access_key_id]
         @secret_access_key = options[:secret_access_key]
+        @use_iam_profile   = options[:use_iam_profile]
         @region            = options[:region]
         @bucket            = options[:bucket]
         @chunk_size        = options[:chunk_size]
@@ -120,12 +121,16 @@ module Backup
 
       def connection
         @connection ||= begin
-          conn = Fog::Storage.new(
-            :provider              => 'AWS',
-            :aws_access_key_id     => access_key_id,
-            :aws_secret_access_key => secret_access_key,
-            :region                => region
-          )
+          opts = { :provider => 'AWS', :region => region }
+          if use_iam_profile
+            opts.merge!(:use_iam_profile => true)
+          else
+            opts.merge!(
+              :aws_access_key_id     => access_key_id,
+              :aws_secret_access_key => secret_access_key
+            )
+          end
+          conn = Fog::Storage.new(opts)
           conn.sync_clock
           conn
         end
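
With `use_iam_profile` set, the connection options omit the static key pair entirely; fog's `:use_iam_profile` option fetches temporary credentials from the EC2 instance metadata service instead. A sketch of the two modes, assuming the fog gem; the region and environment-variable names are placeholders:

```ruby
require 'fog'

def s3_connection(use_iam_profile = false)
  opts = { :provider => 'AWS', :region => 'us-east-1' }
  if use_iam_profile
    # fog obtains temporary credentials from the EC2 instance metadata
    # service, so no keys need to be stored in the configuration.
    opts[:use_iam_profile] = true
  else
    opts[:aws_access_key_id]     = ENV['AWS_ACCESS_KEY_ID']
    opts[:aws_secret_access_key] = ENV['AWS_SECRET_ACCESS_KEY']
  end
  Fog::Storage.new(opts)
end
```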
@@ -3,6 +3,8 @@
 module Backup
   class Logger
     class Logfile
+      class Error < Backup::Error; end
+
       class Options
         ##
         # Enable the use of Backup's log file.
@@ -87,7 +89,11 @@ module Backup
          path = File.join(Backup::Config.root_path, path)
        end
        FileUtils.mkdir_p(path)
-        File.join(path, 'backup.log')
+        path = File.join(path, 'backup.log')
+        if File.exist?(path) && !File.writable?(path)
+          raise Error, "Log File at '#{ path }' is not writable"
+        end
+        path
       end
 
       ##
@@ -96,7 +102,7 @@ module Backup
        return unless File.exist?(@logfile)
 
        if File.stat(@logfile).size > @options.max_bytes
-          FileUtils.mv(@logfile, @logfile + '~')
+          FileUtils.cp(@logfile, @logfile + '~')
          File.open(@logfile + '~', 'r') do |io_in|
            File.open(@logfile, 'w') do |io_out|
              io_in.seek(-@options.max_bytes, IO::SEEK_END) && io_in.gets
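
Using `cp` instead of `mv` for the rotation means `backup.log` itself is never replaced: the original file (and so its inode, owner and mode) stays in place, and only its contents are rewritten with the last `max_bytes` of data. A standalone sketch of the copy-then-rewrite step, assuming a local `backup.log` and a small limit for demonstration; the final cleanup of the `~` copy is part of the sketch, not taken from the diff:

```ruby
require 'fileutils'

logfile   = 'backup.log'  # placeholder path
max_bytes = 1024

if File.exist?(logfile) && File.stat(logfile).size > max_bytes
  # cp (not mv) leaves the original file in place, preserving its
  # inode, owner and permissions; only the contents are rewritten.
  FileUtils.cp(logfile, logfile + '~')
  File.open(logfile + '~', 'r') do |io_in|
    File.open(logfile, 'w') do |io_out|
      # Seek to the last max_bytes, then discard the partial first line.
      io_in.seek(-max_bytes, IO::SEEK_END) && io_in.gets
      while (line = io_in.gets)
        io_out.write(line)
      end
    end
  end
  FileUtils.rm(logfile + '~')
end
```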
@@ -8,7 +8,7 @@ module Backup
 
       ##
       # Amazon Simple Storage Service (S3) Credentials
-      attr_accessor :access_key_id, :secret_access_key
+      attr_accessor :access_key_id, :secret_access_key, :use_iam_profile
 
       ##
       # Amazon S3 bucket name
@@ -81,6 +81,7 @@ module Backup
         @cloud_io ||= CloudIO::S3.new(
           :access_key_id     => access_key_id,
           :secret_access_key => secret_access_key,
+          :use_iam_profile   => use_iam_profile,
           :region            => region,
           :bucket            => bucket,
           :encryption        => encryption,
@@ -114,7 +115,11 @@ module Backup
       end
 
       def check_configuration
-        required = %w{ access_key_id secret_access_key bucket }
+        if use_iam_profile
+          required = %w{ bucket }
+        else
+          required = %w{ access_key_id secret_access_key bucket }
+        end
         raise Error, <<-EOS if required.map {|name| send(name) }.any?(&:nil?)
           Configuration Error
           #{ required.map {|name| "##{ name }"}.join(', ') } are all required
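
With `use_iam_profile`, only `bucket` remains required, since the key pair is supplied by the instance role at runtime. A hypothetical model using the relaxed validation; the trigger, region, bucket and path values are placeholders:

```ruby
Model.new(:my_backup, 'My Backup') do
  store_with S3 do |s3|
    s3.use_iam_profile = true  # credentials come from the EC2 instance role
    s3.region          = 'us-east-1'
    s3.bucket          = 'my-backup-bucket'  # the only required credential setting
    s3.path            = 'path/to/backups'
  end
end
```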
@@ -7,7 +7,7 @@ module Backup
 
       ##
       # Server credentials
-      attr_accessor :username, :password
+      attr_accessor :username, :password, :ssh_options
 
       ##
       # Server IP Address and SFTP port
@@ -16,8 +16,9 @@
       def initialize(model, storage_id = nil)
         super
 
-        @port ||= 22
-        @path ||= 'backups'
+        @ssh_options ||= {}
+        @port        ||= 22
+        @path        ||= 'backups'
         path.sub!(/^~\//, '')
       end
 
@@ -25,9 +26,7 @@
 
       def connection
         Net::SFTP.start(
-          ip, username,
-          :password => password,
-          :port => port
+          ip, username, { :password => password, :port => port }.merge(ssh_options)
         ) {|sftp| yield sftp }
       end
 
@@ -20,13 +20,16 @@ module Backup
       # in the log messages.
       attr_reader :syncer_id
 
+      attr_reader :excludes
+
       def initialize(syncer_id = nil)
         @syncer_id = syncer_id
 
         load_defaults!
 
         @mirror ||= false
-        @directories = Array.new
+        @directories = []
+        @excludes = []
       end
 
       ##
@@ -40,6 +43,13 @@
         directories << path
       end
 
+      # For Cloud Syncers, +pattern+ can be a string (with shell-style
+      # wildcards) or a regex.
+      # For RSync, each +pattern+ will be passed to rsync's --exclude option.
+      def exclude(pattern)
+        excludes << pattern
+      end
+
       private
 
       def syncer_name
@@ -64,7 +64,7 @@ module Backup
         remote_files = get_remote_files(remote_base)
 
         Logger.info("Gathering local data for '#{ File.expand_path(dir) }'...")
-        local_files = LocalFile.find(dir)
+        local_files = LocalFile.find(dir, excludes)
 
         relative_paths = (local_files.keys | remote_files.keys).sort
         if relative_paths.empty?
@@ -1,22 +1,23 @@
 # encoding: utf-8
+require 'digest/md5'
 
 module Backup
   module Syncer
     module Cloud
       class LocalFile
-        attr_reader :path, :md5
+        attr_reader :path
+        attr_accessor :md5
 
         class << self
-          include Utilities::Helpers
 
-          # Returns a Hash of LocalFile objects for each file within +dir+.
+          # Returns a Hash of LocalFile objects for each file within +dir+,
+          # except those matching any of the +excludes+.
           # Hash keys are the file's path relative to +dir+.
-          def find(dir)
+          def find(dir, excludes = [])
             dir = File.expand_path(dir)
             hash = {}
-            find_md5(dir).each do |path, md5|
-              file = new(path, md5)
-              hash[path.sub(dir + '/', '')] = file if file
+            find_md5(dir, excludes).each do |file|
+              hash[file.path.sub(dir + '/', '')] = file
             end
             hash
           end
@@ -36,37 +37,43 @@ module Backup
           private
 
           # Returns an Array of file paths and their md5 hashes.
-          #
-          # Lines output from `cmd` are formatted like:
-          #   MD5(/dir/subdir/file)= 7eaabd1f53024270347800d0fdb34357
-          # However, if +dir+ is empty, the following is returned:
-          #   (stdin)= d41d8cd98f00b204e9800998ecf8427e
-          # Which extracts as: ['in', 'd41d8cd98f00b204e9800998ecf8427e']
-          # I'm not sure I can rely on the fact this doesn't begin with 'MD5',
-          # so I'll reject entries with a path that doesn't start with +dir+.
-          #
-          # String#slice avoids `invalid byte sequence in UTF-8` errors
-          # that String#split would raise.
-          #
-          # Utilities#run is not used here because this would produce too much
-          # log output, and Pipeline does not support capturing output.
-          def find_md5(dir)
-            cmd = "#{ utility(:find) } -L '#{ dir }' -type f -print0 | " +
-                "#{ utility(:xargs) } -0 #{ utility(:openssl) } md5 2> /dev/null"
-            %x[#{ cmd }].lines.map do |line|
-              line.chomp!
-              entry = [line.slice(4..-36), line.slice(-32..-1)]
-              entry[0].to_s.start_with?(dir) ? entry : nil
-            end.compact
+          def find_md5(dir, excludes)
+            found = []
+            (Dir.entries(dir) - %w{. ..}).map {|e| File.join(dir, e) }.each do |path|
+              if File.directory?(path)
+                unless exclude?(excludes, path)
+                  found += find_md5(path, excludes)
+                end
+              elsif File.file?(path)
+                if file = new(path)
+                  unless exclude?(excludes, file.path)
+                    file.md5 = Digest::MD5.file(file.path).hexdigest
+                    found << file
+                  end
+                end
+              end
+            end
+            found
+          end
+
+          # Returns true if +path+ matches any of the +excludes+.
+          # Note this can not be called if +path+ includes invalid UTF-8.
+          def exclude?(excludes, path)
+            excludes.any? do |ex|
+              if ex.is_a?(String)
+                File.fnmatch?(ex, path)
+              elsif ex.is_a?(Regexp)
+                ex.match(path)
+              end
+            end
           end
         end
 
        # If +path+ contains invalid UTF-8, it will be sanitized
        # and the LocalFile object will be flagged as invalid.
        # This is done so @file.path may be logged.
-        def initialize(path, md5)
+        def initialize(path)
          @path = sanitize(path)
-          @md5 = md5
        end
 
        def invalid?
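
The shell pipeline through `find`, `xargs` and `openssl` is gone: the new traversal walks the tree in pure Ruby with `Dir.entries` and hashes each file via `Digest::MD5.file`, which also lets an excluded directory be pruned before it is ever descended into. A condensed sketch of the same logic; the paths and patterns in the usage comment are hypothetical:

```ruby
require 'digest/md5'

def md5_index(dir, excludes = [])
  (Dir.entries(dir) - %w{. ..}).each_with_object({}) do |entry, hash|
    path = File.join(dir, entry)
    # Strings are matched as shell globs, Regexps directly.
    next if excludes.any? {|ex|
      ex.is_a?(Regexp) ? ex.match(path) : File.fnmatch?(ex, path)
    }
    if File.directory?(path)
      hash.merge!(md5_index(path, excludes))  # excluded dirs are never entered
    elsif File.file?(path)
      hash[path] = Digest::MD5.file(path).hexdigest
    end
  end
end

# md5_index('/path/to/sync', ['**/*~', /\/tmp$/])
#   #=> { "/path/to/sync/file" => "<32-char md5>", ... }
```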
@@ -9,7 +9,7 @@ module Backup
 
       ##
       # Amazon Simple Storage Service (S3) Credentials
-      attr_accessor :access_key_id, :secret_access_key
+      attr_accessor :access_key_id, :secret_access_key, :use_iam_profile
 
       ##
       # Amazon S3 bucket name
@@ -54,6 +54,7 @@
         @cloud_io ||= CloudIO::S3.new(
           :access_key_id     => access_key_id,
           :secret_access_key => secret_access_key,
+          :use_iam_profile   => use_iam_profile,
           :bucket            => bucket,
           :region            => region,
           :encryption        => encryption,
@@ -75,7 +76,11 @@
       end
 
       def check_configuration
-        required = %w{ access_key_id secret_access_key bucket }
+        if use_iam_profile
+          required = %w{ bucket }
+        else
+          required = %w{ access_key_id secret_access_key bucket }
+        end
         raise Error, <<-EOS if required.map {|name| send(name) }.any?(&:nil?)
           Configuration Error
           #{ required.map {|name| "##{ name }"}.join(', ') } are all required
@@ -21,7 +21,7 @@ module Backup
       ##
       # Common base command for Local/Push/Pull
       def rsync_command
-        utility(:rsync) << ' --archive' << mirror_option <<
+        utility(:rsync) << ' --archive' << mirror_option << exclude_option <<
             " #{ Array(additional_rsync_options).join(' ') }".rstrip
       end
 
@@ -29,6 +29,10 @@
         mirror ? ' --delete' : ''
       end
 
+      def exclude_option
+        excludes.map {|pattern| " --exclude='#{ pattern }'" }.join
+      end
+
       ##
       # Each path is expanded, since these refer to local paths and are
       # being shell-quoted. This will also remove any trailing `/` from
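
Each stored pattern becomes a quoted `--exclude` flag appended right after `--archive` and the mirror option. A quick check of the expansion, using the same patterns as the rsync templates further down:

```ruby
excludes = ['*~', 'tmp/']
exclude_option = excludes.map {|pattern| " --exclude='#{ pattern }'" }.join
# => " --exclude='*~' --exclude='tmp/'"
# yielding, e.g.: rsync --archive --delete --exclude='*~' --exclude='tmp/' ...
```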
@@ -6,7 +6,7 @@ module Backup
 
     UTILITY = {}
     NAMES = %w{
-      tar cat split find xargs sudo chown hostname
+      tar cat split sudo chown hostname
       gzip bzip2 lzma pbzip2
       mongo mongodump mysqldump pg_dump pg_dumpall redis-cli riak-admin
       gpg openssl
@@ -65,8 +65,6 @@ module Backup
   #   tar_dist :gnu   # or :bsd
   #   cat      '/path/to/cat'
   #   split    '/path/to/split'
-  #   find     '/path/to/find'
-  #   xargs    '/path/to/xargs'
   #   sudo     '/path/to/sudo'
   #   chown    '/path/to/chown'
   #   hostname '/path/to/hostname'
@@ -1,5 +1,5 @@
 # encoding: utf-8
 
 module Backup
-  VERSION = '3.7.2'
+  VERSION = '3.8.0'
 end
@@ -4,8 +4,12 @@
 # See the documentation on the Wiki for details.
 # https://github.com/meskyanichi/backup/wiki/Storages
 store_with S3 do |s3|
+  # AWS Credentials
   s3.access_key_id     = "my_access_key_id"
   s3.secret_access_key = "my_secret_access_key"
+  # Or, to use an IAM Profile:
+  # s3.use_iam_profile = true
+
   s3.region            = "us-east-1"
   s3.bucket            = "bucket-name"
   s3.path              = "path/to/backups"
@@ -8,4 +8,7 @@
   server.port       = 22
   server.path       = "~/backups/"
   server.keep       = 5
+
+  # Additional options for the SSH connection.
+  # server.ssh_options = {}
 end
@@ -8,4 +8,7 @@
   server.port       = 22
   server.path       = "~/backups/"
   server.keep       = 5
+
+  # Additional options for the SSH connection.
+  # server.ssh_options = {}
 end
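
Because the new `ssh_options` hash is merged into the arguments of `Net::SFTP.start`, any option understood by Net::SSH can be passed through, for example key-based authentication. A hypothetical configuration; the host, user and key path are placeholders:

```ruby
store_with SFTP do |server|
  server.ip       = 'backup.example.com'
  server.username = 'my_username'
  server.path     = '~/backups/'
  # Merged into Net::SFTP.start, so any Net::SSH option is accepted:
  server.ssh_options = {
    :keys      => ['/home/my_username/.ssh/id_rsa'],
    :keys_only => true  # only use the keys listed above
  }
end
```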
@@ -14,5 +14,11 @@
   cf.directories do |directory|
     directory.add "/path/to/directory/to/sync"
     directory.add "/path/to/other/directory/to/sync"
+
+    # Exclude files/folders from the sync.
+    # The pattern may be a shell glob pattern (see `File.fnmatch`) or a Regexp.
+    # All patterns will be applied when traversing each added directory.
+    directory.exclude '**/*~'
+    directory.exclude /\/tmp$/
   end
 end
@@ -8,5 +8,13 @@
   rsync.directories do |directory|
     directory.add "/var/apps/my_app/public/uploads"
     directory.add "/var/apps/my_app/logs"
+
+    # Exclude files/folders.
+    # Each pattern will be passed to rsync's `--exclude` option.
+    #
+    # Note: rsync is run using the `--archive` option,
+    # so be sure to read the `FILTER RULES` in `man rsync`.
+    directory.exclude '*~'
+    directory.exclude 'tmp/'
   end
 end
@@ -18,5 +18,13 @@
   rsync.directories do |directory|
     directory.add "/var/apps/my_app/public/uploads"
     directory.add "/var/apps/my_app/logs"
+
+    # Exclude files/folders.
+    # Each pattern will be passed to rsync's `--exclude` option.
+    #
+    # Note: rsync is run using the `--archive` option,
+    # so be sure to read the `FILTER RULES` in `man rsync`.
+    directory.exclude '*~'
+    directory.exclude 'tmp/'
   end
 end
@@ -18,5 +18,13 @@
   rsync.directories do |directory|
     directory.add "/var/apps/my_app/public/uploads"
     directory.add "/var/apps/my_app/logs"
+
+    # Exclude files/folders.
+    # Each pattern will be passed to rsync's `--exclude` option.
+    #
+    # Note: rsync is run using the `--archive` option,
+    # so be sure to read the `FILTER RULES` in `man rsync`.
+    directory.exclude '*~'
+    directory.exclude 'tmp/'
   end
 end
@@ -4,8 +4,12 @@
 # See the documentation on the Wiki for details.
 # https://github.com/meskyanichi/backup/wiki/Syncers
 sync_with Cloud::S3 do |s3|
+  # AWS Credentials
   s3.access_key_id     = "my_access_key_id"
   s3.secret_access_key = "my_secret_access_key"
+  # Or, to use an IAM Profile:
+  # s3.use_iam_profile = true
+
   s3.bucket            = "my-bucket"
   s3.region            = "us-east-1"
   s3.path              = "/backups"
@@ -15,5 +19,11 @@
   s3.directories do |directory|
     directory.add "/path/to/directory/to/sync"
     directory.add "/path/to/other/directory/to/sync"
+
+    # Exclude files/folders from the sync.
+    # The pattern may be a shell glob pattern (see `File.fnmatch`) or a Regexp.
+    # All patterns will be applied when traversing each added directory.
+    directory.exclude '**/*~'
+    directory.exclude /\/tmp$/
   end
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: backup
 version: !ruby/object:Gem::Version
-  version: 3.7.2
+  version: 3.8.0
 platform: ruby
 authors:
 - Michael van Rooijen
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-09-28 00:00:00.000000000 Z
+date: 2013-10-24 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: builder