s3-tar-backup 1.1.0 → 1.1.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 842cea517d3276df06766424c4354d644069df99
-  data.tar.gz: 1ad93ccb5dd87a00b794692643cdf9dfc4936bcd
+  metadata.gz: aa76416c2881739829d7dde209216071f4889caa
+  data.tar.gz: b98758ddff06cf3125180a851a94a2b5f4a97277
 SHA512:
-  metadata.gz: bc7a8b17a71626eef16fc974d4725a2cb11482d4cdb51201093c3a51e1fe12565228666a2302f5d1498f298b8579ceb23c2e913a75682961e5425f00e842ca84
-  data.tar.gz: f7b8d94f5f23c8f234c2f1003b6a48132baacba2df14db0b9754b45cdea812f783e20ffee995b20595864fbf8141280ffd6d65160b33ef097abc1d5ed3966d02
+  metadata.gz: f4c2b4a6fe2035c3461c1745e1b82d273dea3603d096dbcef6f49673111907b0166ea50c95737141912d5de1bae4d19d9ff22f7882dbf1b07633636eff5071a9
+  data.tar.gz: fbc2f9093c5cc8df641316fdde22a484c340f487a14a854ba72805ef902bba32ab807167ce1a0c42b7ccc4d27100adddba2c09067d1d7f2c465318090207bec8
@@ -1,6 +1,6 @@
 The MIT License (MIT)
 
-Copyright (c) <year> <copyright holders>
+Copyright (c) 2012 Antony Male
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
data/README.md CHANGED
@@ -9,16 +9,18 @@ You can then restore the files at a later date.
 
 This tool was built as a replacement for duplicity, after duplicity started throwing errors and generally being a bit unreliable.
 It uses command-line tar to create incremental backup snapshots, and the aws-s3 gem to upload these to S3.
+It can also optionally use command-line gpg to encrypt backups.
 
 In practice, it turns out that this tool has far lower bandwidth and CPU requirements, and can restore a backup in a fraction of the time that duplicity would take.
 
 Installation
 ------------
 
-This tool is not yet a ruby gem, and unless people ask me, it will remain that way.
+This tool is available as a ruby gem, or you can build it yourself.
 
-Therefore, to install:
+To install from rubygems: `gem install s3-tar-backup`.
 
+To build it yourself:
 ```
 $ git clone git://github.com/canton7/s3-tar-backup.git
 $ cd s3-tar-backup
@@ -58,13 +60,20 @@ remove_all_but_n_full = <number>
 
 backup_dir = </path/to/dir>
 dest = <bucket_name>/<path>
+
+; Optional: specifies commands to run before and after each backup
 pre-backup = <some command>
 post-backup = <some command>
 
+; Optional: defaults to bzip2
 compression = <compression_type>
 
+; Optional: defaults to false
 always_full = <bool>
 
+; Optional: defaults to no key
+gpg_key = <key ID>
+
 ; You may have multiple lines of the following types.
 ; Values from here and from your profiles will be combined
 source = </path/to/another/source>
@@ -105,6 +114,10 @@ s3-tar-backup is capable of auto-detecting the format of a previously-backed-up
 `always_full` is an optional key which may have the value `True` or `False`.
 This is used to say that incremental backups should never be used.
 
+`gpg_key` is an optional GPG key ID to use to encrypt backups.
+This key must exist in your keyring.
+By default, no key is used and backups are not encrypted.
+
 `source` contains the folders to be backed up.
 
 `exclude` lines specify files/dirs to exclude from the backup.
@@ -131,10 +144,23 @@ dest = <bucket_name>/<path>
 exclude = </some/dir>
 pre-backup = <some command>
 post-backup = <some command>
+gpg_key = <key ID>
 ```
 
 `profile_name` is the name of the profile. You'll use this later.
 
+### Encryption
+
+`s3-tar-backup` will encrypt your backups if you specify the config key `gpg_key`, which is the ID of the key to use for encrypting backups.
+In order to create an encrypted backup, the public key with this ID must exist in your keyring: it doesn't matter whether or not it has a passphrase.
+In order to restore an encrypted backup, the private key corresponding to the public key which encrypted the backup must exist in your keyring: your `gpg-agent` will prompt you for the passphrase if required.
+The `gpg_key` option is not used when restoring from a backup (instead, gpg works out which key to use to decrypt the backup by looking at the backup itself), which means that you can safely change the key that `s3-tar-backup` uses to encrypt backups without losing access to older backups.
+
+`s3-tar-backup` works out whether or not to try and decrypt a backup by looking at its file extension, which means you can safely enable or disable encryption without losing access to older backups.
+
+To create a key, run `gpg --gen-key`, and follow the prompts.
+Make sure you create a backup of the private key using `gpg -a --export-secret-keys <key ID> > s3-tar-backup-secret-key.asc`.
+
 ### Example config file
 
 ```ini
@@ -150,9 +176,12 @@ backup_dir = /root/.backup
 dest = my-backups/tar
 ; You may prefer bzip2, as it has a much lower CPU cost
 compression = lzma2
+gpg_key = ABCD1234
 
 [profile "www"]
 source = /srv/http
+; Don't encrypt this (for some reason)
+gpg_key =
 
 [profile "home"]
 source = /home/me
@@ -84,9 +84,11 @@ module S3TarBackup
 
     def gen_backup_config(profile, config)
       bucket, dest_prefix = (config.get("profile.#{profile}.dest", false) || config['settings.dest']).split('/', 2)
+      gpg_key = config.get("profile.#{profile}.gpg_key", false) || config['settings.gpg_key']
       backup_config = {
         :backup_dir => config.get("profile.#{profile}.backup_dir", false) || config['settings.backup_dir'],
         :name => profile,
+        :gpg_key => gpg_key && !gpg_key.empty? ? gpg_key : nil,
         :sources => [*config.get("profile.#{profile}.source", [])] + [*config.get("settings.source", [])],
         :exclude => [*config.get("profile.#{profile}.exclude", [])] + [*config.get("settings.exclude", [])],
         :bucket => bucket,
@@ -96,7 +98,7 @@ module S3TarBackup
         :full_if_older_than => config.get("profile.#{profile}.full_if_older_than", false) || config['settings.full_if_older_than'],
         :remove_older_than => config.get("profile.#{profile}.remove_older_than", false) || config.get('settings.remove_older_than', false),
         :remove_all_but_n_full => config.get("profile.#{profile}.remove_all_but_n_full", false) || config.get('settings.remove_all_but_n_full', false),
-        :compression => (config.get("profile.#{profile}.compression", false) || config.get('settings.compression', 'gzip')).to_sym,
+        :compression => (config.get("profile.#{profile}.compression", false) || config.get('settings.compression', 'bzip2')).to_sym,
         :always_full => config.get('settings.always_full', false) || config.get("profile.#{profile}.always_full", false),
       }
       backup_config
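The profile-level `gpg_key` lookup above treats an explicitly empty value differently from a missing one. A minimal standalone sketch of that resolution logic (the helper name and sample values are hypothetical, not part of the gem):

```ruby
# Sketch of the gpg_key resolution in gen_backup_config: a profile value wins
# over the global [settings] value, and an explicitly empty profile value
# (`gpg_key =`) disables encryption for that profile.
def resolve_gpg_key(profile_value, settings_value)
  key = profile_value || settings_value
  key && !key.empty? ? key : nil
end

puts resolve_gpg_key(nil, 'ABCD1234').inspect  # "ABCD1234" -- falls back to [settings]
puts resolve_gpg_key('', 'ABCD1234').inspect   # nil -- profile opts out of encryption
puts resolve_gpg_key(nil, nil).inspect         # nil -- no key configured anywhere
```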
@@ -106,7 +108,7 @@ module S3TarBackup
       puts "===== Backing up profile #{backup_config[:name]} ====="
       backup_config[:pre_backup].each_with_index do |cmd, i|
         puts "Executing pre-backup hook #{i+1}"
-        system(cmd)
+        exec(cmd)
       end
       full_required = full_required?(backup_config[:full_if_older_than], prev_backups)
       puts "Last full backup is too old. Forcing a full backup" if full_required && !opts[:full] && backup_config[:always_full]
@@ -117,7 +119,7 @@ module S3TarBackup
       end
       backup_config[:post_backup].each_with_index do |cmd, i|
         puts "Executing post-backup hook #{i+1}"
-        system(cmd)
+        exec(cmd)
       end
     end
 
@@ -155,7 +157,7 @@ module S3TarBackup
     # backup_dir, name, sources, exclude, bucket, dest_prefix
     def backup_incr(config, verbose=false)
       puts "Starting new incremental backup"
-      backup = Backup.new(config[:backup_dir], config[:name], config[:sources], config[:exclude], config[:compression])
+      backup = Backup.new(config[:backup_dir], config[:name], config[:sources], config[:exclude], config[:compression], config[:gpg_key])
 
       # Try and get hold of the snar file
       unless backup.snar_exists?
@@ -179,14 +181,14 @@ module S3TarBackup
 
     def backup_full(config, verbose=false)
       puts "Starting new full backup"
-      backup = Backup.new(config[:backup_dir], config[:name], config[:sources], config[:exclude], config[:compression])
+      backup = Backup.new(config[:backup_dir], config[:name], config[:sources], config[:exclude], config[:compression], config[:gpg_key])
       # Nuke the snar file -- forces a full backup
       File.delete(backup.snar_path) if File.exists?(backup.snar_path)
       backup(config, backup, verbose)
     end
 
     def backup(config, backup, verbose=false)
-      system(backup.backup_cmd(verbose))
+      exec(backup.backup_cmd(verbose))
       puts "Uploading #{config[:bucket]}/#{config[:dest_prefix]}/#{File.basename(backup.archive)} (#{bytes_to_human(File.size(backup.archive))})"
       upload(backup.archive, config[:bucket], "#{config[:dest_prefix]}/#{File.basename(backup.archive)}")
       puts "Uploading snar (#{bytes_to_human(File.size(backup.snar_path))})"
@@ -232,7 +234,7 @@ module S3TarBackup
       restore_dir = opts[:restore].chomp('/') << '/'
 
       Dir.mkdir(restore_dir) unless Dir.exists?(restore_dir)
-      raise "Detination dir is not a directory" unless File.directory?(restore_dir)
+      raise "Destination dir is not a directory" unless File.directory?(restore_dir)
 
       prev_backups[restore_start_index..restore_end_index].each do |object|
         puts "Fetching #{backup_config[:bucket]}/#{backup_config[:dest_prefix]}/#{object[:name]} (#{bytes_to_human(object[:size])})"
@@ -243,7 +245,7 @@ module S3TarBackup
         end
       end
       puts "Extracting..."
-      system(Backup.restore_cmd(restore_dir, dl_file, opts[:verbose]))
+      exec(Backup.restore_cmd(restore_dir, dl_file, opts[:verbose]))
 
       File.delete(dl_file)
     end
@@ -252,7 +254,7 @@ module S3TarBackup
     def perform_list_backups(prev_backups, backup_config)
       # prev_backups always contains just the files for the current profile
       puts "===== Backups list for #{backup_config[:name]} ====="
-      puts "Type: N: Date:#{' '*18}Size: Chain Size: Format:\n\n"
+      puts "Type: N: Date:#{' '*18}Size: Chain Size: Format: Encrypted:\n\n"
       prev_type = ''
       total_size = 0
       chain_length = 0
@@ -268,7 +270,7 @@ module S3TarBackup
         chain_length_str = (chain_length == 0 ? '' : chain_length.to_s).ljust(3)
         chain_cum_size_str = (object[:type] == :full ? '' : bytes_to_human(chain_cum_size)).ljust(8)
         puts "#{type} #{chain_length_str} #{object[:date].strftime('%F %T')} #{bytes_to_human(object[:size]).ljust(8)} " \
-          "#{chain_cum_size_str} (#{object[:compression]})"
+          "#{chain_cum_size_str} #{object[:compression].to_s.ljust(7)} #{object[:encryption] ? 'Y' : 'N'}"
         total_size += object[:size]
       end
       puts "\n"
@@ -308,5 +310,10 @@ module S3TarBackup
       end
       format("%.2f", n) << %w(B KB MB GB TB)[count]
     end
+
+    def exec(cmd)
+      puts "Executing: #{cmd}"
+      system(cmd)
+    end
   end
 end
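The new `exec` helper replaces the bare `system` calls above so every shell command is echoed before it runs. A minimal sketch (hypothetical `Runner` class, not the gem's code) of why the existing call sites pick it up without being renamed: an instance method named `exec` is found before `Kernel#exec` in Ruby's method lookup.

```ruby
# Hypothetical stand-in for the class above: defining an instance method named
# `exec` means calls like `exec(cmd)` inside the class hit this logging wrapper
# rather than Kernel#exec (which would replace the current process).
class Runner
  def exec(cmd)
    puts "Executing: #{cmd}"
    system(cmd)               # Kernel#system: run the command, return true/false
  end

  def run_hooks(cmds)
    cmds.each_with_index do |cmd, i|
      puts "Executing hook #{i + 1}"
      exec(cmd)               # resolves to Runner#exec, not Kernel#exec
    end
  end
end

Runner.new.run_hooks(['echo pre-backup', 'echo post-backup'])
```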
@@ -1,4 +1,4 @@
-require 'aws/s3'
+require 'aws-sdk'
 
 module S3TarBackup
   class Backup
@@ -10,6 +10,7 @@ module S3TarBackup
     @archive
     @compression_flag
     @compression_ext
+    @gpg_key
 
     COMPRESSIONS = {
       :gzip => {:flag => '-z', :ext => 'tar.gz'},
@@ -18,13 +19,16 @@ module S3TarBackup
       :lzma2 => {:flag => '-J', :ext => 'tar.xz'}
     }
 
+    ENCRYPTED_EXTENSION = 'asc'
 
-    def initialize(backup_dir, name, sources, exclude, compression=:bzip2)
+
+    def initialize(backup_dir, name, sources, exclude, compression=:bzip2, gpg_key=nil)
       @backup_dir, @name, @sources, @exclude = backup_dir, name, [*sources], [*exclude]
       raise "Unknown compression #{compression}. Valid options are #{COMPRESSIONS.keys.join(', ')}" unless COMPRESSIONS.has_key?(compression)
       @compression_flag = COMPRESSIONS[compression][:flag]
       @compression_ext = COMPRESSIONS[compression][:ext]
       @time = Time.now
+      @gpg_key = gpg_key
 
       Dir.mkdir(@backup_dir) unless File.directory?(@backup_dir)
     end
@@ -44,20 +48,24 @@ module S3TarBackup
     def archive
       return @archive if @archive
       type = snar_exists? ? 'incr' : 'full'
-      File.join(@backup_dir, "backup-#{@name}-#{@time.strftime('%Y%m%d_%H%M%S')}-#{type}.#{@compression_ext}")
+      encrypted_bit = @gpg_key ? ".#{ENCRYPTED_EXTENSION}" : ''
+      File.join(@backup_dir, "backup-#{@name}-#{@time.strftime('%Y%m%d_%H%M%S')}-#{type}.#{@compression_ext}#{encrypted_bit}")
     end
 
     def backup_cmd(verbose=false)
-      exclude = @exclude.map{ |e| "\"#{e}\""}.join(' ')
+      exclude = @exclude.map{ |e| " --exclude \"#{e}\""}.join
       sources = @sources.map{ |s| "\"#{s}\""}.join(' ')
       @archive = archive
-      "tar c#{verbose ? 'v' : ''}f \"#{@archive}\" #{@compression_flag} -g \"#{snar_path}\" --exclude #{exclude} --no-check-device #{sources}"
+      tar_archive = @gpg_key ? '' : "f \"#{@archive}\""
+      gpg_cmd = @gpg_key ? " | gpg -r #{@gpg_key} -o \"#{@archive}\" --always-trust --yes --batch --no-tty -e" : ''
+      "tar c#{verbose ? 'v' : ''}#{tar_archive} #{@compression_flag} -g \"#{snar_path}\"#{exclude} --no-check-device #{sources}#{gpg_cmd}"
     end
 
     def self.parse_object(object, profile)
       name = File.basename(object.key)
-      match = name.match(/^backup-([\w\-]+)-(\d\d\d\d)(\d\d)(\d\d)_(\d\d)(\d\d)(\d\d)-(\w+)\.(.*)$/)
+      match = name.match(/^backup-([\w\-]+)-(\d\d\d\d)(\d\d)(\d\d)_(\d\d)(\d\d)(\d\d)-(\w+)\.(.*?)(\.#{ENCRYPTED_EXTENSION})?$/)
       return nil unless match && match[1] == profile
+
       return {
         :type => match[8].to_sym,
         :date => Time.new(match[2].to_i, match[3].to_i, match[4].to_i, match[5].to_i, match[6].to_i, match[7].to_i),
@@ -66,14 +74,17 @@ module S3TarBackup
         :size => object.content_length,
         :profile => match[1],
         :compression => COMPRESSIONS.find{ |k,v| v[:ext] == match[9] }[0],
+        :encryption => !match[10].nil?
       }
     end
 
     # No real point in creating a whole new class for this one
     def self.restore_cmd(restore_into, restore_from, verbose=false)
-      ext = restore_from.match(/[^\.\\\/]+\.(.*)$/)[1]
+      ext, encrypted = restore_from.match(/[^\.\\\/]+\.(.*?)(\.#{ENCRYPTED_EXTENSION})?$/)[1..2]
       compression_flag = COMPRESSIONS.find{ |k,v| v[:ext] == ext }[1][:flag]
-      "tar xp#{verbose ? 'v' : ''}f #{restore_from} #{compression_flag} -G -C #{restore_into}"
+      tar_archive = encrypted ? '' : "f \"#{restore_from}\""
+      gpg_cmd = encrypted ? "gpg --yes --batch --no-tty -d \"#{restore_from}\" | " : ''
+      "#{gpg_cmd}tar xp#{verbose ? 'v' : ''}#{tar_archive} #{compression_flag} -G -C #{restore_into}"
     end
 
   end
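To make the new `backup_cmd`/`restore_cmd` behaviour concrete, here is a standalone sketch (hypothetical key ID, archive name, source and restore paths; it only builds and prints strings, it does not shell out) of the pipelines those methods produce once a `gpg_key` is set:

```ruby
# Illustrative only: mirrors the command shapes built by Backup#backup_cmd and
# Backup.restore_cmd when encryption is enabled. With a gpg_key, tar writes the
# archive to stdout (no `f` flag) and gpg writes the `.asc` file; on restore,
# gpg decrypts to stdout and tar reads the stream.
gpg_key = 'ABCD1234'                                      # hypothetical key ID
archive = 'backup-main-20160121_120000-full.tar.bz2.asc'  # note the .asc suffix
snar    = 'backup-main.snar'

backup_cmd = "tar c -j -g \"#{snar}\" --no-check-device \"/etc\"" \
             " | gpg -r #{gpg_key} -o \"#{archive}\" --always-trust --yes --batch --no-tty -e"
restore_cmd = "gpg --yes --batch --no-tty -d \"#{archive}\"" \
              " | tar xp -j -G -C /tmp/restore/"

puts backup_cmd
puts restore_cmd
```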
@@ -1,3 +1,3 @@
 module S3TarBackup
-  VERSION = '1.1.0'
+  VERSION = '1.1.1'
 end
metadata CHANGED
@@ -1,27 +1,27 @@
 --- !ruby/object:Gem::Specification
 name: s3-tar-backup
 version: !ruby/object:Gem::Version
-  version: 1.1.0
+  version: 1.1.1
 platform: ruby
 authors:
 - Antony Male
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-03-16 00:00:00.000000000 Z
+date: 2016-01-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: aws-sdk
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ~>
       - !ruby/object:Gem::Version
         version: '1.0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ~>
       - !ruby/object:Gem::Version
         version: '1.0'
 description: Uses tar's incremental backups to backup data to, and restore from, Amazon's
@@ -50,17 +50,17 @@ require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">="
+  - - '>='
     - !ruby/object:Gem::Version
       version: 1.9.2
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">="
+  - - '>='
     - !ruby/object:Gem::Version
      version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.2.0
+rubygems_version: 2.5.0
 signing_key:
 specification_version: 4
 summary: 's3-tar-backup: Incrementally backup/restore to Amazon S3 using tar'