backup 3.0.1 → 3.0.2.build.0

data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    backup (3.0.0)
+    backup (3.0.2.build.0)
       dropbox (~> 1.2.3)
       fog (~> 0.5.3)
       mail (~> 2.2.15)
data/README.md CHANGED
@@ -173,7 +173,7 @@ Below you see a sample configuration file you could create for Backup 3. Just re
 
 ### Explanation for the above example
 
-First it dumps all the tables inside the MySQL database "my_sample_mysql_db", except for the "logs" table. It also dumps the MongoDB database "my_sample_mongo_db", but only the collections "users", "events" and "posts". After that it'll create a "user_avatars.tar" archive with all the uploaded avatars of the users. After that it'll create a "logs.tar" archive with the "production.log", "newrelic_agent.log" and "other.log" logs. After that it'll encrypt the whole backup file (everything included: databases, archives) using "OpenSSL". Now the Backup can only be extracted when you know the password to decrypt it ("my_secret_password" in this case). After that it'll compress the backup file using Gzip (with the mode set to "best", rather than "fast" for best compression). Then it'll store the backup file to Amazon S3 in to 'my_bucket/backups'. Next it'll also transfer a copy of the backup file to a remote server using the RSync protocol, and it'll be stored in to the "$HOME/backups/" path on this server. Finally, it'll notify me by email if the backup raises an error/exception during the process indicating that something went wrong. (When setting `mail.on_success = true` it'll also notify you of every successful backup)
+First it dumps all the tables inside the MySQL database "my_sample_mysql_db", except for the "logs" table. It also dumps the MongoDB database "my_sample_mongo_db", but only the collections "users", "events" and "posts". After that it'll create a "user_avatars.tar" archive with all the uploaded avatars of the users. After that it'll create a "logs.tar" archive with the "production.log", "newrelic_agent.log" and "other.log" logs. After that it'll compress the backup file using Gzip (with the mode set to "best", rather than "fast" for best compression). After that it'll encrypt the whole backup file (everything included: databases, archives) using "OpenSSL". Now the Backup can only be extracted when you know the password to decrypt it ("my_secret_password" in this case). Then it'll store the backup file to Amazon S3 in to 'my_bucket/backups'. Next it'll also transfer a copy of the backup file to a remote server using the RSync protocol, and it'll be stored in to the "$HOME/backups/" path on this server. Finally, it'll notify me by email if the backup raises an error/exception during the process indicating that something went wrong. (When setting `mail.on_success = true` it'll also notify you of every successful backup)
 
 ### Things to note
 
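The sample configuration file mentioned in the hunk header sits higher up in the README and is not included in this diff. For orientation, a Backup 3 configuration covering the same steps could look roughly like the sketch below; all values are placeholders and the adapter/setter names are recalled from the Backup 3 DSL rather than copied from the README, so treat it as illustrative only.

```ruby
# Illustrative sketch only -- the real sample lives above this hunk in the
# README and may differ. All values are placeholders.
Backup::Model.new(:my_backup, 'My Backup') do

  database MySQL do |db|
    db.name        = 'my_sample_mysql_db'
    db.username    = 'user'
    db.password    = 'password'
    db.skip_tables = ['logs']
  end

  database MongoDB do |db|
    db.name             = 'my_sample_mongo_db'
    db.only_collections = ['users', 'events', 'posts']
  end

  archive :user_avatars do |archive|
    archive.add '/var/apps/my_app/public/uploads/avatars'
  end

  archive :logs do |archive|
    archive.add '/var/apps/my_app/log/production.log'
    archive.add '/var/apps/my_app/log/newrelic_agent.log'
    archive.add '/var/apps/my_app/log/other.log'
  end

  compress_with Gzip do |compression|
    compression.best = true
  end

  encrypt_with OpenSSL do |encryption|
    encryption.password = 'my_secret_password'
  end

  store_with S3 do |s3|
    s3.access_key_id     = 'my_access_key_id'
    s3.secret_access_key = 'my_secret_access_key'
    s3.bucket            = 'my_bucket'
    s3.path              = 'backups'
  end

  store_with RSync do |server|
    server.username = 'my_username'
    server.password = 'my_password'
    server.ip       = 'example.com'
    server.path     = '~/backups/'
  end

  notify_by Mail do |mail|
    mail.on_success = false
    mail.on_failure = true
  end

end
```

Note that regardless of the order in which the `compress_with` and `encrypt_with` blocks are declared, as of this release the compressor always runs before the encryptor (see the `data/lib/backup/model.rb` change further down).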
data/bin/backup CHANGED
@@ -22,6 +22,7 @@ require File.expand_path("../../lib/backup", __FILE__)
 ##
 # Build the Backup Command Line Interface using Thor
 class BackupCLI < Thor
+  include Thor::Actions
 
   TEMPLATE_DIR = File.expand_path("../../lib/templates", __FILE__)
 
@@ -128,12 +129,14 @@ class BackupCLI < Thor
     temp_file.close
     if options[:path]
       FileUtils.mkdir_p(options[:path])
+      overwrite?(File.join(Backup::PATH, 'config.rb'))
       File.open(File.join(options[:path], 'config.rb'), 'w') do |file|
         file.write( File.read(temp_file.path) )
         puts "Generated configuration file in '#{File.join(options[:path], 'config.rb')}'"
       end
     else
       FileUtils.mkdir_p(Backup::PATH)
+      overwrite?(File.join(Backup::PATH, 'config.rb'))
       File.open(File.join(Backup::PATH, 'config.rb'), 'w') do |file|
         file.write( File.read(temp_file.path) )
         puts "Generated configuration file in '#{File.join(Backup::PATH, 'config.rb')}'"
@@ -172,6 +175,16 @@ class BackupCLI < Thor
     puts "Backup #{Backup::Version.current}"
   end
 
+  private
+
+  ##
+  # Helper method for asking the user if he/she wants to overwrite the file
+  def overwrite?(path)
+    if File.exist?(path)
+      exit if no? "A configuration file already exists in #{ path }. Do you want to overwrite? [y/n]"
+    end
+  end
+
 end
 
 ##
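The new `overwrite?` helper relies on Thor's interactive `no?` prompt (the `include Thor::Actions` added above appears to belong to the same change). A minimal, self-contained sketch of the same pattern, using a hypothetical `ExampleCLI` that is not part of the gem:

```ruby
# Standalone sketch of the "ask before overwriting" pattern.
# ExampleCLI is hypothetical and not part of the backup gem.
require 'thor'

class ExampleCLI < Thor
  include Thor::Actions

  desc 'generate', 'Write example.rb, asking before overwriting an existing file'
  def generate
    path = File.expand_path('example.rb')
    if File.exist?(path)
      # no? returns true when the user answers "n"; bail out in that case
      exit if no? "#{path} already exists. Do you want to overwrite? [y/n]"
    end
    File.open(path, 'w') { |file| file.write("# generated\n") }
    puts "Generated #{path}"
  end
end

ExampleCLI.start(ARGV)
```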
data/lib/backup/model.rb CHANGED
@@ -197,8 +197,8 @@ module Backup
       databases.each { |d| d.perform! }
       archives.each { |a| a.perform! }
       package!
-      encryptors.each { |e| e.perform! }
       compressors.each { |c| c.perform! }
+      encryptors.each { |e| e.perform! }
       storages.each { |s| s.perform! }
       notifiers.each { |n| n.perform!(self) }
       clean!
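This swap makes the compressor run before the encryptor, in line with the updated README text above: compressing after encryption gains almost nothing, because ciphertext is statistically close to random data. A small standalone illustration (not taken from the gem):

```ruby
# Standalone illustration (not from the gem): deflate shrinks plain data a
# lot, but ciphertext looks random, so compressing after encryption is
# pointless -- hence: compress first, then encrypt.
require 'zlib'
require 'openssl'
require 'securerandom'

data = "the quick brown fox jumps over the lazy dog\n" * 10_000

cipher = OpenSSL::Cipher.new('aes-256-cbc')
cipher.encrypt
cipher.key = SecureRandom.random_bytes(32)
cipher.iv  = SecureRandom.random_bytes(16)
ciphertext = cipher.update(data) + cipher.final

puts "original:              #{data.bytesize} bytes"
puts "compressed plaintext:  #{Zlib::Deflate.deflate(data).bytesize} bytes"       # small
puts "compressed ciphertext: #{Zlib::Deflate.deflate(ciphertext).bytesize} bytes" # ~original size
```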
data/lib/backup/version.rb CHANGED
@@ -22,7 +22,7 @@ module Backup
   # Defines the patch version
   # BUILD:
   # Defines the build version ( use 'false' if no build )
-  MAJOR, MINOR, PATCH, BUILD = 3, 0, 1, false
+  MAJOR, MINOR, PATCH, BUILD = 3, 0, 2, 0
 
   # ========================================================= #
   # ADJUST THE CONSTANTS ABOVE TO CHANGE THE BACKUP VERSION #
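These constants feed `Backup::Version.current`, which `bin/backup` prints above. The actual implementation of `current` is not part of this diff; a sketch of how the four constants could be assembled into the `3.0.2.build.0` string seen in this release:

```ruby
# Sketch only -- the real Backup::Version.current is not shown in this diff.
module Backup
  module Version
    MAJOR, MINOR, PATCH, BUILD = 3, 0, 2, 0

    # "3.0.2" for regular releases, "3.0.2.build.0" when BUILD is set
    def self.current
      version = "#{MAJOR}.#{MINOR}.#{PATCH}"
      BUILD == false ? version : "#{version}.build.#{BUILD}"
    end
  end
end

puts Backup::Version.current #=> 3.0.2.build.0
```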
@@ -71,7 +71,7 @@ describe Backup::Storage::CloudFiles do
   it 'should transfer the provided file to the container' do
     Backup::Model.new('blah', 'blah') {}
     file = mock("Backup::Storage::CloudFiles::File")
-    File.expects(:read).with("#{File.join(Backup::TMP_PATH, "#{ Backup::TIME }.#{ Backup::TRIGGER}")}.tar").returns(file)
+    File.expects(:open).with("#{File.join(Backup::TMP_PATH, "#{ Backup::TIME }.#{ Backup::TRIGGER}")}.tar").returns(file)
     cf.expects(:remote_file).returns("#{ Backup::TIME }.#{ Backup::TRIGGER }.tar").twice
     connection.expects(:put_object).with('my_container', "backups/myapp/#{ Backup::TIME }.#{ Backup::TRIGGER }.tar", file)
     cf.send(:transfer!)
@@ -77,7 +77,7 @@ describe Backup::Storage::S3 do
   it 'should transfer the provided file to the bucket' do
     Backup::Model.new('blah', 'blah') {}
     file = mock("Backup::Storage::S3::File")
-    File.expects(:read).with("#{File.join(Backup::TMP_PATH, "#{ Backup::TIME }.#{ Backup::TRIGGER}")}.tar").returns(file)
+    File.expects(:open).with("#{File.join(Backup::TMP_PATH, "#{ Backup::TIME }.#{ Backup::TRIGGER}")}.tar").returns(file)
     s3.expects(:remote_file).returns("#{ Backup::TIME }.#{ Backup::TRIGGER }.tar").twice
     connection.expects(:sync_clock)
     connection.expects(:put_object).with('my-bucket', "backups/myapp/#{ Backup::TIME }.#{ Backup::TRIGGER }.tar", file)
metadata CHANGED
@@ -1,8 +1,8 @@
 --- !ruby/object:Gem::Specification
 name: backup
 version: !ruby/object:Gem::Version
-  prerelease:
-  version: 3.0.1
+  prerelease: 6
+  version: 3.0.2.build.0
 platform: ruby
 authors:
 - Michael van Rooijen
@@ -10,7 +10,7 @@ autorequire:
 bindir: bin
 cert_chain: []
 
-date: 2011-03-10 00:00:00 +01:00
+date: 2011-03-11 00:00:00 +01:00
 default_executable:
 dependencies:
 - !ruby/object:Gem::Dependency
@@ -231,9 +231,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
 required_rubygems_version: !ruby/object:Gem::Requirement
   none: false
   requirements:
-  - - ">="
+  - - ">"
     - !ruby/object:Gem::Version
-      version: "0"
+      version: 1.3.1
 requirements: []
 
 rubyforge_project:
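Because the new version string contains a non-numeric segment (`build`), RubyGems treats `3.0.2.build.0` as a prerelease version, which is also why the generated metadata now requires RubyGems `> 1.3.1`. Installing this release therefore needs the prerelease flag, for example `gem install backup --pre`, or an explicit `gem install backup -v 3.0.2.build.0`.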