backup-pants 0.1

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
data/bin/backup-pants ADDED
@@ -0,0 +1,6 @@
+ #!/usr/bin/env ruby
+ require 'backup-pants'
+
+ if path = ARGV[0]
+   BackupPants::Backup.new(path).run()
+ end
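Note: the executable only runs a backup when a config path is passed on the command line (for example, backup-pants /etc/pants.conf, a hypothetical path); the 'pants.conf' fallback in Backup#initialize below is only reached when the class is instantiated directly with a nil path.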
data/lib/backup-pants/backup.rb ADDED
@@ -0,0 +1,104 @@
+ module BackupPants
+   class Backup
+     def initialize(conf_path)
+       @conf_path = conf_path ? conf_path : 'pants.conf'
+     end
+
+     def run
+       self.dirs.each {|dir| backup_dir(dir)}
+       self.dbs.each {|db| backup_db(db)}
+     end
+
+     protected
+     def backup_dir(dir)
+       basename = File.basename(dir)
+       tar_chdir = File.dirname(dir)
+       tar_destination = self.tmp_dir + '/' + basename + '.tgz'
+
+       # make sure tar_destination dir is created.
+       FileUtils.mkdir_p(File.dirname(tar_destination))
+
+       run = system "tar -cpzf #{tar_destination} -C #{tar_chdir} #{basename}"
+       unless run
+         puts "tar of #{dir} failed"
+         return
+       end
+
+       dir_prefix = self.date_folder_name + '/dirs/'
+       push_to_s3(tar_destination, dir_prefix)
+     end
+
+     def backup_db(db)
+       gzip_destination = "#{self.tmp_dir}/#{db}.sql.gz"
+
+       mysqldump = "mysqldump --opt --skip-add-locks"
+       mysqldump += " -u #{self.db_username}" unless self.db_username.nil?
+       mysqldump += " -p'#{self.db_password}'" unless self.db_password.nil?
+       mysqldump += " #{db}"
+       mysqldump += " | gzip > #{gzip_destination}"
+       run = system mysqldump
+       unless run
+         print "mysqldump of #{db} failed"
+         return
+       end
+
+       dir_prefix = self.date_folder_name + '/dbs/'
+       push_to_s3(gzip_destination, dir_prefix)
+     end
+
+     def push_to_s3(file_path, dir_prefix='')
+       self.s3.upload(self.s3_bucket, file_path, dir_prefix)
+     end
+
+     def s3
+       @s3 ||= BackupPants::S3.new(self.s3_key, self.s3_secret)
+     end
+
+     def conf
+       @conf ||= YAML.load_file(@conf_path)
+     end
+
+     def s3_bucket
+       self.conf['s3_bucket'] ? self.conf['s3_bucket'] : 'backuppants'
+     end
+
+     def s3_key
+       self.conf['s3_key'] ? self.conf['s3_key'] : ''
+     end
+
+     def s3_secret
+       self.conf['s3_secret'] ? self.conf['s3_secret'] : ''
+     end
+
+     def db_username
+       self.conf['db_username'] ? self.conf['db_username'] : nil
+     end
+
+     def db_password
+       self.conf['db_password'] ? self.conf['db_password'] : nil
+     end
+
+     def dirs
+       @dirs ||= self.conf['dirs'] ? self.conf['dirs'] : []
+     end
+
+     def dbs
+       @dbs ||= self.conf['dbs'] ? self.conf['dbs'] : []
+     end
+
+     def tmp_dir
+       self.conf['tmp_dir'] ? self.conf['tmp_dir'] : '/tmp/backuppants'
+     end
+
+     # Folder name based on today's date
+     def date_folder_name
+       return @date_folder_name unless @date_folder_name.nil?
+       time = Time.new
+       @date_folder_name = time.year.to_s + time.month.to_s + time.day.to_s
+     end
+
+     def s3_bucket_name
+       self.conf['s3_bucket'] ? self.conf['s3_bucket'] : 'backup_pants'
+     end
+   end
+ end
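For reference, Backup reads all of its settings from the YAML file passed to the executable. The sketch below (not part of the published gem; every value is a placeholder) writes an example pants.conf covering each key the class looks up; omitted keys fall back to the defaults shown in the reader methods above.

    # Sketch only: write an example pants.conf with the keys Backup reads.
    # All values are placeholders, not part of the gem.
    require 'yaml'

    conf = {
      'dirs'        => ['/var/www/example'],   # directories to tar and push to S3
      'dbs'         => ['example_production'], # MySQL databases to mysqldump
      'db_username' => 'root',
      'db_password' => 'secret',
      's3_key'      => 'YOUR_AWS_ACCESS_KEY',
      's3_secret'   => 'YOUR_AWS_SECRET_KEY',
      's3_bucket'   => 'backuppants',          # default bucket when omitted
      'tmp_dir'     => '/tmp/backuppants'      # default scratch dir when omitted
    }

    File.open('pants.conf', 'w') { |f| f.write(conf.to_yaml) }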
data/lib/backup-pants/s3.rb ADDED
@@ -0,0 +1,35 @@
+ module BackupPants
+   class S3
+     def initialize(key, secret)
+       @key = key
+       @secret = secret
+       @prepped_buckets = {}
+       establish_connection
+     end
+
+     # dir_prefix should end with slash: 'hello/hi/'
+     def upload(bucket, file, dir_prefix='')
+       prep_bucket(bucket)
+       filename = file.split(/\//).last
+       AWS::S3::S3Object.store(dir_prefix + filename, open(file), bucket)
+     end
+
+     protected
+     def prep_bucket(bucket)
+       return if @prepped_buckets.has_key?(bucket)
+       begin
+         AWS::S3::Bucket.find(bucket)
+       rescue
+         AWS::S3::Bucket.create(bucket)
+       end
+       @prepped_buckets[bucket] = true
+     end
+
+     def establish_connection
+       AWS::S3::Base.establish_connection!(
+         :access_key_id => @key,
+         :secret_access_key => @secret
+       )
+     end
+   end
+ end
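The S3 wrapper can also be used on its own. A minimal sketch, assuming the gem is installed and using placeholder credentials, bucket, and file path (none of this appears in the gem itself):

    # Sketch only: upload a single file through the gem's S3 wrapper.
    require 'backup-pants'

    s3 = BackupPants::S3.new('YOUR_AWS_ACCESS_KEY', 'YOUR_AWS_SECRET_KEY')
    # Per the comment above, dir_prefix should end with a slash.
    s3.upload('backuppants', '/tmp/backuppants/example.tgz', 'manual/')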
data/lib/backup-pants.rb ADDED
@@ -0,0 +1,6 @@
+ require 'yaml'
+ require 'fileutils'
+ require 'aws/s3'
+
+ require 'backup-pants/backup'
+ require 'backup-pants/s3'
metadata ADDED
@@ -0,0 +1,85 @@
+ --- !ruby/object:Gem::Specification
+ name: backup-pants
+ version: !ruby/object:Gem::Version
+   hash: 9
+   prerelease: false
+   segments:
+   - 0
+   - 1
+   version: "0.1"
+ platform: ruby
+ authors:
+ - Ivan Kanevski
+ autorequire:
+ bindir: bin
+ cert_chain: []
+
+ date: 2010-09-28 00:00:00 -07:00
+ default_executable:
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: aws-s3
+   prerelease: false
+   requirement: &id001 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         hash: 3
+         segments:
+         - 0
+         version: "0"
+   type: :runtime
+   version_requirements: *id001
+ description: Nice for backing up low priority personal servers with multiple sites & accounts
+ email:
+ - kanevski@gmail.com
+ executables:
+ - backup-pants
+ extensions: []
+
+ extra_rdoc_files: []
+
+ files:
+ - bin/backup-pants
+ - lib/backup-pants.rb
+ - lib/backup-pants/backup.rb
+ - lib/backup-pants/s3.rb
+ has_rdoc: true
+ homepage: http://github.com/ephramzerb/backup-pants
+ licenses: []
+
+ post_install_message:
+ rdoc_options: []
+
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       hash: 3
+       segments:
+       - 0
+       version: "0"
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       hash: 23
+       segments:
+       - 1
+       - 3
+       - 6
+       version: 1.3.6
+ requirements: []
+
+ rubyforge_project:
+ rubygems_version: 1.3.7
+ signing_key:
+ specification_version: 3
+ summary: Daily backups of dirs and mysql databases to S3
+ test_files: []
+