heroku-mongo-backup 0.4.31 → 0.4.32
Sign up to get free protection for your applications and to get access to all the features.
- data/README.md +6 -1
- data/heroku-mongo-backup.gemspec +2 -2
- data/lib/heroku-mongo-backup.rb +15 -3
- data/lib/s3_helpers.rb +21 -21
- metadata +8 -7
data/README.md
CHANGED
@@ -45,6 +45,11 @@ If you want to automatically remove old backup files pass ```MAX_BACKUPS``` para
|
|
45
45
|
|
46
46
|
* ```heroku run rake mongo:backup MAX_BACKUPS=7```
|
47
47
|
|
48
|
+
If you're uploading to S3, backup files will be stored as ```backups/YYYY-MM-DD_hh-mm-ss.gz``` by default. To change the directory name, pass in the ```S3_BACKUP_DIR``` parameter:
|
49
|
+
|
50
|
+
* ```heroku run rake mongo:backup S3_BACKUP_DIR=daily```
|
51
|
+
* Backup files would then be stored as ```daily/backup-file-name.gz``` instead of ```backups/backup-file-name.gz```.
|
52
|
+
|
48
53
|
Restore from backup:
|
49
54
|
|
50
55
|
* ```heroku run rake mongo:restore FILE=backup-file-name.gz```
|
@@ -66,7 +71,7 @@ For Rails 2 add this to your Rakefile to import rake tasks:
|
|
66
71
|
5. [wolfpakz](https://github.com/wolfpakz "Dan Porter") - Rails2 support
|
67
72
|
6. [solacreative](http://sola-la.com/creative "Taro Murao") - Max backups feature for aws/s3 and s3 gems
|
68
73
|
7. [aarti](https://github.com/aarti "aarti") - minor fixes
|
69
|
-
|
74
|
+
8. [strayduy](https://github.com/strayduy "strayduy") - [Configurable S3 directory name](https://github.com/alexkravets/heroku-mongo-backup/pull/17)
|
70
75
|
|
71
76
|
|
72
77
|
|
data/heroku-mongo-backup.gemspec
CHANGED
@@ -1,10 +1,10 @@
|
|
1
1
|
Gem::Specification.new do |s|
|
2
2
|
s.name = 'heroku-mongo-backup'
|
3
|
-
s.version = '0.4.31'
|
3
|
+
s.version = '0.4.32'
|
4
4
|
s.summary = 'Rake task backups mongo database on Heroku and push gzipped file to Amazon S3 or FTP.'
|
5
5
|
s.description = 'Rake task for backing up mongo database on heroku and push it to S3 or FTP. Library can be used as rake task or be easily integrated into daily cron job.'
|
6
6
|
|
7
|
-
s.authors = ['Alex Kravets', 'matyi', 'Stef Lewandowski', 'David Hall', 'Dan Porter', 'aarti']
|
7
|
+
s.authors = ['Alex Kravets', 'matyi', 'Stef Lewandowski', 'David Hall', 'Dan Porter', 'aarti', 'strayduy']
|
8
8
|
s.email = 'santyor@gmail.com'
|
9
9
|
s.homepage = 'https://github.com/alexkravets/heroku-mongo-backup'
|
10
10
|
|
data/lib/heroku-mongo-backup.rb
CHANGED
@@ -118,6 +118,18 @@ module HerokuMongoBackup
|
|
118
118
|
bucket = ENV['S3_BUCKET']
|
119
119
|
end
|
120
120
|
|
121
|
+
dir_name = ENV['S3_BACKUP_DIR']
|
122
|
+
if dir_name.nil?
|
123
|
+
dir_name = ENV['S3_BACKUP_DIRNAME']
|
124
|
+
end
|
125
|
+
if dir_name.nil?
|
126
|
+
dir_name = ENV['S3_BACKUP_DIR_NAME']
|
127
|
+
end
|
128
|
+
if dir_name.nil?
|
129
|
+
dir_name = 'backups'
|
130
|
+
end
|
131
|
+
@dir_name = dir_name
|
132
|
+
|
121
133
|
access_key_id = ENV['S3_KEY_ID']
|
122
134
|
if access_key_id.nil?
|
123
135
|
access_key_id = ENV['S3_KEY']
|
@@ -138,12 +150,12 @@ module HerokuMongoBackup
|
|
138
150
|
end
|
139
151
|
|
140
152
|
def s3_upload
|
141
|
-
HerokuMongoBackup::s3_upload(@bucket, @file_name)
|
153
|
+
HerokuMongoBackup::s3_upload(@bucket, @dir_name, @file_name)
|
142
154
|
end
|
143
155
|
|
144
156
|
def s3_download
|
145
157
|
open(@file_name, 'w') do |file|
|
146
|
-
file_content = HerokuMongoBackup::s3_download(@bucket, @file_name)
|
158
|
+
file_content = HerokuMongoBackup::s3_download(@bucket, @dir_name, @file_name)
|
147
159
|
file.binmode
|
148
160
|
file.write file_content
|
149
161
|
end
|
@@ -215,7 +227,7 @@ module HerokuMongoBackup
|
|
215
227
|
end
|
216
228
|
|
217
229
|
if files_number_to_leave > 0
|
218
|
-
HerokuMongoBackup::remove_old_backup_files(@bucket, files_number_to_leave)
|
230
|
+
HerokuMongoBackup::remove_old_backup_files(@bucket, @dir_name, files_number_to_leave)
|
219
231
|
end
|
220
232
|
end
|
221
233
|
|
data/lib/s3_helpers.rb
CHANGED
@@ -20,14 +20,14 @@ if defined?(S3)
|
|
20
20
|
return bucket
|
21
21
|
end
|
22
22
|
|
23
|
-
def HerokuMongoBackup::s3_upload(bucket, filename)
|
24
|
-
object = bucket.objects.build("backups/#{filename}")
|
23
|
+
def HerokuMongoBackup::s3_upload(bucket, dirname, filename)
|
24
|
+
object = bucket.objects.build("#{dirname}/#{filename}")
|
25
25
|
object.content = open(filename)
|
26
26
|
object.save
|
27
27
|
end
|
28
28
|
|
29
|
-
def HerokuMongoBackup::s3_download(bucket, filename)
|
30
|
-
object = bucket.objects.find("backups/#{filename}")
|
29
|
+
def HerokuMongoBackup::s3_download(bucket, dirname, filename)
|
30
|
+
object = bucket.objects.find("#{dirname}/#{filename}")
|
31
31
|
content = object.content(reload=true)
|
32
32
|
|
33
33
|
puts "Backup file:"
|
@@ -39,8 +39,8 @@ if defined?(S3)
|
|
39
39
|
return content
|
40
40
|
end
|
41
41
|
|
42
|
-
def HerokuMongoBackup::remove_old_backup_files(bucket, files_number_to_leave)
|
43
|
-
excess = ( object_keys = bucket.objects.find_all(:prefix => "backups/").map { |o| o.key }.sort ).count - files_number_to_leave
|
42
|
+
def HerokuMongoBackup::remove_old_backup_files(bucket, dirname, files_number_to_leave)
|
43
|
+
excess = ( object_keys = bucket.objects.find_all(:prefix => "#{dirname}/").map { |o| o.key }.sort ).count - files_number_to_leave
|
44
44
|
(0..excess-1).each { |i| bucket.objects.find(object_keys[i]).destroy } if excess > 0
|
45
45
|
end
|
46
46
|
|
@@ -68,17 +68,17 @@ if defined?(AWS)
|
|
68
68
|
return bucket
|
69
69
|
end
|
70
70
|
|
71
|
-
def HerokuMongoBackup::s3_upload(bucket, filename)
|
72
|
-
AWS::S3::S3Object.store("backups/#{filename}", open(filename), bucket)
|
71
|
+
def HerokuMongoBackup::s3_upload(bucket, dirname, filename)
|
72
|
+
AWS::S3::S3Object.store("#{dirname}/#{filename}", open(filename), bucket)
|
73
73
|
end
|
74
74
|
|
75
|
-
def HerokuMongoBackup::s3_download(bucket, filename)
|
76
|
-
content = AWS::S3::S3Object.value("backups/#{filename}", bucket)
|
75
|
+
def HerokuMongoBackup::s3_download(bucket, dirname, filename)
|
76
|
+
content = AWS::S3::S3Object.value("#{dirname}/#{filename}", bucket)
|
77
77
|
return content
|
78
78
|
end
|
79
79
|
|
80
|
-
def HerokuMongoBackup::remove_old_backup_files(bucket, files_number_to_leave)
|
81
|
-
excess = ( object_keys = AWS::S3::Bucket.find(bucket).objects(:prefix => "backups/").map { |o| o.key }.sort ).count - files_number_to_leave
|
80
|
+
def HerokuMongoBackup::remove_old_backup_files(bucket, dirname, files_number_to_leave)
|
81
|
+
excess = ( object_keys = AWS::S3::Bucket.find(bucket).objects(:prefix => "#{dirname}/").map { |o| o.key }.sort ).count - files_number_to_leave
|
82
82
|
(0..excess-1).each { |i| AWS::S3::S3Object.find(object_keys[i], bucket).delete } if excess > 0
|
83
83
|
end
|
84
84
|
|
@@ -98,9 +98,9 @@ end
|
|
98
98
|
|
99
99
|
if defined?(Fog)
|
100
100
|
#
|
101
|
-
# Using 'fog' gem as Amazon S3 interface
|
101
|
+
# Using 'fog' gem as Amazon S3 interface
|
102
102
|
#
|
103
|
-
#puts "Using \'fog\' gem as Amazon S3 interface."
|
103
|
+
#puts "Using \'fog\' gem as Amazon S3 interface."
|
104
104
|
def HerokuMongoBackup::s3_connect(bucket, key, secret)
|
105
105
|
connection = Fog::Storage.new({
|
106
106
|
:provider => 'AWS',
|
@@ -111,24 +111,24 @@ if defined?(Fog)
|
|
111
111
|
return directory
|
112
112
|
end
|
113
113
|
|
114
|
-
def HerokuMongoBackup::s3_upload(directory, filename)
|
114
|
+
def HerokuMongoBackup::s3_upload(directory, dirname, filename)
|
115
115
|
file = directory.files.create(
|
116
|
-
:key => "backups/#{filename}",
|
116
|
+
:key => "#{dirname}/#{filename}",
|
117
117
|
:body => open(filename)
|
118
118
|
)
|
119
119
|
end
|
120
120
|
|
121
|
-
def HerokuMongoBackup::s3_download(directory, filename)
|
122
|
-
file = directory.files.get("backups/#{filename}")
|
121
|
+
def HerokuMongoBackup::s3_download(directory, dirname, filename)
|
122
|
+
file = directory.files.get("#{dirname}/#{filename}")
|
123
123
|
return file.body
|
124
124
|
end
|
125
125
|
|
126
|
-
def HerokuMongoBackup::remove_old_backup_files(directory, files_number_to_leave)
|
127
|
-
total_backups = directory.files.all.size
|
126
|
+
def HerokuMongoBackup::remove_old_backup_files(directory, dirname, files_number_to_leave)
|
127
|
+
total_backups = directory.files.all({:prefix => "#{dirname}/"}).size
|
128
128
|
|
129
129
|
if total_backups > files_number_to_leave
|
130
130
|
|
131
|
-
files_to_destroy = (0..total_backups-files_number_to_leave-1).collect{|i| directory.files.all[i] }
|
131
|
+
files_to_destroy = (0..total_backups-files_number_to_leave-1).collect{|i| directory.files.all({:prefix => "#{dirname}/"})[i] }
|
132
132
|
|
133
133
|
files_to_destroy.each do |f|
|
134
134
|
f.destroy
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: heroku-mongo-backup
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.4.31
|
4
|
+
version: 0.4.32
|
5
5
|
prerelease:
|
6
6
|
platform: ruby
|
7
7
|
authors:
|
@@ -11,17 +11,18 @@ authors:
|
|
11
11
|
- David Hall
|
12
12
|
- Dan Porter
|
13
13
|
- aarti
|
14
|
+
- strayduy
|
14
15
|
autorequire:
|
15
16
|
bindir: bin
|
16
17
|
cert_chain: []
|
17
|
-
date: 2013-
|
18
|
+
date: 2013-08-11 00:00:00.000000000 Z
|
18
19
|
dependencies:
|
19
20
|
- !ruby/object:Gem::Dependency
|
20
21
|
name: mongo
|
21
22
|
requirement: !ruby/object:Gem::Requirement
|
22
23
|
none: false
|
23
24
|
requirements:
|
24
|
-
- - ! '>='
|
25
|
+
- - '>='
|
25
26
|
- !ruby/object:Gem::Version
|
26
27
|
version: '0'
|
27
28
|
type: :runtime
|
@@ -29,7 +30,7 @@ dependencies:
|
|
29
30
|
version_requirements: !ruby/object:Gem::Requirement
|
30
31
|
none: false
|
31
32
|
requirements:
|
32
|
-
- - ! '>='
|
33
|
+
- - '>='
|
33
34
|
- !ruby/object:Gem::Version
|
34
35
|
version: '0'
|
35
36
|
description: Rake task for backing up mongo database on heroku and push it to S3 or
|
@@ -55,18 +56,18 @@ require_paths:
|
|
55
56
|
required_ruby_version: !ruby/object:Gem::Requirement
|
56
57
|
none: false
|
57
58
|
requirements:
|
58
|
-
- - ! '>='
|
59
|
+
- - '>='
|
59
60
|
- !ruby/object:Gem::Version
|
60
61
|
version: '0'
|
61
62
|
required_rubygems_version: !ruby/object:Gem::Requirement
|
62
63
|
none: false
|
63
64
|
requirements:
|
64
|
-
- - ! '>='
|
65
|
+
- - '>='
|
65
66
|
- !ruby/object:Gem::Version
|
66
67
|
version: '0'
|
67
68
|
requirements: []
|
68
69
|
rubyforge_project: nowarning
|
69
|
-
rubygems_version: 1.8.
|
70
|
+
rubygems_version: 1.8.25
|
70
71
|
signing_key:
|
71
72
|
specification_version: 3
|
72
73
|
summary: Rake task backups mongo database on Heroku and push gzipped file to Amazon
|