venet-backup 4.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE.md +24 -0
- data/README.md +15 -0
- data/bin/backup +5 -0
- data/lib/backup.rb +141 -0
- data/lib/backup/archive.rb +170 -0
- data/lib/backup/binder.rb +22 -0
- data/lib/backup/cleaner.rb +116 -0
- data/lib/backup/cli.rb +374 -0
- data/lib/backup/cloud_io/base.rb +41 -0
- data/lib/backup/cloud_io/cloud_files.rb +298 -0
- data/lib/backup/cloud_io/s3.rb +260 -0
- data/lib/backup/compressor/base.rb +35 -0
- data/lib/backup/compressor/bzip2.rb +39 -0
- data/lib/backup/compressor/custom.rb +53 -0
- data/lib/backup/compressor/gzip.rb +74 -0
- data/lib/backup/config.rb +119 -0
- data/lib/backup/config/dsl.rb +103 -0
- data/lib/backup/config/helpers.rb +143 -0
- data/lib/backup/database/base.rb +85 -0
- data/lib/backup/database/mongodb.rb +186 -0
- data/lib/backup/database/mysql.rb +180 -0
- data/lib/backup/database/openldap.rb +95 -0
- data/lib/backup/database/postgresql.rb +133 -0
- data/lib/backup/database/redis.rb +179 -0
- data/lib/backup/database/riak.rb +82 -0
- data/lib/backup/database/sqlite.rb +57 -0
- data/lib/backup/encryptor/base.rb +29 -0
- data/lib/backup/encryptor/gpg.rb +747 -0
- data/lib/backup/encryptor/open_ssl.rb +72 -0
- data/lib/backup/errors.rb +58 -0
- data/lib/backup/logger.rb +199 -0
- data/lib/backup/logger/console.rb +51 -0
- data/lib/backup/logger/fog_adapter.rb +29 -0
- data/lib/backup/logger/logfile.rb +133 -0
- data/lib/backup/logger/syslog.rb +116 -0
- data/lib/backup/model.rb +454 -0
- data/lib/backup/notifier/base.rb +98 -0
- data/lib/backup/notifier/campfire.rb +69 -0
- data/lib/backup/notifier/datadog.rb +116 -0
- data/lib/backup/notifier/flowdock.rb +102 -0
- data/lib/backup/notifier/hipchat.rb +93 -0
- data/lib/backup/notifier/http_post.rb +122 -0
- data/lib/backup/notifier/mail.rb +238 -0
- data/lib/backup/notifier/nagios.rb +74 -0
- data/lib/backup/notifier/pagerduty.rb +81 -0
- data/lib/backup/notifier/prowl.rb +69 -0
- data/lib/backup/notifier/pushover.rb +80 -0
- data/lib/backup/notifier/slack.rb +158 -0
- data/lib/backup/notifier/twitter.rb +64 -0
- data/lib/backup/notifier/zabbix.rb +68 -0
- data/lib/backup/package.rb +51 -0
- data/lib/backup/packager.rb +101 -0
- data/lib/backup/pipeline.rb +124 -0
- data/lib/backup/splitter.rb +76 -0
- data/lib/backup/storage/base.rb +57 -0
- data/lib/backup/storage/cloud_files.rb +158 -0
- data/lib/backup/storage/cycler.rb +65 -0
- data/lib/backup/storage/dropbox.rb +236 -0
- data/lib/backup/storage/ftp.rb +98 -0
- data/lib/backup/storage/google/google_drive_auth.rb +96 -0
- data/lib/backup/storage/google/google_drive_transfer.rb +125 -0
- data/lib/backup/storage/google_drive.rb +62 -0
- data/lib/backup/storage/local.rb +64 -0
- data/lib/backup/storage/ninefold.rb +74 -0
- data/lib/backup/storage/rsync.rb +248 -0
- data/lib/backup/storage/s3.rb +154 -0
- data/lib/backup/storage/scp.rb +67 -0
- data/lib/backup/storage/sftp.rb +82 -0
- data/lib/backup/syncer/base.rb +70 -0
- data/lib/backup/syncer/cloud/base.rb +179 -0
- data/lib/backup/syncer/cloud/cloud_files.rb +83 -0
- data/lib/backup/syncer/cloud/local_file.rb +100 -0
- data/lib/backup/syncer/cloud/s3.rb +110 -0
- data/lib/backup/syncer/rsync/base.rb +48 -0
- data/lib/backup/syncer/rsync/local.rb +31 -0
- data/lib/backup/syncer/rsync/pull.rb +51 -0
- data/lib/backup/syncer/rsync/push.rb +205 -0
- data/lib/backup/template.rb +46 -0
- data/lib/backup/utilities.rb +224 -0
- data/lib/backup/version.rb +5 -0
- data/templates/cli/archive +28 -0
- data/templates/cli/compressor/bzip2 +4 -0
- data/templates/cli/compressor/custom +7 -0
- data/templates/cli/compressor/gzip +4 -0
- data/templates/cli/config +123 -0
- data/templates/cli/databases/mongodb +15 -0
- data/templates/cli/databases/mysql +18 -0
- data/templates/cli/databases/openldap +24 -0
- data/templates/cli/databases/postgresql +16 -0
- data/templates/cli/databases/redis +16 -0
- data/templates/cli/databases/riak +17 -0
- data/templates/cli/databases/sqlite +12 -0
- data/templates/cli/encryptor/gpg +27 -0
- data/templates/cli/encryptor/openssl +9 -0
- data/templates/cli/model +26 -0
- data/templates/cli/notifier/zabbix +15 -0
- data/templates/cli/notifiers/campfire +12 -0
- data/templates/cli/notifiers/datadog +57 -0
- data/templates/cli/notifiers/flowdock +16 -0
- data/templates/cli/notifiers/hipchat +15 -0
- data/templates/cli/notifiers/http_post +32 -0
- data/templates/cli/notifiers/mail +21 -0
- data/templates/cli/notifiers/nagios +13 -0
- data/templates/cli/notifiers/pagerduty +12 -0
- data/templates/cli/notifiers/prowl +11 -0
- data/templates/cli/notifiers/pushover +11 -0
- data/templates/cli/notifiers/slack +23 -0
- data/templates/cli/notifiers/twitter +13 -0
- data/templates/cli/splitter +7 -0
- data/templates/cli/storages/cloud_files +11 -0
- data/templates/cli/storages/dropbox +19 -0
- data/templates/cli/storages/ftp +12 -0
- data/templates/cli/storages/local +7 -0
- data/templates/cli/storages/ninefold +9 -0
- data/templates/cli/storages/rsync +17 -0
- data/templates/cli/storages/s3 +14 -0
- data/templates/cli/storages/scp +14 -0
- data/templates/cli/storages/sftp +14 -0
- data/templates/cli/syncers/cloud_files +22 -0
- data/templates/cli/syncers/rsync_local +20 -0
- data/templates/cli/syncers/rsync_pull +28 -0
- data/templates/cli/syncers/rsync_push +28 -0
- data/templates/cli/syncers/s3 +27 -0
- data/templates/general/links +3 -0
- data/templates/general/version.erb +2 -0
- data/templates/notifier/mail/failure.erb +16 -0
- data/templates/notifier/mail/success.erb +16 -0
- data/templates/notifier/mail/warning.erb +16 -0
- data/templates/storage/dropbox/authorization_url.erb +6 -0
- data/templates/storage/dropbox/authorized.erb +4 -0
- data/templates/storage/dropbox/cache_file_written.erb +10 -0
- data/templates/storage/google_drive/authorization_url.erb +6 -0
- data/templates/storage/google_drive/authorized.erb +4 -0
- data/templates/storage/google_drive/cache_file_written.erb +10 -0
- metadata +957 -0
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
# encoding: utf-8

module Backup
  module Storage
    module Cycler
      class Error < Backup::Error; end

      private

      # Prepends the package currently being stored to the YAML cycle
      # data file, then removes any package file(s) that exceed the
      # storage limit set by #keep. Removal failures are logged as
      # warnings and do not abort the cycle.
      def cycle!
        Logger.info 'Cycling Started...'

        packages = yaml_load.unshift(package)
        overflow = packages.count - keep.to_i

        if overflow > 0
          packages.pop(overflow).each do |pkg|
            begin
              remove!(pkg) unless pkg.no_cycle
            rescue => error
              Logger.warn Error.wrap(error, <<-EOS)
                There was a problem removing the following package:
                Trigger: #{pkg.trigger} :: Dated: #{pkg.time}
                Package included the following #{ pkg.filenames.count } file(s):
                #{ pkg.filenames.join("\n") }
              EOS
            end
          end
        end

        yaml_save(packages)
      end

      # Returns path to the YAML data file.
      # The name is derived from the storage class, suffixed with the
      # storage_id (when one was given) to keep multiple storages of
      # the same type from sharing a cycle file.
      def yaml_file
        @yaml_file ||= begin
          basename = self.class.to_s.split('::').last
          basename << "-#{ storage_id }" if storage_id
          File.join(Config.data_path, package.trigger, "#{ basename }.yml")
        end
      end

      # Returns stored Package objects, sorted by #time descending (oldest last).
      # A missing or empty data file yields an empty list.
      def yaml_load
        return [] unless File.exist?(yaml_file) && !File.zero?(yaml_file)

        YAML.load_file(yaml_file).sort_by!(&:time).reverse!
      end

      # Stores the given package objects to the YAML data file,
      # creating the parent directory if needed.
      def yaml_save(packages)
        FileUtils.mkdir_p(File.dirname(yaml_file))
        File.write(yaml_file, packages.to_yaml)
      end

    end
  end
end
|
|
@@ -0,0 +1,236 @@
|
|
|
1
|
+
# encoding: utf-8
require 'dropbox_sdk'

module Backup
  module Storage
    class Dropbox < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Dropbox API credentials
      attr_accessor :api_key, :api_secret

      ##
      # Path to store cached authorized session.
      #
      # Relative paths will be expanded using Config.root_path,
      # which by default is ~/Backup unless --root-path was used
      # on the command line or set in config.rb.
      #
      # By default, +cache_path+ is '.cache', which would be
      # '~/Backup/.cache/' if using the default root_path.
      attr_accessor :cache_path

      ##
      # Dropbox Access Type
      # Valid values are:
      #   :app_folder (default)
      #   :dropbox (full access)
      attr_accessor :access_type

      ##
      # Chunk size, specified in MiB, for the ChunkedUploader.
      attr_accessor :chunk_size

      ##
      # Number of times to retry failed operations.
      #
      # Default: 10
      attr_accessor :max_retries

      ##
      # Time in seconds to pause before each retry.
      #
      # Default: 30
      attr_accessor :retry_waitsec

      ##
      # Creates a new instance of the storage object
      def initialize(model, storage_id = nil)
        super

        @path          ||= 'backups'
        @cache_path    ||= '.cache'
        @access_type   ||= :app_folder
        @chunk_size    ||= 4 # MiB
        @max_retries   ||= 10
        @retry_waitsec ||= 30
        path.sub!(/^\//, '')
      end

      private

      ##
      # The initial connection to Dropbox will provide the user with an
      # authorization url. The user must open this URL and confirm that the
      # authorization successfully took place. If this is the case, then the
      # user hits 'enter' and the session will be properly established.
      # Immediately after establishing the session, the session will be
      # serialized and written to a cache file in +cache_path+.
      # The cached file will be used from that point on to re-establish a
      # connection with Dropbox at a later time. This allows the user to avoid
      # having to go to a new Dropbox URL to authorize over and over again.
      def connection
        return @connection if @connection

        session = cached_session
        unless session
          Logger.info "Creating a new session!"
          session = create_write_and_return_new_session!
        end

        # will raise an error if session not authorized
        @connection = DropboxClient.new(session, access_type)

      rescue => error
        raise Error.wrap(error, 'Authorization Failed')
      end

      ##
      # Attempt to load a cached session.
      # Returns false when no usable cache exists.
      def cached_session
        return false unless File.exist?(cached_file)

        begin
          session = DropboxSession.deserialize(File.read(cached_file))
          Logger.info "Session data loaded from cache!"
          session
        rescue => error
          Logger.warn Error.wrap(error, <<-EOS)
            Could not read session data from cache.
            Cache data might be corrupt.
          EOS
          false
        end
      end

      ##
      # Transfer each of the package files to Dropbox in chunks of +chunk_size+.
      # Each chunk will be retried +chunk_retries+ times, pausing +retry_waitsec+
      # between retries, if errors occur.
      def transfer!
        package.filenames.each do |filename|
          source      = File.join(Config.tmp_path, filename)
          destination = File.join(remote_path, filename)
          Logger.info "Storing '#{ destination }'..."

          uploader = nil
          File.open(source, 'r') do |file|
            uploader = connection.get_chunked_uploader(file, file.stat.size)
            until uploader.offset >= uploader.total_size
              with_retries { uploader.upload(1024**2 * chunk_size) }
            end
          end

          with_retries { uploader.finish(destination) }
        end

      rescue => error
        raise Error.wrap(error, 'Upload Failed!')
      end

      # Runs the given block, retrying up to +max_retries+ times with a
      # pause of +retry_waitsec+ between attempts. Re-raises once the
      # retry budget is exhausted.
      def with_retries
        attempts = 0
        begin
          yield
        rescue StandardError => error
          attempts += 1
          raise if attempts > max_retries

          Logger.info Error.wrap(error, "Retry ##{ attempts } of #{ max_retries }.")
          sleep(retry_waitsec)
          retry
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{ package.time }..."

        connection.file_delete(remote_path_for(package))
      end

      # Path of the serialized-session cache file: <cache dir>/<key+secret>.
      # A relative +cache_path+ is anchored at Config.root_path.
      def cached_file
        if cache_path.start_with?('/')
          base = cache_path
        else
          base = File.join(Config.root_path, cache_path)
        end
        File.join(base, api_key + api_secret)
      end

      ##
      # Serializes and writes the Dropbox session to a cache file
      def write_cache!(session)
        FileUtils.mkdir_p File.dirname(cached_file)
        File.open(cached_file, "w") { |cache_file| cache_file.write(session.serialize) }
      end

      ##
      # Create a new session, write a serialized version of it to the
      # .cache directory, and return the session object
      def create_write_and_return_new_session!
        require 'timeout'

        session = DropboxSession.new(api_key, api_secret)

        # grab the request token for session
        session.get_request_token

        template = Backup::Template.new(
          {:session => session, :cached_file => cached_file}
        )
        template.render("storage/dropbox/authorization_url.erb")

        # wait for user to hit 'return' to continue
        Timeout::timeout(180) { STDIN.gets }

        # this will raise an error if the user did not
        # visit the authorization_url and grant access
        #
        # get the access token from the server
        # this will be stored with the session in the cache file
        session.get_access_token

        template.render("storage/dropbox/authorized.erb")
        write_cache!(session)
        template.render("storage/dropbox/cache_file_written.erb")

        session

      rescue => error
        raise Error.wrap(error, 'Could not create or authenticate a new session')
      end

    end
  end
end
|
|
213
|
+
|
|
214
|
+
# Patch for dropbox-ruby-sdk-1.5.1
class DropboxClient
  class ChunkedUploader
    # Uploads the file in +chunk_size+-byte pieces until the whole file
    # has been sent. When Dropbox reports an offset mismatch via a
    # DropboxError, the server-reported offset is adopted and the loop
    # continues from there instead of failing.
    def upload(chunk_size = 1024**2 * 4)
      while @offset < @total_size
        @file_obj.seek(@offset) unless @file_obj.pos == @offset
        chunk = @file_obj.read(chunk_size)

        begin
          response = @client.parse_response(
            @client.partial_chunked_upload(chunk, @upload_id, @offset)
          )
        rescue DropboxError => error
          response =
            begin
              JSON.parse(error.http_response.body)
            rescue StandardError
              {}
            end
          # Only an offset-correction response is recoverable.
          raise error unless response['offset']
        end

        @offset = response['offset']
        @upload_id ||= response['upload_id']
      end
    end
  end
end
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
# encoding: utf-8
require 'net/ftp'

module Backup
  module Storage
    class FTP < Base
      include Storage::Cycler

      ##
      # Server credentials
      attr_accessor :username, :password

      ##
      # Server IP Address and FTP port
      attr_accessor :ip, :port

      ##
      # use passive mode?
      attr_accessor :passive_mode

      def initialize(model, storage_id = nil)
        super

        @port         ||= 21
        @path         ||= 'backups'
        @passive_mode ||= false
        path.sub!(/^~\//, '')
      end

      private

      ##
      # Establishes a connection to the remote server
      #
      # Note:
      # Since the FTP port is defined as a constant in the Net::FTP class, and
      # might be required to change by the user, we dynamically remove and
      # re-add the constant with the provided port value
      def connection
        if Net::FTP.const_defined?(:FTP_PORT)
          Net::FTP.send(:remove_const, :FTP_PORT)
        end
        Net::FTP.send(:const_set, :FTP_PORT, port)

        Net::FTP.open(ip, username, password) do |ftp|
          ftp.passive = true if passive_mode
          yield ftp
        end
      end

      # Uploads each package file into the remote path, creating the
      # remote directory tree first.
      def transfer!
        connection do |ftp|
          create_remote_path(ftp)

          package.filenames.each do |filename|
            source      = File.join(Config.tmp_path, filename)
            destination = File.join(remote_path, filename)
            Logger.info "Storing '#{ ip }:#{ destination }'..."
            ftp.put(source, destination)
          end
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{ package.time }..."

        target_path = remote_path_for(package)
        connection do |ftp|
          package.filenames.each do |filename|
            ftp.delete(File.join(target_path, filename))
          end

          ftp.rmdir(target_path)
        end
      end

      ##
      # Creates (if they don't exist yet) all the directories on the remote
      # server in order to upload the backup file. Net::FTP does not support
      # paths to directories that don't yet exist when creating new
      # directories. Instead, we split the parts up in to an array (for each
      # '/') and loop through that to create the directories one by one.
      # Net::FTP raises an exception when the directory it's trying to create
      # already exists, so we have rescue it
      def create_remote_path(ftp)
        segments = []
        remote_path.split('/').each do |segment|
          segments << segment
          begin
            ftp.mkdir(segments.join('/'))
          rescue Net::FTPPermError
            # directory already exists
          end
        end
      end

    end
  end
end
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
module Backup
  # Handles the OAuth2 handshake with Google Drive and caches the
  # resulting credentials on disk so subsequent runs do not need to
  # re-authorize interactively.
  #
  # After #initialize, +client+ is an authorized Google::APIClient and
  # +drive+ is the discovered Drive API surface.
  class GoogleDriveAuth
    include Backup::Config::Helpers

    attr_accessor :client, :drive

    # Merges the given options over the defaults and connects.
    # Recognized keys include :client_id, :client_secret, :api_version,
    # :cache_path, :scope, :authorization_uri, :token_credential_uri
    # and :redirect_uri.
    def initialize(options = {})
      defaults = {
        client_id: "",
        client_secret: "",
        api_version: "v2",
        cache_path: ".cache",
        scope: "https://www.googleapis.com/auth/drive",
        authorization_uri: "https://accounts.google.com/o/oauth2/auth",
        token_credential_uri: "https://accounts.google.com/o/oauth2/token",
        redirect_uri: "urn:ietf:wg:oauth:2.0:oob"
      }

      @options = defaults.merge(options)

      connect
    end

    # Builds the authorized client: uses the cached credentials when
    # present, otherwise runs the interactive authorization flow.
    def connect
      @client = (auth.nil? ? authorize_and_cache : authorized_client)
      @drive = @client.discovered_api("drive", @options[:api_version])
    rescue => err
      raise Error.wrap(err, "Authorization Failed")
    end

    # A FileStorage bound to the on-disk credential cache.
    # NOTE: a fresh instance is built per call so the cache file is
    # re-read rather than memoized.
    def file_storage
      Google::APIClient::FileStorage.new(credential_store_file)
    end

    # Cached authorization, or nil when none has been stored yet.
    def auth
      file_storage.authorization
    end

    # Interactive flow: prints the authorization URL, waits (up to 180s)
    # for the user to paste the code, exchanges it for tokens, caches
    # them, and returns an authorized client.
    def authorize_and_cache
      require "timeout"

      Logger.info "Creating a new authorization!"

      client = api_client

      authorization = Signet::OAuth2::Client.new(@options)

      template = Backup::Template.new(
        auth: authorization,
        credential_store_file: credential_store_file
      )
      template.render("storage/google_drive/authorization_url.erb")

      Timeout::timeout(180) {
        # STDIN.gets includes the trailing newline (and returns nil on
        # EOF); strip it so the verbatim code is sent in the token
        # exchange, otherwise fetch_access_token! fails.
        authorization.code = STDIN.gets.to_s.strip
      }

      authorization.fetch_access_token!

      template.render("storage/google_drive/authorized.erb")
      write_cache!(authorization)
      template.render("storage/google_drive/cache_file_written.erb")

      client.authorization = authorization

      client

    rescue => err
      raise Error.wrap(err, "Could not authorize")
    end

    # Returns a client configured from the cached credentials.
    def authorized_client
      client = api_client
      client.authorization = auth
      client
    end

    # Path of the credential cache file: <cache dir>/<client_id+secret>.
    # A relative :cache_path is anchored at Config.root_path.
    def credential_store_file
      path = @options[:cache_path].start_with?("/") ? @options[:cache_path] : File.join(Config.root_path, @options[:cache_path])
      File.join(path, @options[:client_id] + @options[:client_secret])
    end

    # Persists the authorization to the credential cache file,
    # creating the parent directory if needed.
    def write_cache!(authorization)
      FileUtils.mkdir_p File.dirname(credential_store_file)
      file_storage.write_credentials(authorization)
    end

    # Shared Google::APIClient construction used by both the fresh and
    # the cached authorization paths.
    def api_client
      Google::APIClient.new(
        application_name: "Ruby backup to google drive",
        application_version: "0.1.0"
      )
    end
  end
end
|