backup-googledrive 0.0.2 → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/backup-googledrive.rb +212 -2
- metadata +30 -12
- data/lib/storage/googledrive.rb +0 -149
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 45c9de7b9020a457a977dc6b804b3bed1598c661
|
4
|
+
data.tar.gz: deb947444c13d07f39a145529f4c275527d90f6a
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 775d57b43a92e45393fdb893fe809e9186909575718d651ff13d003c8a0724457c96496f85e73b283d54f8f19fd70fce1e480b41de13285a03c8918dd180ed14
|
7
|
+
data.tar.gz: 0def6331e624ae9d26add60519e035b571e11a78370217652c667d3074ee4ca81279d64eb65951be726da1b6d0320211b6bbf952afaebdf0a4205fcecafa8fcd
|
data/lib/backup-googledrive.rb
CHANGED
@@ -1,6 +1,216 @@
|
|
1
|
+
require 'backup'

module Backup
  module Storage
    # Storage adapter that ships backup packages to Google Drive by shelling
    # out to the rclone executable (https://rclone.org/).
    class GoogleDrive < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Path to the rclone executable
      attr_accessor :rclone_exec

      ##
      # Path to the rclone config file
      attr_accessor :rclone_config

      ##
      # Name of the rclone remote drive
      attr_accessor :rclone_drive

      ##
      # Chunk size, specified in bytes (power of two, i.e. 8M = 8 * 1024 * 1024)
      attr_accessor :chunk_size

      ##
      # Number of times rclone retries failed operations.
      #
      # Default: 3
      attr_accessor :max_retries

      ##
      # Creates a new instance of the storage object and fills in a default
      # for every setting the backup model did not configure.
      def initialize(model, storage_id = nil)
        super
        @rclone_exec   ||= "/usr/bin/rclone"
        @rclone_config ||= "/root/.config/rclone/rclone.conf"
        @rclone_drive  ||= "remote"
        @path          ||= ""
        @chunk_size    ||= 8388608 # 8M
        @max_retries   ||= 3
        @cmd_base = "#{rclone_exec} --config #{rclone_config} --retries #{max_retries} --drive-chunk-size=#{chunk_size} --drive-use-trash=false"
        # rclone remote paths are relative to the drive root, so strip any
        # leading slash the user configured.
        path.sub!(/^\//, "")
      end

      private

      ##
      # Copies each file of the backup package to the remote drive.
      def transfer!
        package.filenames.each do |filename|
          src = File.join(Config.tmp_path, filename)
          dest = File.join(remote_path, filename)
          Logger.info "Storing '#{dest}'..."
          # Quote both paths so filenames containing spaces survive the shell.
          run_rclone(%(#{@cmd_base} copy "#{src}" "#{rclone_drive}:#{dest}"))
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."

        # `rclone purge` removes the whole remote directory, so a single call
        # covers every file in the package — no need to purge once per
        # filename as the previous implementation did.
        remote_path = remote_path_for(package)
        run_rclone(%(#{@cmd_base} purge "#{rclone_drive}:#{remote_path}"))
      end

      ##
      # Runs an rclone command, echoing its output, and raises Error when the
      # command exits non-zero instead of silently ignoring the failure.
      def run_rclone(cmd)
        output = `#{cmd}`
        puts output
        raise Error, "rclone command failed: #{cmd}\n#{output}" unless $?.success?
      end
    end
  end
end

# Add our module name to the Backup DSL so models can use `store_with GoogleDrive`.
Backup::Config::DSL.const_set("GoogleDrive", Module.new)
|
metadata
CHANGED
@@ -1,24 +1,43 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: backup-googledrive
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.0
|
4
|
+
version: 0.1.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Joseph Rafferty
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date:
|
12
|
-
dependencies:
|
13
|
-
|
14
|
-
|
11
|
+
date: 2018-04-27 00:00:00.000000000 Z
|
12
|
+
dependencies:
|
13
|
+
- !ruby/object:Gem::Dependency
|
14
|
+
name: backup
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
16
|
+
requirements:
|
17
|
+
- - "~>"
|
18
|
+
- !ruby/object:Gem::Version
|
19
|
+
version: '4.4'
|
20
|
+
- - ">="
|
21
|
+
- !ruby/object:Gem::Version
|
22
|
+
version: 4.4.1
|
23
|
+
type: :runtime
|
24
|
+
prerelease: false
|
25
|
+
version_requirements: !ruby/object:Gem::Requirement
|
26
|
+
requirements:
|
27
|
+
- - "~>"
|
28
|
+
- !ruby/object:Gem::Version
|
29
|
+
version: '4.4'
|
30
|
+
- - ">="
|
31
|
+
- !ruby/object:Gem::Version
|
32
|
+
version: 4.4.1
|
33
|
+
description:
|
34
|
+
email: joraff@gmail.com
|
15
35
|
executables: []
|
16
36
|
extensions: []
|
17
37
|
extra_rdoc_files: []
|
18
38
|
files:
|
19
39
|
- lib/backup-googledrive.rb
|
20
|
-
|
21
|
-
homepage: http://github.com/joraff/backup-googledrive
|
40
|
+
homepage: https://github.com/joraff/backup-googledrive
|
22
41
|
licenses:
|
23
42
|
- MIT
|
24
43
|
metadata: {}
|
@@ -28,19 +47,18 @@ require_paths:
|
|
28
47
|
- lib
|
29
48
|
required_ruby_version: !ruby/object:Gem::Requirement
|
30
49
|
requirements:
|
31
|
-
- -
|
50
|
+
- - ">="
|
32
51
|
- !ruby/object:Gem::Version
|
33
52
|
version: '0'
|
34
53
|
required_rubygems_version: !ruby/object:Gem::Requirement
|
35
54
|
requirements:
|
36
|
-
- -
|
55
|
+
- - ">="
|
37
56
|
- !ruby/object:Gem::Version
|
38
57
|
version: '0'
|
39
58
|
requirements: []
|
40
59
|
rubyforge_project:
|
41
|
-
rubygems_version: 2.
|
60
|
+
rubygems_version: 2.5.2
|
42
61
|
signing_key:
|
43
62
|
specification_version: 4
|
44
|
-
summary:
|
63
|
+
summary: Adds Google Drive as a storage option for Backup
|
45
64
|
test_files: []
|
46
|
-
has_rdoc:
|
data/lib/storage/googledrive.rb
DELETED
@@ -1,149 +0,0 @@
|
|
1
|
-
# encoding: utf-8

module Backup
  module Storage
    # Storage adapter that ships backup packages to Google Drive by shelling
    # out to the gdrive CLI (https://github.com/prasmussen/gdrive).
    #
    # gdrive itself handles timeouts, error retries and large file chunking;
    # its defaults are acceptable, and accessors could be added later to
    # customize them if needed.
    class GoogleDrive < Base

      include Storage::Cycler
      class Error < Backup::Error; end

      # Path to the gdrive executable
      attr_accessor :gdrive_exe

      # Use the gdrive executable to obtain a refresh token, then add that
      # token to your backup model. gdrive handles refreshing access tokens.
      attr_accessor :refresh_token

      ##
      # Creates a new instance of the storage object and validates the
      # required configuration.
      def initialize(model, storage_id = nil)
        super

        @path ||= 'backups'
        path.sub!(/^\//, '')

        raise Error, "Configuration Error: a refresh_token is required" if refresh_token.nil?

        raise Error, "Configuration Error: gdrive executable is required." if gdrive_exe.nil?
      end

      # private

      ##
      # Transfers each of the package files to Google Drive, resolving (and
      # creating on demand) the remote folder hierarchy first.
      def transfer!
        package.filenames.each do |filename|
          src = File.join(Config.tmp_path, filename)
          dest = File.join(remote_path, filename)
          Logger.info "Storing '#{ dest }'..."

          parent_id = find_id_from_path(remote_path)
          gdrive_upload(src, parent_id)
        end
      end

      # # Called by the Cycler.
      # # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{ package.time }..."
        id = find_id_from_path(remote_path_for(package), false)
        if id.to_s.empty?
          raise Error, "Backup package #{ package.time } not found in Google Drive"
        else
          gdrive_delete(id)
        end
      end

      ##
      # Walks +path+ one segment at a time, looking up (and, when +create+ is
      # true, creating) each folder, and returns the Drive ID of the final
      # segment — or nil when it was not found and not created.
      def find_id_from_path(path = remote_path, create = true)
        parent = nil
        path.split('/').each do |path_part|
          id = get_folder_id(path_part, parent)
          if id.to_s.empty? && create
            id = gdrive_mkdir(path_part, parent)
          end
          parent = id
        end
        return parent
      end

      ##
      # Returns the Drive ID of folder +name+ under +parent+ ('root' when no
      # parent is given), or nil when it does not exist.
      def get_folder_id(name, parent = nil)
        parent = parent ? parent : 'root'
        gdrive_list("name = '#{name}' and '#{parent}' in parents")
      end

      ##
      # Runs `gdrive list` with +query+ and returns the ID of the first match,
      # or nil when nothing matched or the ID could not be parsed.
      def gdrive_list(query)
        unless query.empty?
          # Use the configured executable path rather than relying on `gdrive`
          # being on PATH (gdrive_exe is validated in #initialize).
          cmd = "#{gdrive_exe} --refresh-token '#{refresh_token}' list --no-header -q \"#{query}\""
          output = `#{cmd}`
          if output.downcase.include? "error"
            raise Error, "Could not list or find the object with query string '#{query}'. gdrive output: #{output}"
          elsif output.empty?
            return nil
          else
            begin
              return /^([^ ]*).*/.match(output)[1] # will return an empty string on no match
            rescue StandardError
              return nil
            end
          end
        else
          raise Error, "A search query is required to list/find a file or folder"
        end
      end

      ##
      # Creates folder +name+ under +parent+ ('root' when no parent is given)
      # and returns the new folder's Drive ID.
      def gdrive_mkdir(name, parent = nil)
        unless name.empty?
          parent = parent ? parent : 'root'
          cmd = "#{gdrive_exe} --refresh-token '#{refresh_token}' mkdir -p '#{parent}' '#{name}'"
          output = `#{cmd}`
          if output.downcase.include? "error"
            raise Error, "Could not create the directory '#{name}' with parent '#{parent}'. gdrive output: #{output}"
          else
            # gdrive reports "Directory <id> created" on success.
            id = /^Directory (.*?) created/.match(output)[1]
            raise Error, "Could not determine ID of newly created folder. See gdrive output: #{output}" if id.to_s.empty?
            Logger.info "Created folder #{name} successfully with id '#{id}'"
            return id
          end
        else
          raise Error, "Name parameter is required to make a directory"
        end
      end

      ##
      # Uploads local file +src+ into the folder identified by +parent+
      # ('root' when no parent is given). Raises Error on any failure.
      def gdrive_upload(src, parent = nil)
        parent = parent ? parent : 'root'
        cmd = "#{gdrive_exe} --refresh-token '#{refresh_token}' upload -p '#{parent}' '#{src}'"
        output = `#{cmd}`
        if ( ["error", "failed"].any? {|s| output.downcase.include? s } )
          raise Error, "Could not upload file. See gdrive output: #{output}"
        else
          begin
            # gdrive reports "Uploaded <id> ..." on success.
            id = /.*Uploaded (.*?) .*/.match(output)[1]
            raise Error, "empty id" if id.to_s.empty?
            Logger.info "Uploaded #{src} into parent folder '#{parent}' successfully. Google Drive file_id: #{ id }"
          rescue => err
            raise Error.wrap(err, "Could not determine ID of newly created folder. See gdrive output: #{output}")
          end
        end
      end

      ##
      # Deletes the Drive object with +id+ (recursively by default).
      def gdrive_delete(id, recursive = true)
        cmd = "#{gdrive_exe} --refresh-token '#{refresh_token}' delete #{'-r' if recursive} '#{id}'"
        output = `#{cmd}`
        if output.downcase.include? "error"
          raise Error, "Could not delete object with id: #{id}. See gdrive output: #{output}"
        else
          Logger.info output
        end
      end
    end
  end
end
|