backup-ssh 4.1.10
- checksums.yaml +7 -0
- data/LICENSE.md +24 -0
- data/README.md +25 -0
- data/bin/backup +5 -0
- data/lib/backup.rb +141 -0
- data/lib/backup/archive.rb +170 -0
- data/lib/backup/binder.rb +22 -0
- data/lib/backup/cleaner.rb +116 -0
- data/lib/backup/cli.rb +374 -0
- data/lib/backup/cloud_io/base.rb +41 -0
- data/lib/backup/cloud_io/cloud_files.rb +298 -0
- data/lib/backup/cloud_io/s3.rb +260 -0
- data/lib/backup/compressor/base.rb +35 -0
- data/lib/backup/compressor/bzip2.rb +39 -0
- data/lib/backup/compressor/custom.rb +53 -0
- data/lib/backup/compressor/gzip.rb +74 -0
- data/lib/backup/config.rb +119 -0
- data/lib/backup/config/dsl.rb +103 -0
- data/lib/backup/config/helpers.rb +143 -0
- data/lib/backup/database/base.rb +85 -0
- data/lib/backup/database/mongodb.rb +186 -0
- data/lib/backup/database/mysql.rb +200 -0
- data/lib/backup/database/openldap.rb +95 -0
- data/lib/backup/database/postgresql.rb +133 -0
- data/lib/backup/database/redis.rb +179 -0
- data/lib/backup/database/riak.rb +82 -0
- data/lib/backup/database/sqlite.rb +57 -0
- data/lib/backup/encryptor/base.rb +29 -0
- data/lib/backup/encryptor/gpg.rb +747 -0
- data/lib/backup/encryptor/open_ssl.rb +77 -0
- data/lib/backup/errors.rb +58 -0
- data/lib/backup/logger.rb +199 -0
- data/lib/backup/logger/console.rb +51 -0
- data/lib/backup/logger/fog_adapter.rb +29 -0
- data/lib/backup/logger/logfile.rb +133 -0
- data/lib/backup/logger/syslog.rb +116 -0
- data/lib/backup/model.rb +454 -0
- data/lib/backup/notifier/base.rb +98 -0
- data/lib/backup/notifier/campfire.rb +69 -0
- data/lib/backup/notifier/datadog.rb +116 -0
- data/lib/backup/notifier/flowdock.rb +102 -0
- data/lib/backup/notifier/hipchat.rb +93 -0
- data/lib/backup/notifier/http_post.rb +122 -0
- data/lib/backup/notifier/mail.rb +238 -0
- data/lib/backup/notifier/nagios.rb +74 -0
- data/lib/backup/notifier/pagerduty.rb +81 -0
- data/lib/backup/notifier/prowl.rb +69 -0
- data/lib/backup/notifier/pushover.rb +80 -0
- data/lib/backup/notifier/ses.rb +94 -0
- data/lib/backup/notifier/slack.rb +154 -0
- data/lib/backup/notifier/twitter.rb +64 -0
- data/lib/backup/notifier/zabbix.rb +68 -0
- data/lib/backup/package.rb +51 -0
- data/lib/backup/packager.rb +101 -0
- data/lib/backup/pipeline.rb +124 -0
- data/lib/backup/splitter.rb +76 -0
- data/lib/backup/storage/base.rb +57 -0
- data/lib/backup/storage/cloud_files.rb +158 -0
- data/lib/backup/storage/cycler.rb +65 -0
- data/lib/backup/storage/dropbox.rb +236 -0
- data/lib/backup/storage/ftp.rb +98 -0
- data/lib/backup/storage/local.rb +64 -0
- data/lib/backup/storage/ninefold.rb +74 -0
- data/lib/backup/storage/rsync.rb +248 -0
- data/lib/backup/storage/s3.rb +155 -0
- data/lib/backup/storage/scp.rb +67 -0
- data/lib/backup/storage/sftp.rb +82 -0
- data/lib/backup/syncer/base.rb +70 -0
- data/lib/backup/syncer/cloud/base.rb +179 -0
- data/lib/backup/syncer/cloud/cloud_files.rb +83 -0
- data/lib/backup/syncer/cloud/local_file.rb +100 -0
- data/lib/backup/syncer/cloud/s3.rb +110 -0
- data/lib/backup/syncer/rsync/base.rb +48 -0
- data/lib/backup/syncer/rsync/local.rb +31 -0
- data/lib/backup/syncer/rsync/pull.rb +51 -0
- data/lib/backup/syncer/rsync/push.rb +205 -0
- data/lib/backup/template.rb +46 -0
- data/lib/backup/utilities.rb +224 -0
- data/lib/backup/version.rb +5 -0
- data/templates/cli/archive +28 -0
- data/templates/cli/compressor/bzip2 +4 -0
- data/templates/cli/compressor/custom +7 -0
- data/templates/cli/compressor/gzip +4 -0
- data/templates/cli/config +123 -0
- data/templates/cli/databases/mongodb +15 -0
- data/templates/cli/databases/mysql +18 -0
- data/templates/cli/databases/openldap +24 -0
- data/templates/cli/databases/postgresql +16 -0
- data/templates/cli/databases/redis +16 -0
- data/templates/cli/databases/riak +17 -0
- data/templates/cli/databases/sqlite +11 -0
- data/templates/cli/encryptor/gpg +27 -0
- data/templates/cli/encryptor/openssl +9 -0
- data/templates/cli/model +26 -0
- data/templates/cli/notifier/zabbix +15 -0
- data/templates/cli/notifiers/campfire +12 -0
- data/templates/cli/notifiers/datadog +57 -0
- data/templates/cli/notifiers/flowdock +16 -0
- data/templates/cli/notifiers/hipchat +15 -0
- data/templates/cli/notifiers/http_post +32 -0
- data/templates/cli/notifiers/mail +21 -0
- data/templates/cli/notifiers/nagios +13 -0
- data/templates/cli/notifiers/pagerduty +12 -0
- data/templates/cli/notifiers/prowl +11 -0
- data/templates/cli/notifiers/pushover +11 -0
- data/templates/cli/notifiers/ses +15 -0
- data/templates/cli/notifiers/slack +22 -0
- data/templates/cli/notifiers/twitter +13 -0
- data/templates/cli/splitter +7 -0
- data/templates/cli/storages/cloud_files +11 -0
- data/templates/cli/storages/dropbox +19 -0
- data/templates/cli/storages/ftp +12 -0
- data/templates/cli/storages/local +7 -0
- data/templates/cli/storages/ninefold +9 -0
- data/templates/cli/storages/rsync +17 -0
- data/templates/cli/storages/s3 +14 -0
- data/templates/cli/storages/scp +14 -0
- data/templates/cli/storages/sftp +14 -0
- data/templates/cli/syncers/cloud_files +22 -0
- data/templates/cli/syncers/rsync_local +20 -0
- data/templates/cli/syncers/rsync_pull +28 -0
- data/templates/cli/syncers/rsync_push +28 -0
- data/templates/cli/syncers/s3 +27 -0
- data/templates/general/links +3 -0
- data/templates/general/version.erb +2 -0
- data/templates/notifier/mail/failure.erb +16 -0
- data/templates/notifier/mail/success.erb +16 -0
- data/templates/notifier/mail/warning.erb +16 -0
- data/templates/storage/dropbox/authorization_url.erb +6 -0
- data/templates/storage/dropbox/authorized.erb +4 -0
- data/templates/storage/dropbox/cache_file_written.erb +10 -0
- metadata +1057 -0
data/lib/backup/storage/cloud_files.rb
@@ -0,0 +1,158 @@
+# encoding: utf-8
+require 'backup/cloud_io/cloud_files'
+
+module Backup
+  module Storage
+    class CloudFiles < Base
+      include Storage::Cycler
+      class Error < Backup::Error; end
+
+      ##
+      # Rackspace CloudFiles Credentials
+      attr_accessor :username, :api_key
+
+      ##
+      # Rackspace Auth URL (optional)
+      attr_accessor :auth_url
+
+      ##
+      # Rackspace Service Net
+      # (LAN-based transfers to avoid charges and improve performance)
+      attr_accessor :servicenet
+
+      ##
+      # Rackspace Region (optional)
+      attr_accessor :region
+
+      ##
+      # Rackspace Container Name
+      attr_accessor :container
+
+      ##
+      # Rackspace Container Name for SLO Segments
+      # Required if #segment_size is set. Must be different from #container.
+      attr_accessor :segments_container
+
+      ##
+      # SLO Segment size, specified in MiB.
+      #
+      # Each package file larger than +segment_size+
+      # will be uploaded as a Static Large Object (SLO).
+      #
+      # Defaults to 0 for backward compatibility (pre v.3.7.0),
+      # since #segments_container would be required.
+      #
+      # Minimum: 1 (0 disables SLO support)
+      # Maximum: 5120 (5 GiB)
+      attr_accessor :segment_size
+
+      ##
+      # If set, all backup package files (including SLO segments) will be
+      # scheduled for automatic removal by the server.
+      #
+      # The `keep` option should not be used if this is set,
+      # unless you're transitioning from the `keep` option.
+      attr_accessor :days_to_keep
+
+      ##
+      # Number of times to retry failed operations.
+      #
+      # Default: 10
+      attr_accessor :max_retries
+
+      ##
+      # Time in seconds to pause before each retry.
+      #
+      # Default: 30
+      attr_accessor :retry_waitsec
+
+      ##
+      # Additional options to pass along to fog.
+      # e.g. Fog::Storage.new({ :provider => 'Rackspace' }.merge(fog_options))
+      attr_accessor :fog_options
+
+      def initialize(model, storage_id = nil)
+        super
+
+        @servicenet    ||= false
+        @segment_size  ||= 0
+        @max_retries   ||= 10
+        @retry_waitsec ||= 30
+
+        @path ||= 'backups'
+        path.sub!(/^\//, '')
+
+        check_configuration
+      end
+
+      private
+
+      def cloud_io
+        @cloud_io ||= CloudIO::CloudFiles.new(
+          :username           => username,
+          :api_key            => api_key,
+          :auth_url           => auth_url,
+          :region             => region,
+          :servicenet         => servicenet,
+          :container          => container,
+          :segments_container => segments_container,
+          :segment_size       => segment_size,
+          :days_to_keep       => days_to_keep,
+          :max_retries        => max_retries,
+          :retry_waitsec      => retry_waitsec,
+          :fog_options        => fog_options
+        )
+      end
+
+      def transfer!
+        package.filenames.each do |filename|
+          src = File.join(Config.tmp_path, filename)
+          dest = File.join(remote_path, filename)
+          Logger.info "Storing '#{ container }/#{ dest }'..."
+          cloud_io.upload(src, dest)
+        end
+
+        package.no_cycle = true if days_to_keep
+      end
+
+      # Called by the Cycler.
+      # Any error raised will be logged as a warning.
+      def remove!(package)
+        Logger.info "Removing backup package dated #{ package.time }..."
+
+        remote_path = remote_path_for(package)
+        objects = cloud_io.objects(remote_path)
+
+        raise Error, "Package at '#{ remote_path }' not found" if objects.empty?
+
+        slo_objects, objects = objects.partition(&:slo?)
+        cloud_io.delete_slo(slo_objects)
+        cloud_io.delete(objects)
+      end
+
+      def check_configuration
+        required = %w{ username api_key container }
+        raise Error, <<-EOS if required.map {|name| send(name) }.any?(&:nil?)
+          Configuration Error
+          #{ required.map {|name| "##{ name }"}.join(', ') } are all required
+        EOS
+
+        raise Error, <<-EOS if segment_size > 0 && segments_container.to_s.empty?
+          Configuration Error
+          #segments_container is required if #segment_size is > 0
+        EOS
+
+        raise Error, <<-EOS if container == segments_container
+          Configuration Error
+          #container and #segments_container must not be the same container.
+        EOS
+
+        raise Error, <<-EOS if segment_size > 5120
+          Configuration Error
+          #segment_size is too large (max 5120)
+        EOS
+      end
+
+    end
+  end
+end
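For context, here is a minimal sketch of how this storage could be wired into a Backup model definition. The trigger name, credentials, container names, and sizes below are placeholder values for illustration, not settings shipped with this gem.

```ruby
# Hypothetical model definition using the CloudFiles storage above.
# All credentials, container names, and sizes are placeholder values.
Backup::Model.new(:my_backup, 'Description for my_backup') do
  archive :app_data do |archive|
    archive.add '/path/to/app/data'
  end

  store_with CloudFiles do |cf|
    cf.username           = 'rackspace_username'
    cf.api_key            = 'rackspace_api_key'
    cf.container          = 'my_backups'
    # Package files larger than 100 MiB would be uploaded as SLOs,
    # which requires a separate segments container.
    cf.segment_size       = 100
    cf.segments_container = 'my_backups_segments'
    # Let the server expire packages instead of cycling with `keep`.
    cf.days_to_keep       = 30
  end
end
```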
data/lib/backup/storage/cycler.rb
@@ -0,0 +1,65 @@
+# encoding: utf-8
+
+module Backup
+  module Storage
+    module Cycler
+      class Error < Backup::Error; end
+
+      private
+
+      # Adds the current package being stored to the YAML cycle data file
+      # and will remove any old package file(s) when the storage limit
+      # set by #keep is exceeded.
+      def cycle!
+        Logger.info 'Cycling Started...'
+
+        packages = yaml_load.unshift(package)
+        excess = packages.count - keep.to_i
+
+        if excess > 0
+          packages.pop(excess).each do |pkg|
+            begin
+              remove!(pkg) unless pkg.no_cycle
+            rescue => err
+              Logger.warn Error.wrap(err, <<-EOS)
+                There was a problem removing the following package:
+                Trigger: #{pkg.trigger} :: Dated: #{pkg.time}
+                Package included the following #{ pkg.filenames.count } file(s):
+                #{ pkg.filenames.join("\n") }
+              EOS
+            end
+          end
+        end
+
+        yaml_save(packages)
+      end
+
+      # Returns path to the YAML data file.
+      def yaml_file
+        @yaml_file ||= begin
+          filename = self.class.to_s.split('::').last
+          filename << "-#{ storage_id }" if storage_id
+          File.join(Config.data_path, package.trigger, "#{ filename }.yml")
+        end
+      end
+
+      # Returns stored Package objects, sorted by #time descending (oldest last).
+      def yaml_load
+        if File.exist?(yaml_file) && !File.zero?(yaml_file)
+          YAML.load_file(yaml_file).sort_by!(&:time).reverse!
+        else
+          []
+        end
+      end
+
+      # Stores the given package objects to the YAML data file.
+      def yaml_save(packages)
+        FileUtils.mkdir_p(File.dirname(yaml_file))
+        File.open(yaml_file, 'w') do |file|
+          file.write(packages.to_yaml)
+        end
+      end
+
+    end
+  end
+end
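Because `yaml_load` returns packages sorted newest-first, `pop(excess)` drops the oldest entries once the `keep` limit is exceeded. A hypothetical illustration of that arithmetic in plain Ruby (not gem code):

```ruby
# Illustration only: six stored packages, newest first, keep = 5.
keep     = 5
packages = %w[jun may apr mar feb jan]

excess  = packages.count - keep          # => 1
removed = excess > 0 ? packages.pop(excess) : []

removed   # => ["jan"]  -- the oldest package is removed
packages  # => ["jun", "may", "apr", "mar", "feb"]  -- newest five retained
```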
data/lib/backup/storage/dropbox.rb
@@ -0,0 +1,236 @@
+# encoding: utf-8
+require 'dropbox_sdk'
+
+module Backup
+  module Storage
+    class Dropbox < Base
+      include Storage::Cycler
+      class Error < Backup::Error; end
+
+      ##
+      # Dropbox API credentials
+      attr_accessor :api_key, :api_secret
+
+      ##
+      # Path to store cached authorized session.
+      #
+      # Relative paths will be expanded using Config.root_path,
+      # which by default is ~/Backup unless --root-path was used
+      # on the command line or set in config.rb.
+      #
+      # By default, +cache_path+ is '.cache', which would be
+      # '~/Backup/.cache/' if using the default root_path.
+      attr_accessor :cache_path
+
+      ##
+      # Dropbox Access Type
+      # Valid values are:
+      #   :app_folder (default)
+      #   :dropbox (full access)
+      attr_accessor :access_type
+
+      ##
+      # Chunk size, specified in MiB, for the ChunkedUploader.
+      attr_accessor :chunk_size
+
+      ##
+      # Number of times to retry failed operations.
+      #
+      # Default: 10
+      attr_accessor :max_retries
+
+      ##
+      # Time in seconds to pause before each retry.
+      #
+      # Default: 30
+      attr_accessor :retry_waitsec
+
+      ##
+      # Creates a new instance of the storage object
+      def initialize(model, storage_id = nil)
+        super
+
+        @path          ||= 'backups'
+        @cache_path    ||= '.cache'
+        @access_type   ||= :app_folder
+        @chunk_size    ||= 4 # MiB
+        @max_retries   ||= 10
+        @retry_waitsec ||= 30
+        path.sub!(/^\//, '')
+      end
+
+      private
+
+      ##
+      # The initial connection to Dropbox will provide the user with an
+      # authorization url. The user must open this URL and confirm that the
+      # authorization successfully took place. If this is the case, then the
+      # user hits 'enter' and the session will be properly established.
+      # Immediately after establishing the session, the session will be
+      # serialized and written to a cache file in +cache_path+.
+      # The cached file will be used from that point on to re-establish a
+      # connection with Dropbox at a later time. This allows the user to avoid
+      # having to go to a new Dropbox URL to authorize over and over again.
+      def connection
+        return @connection if @connection
+
+        unless session = cached_session
+          Logger.info "Creating a new session!"
+          session = create_write_and_return_new_session!
+        end
+
+        # will raise an error if session not authorized
+        @connection = DropboxClient.new(session, access_type)
+
+      rescue => err
+        raise Error.wrap(err, 'Authorization Failed')
+      end
+
+      ##
+      # Attempt to load a cached session
+      def cached_session
+        session = false
+        if File.exist?(cached_file)
+          begin
+            session = DropboxSession.deserialize(File.read(cached_file))
+            Logger.info "Session data loaded from cache!"
+
+          rescue => err
+            Logger.warn Error.wrap(err, <<-EOS)
+              Could not read session data from cache.
+              Cache data might be corrupt.
+            EOS
+          end
+        end
+        session
+      end
+
+      ##
+      # Transfer each of the package files to Dropbox in chunks of +chunk_size+.
+      # Each chunk will be retried +max_retries+ times, pausing +retry_waitsec+
+      # between retries, if errors occur.
+      def transfer!
+        package.filenames.each do |filename|
+          src = File.join(Config.tmp_path, filename)
+          dest = File.join(remote_path, filename)
+          Logger.info "Storing '#{ dest }'..."
+
+          uploader = nil
+          File.open(src, 'r') do |file|
+            uploader = connection.get_chunked_uploader(file, file.stat.size)
+            while uploader.offset < uploader.total_size
+              with_retries do
+                uploader.upload(1024**2 * chunk_size)
+              end
+            end
+          end
+
+          with_retries do
+            uploader.finish(dest)
+          end
+        end
+
+      rescue => err
+        raise Error.wrap(err, 'Upload Failed!')
+      end
+
+      def with_retries
+        retries = 0
+        begin
+          yield
+        rescue StandardError => err
+          retries += 1
+          raise if retries > max_retries
+
+          Logger.info Error.wrap(err, "Retry ##{ retries } of #{ max_retries }.")
+          sleep(retry_waitsec)
+          retry
+        end
+      end
+
+      # Called by the Cycler.
+      # Any error raised will be logged as a warning.
+      def remove!(package)
+        Logger.info "Removing backup package dated #{ package.time }..."
+
+        connection.file_delete(remote_path_for(package))
+      end
+
+      def cached_file
+        path = cache_path.start_with?('/') ?
+            cache_path : File.join(Config.root_path, cache_path)
+        File.join(path, api_key + api_secret)
+      end
+
+      ##
+      # Serializes and writes the Dropbox session to a cache file
+      def write_cache!(session)
+        FileUtils.mkdir_p File.dirname(cached_file)
+        File.open(cached_file, "w") do |cache_file|
+          cache_file.write(session.serialize)
+        end
+      end
+
+      ##
+      # Create a new session, write a serialized version of it to the
+      # .cache directory, and return the session object
+      def create_write_and_return_new_session!
+        require 'timeout'
+
+        session = DropboxSession.new(api_key, api_secret)
+
+        # grab the request token for session
+        session.get_request_token
+
+        template = Backup::Template.new(
+          {:session => session, :cached_file => cached_file}
+        )
+        template.render("storage/dropbox/authorization_url.erb")
+
+        # wait for user to hit 'return' to continue
+        Timeout::timeout(180) { STDIN.gets }
+
+        # this will raise an error if the user did not
+        # visit the authorization_url and grant access
+        #
+        # get the access token from the server
+        # this will be stored with the session in the cache file
+        session.get_access_token
+
+        template.render("storage/dropbox/authorized.erb")
+        write_cache!(session)
+        template.render("storage/dropbox/cache_file_written.erb")
+
+        session
+
+      rescue => err
+        raise Error.wrap(err, 'Could not create or authenticate a new session')
+      end
+
+    end
+  end
+end
+
+# Patch for dropbox-ruby-sdk-1.5.1
+class DropboxClient
+  class ChunkedUploader
+    def upload(chunk_size = 1024**2 * 4)
+      while @offset < @total_size
+        @file_obj.seek(@offset) unless @file_obj.pos == @offset
+        data = @file_obj.read(chunk_size)
+
+        begin
+          resp = @client.parse_response(
+            @client.partial_chunked_upload(data, @upload_id, @offset)
+          )
+        rescue DropboxError => err
+          resp = JSON.parse(err.http_response.body) rescue {}
+          raise err unless resp['offset']
+        end
+
+        @offset = resp['offset']
+        @upload_id ||= resp['upload_id']
+      end
+    end
+  end
+end
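And a corresponding sketch for the Dropbox storage. The app key and secret below are placeholders for a registered Dropbox app; on the first run the storage would still print an authorization URL via `create_write_and_return_new_session!` and wait for confirmation before caching the session.

```ruby
# Hypothetical model definition using the Dropbox storage above.
# api_key/api_secret are placeholders for a registered Dropbox app.
Backup::Model.new(:my_backup, 'Description for my_backup') do
  store_with Dropbox do |db|
    db.api_key       = 'dropbox_app_key'
    db.api_secret    = 'dropbox_app_secret'
    db.access_type   = :app_folder
    db.path          = 'backups'
    db.chunk_size    = 4    # MiB per chunked-upload request
    db.max_retries   = 10
    db.retry_waitsec = 30
    db.keep          = 25   # package count enforced by Storage::Cycler
  end
end
```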