cm-backup 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (133)
  1. checksums.yaml +7 -0
  2. data/README.md +20 -0
  3. data/bin/backup +5 -0
  4. data/lib/backup.rb +144 -0
  5. data/lib/backup/archive.rb +170 -0
  6. data/lib/backup/binder.rb +22 -0
  7. data/lib/backup/cleaner.rb +116 -0
  8. data/lib/backup/cli.rb +374 -0
  9. data/lib/backup/cloud_io/base.rb +41 -0
  10. data/lib/backup/cloud_io/cloud_files.rb +298 -0
  11. data/lib/backup/cloud_io/s3.rb +260 -0
  12. data/lib/backup/compressor/base.rb +35 -0
  13. data/lib/backup/compressor/bzip2.rb +39 -0
  14. data/lib/backup/compressor/custom.rb +53 -0
  15. data/lib/backup/compressor/gzip.rb +74 -0
  16. data/lib/backup/config.rb +119 -0
  17. data/lib/backup/config/dsl.rb +103 -0
  18. data/lib/backup/config/helpers.rb +143 -0
  19. data/lib/backup/database/base.rb +85 -0
  20. data/lib/backup/database/mongodb.rb +187 -0
  21. data/lib/backup/database/mysql.rb +192 -0
  22. data/lib/backup/database/openldap.rb +95 -0
  23. data/lib/backup/database/postgresql.rb +133 -0
  24. data/lib/backup/database/redis.rb +179 -0
  25. data/lib/backup/database/riak.rb +82 -0
  26. data/lib/backup/database/sqlite.rb +57 -0
  27. data/lib/backup/encryptor/base.rb +29 -0
  28. data/lib/backup/encryptor/gpg.rb +747 -0
  29. data/lib/backup/encryptor/open_ssl.rb +77 -0
  30. data/lib/backup/errors.rb +58 -0
  31. data/lib/backup/logger.rb +199 -0
  32. data/lib/backup/logger/console.rb +51 -0
  33. data/lib/backup/logger/fog_adapter.rb +29 -0
  34. data/lib/backup/logger/logfile.rb +133 -0
  35. data/lib/backup/logger/syslog.rb +116 -0
  36. data/lib/backup/model.rb +479 -0
  37. data/lib/backup/notifier/base.rb +128 -0
  38. data/lib/backup/notifier/campfire.rb +63 -0
  39. data/lib/backup/notifier/command.rb +102 -0
  40. data/lib/backup/notifier/datadog.rb +107 -0
  41. data/lib/backup/notifier/flowdock.rb +103 -0
  42. data/lib/backup/notifier/hipchat.rb +118 -0
  43. data/lib/backup/notifier/http_post.rb +117 -0
  44. data/lib/backup/notifier/mail.rb +249 -0
  45. data/lib/backup/notifier/nagios.rb +69 -0
  46. data/lib/backup/notifier/pagerduty.rb +81 -0
  47. data/lib/backup/notifier/prowl.rb +68 -0
  48. data/lib/backup/notifier/pushover.rb +74 -0
  49. data/lib/backup/notifier/ses.rb +105 -0
  50. data/lib/backup/notifier/slack.rb +148 -0
  51. data/lib/backup/notifier/twitter.rb +58 -0
  52. data/lib/backup/notifier/zabbix.rb +63 -0
  53. data/lib/backup/package.rb +55 -0
  54. data/lib/backup/packager.rb +107 -0
  55. data/lib/backup/pipeline.rb +124 -0
  56. data/lib/backup/splitter.rb +76 -0
  57. data/lib/backup/storage/base.rb +69 -0
  58. data/lib/backup/storage/cloud_files.rb +158 -0
  59. data/lib/backup/storage/cycler.rb +75 -0
  60. data/lib/backup/storage/dropbox.rb +212 -0
  61. data/lib/backup/storage/ftp.rb +112 -0
  62. data/lib/backup/storage/local.rb +64 -0
  63. data/lib/backup/storage/qiniu.rb +65 -0
  64. data/lib/backup/storage/rsync.rb +248 -0
  65. data/lib/backup/storage/s3.rb +156 -0
  66. data/lib/backup/storage/scp.rb +67 -0
  67. data/lib/backup/storage/sftp.rb +82 -0
  68. data/lib/backup/syncer/base.rb +70 -0
  69. data/lib/backup/syncer/cloud/base.rb +179 -0
  70. data/lib/backup/syncer/cloud/cloud_files.rb +83 -0
  71. data/lib/backup/syncer/cloud/local_file.rb +100 -0
  72. data/lib/backup/syncer/cloud/s3.rb +110 -0
  73. data/lib/backup/syncer/rsync/base.rb +54 -0
  74. data/lib/backup/syncer/rsync/local.rb +31 -0
  75. data/lib/backup/syncer/rsync/pull.rb +51 -0
  76. data/lib/backup/syncer/rsync/push.rb +205 -0
  77. data/lib/backup/template.rb +46 -0
  78. data/lib/backup/utilities.rb +224 -0
  79. data/lib/backup/version.rb +5 -0
  80. data/templates/cli/archive +28 -0
  81. data/templates/cli/compressor/bzip2 +4 -0
  82. data/templates/cli/compressor/custom +7 -0
  83. data/templates/cli/compressor/gzip +4 -0
  84. data/templates/cli/config +123 -0
  85. data/templates/cli/databases/mongodb +15 -0
  86. data/templates/cli/databases/mysql +18 -0
  87. data/templates/cli/databases/openldap +24 -0
  88. data/templates/cli/databases/postgresql +16 -0
  89. data/templates/cli/databases/redis +16 -0
  90. data/templates/cli/databases/riak +17 -0
  91. data/templates/cli/databases/sqlite +11 -0
  92. data/templates/cli/encryptor/gpg +27 -0
  93. data/templates/cli/encryptor/openssl +9 -0
  94. data/templates/cli/model +26 -0
  95. data/templates/cli/notifier/zabbix +15 -0
  96. data/templates/cli/notifiers/campfire +12 -0
  97. data/templates/cli/notifiers/command +32 -0
  98. data/templates/cli/notifiers/datadog +57 -0
  99. data/templates/cli/notifiers/flowdock +16 -0
  100. data/templates/cli/notifiers/hipchat +16 -0
  101. data/templates/cli/notifiers/http_post +32 -0
  102. data/templates/cli/notifiers/mail +24 -0
  103. data/templates/cli/notifiers/nagios +13 -0
  104. data/templates/cli/notifiers/pagerduty +12 -0
  105. data/templates/cli/notifiers/prowl +11 -0
  106. data/templates/cli/notifiers/pushover +11 -0
  107. data/templates/cli/notifiers/ses +15 -0
  108. data/templates/cli/notifiers/slack +22 -0
  109. data/templates/cli/notifiers/twitter +13 -0
  110. data/templates/cli/splitter +7 -0
  111. data/templates/cli/storages/cloud_files +11 -0
  112. data/templates/cli/storages/dropbox +20 -0
  113. data/templates/cli/storages/ftp +13 -0
  114. data/templates/cli/storages/local +8 -0
  115. data/templates/cli/storages/qiniu +12 -0
  116. data/templates/cli/storages/rsync +17 -0
  117. data/templates/cli/storages/s3 +16 -0
  118. data/templates/cli/storages/scp +15 -0
  119. data/templates/cli/storages/sftp +15 -0
  120. data/templates/cli/syncers/cloud_files +22 -0
  121. data/templates/cli/syncers/rsync_local +20 -0
  122. data/templates/cli/syncers/rsync_pull +28 -0
  123. data/templates/cli/syncers/rsync_push +28 -0
  124. data/templates/cli/syncers/s3 +27 -0
  125. data/templates/general/links +3 -0
  126. data/templates/general/version.erb +2 -0
  127. data/templates/notifier/mail/failure.erb +16 -0
  128. data/templates/notifier/mail/success.erb +16 -0
  129. data/templates/notifier/mail/warning.erb +16 -0
  130. data/templates/storage/dropbox/authorization_url.erb +6 -0
  131. data/templates/storage/dropbox/authorized.erb +4 -0
  132. data/templates/storage/dropbox/cache_file_written.erb +10 -0
  133. metadata +1077 -0
data/lib/backup/storage/base.rb
@@ -0,0 +1,69 @@
+# encoding: utf-8
+
+module Backup
+  module Storage
+    class Base
+      include Config::Helpers
+
+      ##
+      # Base path on the remote where backup package files will be stored.
+      attr_accessor :path
+
+      ##
+      # Number of backups to keep or time until which to keep.
+      #
+      # If an Integer is given it sets the limit to how many backups to keep in
+      # the remote location. If exceeded, the oldest will be removed to make
+      # room for the newest.
+      #
+      # If a Time object is given it will remove backups _older_ than the given
+      # date.
+      #
+      # @!attribute [rw] keep
+      #   @param [Integer|Time]
+      #   @return [Integer|Time]
+      attr_accessor :keep
+
+      attr_reader :model, :package, :storage_id
+
+      ##
+      # +storage_id+ is a user-defined string used to uniquely identify
+      # multiple storages of the same type. If multiple storages of the same
+      # type are added to a single backup model, this identifier must be set.
+      # This will be appended to the YAML storage file used for cycling backups.
+      def initialize(model, storage_id = nil, &block)
+        @model = model
+        @package = model.package
+        @storage_id = storage_id.to_s.gsub(/\W/, '_') if storage_id
+
+        load_defaults!
+        instance_eval(&block) if block_given?
+      end
+
+      def perform!
+        Logger.info "#{ storage_name } Started..."
+        transfer!
+        if respond_to?(:cycle!, true) && (keep.to_i > 0 || keep.is_a?(Time))
+          cycle!
+        end
+        Logger.info "#{ storage_name } Finished!"
+      end
+
+      private
+
+      ##
+      # Return the remote path for the current or given package.
+      def remote_path(pkg = package)
+        path.empty? ? File.join(pkg.trigger, pkg.time) :
+            File.join(path, pkg.trigger, pkg.time)
+      end
+      alias :remote_path_for :remote_path
+
+      def storage_name
+        @storage_name ||= self.class.to_s.sub('Backup::', '') +
+            (storage_id ? " (#{ storage_id })" : '')
+      end
+
+    end
+  end
+end
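
A storage subclass is configured through the block passed to #initialize above. The sketch below assumes the Model DSL (Model#store_with in data/lib/backup/model.rb, not shown in this hunk); the SFTP storage, trigger name, and values are placeholders illustrating #path, #keep, and the optional storage_id:

    # Hypothetical model file, e.g. ~/Backup/models/my_backup.rb
    Backup::Model.new(:my_backup, 'Description for my_backup') do
      # Second argument is the optional storage_id ('offsite' here).
      store_with SFTP, :offsite do |server|
        server.path = '~/backups'             # Base#path
        server.keep = 10                      # Integer: keep the 10 newest packages
        # server.keep = Time.now - 2_592_000  # Time: remove packages older than 30 days
      end
    end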
data/lib/backup/storage/cloud_files.rb
@@ -0,0 +1,158 @@
+# encoding: utf-8
+require 'backup/cloud_io/cloud_files'
+
+module Backup
+  module Storage
+    class CloudFiles < Base
+      include Storage::Cycler
+      class Error < Backup::Error; end
+
+      ##
+      # Rackspace CloudFiles Credentials
+      attr_accessor :username, :api_key
+
+      ##
+      # Rackspace Auth URL (optional)
+      attr_accessor :auth_url
+
+      ##
+      # Rackspace Service Net
+      # (LAN-based transfers to avoid charges and improve performance)
+      attr_accessor :servicenet
+
+      ##
+      # Rackspace Region (optional)
+      attr_accessor :region
+
+      ##
+      # Rackspace Container Name
+      attr_accessor :container
+
+      ##
+      # Rackspace Container Name for SLO Segments
+      # Required if #segment_size is set. Must be different from #container.
+      attr_accessor :segments_container
+
+      ##
+      # SLO Segment size, specified in MiB.
+      #
+      # Each package file larger than +segment_size+
+      # will be uploaded as a Static Large Object (SLO).
+      #
+      # Defaults to 0 for backward compatibility (pre v3.7.0),
+      # since #segments_container would be required.
+      #
+      # Minimum: 1 (0 disables SLO support)
+      # Maximum: 5120 (5 GiB)
+      attr_accessor :segment_size
+
+      ##
+      # If set, all backup package files (including SLO segments) will be
+      # scheduled for automatic removal by the server.
+      #
+      # The `keep` option should not be used if this is set,
+      # unless you're transitioning from the `keep` option.
+      attr_accessor :days_to_keep
+
+      ##
+      # Number of times to retry failed operations.
+      #
+      # Default: 10
+      attr_accessor :max_retries
+
+      ##
+      # Time in seconds to pause before each retry.
+      #
+      # Default: 30
+      attr_accessor :retry_waitsec
+
+      ##
+      # Additional options to pass along to fog.
+      # e.g. Fog::Storage.new({ :provider => 'Rackspace' }.merge(fog_options))
+      attr_accessor :fog_options
+
+      def initialize(model, storage_id = nil)
+        super
+
+        @servicenet    ||= false
+        @segment_size  ||= 0
+        @max_retries   ||= 10
+        @retry_waitsec ||= 30
+
+        @path ||= 'backups'
+        path.sub!(/^\//, '')
+
+        check_configuration
+      end
+
+      private
+
+      def cloud_io
+        @cloud_io ||= CloudIO::CloudFiles.new(
+          :username           => username,
+          :api_key            => api_key,
+          :auth_url           => auth_url,
+          :region             => region,
+          :servicenet         => servicenet,
+          :container          => container,
+          :segments_container => segments_container,
+          :segment_size       => segment_size,
+          :days_to_keep       => days_to_keep,
+          :max_retries        => max_retries,
+          :retry_waitsec      => retry_waitsec,
+          :fog_options        => fog_options
+        )
+      end
+
+      def transfer!
+        package.filenames.each do |filename|
+          src = File.join(Config.tmp_path, filename)
+          dest = File.join(remote_path, filename)
+          Logger.info "Storing '#{ container }/#{ dest }'..."
+          cloud_io.upload(src, dest)
+        end
+
+        package.no_cycle = true if days_to_keep
+      end
+
+      # Called by the Cycler.
+      # Any error raised will be logged as a warning.
+      def remove!(package)
+        Logger.info "Removing backup package dated #{ package.time }..."
+
+        remote_path = remote_path_for(package)
+        objects = cloud_io.objects(remote_path)
+
+        raise Error, "Package at '#{ remote_path }' not found" if objects.empty?
+
+        slo_objects, objects = objects.partition(&:slo?)
+        cloud_io.delete_slo(slo_objects)
+        cloud_io.delete(objects)
+      end
+
+      def check_configuration
+        required = %w{ username api_key container }
+        raise Error, <<-EOS if required.map {|name| send(name) }.any?(&:nil?)
+          Configuration Error
+          #{ required.map {|name| "##{ name }"}.join(', ') } are all required
+        EOS
+
+        raise Error, <<-EOS if segment_size > 0 && segments_container.to_s.empty?
+          Configuration Error
+          #segments_container is required if #segment_size is > 0
+        EOS
+
+        raise Error, <<-EOS if container == segments_container
+          Configuration Error
+          #container and #segments_container must not be the same container.
+        EOS
+
+        raise Error, <<-EOS if segment_size > 5120
+          Configuration Error
+          #segment_size is too large (max 5120)
+        EOS
+      end
+
+    end
+  end
+end
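
For reference, a hedged configuration sketch for the storage above (within a model definition, assuming the same Model#store_with DSL as in the earlier sketch); credentials are placeholders, and the commented constraints are the ones enforced by #check_configuration:

    store_with CloudFiles do |cf|
      cf.username  = 'my_username'      # required
      cf.api_key   = 'my_api_key'       # required
      cf.container = 'my_backups'       # required
      # If segment_size > 0 (max 5120 MiB), segments_container is required
      # and must differ from container.
      cf.segment_size       = 5
      cf.segments_container = 'my_backups_segments'
      # Server-side expiry; transfer! sets package.no_cycle so the Cycler
      # will not also try to remove these files.
      cf.days_to_keep = 90
    end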
data/lib/backup/storage/cycler.rb
@@ -0,0 +1,75 @@
+# encoding: utf-8
+
+module Backup
+  module Storage
+    module Cycler
+      class Error < Backup::Error; end
+
+      private
+
+      # Adds the current package being stored to the YAML cycle data file
+      # and will remove any old package file(s) when the storage limit
+      # set by #keep is exceeded.
+      def cycle!
+        Logger.info 'Cycling Started...'
+
+        packages = yaml_load.unshift(package)
+        cycled_packages = []
+
+        if keep.is_a?(Date) || keep.is_a?(Time)
+          cycled_packages = packages.select do |p|
+            p.time_as_object < keep.to_time
+          end
+        else
+          excess = packages.count - keep.to_i
+          cycled_packages = packages.last(excess) if excess > 0
+        end
+
+        saved_packages = packages - cycled_packages
+        cycled_packages.each { |package| delete_package package }
+
+        yaml_save(saved_packages)
+      end
+
+      def delete_package(package)
+        begin
+          remove!(package) unless package.no_cycle
+        rescue => err
+          Logger.warn Error.wrap(err, <<-EOS)
+            There was a problem removing the following package:
+            Trigger: #{package.trigger} :: Dated: #{package.time}
+            Package included the following #{ package.filenames.count } file(s):
+            #{ package.filenames.join("\n") }
+          EOS
+        end
+      end
+
+      # Returns path to the YAML data file.
+      def yaml_file
+        @yaml_file ||= begin
+          filename = self.class.to_s.split('::').last
+          filename << "-#{ storage_id }" if storage_id
+          File.join(Config.data_path, package.trigger, "#{ filename }.yml")
+        end
+      end
+
+      # Returns stored Package objects, sorted by #time descending (oldest last).
+      def yaml_load
+        if File.exist?(yaml_file) && !File.zero?(yaml_file)
+          YAML.load_file(yaml_file).sort_by!(&:time).reverse!
+        else
+          []
+        end
+      end
+
+      # Stores the given package objects to the YAML data file.
+      def yaml_save(packages)
+        FileUtils.mkdir_p(File.dirname(yaml_file))
+        File.open(yaml_file, 'w') do |file|
+          file.write(packages.to_yaml)
+        end
+      end
+
+    end
+  end
+end
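
A standalone sketch of the retention rule in #cycle! above; PackageStub and its timestamp format are hypothetical stand-ins for Backup::Package (data/lib/backup/package.rb), which is not shown in this hunk:

    require 'date'
    require 'time'

    # Minimal stand-in exposing the two fields #cycle! relies on.
    PackageStub = Struct.new(:trigger, :time) do
      def time_as_object
        Time.strptime(time, '%Y.%m.%d.%H.%M.%S')
      end
    end

    packages = [
      PackageStub.new('my_backup', '2014.01.03.12.00.00'),
      PackageStub.new('my_backup', '2014.01.02.12.00.00'),
      PackageStub.new('my_backup', '2014.01.01.12.00.00')
    ]

    keep = 2  # Integer: keep the two newest; a Time/Date drops anything older than it
    if keep.is_a?(Date) || keep.is_a?(Time)
      cycled = packages.select { |p| p.time_as_object < keep.to_time }
    else
      excess = packages.count - keep.to_i
      cycled = excess > 0 ? packages.last(excess) : []
    end
    puts cycled.map(&:time)  # => 2014.01.01.12.00.00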
data/lib/backup/storage/dropbox.rb
@@ -0,0 +1,212 @@
+# encoding: utf-8
+require 'dropbox_sdk'
+
+module Backup
+  module Storage
+    class Dropbox < Base
+      include Storage::Cycler
+      class Error < Backup::Error; end
+
+      ##
+      # Dropbox API credentials
+      attr_accessor :api_key, :api_secret
+
+      ##
+      # Path to store cached authorized session.
+      #
+      # Relative paths will be expanded using Config.root_path,
+      # which by default is ~/Backup unless --root-path was used
+      # on the command line or set in config.rb.
+      #
+      # By default, +cache_path+ is '.cache', which would be
+      # '~/Backup/.cache/' if using the default root_path.
+      attr_accessor :cache_path
+
+      ##
+      # Dropbox Access Type
+      # Valid values are:
+      #   :app_folder (default)
+      #   :dropbox (full access)
+      attr_accessor :access_type
+
+      ##
+      # Chunk size, specified in MiB, for the ChunkedUploader.
+      attr_accessor :chunk_size
+
+      ##
+      # Number of times to retry failed operations.
+      #
+      # Default: 10
+      attr_accessor :max_retries
+
+      ##
+      # Time in seconds to pause before each retry.
+      #
+      # Default: 30
+      attr_accessor :retry_waitsec
+
+      ##
+      # Creates a new instance of the storage object
+      def initialize(model, storage_id = nil)
+        super
+
+        @path          ||= 'backups'
+        @cache_path    ||= '.cache'
+        @access_type   ||= :app_folder
+        @chunk_size    ||= 4 # MiB
+        @max_retries   ||= 10
+        @retry_waitsec ||= 30
+        path.sub!(/^\//, '')
+      end
+
+      private
+
+      ##
+      # The initial connection to Dropbox will provide the user with an
+      # authorization url. The user must open this URL and confirm that the
+      # authorization successfully took place. If this is the case, then the
+      # user hits 'enter' and the session will be properly established.
+      # Immediately after establishing the session, the session will be
+      # serialized and written to a cache file in +cache_path+.
+      # The cached file will be used from that point on to re-establish a
+      # connection with Dropbox at a later time. This allows the user to avoid
+      # having to go to a new Dropbox URL to authorize over and over again.
+      def connection
+        return @connection if @connection
+
+        unless session = cached_session
+          Logger.info "Creating a new session!"
+          session = create_write_and_return_new_session!
+        end
+
+        # will raise an error if session not authorized
+        @connection = DropboxClient.new(session, access_type)
+
+      rescue => err
+        raise Error.wrap(err, 'Authorization Failed')
+      end
+
+      ##
+      # Attempt to load a cached session
+      def cached_session
+        session = false
+        if File.exist?(cached_file)
+          begin
+            session = DropboxSession.deserialize(File.read(cached_file))
+            Logger.info "Session data loaded from cache!"
+
+          rescue => err
+            Logger.warn Error.wrap(err, <<-EOS)
+              Could not read session data from cache.
+              Cache data might be corrupt.
+            EOS
+          end
+        end
+        session
+      end
+
+      ##
+      # Transfer each of the package files to Dropbox in chunks of +chunk_size+.
+      # Each chunk will be retried +max_retries+ times, pausing +retry_waitsec+
+      # between retries, if errors occur.
+      def transfer!
+        package.filenames.each do |filename|
+          src = File.join(Config.tmp_path, filename)
+          dest = File.join(remote_path, filename)
+          Logger.info "Storing '#{ dest }'..."
+
+          uploader = nil
+          File.open(src, 'r') do |file|
+            uploader = connection.get_chunked_uploader(file, file.stat.size)
+            while uploader.offset < uploader.total_size
+              with_retries do
+                uploader.upload(1024**2 * chunk_size)
+              end
+            end
+          end
+
+          with_retries do
+            uploader.finish(dest)
+          end
+        end
+
+      rescue => err
+        raise Error.wrap(err, 'Upload Failed!')
+      end
+
+      def with_retries
+        retries = 0
+        begin
+          yield
+        rescue StandardError => err
+          retries += 1
+          raise if retries > max_retries
+
+          Logger.info Error.wrap(err, "Retry ##{ retries } of #{ max_retries }.")
+          sleep(retry_waitsec)
+          retry
+        end
+      end
+
+      # Called by the Cycler.
+      # Any error raised will be logged as a warning.
+      def remove!(package)
+        Logger.info "Removing backup package dated #{ package.time }..."
+
+        connection.file_delete(remote_path_for(package))
+      end
+
+      def cached_file
+        path = cache_path.start_with?('/') ?
+            cache_path : File.join(Config.root_path, cache_path)
+        File.join(path, api_key + api_secret)
+      end
+
+      ##
+      # Serializes and writes the Dropbox session to a cache file
+      def write_cache!(session)
+        FileUtils.mkdir_p File.dirname(cached_file)
+        File.open(cached_file, "w") do |cache_file|
+          cache_file.write(session.serialize)
+        end
+      end
+
+      ##
+      # Create a new session, write a serialized version of it to the
+      # .cache directory, and return the session object
+      def create_write_and_return_new_session!
+        require 'timeout'
+
+        session = DropboxSession.new(api_key, api_secret)
+
+        # grab the request token for session
+        session.get_request_token
+
+        template = Backup::Template.new(
+          {:session => session, :cached_file => cached_file}
+        )
+        template.render("storage/dropbox/authorization_url.erb")
+
+        # wait for user to hit 'return' to continue
+        Timeout::timeout(180) { STDIN.gets }
+
+        # this will raise an error if the user did not
+        # visit the authorization_url and grant access
+        #
+        # get the access token from the server
+        # this will be stored with the session in the cache file
+        session.get_access_token
+
+        template.render("storage/dropbox/authorized.erb")
+        write_cache!(session)
+        template.render("storage/dropbox/cache_file_written.erb")
+
+        session
+
+      rescue => err
+        raise Error.wrap(err, 'Could not create or authenticate a new session')
+      end
+
+    end
+  end
+end
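
Finally, a hedged configuration sketch for the Dropbox storage above (same Model#store_with DSL assumed; values are placeholders, and the defaults shown are those set in #initialize). On first run the storage renders the authorization templates under data/templates/storage/dropbox/ and caches the authorized session beneath cache_path:

    store_with Dropbox do |db|
      db.api_key       = 'my_app_key'
      db.api_secret    = 'my_app_secret'
      db.access_type   = :app_folder   # or :dropbox for full access
      db.path          = 'backups'
      db.cache_path    = '.cache'      # serialized session cached here after first auth
      db.chunk_size    = 4             # MiB per ChunkedUploader call
      db.max_retries   = 10
      db.retry_waitsec = 30
      db.keep          = 25
    end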