ext_backup 5.0.0.beta.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (137)
  1. checksums.yaml +7 -0
  2. data/LICENSE +19 -0
  3. data/README.md +33 -0
  4. data/bin/backup +5 -0
  5. data/bin/docker_test +24 -0
  6. data/lib/backup.rb +140 -0
  7. data/lib/backup/archive.rb +169 -0
  8. data/lib/backup/binder.rb +18 -0
  9. data/lib/backup/cleaner.rb +112 -0
  10. data/lib/backup/cli.rb +370 -0
  11. data/lib/backup/cloud_io/base.rb +38 -0
  12. data/lib/backup/cloud_io/cloud_files.rb +296 -0
  13. data/lib/backup/cloud_io/s3.rb +253 -0
  14. data/lib/backup/compressor/base.rb +32 -0
  15. data/lib/backup/compressor/bzip2.rb +35 -0
  16. data/lib/backup/compressor/custom.rb +49 -0
  17. data/lib/backup/compressor/gzip.rb +73 -0
  18. data/lib/backup/config.rb +128 -0
  19. data/lib/backup/config/dsl.rb +102 -0
  20. data/lib/backup/config/helpers.rb +137 -0
  21. data/lib/backup/database/base.rb +86 -0
  22. data/lib/backup/database/mongodb.rb +186 -0
  23. data/lib/backup/database/mysql.rb +191 -0
  24. data/lib/backup/database/openldap.rb +93 -0
  25. data/lib/backup/database/postgresql.rb +132 -0
  26. data/lib/backup/database/redis.rb +176 -0
  27. data/lib/backup/database/riak.rb +79 -0
  28. data/lib/backup/database/sqlite.rb +55 -0
  29. data/lib/backup/encryptor/base.rb +27 -0
  30. data/lib/backup/encryptor/gpg.rb +737 -0
  31. data/lib/backup/encryptor/open_ssl.rb +74 -0
  32. data/lib/backup/errors.rb +53 -0
  33. data/lib/backup/logger.rb +197 -0
  34. data/lib/backup/logger/console.rb +48 -0
  35. data/lib/backup/logger/fog_adapter.rb +25 -0
  36. data/lib/backup/logger/logfile.rb +131 -0
  37. data/lib/backup/logger/syslog.rb +114 -0
  38. data/lib/backup/model.rb +472 -0
  39. data/lib/backup/notifier/base.rb +126 -0
  40. data/lib/backup/notifier/campfire.rb +61 -0
  41. data/lib/backup/notifier/command.rb +99 -0
  42. data/lib/backup/notifier/datadog.rb +104 -0
  43. data/lib/backup/notifier/flowdock.rb +99 -0
  44. data/lib/backup/notifier/hipchat.rb +116 -0
  45. data/lib/backup/notifier/http_post.rb +114 -0
  46. data/lib/backup/notifier/mail.rb +232 -0
  47. data/lib/backup/notifier/nagios.rb +65 -0
  48. data/lib/backup/notifier/pagerduty.rb +79 -0
  49. data/lib/backup/notifier/prowl.rb +68 -0
  50. data/lib/backup/notifier/pushover.rb +71 -0
  51. data/lib/backup/notifier/ses.rb +123 -0
  52. data/lib/backup/notifier/slack.rb +147 -0
  53. data/lib/backup/notifier/twitter.rb +55 -0
  54. data/lib/backup/notifier/zabbix.rb +60 -0
  55. data/lib/backup/package.rb +51 -0
  56. data/lib/backup/packager.rb +106 -0
  57. data/lib/backup/pipeline.rb +120 -0
  58. data/lib/backup/splitter.rb +73 -0
  59. data/lib/backup/storage/base.rb +66 -0
  60. data/lib/backup/storage/cloud_files.rb +156 -0
  61. data/lib/backup/storage/cycler.rb +70 -0
  62. data/lib/backup/storage/dropbox.rb +206 -0
  63. data/lib/backup/storage/ftp.rb +116 -0
  64. data/lib/backup/storage/local.rb +61 -0
  65. data/lib/backup/storage/qiniu.rb +65 -0
  66. data/lib/backup/storage/rsync.rb +246 -0
  67. data/lib/backup/storage/s3.rb +155 -0
  68. data/lib/backup/storage/scp.rb +65 -0
  69. data/lib/backup/storage/sftp.rb +80 -0
  70. data/lib/backup/syncer/base.rb +67 -0
  71. data/lib/backup/syncer/cloud/base.rb +176 -0
  72. data/lib/backup/syncer/cloud/cloud_files.rb +81 -0
  73. data/lib/backup/syncer/cloud/local_file.rb +97 -0
  74. data/lib/backup/syncer/cloud/s3.rb +109 -0
  75. data/lib/backup/syncer/rsync/base.rb +50 -0
  76. data/lib/backup/syncer/rsync/local.rb +27 -0
  77. data/lib/backup/syncer/rsync/pull.rb +47 -0
  78. data/lib/backup/syncer/rsync/push.rb +201 -0
  79. data/lib/backup/template.rb +41 -0
  80. data/lib/backup/utilities.rb +233 -0
  81. data/lib/backup/version.rb +3 -0
  82. data/lib/ext_backup.rb +5 -0
  83. data/lib/ext_backup/version.rb +5 -0
  84. data/templates/cli/archive +28 -0
  85. data/templates/cli/compressor/bzip2 +4 -0
  86. data/templates/cli/compressor/custom +7 -0
  87. data/templates/cli/compressor/gzip +4 -0
  88. data/templates/cli/config +123 -0
  89. data/templates/cli/databases/mongodb +15 -0
  90. data/templates/cli/databases/mysql +18 -0
  91. data/templates/cli/databases/openldap +24 -0
  92. data/templates/cli/databases/postgresql +16 -0
  93. data/templates/cli/databases/redis +16 -0
  94. data/templates/cli/databases/riak +17 -0
  95. data/templates/cli/databases/sqlite +11 -0
  96. data/templates/cli/encryptor/gpg +27 -0
  97. data/templates/cli/encryptor/openssl +9 -0
  98. data/templates/cli/model +26 -0
  99. data/templates/cli/notifier/zabbix +15 -0
  100. data/templates/cli/notifiers/campfire +12 -0
  101. data/templates/cli/notifiers/command +32 -0
  102. data/templates/cli/notifiers/datadog +57 -0
  103. data/templates/cli/notifiers/flowdock +16 -0
  104. data/templates/cli/notifiers/hipchat +16 -0
  105. data/templates/cli/notifiers/http_post +32 -0
  106. data/templates/cli/notifiers/mail +24 -0
  107. data/templates/cli/notifiers/nagios +13 -0
  108. data/templates/cli/notifiers/pagerduty +12 -0
  109. data/templates/cli/notifiers/prowl +11 -0
  110. data/templates/cli/notifiers/pushover +11 -0
  111. data/templates/cli/notifiers/ses +15 -0
  112. data/templates/cli/notifiers/slack +22 -0
  113. data/templates/cli/notifiers/twitter +13 -0
  114. data/templates/cli/splitter +7 -0
  115. data/templates/cli/storages/cloud_files +11 -0
  116. data/templates/cli/storages/dropbox +20 -0
  117. data/templates/cli/storages/ftp +13 -0
  118. data/templates/cli/storages/local +8 -0
  119. data/templates/cli/storages/qiniu +12 -0
  120. data/templates/cli/storages/rsync +17 -0
  121. data/templates/cli/storages/s3 +16 -0
  122. data/templates/cli/storages/scp +15 -0
  123. data/templates/cli/storages/sftp +15 -0
  124. data/templates/cli/syncers/cloud_files +22 -0
  125. data/templates/cli/syncers/rsync_local +20 -0
  126. data/templates/cli/syncers/rsync_pull +28 -0
  127. data/templates/cli/syncers/rsync_push +28 -0
  128. data/templates/cli/syncers/s3 +27 -0
  129. data/templates/general/links +3 -0
  130. data/templates/general/version.erb +2 -0
  131. data/templates/notifier/mail/failure.erb +16 -0
  132. data/templates/notifier/mail/success.erb +16 -0
  133. data/templates/notifier/mail/warning.erb +16 -0
  134. data/templates/storage/dropbox/authorization_url.erb +6 -0
  135. data/templates/storage/dropbox/authorized.erb +4 -0
  136. data/templates/storage/dropbox/cache_file_written.erb +10 -0
  137. metadata +506 -0
@@ -0,0 +1,66 @@
1
module Backup
  module Storage
    class Base
      include Config::Helpers

      ##
      # Base path on the remote where backup package files will be stored.
      attr_accessor :path

      ##
      # Retention setting: how many backups to keep, or a cutoff time.
      #
      # An Integer caps the number of backups kept at the remote location;
      # once the cap is exceeded, the oldest packages are removed to make
      # room for the newest.
      #
      # A Time object instead removes every backup _older_ than that time.
      #
      # @!attribute [rw] keep
      #   @param [Integer|Time]
      #   @return [Integer|Time]
      attr_accessor :keep

      attr_reader :model, :package, :storage_id

      ##
      # +storage_id+ is a user-defined string used to uniquely identify
      # multiple storages of the same type. If multiple storages of the same
      # type are added to a single backup model, this identifier must be set.
      # It is also appended to the YAML storage file used for cycling backups.
      # Non-word characters are normalized to underscores.
      def initialize(model, storage_id = nil, &block)
        @model = model
        @package = model.package
        @storage_id = storage_id.to_s.gsub(/\W/, "_") if storage_id

        load_defaults!
        instance_eval(&block) if block_given?
      end

      # Runs the storage: transfers the package, then cycles old packages
      # when the subclass supports cycling and #keep is configured.
      def perform!
        Logger.info "#{storage_name} Started..."
        transfer!
        cycle! if respond_to?(:cycle!, true) && (keep.to_i > 0 || keep.is_a?(Time))
        Logger.info "#{storage_name} Finished!"
      end

      private

      ##
      # Remote directory for the given package (defaults to the current one):
      # <path>/<trigger>/<time>, or just <trigger>/<time> when #path is empty.
      def remote_path(pkg = package)
        if path.empty?
          File.join(pkg.trigger, pkg.time)
        else
          File.join(path, pkg.trigger, pkg.time)
        end
      end
      alias :remote_path_for :remote_path

      # Human-readable identifier used in log messages,
      # e.g. "Storage::S3 (my_id)".
      def storage_name
        @storage_name ||= self.class.to_s.sub("Backup::", "") +
          (storage_id ? " (#{storage_id})" : "")
      end
    end
  end
end
@@ -0,0 +1,156 @@
1
require "backup/cloud_io/cloud_files"

module Backup
  module Storage
    class CloudFiles < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Rackspace CloudFiles account credentials.
      attr_accessor :username, :api_key

      ##
      # Rackspace authentication endpoint (optional).
      attr_accessor :auth_url

      ##
      # Use Rackspace ServiceNet
      # (LAN-based transfers to avoid charges and improve performance).
      attr_accessor :servicenet

      ##
      # Rackspace region (optional).
      attr_accessor :region

      ##
      # Name of the container backup packages are stored in.
      attr_accessor :container

      ##
      # Container used for SLO segments.
      # Required if #segment_size is set; must differ from #container.
      attr_accessor :segments_container

      ##
      # SLO segment size, specified in MiB.
      #
      # Package files larger than +segment_size+ are uploaded as
      # Static Large Objects (SLO).
      #
      # Defaults to 0 (SLO support disabled) for backward compatibility
      # (pre v.3.7.0), since #segments_container would otherwise be required.
      #
      # Minimum: 1 (0 disables SLO support)
      # Maximum: 5120 (5 GiB)
      attr_accessor :segment_size

      ##
      # When set, every backup package file (including SLO segments) is
      # scheduled for automatic removal by the server.
      #
      # The `keep` option should not be used together with this,
      # unless you're transitioning away from the `keep` option.
      attr_accessor :days_to_keep

      ##
      # Number of times to retry failed operations.
      #
      # Default: 10
      attr_accessor :max_retries

      ##
      # Time in seconds to pause before each retry.
      #
      # Default: 30
      attr_accessor :retry_waitsec

      ##
      # Additional options passed along to fog, e.g.
      # Fog::Storage.new({ :provider => 'Rackspace' }.merge(fog_options))
      attr_accessor :fog_options

      def initialize(model, storage_id = nil)
        super

        @servicenet    ||= false
        @segment_size  ||= 0
        @max_retries   ||= 10
        @retry_waitsec ||= 30

        @path ||= "backups"
        path.sub!(/^\//, "")

        check_configuration
      end

      private

      # Lazily-built CloudIO wrapper that performs all remote operations.
      def cloud_io
        @cloud_io ||= CloudIO::CloudFiles.new(
          username: username,
          api_key: api_key,
          auth_url: auth_url,
          region: region,
          servicenet: servicenet,
          container: container,
          segments_container: segments_container,
          segment_size: segment_size,
          days_to_keep: days_to_keep,
          max_retries: max_retries,
          retry_waitsec: retry_waitsec,
          fog_options: fog_options
        )
      end

      # Uploads each package file from the local tmp_path into the container.
      # When #days_to_keep is set, server-side expiry handles removal, so the
      # package is flagged to skip cycling.
      def transfer!
        package.filenames.each do |filename|
          src = File.join(Config.tmp_path, filename)
          dest = File.join(remote_path, filename)
          Logger.info "Storing '#{container}/#{dest}'..."
          cloud_io.upload(src, dest)
        end

        package.no_cycle = true if days_to_keep
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."

        remote_path = remote_path_for(package)
        objects = cloud_io.objects(remote_path)

        raise Error, "Package at '#{remote_path}' not found" if objects.empty?

        slo_objects, objects = objects.partition(&:slo?)
        cloud_io.delete_slo(slo_objects)
        cloud_io.delete(objects)
      end

      # Validates required and mutually-consistent settings, raising Error
      # with a descriptive message on the first violation found.
      def check_configuration
        required = %w[username api_key container]
        raise Error, <<-EOS if required.any? { |name| send(name).nil? }
          Configuration Error
          #{required.map { |name| "##{name}" }.join(", ")} are all required
        EOS

        raise Error, <<-EOS if segment_size > 0 && segments_container.to_s.empty?
          Configuration Error
          #segments_container is required if #segment_size is > 0
        EOS

        raise Error, <<-EOS if container == segments_container
          Configuration Error
          #container and #segments_container must not be the same container.
        EOS

        raise Error, <<-EOS if segment_size > 5120
          Configuration Error
          #segment_size is too large (max 5120)
        EOS
      end
    end
  end
end
@@ -0,0 +1,70 @@
1
module Backup
  module Storage
    module Cycler
      class Error < Backup::Error; end

      private

      # Adds the current package being stored to the YAML cycle data file
      # and removes any old package file(s) once the storage limit set by
      # #keep is exceeded (Integer), or once packages are older than the
      # #keep cutoff (Date/Time).
      def cycle!
        Logger.info "Cycling Started..."

        packages = yaml_load.unshift(package)
        cycled_packages = []

        if keep.is_a?(Date) || keep.is_a?(Time)
          cycled_packages = packages.select do |p|
            p.time_as_object < keep.to_time
          end
        else
          excess = packages.count - keep.to_i
          cycled_packages = packages.last(excess) if excess > 0
        end

        saved_packages = packages - cycled_packages
        cycled_packages.each { |package| delete_package package }

        yaml_save(saved_packages)
      end

      # Removes the given package via the storage's #remove!, unless it was
      # flagged to skip cycling (e.g. when server-side expiry is in effect).
      # Failures are only warned about, so one bad package never aborts the
      # whole cycle.
      def delete_package(package)
        remove!(package) unless package.no_cycle
      rescue => err
        Logger.warn Error.wrap(err, <<-EOS)
          There was a problem removing the following package:
          Trigger: #{package.trigger} :: Dated: #{package.time}
          Package included the following #{package.filenames.count} file(s):
          #{package.filenames.join("\n")}
        EOS
      end

      # Returns the path to the YAML data file for this storage, named after
      # the storage class (plus "-<storage_id>" when one is set), under
      # <data_path>/<trigger>/.
      #
      # FIX: the interpolation of +filename+ into the file name was lost —
      # the variable was built but never used, and the literal
      # "#(unknown).yml" meant every storage (regardless of class or
      # storage_id) would read and write the same cycle data file.
      def yaml_file
        @yaml_file ||= begin
          filename = self.class.to_s.split("::").last
          filename << "-#{storage_id}" if storage_id
          File.join(Config.data_path, package.trigger, "#{filename}.yml")
        end
      end

      # Returns stored Package objects, sorted by #time descending (oldest last).
      # An absent or empty data file yields an empty list.
      def yaml_load
        if File.exist?(yaml_file) && !File.zero?(yaml_file)
          YAML.load_file(yaml_file).sort_by!(&:time).reverse!
        else
          []
        end
      end

      # Stores the given package objects to the YAML data file,
      # creating the containing directory if needed.
      def yaml_save(packages)
        FileUtils.mkdir_p(File.dirname(yaml_file))
        File.open(yaml_file, "w") do |file|
          file.write(packages.to_yaml)
        end
      end
    end
  end
end
@@ -0,0 +1,206 @@
1
require "dropbox_sdk"

module Backup
  module Storage
    class Dropbox < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Dropbox API credentials.
      attr_accessor :api_key, :api_secret

      ##
      # Directory where the serialized, authorized session is cached.
      #
      # Relative paths are expanded against Config.root_path, which by
      # default is ~/Backup unless --root-path was used on the command
      # line or set in config.rb.
      #
      # Defaults to '.cache', which would be '~/Backup/.cache/' with the
      # default root_path.
      attr_accessor :cache_path

      ##
      # Dropbox Access Type
      # Valid values are:
      #   :app_folder (default)
      #   :dropbox (full access)
      attr_accessor :access_type

      ##
      # Chunk size, specified in MiB, for the ChunkedUploader.
      attr_accessor :chunk_size

      ##
      # Number of times to retry failed operations.
      #
      # Default: 10
      attr_accessor :max_retries

      ##
      # Time in seconds to pause before each retry.
      #
      # Default: 30
      attr_accessor :retry_waitsec

      ##
      # Creates a new instance of the storage object.
      def initialize(model, storage_id = nil)
        super

        @path          ||= "backups"
        @cache_path    ||= ".cache"
        @access_type   ||= :app_folder
        @chunk_size    ||= 4 # MiB
        @max_retries   ||= 10
        @retry_waitsec ||= 30
        path.sub!(/^\//, "")
      end

      private

      ##
      # Returns (and memoizes) an authorized DropboxClient.
      #
      # On first use Dropbox provides an authorization URL; the user must
      # open it, confirm access, then hit 'enter' so the session can be
      # established. The session is immediately serialized to a cache file
      # under +cache_path+, and subsequent runs re-establish the connection
      # from that cache so the user never has to re-authorize.
      def connection
        return @connection if @connection

        session = cached_session
        unless session
          Logger.info "Creating a new session!"
          session = create_write_and_return_new_session!
        end

        # will raise an error if session not authorized
        @connection = DropboxClient.new(session, access_type)
      rescue => err
        raise Error.wrap(err, "Authorization Failed")
      end

      ##
      # Attempts to load a previously cached session.
      # Returns false when no usable cache exists.
      def cached_session
        return false unless File.exist?(cached_file)

        begin
          session = DropboxSession.deserialize(File.read(cached_file))
          Logger.info "Session data loaded from cache!"
          session
        rescue => err
          Logger.warn Error.wrap(err, <<-EOS)
            Could not read session data from cache.
            Cache data might be corrupt.
          EOS
          false
        end
      end

      ##
      # Transfers each package file to Dropbox in chunks of +chunk_size+ MiB.
      # Each chunk is retried up to +max_retries+ times, pausing
      # +retry_waitsec+ between attempts, if errors occur.
      def transfer!
        package.filenames.each do |filename|
          src = File.join(Config.tmp_path, filename)
          dest = File.join(remote_path, filename)
          Logger.info "Storing '#{dest}'..."

          uploader = nil
          File.open(src, "r") do |file|
            uploader = connection.get_chunked_uploader(file, file.stat.size)
            until uploader.offset >= uploader.total_size
              with_retries { uploader.upload(1024**2 * chunk_size) }
            end
          end

          with_retries { uploader.finish(dest) }
        end
      rescue => err
        raise Error.wrap(err, "Upload Failed!")
      end

      # Runs the given block, retrying on StandardError up to +max_retries+
      # times with a +retry_waitsec+ pause between attempts. Re-raises once
      # the retry budget is exhausted.
      def with_retries
        attempts = 0
        begin
          yield
        rescue StandardError => err
          attempts += 1
          raise if attempts > max_retries

          Logger.info Error.wrap(err, "Retry ##{attempts} of #{max_retries}.")
          sleep(retry_waitsec)
          retry
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."

        connection.file_delete(remote_path_for(package))
      end

      # Absolute path of the session cache file; its name combines the
      # api_key and api_secret.
      def cached_file
        if cache_path.start_with?("/")
          File.join(cache_path, api_key + api_secret)
        else
          File.join(Config.root_path, cache_path, api_key + api_secret)
        end
      end

      ##
      # Serializes the Dropbox session and writes it to the cache file,
      # creating the cache directory if needed.
      def write_cache!(session)
        FileUtils.mkdir_p File.dirname(cached_file)
        File.open(cached_file, "w") do |cache_file|
          cache_file.write(session.serialize)
        end
      end

      ##
      # Creates a new session, writes a serialized version of it to the
      # cache directory, and returns the session object.
      def create_write_and_return_new_session!
        require "timeout"

        session = DropboxSession.new(api_key, api_secret)

        # grab the request token for session
        session.get_request_token

        template = Backup::Template.new(
          session: session, cached_file: cached_file
        )
        template.render("storage/dropbox/authorization_url.erb")

        # wait for user to hit 'return' to continue
        Timeout.timeout(180) { STDIN.gets }

        # get_access_token raises unless the user visited the
        # authorization_url and granted access; on success the access
        # token is stored with the session in the cache file
        session.get_access_token

        template.render("storage/dropbox/authorized.erb")
        write_cache!(session)
        template.render("storage/dropbox/cache_file_written.erb")

        session
      rescue => err
        raise Error.wrap(err, "Could not create or authenticate a new session")
      end
    end
  end
end