backup 4.3.0 → 5.0.0.beta.3

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Files changed (81)
  1. checksums.yaml +5 -5
  2. data/LICENSE +19 -0
  3. data/README.md +13 -9
  4. data/bin/docker_test +24 -0
  5. data/lib/backup.rb +74 -78
  6. data/lib/backup/archive.rb +31 -32
  7. data/lib/backup/binder.rb +2 -6
  8. data/lib/backup/cleaner.rb +14 -18
  9. data/lib/backup/cli.rb +104 -108
  10. data/lib/backup/cloud_io/base.rb +4 -7
  11. data/lib/backup/cloud_io/cloud_files.rb +60 -62
  12. data/lib/backup/cloud_io/s3.rb +69 -76
  13. data/lib/backup/compressor/base.rb +4 -7
  14. data/lib/backup/compressor/bzip2.rb +3 -7
  15. data/lib/backup/compressor/custom.rb +2 -6
  16. data/lib/backup/compressor/gzip.rb +16 -17
  17. data/lib/backup/config.rb +17 -18
  18. data/lib/backup/config/dsl.rb +16 -17
  19. data/lib/backup/config/helpers.rb +10 -16
  20. data/lib/backup/database/base.rb +22 -21
  21. data/lib/backup/database/mongodb.rb +36 -37
  22. data/lib/backup/database/mysql.rb +40 -41
  23. data/lib/backup/database/openldap.rb +8 -10
  24. data/lib/backup/database/postgresql.rb +29 -30
  25. data/lib/backup/database/redis.rb +27 -30
  26. data/lib/backup/database/riak.rb +15 -18
  27. data/lib/backup/database/sqlite.rb +4 -6
  28. data/lib/backup/encryptor/base.rb +2 -4
  29. data/lib/backup/encryptor/gpg.rb +49 -59
  30. data/lib/backup/encryptor/open_ssl.rb +11 -14
  31. data/lib/backup/errors.rb +7 -12
  32. data/lib/backup/logger.rb +16 -18
  33. data/lib/backup/logger/console.rb +5 -8
  34. data/lib/backup/logger/fog_adapter.rb +2 -6
  35. data/lib/backup/logger/logfile.rb +10 -12
  36. data/lib/backup/logger/syslog.rb +2 -4
  37. data/lib/backup/model.rb +33 -40
  38. data/lib/backup/notifier/base.rb +24 -26
  39. data/lib/backup/notifier/campfire.rb +9 -11
  40. data/lib/backup/notifier/command.rb +3 -3
  41. data/lib/backup/notifier/datadog.rb +9 -12
  42. data/lib/backup/notifier/flowdock.rb +13 -17
  43. data/lib/backup/notifier/hipchat.rb +18 -14
  44. data/lib/backup/notifier/http_post.rb +11 -14
  45. data/lib/backup/notifier/mail.rb +42 -54
  46. data/lib/backup/notifier/nagios.rb +5 -9
  47. data/lib/backup/notifier/pagerduty.rb +10 -12
  48. data/lib/backup/notifier/prowl.rb +15 -15
  49. data/lib/backup/notifier/pushover.rb +7 -10
  50. data/lib/backup/notifier/ses.rb +52 -17
  51. data/lib/backup/notifier/slack.rb +39 -40
  52. data/lib/backup/notifier/twitter.rb +2 -5
  53. data/lib/backup/notifier/zabbix.rb +11 -14
  54. data/lib/backup/package.rb +5 -9
  55. data/lib/backup/packager.rb +16 -17
  56. data/lib/backup/pipeline.rb +17 -21
  57. data/lib/backup/splitter.rb +8 -11
  58. data/lib/backup/storage/base.rb +5 -8
  59. data/lib/backup/storage/cloud_files.rb +21 -23
  60. data/lib/backup/storage/cycler.rb +10 -15
  61. data/lib/backup/storage/dropbox.rb +15 -21
  62. data/lib/backup/storage/ftp.rb +14 -10
  63. data/lib/backup/storage/local.rb +5 -8
  64. data/lib/backup/storage/qiniu.rb +8 -8
  65. data/lib/backup/storage/rsync.rb +24 -26
  66. data/lib/backup/storage/s3.rb +27 -28
  67. data/lib/backup/storage/scp.rb +10 -12
  68. data/lib/backup/storage/sftp.rb +10 -12
  69. data/lib/backup/syncer/base.rb +5 -8
  70. data/lib/backup/syncer/cloud/base.rb +27 -30
  71. data/lib/backup/syncer/cloud/cloud_files.rb +16 -18
  72. data/lib/backup/syncer/cloud/local_file.rb +5 -8
  73. data/lib/backup/syncer/cloud/s3.rb +23 -24
  74. data/lib/backup/syncer/rsync/base.rb +6 -10
  75. data/lib/backup/syncer/rsync/local.rb +1 -5
  76. data/lib/backup/syncer/rsync/pull.rb +6 -10
  77. data/lib/backup/syncer/rsync/push.rb +18 -22
  78. data/lib/backup/template.rb +9 -14
  79. data/lib/backup/utilities.rb +78 -69
  80. data/lib/backup/version.rb +1 -3
  81. metadata +107 -677

data/lib/backup/storage/dropbox.rb

@@ -1,5 +1,4 @@
-# encoding: utf-8
-require 'dropbox_sdk'
+require "dropbox_sdk"
 
 module Backup
   module Storage
@@ -50,13 +49,13 @@ module Backup
       def initialize(model, storage_id = nil)
         super
 
-        @path ||= 'backups'
-        @cache_path ||= '.cache'
+        @path ||= "backups"
+        @cache_path ||= ".cache"
         @access_type ||= :app_folder
         @chunk_size ||= 4 # MiB
         @max_retries ||= 10
         @retry_waitsec ||= 30
-        path.sub!(/^\//, '')
+        path.sub!(/^\//, "")
       end
 
       private
@@ -81,9 +80,8 @@ module Backup
 
         # will raise an error if session not authorized
         @connection = DropboxClient.new(session, access_type)
-
       rescue => err
-        raise Error.wrap(err, 'Authorization Failed')
+        raise Error.wrap(err, "Authorization Failed")
       end
 
       ##
@@ -94,7 +92,6 @@ module Backup
         begin
           session = DropboxSession.deserialize(File.read(cached_file))
           Logger.info "Session data loaded from cache!"
-
         rescue => err
           Logger.warn Error.wrap(err, <<-EOS)
             Could not read session data from cache.
@@ -113,10 +110,10 @@ module Backup
         package.filenames.each do |filename|
           src = File.join(Config.tmp_path, filename)
           dest = File.join(remote_path, filename)
-          Logger.info "Storing '#{ dest }'..."
+          Logger.info "Storing '#{dest}'..."
 
           uploader = nil
-          File.open(src, 'r') do |file|
+          File.open(src, "r") do |file|
             uploader = connection.get_chunked_uploader(file, file.stat.size)
             while uploader.offset < uploader.total_size
               with_retries do
@@ -129,9 +126,8 @@ module Backup
             uploader.finish(dest)
           end
         end
-
       rescue => err
-        raise Error.wrap(err, 'Upload Failed!')
+        raise Error.wrap(err, "Upload Failed!")
       end
 
       def with_retries
@@ -142,7 +138,7 @@ module Backup
         retries += 1
         raise if retries > max_retries
 
-        Logger.info Error.wrap(err, "Retry ##{ retries } of #{ max_retries }.")
+        Logger.info Error.wrap(err, "Retry ##{retries} of #{max_retries}.")
         sleep(retry_waitsec)
         retry
       end
@@ -151,13 +147,13 @@ module Backup
       # Called by the Cycler.
       # Any error raised will be logged as a warning.
       def remove!(package)
-        Logger.info "Removing backup package dated #{ package.time }..."
+        Logger.info "Removing backup package dated #{package.time}..."
 
         connection.file_delete(remote_path_for(package))
       end
 
       def cached_file
-        path = cache_path.start_with?('/') ?
+        path = cache_path.start_with?("/") ?
           cache_path : File.join(Config.root_path, cache_path)
         File.join(path, api_key + api_secret)
       end
@@ -175,7 +171,7 @@ module Backup
       # Create a new session, write a serialized version of it to the
       # .cache directory, and return the session object
       def create_write_and_return_new_session!
-        require 'timeout'
+        require "timeout"
 
         session = DropboxSession.new(api_key, api_secret)
 
@@ -183,12 +179,12 @@ module Backup
         session.get_request_token
 
         template = Backup::Template.new(
-          {:session => session, :cached_file => cached_file}
+          session: session, cached_file: cached_file
         )
         template.render("storage/dropbox/authorization_url.erb")
 
         # wait for user to hit 'return' to continue
-        Timeout::timeout(180) { STDIN.gets }
+        Timeout.timeout(180) { STDIN.gets }
 
         # this will raise an error if the user did not
         # visit the authorization_url and grant access
@@ -202,11 +198,9 @@ module Backup
         template.render("storage/dropbox/cache_file_written.erb")
 
         session
-
       rescue => err
-        raise Error.wrap(err, 'Could not create or authenticate a new session')
+        raise Error.wrap(err, "Could not create or authenticate a new session")
       end
-
     end
   end
 end
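
The attributes touched in this initializer (path, cache_path, access_type, chunk_size, max_retries, retry_waitsec) are the knobs a user sets when wiring this storage into a model. A minimal configuration sketch, assuming the backup gem's usual Model/store_with DSL; the trigger, label and credentials below are placeholders, not values from this diff:

    # Sketch only: assumes the backup gem's store_with DSL; credentials are placeholders.
    Backup::Model.new(:my_backup, "My Backup") do
      store_with Dropbox do |db|
        db.api_key       = "my_api_key"    # placeholder
        db.api_secret    = "my_api_secret" # placeholder
        db.access_type   = :app_folder     # default shown above
        db.path          = "backups"       # default shown above
        db.cache_path    = ".cache"        # where the serialized session is cached
        db.chunk_size    = 4               # MiB, used by the chunked uploader
        db.max_retries   = 10
        db.retry_waitsec = 30
      end
    end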

data/lib/backup/storage/ftp.rb

@@ -1,5 +1,4 @@
-# encoding: utf-8
-require 'net/ftp'
+require "net/ftp"
 
 module Backup
   module Storage
@@ -31,10 +30,10 @@ module Backup
         super
 
         @port ||= 21
-        @path ||= 'backups'
+        @path ||= "backups"
         @passive_mode ||= false
         @timeout ||= nil
-        path.sub!(/^~\//, '')
+        path.sub!(/^~\//, "")
       end
 
       private
@@ -51,6 +50,12 @@ module Backup
           Net::FTP.send(:remove_const, :FTP_PORT)
         end; Net::FTP.send(:const_set, :FTP_PORT, port)
 
+        # Ensure default passive mode to false.
+        # Note: The default passive setting changed between Ruby 2.2 and 2.3
+        if Net::FTP.respond_to?(:default_passive=)
+          Net::FTP.default_passive = false
+        end
+
         Net::FTP.open(ip, username, password) do |ftp|
           if timeout
             ftp.open_timeout = timeout
@@ -68,7 +73,7 @@ module Backup
         package.filenames.each do |filename|
           src = File.join(Config.tmp_path, filename)
           dest = File.join(remote_path, filename)
-          Logger.info "Storing '#{ ip }:#{ dest }'..."
+          Logger.info "Storing '#{ip}:#{dest}'..."
           ftp.put(src, dest)
         end
       end
@@ -77,7 +82,7 @@ module Backup
       # Called by the Cycler.
      # Any error raised will be logged as a warning.
       def remove!(package)
-        Logger.info "Removing backup package dated #{ package.time }..."
+        Logger.info "Removing backup package dated #{package.time}..."
 
         remote_path = remote_path_for(package)
         connection do |ftp|
@@ -98,15 +103,14 @@ module Backup
       # Net::FTP raises an exception when the directory it's trying to create
       # already exists, so we have rescue it
       def create_remote_path(ftp)
-        path_parts = Array.new
-        remote_path.split('/').each do |path_part|
+        path_parts = []
+        remote_path.split("/").each do |path_part|
           path_parts << path_part
           begin
-            ftp.mkdir(path_parts.join('/'))
+            ftp.mkdir(path_parts.join("/"))
           rescue Net::FTPPermError; end
         end
       end
-
     end
   end
 end
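
The new hunk above pins the library-wide default back to active mode behind a respond_to? guard: per the inline comment, Ruby 2.3 changed Net::FTP's default to passive, while older Rubies lack the default_passive= writer entirely. A standalone sketch of the same compatibility idiom, using only the standard library (host and credentials are placeholders):

    require "net/ftp"

    # Force active mode as the library-wide default where the writer exists;
    # on older Rubies the respond_to? guard simply skips the call.
    Net::FTP.default_passive = false if Net::FTP.respond_to?(:default_passive=)

    # Passive mode can still be requested per connection, which is presumably
    # what the storage's passive_mode option toggles.
    Net::FTP.open("ftp.example.com", "user", "password") do |ftp|
      ftp.passive = true
      ftp.list.each { |entry| puts entry }
    end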

data/lib/backup/storage/local.rb

@@ -1,5 +1,3 @@
-# encoding: utf-8
-
 module Backup
   module Storage
     class Local < Base
@@ -9,7 +7,7 @@ module Backup
       def initialize(model, storage_id = nil)
         super
 
-        @path ||= '~/backups'
+        @path ||= "~/backups"
       end
 
       private
@@ -21,7 +19,7 @@ module Backup
         package.filenames.each do |filename|
           src = File.join(Config.tmp_path, filename)
           dest = File.join(remote_path, filename)
-          Logger.info "Storing '#{ dest }'..."
+          Logger.info "Storing '#{dest}'..."
 
           FileUtils.send(transfer_method, src, dest)
         end
@@ -30,7 +28,7 @@ module Backup
       # Called by the Cycler.
       # Any error raised will be logged as a warning.
       def remove!(package)
-        Logger.info "Removing backup package dated #{ package.time }..."
+        Logger.info "Removing backup package dated #{package.time}..."
 
         FileUtils.rm_r(remote_path_for(package))
       end
@@ -50,15 +48,14 @@ module Backup
        else
          Logger.warn Error.new(<<-EOS)
            Local File Copy Warning!
-            The final backup file(s) for '#{ model.label }' (#{ model.trigger })
-            will be *copied* to '#{ remote_path }'
+            The final backup file(s) for '#{model.label}' (#{model.trigger})
+            will be *copied* to '#{remote_path}'
            To avoid this, when using more than one Storage, the 'Local' Storage
            should be added *last* so the files may be *moved* to their destination.
          EOS
          false
        end
      end
-
    end
  end
 end
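
The warning above means that when Local is not the last storage declared for a model, the package files can only be copied out of Config.tmp_path rather than moved. A minimal sketch, assuming the store_with DSL, with Local declared last as the message recommends (SCP is just a placeholder for "some remote storage"):

    # Sketch only: Local declared last so the package is *moved* out of tmp_path.
    Backup::Model.new(:my_backup, "My Backup") do
      store_with SCP               # placeholder remote storage, configured elsewhere
      store_with Local do |local|
        local.path = "~/backups"   # default shown above
      end
    end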

data/lib/backup/storage/qiniu.rb

@@ -1,5 +1,4 @@
-# encoding: utf-8
-require 'qiniu'
+require "qiniu"
 
 module Backup
   module Storage
@@ -18,18 +17,19 @@ module Backup
       def initialize(model, storage_id = nil)
         super
 
-        @path ||= 'backups'
+        @path ||= "backups"
 
         check_configuration
         config_credentials
       end
 
       private
+
       def transfer!
         package.filenames.each do |filename|
           src = File.join(Config.tmp_path, filename)
           dest = File.join(remote_path, filename)
-          Logger.info "Storing '#{ dest }'..."
+          Logger.info "Storing '#{dest}'..."
 
           ::Qiniu.upload_file(uptoken: ::Qiniu.generate_upload_token,
                               bucket: bucket,
@@ -41,7 +41,7 @@ module Backup
       # Called by the Cycler.
       # Any error raised will be logged as a warning.
       def remove!(package)
-        Logger.info "Removing backup package dated #{ package.time }..."
+        Logger.info "Removing backup package dated #{package.time}..."
         remote_path = remote_path_for(package)
         package.filenames.each do |filename|
           ::Qiniu.delete(bucket, File.join(remote_path, filename))
@@ -49,11 +49,11 @@ module Backup
         end
       end
 
       def check_configuration
-        required = %w{ access_key secret_key bucket }
+        required = %w[access_key secret_key bucket]
 
-        raise Error, <<-EOS if required.map {|name| send(name) }.any?(&:nil?)
+        raise Error, <<-EOS if required.map { |name| send(name) }.any?(&:nil?)
           Configuration Error
-          #{ required.map {|name| "##{ name }"}.join(', ') } are all required
+          #{required.map { |name| "##{name}" }.join(", ")} are all required
         EOS
       end
 
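The check_configuration pattern above (also used by the S3 storage further down) sends each required attribute name to the object and raises if any of them returns nil. A standalone sketch of the idiom outside the gem, with a plain ArgumentError standing in for Backup's Error class and illustrative attribute values:

    # Sketch of the required-attribute check; names and error class are illustrative.
    class QiniuLikeConfig
      attr_accessor :access_key, :secret_key, :bucket

      def check_configuration
        required = %w[access_key secret_key bucket]
        return unless required.map { |name| send(name) }.any?(&:nil?)

        raise ArgumentError,
          "#{required.map { |name| "##{name}" }.join(", ")} are all required"
      end
    end

    cfg = QiniuLikeConfig.new
    cfg.access_key = "ak"        # placeholders; bucket is left unset
    cfg.secret_key = "sk"
    cfg.check_configuration      # raises: #access_key, #secret_key, #bucket are all required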

data/lib/backup/storage/rsync.rb

@@ -1,5 +1,3 @@
-# encoding: utf-8
-
 module Backup
   module Storage
     class RSync < Base
@@ -133,7 +131,7 @@ module Backup
         @mode ||= :ssh
         @port ||= mode == :rsync_daemon ? 873 : 22
         @compress ||= false
-        @path ||= '~/backups'
+        @path ||= "~/backups"
       end
 
       private
@@ -143,10 +141,10 @@ module Backup
         create_remote_path
 
         package.filenames.each do |filename|
-          src = "'#{ File.join(Config.tmp_path, filename) }'"
-          dest = "#{ host_options }'#{ File.join(remote_path, filename) }'"
-          Logger.info "Syncing to #{ dest }..."
-          run("#{ rsync_command } #{ src } #{ dest }")
+          src = "'#{File.join(Config.tmp_path, filename)}'"
+          dest = "#{host_options}'#{File.join(remote_path, filename)}'"
+          Logger.info "Syncing to #{dest}..."
+          run("#{rsync_command} #{src} #{dest}")
         end
       ensure
         remove_password_file
@@ -159,7 +157,7 @@ module Backup
       def remote_path
         @remote_path ||= begin
           if host
-            path.sub(/^~\//, '').sub(/\/$/, '')
+            path.sub(/^~\//, "").sub(/\/$/, "")
           else
             File.expand_path(path)
           end
@@ -176,8 +174,9 @@ module Backup
       # module name that must define a path on the remote that already exists.
       def create_remote_path
         if host
-          run("#{ utility(:ssh) } #{ ssh_transport_args } #{ host } " +
-              %Q["mkdir -p '#{ remote_path }'"]) if mode == :ssh
+          return unless mode == :ssh
+          run "#{utility(:ssh)} #{ssh_transport_args} #{host} " +
+              %("mkdir -p '#{remote_path}'")
         else
           FileUtils.mkdir_p(remote_path)
         end
@@ -186,55 +185,55 @@ module Backup
       def host_options
         @host_options ||= begin
           if !host
-            ''
+            ""
           elsif mode == :ssh
-            "#{ host }:"
+            "#{host}:"
           else
-            user = "#{ rsync_user }@" if rsync_user
-            "#{ user }#{ host }::"
+            user = "#{rsync_user}@" if rsync_user
+            "#{user}#{host}::"
           end
         end
       end
 
       def rsync_command
         @rsync_command ||= begin
-          cmd = utility(:rsync) << ' --archive' <<
-            " #{ Array(additional_rsync_options).join(' ') }".rstrip
+          cmd = utility(:rsync) << " --archive" <<
+            " #{Array(additional_rsync_options).join(" ")}".rstrip
           cmd << compress_option << password_option << transport_options if host
           cmd
         end
       end
 
       def compress_option
-        compress ? ' --compress' : ''
+        compress ? " --compress" : ""
       end
 
       def password_option
-        return '' if mode == :ssh
+        return "" if mode == :ssh
 
         path = @password_file ? @password_file.path : rsync_password_file
-        path ? " --password-file='#{ File.expand_path(path) }'" : ''
+        path ? " --password-file='#{File.expand_path(path)}'" : ""
       end
 
       def transport_options
         if mode == :rsync_daemon
-          " --port #{ port }"
+          " --port #{port}"
         else
-          %Q[ -e "#{ utility(:ssh) } #{ ssh_transport_args }"]
+          %( -e "#{utility(:ssh)} #{ssh_transport_args}")
         end
       end
 
       def ssh_transport_args
-        args = "-p #{ port } "
-        args << "-l #{ ssh_user } " if ssh_user
-        args << Array(additional_ssh_options).join(' ')
+        args = "-p #{port} "
+        args << "-l #{ssh_user} " if ssh_user
+        args << Array(additional_ssh_options).join(" ")
         args.rstrip
       end
 
       def write_password_file
         return unless host && rsync_password && mode != :ssh
 
-        @password_file = Tempfile.new('backup-rsync-password')
+        @password_file = Tempfile.new("backup-rsync-password")
         @password_file.write(rsync_password)
         @password_file.close
       end
@@ -242,7 +241,6 @@ module Backup
       def remove_password_file
         @password_file.delete if @password_file
       end
-
     end
   end
 end
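
The helper methods above assemble a single shell string: rsync_command supplies the base flags, compress_option, password_option and transport_options are appended when a host is set, and host_options prefixes the destination. A rough standalone rendering of that assembly for :ssh mode, with placeholder host and paths (password_option returns an empty string in :ssh mode, and the real code resolves the rsync/ssh binaries via utility()):

    # Illustration only: the string the builders above produce in :ssh mode.
    host     = "backup.example.com"
    port     = 22
    compress = true

    ssh_transport_args = "-p #{port}"                 # plus "-l <ssh_user>" when set
    transport_options  = %( -e "ssh #{ssh_transport_args}")
    compress_option    = compress ? " --compress" : ""
    rsync_command      = "rsync --archive" + compress_option + transport_options

    src  = "'/tmp/backups/my_backup.tar'"
    dest = "#{host}:'backups/my_backup.tar'"          # host_options => "host:" in :ssh mode

    puts "#{rsync_command} #{src} #{dest}"
    # => rsync --archive --compress -e "ssh -p 22" '/tmp/backups/my_backup.tar' backup.example.com:'backups/my_backup.tar'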

data/lib/backup/storage/s3.rb

@@ -1,5 +1,4 @@
-# encoding: utf-8
-require 'backup/cloud_io/s3'
+require "backup/cloud_io/s3"
 
 module Backup
   module Storage
@@ -75,10 +74,10 @@ module Backup
         @chunk_size ||= 5 # MiB
         @max_retries ||= 10
         @retry_waitsec ||= 30
-        @path ||= 'backups'
+        @path ||= "backups"
         @storage_class ||= :standard
 
-        @path = @path.sub(/^\//, '')
+        @path = @path.sub(/^\//, "")
 
         check_configuration
       end
@@ -87,17 +86,17 @@ module Backup
 
       def cloud_io
         @cloud_io ||= CloudIO::S3.new(
-          :access_key_id => access_key_id,
-          :secret_access_key => secret_access_key,
-          :use_iam_profile => use_iam_profile,
-          :region => region,
-          :bucket => bucket,
-          :encryption => encryption,
-          :storage_class => storage_class,
-          :max_retries => max_retries,
-          :retry_waitsec => retry_waitsec,
-          :chunk_size => chunk_size,
-          :fog_options => fog_options
+          access_key_id: access_key_id,
+          secret_access_key: secret_access_key,
+          use_iam_profile: use_iam_profile,
+          region: region,
+          bucket: bucket,
+          encryption: encryption,
+          storage_class: storage_class,
+          max_retries: max_retries,
+          retry_waitsec: retry_waitsec,
+          chunk_size: chunk_size,
+          fog_options: fog_options
         )
       end
 
@@ -105,7 +104,7 @@ module Backup
         package.filenames.each do |filename|
           src = File.join(Config.tmp_path, filename)
           dest = File.join(remote_path, filename)
-          Logger.info "Storing '#{ bucket }/#{ dest }'..."
+          Logger.info "Storing '#{bucket}/#{dest}'..."
           cloud_io.upload(src, dest)
         end
       end
@@ -113,25 +112,26 @@ module Backup
       # Called by the Cycler.
       # Any error raised will be logged as a warning.
       def remove!(package)
-        Logger.info "Removing backup package dated #{ package.time }..."
+        Logger.info "Removing backup package dated #{package.time}..."
 
         remote_path = remote_path_for(package)
         objects = cloud_io.objects(remote_path)
 
-        raise Error, "Package at '#{ remote_path }' not found" if objects.empty?
+        raise Error, "Package at '#{remote_path}' not found" if objects.empty?
 
         cloud_io.delete(objects)
       end
 
       def check_configuration
-        if use_iam_profile
-          required = %w{ bucket }
-        else
-          required = %w{ access_key_id secret_access_key bucket }
-        end
-        raise Error, <<-EOS if required.map {|name| send(name) }.any?(&:nil?)
+        required =
+          if use_iam_profile
+            %w[bucket]
+          else
+            %w[access_key_id secret_access_key bucket]
+          end
+        raise Error, <<-EOS if required.map { |name| send(name) }.any?(&:nil?)
          Configuration Error
-          #{ required.map {|name| "##{ name }"}.join(', ') } are all required
+          #{required.map { |name| "##{name}" }.join(", ")} are all required
        EOS
 
        raise Error, <<-EOS if chunk_size > 0 && !chunk_size.between?(5, 5120)
@@ -139,18 +139,17 @@
          #chunk_size must be between 5 and 5120 (or 0 to disable multipart)
        EOS
 
-        raise Error, <<-EOS if encryption && encryption.to_s.upcase != 'AES256'
+        raise Error, <<-EOS if encryption && encryption.to_s.upcase != "AES256"
          Configuration Error
          #encryption must be :aes256 or nil
        EOS
 
-        classes = ['STANDARD', 'STANDARD_IA', 'REDUCED_REDUNDANCY']
+        classes = ["STANDARD", "STANDARD_IA", "REDUCED_REDUNDANCY"]
        raise Error, <<-EOS unless classes.include?(storage_class.to_s.upcase)
          Configuration Error
          #storage_class must be :standard or :standard_ia or :reduced_redundancy
        EOS
      end
-
    end
  end
 end
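
The keyword arguments handed to CloudIO::S3.new mirror the storage's user-facing attributes, and check_configuration spells out which are mandatory: only bucket when use_iam_profile is set, otherwise the key pair as well, plus the bounds on chunk_size, encryption and storage_class. A minimal configuration sketch, assuming the store_with DSL inside a Backup::Model definition; all values are placeholders:

    # Sketch only: attribute names match the initializer and validation above;
    # values are placeholders.
    store_with S3 do |s3|
      s3.access_key_id     = "AKIA..."           # not needed when use_iam_profile = true
      s3.secret_access_key = "..."
      s3.region            = "us-east-1"
      s3.bucket            = "my-backup-bucket"  # always required
      s3.path              = "backups"           # a leading "/" is stripped
      s3.storage_class     = :standard_ia        # :standard, :standard_ia or :reduced_redundancy
      s3.encryption        = :aes256             # :aes256 or nil
      s3.chunk_size        = 5                   # MiB; 5..5120, or 0 to disable multipart
    end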