backup 4.4.1 → 5.0.0.beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/LICENSE +19 -0
- data/README.md +1 -1
- data/lib/backup.rb +74 -78
- data/lib/backup/archive.rb +31 -32
- data/lib/backup/binder.rb +2 -6
- data/lib/backup/cleaner.rb +14 -18
- data/lib/backup/cli.rb +104 -108
- data/lib/backup/cloud_io/base.rb +4 -7
- data/lib/backup/cloud_io/cloud_files.rb +60 -62
- data/lib/backup/cloud_io/s3.rb +69 -76
- data/lib/backup/compressor/base.rb +4 -7
- data/lib/backup/compressor/bzip2.rb +3 -7
- data/lib/backup/compressor/custom.rb +2 -6
- data/lib/backup/compressor/gzip.rb +16 -17
- data/lib/backup/config.rb +17 -18
- data/lib/backup/config/dsl.rb +16 -17
- data/lib/backup/config/helpers.rb +10 -16
- data/lib/backup/database/base.rb +22 -21
- data/lib/backup/database/mongodb.rb +36 -37
- data/lib/backup/database/mysql.rb +40 -41
- data/lib/backup/database/openldap.rb +8 -10
- data/lib/backup/database/postgresql.rb +29 -30
- data/lib/backup/database/redis.rb +27 -30
- data/lib/backup/database/riak.rb +15 -18
- data/lib/backup/database/sqlite.rb +4 -6
- data/lib/backup/encryptor/base.rb +2 -4
- data/lib/backup/encryptor/gpg.rb +49 -59
- data/lib/backup/encryptor/open_ssl.rb +11 -14
- data/lib/backup/errors.rb +7 -12
- data/lib/backup/logger.rb +16 -18
- data/lib/backup/logger/console.rb +5 -8
- data/lib/backup/logger/fog_adapter.rb +2 -6
- data/lib/backup/logger/logfile.rb +10 -12
- data/lib/backup/logger/syslog.rb +2 -4
- data/lib/backup/model.rb +75 -40
- data/lib/backup/notifier/base.rb +24 -26
- data/lib/backup/notifier/campfire.rb +9 -11
- data/lib/backup/notifier/command.rb +0 -3
- data/lib/backup/notifier/datadog.rb +9 -12
- data/lib/backup/notifier/flowdock.rb +13 -17
- data/lib/backup/notifier/hipchat.rb +11 -13
- data/lib/backup/notifier/http_post.rb +11 -14
- data/lib/backup/notifier/mail.rb +44 -47
- data/lib/backup/notifier/nagios.rb +5 -9
- data/lib/backup/notifier/pagerduty.rb +10 -12
- data/lib/backup/notifier/prowl.rb +15 -15
- data/lib/backup/notifier/pushover.rb +7 -10
- data/lib/backup/notifier/ses.rb +34 -16
- data/lib/backup/notifier/slack.rb +39 -40
- data/lib/backup/notifier/twitter.rb +2 -5
- data/lib/backup/notifier/zabbix.rb +11 -14
- data/lib/backup/package.rb +5 -9
- data/lib/backup/packager.rb +16 -17
- data/lib/backup/pipeline.rb +17 -21
- data/lib/backup/splitter.rb +8 -11
- data/lib/backup/storage/base.rb +5 -8
- data/lib/backup/storage/cloud_files.rb +21 -23
- data/lib/backup/storage/cycler.rb +10 -15
- data/lib/backup/storage/dropbox.rb +15 -21
- data/lib/backup/storage/ftp.rb +8 -10
- data/lib/backup/storage/local.rb +5 -8
- data/lib/backup/storage/qiniu.rb +8 -8
- data/lib/backup/storage/rsync.rb +24 -26
- data/lib/backup/storage/s3.rb +27 -28
- data/lib/backup/storage/scp.rb +10 -12
- data/lib/backup/storage/sftp.rb +10 -12
- data/lib/backup/syncer/base.rb +5 -8
- data/lib/backup/syncer/cloud/base.rb +27 -30
- data/lib/backup/syncer/cloud/cloud_files.rb +16 -18
- data/lib/backup/syncer/cloud/local_file.rb +5 -8
- data/lib/backup/syncer/cloud/s3.rb +23 -24
- data/lib/backup/syncer/rsync/base.rb +6 -10
- data/lib/backup/syncer/rsync/local.rb +1 -5
- data/lib/backup/syncer/rsync/pull.rb +6 -10
- data/lib/backup/syncer/rsync/push.rb +18 -22
- data/lib/backup/template.rb +9 -14
- data/lib/backup/utilities.rb +82 -69
- data/lib/backup/version.rb +1 -3
- metadata +100 -660
data/lib/backup/cli.rb
CHANGED
Code quoted below is the new, 5.0.0.beta.1 side of the diff.

The `# encoding: utf-8` magic comment and the blank line following it are removed from the top of the file.

Every Thor declaration is rewritten with double-quoted strings and Ruby 1.9 keyword-hash syntax. For the `perform` command:

    desc "perform", "Performs the backup for the specified trigger(s)."

    long_desc <<-EOS.gsub(/^ +/, "")

    method_option :trigger,
        aliases: ["-t", "--triggers"],
        required: true,
        type: :string,
        desc: "Triggers to perform. e.g. 'trigger_a,trigger_b'"

The remaining options are restyled the same way: `:config_file` (`aliases: "-c"`), `:root_path` (`aliases: "-r"`), `:data_path` (`aliases: "-d"`), `:log_path` (`aliases: "-l"`), and `:tmp_path` are declared `type: :string, default: ""` with their existing descriptions; `:quiet` (`aliases: "-q"`), `:syslog`, and `:logfile` become `type: :boolean` with `banner: ""`, defaulting to `false`, `false`, and `true` respectively; `:check` is `type: :boolean, default: false, desc: "Check configuration for errors or warnings."`

Inside `perform`, the logger configuration now assigns each option explicitly:

    Logger.configure do
      console.quiet = opts[:quiet]
      logfile.enabled = opts[:logfile]
      logfile.log_path = opts[:log_path]
      syslog.enabled = opts[:syslog]
    end

Trigger resolution splits the comma-separated `--trigger` value, strips whitespace, resolves each unique trigger, and raises when nothing matches:

    triggers = options[:trigger].split(",").map(&:strip)
    models = triggers.uniq.flat_map do |trigger|
      Model.find_by_trigger(trigger)
    end

    if models.empty?
      raise Error, "No Models found for trigger(s) " \
          "'#{triggers.join(",")}'."
    end

When startup fails, `Logger.abort!` is now followed by `exit 3` rather than a bare `exit`. The "Backup will now continue..." and "Backup will now exit." messages interpolate the remaining triggers as `(#{models.map(&:trigger).join(", ")})`.

The `check` command is restyled the same way (`desc "check", "Check for configuration errors or warnings"`, `long_desc <<-EOS.gsub(/^ +/, "")`, and a `:config_file` option with `aliases: "-c", type: :string, default: ""`), and its result messages are now explicit:

    if Logger.has_warnings? || Logger.has_errors?
      Logger.error "Configuration Check Failed."
      exit_code = 1
    else
      Logger.info "Configuration Check Succeeded."
      exit_code = 0
    end

`generate:model` gets the same treatment: `desc "generate:model", "Generates a Backup model file."`, a `long_desc` that interpolates `#{Config.config_file}` and `#{Config.root_path}/models/<trigger>.rb`, a `:trigger` option (`aliases: "-t", required: true, type: :string, desc: "Trigger name for the Backup model"`), and a `:config_file` option (`type: :string, desc: "Path to your Backup configuration file"`). The per-type options are built from the template directory, and `:archives` and `:splitter` become booleans (`desc: "Model will include tar archives."` and `default: false, desc: "Add Splitter to the model"`):

    %w[databases storages syncers encryptor compressor notifiers].each do |name|
      path = File.join(Backup::TEMPLATE_PATH, "cli", name)
      opts = Dir[path + "/*"].sort.map { |p| File.basename(p) }.join(", ")
      method_option name, type: :string, desc: "(#{opts})"
    end

The command body itself becomes:

    define_method "generate:model" do
      opts = options.merge(trigger: options[:trigger].gsub(/\W/, "_"))
      config_file = opts[:config_file] ?
          File.expand_path(opts.delete(:config_file)) : Config.config_file
      models_path = File.join(File.dirname(config_file), "models")
      model_file = File.join(models_path, "#{opts[:trigger]}.rb")

      unless File.exist?(config_file)
        invoke "generate:config", [], config_file: config_file
      end

      FileUtils.mkdir_p(models_path)
      if Helpers.overwrite?(model_file)
        File.open(model_file, "w") do |file|
          file.write(Backup::Template.new(options: opts).result("cli/model"))
        end
        puts "Generated model file: '#{model_file}'."
      end
    end

`generate:config` follows the same pattern (`desc "generate:config", "Generates the main Backup configuration file"`, a `:config_file` option with `type: :string, desc: "Path to the Backup configuration file to generate."`), writing `Backup::Template.new.result("cli/config")` to the chosen path and printing `"Generated configuration file: '#{config_file}'."`. The `version` command becomes:

    map "-v" => :version
    desc "version", "Display installed Backup version"
    def version
      puts "Backup #{Backup::VERSION}"
    end

In the `Helpers` module, the prompts and messages are re-quoted (`$stderr.print "A file already exists at '#{path}'.\n"` and `puts "Launching: #{cmd}"`), a few blank lines are dropped, and `overwrite?`, `exec!`, and `is_backup_error?` are otherwise unchanged.
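The new trigger handling is the main behavioral change in `perform`. Below is a minimal, runnable sketch of the same lookup flow, using a hypothetical in-memory registry in place of the gem's `Model.find_by_trigger` (the registry and its trigger names are illustrative, not Backup's API):

    # Hypothetical stand-in for Backup::Model.find_by_trigger: returns every
    # registered model for a trigger, or [] when nothing is registered.
    REGISTRY = {
      "db_backup"    => [:db_backup_model],
      "files_backup" => [:files_backup_model]
    }

    def find_by_trigger(trigger)
      REGISTRY.fetch(trigger, [])
    end

    # Mirrors the 5.0.0.beta.1 logic: split the comma-separated --trigger value,
    # strip whitespace, resolve each unique trigger, and fail if nothing matched.
    def resolve(trigger_option)
      triggers = trigger_option.split(",").map(&:strip)
      models = triggers.uniq.flat_map { |trigger| find_by_trigger(trigger) }
      raise "No Models found for trigger(s) '#{triggers.join(",")}'." if models.empty?
      models
    end

    p resolve("db_backup, files_backup") # => [:db_backup_model, :files_backup_model]
    p resolve("missing") rescue puts "lookup failed, as the new code would report"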
data/lib/backup/cloud_io/base.rb
CHANGED
The `# encoding: utf-8` magic comment and the blank line following it are removed from the top of the file.

In `with_retries`, the retry-exhausted error and the per-retry log message are re-quoted with their interpolations intact, so the rescue clause now reads:

    rescue => err
      retries += 1
      raise Error.wrap(err, <<-EOS) if retries > max_retries
        Max Retries (#{max_retries}) Exceeded!
        Operation: #{operation}
        Be sure to check the log messages for each retry attempt.
      EOS

      Logger.info Error.wrap(err, <<-EOS)
        Retry ##{retries} of #{max_retries}
        Operation: #{operation}
      EOS
      sleep(retry_waitsec)
      retry
    end

A blank line before the closing `end`s is also removed.
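The `with_retries` shown above is a plain retry loop. A self-contained sketch of the same shape, with illustrative `max_retries`/`retry_waitsec` defaults rather than Backup's configured attributes:

    # Minimal retry wrapper in the spirit of CloudIO::Base#with_retries:
    # re-run the block until it succeeds or max_retries is exceeded,
    # sleeping retry_waitsec between attempts.
    def with_retries(operation, max_retries: 3, retry_waitsec: 1)
      retries = 0
      begin
        yield
      rescue => err
        retries += 1
        if retries > max_retries
          raise "Max Retries (#{max_retries}) Exceeded! Operation: #{operation}"
        end
        warn "Retry ##{retries} of #{max_retries} -- #{operation} (#{err.message})"
        sleep(retry_waitsec)
        retry
      end
    end

    attempts = 0
    with_retries("PUT 'container/object'") do
      attempts += 1
      raise "transient failure" if attempts < 3 # succeeds on the third attempt
    end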
data/lib/backup/cloud_io/cloud_files.rb
CHANGED

The `# encoding: utf-8` line is dropped and the requires are re-quoted:

    require "backup/cloud_io/base"
    require "fog"
    require "digest/md5"

The reader list is reflowed:

    attr_reader :username, :api_key, :auth_url, :region, :servicenet,
      :container, :segments_container, :segment_size, :days_to_keep,
      :fog_options

The "File Too Large" errors in `upload` keep their interpolations — `File: #{src}`, `Size: #{file_size}`, and `Max SLO Size is #{MAX_SLO_SIZE} (5 GiB * 1000 segments)` for the SLO branch, `Max File Size is #{MAX_FILE_SIZE} (5 GiB)` for the single-object branch.

`objects` pages through the container with a marker, 10,000 entries at a time:

    prefix = prefix.chomp("/")
    opts = { prefix: prefix + "/" }

    create_containers

    while resp.nil? || resp.body.count == 10_000
      opts[:marker] = objects.last.name unless objects.empty?
      with_retries("GET '#{container}/#{prefix}/*'") do
        resp = connection.get_container(container, opts)
      end
      resp.body.each do |obj_data|

`head_object` wraps its request in `with_retries("HEAD '#{container}/#{object.name}'")`. `delete` removes objects in batches and checks the response status:

    names_partial = names.slice!(0, 10_000)
    with_retries("DELETE Multiple Objects") do
      resp = connection.delete_multiple_objects(container, names_partial)
      resp_status = resp.body["Response Status"]
      raise Error, <<-EOS unless resp_status == "200 OK"
        #{resp_status}
        The server returned the following:
        #{resp.body.inspect}
      EOS
    end

`delete_slo` performs the same "200 OK" check around `connection.delete_static_large_object(container, object.name)`, wrapped in `with_retries("DELETE SLO Manifest '#{container}/#{object.name}'")`.

The Fog connection uses keyword hashes:

    @connection ||= Fog::Storage.new({
      provider: "Rackspace",
      rackspace_username: username,
      rackspace_api_key: api_key,
      rackspace_auth_url: auth_url,
      rackspace_region: region,
      rackspace_servicenet: servicenet
    }.merge(fog_options || {}))

`create_containers` wraps the two `put_container` calls in `with_retries("Create Containers")`, and `put_object` sends the file with an MD5 ETag:

    opts = headers.merge("ETag" => Digest::MD5.file(src).hexdigest)
    with_retries("PUT '#{container}/#{dest}'") do
      File.open(src, "r") do |file|
        connection.put_object(container, dest, file, opts)
      end
    end

`upload_segments` logs `"\s\sUploading #{total_segments} SLO Segments..."`, computes `progress = (0.1..0.9).step(0.1).map { |n| (total_segments * n).floor }`, names each segment `"#{dest}/#{segment_number.to_s.rjust(4, "0")}"`, sends it with `opts = headers.merge("ETag" => md5)` inside `with_retries("PUT '#{segments_container}/#{object}'")`, streams data starting from `data = ""`, and records each segment as:

    segments << {
      path: "#{segments_container}/#{object}",
      etag: md5,
      size_bytes: file.pos - pos
    }

logging `"\s\s...#{i + 1}0% Complete..."` at each 10% step.

`upload_manifest` logs `"\s\sStoring SLO Manifest '#{container}/#{dest}'"` and retries the manifest upload with `with_retries("PUT SLO Manifest '#{container}/#{dest}'")`. `headers` sets `headers["X-Delete-At"] = delete_at if delete_at`. The `adjusted_segment_bytes` warning now reads:

    Your original #segment_size of #{orig_mb} MiB has been adjusted
    to #{mb} MiB in order to satisfy the limit of 1000 segments.
    To enforce your chosen #segment_size, you should use the Splitter.
    e.g. split_into_chunks_of #{mb * 1000} (#segment_size * 1000)

In the nested `Object` class, `@name = data["name"]` and `@hash = data["hash"]` are read with string keys, `slo?` returns `!!metadata["X-Static-Large-Object"]`, `marked_for_deletion?` returns `!!metadata["X-Delete-At"]`, and a couple of trailing blank lines are removed.
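The segmented-upload path above hinges on hashing each fixed-size segment while streaming the source file. A minimal sketch of that bookkeeping without any Cloud Files calls — the segment path prefix and the 1 MiB segment size passed in the usage example are illustrative, not the storage's configured `#segment_size`:

    require "digest/md5"
    require "tempfile"

    SEGMENT_BUFFER = 1024**2 # 1 MiB read size, as in CloudIO::CloudFiles

    # Walk the file segment by segment, hashing SEGMENT_BUFFER-sized reads,
    # and collect the path/etag/size entries an SLO manifest would need.
    def plan_segments(path, segment_bytes)
      segments = []
      File.open(path, "rb") do |file|
        segment_number = 0
        until file.eof?
          segment_number += 1
          pos = file.pos
          md5 = Digest::MD5.new
          while file.pos - pos < segment_bytes && !file.eof?
            md5 << file.read([SEGMENT_BUFFER, segment_bytes - (file.pos - pos)].min)
          end
          segments << {
            path: "backups/archive/#{segment_number.to_s.rjust(4, "0")}", # illustrative name
            etag: md5.hexdigest,
            size_bytes: file.pos - pos
          }
        end
      end
      segments
    end

    file = Tempfile.new("payload")
    file.write("x" * (3 * 1024**2 + 512)) # just over 3 MiB => four 1 MiB segments
    file.flush
    plan_segments(file.path, 1024**2).each { |seg| puts seg.inspect }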