backup 4.4.1 → 5.0.0.beta.3

This diff shows the changes between publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (81)
  1. checksums.yaml +4 -4
  2. data/LICENSE +19 -0
  3. data/README.md +13 -9
  4. data/bin/docker_test +24 -0
  5. data/lib/backup/archive.rb +31 -32
  6. data/lib/backup/binder.rb +2 -6
  7. data/lib/backup/cleaner.rb +14 -18
  8. data/lib/backup/cli.rb +104 -108
  9. data/lib/backup/cloud_io/base.rb +4 -7
  10. data/lib/backup/cloud_io/cloud_files.rb +60 -62
  11. data/lib/backup/cloud_io/s3.rb +69 -76
  12. data/lib/backup/compressor/base.rb +4 -7
  13. data/lib/backup/compressor/bzip2.rb +3 -7
  14. data/lib/backup/compressor/custom.rb +2 -6
  15. data/lib/backup/compressor/gzip.rb +16 -17
  16. data/lib/backup/config/dsl.rb +16 -17
  17. data/lib/backup/config/helpers.rb +10 -16
  18. data/lib/backup/config.rb +17 -18
  19. data/lib/backup/database/base.rb +22 -21
  20. data/lib/backup/database/mongodb.rb +36 -37
  21. data/lib/backup/database/mysql.rb +40 -41
  22. data/lib/backup/database/openldap.rb +8 -10
  23. data/lib/backup/database/postgresql.rb +29 -30
  24. data/lib/backup/database/redis.rb +27 -30
  25. data/lib/backup/database/riak.rb +15 -18
  26. data/lib/backup/database/sqlite.rb +4 -6
  27. data/lib/backup/encryptor/base.rb +2 -4
  28. data/lib/backup/encryptor/gpg.rb +49 -59
  29. data/lib/backup/encryptor/open_ssl.rb +11 -14
  30. data/lib/backup/errors.rb +7 -12
  31. data/lib/backup/logger/console.rb +5 -8
  32. data/lib/backup/logger/fog_adapter.rb +2 -6
  33. data/lib/backup/logger/logfile.rb +10 -12
  34. data/lib/backup/logger/syslog.rb +2 -4
  35. data/lib/backup/logger.rb +16 -18
  36. data/lib/backup/model.rb +33 -40
  37. data/lib/backup/notifier/base.rb +24 -26
  38. data/lib/backup/notifier/campfire.rb +9 -11
  39. data/lib/backup/notifier/command.rb +0 -3
  40. data/lib/backup/notifier/datadog.rb +9 -12
  41. data/lib/backup/notifier/flowdock.rb +13 -17
  42. data/lib/backup/notifier/hipchat.rb +11 -13
  43. data/lib/backup/notifier/http_post.rb +11 -14
  44. data/lib/backup/notifier/mail.rb +42 -59
  45. data/lib/backup/notifier/nagios.rb +5 -9
  46. data/lib/backup/notifier/pagerduty.rb +10 -12
  47. data/lib/backup/notifier/prowl.rb +15 -15
  48. data/lib/backup/notifier/pushover.rb +7 -10
  49. data/lib/backup/notifier/ses.rb +34 -16
  50. data/lib/backup/notifier/slack.rb +39 -40
  51. data/lib/backup/notifier/twitter.rb +2 -5
  52. data/lib/backup/notifier/zabbix.rb +11 -14
  53. data/lib/backup/package.rb +5 -9
  54. data/lib/backup/packager.rb +16 -17
  55. data/lib/backup/pipeline.rb +17 -21
  56. data/lib/backup/splitter.rb +8 -11
  57. data/lib/backup/storage/base.rb +5 -8
  58. data/lib/backup/storage/cloud_files.rb +21 -23
  59. data/lib/backup/storage/cycler.rb +10 -15
  60. data/lib/backup/storage/dropbox.rb +15 -21
  61. data/lib/backup/storage/ftp.rb +14 -10
  62. data/lib/backup/storage/local.rb +5 -8
  63. data/lib/backup/storage/qiniu.rb +8 -8
  64. data/lib/backup/storage/rsync.rb +24 -26
  65. data/lib/backup/storage/s3.rb +27 -28
  66. data/lib/backup/storage/scp.rb +10 -12
  67. data/lib/backup/storage/sftp.rb +10 -12
  68. data/lib/backup/syncer/base.rb +5 -8
  69. data/lib/backup/syncer/cloud/base.rb +27 -30
  70. data/lib/backup/syncer/cloud/cloud_files.rb +16 -18
  71. data/lib/backup/syncer/cloud/local_file.rb +5 -8
  72. data/lib/backup/syncer/cloud/s3.rb +23 -24
  73. data/lib/backup/syncer/rsync/base.rb +6 -10
  74. data/lib/backup/syncer/rsync/local.rb +1 -5
  75. data/lib/backup/syncer/rsync/pull.rb +6 -10
  76. data/lib/backup/syncer/rsync/push.rb +18 -22
  77. data/lib/backup/template.rb +9 -14
  78. data/lib/backup/utilities.rb +78 -69
  79. data/lib/backup/version.rb +1 -3
  80. data/lib/backup.rb +74 -78
  81. metadata +107 -676
data/lib/backup/packager.rb
@@ -1,5 +1,3 @@
-# encoding: utf-8
-
 module Backup
   module Packager
     class Error < Backup::Error; end
@@ -22,7 +20,7 @@ module Backup
           Logger.info "Packaging Complete!"
         else
           raise Error, "Failed to Create Backup Package\n" +
-            @pipeline.error_messages
+              @pipeline.error_messages
         end
       end

@@ -43,8 +41,8 @@ module Backup
         # or the Splitter (if no Encryptor), or through `cat` into the final
         # output file if neither are configured.
         @pipeline.add(
-          "#{ utility(:tar) } -cf - " +
-          "-C '#{ Config.tmp_path }' '#{ @package.trigger }'",
+          "#{utility(:tar)} -cf - " \
+          "-C '#{Config.tmp_path}' '#{@package.trigger}'",
           tar_success_codes
         )

@@ -75,26 +73,27 @@ module Backup
         #
         # If no Splitter was configured, the final file output will be
         # piped through `cat` into the final output file.
-        if @splitter
-          stack << lambda do
-            @splitter.split_with do |command|
-              @pipeline << command
+        stack <<
+          if @splitter
+            lambda do
+              @splitter.split_with do |command|
+                @pipeline << command
+                stack.shift.call
+              end
+            end
+          else
+            lambda do
+              outfile = File.join(Config.tmp_path, @package.basename)
+              @pipeline << "#{utility(:cat)} > #{outfile}"
               stack.shift.call
             end
           end
-        else
-          stack << lambda do
-            outfile = File.join(Config.tmp_path, @package.basename)
-            @pipeline << "#{ utility(:cat) } > #{ outfile }"
-            stack.shift.call
-          end
-        end

         ##
         # Last Proc to be called runs the Pipeline the procedure built.
         # Once complete, the call stack will unwind back through the
         # preceeding Procs in the stack (if any)
-        stack << lambda { @pipeline.run }
+        stack << -> { @pipeline.run }

         stack.shift
       end
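Note on the hunk above: the refactor keeps the Packager's deferred call-stack intact. Each Proc pushed onto `stack` appends its piece of the pipeline and then invokes the next Proc via `stack.shift.call`, so the full command pipeline is assembled before the final Proc runs it. A minimal standalone Ruby sketch of that pattern (the commands here are placeholders, not Backup's actual compressor/splitter wiring):

    pipeline = []
    stack = []

    # Each Proc adds its command, then hands control to the next Proc.
    stack << -> { pipeline << "gzip -c";             stack.shift.call }
    stack << -> { pipeline << "cat > backup.tar.gz"; stack.shift.call }
    stack << -> { puts pipeline.join(" | ") } # the last Proc "runs" the pipeline

    stack.shift.call
    # => gzip -c | cat > backup.tar.gz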
data/lib/backup/pipeline.rb
@@ -1,5 +1,3 @@
-# encoding: utf-8
-
 module Backup
   class Pipeline
     class Error < Backup::Error; end
@@ -12,7 +10,7 @@ module Backup
       @commands = []
       @success_codes = []
       @errors = []
-      @stderr = ''
+      @stderr = ""
     end

     ##
@@ -51,22 +49,21 @@ module Backup
     # Use `#success?` to determine if all commands in the pipeline succeeded.
     # If `#success?` returns `false`, use `#error_messages` to get an error report.
     def run
-      Open4.popen4(pipeline) do |pid, stdin, stdout, stderr|
-        pipestatus = stdout.read.gsub("\n", '').split(':').sort
+      Open4.popen4(pipeline) do |_pid, _stdin, stdout, stderr|
+        pipestatus = stdout.read.delete("\n").split(":").sort
         pipestatus.each do |status|
-          index, exitstatus = status.split('|').map(&:to_i)
-          unless @success_codes[index].include?(exitstatus)
-            command = command_name(@commands[index])
-            @errors << SystemCallError.new(
-              "'#{ command }' returned exit code: #{ exitstatus }", exitstatus
-            )
-          end
+          index, exitstatus = status.split("|").map(&:to_i)
+          next if @success_codes[index].include?(exitstatus)
+          command = command_name(@commands[index])
+          @errors << SystemCallError.new(
+            "'#{command}' returned exit code: #{exitstatus}", exitstatus
+          )
         end
         @stderr = stderr.read.strip
       end
       Logger.warn(stderr_messages) if success? && stderr_messages
     rescue Exception => err
-      raise Error.wrap(err, 'Pipeline failed to execute')
+      raise Error.wrap(err, "Pipeline failed to execute")
     end

     def success?
@@ -78,9 +75,9 @@ module Backup
     # from the commands in the pipeline (if any), along with the SystemCallError
     # (Errno) message for each command which had a non-zero exit status.
     def error_messages
-      @error_messages ||= (stderr_messages || '') +
-        "The following system errors were returned:\n" +
-        @errors.map {|err| "#{ err.class }: #{ err.message }" }.join("\n")
+      @error_messages ||= (stderr_messages || "") +
+        "The following system errors were returned:\n" +
+        @errors.map { |err| "#{err.class}: #{err.message}" }.join("\n")
     end

     private
@@ -106,19 +103,18 @@ module Backup
     def pipeline
       parts = []
       @commands.each_with_index do |command, index|
-        parts << %Q[{ #{ command } 2>&4 ; echo "#{ index }|$?:" >&3 ; }]
+        parts << %({ #{command} 2>&4 ; echo "#{index}|$?:" >&3 ; })
       end
-      %Q[{ #{ parts.join(' | ') } } 3>&1 1>&2 4>&2]
+      %({ #{parts.join(" | ")} } 3>&1 1>&2 4>&2)
     end

     def stderr_messages
-      @stderr_messages ||= @stderr.empty? ? false : <<-EOS.gsub(/^ +/, ' ')
+      @stderr_messages ||= @stderr.empty? ? false : <<-EOS.gsub(/^ +/, " ")
         Pipeline STDERR Messages:
         (Note: may be interleaved if multiple commands returned error messages)

-        #{ @stderr }
+        #{@stderr}
       EOS
     end
-
   end
 end
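For reference, the `%(...)` templates in the last hunk build a single shell command in which each command's exit status (tagged with its index) is echoed to file descriptor 3, while the commands' real stderr goes to descriptor 4; the outer group then remaps fd 3 onto stdout and fds 1 and 4 onto stderr, so `Open4.popen4` can read the status records from stdout. A sketch of what `Pipeline#pipeline` produces for two placeholder commands:

    commands = ["tar -cf - /data", "gzip"]
    parts = commands.each_with_index.map do |command, index|
      %({ #{command} 2>&4 ; echo "#{index}|$?:" >&3 ; })
    end
    puts %({ #{parts.join(" | ")} } 3>&1 1>&2 4>&2)
    # { { tar -cf - /data 2>&4 ; echo "0|$?:" >&3 ; } | { gzip 2>&4 ; echo "1|$?:" >&3 ; } } 3>&1 1>&2 4>&2

`run` then reads the `index|exitstatus:` records back from stdout, splits them on `:` and `|`, and checks each exit status against that command's registered success codes.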
data/lib/backup/splitter.rb
@@ -1,5 +1,3 @@
-# encoding: utf-8
-
 module Backup
   class Splitter
     include Utilities::Helpers
@@ -21,8 +19,8 @@ module Backup
     # Once the packaging procedure is complete, it will return and
     # @package.chunk_suffixes will be set based on the resulting files.
     def split_with
-      Logger.info "Splitter configured with a chunk size of #{ chunk_size }MB " +
-        "and suffix length of #{ suffix_length }."
+      Logger.info "Splitter configured with a chunk size of #{chunk_size}MB " \
+        "and suffix length of #{suffix_length}."
       yield split_command
       after_packaging
     end
@@ -34,8 +32,8 @@ module Backup
     # multiple files, based on @chunk_size and @suffix_length, using the full
     # path to the final @package.basename, plus a '-' separator as the `prefix`.
     def split_command
-      "#{ utility(:split) } -a #{ suffix_length } -b #{ chunk_size }m - " +
-        "'#{ File.join(Config.tmp_path, package.basename + '-') }'"
+      "#{utility(:split)} -a #{suffix_length} -b #{chunk_size}m - " \
+        "'#{File.join(Config.tmp_path, package.basename + "-")}'"
     end

     ##
@@ -47,10 +45,10 @@ module Backup
     # remove the suffix from the filename.
     def after_packaging
       suffixes = chunk_suffixes
-      first_suffix = 'a' * suffix_length
+      first_suffix = "a" * suffix_length
       if suffixes == [first_suffix]
         FileUtils.mv(
-          File.join(Config.tmp_path, "#{ package.basename }-#{ first_suffix }"),
+          File.join(Config.tmp_path, "#{package.basename}-#{first_suffix}"),
           File.join(Config.tmp_path, package.basename)
         )
       else
@@ -62,15 +60,14 @@ module Backup
     # Returns an array of suffixes for each chunk, in alphabetical order.
     # For example: [aa, ab, ac, ad, ae] or [aaa, aab, aac aad]
     def chunk_suffixes
-      chunks.map {|chunk| File.extname(chunk).split('-').last }.sort
+      chunks.map { |chunk| File.extname(chunk).split("-").last }.sort
     end

     ##
     # Returns an array of full paths to the backup chunks.
     # Chunks are sorted in alphabetical order.
     def chunks
-      Dir[File.join(Config.tmp_path, package.basename + '-*')].sort
+      Dir[File.join(Config.tmp_path, package.basename + "-*")].sort
     end
-
   end
 end
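As an illustration of the two hunks above (all values made up): with a suffix length of 2 and a chunk size of 250, `split_command` yields something like `split -a 2 -b 250m - '/tmp/backup/my_backup.tar-'`, and `chunk_suffixes` later recovers the suffixes from the files `split` produced:

    # Chunk paths as `split` would produce them (placeholders):
    chunks = ["/tmp/backup/my_backup.tar-aa", "/tmp/backup/my_backup.tar-ab"]
    suffixes = chunks.map { |chunk| File.extname(chunk).split("-").last }.sort
    # => ["aa", "ab"]  (stored on the package as chunk_suffixes)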
data/lib/backup/storage/base.rb
@@ -1,5 +1,3 @@
-# encoding: utf-8
-
 module Backup
   module Storage
     class Base
@@ -34,19 +32,19 @@ module Backup
       def initialize(model, storage_id = nil, &block)
         @model = model
         @package = model.package
-        @storage_id = storage_id.to_s.gsub(/\W/, '_') if storage_id
+        @storage_id = storage_id.to_s.gsub(/\W/, "_") if storage_id

         load_defaults!
         instance_eval(&block) if block_given?
       end

       def perform!
-        Logger.info "#{ storage_name } Started..."
+        Logger.info "#{storage_name} Started..."
         transfer!
         if respond_to?(:cycle!, true) && (keep.to_i > 0 || keep.is_a?(Time))
           cycle!
         end
-        Logger.info "#{ storage_name } Finished!"
+        Logger.info "#{storage_name} Finished!"
       end

       private
@@ -60,10 +58,9 @@ module Backup
       alias :remote_path_for :remote_path

       def storage_name
-        @storage_name ||= self.class.to_s.sub('Backup::', '') +
-          (storage_id ? " (#{ storage_id })" : '')
+        @storage_name ||= self.class.to_s.sub("Backup::", "") +
+          (storage_id ? " (#{storage_id})" : "")
       end
-
     end
   end
 end
data/lib/backup/storage/cloud_files.rb
@@ -1,5 +1,4 @@
-# encoding: utf-8
-require 'backup/cloud_io/cloud_files'
+require "backup/cloud_io/cloud_files"

 module Backup
   module Storage
@@ -79,8 +78,8 @@ module Backup
         @max_retries ||= 10
         @retry_waitsec ||= 30

-        @path ||= 'backups'
-        path.sub!(/^\//, '')
+        @path ||= "backups"
+        path.sub!(/^\//, "")

         check_configuration
       end
@@ -89,18 +88,18 @@ module Backup

       def cloud_io
         @cloud_io ||= CloudIO::CloudFiles.new(
-          :username => username,
-          :api_key => api_key,
-          :auth_url => auth_url,
-          :region => region,
-          :servicenet => servicenet,
-          :container => container,
-          :segments_container => segments_container,
-          :segment_size => segment_size,
-          :days_to_keep => days_to_keep,
-          :max_retries => max_retries,
-          :retry_waitsec => retry_waitsec,
-          :fog_options => fog_options
+          username: username,
+          api_key: api_key,
+          auth_url: auth_url,
+          region: region,
+          servicenet: servicenet,
+          container: container,
+          segments_container: segments_container,
+          segment_size: segment_size,
+          days_to_keep: days_to_keep,
+          max_retries: max_retries,
+          retry_waitsec: retry_waitsec,
+          fog_options: fog_options
         )
       end

@@ -108,7 +107,7 @@
         package.filenames.each do |filename|
           src = File.join(Config.tmp_path, filename)
           dest = File.join(remote_path, filename)
-          Logger.info "Storing '#{ container }/#{ dest }'..."
+          Logger.info "Storing '#{container}/#{dest}'..."
           cloud_io.upload(src, dest)
         end

@@ -118,12 +117,12 @@
       # Called by the Cycler.
       # Any error raised will be logged as a warning.
       def remove!(package)
-        Logger.info "Removing backup package dated #{ package.time }..."
+        Logger.info "Removing backup package dated #{package.time}..."

         remote_path = remote_path_for(package)
         objects = cloud_io.objects(remote_path)

-        raise Error, "Package at '#{ remote_path }' not found" if objects.empty?
+        raise Error, "Package at '#{remote_path}' not found" if objects.empty?

         slo_objects, objects = objects.partition(&:slo?)
         cloud_io.delete_slo(slo_objects)
@@ -131,10 +130,10 @@
       end

       def check_configuration
-        required = %w{ username api_key container }
-        raise Error, <<-EOS if required.map {|name| send(name) }.any?(&:nil?)
+        required = %w[username api_key container]
+        raise Error, <<-EOS if required.map { |name| send(name) }.any?(&:nil?)
           Configuration Error
-          #{ required.map {|name| "##{ name }"}.join(', ') } are all required
+          #{required.map { |name| "##{name}" }.join(", ")} are all required
         EOS

         raise Error, <<-EOS if segment_size > 0 && segments_container.to_s.empty?
@@ -152,7 +151,6 @@
           #segment_size is too large (max 5120)
         EOS
       end
-
     end
   end
 end
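A hedged configuration sketch for this storage, assuming the gem's usual model DSL (`store_with`); every value below is a placeholder. Per the `check_configuration` hunk above, `username`, `api_key` and `container` are required, and `segments_container` must be set whenever `segment_size` is greater than zero:

    Backup::Model.new(:my_backup, "My Backup") do
      store_with CloudFiles do |cf|
        cf.username           = "my-rackspace-user"
        cf.api_key            = "my-api-key"
        cf.container          = "my-backups"
        cf.segments_container = "my-backup-segments" # required when segment_size > 0
        cf.segment_size       = 5                    # MiB, must not exceed 5120
        cf.path               = "backups"            # default
      end
    end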
data/lib/backup/storage/cycler.rb
@@ -1,5 +1,3 @@
-# encoding: utf-8
-
 module Backup
   module Storage
     module Cycler
@@ -11,7 +9,7 @@ module Backup
       # and will remove any old package file(s) when the storage limit
       # set by #keep is exceeded.
       def cycle!
-        Logger.info 'Cycling Started...'
+        Logger.info "Cycling Started..."

         packages = yaml_load.unshift(package)
         cycled_packages = []
@@ -32,24 +30,22 @@ module Backup
       end

       def delete_package(package)
-        begin
-          remove!(package) unless package.no_cycle
-        rescue => err
-          Logger.warn Error.wrap(err, <<-EOS)
+        remove!(package) unless package.no_cycle
+      rescue => err
+        Logger.warn Error.wrap(err, <<-EOS)
           There was a problem removing the following package:
           Trigger: #{package.trigger} :: Dated: #{package.time}
-          Package included the following #{ package.filenames.count } file(s):
-          #{ package.filenames.join("\n") }
+          Package included the following #{package.filenames.count} file(s):
+          #{package.filenames.join("\n")}
          EOS
-        end
       end

       # Returns path to the YAML data file.
       def yaml_file
         @yaml_file ||= begin
-          filename = self.class.to_s.split('::').last
-          filename << "-#{ storage_id }" if storage_id
-          File.join(Config.data_path, package.trigger, "#{ filename }.yml")
+          filename = self.class.to_s.split("::").last
+          filename << "-#{storage_id}" if storage_id
+          File.join(Config.data_path, package.trigger, "#{filename}.yml")
         end
       end

@@ -65,11 +61,10 @@ module Backup
       # Stores the given package objects to the YAML data file.
       def yaml_save(packages)
         FileUtils.mkdir_p(File.dirname(yaml_file))
-        File.open(yaml_file, 'w') do |file|
+        File.open(yaml_file, "w") do |file|
           file.write(packages.to_yaml)
         end
       end
-
     end
   end
 end
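A small worked example of the `yaml_file` naming shown above (the paths and storage_id are illustrative): for a `Backup::Storage::Local` given a storage_id of `offsite` on a model triggered as `my_backup`, the cycler's state file lands under `Config.data_path`:

    filename = "Backup::Storage::Local".split("::").last  # => "Local"
    filename << "-offsite"                                # => "Local-offsite"
    puts File.join("/path/to/.data", "my_backup", "#{filename}.yml")
    # => /path/to/.data/my_backup/Local-offsite.yml
    # ("/path/to/.data" stands in for Config.data_path)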
data/lib/backup/storage/dropbox.rb
@@ -1,5 +1,4 @@
-# encoding: utf-8
-require 'dropbox_sdk'
+require "dropbox_sdk"

 module Backup
   module Storage
@@ -50,13 +49,13 @@ module Backup
       def initialize(model, storage_id = nil)
         super

-        @path ||= 'backups'
-        @cache_path ||= '.cache'
+        @path ||= "backups"
+        @cache_path ||= ".cache"
         @access_type ||= :app_folder
         @chunk_size ||= 4 # MiB
         @max_retries ||= 10
         @retry_waitsec ||= 30
-        path.sub!(/^\//, '')
+        path.sub!(/^\//, "")
       end

       private
@@ -81,9 +80,8 @@ module Backup

         # will raise an error if session not authorized
         @connection = DropboxClient.new(session, access_type)
-
       rescue => err
-        raise Error.wrap(err, 'Authorization Failed')
+        raise Error.wrap(err, "Authorization Failed")
       end

       ##
@@ -94,7 +92,6 @@ module Backup
         begin
           session = DropboxSession.deserialize(File.read(cached_file))
           Logger.info "Session data loaded from cache!"
-
         rescue => err
           Logger.warn Error.wrap(err, <<-EOS)
             Could not read session data from cache.
@@ -113,10 +110,10 @@ module Backup
         package.filenames.each do |filename|
           src = File.join(Config.tmp_path, filename)
           dest = File.join(remote_path, filename)
-          Logger.info "Storing '#{ dest }'..."
+          Logger.info "Storing '#{dest}'..."

           uploader = nil
-          File.open(src, 'r') do |file|
+          File.open(src, "r") do |file|
             uploader = connection.get_chunked_uploader(file, file.stat.size)
             while uploader.offset < uploader.total_size
               with_retries do
@@ -129,9 +126,8 @@ module Backup
             uploader.finish(dest)
           end
         end
-
       rescue => err
-        raise Error.wrap(err, 'Upload Failed!')
+        raise Error.wrap(err, "Upload Failed!")
       end

       def with_retries
@@ -142,7 +138,7 @@ module Backup
           retries += 1
           raise if retries > max_retries

-          Logger.info Error.wrap(err, "Retry ##{ retries } of #{ max_retries }.")
+          Logger.info Error.wrap(err, "Retry ##{retries} of #{max_retries}.")
           sleep(retry_waitsec)
           retry
         end
@@ -151,13 +147,13 @@ module Backup
       # Called by the Cycler.
       # Any error raised will be logged as a warning.
       def remove!(package)
-        Logger.info "Removing backup package dated #{ package.time }..."
+        Logger.info "Removing backup package dated #{package.time}..."

         connection.file_delete(remote_path_for(package))
       end

       def cached_file
-        path = cache_path.start_with?('/') ?
+        path = cache_path.start_with?("/") ?
           cache_path : File.join(Config.root_path, cache_path)
         File.join(path, api_key + api_secret)
       end
@@ -175,7 +171,7 @@ module Backup
       # Create a new session, write a serialized version of it to the
       # .cache directory, and return the session object
       def create_write_and_return_new_session!
-        require 'timeout'
+        require "timeout"

         session = DropboxSession.new(api_key, api_secret)

@@ -183,12 +179,12 @@ module Backup
         session.get_request_token

         template = Backup::Template.new(
-          {:session => session, :cached_file => cached_file}
+          session: session, cached_file: cached_file
         )
         template.render("storage/dropbox/authorization_url.erb")

         # wait for user to hit 'return' to continue
-        Timeout::timeout(180) { STDIN.gets }
+        Timeout.timeout(180) { STDIN.gets }

         # this will raise an error if the user did not
         # visit the authorization_url and grant access
@@ -202,11 +198,9 @@ module Backup
         template.render("storage/dropbox/cache_file_written.erb")

         session
-
       rescue => err
-        raise Error.wrap(err, 'Could not create or authenticate a new session')
+        raise Error.wrap(err, "Could not create or authenticate a new session")
       end
-
     end
   end
 end
data/lib/backup/storage/ftp.rb
@@ -1,5 +1,4 @@
-# encoding: utf-8
-require 'net/ftp'
+require "net/ftp"

 module Backup
   module Storage
@@ -31,10 +30,10 @@ module Backup
         super

         @port ||= 21
-        @path ||= 'backups'
+        @path ||= "backups"
         @passive_mode ||= false
         @timeout ||= nil
-        path.sub!(/^~\//, '')
+        path.sub!(/^~\//, "")
       end

       private
@@ -51,6 +50,12 @@ module Backup
           Net::FTP.send(:remove_const, :FTP_PORT)
         end; Net::FTP.send(:const_set, :FTP_PORT, port)

+        # Ensure default passive mode to false.
+        # Note: The default passive setting changed between Ruby 2.2 and 2.3
+        if Net::FTP.respond_to?(:default_passive=)
+          Net::FTP.default_passive = false
+        end
+
         Net::FTP.open(ip, username, password) do |ftp|
           if timeout
             ftp.open_timeout = timeout
@@ -68,7 +73,7 @@ module Backup
           package.filenames.each do |filename|
             src = File.join(Config.tmp_path, filename)
             dest = File.join(remote_path, filename)
-            Logger.info "Storing '#{ ip }:#{ dest }'..."
+            Logger.info "Storing '#{ip}:#{dest}'..."
             ftp.put(src, dest)
           end
         end
@@ -77,7 +82,7 @@ module Backup
       # Called by the Cycler.
       # Any error raised will be logged as a warning.
       def remove!(package)
-        Logger.info "Removing backup package dated #{ package.time }..."
+        Logger.info "Removing backup package dated #{package.time}..."

         remote_path = remote_path_for(package)
         connection do |ftp|
@@ -98,15 +103,14 @@ module Backup
       # Net::FTP raises an exception when the directory it's trying to create
       # already exists, so we have rescue it
       def create_remote_path(ftp)
-        path_parts = Array.new
-        remote_path.split('/').each do |path_part|
+        path_parts = []
+        remote_path.split("/").each do |path_part|
           path_parts << path_part
           begin
-            ftp.mkdir(path_parts.join('/'))
+            ftp.mkdir(path_parts.join("/"))
           rescue Net::FTPPermError; end
         end
       end
-
     end
   end
 end
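A standalone sketch of the `create_remote_path` loop from the last hunk, with a stub standing in for a live `Net::FTP` connection (both the stub and the remote path are illustrative):

    require "net/ftp"

    ftp = Object.new
    def ftp.mkdir(dir)
      puts "mkdir #{dir}" # a real connection would create the directory on the server
    end

    remote_path = "backups/my_backup/2016.01.01.12.00.00"
    path_parts = []
    remote_path.split("/").each do |path_part|
      path_parts << path_part
      begin
        ftp.mkdir(path_parts.join("/"))
      rescue Net::FTPPermError
        # directory already exists on the server; ignore and keep going
      end
    end
    # mkdir backups
    # mkdir backups/my_backup
    # mkdir backups/my_backup/2016.01.01.12.00.00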
data/lib/backup/storage/local.rb
@@ -1,5 +1,3 @@
-# encoding: utf-8
-
 module Backup
   module Storage
     class Local < Base
@@ -9,7 +7,7 @@ module Backup
       def initialize(model, storage_id = nil)
         super

-        @path ||= '~/backups'
+        @path ||= "~/backups"
       end

       private
@@ -21,7 +19,7 @@ module Backup
         package.filenames.each do |filename|
           src = File.join(Config.tmp_path, filename)
           dest = File.join(remote_path, filename)
-          Logger.info "Storing '#{ dest }'..."
+          Logger.info "Storing '#{dest}'..."

           FileUtils.send(transfer_method, src, dest)
         end
@@ -30,7 +28,7 @@ module Backup
       # Called by the Cycler.
       # Any error raised will be logged as a warning.
       def remove!(package)
-        Logger.info "Removing backup package dated #{ package.time }..."
+        Logger.info "Removing backup package dated #{package.time}..."

         FileUtils.rm_r(remote_path_for(package))
       end
@@ -50,15 +48,14 @@ module Backup
         else
           Logger.warn Error.new(<<-EOS)
             Local File Copy Warning!
-            The final backup file(s) for '#{ model.label }' (#{ model.trigger })
-            will be *copied* to '#{ remote_path }'
+            The final backup file(s) for '#{model.label}' (#{model.trigger})
+            will be *copied* to '#{remote_path}'
             To avoid this, when using more than one Storage, the 'Local' Storage
             should be added *last* so the files may be *moved* to their destination.
           EOS
           false
         end
       end
-
     end
   end
 end