backupii 0.1.0.pre.alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (135)
  1. checksums.yaml +7 -0
  2. data/LICENSE +19 -0
  3. data/README.md +37 -0
  4. data/bin/backupii +5 -0
  5. data/bin/docker_test +24 -0
  6. data/lib/backup/archive.rb +171 -0
  7. data/lib/backup/binder.rb +23 -0
  8. data/lib/backup/cleaner.rb +114 -0
  9. data/lib/backup/cli.rb +376 -0
  10. data/lib/backup/cloud_io/base.rb +40 -0
  11. data/lib/backup/cloud_io/cloud_files.rb +301 -0
  12. data/lib/backup/cloud_io/s3.rb +256 -0
  13. data/lib/backup/compressor/base.rb +34 -0
  14. data/lib/backup/compressor/bzip2.rb +37 -0
  15. data/lib/backup/compressor/custom.rb +51 -0
  16. data/lib/backup/compressor/gzip.rb +76 -0
  17. data/lib/backup/config/dsl.rb +103 -0
  18. data/lib/backup/config/helpers.rb +139 -0
  19. data/lib/backup/config.rb +122 -0
  20. data/lib/backup/database/base.rb +89 -0
  21. data/lib/backup/database/mongodb.rb +189 -0
  22. data/lib/backup/database/mysql.rb +194 -0
  23. data/lib/backup/database/openldap.rb +97 -0
  24. data/lib/backup/database/postgresql.rb +134 -0
  25. data/lib/backup/database/redis.rb +179 -0
  26. data/lib/backup/database/riak.rb +82 -0
  27. data/lib/backup/database/sqlite.rb +57 -0
  28. data/lib/backup/encryptor/base.rb +29 -0
  29. data/lib/backup/encryptor/gpg.rb +745 -0
  30. data/lib/backup/encryptor/open_ssl.rb +76 -0
  31. data/lib/backup/errors.rb +55 -0
  32. data/lib/backup/logger/console.rb +50 -0
  33. data/lib/backup/logger/fog_adapter.rb +27 -0
  34. data/lib/backup/logger/logfile.rb +134 -0
  35. data/lib/backup/logger/syslog.rb +116 -0
  36. data/lib/backup/logger.rb +199 -0
  37. data/lib/backup/model.rb +478 -0
  38. data/lib/backup/notifier/base.rb +128 -0
  39. data/lib/backup/notifier/campfire.rb +63 -0
  40. data/lib/backup/notifier/command.rb +101 -0
  41. data/lib/backup/notifier/datadog.rb +107 -0
  42. data/lib/backup/notifier/flowdock.rb +101 -0
  43. data/lib/backup/notifier/hipchat.rb +118 -0
  44. data/lib/backup/notifier/http_post.rb +116 -0
  45. data/lib/backup/notifier/mail.rb +235 -0
  46. data/lib/backup/notifier/nagios.rb +67 -0
  47. data/lib/backup/notifier/pagerduty.rb +82 -0
  48. data/lib/backup/notifier/prowl.rb +70 -0
  49. data/lib/backup/notifier/pushover.rb +73 -0
  50. data/lib/backup/notifier/ses.rb +126 -0
  51. data/lib/backup/notifier/slack.rb +149 -0
  52. data/lib/backup/notifier/twitter.rb +57 -0
  53. data/lib/backup/notifier/zabbix.rb +62 -0
  54. data/lib/backup/package.rb +53 -0
  55. data/lib/backup/packager.rb +108 -0
  56. data/lib/backup/pipeline.rb +122 -0
  57. data/lib/backup/splitter.rb +75 -0
  58. data/lib/backup/storage/base.rb +72 -0
  59. data/lib/backup/storage/cloud_files.rb +158 -0
  60. data/lib/backup/storage/cycler.rb +73 -0
  61. data/lib/backup/storage/dropbox.rb +208 -0
  62. data/lib/backup/storage/ftp.rb +118 -0
  63. data/lib/backup/storage/local.rb +63 -0
  64. data/lib/backup/storage/qiniu.rb +68 -0
  65. data/lib/backup/storage/rsync.rb +251 -0
  66. data/lib/backup/storage/s3.rb +157 -0
  67. data/lib/backup/storage/scp.rb +67 -0
  68. data/lib/backup/storage/sftp.rb +82 -0
  69. data/lib/backup/syncer/base.rb +70 -0
  70. data/lib/backup/syncer/cloud/base.rb +180 -0
  71. data/lib/backup/syncer/cloud/cloud_files.rb +83 -0
  72. data/lib/backup/syncer/cloud/local_file.rb +99 -0
  73. data/lib/backup/syncer/cloud/s3.rb +118 -0
  74. data/lib/backup/syncer/rsync/base.rb +55 -0
  75. data/lib/backup/syncer/rsync/local.rb +29 -0
  76. data/lib/backup/syncer/rsync/pull.rb +49 -0
  77. data/lib/backup/syncer/rsync/push.rb +206 -0
  78. data/lib/backup/template.rb +45 -0
  79. data/lib/backup/utilities.rb +235 -0
  80. data/lib/backup/version.rb +5 -0
  81. data/lib/backup.rb +141 -0
  82. data/templates/cli/archive +28 -0
  83. data/templates/cli/compressor/bzip2 +4 -0
  84. data/templates/cli/compressor/custom +7 -0
  85. data/templates/cli/compressor/gzip +4 -0
  86. data/templates/cli/config +123 -0
  87. data/templates/cli/databases/mongodb +15 -0
  88. data/templates/cli/databases/mysql +18 -0
  89. data/templates/cli/databases/openldap +24 -0
  90. data/templates/cli/databases/postgresql +16 -0
  91. data/templates/cli/databases/redis +16 -0
  92. data/templates/cli/databases/riak +17 -0
  93. data/templates/cli/databases/sqlite +11 -0
  94. data/templates/cli/encryptor/gpg +27 -0
  95. data/templates/cli/encryptor/openssl +9 -0
  96. data/templates/cli/model +26 -0
  97. data/templates/cli/notifier/zabbix +15 -0
  98. data/templates/cli/notifiers/campfire +12 -0
  99. data/templates/cli/notifiers/command +32 -0
  100. data/templates/cli/notifiers/datadog +57 -0
  101. data/templates/cli/notifiers/flowdock +16 -0
  102. data/templates/cli/notifiers/hipchat +16 -0
  103. data/templates/cli/notifiers/http_post +32 -0
  104. data/templates/cli/notifiers/mail +24 -0
  105. data/templates/cli/notifiers/nagios +13 -0
  106. data/templates/cli/notifiers/pagerduty +12 -0
  107. data/templates/cli/notifiers/prowl +11 -0
  108. data/templates/cli/notifiers/pushover +11 -0
  109. data/templates/cli/notifiers/ses +15 -0
  110. data/templates/cli/notifiers/slack +22 -0
  111. data/templates/cli/notifiers/twitter +13 -0
  112. data/templates/cli/splitter +7 -0
  113. data/templates/cli/storages/cloud_files +11 -0
  114. data/templates/cli/storages/dropbox +20 -0
  115. data/templates/cli/storages/ftp +13 -0
  116. data/templates/cli/storages/local +8 -0
  117. data/templates/cli/storages/qiniu +12 -0
  118. data/templates/cli/storages/rsync +17 -0
  119. data/templates/cli/storages/s3 +16 -0
  120. data/templates/cli/storages/scp +15 -0
  121. data/templates/cli/storages/sftp +15 -0
  122. data/templates/cli/syncers/cloud_files +22 -0
  123. data/templates/cli/syncers/rsync_local +20 -0
  124. data/templates/cli/syncers/rsync_pull +28 -0
  125. data/templates/cli/syncers/rsync_push +28 -0
  126. data/templates/cli/syncers/s3 +27 -0
  127. data/templates/general/links +3 -0
  128. data/templates/general/version.erb +2 -0
  129. data/templates/notifier/mail/failure.erb +16 -0
  130. data/templates/notifier/mail/success.erb +16 -0
  131. data/templates/notifier/mail/warning.erb +16 -0
  132. data/templates/storage/dropbox/authorization_url.erb +6 -0
  133. data/templates/storage/dropbox/authorized.erb +4 -0
  134. data/templates/storage/dropbox/cache_file_written.erb +10 -0
  135. metadata +507 -0
# frozen_string_literal: true

require "backup/cloud_io/cloud_files"

module Backup
  module Storage
    # Stores backup packages in a Rackspace Cloud Files container.
    # Files larger than #segment_size are uploaded as Static Large
    # Objects (SLO) via CloudIO::CloudFiles.
    class CloudFiles < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Rackspace CloudFiles Credentials
      attr_accessor :username, :api_key

      ##
      # Rackspace Auth URL (optional)
      attr_accessor :auth_url

      ##
      # Rackspace Service Net
      # (LAN-based transfers to avoid charges and improve performance)
      attr_accessor :servicenet

      ##
      # Rackspace Region (optional)
      attr_accessor :region

      ##
      # Rackspace Container Name
      attr_accessor :container

      ##
      # Rackspace Container Name for SLO Segments
      # Required if #segment_size is set. Must be different from #container.
      attr_accessor :segments_container

      ##
      # SLO Segment size, specified in MiB.
      #
      # Each package file larger than +segment_size+
      # will be uploaded as a Static Large Objects (SLO).
      #
      # Defaults to 0 for backward compatibility (pre v.3.7.0),
      # since #segments_container would be required.
      #
      # Minimum: 1 (0 disables SLO support)
      # Maximum: 5120 (5 GiB)
      attr_accessor :segment_size

      ##
      # If set, all backup package files (including SLO segments) will be
      # scheduled for automatic removal by the server.
      #
      # The `keep` option should not be used if this is set,
      # unless you're transitioning from the `keep` option.
      attr_accessor :days_to_keep

      ##
      # Number of times to retry failed operations.
      #
      # Default: 10
      attr_accessor :max_retries

      ##
      # Time in seconds to pause before each retry.
      #
      # Default: 30
      attr_accessor :retry_waitsec

      ##
      # Additional options to pass along to fog.
      # e.g. Fog::Storage.new({ :provider => 'Rackspace' }.merge(fog_options))
      attr_accessor :fog_options

      def initialize(model, storage_id = nil)
        super

        @servicenet    ||= false
        @segment_size  ||= 0
        @max_retries   ||= 10
        @retry_waitsec ||= 30

        @path ||= "backups".dup
        path.sub!(%r{^/}, "")

        check_configuration
      end

      private

      # Lazily builds the CloudIO client from the configured settings.
      def cloud_io
        @cloud_io ||= CloudIO::CloudFiles.new(
          username: username,
          api_key: api_key,
          auth_url: auth_url,
          region: region,
          servicenet: servicenet,
          container: container,
          segments_container: segments_container,
          segment_size: segment_size,
          days_to_keep: days_to_keep,
          max_retries: max_retries,
          retry_waitsec: retry_waitsec,
          fog_options: fog_options
        )
      end

      # Uploads each file of the current package to the container.
      def transfer!
        package.filenames.each do |name|
          source = File.join(Config.tmp_path, name)
          target = File.join(remote_path, name)
          Logger.info "Storing '#{container}/#{target}'..."
          cloud_io.upload(source, target)
        end

        # When the server expires objects itself, local cycling is skipped.
        package.no_cycle = true if days_to_keep
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."

        remote_path = remote_path_for(package)
        objects = cloud_io.objects(remote_path)

        raise Error, "Package at '#{remote_path}' not found" if objects.empty?

        # SLO manifests require a different delete call than plain objects.
        slo_objects, objects = objects.partition(&:slo?)
        cloud_io.delete_slo(slo_objects)
        cloud_io.delete(objects)
      end

      # Validates required credentials and the SLO-related constraints.
      def check_configuration
        required = %w[username api_key container]
        raise Error, <<-EOS if required.map { |name| send(name) }.any?(&:nil?)
          Configuration Error
          #{required.map { |name| "##{name}" }.join(", ")} are all required
        EOS

        raise Error, <<-EOS if segment_size > 0 && segments_container.to_s.empty?
          Configuration Error
          #segments_container is required if #segment_size is > 0
        EOS

        raise Error, <<-EOS if container == segments_container
          Configuration Error
          #container and #segments_container must not be the same container.
        EOS

        raise Error, <<-EOS if segment_size > 5120
          Configuration Error
          #segment_size is too large (max 5120)
        EOS
      end
    end
  end
end
# frozen_string_literal: true

module Backup
  module Storage
    # Maintains a YAML history of stored packages for each storage and
    # removes packages that exceed the limit configured by #keep.
    #
    # #keep may be an Integer (maximum number of packages to retain) or a
    # Date/Time (packages older than that point are removed).
    module Cycler
      class Error < Backup::Error; end

      private

      # Adds the current package being stored to the YAML cycle data file
      # and will remove any old package file(s) when the storage limit
      # set by #keep is exceeded.
      def cycle!
        Logger.info "Cycling Started..."

        packages = yaml_load.unshift(package)

        cycled_packages =
          if keep.is_a?(Date) || keep.is_a?(Time)
            packages.select { |p| p.time_as_object < keep.to_time }
          else
            excess = packages.count - keep.to_i
            excess > 0 ? packages.last(excess) : []
          end

        saved_packages = packages - cycled_packages
        cycled_packages.each { |package| delete_package package }

        yaml_save(saved_packages)
      end

      # Removes a single package via the storage's #remove!.
      # Any error raised is logged as a warning so cycling can continue.
      def delete_package(package)
        remove!(package) unless package.no_cycle
      rescue => err
        Logger.warn Error.wrap(err, <<-EOS)
          There was a problem removing the following package:
          Trigger: #{package.trigger} :: Dated: #{package.time}
          Package included the following #{package.filenames.count} file(s):
          #{package.filenames.join("\n")}
        EOS
      end

      # Returns path to the YAML data file.
      # The name is derived from the storage class (e.g. "Local"), suffixed
      # with the storage_id when one is set, so multiple storages of the
      # same type each get their own data file.
      def yaml_file
        @yaml_file ||= begin
          filename = self.class.to_s.split("::").last
          filename << "-#{storage_id}" if storage_id
          # FIX: the built filename was previously discarded (a garbled
          # literal was used instead of interpolating it).
          File.join(Config.data_path, package.trigger, "#{filename}.yml")
        end
      end

      # Returns stored Package objects, sorted by #time descending (oldest
      # last). Returns an empty array when no data file exists yet.
      def yaml_load
        if File.exist?(yaml_file) && !File.zero?(yaml_file)
          YAML.load_file(yaml_file).sort_by!(&:time).reverse!
        else
          []
        end
      end

      # Stores the given package objects to the YAML data file.
      def yaml_save(packages)
        FileUtils.mkdir_p(File.dirname(yaml_file))
        File.open(yaml_file, "w") do |file|
          file.write(packages.to_yaml)
        end
      end
    end
  end
end
# frozen_string_literal: true

require "dropbox_sdk"

module Backup
  module Storage
    # Stores backup packages in Dropbox using the chunked uploader.
    class Dropbox < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Dropbox API credentials
      attr_accessor :api_key, :api_secret

      ##
      # Path to store cached authorized session.
      #
      # Relative paths will be expanded using Config.root_path,
      # which by default is ~/Backup unless --root-path was used
      # on the command line or set in config.rb.
      #
      # By default, +cache_path+ is '.cache', which would be
      # '~/Backup/.cache/' if using the default root_path.
      attr_accessor :cache_path

      ##
      # Dropbox Access Type
      # Valid values are:
      #   :app_folder (default)
      #   :dropbox (full access)
      attr_accessor :access_type

      ##
      # Chunk size, specified in MiB, for the ChunkedUploader.
      attr_accessor :chunk_size

      ##
      # Number of times to retry failed operations.
      #
      # Default: 10
      attr_accessor :max_retries

      ##
      # Time in seconds to pause before each retry.
      #
      # Default: 30
      attr_accessor :retry_waitsec

      ##
      # Creates a new instance of the storage object
      def initialize(model, storage_id = nil)
        super

        @path          ||= "backups".dup
        @cache_path    ||= ".cache"
        @access_type   ||= :app_folder
        @chunk_size    ||= 4 # MiB
        @max_retries   ||= 10
        @retry_waitsec ||= 30
        path.sub!(%r{^/}, "")
      end

      private

      ##
      # The initial connection to Dropbox will provide the user with an
      # authorization url. The user must open this URL and confirm that the
      # authorization successfully took place. If this is the case, then the
      # user hits 'enter' and the session will be properly established.
      # Immediately after establishing the session, the session will be
      # serialized and written to a cache file in +cache_path+.
      # The cached file will be used from that point on to re-establish a
      # connection with Dropbox at a later time. This allows the user to avoid
      # having to go to a new Dropbox URL to authorize over and over again.
      def connection
        return @connection if @connection

        unless (session = cached_session)
          Logger.info "Creating a new session!"
          session = create_write_and_return_new_session!
        end

        # will raise an error if session not authorized
        @connection = DropboxClient.new(session, access_type)
      rescue => err
        raise Error.wrap(err, "Authorization Failed")
      end

      ##
      # Attempt to load a cached session.
      # Returns false when no usable cache exists.
      def cached_session
        session = false
        if File.exist?(cached_file)
          begin
            session = DropboxSession.deserialize(File.read(cached_file))
            Logger.info "Session data loaded from cache!"
          rescue => err
            Logger.warn Error.wrap(err, <<-EOS)
              Could not read session data from cache.
              Cache data might be corrupt.
            EOS
          end
        end
        session
      end

      ##
      # Transfer each of the package files to Dropbox in chunks of +chunk_size+.
      # Each chunk will be retried +chunk_retries+ times, pausing
      # +retry_waitsec+ between retries, if errors occur.
      def transfer!
        package.filenames.each do |name|
          source = File.join(Config.tmp_path, name)
          target = File.join(remote_path, name)
          Logger.info "Storing '#{target}'..."

          uploader = nil
          File.open(source, "r") do |file|
            uploader = connection.get_chunked_uploader(file, file.stat.size)
            # Push chunk_size MiB per call until the whole file is uploaded.
            while uploader.offset < uploader.total_size
              with_retries { uploader.upload(1024**2 * chunk_size) }
            end
          end

          # Commit the uploaded chunks to their final path.
          with_retries { uploader.finish(target) }
        end
      rescue => err
        raise Error.wrap(err, "Upload Failed!")
      end

      # Retries the given block up to #max_retries times, sleeping
      # #retry_waitsec between attempts; re-raises once exhausted.
      def with_retries
        retries = 0
        begin
          yield
        rescue StandardError => err
          retries += 1
          raise if retries > max_retries

          Logger.info Error.wrap(err, "Retry ##{retries} of #{max_retries}.")
          sleep(retry_waitsec)
          retry
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."

        connection.file_delete(remote_path_for(package))
      end

      # Full path of the session cache file; a relative #cache_path is
      # expanded against Config.root_path.
      def cached_file
        base =
          if cache_path.start_with?("/")
            cache_path
          else
            File.join(Config.root_path, cache_path)
          end
        File.join(base, api_key + api_secret)
      end

      ##
      # Serializes and writes the Dropbox session to a cache file
      def write_cache!(session)
        FileUtils.mkdir_p File.dirname(cached_file)
        File.open(cached_file, "w") do |cache_file|
          cache_file.write(session.serialize)
        end
      end

      ##
      # Create a new session, write a serialized version of it to the
      # .cache directory, and return the session object
      def create_write_and_return_new_session!
        require "timeout"

        session = DropboxSession.new(api_key, api_secret)

        # grab the request token for session
        session.get_request_token

        template = Backup::Template.new(
          session: session, cached_file: cached_file
        )
        template.render("storage/dropbox/authorization_url.erb")

        # wait for user to hit 'return' to continue
        Timeout.timeout(180) { STDIN.gets }

        # this will raise an error if the user did not
        # visit the authorization_url and grant access
        #
        # get the access token from the server
        # this will be stored with the session in the cache file
        session.get_access_token

        template.render("storage/dropbox/authorized.erb")
        write_cache!(session)
        template.render("storage/dropbox/cache_file_written.erb")

        session
      rescue => err
        raise Error.wrap(err, "Could not create or authenticate a new session")
      end
    end
  end
end
# frozen_string_literal: true

require "net/ftp"

module Backup
  module Storage
    # Stores backup packages on a remote server over FTP.
    class FTP < Base
      include Storage::Cycler

      ##
      # Server credentials
      attr_accessor :username, :password

      ##
      # Server IP Address and FTP port
      attr_accessor :ip, :port

      ##
      # Use passive mode?
      attr_accessor :passive_mode

      ##
      # Configure connection open and read timeouts.
      # Net::FTP's open_timeout and read_timeout will both be configured using
      # this setting.
      # @!attribute [rw] timeout
      #   @param [Integer|Float]
      #   @return [Integer|Float]
      attr_accessor :timeout

      def initialize(model, storage_id = nil)
        super

        @port         ||= 21
        @path         ||= "backups".dup
        @passive_mode ||= false
        @timeout      ||= nil
        path.sub!(%r{^~/}, "")
      end

      private

      ##
      # Establishes a connection to the remote server
      #
      # Note:
      # Since the FTP port is defined as a constant in the Net::FTP class, and
      # might be required to change by the user, we dynamically remove and
      # re-add the constant with the provided port value
      def connection
        Net::FTP.send(:remove_const, :FTP_PORT) if Net::FTP.const_defined?(:FTP_PORT)
        Net::FTP.send(:const_set, :FTP_PORT, port)

        # Ensure default passive mode to false.
        # Note: The default passive setting changed between Ruby 2.2 and 2.3
        if Net::FTP.respond_to?(:default_passive=)
          Net::FTP.default_passive = false
        end

        Net::FTP.open(ip, username, password) do |ftp|
          if timeout
            ftp.open_timeout = timeout
            ftp.read_timeout = timeout
          end
          ftp.passive = true if passive_mode
          yield ftp
        end
      end

      # Uploads each file of the current package to the remote path.
      def transfer!
        connection do |ftp|
          create_remote_path(ftp)

          package.filenames.each do |name|
            source = File.join(Config.tmp_path, name)
            target = File.join(remote_path, name)
            Logger.info "Storing '#{ip}:#{target}'..."
            ftp.put(source, target)
          end
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."

        remote_path = remote_path_for(package)
        connection do |ftp|
          package.filenames.each do |name|
            ftp.delete(File.join(remote_path, name))
          end

          ftp.rmdir(remote_path)
        end
      end

      ##
      # Creates (if they don't exist yet) all the directories on the remote
      # server in order to upload the backup file. Net::FTP does not support
      # paths to directories that don't yet exist when creating new
      # directories. Instead, we split the parts up in to an array (for each
      # '/') and loop through that to create the directories one by one.
      # Net::FTP raises an exception when the directory it's trying to create
      # already exists, so we have rescue it
      def create_remote_path(ftp)
        directories = []
        remote_path.split("/").each do |part|
          directories << part
          begin
            ftp.mkdir(directories.join("/"))
          rescue Net::FTPPermError
            # directory most likely exists already; ignore
          end
        end
      end
    end
  end
end
# frozen_string_literal: true

module Backup
  module Storage
    # Stores backup packages on the local filesystem.
    class Local < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      def initialize(model, storage_id = nil)
        super

        @path ||= "~/backups"
      end

      private

      # Moves (or copies, when other storages still need the package files)
      # each package file into the destination directory.
      def transfer!
        FileUtils.mkdir_p(remote_path)

        transfer_method = package_movable? ? :mv : :cp
        package.filenames.each do |name|
          source = File.join(Config.tmp_path, name)
          target = File.join(remote_path, name)
          Logger.info "Storing '#{target}'..."

          FileUtils.send(transfer_method, source, target)
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."

        FileUtils.rm_r(remote_path_for(package))
      end

      # expanded since this is a local path
      def remote_path(pkg = package)
        File.expand_path(super)
      end
      alias remote_path_for remote_path

      ##
      # If this Local Storage is not the last Storage for the Model,
      # force the transfer to use a *copy* operation and issue a warning.
      def package_movable?
        if self == model.storages.last
          true
        else
          Logger.warn Error.new(<<-EOS)
            Local File Copy Warning!
            The final backup file(s) for '#{model.label}' (#{model.trigger})
            will be *copied* to '#{remote_path}'
            To avoid this, when using more than one Storage, the 'Local' Storage
            should be added *last* so the files may be *moved* to their destination.
          EOS
          false
        end
      end
    end
  end
end
# frozen_string_literal: true

require "qiniu"

module Backup
  module Storage
    # Stores backup packages in a Qiniu bucket.
    class Qiniu < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Qiniu API credentials
      attr_accessor :access_key, :secret_key

      ##
      # Qiniu bucket name
      attr_accessor :bucket

      def initialize(model, storage_id = nil)
        super

        @path ||= "backups"

        check_configuration
        config_credentials
      end

      private

      # Uploads each file of the current package to the bucket.
      def transfer!
        package.filenames.each do |name|
          source = File.join(Config.tmp_path, name)
          target = File.join(remote_path, name)
          Logger.info "Storing '#{target}'..."

          ::Qiniu.upload_file(uptoken: ::Qiniu.generate_upload_token,
                              bucket: bucket,
                              file: source,
                              key: target)
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."
        remote_path = remote_path_for(package)
        package.filenames.each do |name|
          ::Qiniu.delete(bucket, File.join(remote_path, name))
        end
      end

      # Raises unless all required settings are present.
      def check_configuration
        required = %w[access_key secret_key bucket]

        raise Error, <<-EOS if required.map { |name| send(name) }.any?(&:nil?)
          Configuration Error
          #{required.map { |name| "##{name}" }.join(", ")} are all required
        EOS
      end

      # Registers the credentials with the Qiniu client library.
      def config_credentials
        ::Qiniu.establish_connection!(access_key: access_key,
                                      secret_key: secret_key)
      end
    end
  end
end