backedup 5.0.0.beta.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144)
  1. checksums.yaml +7 -0
  2. data/LICENSE +19 -0
  3. data/README.md +33 -0
  4. data/bin/backedup +5 -0
  5. data/bin/docker_test +24 -0
  6. data/lib/backup/archive.rb +169 -0
  7. data/lib/backup/binder.rb +18 -0
  8. data/lib/backup/cleaner.rb +112 -0
  9. data/lib/backup/cli.rb +370 -0
  10. data/lib/backup/cloud_io/base.rb +38 -0
  11. data/lib/backup/cloud_io/cloud_files.rb +296 -0
  12. data/lib/backup/cloud_io/gcs.rb +121 -0
  13. data/lib/backup/cloud_io/s3.rb +253 -0
  14. data/lib/backup/cloud_io/swift.rb +96 -0
  15. data/lib/backup/compressor/base.rb +32 -0
  16. data/lib/backup/compressor/bzip2.rb +35 -0
  17. data/lib/backup/compressor/custom.rb +49 -0
  18. data/lib/backup/compressor/gzip.rb +73 -0
  19. data/lib/backup/compressor/pbzip2.rb +45 -0
  20. data/lib/backup/config/dsl.rb +102 -0
  21. data/lib/backup/config/helpers.rb +137 -0
  22. data/lib/backup/config.rb +118 -0
  23. data/lib/backup/database/base.rb +86 -0
  24. data/lib/backup/database/mongodb.rb +186 -0
  25. data/lib/backup/database/mysql.rb +191 -0
  26. data/lib/backup/database/openldap.rb +93 -0
  27. data/lib/backup/database/postgresql.rb +164 -0
  28. data/lib/backup/database/redis.rb +176 -0
  29. data/lib/backup/database/riak.rb +79 -0
  30. data/lib/backup/database/sqlite.rb +55 -0
  31. data/lib/backup/encryptor/base.rb +27 -0
  32. data/lib/backup/encryptor/gpg.rb +737 -0
  33. data/lib/backup/encryptor/open_ssl.rb +74 -0
  34. data/lib/backup/errors.rb +53 -0
  35. data/lib/backup/logger/console.rb +48 -0
  36. data/lib/backup/logger/fog_adapter.rb +25 -0
  37. data/lib/backup/logger/logfile.rb +131 -0
  38. data/lib/backup/logger/syslog.rb +114 -0
  39. data/lib/backup/logger.rb +197 -0
  40. data/lib/backup/model.rb +472 -0
  41. data/lib/backup/notifier/base.rb +126 -0
  42. data/lib/backup/notifier/campfire.rb +61 -0
  43. data/lib/backup/notifier/command.rb +99 -0
  44. data/lib/backup/notifier/datadog.rb +104 -0
  45. data/lib/backup/notifier/flowdock.rb +99 -0
  46. data/lib/backup/notifier/hipchat.rb +116 -0
  47. data/lib/backup/notifier/http_post.rb +114 -0
  48. data/lib/backup/notifier/mail.rb +232 -0
  49. data/lib/backup/notifier/nagios.rb +65 -0
  50. data/lib/backup/notifier/pagerduty.rb +79 -0
  51. data/lib/backup/notifier/prowl.rb +68 -0
  52. data/lib/backup/notifier/pushover.rb +71 -0
  53. data/lib/backup/notifier/ses.rb +123 -0
  54. data/lib/backup/notifier/slack.rb +147 -0
  55. data/lib/backup/notifier/twitter.rb +55 -0
  56. data/lib/backup/notifier/zabbix.rb +60 -0
  57. data/lib/backup/package.rb +51 -0
  58. data/lib/backup/packager.rb +106 -0
  59. data/lib/backup/pipeline.rb +120 -0
  60. data/lib/backup/splitter.rb +73 -0
  61. data/lib/backup/storage/base.rb +66 -0
  62. data/lib/backup/storage/cloud_files.rb +156 -0
  63. data/lib/backup/storage/cycler.rb +70 -0
  64. data/lib/backup/storage/dropbox.rb +206 -0
  65. data/lib/backup/storage/ftp.rb +116 -0
  66. data/lib/backup/storage/gcs.rb +93 -0
  67. data/lib/backup/storage/local.rb +61 -0
  68. data/lib/backup/storage/qiniu.rb +65 -0
  69. data/lib/backup/storage/rsync.rb +246 -0
  70. data/lib/backup/storage/s3.rb +155 -0
  71. data/lib/backup/storage/scp.rb +65 -0
  72. data/lib/backup/storage/sftp.rb +80 -0
  73. data/lib/backup/storage/swift.rb +124 -0
  74. data/lib/backup/storage/webdav.rb +102 -0
  75. data/lib/backup/syncer/base.rb +67 -0
  76. data/lib/backup/syncer/cloud/base.rb +176 -0
  77. data/lib/backup/syncer/cloud/cloud_files.rb +81 -0
  78. data/lib/backup/syncer/cloud/local_file.rb +97 -0
  79. data/lib/backup/syncer/cloud/s3.rb +109 -0
  80. data/lib/backup/syncer/rsync/base.rb +50 -0
  81. data/lib/backup/syncer/rsync/local.rb +27 -0
  82. data/lib/backup/syncer/rsync/pull.rb +47 -0
  83. data/lib/backup/syncer/rsync/push.rb +201 -0
  84. data/lib/backup/template.rb +41 -0
  85. data/lib/backup/utilities.rb +234 -0
  86. data/lib/backup/version.rb +3 -0
  87. data/lib/backup.rb +145 -0
  88. data/templates/cli/archive +28 -0
  89. data/templates/cli/compressor/bzip2 +4 -0
  90. data/templates/cli/compressor/custom +7 -0
  91. data/templates/cli/compressor/gzip +4 -0
  92. data/templates/cli/config +123 -0
  93. data/templates/cli/databases/mongodb +15 -0
  94. data/templates/cli/databases/mysql +18 -0
  95. data/templates/cli/databases/openldap +24 -0
  96. data/templates/cli/databases/postgresql +16 -0
  97. data/templates/cli/databases/redis +16 -0
  98. data/templates/cli/databases/riak +17 -0
  99. data/templates/cli/databases/sqlite +11 -0
  100. data/templates/cli/encryptor/gpg +27 -0
  101. data/templates/cli/encryptor/openssl +9 -0
  102. data/templates/cli/model +26 -0
  103. data/templates/cli/notifier/zabbix +15 -0
  104. data/templates/cli/notifiers/campfire +12 -0
  105. data/templates/cli/notifiers/command +32 -0
  106. data/templates/cli/notifiers/datadog +57 -0
  107. data/templates/cli/notifiers/flowdock +16 -0
  108. data/templates/cli/notifiers/hipchat +16 -0
  109. data/templates/cli/notifiers/http_post +32 -0
  110. data/templates/cli/notifiers/mail +24 -0
  111. data/templates/cli/notifiers/nagios +13 -0
  112. data/templates/cli/notifiers/pagerduty +12 -0
  113. data/templates/cli/notifiers/prowl +11 -0
  114. data/templates/cli/notifiers/pushover +11 -0
  115. data/templates/cli/notifiers/ses +15 -0
  116. data/templates/cli/notifiers/slack +22 -0
  117. data/templates/cli/notifiers/twitter +13 -0
  118. data/templates/cli/splitter +7 -0
  119. data/templates/cli/storages/cloud_files +11 -0
  120. data/templates/cli/storages/dropbox +20 -0
  121. data/templates/cli/storages/ftp +13 -0
  122. data/templates/cli/storages/gcs +8 -0
  123. data/templates/cli/storages/local +8 -0
  124. data/templates/cli/storages/qiniu +12 -0
  125. data/templates/cli/storages/rsync +17 -0
  126. data/templates/cli/storages/s3 +16 -0
  127. data/templates/cli/storages/scp +15 -0
  128. data/templates/cli/storages/sftp +15 -0
  129. data/templates/cli/storages/swift +19 -0
  130. data/templates/cli/storages/webdav +13 -0
  131. data/templates/cli/syncers/cloud_files +22 -0
  132. data/templates/cli/syncers/rsync_local +20 -0
  133. data/templates/cli/syncers/rsync_pull +28 -0
  134. data/templates/cli/syncers/rsync_push +28 -0
  135. data/templates/cli/syncers/s3 +27 -0
  136. data/templates/general/links +3 -0
  137. data/templates/general/version.erb +2 -0
  138. data/templates/notifier/mail/failure.erb +16 -0
  139. data/templates/notifier/mail/success.erb +16 -0
  140. data/templates/notifier/mail/warning.erb +16 -0
  141. data/templates/storage/dropbox/authorization_url.erb +6 -0
  142. data/templates/storage/dropbox/authorized.erb +4 -0
  143. data/templates/storage/dropbox/cache_file_written.erb +10 -0
  144. metadata +1255 -0
@@ -0,0 +1,156 @@
1
require "backup/cloud_io/cloud_files"

module Backup
  module Storage
    # Stores backup packages in a Rackspace Cloud Files container.
    # Package files larger than +segment_size+ MiB are uploaded as
    # Static Large Objects (SLO); see #segment_size and #segments_container.
    class CloudFiles < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Rackspace CloudFiles Credentials
      attr_accessor :username, :api_key

      ##
      # Rackspace Auth URL (optional)
      attr_accessor :auth_url

      ##
      # Rackspace Service Net
      # (LAN-based transfers to avoid charges and improve performance)
      attr_accessor :servicenet

      ##
      # Rackspace Region (optional)
      attr_accessor :region

      ##
      # Rackspace Container Name
      attr_accessor :container

      ##
      # Rackspace Container Name for SLO Segments.
      # Required if #segment_size is set. Must be different from #container.
      attr_accessor :segments_container

      ##
      # SLO Segment size, specified in MiB.
      #
      # Each package file larger than +segment_size+
      # will be uploaded as a Static Large Objects (SLO).
      #
      # Defaults to 0 for backward compatibility (pre v.3.7.0),
      # since #segments_container would be required.
      #
      # Minimum: 1 (0 disables SLO support)
      # Maximum: 5120 (5 GiB)
      attr_accessor :segment_size

      ##
      # If set, all backup package files (including SLO segments) will be
      # scheduled for automatic removal by the server.
      #
      # The `keep` option should not be used if this is set,
      # unless you're transitioning from the `keep` option.
      attr_accessor :days_to_keep

      ##
      # Number of times to retry failed operations.
      #
      # Default: 10
      attr_accessor :max_retries

      ##
      # Time in seconds to pause before each retry.
      #
      # Default: 30
      attr_accessor :retry_waitsec

      ##
      # Additional options to pass along to fog.
      # e.g. Fog::Storage.new({ :provider => 'Rackspace' }.merge(fog_options))
      attr_accessor :fog_options

      # @param model [Backup::Model] the model being stored
      # @param storage_id [String, nil] optional identifier to distinguish
      #   multiple CloudFiles storages on the same model
      def initialize(model, storage_id = nil)
        super

        @servicenet ||= false
        @segment_size ||= 0
        @max_retries ||= 10
        @retry_waitsec ||= 30

        @path ||= "backups"
        # remote paths are container-relative; strip any leading slash
        path.sub!(/^\//, "")

        check_configuration
      end

      private

      # Lazily-built CloudIO adapter configured from this storage's settings.
      def cloud_io
        @cloud_io ||= CloudIO::CloudFiles.new(
          username: username,
          api_key: api_key,
          auth_url: auth_url,
          region: region,
          servicenet: servicenet,
          container: container,
          segments_container: segments_container,
          segment_size: segment_size,
          days_to_keep: days_to_keep,
          max_retries: max_retries,
          retry_waitsec: retry_waitsec,
          fog_options: fog_options
        )
      end

      # Uploads every file in the current package to the container.
      # When #days_to_keep is set, the server handles expiry, so the
      # package is flagged to be skipped by the Cycler.
      def transfer!
        package.filenames.each do |filename|
          src = File.join(Config.tmp_path, filename)
          dest = File.join(remote_path, filename)
          Logger.info "Storing '#{container}/#{dest}'..."
          cloud_io.upload(src, dest)
        end

        package.no_cycle = true if days_to_keep
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."

        remote_path = remote_path_for(package)
        objects = cloud_io.objects(remote_path)

        raise Error, "Package at '#{remote_path}' not found" if objects.empty?

        # SLO manifests must be deleted via the SLO API; plain objects
        # use a bulk delete.
        slo_objects, objects = objects.partition(&:slo?)
        cloud_io.delete_slo(slo_objects)
        cloud_io.delete(objects)
      end

      # Validates required settings and SLO constraints; raises Error
      # with a descriptive message on the first violation found.
      def check_configuration
        required = %w[username api_key container]
        raise Error, <<-EOS if required.map { |name| send(name) }.any?(&:nil?)
          Configuration Error
          #{required.map { |name| "##{name}" }.join(", ")} are all required
        EOS

        raise Error, <<-EOS if segment_size > 0 && segments_container.to_s.empty?
          Configuration Error
          #segments_container is required if #segment_size is > 0
        EOS

        raise Error, <<-EOS if container == segments_container
          Configuration Error
          #container and #segments_container must not be the same container.
        EOS

        raise Error, <<-EOS if segment_size > 5120
          Configuration Error
          #segment_size is too large (max 5120)
        EOS
      end
    end
  end
end
@@ -0,0 +1,70 @@
1
module Backup
  module Storage
    # Mixin for storages that keep a limited number of stored packages.
    # Tracks stored packages in a per-storage YAML data file and deletes
    # the oldest packages once the #keep limit is exceeded.
    module Cycler
      class Error < Backup::Error; end

      private

      # Adds the current package being stored to the YAML cycle data file
      # and will remove any old package file(s) when the storage limit
      # set by #keep is exceeded.
      #
      # #keep may be an Integer (max number of packages to retain) or a
      # Date/Time (packages older than that point are removed).
      def cycle!
        Logger.info "Cycling Started..."

        packages = yaml_load.unshift(package)
        cycled_packages = []

        if keep.is_a?(Date) || keep.is_a?(Time)
          cycled_packages = packages.select do |p|
            p.time_as_object < keep.to_time
          end
        else
          excess = packages.count - keep.to_i
          cycled_packages = packages.last(excess) if excess > 0
        end

        saved_packages = packages - cycled_packages
        # renamed block arg so it doesn't shadow the #package method
        cycled_packages.each { |pkg| delete_package pkg }

        yaml_save(saved_packages)
      end

      # Removes a single stored package via the storage's #remove!.
      # Packages flagged no_cycle (e.g. server-side expiry) are skipped.
      # Any error raised will be logged as a warning, not re-raised, so a
      # failed removal does not abort the backup run.
      def delete_package(package)
        remove!(package) unless package.no_cycle
      rescue => err
        Logger.warn Error.wrap(err, <<-EOS)
          There was a problem removing the following package:
          Trigger: #{package.trigger} :: Dated: #{package.time}
          Package included the following #{package.filenames.count} file(s):
          #{package.filenames.join("\n")}
        EOS
      end

      # Returns path to the YAML data file.
      # Named after the storage class (e.g. "CloudFiles.yml"), with the
      # storage_id appended (e.g. "CloudFiles-my_id.yml") when one is set,
      # so multiple storages of the same type don't share cycle data.
      def yaml_file
        @yaml_file ||= begin
          filename = self.class.to_s.split("::").last
          filename << "-#{storage_id}" if storage_id
          # BUGFIX: was a literal "#(unknown).yml", which ignored the name
          # built above and made every storage share one cycle file.
          File.join(Config.data_path, package.trigger, "#{filename}.yml")
        end
      end

      # Returns stored Package objects, sorted by #time descending (oldest last).
      # Returns an empty Array when the data file is missing or empty.
      def yaml_load
        if File.exist?(yaml_file) && !File.zero?(yaml_file)
          YAML.load_file(yaml_file).sort_by!(&:time).reverse!
        else
          []
        end
      end

      # Stores the given package objects to the YAML data file,
      # creating the parent directory if needed.
      def yaml_save(packages)
        FileUtils.mkdir_p(File.dirname(yaml_file))
        File.open(yaml_file, "w") do |file|
          file.write(packages.to_yaml)
        end
      end
    end
  end
end
@@ -0,0 +1,206 @@
1
require "dropbox_sdk"

module Backup
  module Storage
    # Stores backup packages in Dropbox using the chunked uploader,
    # caching the authorized session on disk so authorization is a
    # one-time interactive step.
    class Dropbox < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Dropbox API credentials
      attr_accessor :api_key, :api_secret

      ##
      # Path to store cached authorized session.
      #
      # Relative paths will be expanded using Config.root_path,
      # which by default is ~/Backup unless --root-path was used
      # on the command line or set in config.rb.
      #
      # By default, +cache_path+ is '.cache', which would be
      # '~/Backup/.cache/' if using the default root_path.
      attr_accessor :cache_path

      ##
      # Dropbox Access Type
      # Valid values are:
      #   :app_folder (default)
      #   :dropbox (full access)
      attr_accessor :access_type

      ##
      # Chunk size, specified in MiB, for the ChunkedUploader.
      attr_accessor :chunk_size

      ##
      # Number of times to retry failed operations.
      #
      # Default: 10
      attr_accessor :max_retries

      ##
      # Time in seconds to pause before each retry.
      #
      # Default: 30
      attr_accessor :retry_waitsec

      ##
      # Creates a new instance of the storage object
      def initialize(model, storage_id = nil)
        super

        @path ||= "backups"
        @cache_path ||= ".cache"
        @access_type ||= :app_folder
        @chunk_size ||= 4 # MiB
        @max_retries ||= 10
        @retry_waitsec ||= 30
        # remote paths are app/root-relative; strip any leading slash
        path.sub!(/^\//, "")
      end

      private

      ##
      # The initial connection to Dropbox will provide the user with an
      # authorization url. The user must open this URL and confirm that the
      # authorization successfully took place. If this is the case, then the
      # user hits 'enter' and the session will be properly established.
      # Immediately after establishing the session, the session will be
      # serialized and written to a cache file in +cache_path+.
      # The cached file will be used from that point on to re-establish a
      # connection with Dropbox at a later time. This allows the user to avoid
      # having to go to a new Dropbox URL to authorize over and over again.
      def connection
        return @connection if @connection

        session = cached_session
        unless session
          Logger.info "Creating a new session!"
          session = create_write_and_return_new_session!
        end

        # will raise an error if session not authorized
        @connection = DropboxClient.new(session, access_type)
      rescue => err
        raise Error.wrap(err, "Authorization Failed")
      end

      ##
      # Attempt to load a cached session.
      # Returns the deserialized session, or +false+ when no cache file
      # exists or the cached data cannot be read.
      def cached_session
        return false unless File.exist?(cached_file)

        begin
          session = DropboxSession.deserialize(File.read(cached_file))
          Logger.info "Session data loaded from cache!"
          session
        rescue => err
          Logger.warn Error.wrap(err, <<-EOS)
            Could not read session data from cache.
            Cache data might be corrupt.
          EOS
          false
        end
      end

      ##
      # Transfer each of the package files to Dropbox in chunks of +chunk_size+.
      # Each chunk will be retried +max_retries+ times, pausing +retry_waitsec+
      # between retries, if errors occur.
      def transfer!
        package.filenames.each do |filename|
          src = File.join(Config.tmp_path, filename)
          dest = File.join(remote_path, filename)
          Logger.info "Storing '#{dest}'..."

          uploader = nil
          File.open(src, "r") do |file|
            uploader = connection.get_chunked_uploader(file, file.stat.size)
            until uploader.offset >= uploader.total_size
              with_retries do
                uploader.upload(1024**2 * chunk_size)
              end
            end
          end

          with_retries do
            uploader.finish(dest)
          end
        end
      rescue => err
        raise Error.wrap(err, "Upload Failed!")
      end

      # Runs the given block, retrying on StandardError up to +max_retries+
      # times with a +retry_waitsec+ pause between attempts. Re-raises the
      # last error once the retry budget is exhausted.
      def with_retries
        attempts = 0
        begin
          yield
        rescue StandardError => err
          attempts += 1
          raise if attempts > max_retries

          Logger.info Error.wrap(err, "Retry ##{attempts} of #{max_retries}.")
          sleep(retry_waitsec)
          retry
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."

        connection.file_delete(remote_path_for(package))
      end

      # Full path of the session cache file. The file name combines the
      # api_key and api_secret so different credentials cache separately.
      def cached_file
        dir =
          if cache_path.start_with?("/")
            cache_path
          else
            File.join(Config.root_path, cache_path)
          end
        File.join(dir, api_key + api_secret)
      end

      ##
      # Serializes and writes the Dropbox session to a cache file
      def write_cache!(session)
        FileUtils.mkdir_p File.dirname(cached_file)
        File.open(cached_file, "w") do |cache_file|
          cache_file.write(session.serialize)
        end
      end

      ##
      # Create a new session, write a serialized version of it to the
      # .cache directory, and return the session object
      def create_write_and_return_new_session!
        require "timeout"

        session = DropboxSession.new(api_key, api_secret)

        # grab the request token for session
        session.get_request_token

        template = Backup::Template.new(
          session: session, cached_file: cached_file
        )
        template.render("storage/dropbox/authorization_url.erb")

        # wait for user to hit 'return' to continue
        Timeout.timeout(180) { STDIN.gets }

        # this will raise an error if the user did not
        # visit the authorization_url and grant access
        #
        # get the access token from the server
        # this will be stored with the session in the cache file
        session.get_access_token

        template.render("storage/dropbox/authorized.erb")
        write_cache!(session)
        template.render("storage/dropbox/cache_file_written.erb")

        session
      rescue => err
        raise Error.wrap(err, "Could not create or authenticate a new session")
      end
    end
  end
end
@@ -0,0 +1,116 @@
1
require "net/ftp"

module Backup
  module Storage
    # Stores backup packages on a remote server over FTP.
    class FTP < Base
      include Storage::Cycler

      ##
      # Server credentials
      attr_accessor :username, :password

      ##
      # Server IP Address and FTP port
      attr_accessor :ip, :port

      ##
      # Use passive mode?
      attr_accessor :passive_mode

      ##
      # Configure connection open and read timeouts.
      # Net::FTP's open_timeout and read_timeout will both be configured using
      # this setting.
      # @!attribute [rw] timeout
      #   @param [Integer|Float]
      #   @return [Integer|Float]
      attr_accessor :timeout

      def initialize(model, storage_id = nil)
        super

        @port ||= 21
        @path ||= "backups"
        @passive_mode ||= false
        @timeout ||= nil
        # paths are relative to the FTP login directory; strip a leading "~/"
        path.sub!(/^~\//, "")
      end

      private

      ##
      # Establishes a connection to the remote server and yields the
      # Net::FTP client to the given block.
      #
      # Note:
      # Since the FTP port is defined as a constant in the Net::FTP class, and
      # might be required to change by the user, we dynamically remove and
      # re-add the constant with the provided port value
      def connection
        Net::FTP.send(:remove_const, :FTP_PORT) if Net::FTP.const_defined?(:FTP_PORT)
        Net::FTP.send(:const_set, :FTP_PORT, port)

        # Ensure default passive mode to false.
        # Note: The default passive setting changed between Ruby 2.2 and 2.3
        Net::FTP.default_passive = false if Net::FTP.respond_to?(:default_passive=)

        Net::FTP.open(ip, username, password) do |ftp|
          if timeout
            ftp.open_timeout = timeout
            ftp.read_timeout = timeout
          end
          ftp.passive = true if passive_mode
          yield ftp
        end
      end

      # Uploads every file in the current package to the remote path,
      # creating the remote directory tree first.
      def transfer!
        connection do |ftp|
          create_remote_path(ftp)

          package.filenames.each do |filename|
            src = File.join(Config.tmp_path, filename)
            dest = File.join(remote_path, filename)
            Logger.info "Storing '#{ip}:#{dest}'..."
            ftp.put(src, dest)
          end
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."

        remote_path = remote_path_for(package)
        connection do |ftp|
          package.filenames.each do |filename|
            ftp.delete(File.join(remote_path, filename))
          end

          ftp.rmdir(remote_path)
        end
      end

      ##
      # Creates (if they don't exist yet) all the directories on the remote
      # server in order to upload the backup file. Net::FTP does not support
      # paths to directories that don't yet exist when creating new
      # directories. Instead, we split the parts up in to an array (for each
      # '/') and loop through that to create the directories one by one.
      # Net::FTP raises an exception when the directory it's trying to create
      # already exists, so we have rescue it
      def create_remote_path(ftp)
        built_parts = []
        remote_path.split("/").each do |part|
          built_parts << part
          begin
            ftp.mkdir(built_parts.join("/"))
          rescue Net::FTPPermError
            # directory already exists — safe to ignore
          end
        end
      end
    end
  end
end
@@ -0,0 +1,93 @@
1
# encoding: utf-8
require "backup/cloud_io/gcs"

module Backup
  module Storage
    # Stores backup packages in a Google Cloud Storage bucket via the
    # XML-interoperability API (HMAC keys) through fog.
    class GCS < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Google XML API credentials
      attr_accessor :google_storage_secret_access_key
      attr_accessor :google_storage_access_key_id

      ##
      # Google Cloud Storage bucket name
      attr_accessor :bucket

      ##
      # Number of times to retry failed operations.
      #
      # Default: 10
      attr_accessor :max_retries

      ##
      # Time in seconds to pause before each retry.
      #
      # Default: 30
      attr_accessor :retry_waitsec

      ##
      # Additional options to pass along to fog.
      # e.g. Fog::Storage.new({ :provider => 'AWS' }.merge(fog_options))
      attr_accessor :fog_options

      def initialize(model, storage_id = nil)
        super

        @max_retries ||= 10
        @retry_waitsec ||= 30
        @path ||= "backups"
        # remote paths are bucket-relative; strip any leading slash
        @path = @path.sub(%r{^/}, "")

        check_configuration
      end

      private

      # Lazily-built CloudIO adapter configured from this storage's settings.
      def cloud_io
        @cloud_io ||= CloudIO::GCS.new(
          google_storage_secret_access_key: google_storage_secret_access_key,
          google_storage_access_key_id: google_storage_access_key_id,
          bucket: bucket,
          max_retries: max_retries,
          retry_waitsec: retry_waitsec,
          fog_options: fog_options
        )
      end

      # Uploads every file in the current package to the bucket.
      def transfer!
        package.filenames.each do |filename|
          src = File.join(Config.tmp_path, filename)
          dest = File.join(remote_path, filename)
          Logger.info "Storing '#{bucket}/#{dest}'..."
          cloud_io.upload(src, dest)
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."

        remote_path = remote_path_for(package)
        objects = cloud_io.objects(remote_path)

        raise Error, "Package at '#{remote_path}' not found" if objects.empty?

        cloud_io.delete(objects)
      end

      # Validates that all required credentials and the bucket are set;
      # raises Error with a descriptive message otherwise.
      def check_configuration
        required =
          %w(google_storage_secret_access_key google_storage_access_key_id bucket)

        raise Error, <<-EOS if required.map { |name| send(name) }.any?(&:nil?)
          Configuration Error
          #{required.map { |name| "##{name}" }.join(", ")} are all required
        EOS
      end
    end
  end
end
@@ -0,0 +1,61 @@
1
module Backup
  module Storage
    # Stores backup packages on the local filesystem. When this is the
    # last storage for the model, package files are *moved* into place;
    # otherwise they are *copied* so later storages can still use them.
    class Local < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      def initialize(model, storage_id = nil)
        super

        @path ||= "~/backups"
      end

      private

      # Moves (or copies — see #package_movable?) every file in the
      # current package into the destination directory.
      def transfer!
        FileUtils.mkdir_p(remote_path)

        file_op = package_movable? ? :mv : :cp
        package.filenames.each do |filename|
          src = File.join(Config.tmp_path, filename)
          dest = File.join(remote_path, filename)
          Logger.info "Storing '#{dest}'..."

          FileUtils.send(file_op, src, dest)
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."

        FileUtils.rm_r(remote_path_for(package))
      end

      # expanded since this is a local path
      def remote_path(pkg = package)
        File.expand_path(super)
      end
      alias :remote_path_for :remote_path

      ##
      # If this Local Storage is not the last Storage for the Model,
      # force the transfer to use a *copy* operation and issue a warning.
      def package_movable?
        return true if self == model.storages.last

        Logger.warn Error.new(<<-EOS)
          Local File Copy Warning!
          The final backup file(s) for '#{model.label}' (#{model.trigger})
          will be *copied* to '#{remote_path}'
          To avoid this, when using more than one Storage, the 'Local' Storage
          should be added *last* so the files may be *moved* to their destination.
        EOS
        false
      end
    end
  end
end