capistrano-ops 1.0.4 → 1.0.7
This diff shows the contents of publicly released package versions as published to their public registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear there.
- checksums.yaml +4 -4
- data/capistrano-ops.gemspec +2 -2
- data/lib/capistrano/ops/rails/lib/backup/api.rb +2 -2
- data/lib/capistrano/ops/rails/lib/backup/s3.rb +183 -1
- data/lib/capistrano/ops/rails/lib/backup/s3_helper.rb +16 -0
- data/lib/capistrano/ops/rails/lib/tasks/pg/dump.rake +15 -12
- data/lib/capistrano/ops/rails/lib/tasks/pg/postgres_helper.rb +33 -33
- data/lib/capistrano/ops/rails/lib/tasks/storage/backup.rake +27 -37
- data/lib/capistrano/ops/rails/lib/tasks/storage/remove_old_backups.rake +2 -2
- data/lib/capistrano/ops/rails/lib/tasks/storage/storage_helper.rb +69 -0
- data/lib/capistrano/ops/version.rb +1 -1
- metadata +7 -6
checksums.yaml
CHANGED
```diff
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: …
-  data.tar.gz: …
+  metadata.gz: 8f235992d4982cea3d663c6bd2616becfd54a4f772d199c93c76d3f27b7b359a
+  data.tar.gz: af9e2c333ff0a24fb9eff2bbc87ef38f6ac5d014b4e975ac685542ad075f8544
 SHA512:
-  metadata.gz: …
-  data.tar.gz: …
+  metadata.gz: 7046680c7e931b6942785f2539215549a3bc5f20a32d4a1a2226a6bf2db684b019124cd409726bdb839295265b488abf60f340ef9311f8f794b8d576fe3b79c3
+  data.tar.gz: e0499b85de6f9bc85463433ab80fab5fccfa33489940d21d8578fa1e1a71d9ad94bcac8c7e542f4c2e8982ab5f601d986445175643646dc74769f3810c1a9157
```
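These checksums cover the two archives inside the `.gem` package and can be verified against a locally fetched copy. A minimal sketch, assuming `gem fetch capistrano-ops -v 1.0.7` has been run in the current directory (expected digests taken from the diff above):

```ruby
require 'digest'
require 'rubygems/package'

# SHA256 values from checksums.yaml in the 1.0.7 release
expected = {
  'metadata.gz' => '8f235992d4982cea3d663c6bd2616becfd54a4f772d199c93c76d3f27b7b359a',
  'data.tar.gz' => 'af9e2c333ff0a24fb9eff2bbc87ef38f6ac5d014b4e975ac685542ad075f8544'
}

# A .gem file is a plain tar containing metadata.gz, data.tar.gz, checksums.yaml.gz
File.open('capistrano-ops-1.0.7.gem', 'rb') do |io|
  Gem::Package::TarReader.new(io).each do |entry|
    next unless expected.key?(entry.full_name)

    actual = Digest::SHA256.hexdigest(entry.read)
    puts "#{entry.full_name}: #{actual == expected[entry.full_name] ? 'OK' : 'MISMATCH'}"
  end
end
```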
data/capistrano-ops.gemspec
CHANGED
```diff
@@ -20,12 +20,12 @@ Gem::Specification.new do |s|
   s.require_paths = ['lib']
 
   s.required_ruby_version = '>= 2.7.0', '< 3.3.0'
-  s.add_dependency 'aws-sdk-s3', '~> 1.…
+  s.add_dependency 'aws-sdk-s3', '~> 1.175'
   s.add_dependency 'faraday'
   s.add_dependency 'nokogiri'
   s.add_dependency 'rails'
   s.add_development_dependency 'bundler', '~> 2.4.12'
-  s.add_development_dependency 'rake', '~> …
+  s.add_development_dependency 'rake', '~> 12.3'
   s.add_development_dependency 'rubocop', '~> 1.56.2' # rubocop ruby
   s.add_development_dependency 'rubocop-performance', '~> 1.19.0' # speed up rubocop
 end
```
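Both bumped constraints use RubyGems' pessimistic operator: `~> 1.175` permits any `1.x` release at or above 1.175, and `~> 12.3` any `12.x` at or above 12.3. This is easy to confirm directly:

```ruby
require 'rubygems'

req = Gem::Requirement.new('~> 1.175')
req.satisfied_by?(Gem::Version.new('1.175.0')) # => true
req.satisfied_by?(Gem::Version.new('1.200'))   # => true  (still within 1.x)
req.satisfied_by?(Gem::Version.new('2.0'))     # => false (next major excluded)
```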
data/lib/capistrano/ops/rails/lib/backup/api.rb
CHANGED

```diff
@@ -9,11 +9,11 @@ module Backup
       self.provider_config = provider_config
     end
 
-    def upload(backup_file, filename)
+    def upload(backup_file, filename, type = 'file')
      case backup_provider
      when 's3'
        s3 = Backup::S3.new(**provider_config)
-        s3.…
+        s3.upload_stream(backup_file, filename, type)
      when 'scp'
        p 'SCP backup not implemented yet'
      when 'rsync'
```
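The new `type` argument selects between the two S3 streaming paths added below in `Backup::S3`. A usage sketch (paths and timestamps are illustrative; provider and credentials come from the `BACKUP_PROVIDER` / `S3_*` environment variables):

```ruby
provider = Backup::Api.new

# Single file: streamed to S3 via multipart upload
provider.upload('/var/backups/app_1733400000.dump', 'app_1733400000.dump', 'file')

# Whole folder: tarred and gzipped on the fly, no local archive written
provider.upload('/var/www/app/shared/storage', 'storage_1733400000.tar.gz', 'folder')
```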
data/lib/capistrano/ops/rails/lib/backup/s3.rb
CHANGED

```diff
@@ -2,10 +2,13 @@
 
 module Backup
   require 'aws-sdk-s3'
+  require 'rubygems/package'
+  require 'zlib'
+  require 'find'
   require 'capistrano/ops/rails/lib/backup/s3_helper'
   class S3
     include Backup::S3Helper
-    attr_accessor :endpoint, :region, :access_key_id, :secret_access_key, :s3_resource
+    attr_accessor :endpoint, :region, :access_key_id, :secret_access_key, :s3_resource, :s3_client
 
     def initialize(endpoint: ENV['S3_BACKUP_ENDPOINT'], region: ENV['S3_BACKUP_REGION'], access_key_id: ENV['S3_BACKUP_KEY'],
                    secret_access_key: ENV['S3_BACKUP_SECRET'])
@@ -22,6 +25,7 @@ module Backup
       }
       configuration[:endpoint] = endpoint unless endpoint.nil?
       self.s3_resource = Aws::S3::Resource.new(configuration)
+      self.s3_client = Aws::S3::Client.new(configuration)
     end
 
     def upload(backup_file, key)
@@ -34,6 +38,184 @@ module Backup
       'File uploaded to S3'
     end
 
+    def upload_stream(backup_file, key, type)
+      if type == 'folder'
+        upload_folder_as_tar_gz_stream(backup_file, key)
+      else
+        upload_file_as_stream(backup_file, key)
+      end
+    end
+
+    def upload_file_as_stream(file_path, key)
+      bucket = ENV['S3_BACKUP_BUCKET']
+      # Calculate total size of the file to be uploaded
+      total_size = File.size(file_path)
+      chunk_size = calculate_chunk_size(total_size)
+
+      uploaded_size = 0
+
+      # Initiate multipart upload
+      multipart_upload = s3_client.create_multipart_upload(bucket: bucket, key: key)
+
+      # Upload the tar.gz data from the file in parts
+      part_number = 1
+      parts = []
+      last_logged_progress = 0
+
+      begin
+        File.open(file_path, 'rb') do |file|
+          while (part = file.read(chunk_size)) # Read calculated chunk size
+            part_upload = s3_client.upload_part(
+              bucket: bucket,
+              key: key,
+              upload_id: multipart_upload.upload_id,
+              part_number: part_number,
+              body: part
+            )
+            parts << { part_number: part_number, etag: part_upload.etag }
+            uploaded_size += part.size
+            part_number += 1
+
+            progress = (uploaded_size.to_f / total_size * 100).round
+            if progress >= last_logged_progress + 10
+              puts "Upload progress: #{progress}% complete"
+              last_logged_progress = progress
+            end
+          end
+        end
+
+        # Complete multipart upload
+        s3_client.complete_multipart_upload(
+          bucket: bucket,
+          key: key,
+          upload_id: multipart_upload.upload_id,
+          multipart_upload: { parts: parts }
+        )
+        puts 'Completed multipart upload'
+      rescue StandardError => e
+        # Abort multipart upload in case of error
+        s3_client.abort_multipart_upload(
+          bucket: bucket,
+          key: key,
+          upload_id: multipart_upload.upload_id
+        )
+        puts "Aborted multipart upload due to error: #{e.message}"
+        raise e
+      end
+
+      'File uploaded to S3 as tar.gz'
+    rescue StandardError => e
+      puts "Upload failed: #{e.message}"
+      raise e
+    end
+
+    # rubocop:disable Metrics/MethodLength
+    def upload_folder_as_tar_gz_stream(folder_path, key)
+      bucket = ENV['S3_BACKUP_BUCKET']
+
+      # Calculate total size of the files to be uploaded
+      total_size = calculate_total_size(folder_path)
+      chunk_size = calculate_chunk_size(total_size)
+
+      # Create a pipe to stream data
+      read_io, write_io = IO.pipe
+      read_io.binmode
+      write_io.binmode
+
+      uploaded_size = 0
+
+      # Start a thread to write the tar.gz data to the pipe
+      writer_thread = start_writer_thread(folder_path, write_io)
+
+      # Initiate multipart upload
+      multipart_upload = s3_client.create_multipart_upload(bucket: bucket, key: key)
+
+      # Upload the tar.gz data from the pipe in parts
+      part_number = 1
+      parts = []
+      last_logged_progress = 0
+
+      begin
+        while (part = read_io.read(chunk_size)) # Read calculated chunk size
+          part_upload = s3_client.upload_part(
+            bucket: bucket,
+            key: key,
+            upload_id: multipart_upload.upload_id,
+            part_number: part_number,
+            body: part
+          )
+          parts << { part_number: part_number, etag: part_upload.etag }
+          uploaded_size += part.size
+          part_number += 1
+
+          progress = (uploaded_size.to_f / total_size * 100).round
+          if progress >= last_logged_progress + 10
+            puts "Upload progress: #{progress}% complete"
+            last_logged_progress = progress
+          end
+        end
+
+        # Complete multipart upload
+        s3_client.complete_multipart_upload(
+          bucket: bucket,
+          key: key,
+          upload_id: multipart_upload.upload_id,
+          multipart_upload: { parts: parts }
+        )
+        puts 'Completed multipart upload'
+      rescue StandardError => e
+        # Abort multipart upload in case of error
+        s3_client.abort_multipart_upload(
+          bucket: bucket,
+          key: key,
+          upload_id: multipart_upload.upload_id
+        )
+        puts "Aborted multipart upload due to error: #{e.message}"
+        raise e
+      ensure
+        read_io.close unless read_io.closed?
+        writer_thread.join
+      end
+
+      'Folder uploaded to S3 as tar.gz'
+    rescue StandardError => e
+      puts "Upload failed: #{e.message}"
+      raise e
+    end
+    # rubocop:enable Metrics/MethodLength
+
+    def start_writer_thread(folder_path, write_io)
+      Thread.new do
+        parent_folder = File.dirname(folder_path)
+        folder_name = File.basename(folder_path)
+
+        Zlib::GzipWriter.wrap(write_io) do |gz|
+          Gem::Package::TarWriter.new(gz) do |tar|
+            Dir.chdir(parent_folder) do
+              Find.find(folder_name) do |file_path|
+                if File.directory?(file_path)
+                  tar.mkdir(file_path, File.stat(file_path).mode)
+                else
+                  mode = File.stat(file_path).mode
+                  tar.add_file_simple(file_path, mode, File.size(file_path)) do |tar_file|
+                    File.open(file_path, 'rb') do |f|
+                      while (chunk = f.read(1024 * 1024)) # Read in 1MB chunks
+                        tar_file.write(chunk)
+                      end
+                    end
+                  end
+                end
+              end
+            end
+          end
+        end
+      rescue StandardError => e
+        puts "Error writing tar.gz data: #{e.message}"
+      ensure
+        write_io.close unless write_io.closed?
+      end
+    end
+
     def remove_old_backups(basename, keep: 5)
       all_items = s3_resource.bucket(ENV['S3_BACKUP_BUCKET']).objects(prefix: basename).map do |item|
         { key: item.key, last_modified: item.last_modified }
```
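The folder path never materializes an archive on disk: `start_writer_thread` produces gzipped tar bytes into one end of an `IO.pipe` while the main thread consumes fixed-size chunks from the other end and ships each one as an upload part. A self-contained sketch of just that producer/consumer pattern, with the S3 wiring omitted:

```ruby
require 'zlib'
require 'rubygems/package'

# One end is written by a background thread, the other is read in chunks.
read_io, write_io = IO.pipe
read_io.binmode
write_io.binmode

writer = Thread.new do
  Zlib::GzipWriter.wrap(write_io) do |gz|
    Gem::Package::TarWriter.new(gz) do |tar|
      # Illustrative single entry; the gem walks a whole folder with Find.find
      tar.add_file_simple('hello.txt', 0o644, 5) { |f| f.write('hello') }
    end
  end
ensure
  write_io.close unless write_io.closed?
end

chunks = []
while (part = read_io.read(1024 * 1024)) # consume as the writer produces
  chunks << part                          # the gem sends each part to S3 here
end
writer.join
puts "streamed #{chunks.sum(&:bytesize)} bytes of tar.gz data"
```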
data/lib/capistrano/ops/rails/lib/backup/s3_helper.rb
CHANGED

```diff
@@ -21,5 +21,21 @@ module Backup
     def get_items_by_month(all_items, month)
       all_items.select { |item| item[:last_modified].strftime('%Y-%m') == month }
     end
+
+    def calculate_total_size(folder_path)
+      total_size = 0
+      Find.find(folder_path) do |file_path|
+        total_size += File.size(file_path) unless File.directory?(file_path)
+      end
+      total_size
+    end
+
+    def calculate_chunk_size(total_size)
+      max_chunks = 10_000
+      min_chunk_size = 20 * 1024 * 1024 # 20MB
+      max_chunk_size = 105 * 1024 * 1024 # 105MB
+      chunk_size = [total_size / max_chunks, min_chunk_size].max
+      [chunk_size, max_chunk_size].min
+    end
   end
 end
```
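`calculate_chunk_size` sizes parts so the multipart upload stays under S3's 10,000-part limit while clamping each part between 20 MB and 105 MB. Worked examples of the same arithmetic:

```ruby
chunk = ->(total) { [[total / 10_000, 20 * 1024 * 1024].max, 105 * 1024 * 1024].min }

chunk.call(1 * 1024**3)   # 1 GB   -> 20 MB floor (1 GB / 10_000 is only ~105 KB)
chunk.call(500 * 1024**3) # 500 GB -> ~51 MB (total / 10_000, between the bounds)
chunk.call(2 * 1024**4)   # 2 TB   -> 105 MB ceiling
```

Note that the 105 MB ceiling implicitly caps a single streamed object at roughly 1 TB (10,000 parts × 105 MB); beyond that the part limit would be exceeded.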
data/lib/capistrano/ops/rails/lib/tasks/pg/dump.rake
CHANGED

```diff
@@ -5,9 +5,12 @@ namespace :pg do
   include PostgresHelper
 
   task :dump do
-    backup_path = …
-    backups_enabled = …
-    external_backup = …
+    backup_path = configs[:backup_path]
+    backups_enabled = configs[:backups_enabled]
+    external_backup = configs[:external_backup]
+    database = configs[:database]
+    date = Time.now.to_i
+    filename = "#{database}_#{date}.dump"
 
     unless backups_enabled
       puts 'dump: Backups are disabled'
@@ -15,26 +18,26 @@ namespace :pg do
     end
 
     notification = Notification::Api.new
-    commandlist = dump_cmd(…
+    commandlist = dump_cmd(configs)
 
     system "mkdir -p #{backup_path}" unless Dir.exist?(backup_path)
 
     result = system(commandlist)
 
     if ENV['BACKUP_PROVIDER'].present? && external_backup && result
-      puts "Uploading #{…
+      puts "Uploading #{filename} to #{ENV['BACKUP_PROVIDER']}..."
       provider = Backup::Api.new
       begin
-        provider.upload("#{…
-        puts "#{…
+        provider.upload("#{backup_path}/#{filename}", filename.to_s, 'file')
+        puts "#{filename} uploaded to #{ENV['BACKUP_PROVIDER']}"
       rescue StandardError => e
-        puts "#{…
+        puts "#{filename} upload failed: #{e.message}"
       end
     end
-    notification.send_backup_notification(result, title, content(result, { database: …
-    { date: …
-    puts result ? "Backup created: #{…
+    notification.send_backup_notification(result, title, content(result, { database: database, backup_path: backup_path, filename: filename }),
+                                          { date: date, backup_path: backup_path, database: database })
+    puts result ? "Backup created: #{backup_path}/#{filename} (#{size_str(File.size("#{backup_path}/#{filename}"))})" : 'Backup failed removing dump file'
 
-    system "rm #{…
+    system "rm #{backup_path}/#{filename}" unless result
   end
 end
```
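With the `configs` hash from `PostgresHelper` below, `dump_cmd(configs)` expands to a single shell pipeline. Roughly, for a hypothetical production app (all values illustrative):

```ruby
# Approximate command string produced by dump_cmd(configs):
"export PGPASSWORD='s3cret' && " \
"cd /var/www/app/shared/backups && " \
"pg_dump --no-acl --no-owner -d myapp_production -U deploy -h localhost -p 5432 " \
'> myapp_production_1733400000.dump'
```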
data/lib/capistrano/ops/rails/lib/tasks/pg/postgres_helper.rb
CHANGED

```diff
@@ -1,18 +1,18 @@
 # frozen_string_literal: true
 
 module PostgresHelper
-  def …
-  @…
-  … (ten further removed lines truncated in this diff view)
+  def configs
+    @configs ||= {
+      database: Rails.configuration.database_configuration[Rails.env]['database'],
+      username: Rails.configuration.database_configuration[Rails.env]['username'],
+      password: Rails.configuration.database_configuration[Rails.env]['password'],
+      hostname: Rails.configuration.database_configuration[Rails.env]['host'],
+      portnumber: Rails.configuration.database_configuration[Rails.env]['port'],
+      backup_path: Rails.root.join(Rails.env.development? ? 'tmp/backups' : '../../shared/backups').to_s,
+      backups_enabled: Rails.env.production? || ENV['BACKUPS_ENABLED'] == 'true',
+      external_backup: Rails.env.production? || ENV['EXTERNAL_BACKUP_ENABLED'] == 'true',
+      filename: "#{Rails.configuration.database_configuration[Rails.env]['database']}_#{Time.now.to_i}.dump"
+    }
   end
 
   def title
@@ -20,41 +20,41 @@ module PostgresHelper
   end
 
   def content(result, settings = {})
-    … (three removed lines truncated in this diff view)
+    database = settings[:database]
+    backup_path = settings[:backup_path]
+    filename = settings[:filename]
 
     messages = []
     if result
-      messages << "Backup of #{…
-      messages << "Backup path:\`#{…
+      messages << "Backup of #{database} successfully finished at #{Time.now}"
+      messages << "Backup path:\`#{backup_path}/#{filename}\`"
     else
-      messages << "Backup of #{…
+      messages << "Backup of #{database} failed at #{Time.now}"
     end
     messages.join("\n")
   end
 
   def dump_cmd(settings = {})
-    … (six removed lines truncated in this diff view)
+    hostname = settings[:hostname]
+    database = settings[:database]
+    username = settings[:username]
+    password = settings[:password]
+    portnumber = settings[:portnumber]
+    backup_path = settings[:backup_path]
 
-    …
+    date = Time.now.to_i
     options = []
-    options << " -d #{…
-    options << " -U #{…
-    options << " -h #{…
-    options << " -p #{…
+    options << " -d #{database}" if database.present?
+    options << " -U #{username}" if username.present?
+    options << " -h #{hostname}" if hostname.present?
+    options << " -p #{portnumber}" if portnumber.present?
 
-    …
+    filename = "#{database}_#{date}.dump"
 
     commandlist = []
-    commandlist << "export PGPASSWORD='#{…
-    commandlist << "cd #{…
-    commandlist << "pg_dump --no-acl --no-owner #{options.join('')} > #{…
+    commandlist << "export PGPASSWORD='#{password}'"
+    commandlist << "cd #{backup_path}"
+    commandlist << "pg_dump --no-acl --no-owner #{options.join('')} > #{filename}"
     commandlist.join(' && ')
   end
 
```
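One subtlety worth noting: `@configs ||=` memoizes the whole hash, so `configs[:filename]` keeps the timestamp of the first call for the life of the process — which is presumably why the `pg:dump` task above derives its own `date` and `filename` instead of reusing `configs[:filename]`. A quick check, assuming a Rails console in an app that uses the gem:

```ruby
helper = Object.new.extend(PostgresHelper)
first  = helper.configs[:filename] # e.g. "myapp_production_1733400000.dump"
sleep 2
second = helper.configs[:filename]
first == second # => true: the memoized hash still carries the original timestamp
```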
data/lib/capistrano/ops/rails/lib/tasks/storage/backup.rake
CHANGED

```diff
@@ -1,58 +1,48 @@
 # frozen_string_literal: true
 
+require_relative './storage_helper'
 namespace :storage do
-
-  @storage_path = Rails.root.join(Rails.env.development? ? 'storage' : '../../shared/storage').to_s
-  backups_enabled = Rails.env.production? || ENV['BACKUPS_ENABLED'] == 'true'
-  external_backup = Rails.env.production? || ENV['EXTERNAL_BACKUP_ENABLED'] == 'true'
+  include StorageHelper
 
   desc 'backup storage'
   task :backup do
+    backup_path = configuration[:backup_path]
+    storage_path = configuration[:storage_path]
+    backups_enabled = configuration[:backups_enabled]
+    external_backup = configuration[:external_backup]
+    keep_local_backups = configuration[:keep_local_backups]
+    backup_provider = configuration[:backup_provider]
     unless backups_enabled
       puts 'storage: Backups are disabled'
       exit(0)
     end
     notification = Notification::Api.new
 
-    … (eight removed lines truncated in this diff view)
-    puts "Uploading #{@filename} to #{…
+    response = false
+    if keep_local_backups
+      puts "Creating backup of storage folder at #{Time.now}"
+      response = create_local_backup(@filename, storage_path, backup_path)
+    end
+    if backup_provider.present? && external_backup
+      @date = Time.now.to_i
+      @filename = "storage_#{@date}.tar.gz"
+      puts "Uploading #{@filename} to #{backup_provider}..."
       provider = Backup::Api.new
       begin
-        … (two removed lines truncated in this diff view)
+        if keep_local_backups
+          provider.upload("#{backup_path}/#{@filename}", @filename.to_s, 'file')
+        else
+          provider.upload(storage_path, @filename.to_s, 'folder')
+          response = true
+        end
+        puts "#{@filename} uploaded to #{backup_provider}" if response
       rescue StandardError => e
         puts "#{@filename} upload failed: #{e.message}"
+        response = false
       end
     end
-    notification.send_backup_notification(response, title, message(response), { date: date, backup_path: @backup_path, database: 'storage' })
-  end
-
-  def title
-    ENV['DEFAULT_URL'] || "#{Rails.env} Backup"
-  end
-
-  def message(result=false)
-    messages = []
-    if result
-      messages << "Backup of storage folder successfully finished at #{Time.now}"
-      messages << "Backup path:\`#{@backup_path}/#{@filename}\`"
-    else
-      messages << "Backup of storage folder failed at #{Time.now}"
-    end
-    messages.join("\n")
-  end
 
-  … (two removed lines truncated in this diff view)
-    e = (Math.log(size) / Math.log(1024)).floor
-    s = format('%.2f', size.to_f / 1024**e)
-    s.sub(/\.?0*$/, units[e])
+    notification.send_backup_notification(response, title, message(response, { backup_path: backup_path, filename: @filename }),
+                                          { date: @date, backup_path: @backup_path, database: 'storage' })
   end
 end
```
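The rewritten task now supports two strategies: with `KEEP_LOCAL_STORAGE_BACKUPS` it tars the storage folder into `backup_path` and uploads the resulting archive as a `'file'`; without it, the folder is streamed to the provider as a `'folder'` with no local archive. A sketch of driving it programmatically, assuming the host app loads the gem's rake tasks:

```ruby
require 'rake'
Rails.application.load_tasks

ENV['BACKUPS_ENABLED'] = 'true'
ENV['BACKUP_PROVIDER'] = 's3'
ENV['KEEP_LOCAL_STORAGE_BACKUPS'] = 'false' # stream the folder, keep no local tarball
Rake::Task['storage:backup'].invoke
```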
data/lib/capistrano/ops/rails/lib/tasks/storage/remove_old_backups.rake
CHANGED

```diff
@@ -8,8 +8,8 @@ namespace :storage do
   local_backup = Rails.env.production?
   local_backup = ENV['KEEP_LOCAL_STORAGE_BACKUPS'] == 'true' if ENV['KEEP_LOCAL_STORAGE_BACKUPS'].present?
 
-  @env_local_no = ENV['NUMBER_OF_LOCAL_BACKUPS'].…
-  @env_external_no = ENV['NUMBER_OF_EXTERNAL_BACKUPS'].…
+  @env_local_no = ENV['NUMBER_OF_LOCAL_BACKUPS'].presence
+  @env_external_no = ENV['NUMBER_OF_EXTERNAL_BACKUPS'].presence
   @total_local_backups_no = (@env_local_no || ENV['NUMBER_OF_BACKUPS'] || 7).to_i
   @total_external_backups_no = (@env_external_no || ENV['NUMBER_OF_BACKUPS'] || 7).to_i
   desc 'remove old storage backups'
```
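`String#presence` (ActiveSupport) returns `nil` for blank strings, so an env var that is set but empty now falls through to `NUMBER_OF_BACKUPS` (or the default of 7) instead of, say, coercing an empty string straight to a count of 0:

```ruby
require 'active_support/core_ext/object/blank'

''.presence               # => nil
'5'.presence              # => "5"
(''.presence || '7').to_i # => 7 (empty var falls through to the default)
''.to_i                   # => 0 (what a bare to_i on the env var would give)
```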
data/lib/capistrano/ops/rails/lib/tasks/storage/storage_helper.rb
ADDED

```diff
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+module StorageHelper
+  def configuration
+    @configuration ||=
+      {
+        backup_path: path_resolver('backups'),
+        storage_path: path_resolver('storage'),
+        backups_enabled: env_or_production('BACKUPS_ENABLED'),
+        external_backup: env_or_production('EXTERNAL_BACKUP_ENABLED'),
+        keep_local_backups: env_or_production('KEEP_LOCAL_STORAGE_BACKUPS'),
+        backup_provider: ENV['BACKUP_PROVIDER']
+      }
+  end
+
+  def title
+    ENV['DEFAULT_URL'] || "#{Rails.env} Backup"
+  end
+
+  def message(result = false, settings = {})
+    @backup_path = settings[:backup_path]
+    @filename = settings[:filename]
+    messages = []
+    if result
+      messages << "Backup of storage folder successfully finished at #{Time.now}"
+      messages << "Backup path:\`#{@backup_path}/#{@filename}\`"
+    else
+      messages << "Backup of storage folder failed at #{Time.now}"
+    end
+    messages.join("\n")
+  end
+
+  def backup_cmd(settings = {})
+    @backup_path = settings[:backup_path]
+    @date = Time.now.to_i
+    @filename = "storage_#{@date}.tar.gz"
+    FileUtils.mkdir_p(@backup_path) unless Dir.exist?(@backup_path)
+    "tar -zcf #{@backup_path}/#{@filename} -C #{settings[:storage_path]} ."
+  end
+
+  def size_str(size)
+    units = %w[B KB MB GB TB]
+    e = (Math.log(size) / Math.log(1024)).floor
+    s = format('%.2f', size.to_f / 1024**e)
+    s.sub(/\.?0*$/, units[e])
+  end
+
+  def create_local_backup(filename, storage_path, backup_path)
+    FileUtils.mkdir_p(backup_path) unless Dir.exist?(backup_path)
+    response = system(backup_cmd(backup_path: backup_path, storage_path: storage_path))
+    FileUtils.rm_rf("#{@backup_path}/#{filename}") unless response
+    puts response ? "Backup created: #{backup_path}/#{@filename} (#{size_str(File.size("#{@backup_path}/#{@filename}"))})" : 'Backup failed removing dump file'
+    response
+  end
+
+  private
+
+  def env_or_production(env_var, default: Rails.env.production?)
+    if ENV.key?(env_var)
+      ENV[env_var] == 'true'
+    else
+      default
+    end
+  end
+
+  def path_resolver(folder)
+    Rails.root.join(Rails.env.development? ? 'tmp' : '../../shared', folder).to_s
+  end
+end
```
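Two behaviors of the new helper are worth noting: in `env_or_production` an explicitly set env var always wins over the production default, and `size_str` trims trailing zeros after the `%.2f` formatting. Expected values, assuming a Rails console on an object that includes `StorageHelper`:

```ruby
# env_or_production: explicit env beats the Rails.env.production? default
ENV.delete('BACKUPS_ENABLED')
env_or_production('BACKUPS_ENABLED') # => Rails.env.production?
ENV['BACKUPS_ENABLED'] = 'false'
env_or_production('BACKUPS_ENABLED') # => false, even in production

# size_str: "%.2f" then trailing-zero trim
size_str(532)         # => "532B"
size_str(1536)        # => "1.5KB"
size_str(3 * 1024**3) # => "3GB"
```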
metadata
CHANGED
```diff
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: capistrano-ops
 version: !ruby/object:Gem::Version
-  version: 1.0.4
+  version: 1.0.7
 platform: ruby
 authors:
 - Florian Crusius
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-…
+date: 2024-12-05 00:00:00.000000000 Z
 dependencies:
@@ -16,14 +16,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.…
+        version: '1.175'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-        version: '1.…
+        version: '1.175'
 - !ruby/object:Gem::Dependency
   name: faraday
   requirement: !ruby/object:Gem::Requirement
@@ -86,14 +86,14 @@ dependencies:
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-        version: '…
+        version: '12.3'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-        version: '…
+        version: '12.3'
 - !ruby/object:Gem::Dependency
   name: rubocop
   requirement: !ruby/object:Gem::Requirement
@@ -200,6 +200,7 @@ files:
 - lib/capistrano/ops/rails/lib/tasks/pg/remove_old_dumps.rake
 - lib/capistrano/ops/rails/lib/tasks/storage/backup.rake
 - lib/capistrano/ops/rails/lib/tasks/storage/remove_old_backups.rake
+- lib/capistrano/ops/rails/lib/tasks/storage/storage_helper.rb
 - lib/capistrano/ops/task_loader.rb
 - lib/capistrano/ops/version.rb
 - lib/capistrano/ops/whenever.rb
```