capistrano-ops 1.0.4 → 1.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 6028f991e964a7ce6f029c3878b1c36b322802df80013019de85c1f9f9220f9f
- data.tar.gz: 1664aea997e5db3daa236ea04758cb68bd3b34262e29faa90fa41564b2e88aeb
+ metadata.gz: cb9a8459b0906318eb8149333da576558e4f232c35ca3bd453756a1ba4031036
+ data.tar.gz: e6e1342c6a847e029583db4824b1f69e63b687db53f42ce3f6e4e933d5d7aa27
  SHA512:
- metadata.gz: 99aba98f09eee78b13851eec6ad35dd142fe1252903af6f3c5faf36260c02c693ef6245eddef8dfdefaac9965c9836b08de2620bf449716e47ad096380e26d3b
- data.tar.gz: 69d303c90aad93e679ac797d90bf759f615e82431ef9a39008fad8c430aa66938cf1fd2b14dfb4249bc9c17bd8fbdf76786c00d0a1b226b3c588fe650024b1e8
+ metadata.gz: b8e3b49f6fc1fbc179713f06ec82a06b3e6bc953b256ad1f5e6b564ff47a28c9d3705e4e4b8add425de28aea1e0d5830fa055b42c9a6bc46d2244f5e645d3fb3
+ data.tar.gz: 19516277d0a171017ad63b526f602a47056ab469defe6d6b3e5292ffe9a1a0221ed8dedd5e2012d49ea4dca5c233eb283d11fd05df6111a723a9bf13e48d5867
capistrano-ops.gemspec CHANGED
@@ -20,12 +20,12 @@ Gem::Specification.new do |s|
  s.require_paths = ['lib']
 
  s.required_ruby_version = '>= 2.7.0', '< 3.3.0'
- s.add_dependency 'aws-sdk-s3', '~> 1.128'
+ s.add_dependency 'aws-sdk-s3', '~> 1.175'
  s.add_dependency 'faraday'
  s.add_dependency 'nokogiri'
  s.add_dependency 'rails'
  s.add_development_dependency 'bundler', '~> 2.4.12'
- s.add_development_dependency 'rake', '~> 10.0'
+ s.add_development_dependency 'rake', '~> 12.3'
  s.add_development_dependency 'rubocop', '~> 1.56.2' # rubocop ruby
  s.add_development_dependency 'rubocop-performance', '~> 1.19.0' # speed up rubocop
  end
lib/capistrano/ops/rails/lib/backup/api.rb CHANGED
@@ -9,11 +9,11 @@ module Backup
  self.provider_config = provider_config
  end
 
- def upload(backup_file, filename)
+ def upload(backup_file, filename, type = 'file')
  case backup_provider
  when 's3'
  s3 = Backup::S3.new(**provider_config)
- s3.upload(backup_file, filename)
+ s3.upload_stream(backup_file, filename, type)
  when 'scp'
  p 'SCP backup not implemented yet'
  when 'rsync'
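
The practical effect of this change: callers now declare what they are uploading, and folders take the new streaming path instead of a plain single-object upload. A minimal usage sketch (paths, names, and timestamps are hypothetical; the calls mirror the rake tasks later in this diff):

  # Assumes BACKUP_PROVIDER and the S3_BACKUP_* environment variables are set
  provider = Backup::Api.new
  provider.upload('/var/www/shared/backups/myapp_1734307200.dump', 'myapp_1734307200.dump', 'file')
  provider.upload('/var/www/shared/storage', 'storage_1734307200.tar.gz', 'folder') # tarred and gzipped on the fly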
lib/capistrano/ops/rails/lib/backup/s3.rb CHANGED
@@ -2,10 +2,15 @@
 
  module Backup
  require 'aws-sdk-s3'
+ require 'rubygems/package'
+ require 'zlib'
+ require 'find'
  require 'capistrano/ops/rails/lib/backup/s3_helper'
+
  class S3
  include Backup::S3Helper
- attr_accessor :endpoint, :region, :access_key_id, :secret_access_key, :s3_resource
+
+ attr_accessor :endpoint, :region, :access_key_id, :secret_access_key, :s3_resource, :s3_client
 
  def initialize(endpoint: ENV['S3_BACKUP_ENDPOINT'], region: ENV['S3_BACKUP_REGION'], access_key_id: ENV['S3_BACKUP_KEY'],
  secret_access_key: ENV['S3_BACKUP_SECRET'])
@@ -18,10 +23,10 @@ module Backup
  access_key_id: access_key_id,
  secret_access_key: secret_access_key,
  force_path_style: true
-
  }
  configuration[:endpoint] = endpoint unless endpoint.nil?
  self.s3_resource = Aws::S3::Resource.new(configuration)
+ self.s3_client = Aws::S3::Client.new(configuration)
  end
 
  def upload(backup_file, key)
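
The class now holds two AWS handles: the existing Aws::S3::Resource (still used by remove_old_backups below) and a low-level Aws::S3::Client, which exposes the multipart-upload operations the new streaming methods need. A construction sketch with hypothetical credentials; each keyword falls back to the S3_BACKUP_* environment variable shown in the initializer:

  s3 = Backup::S3.new(
    endpoint: 'https://s3.example.com', # optional, for S3-compatible stores; omit for AWS itself
    region: 'eu-central-1',
    access_key_id: 'example-key-id',
    secret_access_key: 'example-secret'
  )
  s3.s3_client # Aws::S3::Client, used below for create_multipart_upload / upload_part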
@@ -34,6 +39,228 @@ module Backup
  'File uploaded to S3'
  end
 
+ def upload_stream(backup_file, key, type)
+ if type == 'folder'
+ upload_folder_as_tar_gz_stream(backup_file, key)
+ else
+ upload_file_as_stream(backup_file, key)
+ end
+ end
+
+ # rubocop:disable Metrics/MethodLength
+ def upload_file_as_stream(file_path, key)
+ bucket = ENV['S3_BACKUP_BUCKET']
+ # Calculate total size of the file to be uploaded
+ total_size = File.size(file_path)
+ chunk_size = calculate_chunk_size(total_size)
+
+ uploaded_size = 0
+
+ # Upload the file data in parts; the multipart upload is initiated
+ # lazily when the first chunk is read. Declared here so it is still
+ # visible outside the File.open block.
+ multipart_upload = nil
+ part_number = 1
+ parts = []
+ last_logged_progress = 0
+ max_retry_time = 300 # 5 minutes in seconds
+ total_wait_time = 0
+
+ begin
+ File.open(file_path, 'rb') do |file|
+ while (part = file.read(chunk_size)) # Read calculated chunk size
+ retry_count = 0
+ begin
+ # Initiate multipart upload
+ multipart_upload ||= s3_client.create_multipart_upload(bucket: bucket, key: key)
+ part_upload = s3_client.upload_part(
+ bucket: bucket,
+ key: key,
+ upload_id: multipart_upload.upload_id,
+ part_number: part_number,
+ body: part
+ )
+ parts << { part_number: part_number, etag: part_upload.etag }
+ uploaded_size += part.size
+ part_number += 1
+
+ progress = (uploaded_size.to_f / total_size * 100).round
+ if progress >= last_logged_progress + 10
+ puts "Upload progress: #{progress}% complete"
+ last_logged_progress = progress
+ end
+ rescue StandardError => e
+ retry_count += 1
+ wait_time = 2**retry_count
+ total_wait_time += wait_time
+
+ if total_wait_time > max_retry_time
+ puts "Exceeded maximum retry time of #{max_retry_time / 60} minutes. Aborting upload."
+ raise e
+ end
+ puts "Error uploading part #{part_number}: #{e.message.split("\n").first} (Attempt #{retry_count})"
+ puts "Retry in #{wait_time} seconds"
+ sleep(wait_time) # Exponential backoff
+ puts 'Retrying upload part...'
+ retry
+ end
+ end
+ end
+
+ # Complete multipart upload
+ s3_client.complete_multipart_upload(
+ bucket: bucket,
+ key: key,
+ upload_id: multipart_upload.upload_id,
+ multipart_upload: { parts: parts }
+ )
+ puts 'Completed multipart upload'
+ rescue StandardError => e
+ # Abort multipart upload in case of error
+ if multipart_upload
+ s3_client.abort_multipart_upload(
+ bucket: bucket,
+ key: key,
+ upload_id: multipart_upload.upload_id
+ )
+ end
+ puts "Aborted multipart upload due to error: #{e.message}"
+ raise e
+ end
+
+ 'File uploaded to S3'
+ rescue StandardError => e
+ puts "Upload failed: #{e.message}"
+ raise e
+ end
+
+ def upload_folder_as_tar_gz_stream(folder_path, key)
+ bucket = ENV['S3_BACKUP_BUCKET']
+
+ # Calculate total size of the files to be uploaded
+ total_size = calculate_total_size(folder_path)
+ chunk_size = calculate_chunk_size(total_size)
+
+ # Create a pipe to stream data
+ read_io, write_io = IO.pipe
+ read_io.binmode
+ write_io.binmode
+
+ uploaded_size = 0
+
+ # Start a thread to write the tar.gz data to the pipe
+ writer_thread = start_writer_thread(folder_path, write_io)
+
+ # Upload the tar.gz data from the pipe in parts
+ part_number = 1
+ parts = []
+ last_logged_progress = 0
+ max_retry_time = 300 # 5 minutes in seconds
+ total_wait_time = 0
+
+ begin
+ while (part = read_io.read(chunk_size)) # Read calculated chunk size
+ retry_count = 0
+ begin
+ # Initiate multipart upload
+ multipart_upload ||= s3_client.create_multipart_upload(bucket: bucket, key: key)
+ part_upload = s3_client.upload_part(
+ bucket: bucket,
+ key: key,
+ upload_id: multipart_upload.upload_id,
+ part_number: part_number,
+ body: part
+ )
+ parts << { part_number: part_number, etag: part_upload.etag }
+ uploaded_size += part.size
+ part_number += 1
+
+ progress = (uploaded_size.to_f / total_size * 100).round
+ if progress >= last_logged_progress + 10
+ puts "Upload progress: #{progress}% complete"
+ last_logged_progress = progress
+ end
+ rescue StandardError => e
+ retry_count += 1
+ wait_time = 2**retry_count
+ total_wait_time += wait_time
+
+ if total_wait_time > max_retry_time
+ puts "Exceeded maximum retry time of #{max_retry_time / 60} minutes. Aborting upload."
+ raise e
+ end
+ puts "Error uploading part #{part_number}: #{e.message.split("\n").first} (Attempt #{retry_count})"
+ puts "Retry in #{wait_time} seconds"
+ sleep(wait_time) # Exponential backoff
+ puts 'Retrying upload part...'
+ retry
+ end
+ end
+
+ # Complete multipart upload
+ s3_client.complete_multipart_upload(
+ bucket: bucket,
+ key: key,
+ upload_id: multipart_upload.upload_id,
+ multipart_upload: { parts: parts }
+ )
+ puts 'Completed multipart upload'
+ rescue StandardError => e
+ # Abort multipart upload in case of error
+ if multipart_upload
+ s3_client.abort_multipart_upload(
+ bucket: bucket,
+ key: key,
+ upload_id: multipart_upload.upload_id
+ )
+ end
+ puts "Aborted multipart upload due to error: #{e.message}"
+ raise e
+ ensure
+ read_io.close unless read_io.closed?
+ writer_thread.join
+ end
+
+ 'Folder uploaded to S3 as tar.gz'
+ rescue StandardError => e
+ puts "Upload failed: #{e.message}"
+ raise e
+ end
+ # rubocop:enable Metrics/MethodLength
+
+ private
+
+ def start_writer_thread(folder_path, write_io)
+ Thread.new do
+ parent_folder = File.dirname(folder_path)
+ folder_name = File.basename(folder_path)
+
+ Zlib::GzipWriter.wrap(write_io) do |gz|
+ Gem::Package::TarWriter.new(gz) do |tar|
+ Dir.chdir(parent_folder) do
+ Find.find(folder_name) do |file_path|
+ if File.directory?(file_path)
+ tar.mkdir(file_path, File.stat(file_path).mode)
+ else
+ mode = File.stat(file_path).mode
+ tar.add_file_simple(file_path, mode, File.size(file_path)) do |tar_file|
+ File.open(file_path, 'rb') do |f|
+ while (chunk = f.read(1024 * 1024)) # Read in 1MB chunks
+ tar_file.write(chunk)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ rescue StandardError => e
+ puts "Error writing tar.gz data: #{e.message}"
+ ensure
+ write_io.close unless write_io.closed?
+ end
+ end
+
  def remove_old_backups(basename, keep: 5)
  all_items = s3_resource.bucket(ENV['S3_BACKUP_BUCKET']).objects(prefix: basename).map do |item|
  { key: item.key, last_modified: item.last_modified }
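
Both streaming methods share one pattern: the multipart upload is created lazily on the first chunk, each part's etag is collected into a { part_number:, etag: } list, failed parts are retried with exponential backoff (2, 4, 8, ... seconds) up to a cumulative five minutes, and the upload is then either completed with the part list or aborted so S3 does not retain (and bill for) orphaned parts. A hypothetical invocation of the new entry point:

  s3 = Backup::S3.new
  s3.upload_stream('/var/www/shared/backups/myapp.dump', 'myapp.dump', 'file')
  s3.upload_stream('/var/www/shared/storage', 'storage.tar.gz', 'folder') # streamed through IO.pipe, never written to local disk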
lib/capistrano/ops/rails/lib/backup/s3_helper.rb CHANGED
@@ -21,5 +21,21 @@ module Backup
  def get_items_by_month(all_items, month)
  all_items.select { |item| item[:last_modified].strftime('%Y-%m') == month }
  end
+
+ def calculate_total_size(folder_path)
+ total_size = 0
+ Find.find(folder_path) do |file_path|
+ total_size += File.size(file_path) unless File.directory?(file_path)
+ end
+ total_size
+ end
+
+ def calculate_chunk_size(total_size)
+ max_chunks = 10_000
+ min_chunk_size = 20 * 1024 * 1024 # 20MB
+ max_chunk_size = 105 * 1024 * 1024 # 105MB
+ chunk_size = [total_size / max_chunks, min_chunk_size].max
+ [chunk_size, max_chunk_size].min
+ end
  end
  end
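
These bounds line up with S3's multipart limits (at most 10,000 parts per upload; every part except the last must be at least 5 MiB): the 20 MB floor keeps retries cheap for small archives, while the 105 MB ceiling caps how much of the stream is held in memory per upload_part call. Worked numbers:

  calculate_chunk_size(1 * 1024**3)   # 1 GB:  1 GB / 10_000 is far below the floor -> 20 MB parts
  calculate_chunk_size(500 * 1024**3) # 500 GB: 500 GB / 10_000 is about 51 MB -> used as-is
  calculate_chunk_size(2 * 1024**4)   # 2 TB:  about 210 MB per part -> capped at 105 MB

Note the ceiling implies a practical maximum object size of roughly 1 TB (10,000 parts x 105 MB) for this scheme.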
lib/capistrano/ops/rails/lib/tasks/pg/dump.rake CHANGED
@@ -5,9 +5,12 @@ namespace :pg do
  include PostgresHelper
 
  task :dump do
- backup_path = configuration[:backup_path]
- backups_enabled = configuration[:backups_enabled]
- external_backup = configuration[:external_backup]
+ backup_path = configs[:backup_path]
+ backups_enabled = configs[:backups_enabled]
+ external_backup = configs[:external_backup]
+ database = configs[:database]
+ date = Time.now.to_i
+ filename = configs[:filename] # memoized in configs, so dump_cmd writes to the same name
 
  unless backups_enabled
  puts 'dump: Backups are disabled'
@@ -15,26 +18,26 @@ namespace :pg do
  end
 
  notification = Notification::Api.new
- commandlist = dump_cmd(configuration)
+ commandlist = dump_cmd(configs)
 
  system "mkdir -p #{backup_path}" unless Dir.exist?(backup_path)
 
  result = system(commandlist)
 
  if ENV['BACKUP_PROVIDER'].present? && external_backup && result
- puts "Uploading #{@filename} to #{ENV['BACKUP_PROVIDER']}..."
+ puts "Uploading #{filename} to #{ENV['BACKUP_PROVIDER']}..."
  provider = Backup::Api.new
  begin
- provider.upload("#{@backup_path}/#{@filename}", @filename.to_s)
- puts "#{@filename} uploaded to #{ENV['BACKUP_PROVIDER']}"
+ provider.upload("#{backup_path}/#{filename}", filename.to_s, 'file')
+ puts "#{filename} uploaded to #{ENV['BACKUP_PROVIDER']}"
  rescue StandardError => e
- puts "#{@filename} upload failed: #{e.message}"
+ puts "#{filename} upload failed: #{e.message}"
  end
  end
- notification.send_backup_notification(result, title, content(result, { database: @database, backup_path: @backup_path, filename: @filename }),
- { date: @date, backup_path: @backup_path, database: @database })
- puts result ? "Backup created: #{@backup_path}/#{@filename} (#{size_str(File.size("#{@backup_path}/#{@filename}"))})" : 'Backup failed removing dump file'
+ notification.send_backup_notification(result, title, content(result, { database: database, backup_path: backup_path, filename: filename }),
+ { date: date, backup_path: backup_path, database: database })
+ puts result ? "Backup created: #{backup_path}/#{filename} (#{size_str(File.size("#{backup_path}/#{filename}"))})" : 'Backup failed, removing dump file'
 
- system "rm #{@backup_path}/#{@filename}" unless result
+ system "rm #{backup_path}/#{filename}" unless result
  end
  end
lib/capistrano/ops/rails/lib/tasks/pg/postgres_helper.rb CHANGED
@@ -1,18 +1,18 @@
  # frozen_string_literal: true
 
  module PostgresHelper
- def configuration
- @configuration ||=
- {
- database: Rails.configuration.database_configuration[Rails.env]['database'],
- username: Rails.configuration.database_configuration[Rails.env]['username'],
- password: Rails.configuration.database_configuration[Rails.env]['password'],
- hostname: Rails.configuration.database_configuration[Rails.env]['host'],
- portnumber: Rails.configuration.database_configuration[Rails.env]['port'],
- backup_path: Rails.root.join(Rails.env.development? ? 'tmp/backups' : '../../shared/backups').to_s,
- backups_enabled: Rails.env.production? || ENV['BACKUPS_ENABLED'] == 'true',
- external_backup: Rails.env.production? || ENV['EXTERNAL_BACKUP_ENABLED'] == 'true'
- }
+ def configs
+ @configs ||= {
+ database: Rails.configuration.database_configuration[Rails.env]['database'],
+ username: Rails.configuration.database_configuration[Rails.env]['username'],
+ password: Rails.configuration.database_configuration[Rails.env]['password'],
+ hostname: Rails.configuration.database_configuration[Rails.env]['host'],
+ portnumber: Rails.configuration.database_configuration[Rails.env]['port'],
+ backup_path: Rails.root.join(Rails.env.development? ? 'tmp/backups' : '../../shared/backups').to_s,
+ backups_enabled: Rails.env.production? || ENV['BACKUPS_ENABLED'] == 'true',
+ external_backup: Rails.env.production? || ENV['EXTERNAL_BACKUP_ENABLED'] == 'true',
+ filename: "#{Rails.configuration.database_configuration[Rails.env]['database']}_#{Time.now.to_i}.dump"
+ }
  end
 
  def title
@@ -20,41 +20,41 @@ module PostgresHelper
  end
 
  def content(result, settings = {})
- @database = settings[:database]
- @backup_path = settings[:backup_path]
- @filename = settings[:filename]
+ database = settings[:database]
+ backup_path = settings[:backup_path]
+ filename = settings[:filename]
 
  messages = []
  if result
- messages << "Backup of #{@database} successfully finished at #{Time.now}"
- messages << "Backup path:\`#{@backup_path}/#{@filename}\`"
+ messages << "Backup of #{database} successfully finished at #{Time.now}"
+ messages << "Backup path:\`#{backup_path}/#{filename}\`"
  else
- messages << "Backup of #{@database} failed at #{Time.now}"
+ messages << "Backup of #{database} failed at #{Time.now}"
  end
  messages.join("\n")
  end
 
  def dump_cmd(settings = {})
- @hostname = settings[:hostname]
- @database = settings[:database]
- @username = settings[:username]
- @password = settings[:password]
- @portnumber = settings[:portnumber]
- @backup_path = settings[:backup_path]
+ hostname = settings[:hostname]
+ database = settings[:database]
+ username = settings[:username]
+ password = settings[:password]
+ portnumber = settings[:portnumber]
+ backup_path = settings[:backup_path]
 
- @date = Time.now.to_i
+ date = Time.now.to_i
  options = []
- options << " -d #{@database}" if @database.present?
- options << " -U #{@username}" if @username.present?
- options << " -h #{@hostname}" if @hostname.present?
- options << " -p #{@portnumber}" if @portnumber.present?
+ options << " -d #{database}" if database.present?
+ options << " -U #{username}" if username.present?
+ options << " -h #{hostname}" if hostname.present?
+ options << " -p #{portnumber}" if portnumber.present?
 
- @filename = "#{@database}_#{@date}.dump"
+ filename = settings[:filename] || "#{database}_#{date}.dump" # prefer the memoized name from configs
 
  commandlist = []
- commandlist << "export PGPASSWORD='#{@password}'"
- commandlist << "cd #{@backup_path}"
- commandlist << "pg_dump --no-acl --no-owner #{options.join('')} > #{@filename}"
+ commandlist << "export PGPASSWORD='#{password}'"
+ commandlist << "cd #{backup_path}"
+ commandlist << "pg_dump --no-acl --no-owner #{options.join('')} > #{filename}"
  commandlist.join(' && ')
  end
 
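For concreteness, dump_cmd assembles a single shell pipeline. With hypothetical settings (the timestamp in the filename is illustrative):

  dump_cmd(database: 'myapp', username: 'deploy', password: 's3cr3t',
           hostname: 'localhost', portnumber: 5432, backup_path: '/var/www/shared/backups')
  # => "export PGPASSWORD='s3cr3t' && cd /var/www/shared/backups &&
  #     pg_dump --no-acl --no-owner -d myapp -U deploy -h localhost -p 5432 > myapp_1734307200.dump"

Because each option is appended only when present, a local socket connection can simply omit hostname and portnumber.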
lib/capistrano/ops/rails/lib/tasks/storage/backup.rake CHANGED
@@ -1,58 +1,48 @@
  # frozen_string_literal: true
 
+ require_relative './storage_helper'
  namespace :storage do
- @backup_path = Rails.root.join(Rails.env.development? ? 'tmp/backups' : '../../shared/backups').to_s
- @storage_path = Rails.root.join(Rails.env.development? ? 'storage' : '../../shared/storage').to_s
- backups_enabled = Rails.env.production? || ENV['BACKUPS_ENABLED'] == 'true'
- external_backup = Rails.env.production? || ENV['EXTERNAL_BACKUP_ENABLED'] == 'true'
+ include StorageHelper
 
  desc 'backup storage'
  task :backup do
+ backup_path = configuration[:backup_path]
+ storage_path = configuration[:storage_path]
+ backups_enabled = configuration[:backups_enabled]
+ external_backup = configuration[:external_backup]
+ keep_local_backups = configuration[:keep_local_backups]
+ backup_provider = configuration[:backup_provider]
  unless backups_enabled
  puts 'storage: Backups are disabled'
  exit(0)
  end
  notification = Notification::Api.new
 
- date = Time.now.to_i
- @filename = "storage_#{date}.tar.gz"
- FileUtils.mkdir_p(@backup_path) unless Dir.exist?(@backup_path)
- response = system "tar -zcf #{@backup_path}/#{@filename} -C #{@storage_path} ."
- FileUtils.rm_rf("#{@backup_path}/#{filename}") unless response
- puts response ? "Backup created: #{@backup_path}/#{@filename} (#{size_str(File.size("#{@backup_path}/#{@filename}"))})" : 'Backup failed removing dump file'
-
- if ENV['BACKUP_PROVIDER'].present? && external_backup && response
- puts "Uploading #{@filename} to #{ENV['BACKUP_PROVIDER']}..."
+ @date = Time.now.to_i
+ @filename = "storage_#{@date}.tar.gz" # set before either branch so the local and external paths agree
+ response = false
+ if keep_local_backups
+ puts "Creating backup of storage folder at #{Time.now}"
+ response = create_local_backup(@filename, storage_path, backup_path)
+ end
+ if backup_provider.present? && external_backup
+ puts "Uploading #{@filename} to #{backup_provider}..."
  provider = Backup::Api.new
  begin
- provider.upload("#{@backup_path}/#{@filename}", @filename.to_s)
- puts "#{@filename} uploaded to #{ENV['BACKUP_PROVIDER']}"
+ if keep_local_backups
+ provider.upload("#{backup_path}/#{@filename}", @filename.to_s, 'file')
+ else
+ provider.upload(storage_path, @filename.to_s, 'folder')
+ response = true
+ end
+ puts "#{@filename} uploaded to #{backup_provider}" if response
  rescue StandardError => e
  puts "#{@filename} upload failed: #{e.message}"
+ response = false
  end
  end
- notification.send_backup_notification(response, title, message(response), { date: date, backup_path: @backup_path, database: 'storage' })
- end
-
- def title
- ENV['DEFAULT_URL'] || "#{Rails.env} Backup"
- end
-
- def message(result=false)
- messages = []
- if result
- messages << "Backup of storage folder successfully finished at #{Time.now}"
- messages << "Backup path:\`#{@backup_path}/#{@filename}\`"
- else
- messages << "Backup of storage folder failed at #{Time.now}"
- end
- messages.join("\n")
- end
 
- def size_str(size)
- units = %w[B KB MB GB TB]
- e = (Math.log(size) / Math.log(1024)).floor
- s = format('%.2f', size.to_f / 1024**e)
- s.sub(/\.?0*$/, units[e])
+ notification.send_backup_notification(response, title, message(response, { backup_path: backup_path, filename: @filename }),
+ { date: @date, backup_path: backup_path, database: 'storage' })
  end
  end
lib/capistrano/ops/rails/lib/tasks/storage/remove_old_backups.rake CHANGED
@@ -8,8 +8,8 @@ namespace :storage do
  local_backup = Rails.env.production?
  local_backup = ENV['KEEP_LOCAL_STORAGE_BACKUPS'] == 'true' if ENV['KEEP_LOCAL_STORAGE_BACKUPS'].present?
 
- @env_local_no = ENV['NUMBER_OF_LOCAL_BACKUPS'].present? ? ENV['NUMBER_OF_LOCAL_BACKUPS'] : nil
- @env_external_no = ENV['NUMBER_OF_EXTERNAL_BACKUPS'].present? ? ENV['NUMBER_OF_EXTERNAL_BACKUPS'] : nil
+ @env_local_no = ENV['NUMBER_OF_LOCAL_BACKUPS'].presence
+ @env_external_no = ENV['NUMBER_OF_EXTERNAL_BACKUPS'].presence
  @total_local_backups_no = (@env_local_no || ENV['NUMBER_OF_BACKUPS'] || 7).to_i
  @total_external_backups_no = (@env_external_no || ENV['NUMBER_OF_BACKUPS'] || 7).to_i
  desc 'remove old storage backups'
lib/capistrano/ops/rails/lib/tasks/storage/storage_helper.rb ADDED
@@ -0,0 +1,69 @@
+ # frozen_string_literal: true
+
+ module StorageHelper
+ def configuration
+ @configuration ||=
+ {
+ backup_path: path_resolver('backups'),
+ storage_path: path_resolver('storage'),
+ backups_enabled: env_or_production('BACKUPS_ENABLED'),
+ external_backup: env_or_production('EXTERNAL_BACKUP_ENABLED'),
+ keep_local_backups: env_or_production('KEEP_LOCAL_STORAGE_BACKUPS'),
+ backup_provider: ENV['BACKUP_PROVIDER']
+ }
+ end
+
+ def title
+ ENV['DEFAULT_URL'] || "#{Rails.env} Backup"
+ end
+
+ def message(result = false, settings = {})
+ @backup_path = settings[:backup_path]
+ @filename = settings[:filename]
+ messages = []
+ if result
+ messages << "Backup of storage folder successfully finished at #{Time.now}"
+ messages << "Backup path:\`#{@backup_path}/#{@filename}\`"
+ else
+ messages << "Backup of storage folder failed at #{Time.now}"
+ end
+ messages.join("\n")
+ end
+
+ def backup_cmd(settings = {})
+ @backup_path = settings[:backup_path]
+ @date ||= Time.now.to_i
+ @filename ||= "storage_#{@date}.tar.gz" # reuse the task-level name when already set
+ FileUtils.mkdir_p(@backup_path) unless Dir.exist?(@backup_path)
+ "tar -zcf #{@backup_path}/#{@filename} -C #{settings[:storage_path]} ."
+ end
+
+ def size_str(size)
+ units = %w[B KB MB GB TB]
+ e = (Math.log(size) / Math.log(1024)).floor
+ s = format('%.2f', size.to_f / 1024**e)
+ s.sub(/\.?0*$/, units[e])
+ end
+
+ def create_local_backup(filename, storage_path, backup_path)
+ FileUtils.mkdir_p(backup_path) unless Dir.exist?(backup_path)
+ response = system(backup_cmd(backup_path: backup_path, storage_path: storage_path))
+ FileUtils.rm_rf("#{backup_path}/#{filename}") unless response
+ puts response ? "Backup created: #{backup_path}/#{filename} (#{size_str(File.size("#{backup_path}/#{filename}"))})" : 'Backup failed, removing tar file'
+ response
+ end
+
+ private
+
+ def env_or_production(env_var, default: Rails.env.production?)
+ if ENV.key?(env_var)
+ ENV[env_var] == 'true'
+ else
+ default
+ end
+ end
+
+ def path_resolver(folder)
+ Rails.root.join(Rails.env.development? ? 'tmp' : '../../shared', folder).to_s
+ end
+ end
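
env_or_production makes every toggle explicit-env-first: a variable that is set always wins (any value other than 'true' disables), and only an unset variable falls back to the default of Rails.env.production?. Hypothetical values:

  ENV['BACKUPS_ENABLED'] = 'false'
  env_or_production('BACKUPS_ENABLED')            # => false, even in production
  ENV.delete('KEEP_LOCAL_STORAGE_BACKUPS')
  env_or_production('KEEP_LOCAL_STORAGE_BACKUPS') # => Rails.env.production?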
lib/capistrano/ops/version.rb CHANGED
@@ -2,6 +2,6 @@
 
  module Capistrano
  module Ops
- VERSION = '1.0.4'
+ VERSION = '1.0.8'
  end
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: capistrano-ops
  version: !ruby/object:Gem::Version
- version: 1.0.4
+ version: 1.0.8
  platform: ruby
  authors:
  - Florian Crusius
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-08-13 00:00:00.000000000 Z
+ date: 2024-12-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: aws-sdk-s3
@@ -16,14 +16,14 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '1.128'
+ version: '1.175'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '1.128'
+ version: '1.175'
  - !ruby/object:Gem::Dependency
  name: faraday
  requirement: !ruby/object:Gem::Requirement
@@ -86,14 +86,14 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '10.0'
+ version: '12.3'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '10.0'
+ version: '12.3'
  - !ruby/object:Gem::Dependency
  name: rubocop
  requirement: !ruby/object:Gem::Requirement
@@ -200,6 +200,7 @@ files:
  - lib/capistrano/ops/rails/lib/tasks/pg/remove_old_dumps.rake
  - lib/capistrano/ops/rails/lib/tasks/storage/backup.rake
  - lib/capistrano/ops/rails/lib/tasks/storage/remove_old_backups.rake
+ - lib/capistrano/ops/rails/lib/tasks/storage/storage_helper.rb
  - lib/capistrano/ops/task_loader.rb
  - lib/capistrano/ops/version.rb
  - lib/capistrano/ops/whenever.rb