backy_rb 0.1.8 → 0.2.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 9d66c6c91f2f5538af37756b5dc6e2759330b0b33914647cf0aa0b1f3ee376cc
- data.tar.gz: 20856cc301a8590f8d60200664995b0ab408f76f15e077bc931d3c828fc11658
+ metadata.gz: b7bb1c2b1110d5fe15106b8bb78e8e07201e2318156aa6f67a4349eb92720a06
+ data.tar.gz: 5a8ae6f47b04ac3480cecedf77987166d5d630cc511496a734cddf5f7659e871
  SHA512:
- metadata.gz: 62cd5a81471ef89e5ff57195b32ae4e9307a3f31a14f3bc9ee1a931b6534c4922406198aa30756fda197f2c60c279fefd76b46912419d348f3fe8922bccca978
- data.tar.gz: 1541af37d97d53e039b598da4ce53dd1a119e69f77fed1d9251ea6f839a31a72e3d06a324ed3832f053670d0c41350e013b40418ad425971ee73f31df1d95e76
+ metadata.gz: 956ae711507992a7c75a3340d108b2df9e8da541ce9e90d346b1c0249bd22a414a6bf41f5e78f901f8b81ec68fd626a6f4cafbc8833d02a6eaea5f55563a0796
+ data.tar.gz: ca0b9c66ff07d33bd204df31e400690e53ca3a7108d8267eed734e9f5bc3d280046d1033856fbf13f89c297c38821d212ae573f4ea1d30b49eda95eb135bb624
data/.backyrc.example CHANGED
@@ -1,17 +1,33 @@
  # ~/.backyrc
- defaults:
+ shared:
  use_parallel: true
- s3:
- access_key_id: "YOUR_AWS_ACCESS_KEY_ID"
- secret_access_key: "YOUR_AWS_SECRET_ACCESS_KEY"
- region: "eu-central-1"
- bucket: "your-s3-bucket-name"
- database:
- adapter: "postgresql"
- host: "localhost"
- port: 5432
- username: "your-db-username"
- password: "your-db-password"
- database_name: "your-database-name"
- local:
- backup_path: "/path/to/your/local/backup/directory"
+ pause_replication: true
+ s3_access_key: "YOUR_AWS_ACCESS_KEY_ID"
+ s3_secret: "YOUR_AWS_SECRET_ACCESS_KEY"
+ s3_region: "eu-central-1"
+ s3_bucket: "your-s3-bucket-name"
+ s3_prefix: "./db/dump/"
+ pg_host: "localhost"
+ pg_port: 5432
+ pg_username: "your-db-username"
+ pg_password: "your-db-password"
+ pg_database: "your-database-name"
+ app_name: "backy"
+ environment: "development"
+ log_file: "./log/backy.log"
+ local_backup_path: "/path/to/your/local/backup/directory"
+
+ production:
+ pg_host: "production-host"
+ s3_bucket: "production-s3-bucket-name"
+ log_file: "./log/production_backy.log"
+
+ staging:
+ pg_host: "staging-host"
+ s3_bucket: "staging-s3-bucket-name"
+ log_file: "./log/staging_backy.log"
+
+ development:
+ pg_host: "localhost"
+ s3_bucket: "development-s3-bucket-name"
+ log_file: "./log/development_backy.log"
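
The example config now uses a `shared` block plus per-environment blocks (`production`, `staging`, `development`) that override it; the YAML nesting is flattened by the diff view, but the settings live under their environment keys. A minimal sketch of how such a file resolves (assuming standard two-space YAML nesting; the gem itself performs this merge in `Configuration#load_from_file`, shown further down):

```ruby
require "yaml"

config = YAML.load_file(File.expand_path("~/.backyrc"))
env    = "production"

# Keys in the environment block win over "shared"; everything else is inherited.
settings = config.fetch("shared", {}).merge(config.fetch(env, {}))

settings["pg_host"]      # => "production-host" (from the production block)
settings["use_parallel"] # => true              (inherited from shared)
```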
data/CHANGELOG.md CHANGED
@@ -3,6 +3,18 @@
  All notable changes to `Backy` will be documented in this file.

  ## [Unreleased]
+ ## [0.2.1] - 2024-06-24
+ ### Fixed
+ - Fix a bug where `push` was not working because of a missing filename
+
+ ## [0.2.0] - 2024-06-24
+ ### Added
+ - Support for turning off replication
+ - Support for per-environment configuration in .backyrc
+
+ ### Changed
+ - Breaking change: configuration keys renamed
+ - Internal refactoring

  ## [0.1.8] - 2024-06-22
  ### Fixed
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
  remote: .
  specs:
- backy_rb (0.1.8)
+ backy_rb (0.2.1)
  activerecord (>= 4.0)
  activesupport (>= 4.0)
  aws-sdk-s3 (>= 1.117)
@@ -1,4 +1,4 @@
- require 'forwardable'
+ require "forwardable"

  module Backy
  module AppConfig
@@ -1,29 +1,44 @@
+ require "fileutils"
+ require "etc"
+ require "open3"
+ require "yaml"
+ require "uri"
+
  module Backy
  class Configuration
- attr_writer(
- :pg_host,
- :pg_port,
- :pg_database,
- :pg_username,
- :pg_password,
- :s3_region,
- :s3_access_key,
- :s3_secret,
- :s3_bucket,
- :s3_prefix,
- :app_name,
- :environment,
- :use_parallel,
- :log_file
- )
+ DEFAULTS = {
+ pg_host: nil,
+ pg_port: nil,
+ pg_database: nil,
+ pg_username: nil,
+ pg_password: nil,
+ s3_region: nil,
+ s3_access_key: nil,
+ s3_secret: nil,
+ s3_bucket: nil,
+ s3_prefix: "./db/dump/",
+ app_name: "backy",
+ environment: "development",
+ use_parallel: false,
+ pause_replication: true,
+ log_file: "./log/backy.log",
+ local_backup_path: nil
+ }.freeze
+
+ CONFIG_FILE_NAME = ".backyrc"
+
+ attr_accessor(*DEFAULTS.keys)
+
+ def initialize
+ DEFAULTS.each do |key, value|
+ instance_variable_set("@#{key}", value)
+ end
+ end

  def load
- local_config_file = File.join(Dir.pwd, '.backyrc')
- global_config_file = File.join(Dir.home, '.backyrc')
+ load_config_file

- config_file = File.exist?(local_config_file) ? local_config_file : global_config_file
- Logger.log("Loading configuration from #{config_file}...") if File.exist?(config_file)
- load_from_file(config_file) if File.exist?(config_file)
+ load_from_env
  end

  def pg_url=(url)
@@ -40,56 +55,14 @@ module Backy
  @pg_url ||= ENV["PG_URL"]
  end

- def pg_host
- @pg_host ||= ENV["PG_HOST"]
- end
-
- def pg_port
- @pg_port ||= ENV["PG_PORT"]
- end
-
- def pg_database
- @pg_database ||= ENV["PG_DATABASE"]
- end
-
- def pg_username
- @pg_username ||= ENV["PG_USERNAME"]
- end
-
- def pg_password
- @pg_password ||= ENV["PG_PASSWORD"]
- end
-
- def s3_region
- @s3_region ||= ENV["S3_REGION"]
- end
-
- def s3_access_key
- @s3_access_key ||= ENV["S3_ACCESS_KEY"]
- end
-
- def s3_secret
- @s3_secret ||= ENV["S3_SECRET"]
- end
-
- def s3_bucket
- @s3_bucket ||= ENV["S3_BUCKET"]
- end
-
- def s3_prefix
- @s3_prefix ||= ENV["S3_PREFIX"].presence || "/db/dump/"
- end
-
-
-
- def use_parallel
- @use_parallel ||= ENV["BACKY_USE_PARALLEL"] == "true"
- end
-
  def use_parallel?
  use_parallel && pigz_installed && multicore
  end

+ def pause_replication?
+ pause_replication
+ end
+
  # Detect if pigz binary is available
  # If it is, use it to speed up the dump
  # pigz is a parallel gzip implementation
@@ -103,14 +76,6 @@ module Backy
  @multicore ||= Etc.nprocessors > 1
  end

- def app_name
- @app_name ||= ENV["APP_NAME"].presence || "backy"
- end
-
- def environment
- @environment ||= "development"
- end
-
  def log_file
  @log_file ||= default_log_file
  end
@@ -120,37 +85,62 @@ module Backy
  def default_log_file
  if Gem.win_platform?
  # Windows default path
- File.join(Dir.home, "AppData", "Local", "#{app_name}", "log", "#{app_name}.log")
+ File.join(Dir.home, "AppData", "Local", app_name, "log", app_name, ".log")
  else
  # Unix-like systems default path
- File.join(Dir.home, ".local", "share", "#{app_name}", "log", "#{app_name}.log")
+ File.join(Dir.home, ".local", "share", app_name, "log", app_name, ".log")
+ end
+ end
+
+ def load_config_file
+ local_config_file = File.join(Dir.pwd, CONFIG_FILE_NAME)
+ global_config_file = File.join(Dir.home, CONFIG_FILE_NAME)
+
+ config_file = File.exist?(local_config_file) ? local_config_file : global_config_file
+ if File.exist?(config_file)
+ Logger.log("Loading configuration from #{config_file}...")
+ load_from_file(config_file)
  end
  end

  def load_from_file(file_path)
  configuration = YAML.load_file(file_path)

- @s3_access_key = configuration.dig("defaults", "s3", "access_key_id")
- @s3_secret = configuration.dig("defaults", "s3", "secret_access_key")
- @s3_region = configuration.dig("defaults", "s3", "region")
- @s3_bucket = configuration.dig("defaults", "s3", "bucket")
- @s3_prefix = configuration.dig("defaults", "s3", "prefix") || s3_prefix
+ shared_config = configuration.fetch("shared", {})
+ environment_config = configuration.fetch(environment, {})

- @pg_url = configuration.dig("defaults", "database", "pg_url")
- if @pg_url
- self.pg_url = @pg_url
- else
- @pg_host = configuration.dig("defaults", "database", "host")
- @pg_port = configuration.dig("defaults", "database", "port")
- @pg_username = configuration.dig("defaults", "database", "username")
- @pg_password = configuration.dig("defaults", "database", "password")
- @pg_database = configuration.dig("defaults", "database", "database_name")
+ merged_config = deep_merge(shared_config, environment_config)
+
+ apply_config(merged_config)
+ end
+
+ def apply_config(config)
+ config.each do |key, value|
+ instance_variable_set("@#{key}", value) if respond_to?("#{key}=")
  end

- @app_name = configuration.dig("defaults", "app_name") || "backy"
- @environment = configuration.dig("defaults", "environment") || "development"
- @log_file = configuration.dig("defaults", "log", "file") || default_log_file
- @use_parallel = configuration.dig("defaults", "use_parallel") || false
+ self.pg_url = @pg_url if @pg_url
+ end
+
+ def load_from_env
+ ENV.each do |key, value|
+ case key
+ when "PG_HOST" then @pg_host = value
+ when "PG_PORT" then @pg_port = value
+ when "PG_DATABASE" then @pg_database = value
+ when "PG_USERNAME" then @pg_username = value
+ when "PG_PASSWORD" then @pg_password = value
+ when "S3_REGION" then @s3_region = value
+ when "S3_ACCESS_KEY" then @s3_access_key = value
+ when "S3_SECRET" then @s3_secret = value
+ when "S3_BUCKET" then @s3_bucket = value
+ when "S3_PREFIX" then @s3_prefix = value
+ when "APP_NAME" then @app_name = value
+ when "BACKY_USE_PARALLEL" then @use_parallel = value == "true"
+ when "BACKY_PAUSE_REPLICATION" then @pause_replication = value == "true"
+ when "LOCAL_BACKUP_PATH" then @local_backup_path = value
+ end
+ end
  end

  def parse_postgres_uri(uri)
@@ -165,5 +155,17 @@ module Backy
  database_name: parsed_uri.path[1..]
  }
  end
+
+ def deep_merge(hash1, hash2)
+ merged = hash1.dup
+ hash2.each do |key, value|
+ merged[key] = if value.is_a?(Hash) && hash1[key].is_a?(Hash)
+ deep_merge(hash1[key], value)
+ else
+ value
+ end
+ end
+ merged
+ end
  end
  end
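
Taken together, `Configuration` now resolves settings in three layers, each overriding the previous one: the built-in `DEFAULTS` hash, the `.backyrc` file (`shared` deep-merged with the block matching `environment`), and finally environment variables via `load_from_env`. A hedged usage sketch, assuming no `.backyrc` exists in the working or home directory:

```ruby
ENV["BACKY_PAUSE_REPLICATION"] = "false"
ENV["PG_HOST"] = "127.0.0.1"

config = Backy::Configuration.new  # instance variables seeded from DEFAULTS
config.load                        # load_config_file, then load_from_env

config.pause_replication? # => false         (ENV overrides the default of true)
config.pg_host            # => "127.0.0.1"   (from ENV)
config.s3_prefix          # => "./db/dump/"  (untouched DEFAULTS value)
```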
data/lib/backy/db.rb CHANGED
@@ -1,4 +1,4 @@
- require 'forwardable'
+ require "forwardable"

  module Backy
  module Db
@@ -12,6 +12,7 @@ module Backy
  def_delegator "Backy.configuration", :pg_username, :username
  def_delegator "Backy.configuration", :pg_password, :password
  def_delegator "Backy.configuration", :use_parallel?, :use_parallel?
+ def_delegator "Backy.configuration", :pause_replication?, :pause_replication?

  def pg_password_env
  password.present? ? "PGPASSWORD='#{password}' " : ""
data/lib/backy/logger.rb CHANGED
@@ -1,11 +1,13 @@
- require 'thor'
+ require "thor"

  module Backy
  class Logger
+ @log_messages = []
+
  # Logs a message with the specified color using Thor's shell
  def self.log(message, color = nil)
- thor_shell = Thor::Base.shell.new
- thor_shell.say("[#{Time.now.strftime("%Y-%m-%d %H:%M:%S")}] #{message}", color)
+ @log_messages << message
+ say("[#{Time.now.strftime("%Y-%m-%d %H:%M:%S")}] #{message}\n", color)
  end

  def self.success(message)
@@ -23,5 +25,17 @@ module Backy
  def self.error(message)
  log(message, :red)
  end
+
+ def self.say(message, color = nil)
+ thor_shell.say(message, color)
+ end
+
+ def self.log_messages
+ @log_messages
+ end
+
+ private_class_method def self.thor_shell
+ @thor_shell ||= Thor::Base.shell.new
+ end
  end
  end
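
`Logger` now keeps every raw message in a class-level `@log_messages` array (readable via `Logger.log_messages`) while still printing the timestamped line through a single memoized Thor shell. A small usage sketch:

```ruby
Backy::Logger.log("Starting backy for mydb")
Backy::Logger.success("done")  # delegates to log(message, :green)

Backy::Logger.log_messages
# => ["Starting backy for mydb", "done"]  (raw messages, without timestamps)
```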
data/lib/backy/pg_dump.rb CHANGED
@@ -1,5 +1,6 @@
  require "fileutils"
  require "etc"
+ require "open3"

  module Backy
  class PgDump
@@ -10,9 +11,46 @@ module Backy
  DUMP_CMD_OPTS = "--no-acl --no-owner --no-subscriptions --no-publications"

  def call
+ setup_backup_directory
+ log_start
+
+ dump_file = nil
+
+ begin
+ handle_replication { dump_file = backup }
+ rescue => e
+ Logger.error("An error occurred during backup: #{e.message}")
+ ensure
+ log_replication_resume if replica? && pause_replication?
+ end
+
+ dump_file
+ end
+
+ private
+
+ def setup_backup_directory
  FileUtils.mkdir_p(DUMP_DIR)
+ end
+
+ def log_start
  Logger.log("Starting backy for #{database}")
+ end

+ def handle_replication
+ if replica? && pause_replication?
+ if pause_replication
+ Logger.log("Replication paused.")
+ yield
+ else
+ Logger.error("Failed to pause replication. Aborting backup.")
+ end
+ else
+ yield
+ end
+ end
+
+ def backup
  if use_parallel?
  Logger.log("Using multicore dump with pigz")
  parallel_backup
@@ -22,64 +60,55 @@ module Backy
  end
  end

- private
+ def log_replication_resume
+ if resume_replication
+ Logger.log("Replication resumed.")
+ else
+ Logger.error("Failed to resume replication. Manual intervention required.")
+ end
+ end

  def plain_text_backup
- timestamp = Time.now.strftime("%Y%m%d_%H%M%S")
- dump_file = "#{DUMP_DIR}/#{database}_#{whoami}@#{hostname}_single_#{timestamp}.sql.gz"
+ timestamp = current_timestamp
+ dump_file = "#{DUMP_DIR}/#{database}_#{whoami}@#{hostname}_#{timestamp}.sql.gz"

  cmd = "(#{pg_password_env}pg_dump #{pg_credentials} #{database} #{DUMP_CMD_OPTS} | gzip -9 > #{dump_file}) 2>&1 >> #{log_file}"

- print "Saving to #{dump_file} ... "
+ Logger.log("Saving to #{dump_file} ... ")

- if system(cmd)
- Logger.success("done")
- else
- Logger.error("error. See #{log_file}")
- return
- end
+ execute_command(cmd, "error. See #{log_file}")

  dump_file
  end

  def parallel_backup
- timestamp = Time.now.strftime("%Y%m%d_%H%M%S")
+ timestamp = current_timestamp
  dump_dir = "#{DUMP_DIR}/#{database}_dump_parallel_#{timestamp}"
  dump_file = "#{dump_dir}.tar.gz"

- pg_dump_cmd = "pg_dump -Z0 -j #{Etc.nprocessors} -Fd #{database} -f #{dump_dir} #{pg_credentials} #{DUMP_CMD_OPTS}"
+ pg_dump_cmd = "#{pg_password_env}pg_dump -Z0 -j #{Etc.nprocessors} -Fd #{database} -f #{dump_dir} #{pg_credentials} #{DUMP_CMD_OPTS}"
  tar_cmd = "tar -cf - #{dump_dir} | pigz -p #{Etc.nprocessors} > #{dump_file}"
  cleanup_cmd = "rm -rf #{dump_dir}"

- Logger.log("Running pg_dump #{database}")
- if system("#{pg_password_env}#{pg_dump_cmd} 2>&1 >> #{log_file}")
- Logger.log("pg_dump completed successfully.")
- else
- Logger.error("pg_dump failed. See #{log_file} for details.")
- return
- end
+ execute_command("#{pg_password_env}#{pg_dump_cmd} 2>&1 >> #{log_file}", "pg_dump failed. See #{log_file} for details.")
+ execute_command(tar_cmd, "Compression failed. See #{log_file} for details.")
+ execute_command(cleanup_cmd, "Cleanup failed. See #{log_file} for details.")

- # Execute tar command
- Logger.log("Compressing #{dump_dir}")
- if system(tar_cmd)
- Logger.log("Compression completed successfully.")
- else
- Logger.error("Compression failed. See #{log_file} for details.")
- return
- end
+ Logger.success("Backup process completed. Output file: #{dump_file}")
+
+ dump_file
+ end

- # Execute cleanup command
- Logger.log("Cleaning up #{dump_dir}")
- if system(cleanup_cmd)
- Logger.log("Cleanup completed successfully.")
+ def execute_command(cmd, error_message)
+ if system(cmd)
+ Logger.success("done")
  else
- Logger.error("Cleanup failed. See #{log_file} for details.")
- return
+ Logger.error(error_message)
  end
+ end

- Logger.success("Backup process completed. Output file: #{dump_file}")
-
- dump_file # Return the name of the dump file
+ def current_timestamp
+ Time.now.strftime("%Y%m%d_%H%M%S")
  end

  def hostname
@@ -89,5 +118,45 @@ module Backy
  def whoami
  @whoami ||= `whoami`.strip
  end
+
+ def pause_replication
+ query = "SELECT pg_wal_replay_pause();"
+ success, _output = execute_sql(query)
+ success
+ end
+
+ def resume_replication
+ query = "SELECT pg_wal_replay_resume();"
+ success, _output = execute_sql(query)
+ success
+ end
+
+ def execute_sql(query)
+ command = %(#{pg_password_env}psql #{pg_credentials} -d #{database} -c "#{query}")
+ output = ""
+ Open3.popen3(command) do |_stdin, stdout, stderr, wait_thr|
+ while (line = stdout.gets)
+ output << line
+ end
+ while (line = stderr.gets)
+ puts "Error: #{line}"
+ end
+ exit_status = wait_thr.value
+ [exit_status.success?, output]
+ end
+ end
+
+ def replica?
+ @is_replica ||= begin
+ query = "SELECT pg_is_in_recovery();"
+ success, output = execute_sql(query)
+ if success && output.include?("t")
+ Logger.log("Database is a replica.")
+ true
+ else
+ false
+ end
+ end
+ end
  end
  end
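
The reworked `PgDump#call` wraps the dump in `handle_replication`: when the target is a standby (`pg_is_in_recovery()` returns `t`) and `pause_replication?` is enabled, WAL replay is paused for the duration of the dump and resumed afterwards. A condensed sketch of that flow (error logging and the abort path omitted; the real psql/pg_dump command strings come from `pg_credentials`, `database`, and related helpers):

```ruby
if replica? && pause_replication?  # psql: SELECT pg_is_in_recovery();
  pause_replication                # psql: SELECT pg_wal_replay_pause();
  begin
    dump_file = backup             # plain_text_backup or parallel_backup
  ensure
    resume_replication             # psql: SELECT pg_wal_replay_resume();
  end
else
  dump_file = backup
end
```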
@@ -10,16 +10,16 @@ module Backy
  end

  def call
- pigz_installed = system('which pigz > /dev/null 2>&1')
+ pigz_installed = system("which pigz > /dev/null 2>&1")
  multicore = Etc.nprocessors > 1
  use_multicore = ENV["BACKY_USE_PARALLEL"] == "true"

  if pigz_installed && multicore && use_multicore
- Logger.log('Using parallel restore with pigz')
+ Logger.log("Using parallel restore with pigz")
  parallel_restore
  else
  Logger.log("Pigz not installed or system is not multicore")
- Logger.log('Using plain text restore')
+ Logger.log("Using plain text restore")
  plain_text_restore
  end
  end
data/lib/backy/s3.rb CHANGED
@@ -1,5 +1,5 @@
  require "aws-sdk-s3"
- require 'forwardable'
+ require "forwardable"

  module Backy
  module S3
data/lib/backy/version.rb CHANGED
@@ -1,3 +1,3 @@
  module Backy
- VERSION = "0.1.8"
+ VERSION = "0.2.1"
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: backy_rb
  version: !ruby/object:Gem::Version
- version: 0.1.8
+ version: 0.2.1
  platform: ruby
  authors:
  - Alexey Kharchenko
@@ -10,7 +10,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-06-22 00:00:00.000000000 Z
+ date: 2024-06-24 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: rspec
@@ -301,7 +301,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.5.5
+ rubygems_version: 3.4.22
  signing_key:
  specification_version: 4
  summary: Backy is a powerful and user-friendly database backup gem designed specifically