backy_rb 0.1.8 → 0.2.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 9d66c6c91f2f5538af37756b5dc6e2759330b0b33914647cf0aa0b1f3ee376cc
4
- data.tar.gz: 20856cc301a8590f8d60200664995b0ab408f76f15e077bc931d3c828fc11658
3
+ metadata.gz: a7cc1e70142f6fa0c98d12c5e1187e05596a847e58ff13060b49d713bea3189c
4
+ data.tar.gz: 702727d108936670bcfa86a203cd67c672eb0dfb2a3f84d4f22e538962c2e151
5
5
  SHA512:
6
- metadata.gz: 62cd5a81471ef89e5ff57195b32ae4e9307a3f31a14f3bc9ee1a931b6534c4922406198aa30756fda197f2c60c279fefd76b46912419d348f3fe8922bccca978
7
- data.tar.gz: 1541af37d97d53e039b598da4ce53dd1a119e69f77fed1d9251ea6f839a31a72e3d06a324ed3832f053670d0c41350e013b40418ad425971ee73f31df1d95e76
6
+ metadata.gz: 16e3cc5cecf30e88be20ed71685cd5c615ace3ad960f0bcca92c03cda64f0c586da5d6905072e99c3fac2b334e598e46588eb438a0406dd10956a73fb1d4ec78
7
+ data.tar.gz: 55e8e14e5edb195cc882abe7cf4568cbb986d1e39b955c198fea5f25091b0adc00e42136b8ccd6df5a522ddf61662a4ee0ff617ee877221fc6540f91b7813aee
data/.backyrc.example CHANGED
@@ -1,17 +1,33 @@
1
1
  # ~/.backyrc
2
- defaults:
2
+ shared:
3
3
  use_parallel: true
4
- s3:
5
- access_key_id: "YOUR_AWS_ACCESS_KEY_ID"
6
- secret_access_key: "YOUR_AWS_SECRET_ACCESS_KEY"
7
- region: "eu-central-1"
8
- bucket: "your-s3-bucket-name"
9
- database:
10
- adapter: "postgresql"
11
- host: "localhost"
12
- port: 5432
13
- username: "your-db-username"
14
- password: "your-db-password"
15
- database_name: "your-database-name"
16
- local:
17
- backup_path: "/path/to/your/local/backup/directory"
4
+ pause_replication: true
5
+ s3_access_key: "YOUR_AWS_ACCESS_KEY_ID"
6
+ s3_secret: "YOUR_AWS_SECRET_ACCESS_KEY"
7
+ s3_region: "eu-central-1"
8
+ s3_bucket: "your-s3-bucket-name"
9
+ s3_prefix: "./db/dump/"
10
+ pg_host: "localhost"
11
+ pg_port: 5432
12
+ pg_username: "your-db-username"
13
+ pg_password: "your-db-password"
14
+ pg_database: "your-database-name"
15
+ app_name: "backy"
16
+ environment: "development"
17
+ log_file: "./log/backy.log"
18
+ local_backup_path: "/path/to/your/local/backup/directory"
19
+
20
+ production:
21
+ pg_host: "production-host"
22
+ s3_bucket: "production-s3-bucket-name"
23
+ log_file: "./log/production_backy.log"
24
+
25
+ staging:
26
+ pg_host: "staging-host"
27
+ s3_bucket: "staging-s3-bucket-name"
28
+ log_file: "./log/staging_backy.log"
29
+
30
+ development:
31
+ pg_host: "localhost"
32
+ s3_bucket: "development-s3-bucket-name"
33
+ log_file: "./log/development_backy.log"
data/CHANGELOG.md CHANGED
@@ -3,6 +3,14 @@
3
3
  All notable changes to `Backy` will be documented in this file.
4
4
 
5
5
  ## [Unreleased]
6
+ ## [0.2.0] - 2024-06-24
7
+ ### Added
8
+ - Support for pausing replication during backup (`pause_replication` option)
9
+ - Added support for config by environment in .backyrc
10
+
11
+ ### Changed
12
+ - Breaking change: configuration keys renamed (flat `pg_*`/`s3_*` keys under `shared`/environment sections)
13
+ - Internal refactoring
6
14
 
7
15
  ## [0.1.8] - 2024-06-22
8
16
  ### Fixed
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- backy_rb (0.1.8)
4
+ backy_rb (0.2.0)
5
5
  activerecord (>= 4.0)
6
6
  activesupport (>= 4.0)
7
7
  aws-sdk-s3 (>= 1.117)
data/README.md CHANGED
@@ -68,19 +68,18 @@ Backy can be configured through a .backyrc YAML file. Place this file in your ho
68
68
  Example `.backyrc`:
69
69
 
70
70
  ```yaml
71
- defaults:
71
+ shared:
72
72
  use_parallel: true
73
- s3:
74
- access_key_id: YOUR_ACCESS_KEY
75
- secret_access_key: YOUR_SECRET_KEY
76
- region: YOUR_REGION
77
- bucket: YOUR_BUCKET
78
- database:
79
- host: DB_HOST
80
- port: DB_PORT
81
- username: DB_USERNAME
82
- password: DB_PASSWORD
83
- database_name: DB_NAME
73
+ pause_replication: true
74
+ s3_access_key: YOUR_ACCESS_KEY
75
+ s3_secret: YOUR_SECRET_KEY
76
+ s3_region: YOUR_REGION
77
+ s3_bucket: YOUR_BUCKET
78
+ pg_host: DB_HOST
79
+ pg_port: DB_PORT
80
+ pg_username: DB_USERNAME
81
+ pg_password: DB_PASSWORD
82
+ pg_database: DB_NAME
84
83
  ```
85
84
 
86
85
  ## Development
@@ -1,4 +1,4 @@
1
- require 'forwardable'
1
+ require "forwardable"
2
2
 
3
3
  module Backy
4
4
  module AppConfig
@@ -1,29 +1,44 @@
1
+ require "fileutils"
2
+ require "etc"
3
+ require "open3"
4
+ require "yaml"
5
+ require "uri"
6
+
1
7
  module Backy
2
8
  class Configuration
3
- attr_writer(
4
- :pg_host,
5
- :pg_port,
6
- :pg_database,
7
- :pg_username,
8
- :pg_password,
9
- :s3_region,
10
- :s3_access_key,
11
- :s3_secret,
12
- :s3_bucket,
13
- :s3_prefix,
14
- :app_name,
15
- :environment,
16
- :use_parallel,
17
- :log_file
18
- )
9
+ DEFAULTS = {
10
+ pg_host: nil,
11
+ pg_port: nil,
12
+ pg_database: nil,
13
+ pg_username: nil,
14
+ pg_password: nil,
15
+ s3_region: nil,
16
+ s3_access_key: nil,
17
+ s3_secret: nil,
18
+ s3_bucket: nil,
19
+ s3_prefix: "./db/dump/",
20
+ app_name: "backy",
21
+ environment: "development",
22
+ use_parallel: false,
23
+ pause_replication: true,
24
+ log_file: "./log/backy.log",
25
+ local_backup_path: nil
26
+ }.freeze
27
+
28
+ CONFIG_FILE_NAME = ".backyrc"
29
+
30
+ attr_accessor(*DEFAULTS.keys)
31
+
32
+ def initialize
33
+ DEFAULTS.each do |key, value|
34
+ instance_variable_set("@#{key}", value)
35
+ end
36
+ end
19
37
 
20
38
  def load
21
- local_config_file = File.join(Dir.pwd, '.backyrc')
22
- global_config_file = File.join(Dir.home, '.backyrc')
39
+ load_config_file
23
40
 
24
- config_file = File.exist?(local_config_file) ? local_config_file : global_config_file
25
- Logger.log("Loading configuration from #{config_file}...") if File.exist?(config_file)
26
- load_from_file(config_file) if File.exist?(config_file)
41
+ load_from_env
27
42
  end
28
43
 
29
44
  def pg_url=(url)
@@ -40,56 +55,14 @@ module Backy
40
55
  @pg_url ||= ENV["PG_URL"]
41
56
  end
42
57
 
43
- def pg_host
44
- @pg_host ||= ENV["PG_HOST"]
45
- end
46
-
47
- def pg_port
48
- @pg_port ||= ENV["PG_PORT"]
49
- end
50
-
51
- def pg_database
52
- @pg_database ||= ENV["PG_DATABASE"]
53
- end
54
-
55
- def pg_username
56
- @pg_username ||= ENV["PG_USERNAME"]
57
- end
58
-
59
- def pg_password
60
- @pg_password ||= ENV["PG_PASSWORD"]
61
- end
62
-
63
- def s3_region
64
- @s3_region ||= ENV["S3_REGION"]
65
- end
66
-
67
- def s3_access_key
68
- @s3_access_key ||= ENV["S3_ACCESS_KEY"]
69
- end
70
-
71
- def s3_secret
72
- @s3_secret ||= ENV["S3_SECRET"]
73
- end
74
-
75
- def s3_bucket
76
- @s3_bucket ||= ENV["S3_BUCKET"]
77
- end
78
-
79
- def s3_prefix
80
- @s3_prefix ||= ENV["S3_PREFIX"].presence || "/db/dump/"
81
- end
82
-
83
-
84
-
85
- def use_parallel
86
- @use_parallel ||= ENV["BACKY_USE_PARALLEL"] == "true"
87
- end
88
-
89
58
  def use_parallel?
90
59
  use_parallel && pigz_installed && multicore
91
60
  end
92
61
 
62
+ def pause_replication?
63
+ pause_replication
64
+ end
65
+
93
66
  # Detect if pigz binary is available
94
67
  # If it is, use it to speed up the dump
95
68
  # pigz is a parallel gzip implementation
@@ -103,14 +76,6 @@ module Backy
103
76
  @multicore ||= Etc.nprocessors > 1
104
77
  end
105
78
 
106
- def app_name
107
- @app_name ||= ENV["APP_NAME"].presence || "backy"
108
- end
109
-
110
- def environment
111
- @environment ||= "development"
112
- end
113
-
114
79
  def log_file
115
80
  @log_file ||= default_log_file
116
81
  end
@@ -120,37 +85,62 @@ module Backy
120
85
  def default_log_file
121
86
  if Gem.win_platform?
122
87
  # Windows default path
123
- File.join(Dir.home, "AppData", "Local", "#{app_name}", "log", "#{app_name}.log")
88
+ File.join(Dir.home, "AppData", "Local", app_name, "log", "#{app_name}.log")
124
89
  else
125
90
  # Unix-like systems default path
126
- File.join(Dir.home, ".local", "share", "#{app_name}", "log", "#{app_name}.log")
91
+ File.join(Dir.home, ".local", "share", app_name, "log", "#{app_name}.log")
92
+ end
93
+ end
94
+
95
+ def load_config_file
96
+ local_config_file = File.join(Dir.pwd, CONFIG_FILE_NAME)
97
+ global_config_file = File.join(Dir.home, CONFIG_FILE_NAME)
98
+
99
+ config_file = File.exist?(local_config_file) ? local_config_file : global_config_file
100
+ if File.exist?(config_file)
101
+ Logger.log("Loading configuration from #{config_file}...")
102
+ load_from_file(config_file)
127
103
  end
128
104
  end
129
105
 
130
106
  def load_from_file(file_path)
131
107
  configuration = YAML.load_file(file_path)
132
108
 
133
- @s3_access_key = configuration.dig("defaults", "s3", "access_key_id")
134
- @s3_secret = configuration.dig("defaults", "s3", "secret_access_key")
135
- @s3_region = configuration.dig("defaults", "s3", "region")
136
- @s3_bucket = configuration.dig("defaults", "s3", "bucket")
137
- @s3_prefix = configuration.dig("defaults", "s3", "prefix") || s3_prefix
109
+ shared_config = configuration.fetch("shared", {})
110
+ environment_config = configuration.fetch(environment, {})
138
111
 
139
- @pg_url = configuration.dig("defaults", "database", "pg_url")
140
- if @pg_url
141
- self.pg_url = @pg_url
142
- else
143
- @pg_host = configuration.dig("defaults", "database", "host")
144
- @pg_port = configuration.dig("defaults", "database", "port")
145
- @pg_username = configuration.dig("defaults", "database", "username")
146
- @pg_password = configuration.dig("defaults", "database", "password")
147
- @pg_database = configuration.dig("defaults", "database", "database_name")
112
+ merged_config = deep_merge(shared_config, environment_config)
113
+
114
+ apply_config(merged_config)
115
+ end
116
+
117
+ def apply_config(config)
118
+ config.each do |key, value|
119
+ instance_variable_set("@#{key}", value) if respond_to?("#{key}=")
148
120
  end
149
121
 
150
- @app_name = configuration.dig("defaults", "app_name") || "backy"
151
- @environment = configuration.dig("defaults", "environment") || "development"
152
- @log_file = configuration.dig("defaults", "log", "file") || default_log_file
153
- @use_parallel = configuration.dig("defaults", "use_parallel") || false
122
+ self.pg_url = @pg_url if @pg_url
123
+ end
124
+
125
+ def load_from_env
126
+ ENV.each do |key, value|
127
+ case key
128
+ when "PG_HOST" then @pg_host = value
129
+ when "PG_PORT" then @pg_port = value
130
+ when "PG_DATABASE" then @pg_database = value
131
+ when "PG_USERNAME" then @pg_username = value
132
+ when "PG_PASSWORD" then @pg_password = value
133
+ when "S3_REGION" then @s3_region = value
134
+ when "S3_ACCESS_KEY" then @s3_access_key = value
135
+ when "S3_SECRET" then @s3_secret = value
136
+ when "S3_BUCKET" then @s3_bucket = value
137
+ when "S3_PREFIX" then @s3_prefix = value
138
+ when "APP_NAME" then @app_name = value
139
+ when "BACKY_USE_PARALLEL" then @use_parallel = value == "true"
140
+ when "BACKY_PAUSE_REPLICATION" then @pause_replication = value == "true"
141
+ when "LOCAL_BACKUP_PATH" then @local_backup_path = value
142
+ end
143
+ end
154
144
  end
155
145
 
156
146
  def parse_postgres_uri(uri)
@@ -165,5 +155,17 @@ module Backy
165
155
  database_name: parsed_uri.path[1..]
166
156
  }
167
157
  end
158
+
159
+ def deep_merge(hash1, hash2)
160
+ merged = hash1.dup
161
+ hash2.each do |key, value|
162
+ merged[key] = if value.is_a?(Hash) && hash1[key].is_a?(Hash)
163
+ deep_merge(hash1[key], value)
164
+ else
165
+ value
166
+ end
167
+ end
168
+ merged
169
+ end
168
170
  end
169
171
  end
data/lib/backy/db.rb CHANGED
@@ -1,4 +1,4 @@
1
- require 'forwardable'
1
+ require "forwardable"
2
2
 
3
3
  module Backy
4
4
  module Db
@@ -12,6 +12,7 @@ module Backy
12
12
  def_delegator "Backy.configuration", :pg_username, :username
13
13
  def_delegator "Backy.configuration", :pg_password, :password
14
14
  def_delegator "Backy.configuration", :use_parallel?, :use_parallel?
15
+ def_delegator "Backy.configuration", :pause_replication?, :pause_replication?
15
16
 
16
17
  def pg_password_env
17
18
  password.present? ? "PGPASSWORD='#{password}' " : ""
data/lib/backy/logger.rb CHANGED
@@ -1,11 +1,13 @@
1
- require 'thor'
1
+ require "thor"
2
2
 
3
3
  module Backy
4
4
  class Logger
5
+ @log_messages = []
6
+
5
7
  # Logs a message with the specified color using Thor's shell
6
8
  def self.log(message, color = nil)
7
- thor_shell = Thor::Base.shell.new
8
- thor_shell.say("[#{Time.now.strftime("%Y-%m-%d %H:%M:%S")}] #{message}", color)
9
+ @log_messages << message
10
+ say("[#{Time.now.strftime("%Y-%m-%d %H:%M:%S")}] #{message}\n", color)
9
11
  end
10
12
 
11
13
  def self.success(message)
@@ -23,5 +25,17 @@ module Backy
23
25
  def self.error(message)
24
26
  log(message, :red)
25
27
  end
28
+
29
+ def self.say(message, color = nil)
30
+ thor_shell.say(message, color)
31
+ end
32
+
33
+ def self.log_messages
34
+ @log_messages
35
+ end
36
+
37
+ private_class_method def self.thor_shell
38
+ @thor_shell ||= Thor::Base.shell.new
39
+ end
26
40
  end
27
41
  end
data/lib/backy/pg_dump.rb CHANGED
@@ -1,5 +1,6 @@
1
1
  require "fileutils"
2
2
  require "etc"
3
+ require "open3"
3
4
 
4
5
  module Backy
5
6
  class PgDump
@@ -10,9 +11,44 @@ module Backy
10
11
  DUMP_CMD_OPTS = "--no-acl --no-owner --no-subscriptions --no-publications"
11
12
 
12
13
  def call
14
+ setup_backup_directory
15
+ log_start
16
+
17
+ begin
18
+ handle_replication { backup }
19
+ rescue => e
20
+ Logger.error("An error occurred during backup: #{e.message}")
21
+ ensure
22
+ if replica? && pause_replication?
23
+ log_replication_resume
24
+ end
25
+ end
26
+ end
27
+
28
+ private
29
+
30
+ def setup_backup_directory
13
31
  FileUtils.mkdir_p(DUMP_DIR)
32
+ end
33
+
34
+ def log_start
14
35
  Logger.log("Starting backy for #{database}")
36
+ end
37
+
38
+ def handle_replication
39
+ if replica? && pause_replication?
40
+ if pause_replication
41
+ Logger.log("Replication paused.")
42
+ yield
43
+ else
44
+ Logger.error("Failed to pause replication. Aborting backup.")
45
+ end
46
+ else
47
+ yield
48
+ end
49
+ end
15
50
 
51
+ def backup
16
52
  if use_parallel?
17
53
  Logger.log("Using multicore dump with pigz")
18
54
  parallel_backup
@@ -22,64 +58,47 @@ module Backy
22
58
  end
23
59
  end
24
60
 
25
- private
61
+ def log_replication_resume
62
+ if resume_replication
63
+ Logger.log("Replication resumed.")
64
+ else
65
+ Logger.error("Failed to resume replication. Manual intervention required.")
66
+ end
67
+ end
26
68
 
27
69
  def plain_text_backup
28
- timestamp = Time.now.strftime("%Y%m%d_%H%M%S")
29
- dump_file = "#{DUMP_DIR}/#{database}_#{whoami}@#{hostname}_single_#{timestamp}.sql.gz"
70
+ timestamp = current_timestamp
71
+ dump_file = "#{DUMP_DIR}/#{database}_#{whoami}@#{hostname}_#{timestamp}.sql.gz"
30
72
 
31
73
  cmd = "(#{pg_password_env}pg_dump #{pg_credentials} #{database} #{DUMP_CMD_OPTS} | gzip -9 > #{dump_file}) 2>&1 >> #{log_file}"
32
74
 
33
- print "Saving to #{dump_file} ... "
75
+ Logger.log("Saving to #{dump_file} ... ")
34
76
 
35
- if system(cmd)
36
- Logger.success("done")
37
- else
38
- Logger.error("error. See #{log_file}")
39
- return
40
- end
41
-
42
- dump_file
77
+ execute_command(cmd, "error. See #{log_file}")
43
78
  end
44
79
 
45
80
  def parallel_backup
46
- timestamp = Time.now.strftime("%Y%m%d_%H%M%S")
81
+ timestamp = current_timestamp
47
82
  dump_dir = "#{DUMP_DIR}/#{database}_dump_parallel_#{timestamp}"
48
83
  dump_file = "#{dump_dir}.tar.gz"
49
84
 
50
- pg_dump_cmd = "pg_dump -Z0 -j #{Etc.nprocessors} -Fd #{database} -f #{dump_dir} #{pg_credentials} #{DUMP_CMD_OPTS}"
85
+ pg_dump_cmd = "#{pg_password_env}pg_dump -Z0 -j #{Etc.nprocessors} -Fd #{database} -f #{dump_dir} #{pg_credentials} #{DUMP_CMD_OPTS}"
51
86
  tar_cmd = "tar -cf - #{dump_dir} | pigz -p #{Etc.nprocessors} > #{dump_file}"
52
87
  cleanup_cmd = "rm -rf #{dump_dir}"
53
88
 
54
- Logger.log("Running pg_dump #{database}")
55
- if system("#{pg_password_env}#{pg_dump_cmd} 2>&1 >> #{log_file}")
56
- Logger.log("pg_dump completed successfully.")
57
- else
58
- Logger.error("pg_dump failed. See #{log_file} for details.")
59
- return
60
- end
61
-
62
- # Execute tar command
63
- Logger.log("Compressing #{dump_dir}")
64
- if system(tar_cmd)
65
- Logger.log("Compression completed successfully.")
66
- else
67
- Logger.error("Compression failed. See #{log_file} for details.")
68
- return
69
- end
70
-
71
- # Execute cleanup command
72
- Logger.log("Cleaning up #{dump_dir}")
73
- if system(cleanup_cmd)
74
- Logger.log("Cleanup completed successfully.")
75
- else
76
- Logger.error("Cleanup failed. See #{log_file} for details.")
77
- return
78
- end
89
+ execute_command("#{pg_password_env}#{pg_dump_cmd} 2>&1 >> #{log_file}", "pg_dump failed. See #{log_file} for details.")
90
+ execute_command(tar_cmd, "Compression failed. See #{log_file} for details.")
91
+ execute_command(cleanup_cmd, "Cleanup failed. See #{log_file} for details.")
79
92
 
80
93
  Logger.success("Backup process completed. Output file: #{dump_file}")
94
+ end
95
+
96
+ def execute_command(cmd, error_message)
97
+ Logger.error(error_message) unless system(cmd)
98
+ end
81
99
 
82
- dump_file # Return the name of the dump file
100
+ def current_timestamp
101
+ Time.now.strftime("%Y%m%d_%H%M%S")
83
102
  end
84
103
 
85
104
  def hostname
@@ -89,5 +108,45 @@ module Backy
89
108
  def whoami
90
109
  @whoami ||= `whoami`.strip
91
110
  end
111
+
112
+ def pause_replication
113
+ query = "SELECT pg_wal_replay_pause();"
114
+ success, _output = execute_sql(query)
115
+ success
116
+ end
117
+
118
+ def resume_replication
119
+ query = "SELECT pg_wal_replay_resume();"
120
+ success, _output = execute_sql(query)
121
+ success
122
+ end
123
+
124
+ def execute_sql(query)
125
+ command = %(#{pg_password_env}psql #{pg_credentials} -d #{database} -c "#{query}")
126
+ output = ""
127
+ Open3.popen3(command) do |_stdin, stdout, stderr, wait_thr|
128
+ while (line = stdout.gets)
129
+ output << line
130
+ end
131
+ while (line = stderr.gets)
132
+ puts "Error: #{line}"
133
+ end
134
+ exit_status = wait_thr.value
135
+ [exit_status.success?, output]
136
+ end
137
+ end
138
+
139
+ def replica?
140
+ @is_replica ||= begin
141
+ query = "SELECT pg_is_in_recovery();"
142
+ success, output = execute_sql(query)
143
+ if success && output.include?("t")
144
+ Logger.log("Database is a replica.")
145
+ true
146
+ else
147
+ false
148
+ end
149
+ end
150
+ end
92
151
  end
93
152
  end
@@ -10,16 +10,16 @@ module Backy
10
10
  end
11
11
 
12
12
  def call
13
- pigz_installed = system('which pigz > /dev/null 2>&1')
13
+ pigz_installed = system("which pigz > /dev/null 2>&1")
14
14
  multicore = Etc.nprocessors > 1
15
15
  use_multicore = ENV["BACKY_USE_PARALLEL"] == "true"
16
16
 
17
17
  if pigz_installed && multicore && use_multicore
18
- Logger.log('Using parallel restore with pigz')
18
+ Logger.log("Using parallel restore with pigz")
19
19
  parallel_restore
20
20
  else
21
21
  Logger.log("Pigz not installed or system is not multicore")
22
- Logger.log('Using plain text restore')
22
+ Logger.log("Using plain text restore")
23
23
  plain_text_restore
24
24
  end
25
25
  end
data/lib/backy/s3.rb CHANGED
@@ -1,5 +1,5 @@
1
1
  require "aws-sdk-s3"
2
- require 'forwardable'
2
+ require "forwardable"
3
3
 
4
4
  module Backy
5
5
  module S3
data/lib/backy/version.rb CHANGED
@@ -1,3 +1,3 @@
1
1
  module Backy
2
- VERSION = "0.1.8"
2
+ VERSION = "0.2.0"
3
3
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: backy_rb
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.8
4
+ version: 0.2.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Alexey Kharchenko
@@ -10,7 +10,7 @@ authors:
10
10
  autorequire:
11
11
  bindir: bin
12
12
  cert_chain: []
13
- date: 2024-06-22 00:00:00.000000000 Z
13
+ date: 2024-06-24 00:00:00.000000000 Z
14
14
  dependencies:
15
15
  - !ruby/object:Gem::Dependency
16
16
  name: rspec
@@ -301,7 +301,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
301
301
  - !ruby/object:Gem::Version
302
302
  version: '0'
303
303
  requirements: []
304
- rubygems_version: 3.5.5
304
+ rubygems_version: 3.4.22
305
305
  signing_key:
306
306
  specification_version: 4
307
307
  summary: Backy is a powerful and user-friendly database backup gem designed specifically