ops_backups 0.1.4 → 0.1.8
- checksums.yaml +4 -4
- data/bin/pgdb +3 -0
- data/bin/pgdb.sh +383 -0
- data/lib/ops_backups/version.rb +1 -1
- metadata +11 -8
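For context, picking up this release in a Bundler-managed Rails app would look roughly like the following (a minimal sketch; it assumes ops_backups is already listed in the Gemfile):

    # update an existing Gemfile entry to the new release
    bundle update ops_backups

    # or install the gem directly at the version shown above
    gem install ops_backups -v 0.1.8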
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 6162626296f9b33f706c46ad74419d1580deb96117cc6cfb994c1cab6606517e
+  data.tar.gz: aa24b48098a259532ccb7f46aa103d0103b358b0c488d6d19c9b1e87f9a357e7
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: fd70dada6f380d7ec9a841a1b8d1b021cffe3531bf1766db1527acebc2b92bdba8e46fc7f9cfd71fd6826bd43a41a4c3a33981453571f8f6a338bad4930234e4
+  data.tar.gz: 20e4668207a653ce6c3b2a48c8dd1a752cf305701b45c5f68dcf0d3609ec3018d79f71516d50616a3d2fea7f180620907f522350481f82f23156d8df0c9780ad
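These checksums cover the metadata.gz and data.tar.gz archives packaged inside the .gem file. A rough way to cross-check them locally (a sketch, assuming gem, tar and sha256sum are available):

    # download the packaged gem without installing it
    gem fetch ops_backups --version 0.1.8

    # a .gem file is a tar archive; extract the two checksummed members
    tar -xf ops_backups-0.1.8.gem metadata.gz data.tar.gz

    # compare against the SHA256 values listed above
    sha256sum metadata.gz data.tar.gz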
data/bin/pgdb
ADDED
data/bin/pgdb.sh
ADDED
@@ -0,0 +1,383 @@
+#!/bin/bash
+
+# Function to calculate the backup database name
+backup_db_name() {
+  local target_db_name="$1"
+  local current_date=$(date +%Y%m%d)
+  local timestamp=$(date +%H%M%S)
+  echo "${target_db_name}_${current_date}_${timestamp}"
+}
+
+# Function to rename a PostgreSQL database
+# Usage: db_rename <db_url> <old_db_name> <new_db_name>
+db_rename() {
+  local db_url="$1"
+  local old_db_name="$2"
+  local new_db_name="$3"
+
+  if [[ -z "$old_db_name" || -z "$new_db_name" || -z "$db_url" ]]; then
+    echo "Usage: rename <db_url> <old_db_name> <new_db_name>"
+    exit 1
+  fi
+
+  echo "Renaming database from $old_db_name to $new_db_name on $db_url"
+  # Run the ALTER DATABASE command to rename the database
+  psql "$db_url" -c "ALTER DATABASE \"$old_db_name\" RENAME TO \"$new_db_name\";"
+}
+
+# Function to create a PostgreSQL database
+# Usage: db_create <db_url> <new_db_name>
+db_create() {
+  local db_url="$1"
+  local new_db_name="$2"
+
+  if [[ -z "$new_db_name" || -z "$db_url" ]]; then
+    echo "Usage: create <db_url> <new_db_name>"
+    exit 1
+  fi
+
+  echo "Creating database $new_db_name on $db_url"
+  # Run the CREATE DATABASE command to rename the database
+  psql "$db_url" -c "CREATE DATABASE \"$new_db_name\";"
+}
+
+# Function for pulling a database from a remote server to the local machine
+# Usage: db_pull <db_url> <db_name>
+db_pull() {
+  local db_url="$1"
+  local db_name="$2"
+  local dump_file="${db_name}.dump"
+
+  # Step 1: Dump the remote database
+  echo "Dumping database from $db_url to $dump_file..."
+
+  local exclude=""
+  if [[ -n "$EXCLUDE_TABLE_DATA" ]]; then
+    exclude="--exclude-table-data=$EXCLUDE_TABLE_DATA"
+  fi
+  pg_dump --no-owner -v -Fc $exclude -f "$dump_file" "$db_url"
+
+  # Step 2: Check if local database exists, prompt for confirmation, and drop if confirmed
+  if psql -lqt | cut -d \| -f 1 | grep -qw "$db_name"; then
+    read -p "Database $db_name already exists. Do you want to drop it and recreate? (y/n): " confirm
+    if [[ "$confirm" != "y" ]]; then
+      echo "Aborting..."
+      exit 1
+    fi
+    echo "Dropping existing database..."
+    dropdb "$db_name"
+  fi
+
+  # Step 3: Create a new local database
+  echo "Creating local database..."
+  createdb "$db_name"
+
+  # Step 4: Restore the dump to the new local database
+  echo "Restoring database..."
+  pg_restore --no-owner --no-comments -v -d "postgres://localhost/$db_name" "$dump_file"
+
+  echo "Database pull completed: $db_name"
+}
+
+
+# Function for "push" subcommand
+# Usage: db_push <local_db_name> <remote_db_url>
+db_push() {
+  local local_db_name="$1"
+  local remote_db_url="$2"
+  local dump_file="$(backup_db_name "$local_db_name").dump"
+
+  # Dump the local database
+  echo "Dumping local database..."
+  local exclude=""
+  if [[ -n "$EXCLUDE_TABLE_DATA" ]]; then
+    exclude="--exclude-table-data=$EXCLUDE_TABLE_DATA"
+  fi
+  echo pg_dump --no-owner -v -Fc $exclude -f $dump_file postgres://localhost/$local_db_name
+  pg_dump --no-owner -v -Fc $exclude -f $dump_file postgres://localhost/$local_db_name
+
+  # Restore the dump to the new remote database
+  echo pg_restore --no-owner --no-comments -v -d $remote_db_url $dump_file
+  pg_restore --no-owner --no-comments -v -d $remote_db_url $dump_file
+
+  echo "Database push completed: $local_db_name"
+}
+
+# Function for copying a database from one server to another
+# Usage: db_copy <source_db_url> <target_db_url>
+db_copy() {
+  local source_db_url="$1"
+  local target_db_url="$2"
+  local current_date=$(date +%Y%m%d)
+  local timestamp=$(date +%H%M%S)
+  local source_db_name="${source_db_url##*/}"
+  local target_db_name="${target_db_url##*/}"
+  local dump_file="${source_db_name}_${target_db_name}_${current_date}_${timestamp}.dump"
+
+  # Step 1: Dump the source database
+  echo "Dumping source database..."
+  local exclude=""
+  if [[ -n "$EXCLUDE_TABLE_DATA" ]]; then
+    exclude="--exclude-table-data=$EXCLUDE_TABLE_DATA"
+  fi
+  pg_dump --no-owner -v -Fc $exclude -f "$dump_file" "$source_db_url"
+
+  # stop if the dump failed
+  if [[ $? -ne 0 ]]; then
+    echo "Dump failed. Aborting..."
+    exit 1
+  fi
+
+  # Step 2: Create the target database (and backup if needed)
+  local backup_db_name=$(backup_db_name "$target_db_name")
+  local source_base_url="${source_db_url%/*}"
+  local target_base_url="${target_db_url%/*}"
+  db_rename "$target_base_url" "$target_db_name" "$backup_db_name"
+  echo "Creating new target database: $target_db_name..."
+  psql "$target_base_url" -c "CREATE DATABASE \"$target_db_name\";"
+
+  # Step 3: Restore the dump to the target database
+  echo "Restoring dump to target database..."
+  pg_restore --no-owner --no-comments -v -d "$target_db_url" "$dump_file"
+
+  echo "Database copy completed: $source_db_name to $target_db_name"
+}
+
+# function to rename a database
+# Usage: db_rename <base_url> <old_name> <new_name>
+db_rename() {
+  local base_url="$1"
+  local old_name="$2"
+  local new_name="$3"
+  # Run the ALTER DATABASE command to rename the database
+  psql "$base_url" -c "ALTER DATABASE \"$old_name\" RENAME TO \"$new_name\";"
+}
+
+# function to clone a database on the server
+# Usage: db_clone_on_server <base_url> <db_name> <clone_db_name:optional>
+db_clone_on_server() {
+  local base_url="$1"
+  local db_name="$2"
+  # if the third param is available, use it as the backup db name
+  # else append current_date to db_name
+  if [[ -n "$3" ]]; then
+    local clone_db_name="$3"
+  else
+    local clone_db_name=$(backup_db_name "$db_name")
+  fi
+  psql "$base_url" -c "CREATE DATABASE \"$clone_db_name\" WITH TEMPLATE \"$db_name\";"
+  # check if the clone db was created
+  if psql "$base_url" -lqt | cut -d \| -f 1 | grep -qw "$clone_db_name"; then
+    echo "Database $db_name clones as $clone_db_name"
+  fi
+}
+
+# Function to extract DATABASE_URL from the secrets file
+fetch_db_url() {
+  local env="$1"
+
+  # If the env starts with postgres://, return it as is
+  if [[ "$env" == postgres://* ]]; then
+    echo "$env"
+    return
+  fi
+
+  local secrets_file=".kamal/secrets.${env}"
+
+  if [[ ! -f "$secrets_file" ]]; then
+    echo "Secrets file not found: $secrets_file"
+    exit 1
+  fi
+
+  # Extract DATABASE_URL from the secrets file
+  local db_url=$(awk -F '=' '/^DATABASE_URL=/{print $2}' "$secrets_file")
+
+  if [[ -z "$db_url" ]]; then
+    echo "DATABASE_URL not found in $secrets_file"
+    exit 1
+  fi
+
+  echo "$db_url"
+}
+
+command="$1"
+env="$2"
+current_date=$(date +%Y%m%d)
+timestamp=$(date +%H%M%S)
+
+# Fetch the DATABASE_URL from the secrets file
+db_url=$(fetch_db_url "$env")
+# Extract everything before the last '/'
+base_url="${db_url%/*}"
+# Extract the database name after the last '/'
+db_name="${db_url##*/}"
+
+case "$command" in
+  console)
+    echo "url: $db_url"
+    psql "$db_url"
+    ;;
+
+  copy)
+    if [[ $# -lt 3 ]]; then
+      echo "Usage: pgdb copy <source_environment> <target_environment>"
+      exit 1
+    fi
+
+    source_environment="$2"
+    target_environment="$3"
+
+    kamal app stop -d "$source_environment"
+    kamal app stop -d "$target_environment"
+
+    # Fetch the source and target DATABASE_URLs
+    source_db_url=$db_url
+    target_db_url=$(fetch_db_url "$3")
+
+    source_base_url="${source_db_url%/*}"
+    target_base_url="${target_db_url%/*}"
+
+    # if source and target base are the same, copy on the server
+    # else copy between servers using pg_dump and pg_restore
+    if [[ "$source_base_url" == "$target_base_url" ]]; then
+      echo "Renaming current target database from $3 to $3_${current_date}_${timestamp}"
+      backup_db_name=$(backup_db_name "$3")
+      db_rename "$base_url" "$3" "$backup_db_name"
+      # echo "Cloning database from $2 to $3"
+      db_clone_on_server "$base_url" "$2" "$3"
+    else
+      # Copy the source DB to the target DB
+      db_copy "$source_db_url" "${target_db_url}"
+    fi
+
+    kamal app boot -d "$source_environment"
+    kamal app boot -d "$target_environment"
+    ;;
+
+  pull)
+    if (( $# < 2 || $# > 3 )); then
+      echo "Usage: pgdb pull <environment> <local_db_name:optional>"
+      exit 1
+    fi
+    if [[ -n "$3" ]]; then
+      local_db_name="$3"
+    else
+      local_db_name=$(backup_db_name "$env")
+    fi
+    db_pull "$db_url" "$local_db_name"
+    ;;
+
+  push)
+    if [[ $# -lt 3 ]]; then
+      echo "Usage: pgdb push <environment> <local_db_name>"
+      exit 1
+    fi
+    local_db_name="$3"
+    backup_db_name=$(backup_db_name "$db_name")
+
+    kamal app stop -d "$env"
+    db_rename "$base_url" "$db_name" "$backup_db_name"
+    db_create "$base_url" "$db_name"
+    db_push "$local_db_name" "$db_url"
+    kamal app boot -d "$env"
+    ;;
+
+  rename)
+    if (( $# < 2 || $# > 3 )); then
+      echo "Usage: pgdb rename <environment> <new_db_name:optional>"
+      exit 1
+    fi
+
+    # if $3 present, use it as new_db_name else append current_date to db_name
+    if [[ -n "$3" ]]; then
+      new_db_name="$3"
+    else
+      new_db_name="${db_name}_${current_date}"
+    fi
+
+    db_rename "$base_url" "$db_name" "$new_db_name"
+    ;;
+
+  list)
+    echo "Listing databases for $base_url"
+    echo
+    psql "$base_url" -c "\l" | grep "$db_name"
+    ;;
+
+  drop)
+    echo "Dropping database $db_name"
+    # ask for confirmation
+    read -p "Are you sure you want to drop the database $db_name? (y/n): " -n 1 -r
+    # abort if not confirmed
+    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
+      echo
+      echo "Aborted!"
+      exit 1
+    fi
+    psql "$base_url" -c "DROP DATABASE \"$db_name\";"
+    ;;
+
+  backup:local)
+    echo "Backing up database $db_name on the remote to a local dump"
+    backup_db_name=$(backup_db_name "$db_name")
+    db_pull "$db_url" "$backup_db_name"
+    ;;
+
+  backup)
+    echo "Backing up database $db_name on the remote"
+    kamal app stop -d "$env"
+    db_clone_on_server "$base_url" "$db_name"
+    kamal app boot -d "$env"
+    ;;
+
+  revert)
+    echo "Reverting database $db_name from $3"
+    # ask for confirmation
+    read -p "Are you sure you want to revert the database $db_name to $3? (y/n): " -n 1 -r
+    # abort if not confirmed
+    echo
+    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
+      echo "Aborted!"
+      exit 1
+    fi
+    # check if the chosen db exists
+    if ! psql "$base_url" -lqt | cut -d \| -f 1 | grep -qw "$3"; then
+      echo "Database $3 does not exist, aborting the revert!"
+      exit 1
+    fi
+    kamal app stop -d "$env"
+    # clone to the default backup name
+    db_clone_on_server "$base_url" "$db_name"
+    # drop the current db
+    psql "$base_url" -c "DROP DATABASE \"$db_name\";"
+    # rename the chosen db to the original db name
+    db_rename "$base_url" "$db_name" "$3"
+    kamal app boot -d "$env"
+    ;;
+
+  create)
+    db_create "$base_url" "$env"
+    ;;
+
+  *)
+    echo "Unknown command: $command"
+    echo "Usage: pgdb <command> <environment> [<additional_args>]"
+    echo
+    echo "Available commands:"
+    echo "  console <environment> - Opens a database console for the specified environment."
+    echo "  create <environment> - Creates a new database for the specified environment."
+    echo "  revert <environment> <backup_db_name> - Reverts the database to a specified backup."
+    echo "  backup <environment> - Creates a clone of the remote database as a backup on the server."
+    echo "  backup:local <environment> - Backs up the remote database to a local dump file."
+    echo "  drop <environment> - Drops the database of the specified environment after confirmation."
+    echo "  pull <environment|url> [local_db_name] - Pulls the remote database to a local database with an optional custom name (optional)."
+    echo "  push <environment> <local_db_name> - Pushes a local database to the remote environment, creating a backup of the remote."
+    echo "  rename <environment> [new_db_name] - Renames the main database of the specified environment, optionally using a new name (optional)."
+    echo "  copy <source_env> <target_env> - Copies a database from one environment to another, with backup of the target database."
+    echo "  list <environment> - Lists databases for the base URL associated with the environment."
+    echo
+    echo "set EXCLUDE_TABLE_DATA to exclude data from specific tables on export"
+    echo "e.g. EXCLUDE_TABLE_DATA=versions bin/pgdb pull staging"
+    exit 1
+    ;;
+esac
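Going by the script's own usage text, typical invocations look like this (staging and production are placeholder environment names; the script resolves DATABASE_URL from .kamal/secrets.<environment>, or accepts a postgres:// URL directly):

    # open a psql console against the environment's database
    bin/pgdb console production

    # clone the remote database on the server as a timestamped backup
    bin/pgdb backup production

    # pull the staging database into a local database, skipping data for the versions table
    EXCLUDE_TABLE_DATA=versions bin/pgdb pull staging

    # copy the staging database over production, backing up the current target first
    bin/pgdb copy staging production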
data/lib/ops_backups/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ops_backups
 version: !ruby/object:Gem::Version
-  version: 0.1.
+  version: 0.1.8
 platform: ruby
 authors:
 - Koen Handekyn
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-11-
+date: 2024-11-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rails
@@ -28,7 +28,8 @@ description: This gem provides functionality to backup PostgreSQL databases to A
   (S3) from within a Rails context.
 email:
 - github.com@handekyn.com
-executables:
+executables:
+- pgdb
 extensions: []
 extra_rdoc_files: []
 files:
@@ -46,6 +47,8 @@ files:
 - app/models/ops_backups/application_record.rb
 - app/models/ops_backups/backup.rb
 - app/views/layouts/ops_backups/application.html.erb
+- bin/pgdb
+- bin/pgdb.sh
 - config/locales/en.yml
 - config/routes.rb
 - db/migrate/20241114173612_create_ops_backups.rb
@@ -57,13 +60,13 @@ files:
 - lib/ops_backups/engine.rb
 - lib/ops_backups/version.rb
 - lib/tasks/ops_backups_tasks.rake
-homepage: https://github.com/koenhandekyn/
+homepage: https://github.com/koenhandekyn/ops_backups
 licenses:
 - MIT
 metadata:
-  homepage_uri: https://github.com/koenhandekyn/
-  source_code_uri: https://github.com/koenhandekyn/
-  changelog_uri: https://github.com/koenhandekyn/
+  homepage_uri: https://github.com/koenhandekyn/ops_backups
+  source_code_uri: https://github.com/koenhandekyn/ops_backups/tree/main
+  changelog_uri: https://github.com/koenhandekyn/ops_backups/blob/main/CHANGELOG.md
 post_install_message:
 rdoc_options: []
 require_paths:
@@ -79,7 +82,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.5.
+rubygems_version: 3.5.23
 signing_key:
 specification_version: 4
 summary: A Ruby gem for managing PostgreSQL backups.
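After installing 0.1.8, the new executable registered in the metadata above can be confirmed locally (a sketch using standard RubyGems commands):

    # the executables field of the installed gem should list pgdb
    gem specification ops_backups executables

    # locate the shipped script files
    gem contents ops_backups | grep pgdb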