thrivekit 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/commands/explain.md +114 -0
- package/.claude/commands/idea.md +370 -0
- package/.claude/commands/my-dna.md +122 -0
- package/.claude/commands/prd.md +286 -0
- package/.claude/commands/review.md +167 -0
- package/.claude/commands/sign.md +32 -0
- package/.claude/commands/styleguide.md +450 -0
- package/.claude/commands/tour.md +301 -0
- package/.claude/commands/vibe-check.md +116 -0
- package/.claude/commands/vibe-help.md +47 -0
- package/.claude/commands/vibe-list.md +203 -0
- package/.claude/settings.json +75 -0
- package/.claude/settings.local.json +12 -0
- package/.pre-commit-hooks.yaml +102 -0
- package/LICENSE +21 -0
- package/README.md +214 -0
- package/bin/postinstall.sh +29 -0
- package/bin/ralph.sh +171 -0
- package/bin/thrivekit.sh +24 -0
- package/bin/vibe-check.js +19 -0
- package/dist/checks/check-any-types.d.ts +6 -0
- package/dist/checks/check-any-types.d.ts.map +1 -0
- package/dist/checks/check-any-types.js +73 -0
- package/dist/checks/check-any-types.js.map +1 -0
- package/dist/checks/check-commented-code.d.ts +6 -0
- package/dist/checks/check-commented-code.d.ts.map +1 -0
- package/dist/checks/check-commented-code.js +81 -0
- package/dist/checks/check-commented-code.js.map +1 -0
- package/dist/checks/check-console-error.d.ts +6 -0
- package/dist/checks/check-console-error.d.ts.map +1 -0
- package/dist/checks/check-console-error.js +41 -0
- package/dist/checks/check-console-error.js.map +1 -0
- package/dist/checks/check-debug-statements.d.ts +6 -0
- package/dist/checks/check-debug-statements.d.ts.map +1 -0
- package/dist/checks/check-debug-statements.js +120 -0
- package/dist/checks/check-debug-statements.js.map +1 -0
- package/dist/checks/check-deep-nesting.d.ts +6 -0
- package/dist/checks/check-deep-nesting.d.ts.map +1 -0
- package/dist/checks/check-deep-nesting.js +116 -0
- package/dist/checks/check-deep-nesting.js.map +1 -0
- package/dist/checks/check-docker-platform.d.ts +6 -0
- package/dist/checks/check-docker-platform.d.ts.map +1 -0
- package/dist/checks/check-docker-platform.js +42 -0
- package/dist/checks/check-docker-platform.js.map +1 -0
- package/dist/checks/check-dry-violations.d.ts +6 -0
- package/dist/checks/check-dry-violations.d.ts.map +1 -0
- package/dist/checks/check-dry-violations.js +124 -0
- package/dist/checks/check-dry-violations.js.map +1 -0
- package/dist/checks/check-empty-catch.d.ts +6 -0
- package/dist/checks/check-empty-catch.d.ts.map +1 -0
- package/dist/checks/check-empty-catch.js +111 -0
- package/dist/checks/check-empty-catch.js.map +1 -0
- package/dist/checks/check-function-length.d.ts +6 -0
- package/dist/checks/check-function-length.d.ts.map +1 -0
- package/dist/checks/check-function-length.js +152 -0
- package/dist/checks/check-function-length.js.map +1 -0
- package/dist/checks/check-hardcoded-ai-models.d.ts +10 -0
- package/dist/checks/check-hardcoded-ai-models.d.ts.map +1 -0
- package/dist/checks/check-hardcoded-ai-models.js +102 -0
- package/dist/checks/check-hardcoded-ai-models.js.map +1 -0
- package/dist/checks/check-hardcoded-urls.d.ts +6 -0
- package/dist/checks/check-hardcoded-urls.d.ts.map +1 -0
- package/dist/checks/check-hardcoded-urls.js +124 -0
- package/dist/checks/check-hardcoded-urls.js.map +1 -0
- package/dist/checks/check-magic-numbers.d.ts +6 -0
- package/dist/checks/check-magic-numbers.d.ts.map +1 -0
- package/dist/checks/check-magic-numbers.js +116 -0
- package/dist/checks/check-magic-numbers.js.map +1 -0
- package/dist/checks/check-secrets.d.ts +6 -0
- package/dist/checks/check-secrets.d.ts.map +1 -0
- package/dist/checks/check-secrets.js +138 -0
- package/dist/checks/check-secrets.js.map +1 -0
- package/dist/checks/check-snake-case-ts.d.ts +6 -0
- package/dist/checks/check-snake-case-ts.d.ts.map +1 -0
- package/dist/checks/check-snake-case-ts.js +78 -0
- package/dist/checks/check-snake-case-ts.js.map +1 -0
- package/dist/checks/check-todo-fixme.d.ts +6 -0
- package/dist/checks/check-todo-fixme.d.ts.map +1 -0
- package/dist/checks/check-todo-fixme.js +41 -0
- package/dist/checks/check-todo-fixme.js.map +1 -0
- package/dist/checks/check-unsafe-html.d.ts +6 -0
- package/dist/checks/check-unsafe-html.d.ts.map +1 -0
- package/dist/checks/check-unsafe-html.js +101 -0
- package/dist/checks/check-unsafe-html.js.map +1 -0
- package/dist/checks/index.d.ts +30 -0
- package/dist/checks/index.d.ts.map +1 -0
- package/dist/checks/index.js +57 -0
- package/dist/checks/index.js.map +1 -0
- package/dist/cli.d.ts +13 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +206 -0
- package/dist/cli.js.map +1 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +10 -0
- package/dist/index.js.map +1 -0
- package/dist/utils/file-reader.d.ts +24 -0
- package/dist/utils/file-reader.d.ts.map +1 -0
- package/dist/utils/file-reader.js +140 -0
- package/dist/utils/file-reader.js.map +1 -0
- package/dist/utils/patterns.d.ts +27 -0
- package/dist/utils/patterns.d.ts.map +1 -0
- package/dist/utils/patterns.js +84 -0
- package/dist/utils/patterns.js.map +1 -0
- package/dist/utils/reporters.d.ts +21 -0
- package/dist/utils/reporters.d.ts.map +1 -0
- package/dist/utils/reporters.js +115 -0
- package/dist/utils/reporters.js.map +1 -0
- package/dist/utils/types.d.ts +71 -0
- package/dist/utils/types.d.ts.map +1 -0
- package/dist/utils/types.js +5 -0
- package/dist/utils/types.js.map +1 -0
- package/package.json +82 -0
- package/ralph/api.sh +210 -0
- package/ralph/backup.sh +838 -0
- package/ralph/browser-verify/README.md +135 -0
- package/ralph/browser-verify/verify.ts +450 -0
- package/ralph/checks/check-fastapi-responses.py +155 -0
- package/ralph/hooks/hooks-config.json +72 -0
- package/ralph/hooks/inject-context.sh +44 -0
- package/ralph/hooks/install.sh +207 -0
- package/ralph/hooks/log-tools.sh +45 -0
- package/ralph/hooks/protect-prd.sh +27 -0
- package/ralph/hooks/save-learnings.sh +36 -0
- package/ralph/hooks/warn-debug.sh +54 -0
- package/ralph/hooks/warn-empty-catch.sh +63 -0
- package/ralph/hooks/warn-secrets.sh +89 -0
- package/ralph/hooks/warn-urls.sh +77 -0
- package/ralph/init.sh +388 -0
- package/ralph/loop.sh +570 -0
- package/ralph/playwright.sh +238 -0
- package/ralph/prd.sh +295 -0
- package/ralph/setup/feature-tour.sh +155 -0
- package/ralph/setup/quick-setup.sh +239 -0
- package/ralph/setup/tutorial.sh +159 -0
- package/ralph/setup/ui.sh +136 -0
- package/ralph/setup.sh +353 -0
- package/ralph/signs.sh +150 -0
- package/ralph/utils.sh +682 -0
- package/ralph/verify/browser.sh +324 -0
- package/ralph/verify/lint.sh +363 -0
- package/ralph/verify/review.sh +164 -0
- package/ralph/verify/tests.sh +81 -0
- package/ralph/verify.sh +224 -0
- package/templates/PROMPT.md +235 -0
- package/templates/config/fullstack.json +86 -0
- package/templates/config/go.json +81 -0
- package/templates/config/minimal.json +76 -0
- package/templates/config/node.json +81 -0
- package/templates/config/python.json +81 -0
- package/templates/config/rust.json +81 -0
- package/templates/examples/CLAUDE-django.md +174 -0
- package/templates/examples/CLAUDE-fastapi.md +270 -0
- package/templates/examples/CLAUDE-fastmcp.md +352 -0
- package/templates/examples/CLAUDE-fullstack.md +256 -0
- package/templates/examples/CLAUDE-node.md +246 -0
- package/templates/examples/CLAUDE-react.md +138 -0
- package/templates/optional/cursorrules.template +147 -0
- package/templates/optional/eslint.config.js +34 -0
- package/templates/optional/lint-staged.config.js +34 -0
- package/templates/optional/ruff.toml +125 -0
- package/templates/optional/vibe-check.yml +116 -0
- package/templates/optional/vscode-settings.json +127 -0
- package/templates/signs.json +46 -0
package/ralph/backup.sh
ADDED
@@ -0,0 +1,838 @@
+#!/usr/bin/env bash
+# shellcheck shell=bash
+# backup.sh - Database backup management for ralph
+
+# Constants
+readonly DEFAULT_MAX_BACKUPS=15
+readonly BACKUP_DIR=".backups"
+readonly LARGE_DB_THRESHOLD_MB=100
+
+# Quiet mode flag (set via --quiet)
+BACKUP_QUIET=false
+
+# Parse MySQL URL into components
+# Usage: parse_mysql_url "mysql://user:pass@host:port/dbname"
+# Sets: MYSQL_USER, MYSQL_PASSWORD, MYSQL_HOST, MYSQL_PORT, MYSQL_DB
+parse_mysql_url() {
+    local url="$1"
+
+    # Reset variables
+    MYSQL_USER="" MYSQL_PASSWORD="" MYSQL_HOST="" MYSQL_PORT="" MYSQL_DB=""
+
+    # Parse URL: mysql://user:pass@host:port/dbname
+    if [[ "$url" =~ mysql://([^:]+):([^@]+)@([^:]+):([0-9]+)/([^?]+) ]]; then
+        MYSQL_USER="${BASH_REMATCH[1]}"
+        MYSQL_PASSWORD="${BASH_REMATCH[2]}"
+        MYSQL_HOST="${BASH_REMATCH[3]}"
+        MYSQL_PORT="${BASH_REMATCH[4]}"
+        MYSQL_DB="${BASH_REMATCH[5]}"
+        return 0
+    elif [[ "$url" =~ mysql://([^:]+):([^@]+)@([^/]+)/([^?]+) ]]; then
+        MYSQL_USER="${BASH_REMATCH[1]}"
+        MYSQL_PASSWORD="${BASH_REMATCH[2]}"
+        MYSQL_HOST="${BASH_REMATCH[3]}"
+        MYSQL_PORT="3306"
+        MYSQL_DB="${BASH_REMATCH[4]}"
+        return 0
+    fi
+
+    return 1
+}
+
+# Cross-platform file size in MB
+get_file_size_mb() {
+    local file="$1"
+    if [[ "$(uname)" == "Darwin" ]]; then
+        stat -f%z "$file" 2>/dev/null | awk '{print int($1/1048576)}'
+    else
+        stat -c%s "$file" 2>/dev/null | awk '{print int($1/1048576)}'
+    fi
+}
+
+# Print only if not in quiet mode
+backup_info() {
+    [[ "$BACKUP_QUIET" == false ]] && print_info "$1"
+}
+
+backup_warning() {
+    [[ "$BACKUP_QUIET" == false ]] && print_warning "$1"
+}
+
+backup_success() {
+    [[ "$BACKUP_QUIET" == false ]] && print_success "$1"
+}
+
+# ============================================================================
+# DATABASE DETECTION
+# ============================================================================
+
+# Find SQLite database files (excluding common non-data directories)
+detect_sqlite() {
+    find . -maxdepth 4 -type f \( -name "*.db" -o -name "*.sqlite" -o -name "*.sqlite3" \) \
+        -not -path "*/node_modules/*" \
+        -not -path "*/.git/*" \
+        -not -path "*/.venv/*" \
+        -not -path "*/venv/*" \
+        -not -path "*/__pycache__/*" \
+        -not -path "*/.backups/*" \
+        2>/dev/null
+}
+
+# Check for PostgreSQL or Supabase DATABASE_URL
+detect_postgres() {
+    local url=""
+
+    # Check environment variables (including Supabase variants)
+    if [[ -n "${DATABASE_URL:-}" ]]; then
+        url="$DATABASE_URL"
+    elif [[ -n "${SUPABASE_DB_URL:-}" ]]; then
+        url="$SUPABASE_DB_URL"
+    elif [[ -n "${POSTGRES_URL:-}" ]]; then
+        url="$POSTGRES_URL"
+    # Then check .env file
+    elif [[ -f ".env" ]]; then
+        url=$(grep -E '^(DATABASE_URL|SUPABASE_DB_URL|POSTGRES_URL)=' .env 2>/dev/null | head -1 | cut -d'=' -f2- | tr -d '"' | tr -d "'")
+    fi
+
+    # Only return if it's a PostgreSQL URL (includes Supabase which uses postgres://)
+    if [[ "$url" =~ ^postgres(ql)?:// ]]; then
+        echo "$url"
+        return 0
+    fi
+    return 1
+}
+
+# Check for MySQL URL
+detect_mysql() {
+    local url=""
+
+    # Check MYSQL_URL first
+    if [[ -n "${MYSQL_URL:-}" ]]; then
+        url="$MYSQL_URL"
+    # Then DATABASE_URL if it's mysql
+    elif [[ -n "${DATABASE_URL:-}" && "${DATABASE_URL}" =~ ^mysql:// ]]; then
+        url="$DATABASE_URL"
+    # Then .env file
+    elif [[ -f ".env" ]]; then
+        url=$(grep -E '^MYSQL_URL=' .env 2>/dev/null | head -1 | cut -d'=' -f2- | tr -d '"' | tr -d "'")
+        if [[ -z "$url" ]]; then
+            url=$(grep -E '^DATABASE_URL=' .env 2>/dev/null | head -1 | cut -d'=' -f2- | tr -d '"' | tr -d "'")
+        fi
+    fi
+
+    if [[ "$url" =~ ^mysql:// ]]; then
+        echo "$url"
+        return 0
+    fi
+    return 1
+}
+
+# Check for MongoDB URL
+detect_mongodb() {
+    local url=""
+
+    # Check various env var names
+    if [[ -n "${MONGODB_URL:-}" ]]; then
+        url="$MONGODB_URL"
+    elif [[ -n "${MONGO_URL:-}" ]]; then
+        url="$MONGO_URL"
+    elif [[ -f ".env" ]]; then
+        url=$(grep -E '^MONGO(DB)?_URL=' .env 2>/dev/null | head -1 | cut -d'=' -f2- | tr -d '"' | tr -d "'")
+    fi
+
+    if [[ "$url" =~ ^mongodb(\+srv)?:// ]]; then
+        echo "$url"
+        return 0
+    fi
+    return 1
+}
+
+# ============================================================================
+# VERIFICATION FUNCTIONS (post-backup integrity checks)
+# ============================================================================
+
+# Verify SQLite backup integrity
+verify_sqlite_backup() {
+    local backup_file="$1"
+
+    if ! command -v sqlite3 &>/dev/null; then
+        return 0 # Can't verify without sqlite3, assume ok
+    fi
+
+    local result
+    result=$(sqlite3 "$backup_file" "PRAGMA integrity_check;" 2>&1)
+
+    if [[ "$result" == "ok" ]]; then
+        return 0
+    else
+        backup_warning "SQLite integrity check failed: $result"
+        return 1
+    fi
+}
+
+# Verify PostgreSQL backup has valid content
+verify_postgres_backup() {
+    local backup_file="$1"
+
+    # Check gzip integrity
+    if ! gzip -t "$backup_file" 2>/dev/null; then
+        backup_warning "PostgreSQL backup gzip corrupted"
+        return 1
+    fi
+
+    # Check for PostgreSQL dump header
+    if ! zcat "$backup_file" 2>/dev/null | head -20 | grep -q "PostgreSQL"; then
+        backup_warning "PostgreSQL backup missing valid header"
+        return 1
+    fi
+
+    return 0
+}
+
+# Verify MySQL backup has valid content
+verify_mysql_backup() {
+    local backup_file="$1"
+
+    # Check gzip integrity
+    if ! gzip -t "$backup_file" 2>/dev/null; then
+        backup_warning "MySQL backup gzip corrupted"
+        return 1
+    fi
+
+    # Check for MySQL dump markers
+    if ! zcat "$backup_file" 2>/dev/null | head -20 | grep -qE "(MySQL|mysqldump)"; then
+        backup_warning "MySQL backup missing valid header"
+        return 1
+    fi
+
+    return 0
+}
+
+# Verify MongoDB backup integrity
+verify_mongodb_backup() {
+    local backup_file="$1"
+
+    # Check gzip integrity
+    if ! gzip -t "$backup_file" 2>/dev/null; then
+        backup_warning "MongoDB backup gzip corrupted"
+        return 1
+    fi
+
+    # Check file has content (mongodump archives have binary content)
+    local size
+    size=$(get_file_size_mb "$backup_file")
+    if [[ "$size" -eq 0 ]]; then
+        # Check actual byte size for small DBs
+        local bytes
+        if [[ "$(uname)" == "Darwin" ]]; then
+            bytes=$(stat -f%z "$backup_file" 2>/dev/null)
+        else
+            bytes=$(stat -c%s "$backup_file" 2>/dev/null)
+        fi
+        if [[ "$bytes" -lt 100 ]]; then
+            backup_warning "MongoDB backup appears empty"
+            return 1
+        fi
+    fi
+
+    return 0
+}
+
+# ============================================================================
+# BACKUP FUNCTIONS
+# ============================================================================
+
+# Backup a SQLite database
+backup_sqlite() {
+    local db_path="$1"
+    local backup_dir="$2"
+    local timestamp="$3"
+
+    # Extract database name (without extension)
+    local db_name
+    db_name=$(basename "$db_path" | sed 's/\.[^.]*$//')
+
+    local backup_file="$backup_dir/sqlite/${db_name}-${timestamp}.db"
+    mkdir -p "$backup_dir/sqlite"
+
+    # Check file size and warn if large
+    local size_mb
+    size_mb=$(get_file_size_mb "$db_path")
+    if [[ "$size_mb" -gt "$LARGE_DB_THRESHOLD_MB" ]]; then
+        backup_warning "Large database (${size_mb}MB): $db_path - backup may be slow"
+    fi
+
+    # Use sqlite3 .backup command (handles WAL mode safely)
+    if command -v sqlite3 &>/dev/null; then
+        if sqlite3 "$db_path" ".backup '$backup_file'" 2>/dev/null; then
+            # Verify backup integrity
+            if verify_sqlite_backup "$backup_file"; then
+                backup_success "SQLite: $db_path -> $backup_file"
+                return 0
+            else
+                rm -f "$backup_file"
+                backup_warning "SQLite backup verification failed, removed: $backup_file"
+                return 1
+            fi
+        else
+            backup_warning "SQLite backup failed for $db_path (using file copy fallback)"
+        fi
+    fi
+
+    # Fallback to file copy if sqlite3 not available or failed
+    if cp "$db_path" "$backup_file" 2>/dev/null; then
+        # Verify backup integrity
+        if verify_sqlite_backup "$backup_file"; then
+            backup_success "SQLite (copy): $db_path -> $backup_file"
+            return 0
+        else
+            rm -f "$backup_file"
+            backup_warning "SQLite backup verification failed, removed: $backup_file"
+            return 1
+        fi
+    fi
+
+    backup_warning "Failed to backup SQLite: $db_path"
+    return 1
+}
+
+# Backup PostgreSQL database
+backup_postgres() {
+    local url="$1"
+    local backup_dir="$2"
+    local timestamp="$3"
+
+    if ! command -v pg_dump &>/dev/null; then
+        backup_warning "pg_dump not found. Install PostgreSQL client tools to backup PostgreSQL."
+        backup_info "  macOS: brew install postgresql"
+        backup_info "  Ubuntu: apt install postgresql-client"
+        return 1
+    fi
+
+    # Extract database name from URL
+    local db_name
+    db_name=$(echo "$url" | sed -E 's|.*/([^?]+).*|\1|')
+    [[ -z "$db_name" ]] && db_name="postgres"
+
+    local backup_file="$backup_dir/postgres/${db_name}-${timestamp}.sql.gz"
+    mkdir -p "$backup_dir/postgres"
+
+    # Run pg_dump with gzip compression
+    export PGCONNECT_TIMEOUT=10
+    if pg_dump "$url" 2>/dev/null | gzip > "$backup_file"; then
+        # Check if backup is empty (connection may have failed silently)
+        if [[ -s "$backup_file" ]]; then
+            # Verify backup integrity
+            if verify_postgres_backup "$backup_file"; then
+                backup_success "PostgreSQL: $db_name -> $backup_file"
+                return 0
+            else
+                rm -f "$backup_file"
+                backup_warning "PostgreSQL backup verification failed, removed: $backup_file"
+                return 1
+            fi
+        else
+            rm -f "$backup_file"
+            backup_warning "PostgreSQL backup is empty - connection may have failed"
+            return 1
+        fi
+    fi
+
+    rm -f "$backup_file"
+    backup_warning "PostgreSQL backup failed for $db_name"
+    return 1
+}
+
+# Backup MySQL database
+backup_mysql() {
+    local url="$1"
+    local backup_dir="$2"
+    local timestamp="$3"
+
+    if ! command -v mysqldump &>/dev/null; then
+        backup_warning "mysqldump not found. Install MySQL client tools to backup MySQL."
+        backup_info "  macOS: brew install mysql-client"
+        backup_info "  Ubuntu: apt install mysql-client"
+        return 1
+    fi
+
+    # Parse URL using shared helper
+    if ! parse_mysql_url "$url"; then
+        backup_warning "Cannot parse MySQL URL format"
+        return 1
+    fi
+
+    local backup_file="$backup_dir/mysql/${MYSQL_DB}-${timestamp}.sql.gz"
+    mkdir -p "$backup_dir/mysql"
+
+    # Run mysqldump with single-transaction for consistency
+    if mysqldump \
+        --host="$MYSQL_HOST" \
+        --port="$MYSQL_PORT" \
+        --user="$MYSQL_USER" \
+        --password="$MYSQL_PASSWORD" \
+        --single-transaction \
+        --quick \
+        --lock-tables=false \
+        --connect-timeout=10 \
+        "$MYSQL_DB" 2>/dev/null | gzip > "$backup_file"; then
+        if [[ -s "$backup_file" ]]; then
+            # Verify backup integrity
+            if verify_mysql_backup "$backup_file"; then
+                backup_success "MySQL: $MYSQL_DB -> $backup_file"
+                return 0
+            else
+                rm -f "$backup_file"
+                backup_warning "MySQL backup verification failed, removed: $backup_file"
+                return 1
+            fi
+        fi
+    fi
+
+    rm -f "$backup_file"
+    backup_warning "MySQL backup failed for $db_name"
+    return 1
+}
+
+# Backup MongoDB database
+backup_mongodb() {
+    local url="$1"
+    local backup_dir="$2"
+    local timestamp="$3"
+
+    if ! command -v mongodump &>/dev/null; then
+        backup_warning "mongodump not found. Install MongoDB Database Tools to backup MongoDB."
+        backup_info "  macOS: brew install mongodb-database-tools"
+        backup_info "  Ubuntu: apt install mongodb-database-tools"
+        return 1
+    fi
+
+    # Extract database name from URL
+    local db_name
+    db_name=$(echo "$url" | sed -E 's|.*/([^?]+).*|\1|')
+    [[ -z "$db_name" ]] && db_name="mongodb"
+
+    local backup_file="$backup_dir/mongodb/${db_name}-${timestamp}.gz"
+    mkdir -p "$backup_dir/mongodb"
+
+    # Run mongodump with archive and gzip
+    if mongodump \
+        --uri="$url" \
+        --archive="$backup_file" \
+        --gzip \
+        --quiet \
+        --serverSelectionTimeoutMS=10000 \
+        2>/dev/null; then
+        if [[ -s "$backup_file" ]]; then
+            # Verify backup integrity
+            if verify_mongodb_backup "$backup_file"; then
+                backup_success "MongoDB: $db_name -> $backup_file"
+                return 0
+            else
+                rm -f "$backup_file"
+                backup_warning "MongoDB backup verification failed, removed: $backup_file"
+                return 1
+            fi
+        fi
+    fi
+
+    rm -f "$backup_file"
+    backup_warning "MongoDB backup failed for $db_name"
+    return 1
+}
+
+# ============================================================================
+# CLEANUP / ROTATION
+# ============================================================================
+
+# Clean up old backups, keeping max_backups per database name
+cleanup_old_backups() {
+    local backup_dir="$1"
+    local max_backups="${2:-$DEFAULT_MAX_BACKUPS}"
+
+    for subdir in sqlite postgres mysql mongodb; do
+        local type_dir="$backup_dir/$subdir"
+        [[ ! -d "$type_dir" ]] && continue
+
+        # Get unique database names from backup files
+        # Files are named: {dbname}-{YYYYMMDD-HHMMSS}.{ext}
+        local db_names
+        db_names=$(ls "$type_dir" 2>/dev/null | sed 's/-[0-9]\{8\}-[0-9]\{6\}\..*//' | sort -u)
+
+        # Cleanup per database name
+        while IFS= read -r db_name; do
+            [[ -z "$db_name" ]] && continue
+
+            # List backups for this specific DB, newest first, delete beyond limit
+            ls -t "$type_dir/${db_name}"-* 2>/dev/null | tail -n +$((max_backups + 1)) | while read -r filepath; do
+                rm -f "$filepath"
+                backup_info "Removed old backup: $filepath"
+            done
+        done <<< "$db_names"
+    done
+}
+
+# ============================================================================
+# RESTORE FUNCTIONS
+# ============================================================================
+
+# Find original SQLite database path from backup filename
+find_sqlite_target() {
+    local db_name="$1"
+
+    # Search for matching database file
+    for ext in db sqlite sqlite3; do
+        local found
+        found=$(find . -maxdepth 4 -type f -name "${db_name}.${ext}" \
+            -not -path "*/node_modules/*" \
+            -not -path "*/.git/*" \
+            -not -path "*/.backups/*" \
+            2>/dev/null | head -1)
+        if [[ -n "$found" ]]; then
+            echo "$found"
+            return 0
+        fi
+    done
+    return 1
+}
+
+# Restore SQLite database
+restore_sqlite() {
+    local backup_path="$1"
+    local target="$2"
+
+    # Create pre-restore backup
+    if [[ -f "$target" ]]; then
+        cp "$target" "${target}.pre-restore"
+        print_info "Current database backed up to ${target}.pre-restore"
+    fi
+
+    if cp "$backup_path" "$target"; then
+        print_success "Restored to $target"
+        return 0
+    fi
+
+    print_error "Failed to restore to $target"
+    return 1
+}
+
+# Restore PostgreSQL database
+restore_postgres() {
+    local backup_path="$1"
+
+    local url
+    url=$(detect_postgres) || {
+        print_error "DATABASE_URL not found. Set it to restore PostgreSQL."
+        return 1
+    }
+
+    if ! command -v psql &>/dev/null; then
+        print_error "psql not found. Install PostgreSQL client tools to restore."
+        return 1
+    fi
+
+    print_info "Restoring PostgreSQL from $backup_path..."
+
+    if gunzip -c "$backup_path" | psql "$url" >/dev/null 2>&1; then
+        print_success "PostgreSQL restored successfully"
+        return 0
+    fi
+
+    print_error "PostgreSQL restore failed"
+    return 1
+}
+
+# Restore MySQL database
+restore_mysql() {
+    local backup_path="$1"
+
+    local url
+    url=$(detect_mysql) || {
+        print_error "MYSQL_URL not found. Set it to restore MySQL."
+        return 1
+    }
+
+    if ! command -v mysql &>/dev/null; then
+        print_error "mysql not found. Install MySQL client tools to restore."
+        return 1
+    fi
+
+    # Parse URL using shared helper
+    if ! parse_mysql_url "$url"; then
+        print_error "Cannot parse MySQL URL"
+        return 1
+    fi
+
+    print_info "Restoring MySQL from $backup_path..."
+
+    if gunzip -c "$backup_path" | mysql \
+        --host="$MYSQL_HOST" \
+        --port="$MYSQL_PORT" \
+        --user="$MYSQL_USER" \
+        --password="$MYSQL_PASSWORD" \
+        "$MYSQL_DB" 2>/dev/null; then
+        print_success "MySQL restored successfully"
+        return 0
+    fi
+
+    print_error "MySQL restore failed"
+    return 1
+}
+
+# Restore MongoDB database
+restore_mongodb() {
+    local backup_path="$1"
+
+    local url
+    url=$(detect_mongodb) || {
+        print_error "MONGODB_URL not found. Set it to restore MongoDB."
+        return 1
+    }
+
+    if ! command -v mongorestore &>/dev/null; then
+        print_error "mongorestore not found. Install MongoDB Database Tools to restore."
+        return 1
+    fi
+
+    print_info "Restoring MongoDB from $backup_path..."
+
+    if mongorestore \
+        --uri="$url" \
+        --archive="$backup_path" \
+        --gzip \
+        --drop \
+        2>/dev/null; then
+        print_success "MongoDB restored successfully"
+        return 0
+    fi
+
+    print_error "MongoDB restore failed"
+    return 1
+}
+
+# ============================================================================
+# CLI ENTRY POINTS
+# ============================================================================
+
+# Main backup function
+ralph_backup() {
+    # Parse args
+    while [[ $# -gt 0 ]]; do
+        case "$1" in
+            --quiet|-q)
+                BACKUP_QUIET=true
+                shift
+                ;;
+            *)
+                shift
+                ;;
+        esac
+    done
+
+    local timestamp
+    timestamp=$(date +%Y%m%d-%H%M%S)
+
+    # Ensure backup directory exists and is gitignored
+    mkdir -p "$BACKUP_DIR/sqlite" "$BACKUP_DIR/postgres" "$BACKUP_DIR/mysql" "$BACKUP_DIR/mongodb"
+
+    # Add to .gitignore if not present
+    if [[ -f ".gitignore" ]]; then
+        if ! grep -q "^\.backups/" ".gitignore" 2>/dev/null; then
+            echo "" >> ".gitignore"
+            echo "# Database backups (thrivekit)" >> ".gitignore"
+            echo ".backups/" >> ".gitignore"
+        fi
+    fi
+
+    local backed_up=0
+    local failed=0
+
+    # Check database types in order of priority (only one type per project)
+    # PostgreSQL/Supabase first (most common for production apps)
+    local pg_url mysql_url mongo_url sqlite_files
+
+    if pg_url=$(detect_postgres); then
+        # PostgreSQL / Supabase
+        if backup_postgres "$pg_url" "$BACKUP_DIR" "$timestamp"; then
+            ((backed_up++))
+        else
+            ((failed++))
+        fi
+    elif mysql_url=$(detect_mysql); then
+        # MySQL
+        if backup_mysql "$mysql_url" "$BACKUP_DIR" "$timestamp"; then
+            ((backed_up++))
+        else
+            ((failed++))
+        fi
+    elif mongo_url=$(detect_mongodb); then
+        # MongoDB
+        if backup_mongodb "$mongo_url" "$BACKUP_DIR" "$timestamp"; then
+            ((backed_up++))
+        else
+            ((failed++))
+        fi
+    else
+        # SQLite (fallback - local dev databases)
+        sqlite_files=$(detect_sqlite)
+        if [[ -n "$sqlite_files" ]]; then
+            while IFS= read -r db_path; do
+                [[ -z "$db_path" ]] && continue
+                if backup_sqlite "$db_path" "$BACKUP_DIR" "$timestamp"; then
+                    ((backed_up++))
+                else
+                    ((failed++))
+                fi
+            done <<< "$sqlite_files"
+        fi
+    fi
+
+    # Cleanup old backups
+    if [[ $backed_up -gt 0 ]]; then
+        cleanup_old_backups "$BACKUP_DIR" "$DEFAULT_MAX_BACKUPS"
+    fi
+
+    # Summary
+    if [[ $backed_up -eq 0 && $failed -eq 0 ]]; then
+        backup_info "No databases detected for backup"
+    elif [[ $failed -gt 0 ]]; then
+        backup_warning "Backup completed: $backed_up succeeded, $failed failed"
+    else
+        backup_success "Backed up $backed_up database(s)"
+    fi
+
+    return 0
+}
+
+# List available backups
+ralph_list_backups() {
+    if [[ ! -d "$BACKUP_DIR" ]]; then
+        print_info "No backups found. Run 'ralph backup' or commit to create backups."
+        return 0
+    fi
+
+    echo ""
+    print_info "=== Available Backups ==="
+    echo ""
+
+    local found=0
+
+    for subdir in sqlite postgres mysql mongodb; do
+        local type_dir="$BACKUP_DIR/$subdir"
+        [[ ! -d "$type_dir" ]] && continue
+
+        local files
+        files=$(ls -t "$type_dir" 2>/dev/null)
+        [[ -z "$files" ]] && continue
+
+        # Capitalize first letter (bash 3.2 compatible)
+        local label
+        label=$(echo "$subdir" | awk '{print toupper(substr($0,1,1)) substr($0,2)}')
+        echo "$label:"
+
+        while IFS= read -r file; do
+            [[ -z "$file" ]] && continue
+            local filepath="$type_dir/$file"
+            local size
+            size=$(get_file_size_mb "$filepath")
+            [[ "$size" -eq 0 ]] && size="<1"
+
+            # Extract timestamp from filename
+            local ts
+            ts=$(echo "$file" | grep -oE '[0-9]{8}-[0-9]{6}' | head -1)
+            if [[ -n "$ts" ]]; then
+                # Format: YYYYMMDD-HHMMSS -> YYYY-MM-DD HH:MM
+                local formatted
+                formatted=$(echo "$ts" | sed 's/\([0-9]\{4\}\)\([0-9]\{2\}\)\([0-9]\{2\}\)-\([0-9]\{2\}\)\([0-9]\{2\}\)\([0-9]\{2\}\)/\1-\2-\3 \4:\5/')
+                echo "  $formatted - $file (${size}MB)"
+            else
+                echo "  $file (${size}MB)"
+            fi
+
+            ((found++))
+        done <<< "$files"
+
+        echo ""
+    done
+
+    if [[ $found -eq 0 ]]; then
+        print_info "No backups found."
+    else
+        echo "Total: $found backup(s)"
+        echo ""
+        echo "Restore with: ralph restore <backup-path>"
+    fi
+}
+
+# Restore from backup
+ralph_restore() {
+    local backup_path="${1:-}"
+    local target="${2:-}"
+
+    if [[ -z "$backup_path" ]]; then
+        print_error "Usage: ralph restore <backup-path> [target]"
+        print_info "Run 'ralph backups' to see available backups"
+        return 1
+    fi
+
+    if [[ ! -f "$backup_path" ]]; then
+        print_error "Backup file not found: $backup_path"
+        return 1
+    fi
+
+    # Determine database type from path
+    local db_type=""
+    if [[ "$backup_path" == *"/sqlite/"* ]]; then
+        db_type="sqlite"
+    elif [[ "$backup_path" == *"/postgres/"* ]]; then
+        db_type="postgres"
+    elif [[ "$backup_path" == *"/mysql/"* ]]; then
+        db_type="mysql"
+    elif [[ "$backup_path" == *"/mongodb/"* ]]; then
+        db_type="mongodb"
+    else
+        print_error "Cannot determine database type from path"
+        return 1
+    fi
+
+    # For SQLite, find target if not specified
+    if [[ "$db_type" == "sqlite" && -z "$target" ]]; then
+        local db_name
+        db_name=$(basename "$backup_path" | sed 's/-[0-9]\{8\}-[0-9]\{6\}\.db$//')
+        target=$(find_sqlite_target "$db_name") || {
+            print_error "Cannot find original database for '$db_name'"
+            print_info "Specify target explicitly: ralph restore $backup_path <target-path>"
+            return 1
+        }
+        print_info "Found matching database: $target"
+    fi
+
+    # Confirmation
+    echo ""
+    print_warning "WARNING: This will OVERWRITE the current database!"
+    echo ""
+    read -p "Type 'restore' to confirm: " confirm
+
+    if [[ "$confirm" != "restore" ]]; then
+        echo "Cancelled."
+        return 1
+    fi
+
+    echo ""
+
+    # Perform restore based on type
+    case "$db_type" in
+        sqlite)
+            restore_sqlite "$backup_path" "$target"
+            ;;
+        postgres)
+            restore_postgres "$backup_path"
+            ;;
+        mysql)
+            restore_mysql "$backup_path"
+            ;;
+        mongodb)
+            restore_mongodb "$backup_path"
+            ;;
+    esac
+}
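
A minimal usage sketch of the entry points defined above, assuming backup.sh is sourced after package/ralph/utils.sh (which is presumed to provide the print_info/print_warning/print_success/print_error helpers the script calls); the backup path in the last line is illustrative, following the .backups/{engine}/{dbname}-{YYYYMMDD-HHMMSS} naming scheme used by the script:

# Load helpers first (assumption: utils.sh defines the print_* functions), then the backup module.
source ralph/utils.sh
source ralph/backup.sh

# Detect the project's database and write a timestamped backup under .backups/, suppressing info output.
ralph_backup --quiet

# List captured backups, grouped by engine (sqlite/postgres/mysql/mongodb).
ralph_list_backups

# Restore from a specific backup; prompts for the literal word "restore" before overwriting.
ralph_restore .backups/sqlite/app-20240101-120000.db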