@habityzer/db-sync-tool 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,195 @@
1
+ import {
2
+ createWriteStream,
3
+ mkdirSync,
4
+ statSync,
5
+ existsSync,
6
+ writeFileSync,
7
+ unlinkSync,
8
+ } from 'node:fs';
9
+ import { dirname, join, resolve, isAbsolute } from 'node:path';
10
+ import { PassThrough } from 'node:stream';
11
+ import { pipeline } from 'node:stream/promises';
12
+ import { once } from 'node:events';
13
+ import { loadEnvFile } from '../utils/env-loader.js';
14
+ import { loadConfig, mergeWithCli } from '../utils/config-loader.js';
15
+ import { parseDatabaseUrl } from '../utils/url-parser.js';
16
+ import { createCompressStream } from '../utils/compression.js';
17
+ import { createStreamProgress, formatBytes } from '../utils/progress.js';
18
+ import { AppError, FileSystemError } from '../utils/errors.js';
19
+ import * as ui from '../utils/ui.js';
20
+ import { spawnPgDump } from '../adapters/postgres.js';
21
+ import { spawnMysqldump } from '../adapters/mysql.js';
22
+
23
/**
 * Build a filesystem-safe local-time stamp of the form YYYYMMDD_HHMMSS,
 * used to name generated backup files.
 * @returns {string}
 */
function timestampSuffix() {
  const pad2 = (n) => String(n).padStart(2, '0');
  const t = new Date();
  return (
    `${t.getFullYear()}${pad2(t.getMonth() + 1)}${pad2(t.getDate())}` +
    `_${pad2(t.getHours())}${pad2(t.getMinutes())}${pad2(t.getSeconds())}`
  );
}
33
+
34
/**
 * Decide where the backup file should be written.
 *
 * `opts.output` is interpreted, in order, as: absent (use the configured
 * backup directory), an existing directory, a path with a trailing separator
 * (a directory that may not exist yet), an explicit .sql/.sql.gz file path,
 * or — failing all of those — a directory name.
 *
 * @param {object} opts - { dbName?, compress?, output?, backupDir? }
 * @returns {{ filePath: string, backupDir: string }}
 */
function resolveOutputPath(opts) {
  const compressed = opts.compress !== false;
  const generatedName = `${opts.dbName || 'database'}_${timestampSuffix()}${compressed ? '.sql.gz' : '.sql'}`;
  const defaultDir = resolve(process.cwd(), opts.backupDir || './backups');

  if (!opts.output) {
    return { filePath: join(defaultDir, generatedName), backupDir: defaultDir };
  }

  const out = opts.output;
  const resolvedOut = isAbsolute(out) ? out : resolve(process.cwd(), out);

  // An existing directory: drop the generated filename inside it.
  if (existsSync(resolvedOut) && statSync(resolvedOut).isDirectory()) {
    return { filePath: join(resolvedOut, generatedName), backupDir: resolvedOut };
  }

  // A trailing separator marks a directory that may not exist yet.
  if (out.endsWith('/') || out.endsWith('\\')) {
    const dir = resolve(process.cwd(), out);
    return { filePath: join(dir, generatedName), backupDir: dir };
  }

  // An explicit .sql / .sql.gz path is used verbatim.
  if (/\.sql(\.gz)?$/i.test(out)) {
    return { filePath: resolvedOut, backupDir: dirname(resolvedOut) };
  }

  // Anything else is treated as a directory name.
  return { filePath: join(resolvedOut, generatedName), backupDir: resolvedOut };
}
66
+
67
/**
 * Create `dir` (recursively) if needed and verify it is writable by
 * round-tripping a tiny probe file.
 *
 * @param {string} dir - Directory that must exist and accept writes.
 * @throws {FileSystemError} When the directory cannot be created or written.
 */
function ensureWritableDirSync(dir) {
  const probe = join(dir, '.dbsync-write-test');
  try {
    mkdirSync(dir, { recursive: true });
    writeFileSync(probe, 'ok');
    unlinkSync(probe);
  } catch {
    throw new FileSystemError(
      `Cannot write to ${dir}`,
      'Check permissions or create the directory'
    );
  }
}
83
+
84
/**
 * Stream `pg_dump`/`mysqldump` output through compression into the target
 * file, reporting progress as bytes flow.
 *
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @param {{ schemaOnly?: boolean, dataOnly?: boolean, tables?: string[] }} dumpOpts
 * @param {import('node:stream').Transform | import('node:stream').PassThrough} compressStream
 * @param {import('node:fs').WriteStream} writeStream
 * @param {{ verbose?: boolean }} ctx
 * @throws {AppError} When the dump process exits non-zero.
 */
async function runDumpPipeline(parsed, dumpOpts, compressStream, writeStream, ctx) {
  const child =
    parsed.type === 'postgres'
      ? spawnPgDump(parsed, dumpOpts)
      : spawnMysqldump(parsed, dumpOpts);

  // Collect stderr so a failure can surface the tool's own diagnostics.
  let stderr = '';
  child.stderr?.on('data', (c) => {
    stderr += c.toString();
  });

  const progress = createStreamProgress(`${ui.icons.save} Exporting...`, ctx);
  progress.start();
  const t0 = Date.now();

  const counter = new PassThrough();
  counter.on('data', (chunk) => progress.increment(chunk.length));

  // Subscribe before pipeline: if 'close' fires before we await, `once` would miss it and hang.
  const closePromise = once(child, 'close');
  // BUGFIX: `once` rejects if the child emits 'error'. Without a handler that
  // rejection is unhandled whenever we unwind through the catch path below,
  // crashing the process. The `await closePromise` in the try still observes
  // the rejection normally.
  closePromise.catch(() => {});

  try {
    await pipeline(child.stdout, compressStream, counter, writeStream);
    const [code] = await closePromise;
    if (code !== 0) {
      progress.fail(`${ui.icons.err} Export failed`);
      throw new AppError(`Dump process exited with code ${code}`, {
        suggestion: stderr.trim() || 'Check pg_dump/mysqldump are installed',
      });
    }
    const bytes = typeof progress.getBytes === 'function' ? progress.getBytes() : 0;
    const secs = ((Date.now() - t0) / 1000).toFixed(1);
    progress.succeed(`${ui.icons.save} ${formatBytes(bytes)} in ${secs}s`);
  } catch (e) {
    if (e instanceof AppError) throw e;
    // BUGFIX: a stream failure (e.g. disk full) must not leave the dump
    // process running and writing into a broken pipe.
    child.kill();
    progress.fail(`${ui.icons.err} Export failed`);
    throw e;
  }
}
130
+
131
/**
 * `dbsync export` — dump the database referenced by the configured
 * DATABASE_URL into a (optionally gzipped) SQL file.
 *
 * @param {object} globalOpts - Global CLI options (envFile, config, envVar, verbose).
 * @param {object} cmdOpts - Export options (output, noCompress, compressLevel, tables, schemaOnly, dataOnly).
 * @throws {AppError} On conflicting flags or a failed dump.
 */
export async function runExport(globalOpts, cmdOpts) {
  loadEnvFile(globalOpts.envFile);
  const { config } = loadConfig({ configPath: globalOpts.config });
  const merged = mergeWithCli(config, {});

  const envVar = globalOpts.envVar || 'DATABASE_URL';
  const rawUrl = process.env[envVar];
  const parsed = parseDatabaseUrl(rawUrl);

  if (/[!@#$%]/.test(parsed.password)) {
    ui.warnLine(
      `${ui.icons.warn} Password contains special characters — encoded for connection. Verify if connection fails.`
    );
  }

  const compress = merged.compress !== false && !cmdOpts.noCompress;
  let level = Number(cmdOpts.compressLevel ?? merged.compressLevel ?? 6);
  if (!Number.isFinite(level)) level = 6;
  level = Math.min(9, Math.max(1, level)); // clamp to gzip's valid 1-9 range
  const tables = cmdOpts.tables
    ? String(cmdOpts.tables)
        .split(',')
        .map((s) => s.trim())
        .filter(Boolean)
    : undefined;

  if (cmdOpts.schemaOnly && cmdOpts.dataOnly) {
    throw new AppError('Cannot use --schema-only and --data-only together');
  }

  const { filePath, backupDir } = resolveOutputPath({
    dbName: parsed.database || 'database',
    compress,
    output: cmdOpts.output,
    backupDir: merged.backupDir,
  });

  ensureWritableDirSync(backupDir);

  // Hide the password when echoing the connection string.
  const masked = parsed.url.toString().replace(/:[^:@/]+@/, ':***@');
  ui.infoLine(`${ui.icons.pkg} Exporting database '${parsed.database || 'default'}'...`);
  ui.infoLine(`${ui.icons.link} Connected to ${masked}`);

  const compressStream = createCompressStream(compress, level);
  const writeStream = createWriteStream(filePath);

  const dumpOpts = {
    schemaOnly: !!cmdOpts.schemaOnly,
    dataOnly: !!cmdOpts.dataOnly,
    tables,
  };

  try {
    await runDumpPipeline(parsed, dumpOpts, compressStream, writeStream, {
      verbose: globalOpts.verbose,
    });
  } catch (e) {
    // BUGFIX: a failed dump previously left a truncated/corrupt file behind,
    // which could later be mistaken for a valid backup. Remove it best-effort
    // and re-throw the original error.
    try {
      unlinkSync(filePath);
    } catch {
      /* cleanup is best-effort; the dump error below is what matters */
    }
    throw e;
  }

  const st = statSync(filePath);
  const compLabel = compress ? `gzip (level ${level})` : 'none';
  ui.success(`${ui.icons.ok} Exported to: ${filePath}`);
  ui.infoLine(`${ui.icons.chart} Size: ${formatBytes(st.size)} | Compression: ${compLabel}`);
}
@@ -0,0 +1,31 @@
1
+ import { statSync, existsSync } from 'node:fs';
2
+ import { resolve, isAbsolute } from 'node:path';
3
+ import { parseBackupFilename } from '../utils/backup-scanner.js';
4
+ import { formatBytes } from '../utils/progress.js';
5
+ import { BackupError } from '../utils/errors.js';
6
+ import * as ui from '../utils/ui.js';
7
+
8
/**
 * `dbsync info <file>` — print metadata for a single backup file: size,
 * modification time, and the database name / timestamp parsed from the
 * filename when it follows the dbname_YYYYMMDD_HHMMSS convention.
 *
 * @param {object} globalOpts - Global CLI options (unused here, kept for a uniform command signature).
 * @param {string} fileArg - Path to the backup file.
 * @throws {BackupError} When the path is missing, absent, or not a regular file.
 */
export async function runInfo(globalOpts, fileArg) {
  if (!fileArg) {
    throw new BackupError('Missing file path', 'Usage: dbsync info <file>');
  }
  const p = isAbsolute(fileArg) ? fileArg : resolve(process.cwd(), fileArg);
  if (!existsSync(p)) {
    throw new BackupError('Invalid backup file', 'Check the path');
  }
  const st = statSync(p);
  // BUGFIX: a directory (or other non-file entry) passed the existence check
  // and printed meaningless info; reject anything that is not a regular file.
  if (!st.isFile()) {
    throw new BackupError('Invalid backup file', 'Check the path');
  }
  const name = p.split(/[/\\]/).pop() || fileArg;
  const compressed = name.endsWith('.gz');
  const { database, parsedDate } = parseBackupFilename(name);

  ui.infoLine(`${ui.icons.pkg} Backup: ${name}`);
  ui.infoLine(` Size: ${formatBytes(st.size)}`);
  ui.infoLine(` Modified: ${st.mtime.toISOString()}`);
  if (database) ui.infoLine(` Database: ${database}`);
  if (parsedDate) ui.infoLine(` Parsed timestamp: ${parsedDate.toISOString()}`);
  ui.infoLine(` Compression: ${compressed ? 'gzip' : 'none'}`);
}
@@ -0,0 +1,63 @@
1
+ import { resolve } from 'node:path';
2
+ import { loadConfig, mergeWithCli } from '../utils/config-loader.js';
3
+ import { scanBackups, filterBackups } from '../utils/backup-scanner.js';
4
+ import { formatBytes } from '../utils/progress.js';
5
+ import { BackupError } from '../utils/errors.js';
6
+ import * as ui from '../utils/ui.js';
7
+
8
/**
 * Render one table row: each cell padded to its column width and framed
 * with box-drawing separators.
 *
 * @param {number[]} widths - Column widths (the original JSDoc said string[], but these are numbers).
 * @param {(string | number)[]} cells - Cell values; coerced to strings.
 * @returns {string}
 */
function row(widths, cells) {
  const padded = cells.map((cell, col) => String(cell).padEnd(widths[col]));
  return `│ ${padded.join(' │ ')} │`;
}
16
+
17
/**
 * `dbsync list` — print a box-drawn table of backups in the configured
 * backup directory, optionally filtered by database, date, and count.
 *
 * @param {object} globalOpts - Global CLI options (config path).
 * @param {object} cmdOpts - List options (database, limit, since).
 * @throws {BackupError} When no backups match.
 */
export async function runList(globalOpts, cmdOpts) {
  const { config } = loadConfig({ configPath: globalOpts.config });
  const merged = mergeWithCli(config, {});
  const backupDir = resolve(process.cwd(), merged.backupDir || './backups');

  // --since accepts a bare date (YYYY-MM-DD, treated as local midnight) or a
  // full ISO timestamp; invalid values are warned about and ignored.
  let sinceDate;
  if (cmdOpts.since != null && cmdOpts.since !== '') {
    sinceDate = new Date(
      String(cmdOpts.since).includes('T') ? cmdOpts.since : `${cmdOpts.since}T00:00:00`
    );
    if (Number.isNaN(sinceDate.getTime())) {
      ui.warnLine(`${ui.icons.warn} Ignoring invalid --since date: ${cmdOpts.since}`);
      sinceDate = undefined;
    }
  }

  const entries = filterBackups(scanBackups(backupDir), {
    database: cmdOpts.database,
    limit: cmdOpts.limit,
    since: sinceDate,
  });

  if (entries.length === 0) {
    throw new BackupError('📭 No backups found', 'Run dbsync export first');
  }

  ui.infoLine(`${ui.icons.pkg} Available backups:\n`);

  const widths = [3, 28, 10, 12, 12];
  // Build the top/middle/bottom borders from the same column widths.
  const border = (left, mid, right) =>
    `${left}${widths.map((w) => '─'.repeat(w + 2)).join(mid)}${right}`;
  console.log(border('┌', '┬', '┐'));
  console.log(row(widths, ['#', 'File', 'Size', 'Date', 'Database']));
  console.log(border('├', '┼', '┤'));

  let totalBytes = 0;
  for (const [index, entry] of entries.entries()) {
    totalBytes += entry.size;
    const when = (entry.parsedDate ?? entry.mtime).toISOString().slice(0, 10);
    const shown =
      entry.filename.length > 26 ? `${entry.filename.slice(0, 23)}...` : entry.filename;
    console.log(
      row(widths, [String(index + 1), shown, formatBytes(entry.size), when, entry.database ?? '—'])
    );
  }

  console.log(border('└', '┴', '┘'));
  ui.infoLine(`\nTotal: ${entries.length} backups | ${formatBytes(totalBytes)}`);
}
@@ -0,0 +1,147 @@
1
+ import { createReadStream, statSync, existsSync } from 'node:fs';
2
+ import { resolve, isAbsolute } from 'node:path';
3
+ import { PassThrough } from 'node:stream';
4
+ import { pipeline } from 'node:stream/promises';
5
+ import { once } from 'node:events';
6
+ import { loadEnvFile } from '../utils/env-loader.js';
7
+ import { loadConfig, mergeWithCli } from '../utils/config-loader.js';
8
+ import { parseDatabaseUrl } from '../utils/url-parser.js';
9
+ import { createDecompressStream } from '../utils/compression.js';
10
+ import { createFileProgressBar, formatBytes } from '../utils/progress.js';
11
+ import { validateBackupFile } from '../utils/backup-validate.js';
12
+ import { scanBackups, filterBackups } from '../utils/backup-scanner.js';
13
+ import { AppError, BackupError } from '../utils/errors.js';
14
+ import * as ui from '../utils/ui.js';
15
+ import { confirm, ask } from '../utils/prompt.js';
16
+ import { spawnPsqlRestore, dropPostgresDatabase } from '../adapters/postgres.js';
17
+ import { spawnMysqlRestore, dropMysqlDatabase } from '../adapters/mysql.js';
18
+
19
/**
 * Turn a user-supplied backup path into an absolute path, or null when no
 * path was given (the caller then falls back to interactive selection).
 *
 * @param {string} fileArg
 * @returns {string | null}
 */
function resolveBackupPath(fileArg) {
  if (!fileArg) return null;
  return isAbsolute(fileArg) ? fileArg : resolve(process.cwd(), fileArg);
}
27
+
28
/**
 * `dbsync restore [file]` — feed a backup file (optionally gzipped) into
 * psql/mysql, with interactive selection, dry-run validation, confirmation,
 * and optional drop-and-recreate.
 *
 * @param {object} globalOpts - Global CLI options (envFile, config, envVar, verbose).
 * @param {object} cmdOpts - Restore options (interactive, database, dryRun, force, dropBefore).
 * @param {string | undefined} fileArg - Optional backup file path.
 * @throws {BackupError} When no usable backup file can be determined.
 * @throws {AppError} On invalid selection or a failed restore process.
 */
export async function runRestore(globalOpts, cmdOpts, fileArg) {
  loadEnvFile(globalOpts.envFile);
  const { config } = loadConfig({ configPath: globalOpts.config });
  const merged = mergeWithCli(config, {});
  const backupDir = resolve(process.cwd(), merged.backupDir || './backups');

  const envVar = globalOpts.envVar || 'DATABASE_URL';
  const parsed = parseDatabaseUrl(process.env[envVar]);

  // FIX: resolveBackupPath takes a single argument; the stray backupDir
  // argument previously passed here was silently ignored.
  let filePath = fileArg ? resolveBackupPath(fileArg) : null;

  if (cmdOpts.interactive || !filePath) {
    const dbFilter = cmdOpts.database || parsed.database;
    let entries = filterBackups(scanBackups(backupDir), {
      database: dbFilter || undefined,
    });
    if (entries.length === 0 && dbFilter) {
      ui.warnLine(
        `${ui.icons.warn} No backups matched database filter; showing all backups.`
      );
      entries = scanBackups(backupDir);
    }
    if (entries.length === 0) {
      throw new BackupError('📭 No backups found', 'Run dbsync export first');
    }
    ui.infoLine(`${ui.icons.pkg} Available backups${parsed.database ? ` for '${parsed.database}'` : ''}:\n`);
    entries.forEach((e, i) => {
      const when = (e.parsedDate ?? e.mtime).toISOString().slice(0, 10);
      ui.infoLine(
        `${i + 1}. ${e.filename} (${formatBytes(e.size)}) - ${when}`
      );
    });
    const ans = await ask(`\nSelect backup to restore (1-${entries.length}) or 'q' to quit: `);
    if (ans.trim().toLowerCase() === 'q') {
      process.exit(0);
    }
    const n = Number(ans.trim());
    if (!Number.isInteger(n) || n < 1 || n > entries.length) {
      throw new AppError('Invalid selection');
    }
    filePath = entries[n - 1].path;
  }

  if (!filePath || !existsSync(filePath)) {
    throw new BackupError('Invalid backup file', 'Check the path and try again');
  }

  const isGzip = filePath.endsWith('.gz');
  if (cmdOpts.dryRun) {
    await validateBackupFile(filePath, isGzip);
    ui.success(`${ui.icons.ok} Backup file looks valid (dry-run)`);
    return;
  }

  if (!cmdOpts.force) {
    ui.warnLine(
      `${ui.icons.warn} Warning: This will overwrite current database '${parsed.database || 'default'}'`
    );
    const ok = await confirm('Continue? (y/N): ');
    if (!ok) {
      ui.infoLine('Cancelled.');
      process.exit(0);
    }
  }

  if (cmdOpts.dropBefore) {
    ui.infoLine(`${ui.icons.warn} Dropping and recreating database...`);
    if (parsed.type === 'postgres') {
      await dropPostgresDatabase(parsed);
    } else {
      await dropMysqlDatabase(parsed);
    }
  }

  // Hide the password when echoing the connection string.
  const masked = parsed.url.toString().replace(/:[^:@/]+@/, ':***@');
  ui.infoLine(`${ui.icons.spin} Restoring from ${filePath.split(/[/\\]/).pop()}...`);
  ui.infoLine(`${ui.icons.link} Connected to ${masked}`);

  const st = statSync(filePath);
  const bar = createFileProgressBar(`${ui.icons.save} Restoring...`, st.size, {
    verbose: globalOpts.verbose,
  });
  bar.start();

  const readStream = createReadStream(filePath);
  const decompress = createDecompressStream(isGzip);
  const counter = new PassThrough();
  counter.on('data', (c) => bar.increment(c.length));

  const child =
    parsed.type === 'postgres' ? spawnPsqlRestore(parsed) : spawnMysqlRestore(parsed);

  // Collect stderr so a failure can surface the client's own diagnostics.
  let stderr = '';
  child.stderr?.on('data', (c) => {
    stderr += c.toString();
  });

  const t0 = Date.now();
  // Subscribe before the pipeline so an early 'close' is not missed.
  const closePromise = once(child, 'close');
  // BUGFIX: `once` rejects if the child emits 'error'; without a handler that
  // rejection is unhandled while unwinding through the catch block below and
  // crashes the process. The `await closePromise` in the try still observes it.
  closePromise.catch(() => {});
  try {
    await pipeline(readStream, decompress, counter, child.stdin);
    const [code] = await closePromise;
    bar.stop();
    if (code !== 0) {
      throw new AppError(`Restore process exited with code ${code}`, {
        suggestion: stderr.trim() || 'Check psql/mysql client and permissions',
      });
    }
    const secs = ((Date.now() - t0) / 1000).toFixed(1);
    ui.success(`${ui.icons.ok} Restore completed successfully (${secs}s)`);
  } catch (e) {
    bar.stop();
    // BUGFIX: do not leave a half-fed restore client running after a stream
    // failure (kill() is a no-op if the process has already exited).
    child.kill();
    throw e;
  }
}
@@ -0,0 +1,29 @@
1
+ import { loadEnvFile } from '../utils/env-loader.js';
2
+ import { parseDatabaseUrl } from '../utils/url-parser.js';
3
+ import { testPostgresConnection } from '../adapters/postgres.js';
4
+ import { testMysqlConnection } from '../adapters/mysql.js';
5
+ import * as ui from '../utils/ui.js';
6
+
7
/**
 * `dbsync test` — verify that the database referenced by the configured
 * environment variable accepts connections.
 *
 * @param {object} globalOpts - Global CLI options (envFile, envVar).
 */
export async function runTest(globalOpts) {
  loadEnvFile(globalOpts.envFile);
  const parsed = parseDatabaseUrl(process.env[globalOpts.envVar || 'DATABASE_URL']);

  if (/[!@#$%]/.test(parsed.password)) {
    ui.warnLine(
      `${ui.icons.warn} Password contains special characters — auto-encoded for URL parsing; verify connection if it fails.`
    );
  }

  // Pick the probe matching the database flavor, then run it.
  const probe = parsed.type === 'postgres' ? testPostgresConnection : testMysqlConnection;
  await probe(parsed);

  ui.success(`${ui.icons.ok} Connection OK`);
}
package/src/index.js ADDED
@@ -0,0 +1,118 @@
1
+ import { readFileSync } from 'node:fs';
2
+ import { dirname, join } from 'node:path';
3
+ import { fileURLToPath } from 'node:url';
4
+ import { Command, Option } from 'commander';
5
+ import { AppError } from './utils/errors.js';
6
+ import { printErrorWithSuggestion } from './utils/ui.js';
7
+ import { runExport } from './commands/export.js';
8
+ import { runRestore } from './commands/restore.js';
9
+ import { runList } from './commands/list.js';
10
+ import { runInfo } from './commands/info.js';
11
+ import { runClean } from './commands/clean.js';
12
+ import { runTest } from './commands/test.js';
13
+
14
+ const __dirname = dirname(fileURLToPath(import.meta.url));
15
+ const pkg = JSON.parse(readFileSync(join(__dirname, '../package.json'), 'utf8'));
16
+
17
/**
 * Merge program-level and command-level options, preferring commander's own
 * `optsWithGlobals()` when the installed version provides it.
 *
 * @param {import('commander').Command} program
 * @param {import('commander').Command} cmd
 * @returns {object} Combined options (command options win on conflict).
 */
function globalOpts(program, cmd) {
  return typeof cmd.optsWithGlobals === 'function'
    ? cmd.optsWithGlobals()
    : { ...program.opts(), ...cmd.opts() };
}
27
+
28
/**
 * Build the commander program, register all sub-commands, and dispatch.
 * Domain errors (AppError) are rendered with a suggestion and set the exit
 * code; anything else propagates as a genuine bug.
 *
 * @param {string[]} argv - Raw process argv, passed straight to commander.
 */
export async function runCli(argv) {
  const cli = new Command();

  // Global flags shared by every sub-command.
  cli
    .name('dbsync')
    .description('Backup and restore PostgreSQL/MySQL databases with .env support')
    .version(pkg.version)
    .option('--env-file <path>', 'Path to .env file', '.env')
    .option('--env-var <name>', 'Environment variable for database URL', 'DATABASE_URL')
    .option('--config <path>', 'Path to .db-sync.json / .db-syncconfig.json')
    .option('-v, --verbose', 'Verbose logging', false);

  cli
    .command('export')
    .description('Export (backup) the database')
    .option('-o, --output <path>', 'Output file or directory')
    .option('--schema-only', 'Export schema only', false)
    .option('--data-only', 'Export data only', false)
    .option('--tables <list>', 'Comma-separated table names')
    .addOption(new Option('--no-compress', 'Disable gzip compression'))
    .addOption(
      new Option('--compress-level <n>', 'Gzip level 1-9')
        .default('6')
        .argParser((v) => parseInt(String(v), 10))
    )
    .action(async (opts, cmd) => {
      await runExport(globalOpts(cli, cmd), opts);
    });

  cli
    .command('restore')
    .description('Restore database from a backup file')
    .argument('[file]', 'Backup .sql or .sql.gz file')
    .option('--drop-before', 'Drop and recreate database before restore', false)
    .option('--dry-run', 'Validate backup without importing', false)
    .option('--force', 'Skip confirmation prompts', false)
    .option('-i, --interactive', 'Choose backup from list', false)
    .option('-d, --database <name>', 'Filter interactive list by database name')
    .action(async (file, opts, cmd) => {
      await runRestore(globalOpts(cli, cmd), opts, file);
    });

  cli
    .command('list')
    .description('List backups in the backup directory')
    .option('-d, --database <name>', 'Filter by database name')
    .option('-l, --limit <n>', 'Limit number of results', (v) => parseInt(v, 10))
    .option('--since <date>', 'Backups on or after date (YYYY-MM-DD)')
    .action(async (opts, cmd) => {
      await runList(globalOpts(cli, cmd), opts);
    });

  cli
    .command('info')
    .description('Show details for a backup file')
    .argument('<file>', 'Backup file path')
    .action(async (file, _opts, cmd) => {
      await runInfo(globalOpts(cli, cmd), file);
    });

  cli
    .command('clean')
    .description('Remove old backups (retention)')
    .option('--keep-last <n>', 'Keep newest N backups (default from config or 30)')
    .option('--keep-days <n>', 'Keep backups newer than N days')
    .option('--dry-run', 'Show files that would be deleted', false)
    .action(async (opts, cmd) => {
      await runClean(globalOpts(cli, cmd), opts);
    });

  cli
    .command('test')
    .description('Test database connection from DATABASE_URL')
    .action(async (_opts, cmd) => {
      await runTest(globalOpts(cli, cmd));
    });

  try {
    await cli.parseAsync(argv);
  } catch (err) {
    // Known domain errors print a friendly message and set the exit code.
    if (err instanceof AppError) {
      printErrorWithSuggestion(err);
      process.exitCode = err.exitCode ?? 1;
      return;
    }
    throw err;
  }
}
@@ -0,0 +1,101 @@
1
+ import { readdirSync, statSync } from 'node:fs';
2
+ import { join, basename } from 'node:path';
3
+
4
+ /**
5
+ * @typedef {object} BackupEntry
6
+ * @property {string} path
7
+ * @property {string} filename
8
+ * @property {number} size
9
+ * @property {Date} mtime
10
+ * @property {string} [database]
11
+ * @property {Date} [parsedDate]
12
+ * @property {boolean} compressed
13
+ */
14
+
15
// dbname_YYYYMMDD_HHMMSS.sql or .sql.gz
const FILENAME_RE = /^(.+)_(\d{8})_(\d{6})(\.sql)(\.gz)?$/;

/**
 * Extract the database name and timestamp encoded in a backup filename.
 * Returns `{ database: undefined, parsedDate: undefined }` when the name
 * does not follow the dbname_YYYYMMDD_HHMMSS convention.
 *
 * @param {string} filename - Bare name or full path (only the basename is inspected).
 * @returns {{ database: (string | undefined), parsedDate: (Date | undefined) }}
 */
export function parseBackupFilename(filename) {
  const match = FILENAME_RE.exec(basename(filename));
  if (!match) {
    return { database: undefined, parsedDate: undefined };
  }
  const [, database, dateDigits, timeDigits] = match;
  const part = (digits, from, to) => Number(digits.slice(from, to));
  const parsedDate = new Date(
    part(dateDigits, 0, 4),
    part(dateDigits, 4, 6) - 1, // JS Date months are 0-indexed
    part(dateDigits, 6, 8),
    part(timeDigits, 0, 2),
    part(timeDigits, 2, 4),
    part(timeDigits, 4, 6)
  );
  return { database, parsedDate };
}
37
+
38
/**
 * Scan a directory for backup files (*.sql / *.sql.gz) and return them
 * newest first (by filename timestamp when parseable, else by mtime).
 * A missing or unreadable directory yields an empty list.
 *
 * @param {string} backupDir
 * @returns {BackupEntry[]}
 */
export function scanBackups(backupDir) {
  let names;
  try {
    names = readdirSync(backupDir);
  } catch {
    // Missing/unreadable directory simply means "no backups".
    return [];
  }

  /** @type {BackupEntry[]} */
  const entries = [];
  for (const filename of names) {
    if (!filename.endsWith('.sql') && !filename.endsWith('.sql.gz')) continue;
    const path = join(backupDir, filename);
    let st;
    try {
      st = statSync(path);
    } catch {
      continue; // raced deletion / permission problem — skip this entry
    }
    if (!st.isFile()) continue;
    const { database, parsedDate } = parseBackupFilename(filename);
    entries.push({
      path,
      filename,
      size: st.size,
      mtime: st.mtime,
      database,
      parsedDate,
      compressed: filename.endsWith('.gz'),
    });
  }

  // Newest first; prefer the timestamp encoded in the name over mtime.
  const effectiveTime = (entry) => (entry.parsedDate ?? entry.mtime).getTime();
  entries.sort((a, b) => effectiveTime(b) - effectiveTime(a));

  return entries;
}
82
+
83
/**
 * Narrow a list of backup entries by database name (case-insensitive exact
 * match), minimum date, and/or a maximum count. The input array is never
 * mutated.
 *
 * @param {BackupEntry[]} entries
 * @param {{ database?: string, limit?: number, since?: Date }} filters
 * @returns {BackupEntry[]}
 */
export function filterBackups(entries, filters = {}) {
  const { database, since, limit } = filters;
  let result = entries;

  if (database) {
    const wanted = database.toLowerCase();
    result = result.filter((e) => (e.database || '').toLowerCase() === wanted);
  }

  if (since) {
    const cutoff = since.getTime();
    result = result.filter((e) => (e.parsedDate ?? e.mtime).getTime() >= cutoff);
  }

  if (limit != null && limit > 0) {
    result = result.slice(0, limit);
  }

  return result;
}