@habityzer/db-sync-tool 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,119 @@
1
+ # db-sync-tool
2
+
3
+ CLI for backing up and restoring **PostgreSQL** and **MySQL** databases with `.env` support, gzip compression, timestamped backups, listing, retention cleanup, and interactive restore.
4
+
5
+ ## Requirements
6
+
7
+ - **Node.js** 18+
8
+ - **PostgreSQL client tools** on `PATH` when using Postgres: `pg_dump`, `psql`
9
+ - **MySQL client tools** on `PATH` when using MySQL: `mysqldump`, `mysql`
10
+
11
+ ## Install
12
+
13
+ ```bash
14
+ pnpm add -g @habityzer/db-sync-tool
15
+ # or
16
+ pnpm dlx @habityzer/db-sync-tool --help
17
+ ```
18
+
19
+ The executable is **`dbsync`**. The package is published under the **Habityzer** org on npm (`@habityzer/…`).
20
+
21
+ ## Configuration
22
+
23
+ ### Environment (default)
24
+
25
+ Create a `.env` in the working directory:
26
+
27
+ ```env
28
+ DATABASE_URL="postgresql://app:!ChangeMe!@127.0.0.1:5434/app?serverVersion=16"
29
+ ```
30
+
31
+ - `postgresql://` is normalized to `postgres://`
32
+ - Query params `serverVersion`, `charset`, and `driver` are stripped for native tools
33
+ - Override file: `--env-file <path>`
34
+ - Override variable name: `--env-var MY_DATABASE_URL`
35
+
36
+ ### Optional JSON config
37
+
38
+ `.db-sync.json` or `.db-syncconfig.json` in the project root:
39
+
40
+ ```json
41
+ {
42
+ "backupDir": "./backups",
43
+ "compress": true,
44
+ "compressLevel": 6,
45
+ "keepLast": 30,
46
+ "timestampFormat": "YYYYMMDD_HHMMSS"
47
+ }
48
+ ```
49
+
50
+ Override path: `--config <path>`.
51
+
52
+ ## Commands
53
+
54
+ | Command | Description |
55
+ |--------|-------------|
56
+ | `dbsync export` | Stream dump → gzip (default) → file under `backupDir` |
57
+ | `dbsync restore [file]` | Stream file → decompress if needed → `psql` / `mysql` |
58
+ | `dbsync list` | List backups with size, date, database name |
59
+ | `dbsync info <file>` | File metadata and parsed backup name |
60
+ | `dbsync clean` | Retention: `--keep-last`, `--keep-days`, `--dry-run` |
61
+ | `dbsync test` | `SELECT 1` via `pg` / `mysql2` drivers |
62
+
63
+ ### Global options
64
+
65
+ - `--env-file`, `--env-var`, `--config`, `-v/--verbose`, `-h/--help`, `-V/--version`
66
+
67
+ ### Export
68
+
69
+ - `-o, --output` — file path or directory (default naming: `{db}_{YYYYMMDD}_{HHMMSS}.sql.gz`)
70
+ - `--schema-only`, `--data-only`, `--tables a,b`
71
+ - `--no-compress`, `--compress-level 1-9`
72
+
73
+ ### Restore
74
+
75
+ - `--drop-before` — drop & recreate database first
76
+ - `--dry-run` — validate backup (full gzip stream check for `.gz`)
77
+ - `--force` — skip confirmation
78
+ - `-i, --interactive` — pick backup from list
79
+ - `-d, --database` — filter interactive list
80
+
81
+ ### List
82
+
83
+ - `-d, --database`, `-l, --limit`, `--since YYYY-MM-DD`
84
+
85
+ ### Clean
86
+
87
+ - `--keep-last N` — keep newest N (default from config or **30** if nothing else set)
88
+ - `--keep-days N` — also keep anything newer than N days (union with keep-last)
89
+ - `--dry-run`
90
+
91
+ ## Examples
92
+
93
+ ```bash
94
+ dbsync export
95
+ dbsync export -o ./backups --schema-only
96
+ dbsync restore ./backups/app_20260410_120000.sql.gz
97
+ dbsync restore -i --force
98
+ dbsync list --database app --limit 5
99
+ dbsync info ./backups/app_20260410_120000.sql.gz
100
+ dbsync clean --keep-last 10 --dry-run
101
+ dbsync test
102
+ ```
103
+
104
+ ## Large databases
105
+
106
+ Exports and restores use **streaming** (no in-memory buffering of the full dump). Progress shows **bytes transferred** during export (spinner) and a **bar** during restore when the input file size is known.
107
+
108
+ ## Troubleshooting
109
+
110
+ | Issue | Suggestion |
111
+ |-------|------------|
112
+ | No `.env` | Create `.env` with `DATABASE_URL` or pass `--env-file` |
113
+ | Cannot connect | Run `dbsync test`; check host, port, user, password, SSL params |
114
+ | `pg_dump` / `psql` / `mysqldump` / `mysql` not found | Install the PostgreSQL or MySQL client tools and ensure they are on `PATH` |
115
+ | Permission denied on backup dir | Fix directory permissions or choose another `--output` / `backupDir` |
116
+
117
+ ## License
118
+
119
+ MIT
package/bin/dbsync.js ADDED
@@ -0,0 +1,14 @@
1
#!/usr/bin/env node
import { runCli } from '../src/index.js';
import { AppError } from '../src/utils/errors.js';
import { printErrorWithSuggestion } from '../src/utils/ui.js';

/** Report a CLI failure and terminate with the appropriate exit code. */
function handleFailure(err) {
  if (err instanceof AppError) {
    // Domain errors carry a user-facing suggestion and their own exit code.
    printErrorWithSuggestion(err);
    process.exit(err.exitCode ?? 1);
    return;
  }
  console.error(err?.message || err);
  process.exit(1);
}

// Entry point: delegate to the CLI and translate rejections into exit codes.
runCli(process.argv).catch(handleFailure);
package/package.json ADDED
@@ -0,0 +1,54 @@
1
+ {
2
+ "name": "@habityzer/db-sync-tool",
3
+ "version": "1.0.3",
4
+ "description": "CLI for backing up and restoring PostgreSQL/MySQL databases with .env support, compression, and retention",
5
+ "type": "module",
6
+ "bin": {
7
+ "dbsync": "./bin/dbsync.js"
8
+ },
9
+ "main": "./src/index.js",
10
+ "files": [
11
+ "bin",
12
+ "src"
13
+ ],
14
+ "engines": {
15
+ "node": ">=18"
16
+ },
17
+ "scripts": {
18
+ "test": "vitest run",
19
+ "test:watch": "vitest",
20
+ "lint": "eslint src"
21
+ },
22
+ "dependencies": {
23
+ "chalk": "^5.3.0",
24
+ "cli-progress": "^3.12.0",
25
+ "commander": "^14.0.0",
26
+ "dotenv": "^17.0.0",
27
+ "mysql2": "^3.11.0",
28
+ "ora": "^9.0.0",
29
+ "pg": "^8.13.0"
30
+ },
31
+ "devDependencies": {
32
+ "eslint": "^10.0.0",
33
+ "vite": "^8.0.0",
34
+ "vitest": "^4.0.0"
35
+ },
36
+ "keywords": [
37
+ "postgresql",
38
+ "mysql",
39
+ "backup",
40
+ "restore",
41
+ "cli"
42
+ ],
43
+ "license": "MIT",
44
+ "publishConfig": {
45
+ "access": "public"
46
+ },
47
+ "repository": {
48
+ "type": "git",
49
+ "url": "git+https://github.com/Habityzer/dbsync.git"
50
+ },
51
+ "bugs": {
52
+ "url": "https://github.com/Habityzer/dbsync/issues"
53
+ }
54
+ }
@@ -0,0 +1,116 @@
1
+ import { spawn } from 'node:child_process';
2
+ import { once } from 'node:events';
3
+ import mysql from 'mysql2/promise';
4
+ import { AppError } from '../utils/errors.js';
5
+
6
/**
 * Build the environment for MySQL CLI tools, passing the password
 * via MYSQL_PWD so it never appears in the process argument list.
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @returns {NodeJS.ProcessEnv} copy of process.env plus MYSQL_PWD
 */
export function mysqlConnectionEnv(parsed) {
  return Object.assign({}, process.env, { MYSQL_PWD: parsed.password });
}
15
+
16
/**
 * Build mysqldump CLI arguments.
 *
 * Fix: option flags (--no-data / --no-create-info) are now emitted BEFORE
 * the positional database/table names, matching the documented
 * `mysqldump [options] db_name [tbl_name ...]` grammar instead of relying
 * on the client's argument permutation.
 *
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @param {{ schemaOnly?: boolean, dataOnly?: boolean, tables?: string[] }} opts
 * @returns {string[]} argv for mysqldump (password is supplied via MYSQL_PWD)
 */
export function buildMysqlDumpArgs(parsed, opts = {}) {
  const args = ['-h', parsed.host, '-u', parsed.user];
  if (parsed.port) args.push('-P', String(parsed.port));
  // Flags must precede the positional database/table names.
  if (opts.schemaOnly) args.push('--no-data');
  if (opts.dataOnly) args.push('--no-create-info');
  args.push(parsed.database);
  if (opts.tables?.length) {
    for (const t of opts.tables) {
      const name = t.trim();
      if (name) args.push(name); // table names follow the database name
    }
  }
  return args;
}
33
+
34
/**
 * Arguments for the `mysql` client targeting the configured database.
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @returns {string[]} argv for mysql (password is supplied via MYSQL_PWD)
 */
export function buildMysqlArgs(parsed) {
  return [
    '-h',
    parsed.host,
    '-u',
    parsed.user,
    ...(parsed.port ? ['-P', String(parsed.port)] : []),
    parsed.database,
  ];
}
43
+
44
/**
 * Verify connectivity by running `SELECT 1` through the mysql2 driver.
 *
 * Fix: the connection is now closed in a `finally` block so it is not
 * leaked when the probe query (rather than the connect) fails; this also
 * mirrors testPostgresConnection's cleanup style.
 *
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @throws {AppError} when connecting or the probe query fails
 */
export async function testMysqlConnection(parsed) {
  let conn;
  try {
    conn = await mysql.createConnection({
      host: parsed.host,
      port: parsed.port ?? 3306,
      user: parsed.user,
      password: parsed.password,
      database: parsed.database || undefined,
    });
    await conn.query('SELECT 1');
  } catch (e) {
    const msg = e instanceof Error ? e.message : String(e);
    throw new AppError('Cannot connect to database', {
      suggestion: `Check credentials and host. ${msg}`,
    });
  } finally {
    // Best-effort close; a close failure must not mask the probe result.
    await conn?.end().catch(() => {});
  }
}
65
+
66
/**
 * Start a mysqldump process whose SQL stream is available on stdout.
 * stderr is piped so callers can surface tool errors; stdin is unused.
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @param {{ schemaOnly?: boolean, dataOnly?: boolean, tables?: string[] }} opts
 * @returns {import('node:child_process').ChildProcess}
 */
export function spawnMysqldump(parsed, opts = {}) {
  return spawn('mysqldump', buildMysqlDumpArgs(parsed, opts), {
    stdio: ['ignore', 'pipe', 'pipe'],
    env: mysqlConnectionEnv(parsed),
  });
}
78
+
79
/**
 * Start a `mysql` process that consumes SQL from stdin.
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @returns {import('node:child_process').ChildProcess}
 */
export function spawnMysqlRestore(parsed) {
  return spawn('mysql', buildMysqlArgs(parsed), {
    stdio: ['pipe', 'pipe', 'pipe'],
    env: mysqlConnectionEnv(parsed),
  });
}
90
+
91
/**
 * Drop and recreate the target database via `mysql -e`.
 *
 * Fix: stdout is now 'ignore'd — the previous 'pipe' was never read, so a
 * chatty client could fill the pipe buffer and stall the child process.
 * stderr is still captured for the error message.
 *
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @throws {AppError} with captured stderr when the mysql client exits non-zero
 */
export async function dropMysqlDatabase(parsed) {
  const db = quoteMySqlIdent(parsed.database);
  const sql = `DROP DATABASE IF EXISTS ${db}; CREATE DATABASE ${db};`;
  const args = ['-h', parsed.host, '-u', parsed.user];
  if (parsed.port) args.push('-P', String(parsed.port));
  args.push('-e', sql);
  const child = spawn('mysql', args, {
    stdio: ['ignore', 'ignore', 'pipe'],
    env: mysqlConnectionEnv(parsed),
  });
  let err = '';
  child.stderr?.on('data', (c) => {
    err += c.toString();
  });
  const [code] = await once(child, 'close');
  if (code !== 0) {
    throw new AppError('Failed to drop/recreate database', { suggestion: err.trim() });
  }
}
113
+
114
/** Quote a MySQL identifier with backticks, doubling embedded backticks. */
function quoteMySqlIdent(name) {
  const escaped = String(name).replaceAll('`', '``');
  return `\`${escaped}\``;
}
@@ -0,0 +1,148 @@
1
+ import { spawn } from 'node:child_process';
2
+ import { once } from 'node:events';
3
+ import pg from 'pg';
4
+ import { AppError } from '../utils/errors.js';
5
+
6
/**
 * Build the environment for PostgreSQL CLI tools (pg_dump/psql), passing
 * connection details via PG* variables so the password stays out of argv.
 * PGPORT and PGSSLMODE are only set when the parsed URL provides them.
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @returns {NodeJS.ProcessEnv}
 */
export function pgConnectionEnv(parsed) {
  const sslMode = parsed.url.searchParams.get('sslmode');
  return {
    ...process.env,
    PGPASSWORD: parsed.password,
    PGHOST: parsed.host,
    PGUSER: parsed.user,
    PGDATABASE: parsed.database,
    ...(parsed.port ? { PGPORT: String(parsed.port) } : {}),
    ...(sslMode ? { PGSSLMODE: sslMode } : {}),
  };
}
22
+
23
/**
 * Assemble pg_dump CLI arguments from a parsed connection URL.
 * Table names are trimmed and blanks dropped before being added as -t flags.
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @param {{ schemaOnly?: boolean, dataOnly?: boolean, tables?: string[] }} opts
 * @returns {string[]} argv for pg_dump (password comes from PGPASSWORD)
 */
export function buildPgDumpArgs(parsed, opts = {}) {
  const args = ['-h', parsed.host, '-U', parsed.user];
  if (parsed.port) args.push('-p', String(parsed.port));
  args.push('-d', parsed.database);
  if (opts.schemaOnly) args.push('--schema-only');
  if (opts.dataOnly) args.push('--data-only');
  const tables = (opts.tables ?? []).map((t) => t.trim()).filter(Boolean);
  for (const name of tables) {
    args.push('-t', name);
  }
  return args;
}
41
+
42
/**
 * Assemble psql CLI arguments; ON_ERROR_STOP makes restores fail fast.
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @param {boolean} adminDb - when true, target the `postgres` maintenance db
 * @returns {string[]}
 */
export function buildPsqlArgs(parsed, adminDb = false) {
  return [
    '-h',
    parsed.host,
    '-U',
    parsed.user,
    '-d',
    adminDb ? 'postgres' : parsed.database,
    '-v',
    'ON_ERROR_STOP=1',
    ...(parsed.port ? ['-p', String(parsed.port)] : []),
  ];
}
52
+
53
/**
 * Verify connectivity by running `SELECT 1` through the pg driver.
 * Falls back to the `postgres` maintenance database when none is configured.
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @throws {AppError} when connecting or the probe query fails
 */
export async function testPostgresConnection(parsed) {
  const client = new pg.Client({
    host: parsed.host,
    port: parsed.port ?? 5432,
    user: parsed.user,
    password: parsed.password,
    database: parsed.database || 'postgres',
    ssl: sslOptionFromUrl(parsed),
  });
  try {
    await client.connect();
    await client.query('SELECT 1');
  } catch (e) {
    const reason = e instanceof Error ? e.message : String(e);
    throw new AppError('Cannot connect to database', {
      suggestion: `Check credentials and host. ${reason}`,
    });
  } finally {
    // Always release the client; swallow close errors.
    await client.end().catch(() => {});
  }
}
77
+
78
/**
 * Map the URL's sslmode query param onto the pg driver's `ssl` option.
 * Absent/empty or 'disable' → undefined (no TLS); 'no-verify' → TLS without
 * certificate verification; any other mode → TLS with verification.
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @returns {{ rejectUnauthorized: boolean } | undefined}
 */
function sslOptionFromUrl(parsed) {
  const mode = parsed.url.searchParams.get('sslmode');
  switch (mode) {
    case null:
    case '':
    case 'disable':
      return undefined;
    case 'no-verify':
      return { rejectUnauthorized: false };
    default:
      return { rejectUnauthorized: true };
  }
}
86
+
87
/**
 * Start a pg_dump process whose SQL stream is available on stdout.
 * stderr is piped so callers can surface tool errors; stdin is unused.
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @param {{ schemaOnly?: boolean, dataOnly?: boolean, tables?: string[] }} opts
 * @returns {import('node:child_process').ChildProcess}
 */
export function spawnPgDump(parsed, opts = {}) {
  return spawn('pg_dump', buildPgDumpArgs(parsed, opts), {
    stdio: ['ignore', 'pipe', 'pipe'],
    env: pgConnectionEnv(parsed),
  });
}
100
+
101
/**
 * Start a psql process that reads SQL from stdin.
 * When opts.admin is set, both the argv (-d) and PGDATABASE point at the
 * `postgres` maintenance database.
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @param {{ admin?: boolean }} opts
 * @returns {import('node:child_process').ChildProcess}
 */
export function spawnPsqlRestore(parsed, opts = {}) {
  const useAdmin = !!opts.admin;
  const env = {
    ...pgConnectionEnv(parsed),
    PGDATABASE: useAdmin ? 'postgres' : parsed.database,
  };
  return spawn('psql', buildPsqlArgs(parsed, useAdmin), {
    stdio: ['pipe', 'pipe', 'pipe'],
    env,
  });
}
119
+
120
/**
 * Drop and recreate the target database, first terminating other sessions,
 * by running psql against the `postgres` maintenance database.
 *
 * Fix: stdout is now 'ignore'd — the previous 'pipe' was never consumed, so
 * a full pipe buffer could stall the child process. stderr is still captured
 * for the error message.
 *
 * @param {import('../utils/url-parser.js').ParsedDatabaseUrl} parsed
 * @throws {AppError} with captured stderr when psql exits non-zero
 */
export async function dropPostgresDatabase(parsed) {
  const dbId = quoteIdent(parsed.database);
  const sql = `SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = ${literal(parsed.database)} AND pid <> pg_backend_pid(); DROP DATABASE IF EXISTS ${dbId}; CREATE DATABASE ${dbId};`;
  const env = { ...pgConnectionEnv(parsed), PGDATABASE: 'postgres' };
  const child = spawn('psql', [...buildPsqlArgs(parsed, true), '-c', sql], {
    stdio: ['ignore', 'ignore', 'pipe'],
    env,
  });
  let err = '';
  child.stderr?.on('data', (c) => {
    err += c.toString();
  });
  const [code] = await once(child, 'close');
  if (code !== 0) {
    throw new AppError('Failed to drop/recreate database', { suggestion: err.trim() });
  }
}
141
+
142
/** Quote a PostgreSQL identifier, doubling embedded double quotes. */
function quoteIdent(name) {
  return '"' + String(name).replaceAll('"', '""') + '"';
}
145
+
146
/** Render a value as a single-quoted SQL string literal, doubling embedded quotes. */
function literal(str) {
  const escaped = String(str).replaceAll("'", "''");
  return `'${escaped}'`;
}
@@ -0,0 +1,68 @@
1
+ import { unlinkSync } from 'node:fs';
2
+ import { resolve } from 'node:path';
3
+ import { loadConfig, mergeWithCli } from '../utils/config-loader.js';
4
+ import { scanBackups } from '../utils/backup-scanner.js';
5
+ import { planRetention } from '../utils/retention.js';
6
+ import { formatBytes } from '../utils/progress.js';
7
+ import { BackupError } from '../utils/errors.js';
8
+ import * as ui from '../utils/ui.js';
9
+
10
/**
 * `dbsync clean` — remove old backups according to retention settings.
 *
 * Retention resolution order:
 *   1. --keep-last / --keep-days from the command line,
 *   2. `keepLast` from config,
 *   3. a default of 30 when nothing else is set.
 *
 * Fix: non-numeric --keep-last/--keep-days values used to be silently
 * discarded (Number(...) → NaN → filtered to undefined), which could leave
 * planRetention with no constraint at all; they now fail fast with a clear
 * error instead.
 *
 * @param {object} globalOpts - global CLI options (config path, etc.)
 * @param {object} cmdOpts - clean options (keepLast, keepDays, dryRun)
 * @throws {BackupError} when no backups exist or a retention value is invalid
 */
export async function runClean(globalOpts, cmdOpts) {
  const { config } = loadConfig({ configPath: globalOpts.config });
  const merged = mergeWithCli(config, {});
  const backupDir = resolve(process.cwd(), merged.backupDir || './backups');

  const entries = scanBackups(backupDir);
  if (entries.length === 0) {
    throw new BackupError('📭 No backups found', 'Run dbsync export first');
  }

  let keepLast = parseRetentionValue(cmdOpts.keepLast, '--keep-last');
  const keepDays = parseRetentionValue(cmdOpts.keepDays, '--keep-days');

  // Fall back to config / default only when neither flag was provided.
  if (keepLast === undefined && keepDays === undefined) {
    keepLast = merged.keepLast ?? 30;
  }

  const { remove } = planRetention(entries, { keepLast, keepDays });

  if (remove.length === 0) {
    ui.success(`${ui.icons.ok} Nothing to clean`);
    return;
  }

  ui.infoLine(`${cmdOpts.dryRun ? ui.icons.warn : ui.icons.pkg} ${remove.length} backup(s) eligible for removal:\n`);
  let freed = 0;
  for (const e of remove) {
    freed += e.size;
    ui.infoLine(` ${e.filename} (${formatBytes(e.size)})`);
  }
  ui.infoLine(`\nTotal freed: ${formatBytes(freed)}`);

  if (cmdOpts.dryRun) {
    ui.infoLine(`\n${ui.icons.warn} Dry-run: no files deleted`);
    return;
  }

  // Delete best-effort: a single failure should not abort the rest.
  for (const e of remove) {
    try {
      unlinkSync(e.path);
    } catch (err) {
      ui.warnLine(`Could not delete ${e.path}: ${err instanceof Error ? err.message : err}`);
    }
  }
  ui.success(`${ui.icons.ok} Clean complete`);
}

/**
 * Parse an optional numeric retention option from the CLI.
 * @param {unknown} value - raw CLI value (string/number) or null/undefined
 * @param {string} flag - flag name used in the error message
 * @returns {number | undefined} the parsed number, or undefined when unset
 * @throws {BackupError} when the value is present but not a finite number
 */
function parseRetentionValue(value, flag) {
  if (value === undefined || value === null) return undefined;
  const n = Number(value);
  if (!Number.isFinite(n)) {
    throw new BackupError(`Invalid value for ${flag}: ${value}`, 'Pass a number, e.g. --keep-last 10');
  }
  return n;
}
+ }