@habityzer/db-sync-tool 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +119 -0
- package/bin/dbsync.js +14 -0
- package/package.json +54 -0
- package/src/adapters/mysql.js +116 -0
- package/src/adapters/postgres.js +148 -0
- package/src/commands/clean.js +68 -0
- package/src/commands/export.js +195 -0
- package/src/commands/info.js +31 -0
- package/src/commands/list.js +63 -0
- package/src/commands/restore.js +147 -0
- package/src/commands/test.js +29 -0
- package/src/index.js +118 -0
- package/src/utils/backup-scanner.js +101 -0
- package/src/utils/backup-validate.js +78 -0
- package/src/utils/compression.js +25 -0
- package/src/utils/config-loader.js +92 -0
- package/src/utils/env-loader.js +45 -0
- package/src/utils/errors.js +45 -0
- package/src/utils/process.js +79 -0
- package/src/utils/progress.js +115 -0
- package/src/utils/prompt.js +22 -0
- package/src/utils/retention.js +32 -0
- package/src/utils/ui.js +63 -0
- package/src/utils/url-parser.js +190 -0
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import { createReadStream } from 'node:fs';
|
|
2
|
+
import { Writable } from 'node:stream';
|
|
3
|
+
import { pipeline } from 'node:stream/promises';
|
|
4
|
+
import { createGunzip } from 'node:zlib';
|
|
5
|
+
import { BackupError } from './errors.js';
|
|
6
|
+
|
|
7
|
+
/**
 * Heuristic check that a buffer's leading bytes resemble a SQL dump.
 * Only the first 8 KiB are inspected, case-insensitively.
 * @param {Buffer} buf
 * @returns {boolean}
 */
function looksLikeSql(buf) {
  // Markers commonly found near the top of mysqldump / pg_dump output.
  const markers = ['create ', 'insert ', '--', 'set ', 'copy ', '/*!', 'drop '];
  const head = buf.toString('utf8', 0, Math.min(buf.length, 8192)).toLowerCase();
  return markers.some((m) => head.includes(m));
}
|
|
22
|
+
|
|
23
|
+
/**
 * Validate that a backup file is readable and plausibly a SQL dump.
 * Gzip files are verified by fully decompressing into a discarding sink;
 * plain files are sniffed for common SQL markers in their first 64 KiB.
 * @param {string} filePath
 * @param {boolean} isGzip
 * @throws {BackupError} when the file is empty, corrupt, or not SQL-like
 */
export async function validateBackupFile(filePath, isGzip) {
  if (isGzip) {
    // A full gunzip pass catches truncation/corruption anywhere in the file.
    const devNull = new Writable({
      write(_chunk, _enc, cb) {
        cb();
      },
    });
    try {
      await pipeline(createReadStream(filePath), createGunzip(), devNull);
    } catch (e) {
      const msg = e instanceof Error ? e.message : String(e);
      throw new BackupError('Not a valid SQL backup', `File appears corrupted or not gzip: ${msg}`);
    }
    return;
  }

  const head = await readMaxBytesFromPath(filePath, 65536);
  if (head.length === 0) {
    throw new BackupError('Not a valid SQL backup', 'File is empty');
  }
  if (!looksLikeSql(head)) {
    throw new BackupError(
      'Not a valid SQL backup',
      'File does not look like a SQL dump (missing common SQL patterns)'
    );
  }
}
|
|
56
|
+
|
|
57
|
+
/**
 * Read at most `max` bytes from the start of a file.
 * @param {string} filePath
 * @param {number} max
 * @returns {Promise<Buffer>} up to `max` bytes (shorter for small files)
 * @throws {BackupError} when the file cannot be read
 */
async function readMaxBytesFromPath(filePath, max) {
  const rs = createReadStream(filePath);
  /** @type {Buffer[]} */
  const chunks = [];
  let total = 0;
  try {
    for await (const chunk of rs) {
      const buf = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
      chunks.push(buf);
      total += buf.length;
      if (total >= max) break;
    }
  } catch (e) {
    const msg = e instanceof Error ? e.message : String(e);
    throw new BackupError('Not a valid SQL backup', msg);
  } finally {
    // Release the file descriptor promptly on every exit path (break/throw);
    // don't rely on the iterator/GC to do it.
    rs.destroy();
  }
  // The final chunk may overshoot the limit; honor the `max` contract exactly.
  return Buffer.concat(chunks).subarray(0, max);
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { createGzip, createGunzip } from 'node:zlib';
|
|
2
|
+
import { PassThrough } from 'node:stream';
|
|
3
|
+
|
|
4
|
+
/**
 * Create the write-side transform for exports: gzip when compression is on,
 * otherwise a pass-through.
 * @param {boolean} compress
 * @param {number} level gzip level, clamped to 1-9; non-finite values fall back to 6
 * @returns {import('node:stream').Transform}
 */
export function createCompressStream(compress, level = 6) {
  if (!compress) {
    return new PassThrough();
  }
  // Guard against NaN/Infinity sneaking in from CLI number parsing: the
  // clamp below propagates NaN, and zlib rejects a NaN level at runtime.
  const requested = Number.isFinite(level) ? level : 6;
  const lv = Math.min(9, Math.max(1, requested));
  return createGzip({ level: lv });
}
|
|
16
|
+
|
|
17
|
+
/**
 * Create the read-side transform: gunzip for gzip backups, pass-through otherwise.
 * @param {boolean} isGzip
 * @returns {import('node:stream').Transform}
 */
export function createDecompressStream(isGzip) {
  return isGzip ? createGunzip() : new PassThrough();
}
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
import { existsSync, readFileSync } from 'node:fs';
|
|
2
|
+
import { resolve } from 'node:path';
|
|
3
|
+
|
|
4
|
+
const DEFAULT_NAMES = ['.db-sync.json', '.db-syncconfig.json'];
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* @typedef {object} DbSyncConfig
|
|
8
|
+
* @property {string} [backupDir]
|
|
9
|
+
* @property {boolean} [compress]
|
|
10
|
+
* @property {number} [compressLevel]
|
|
11
|
+
* @property {number} [keepLast]
|
|
12
|
+
* @property {string} [timestampFormat]
|
|
13
|
+
*/
|
|
14
|
+
|
|
15
|
+
/**
 * Recursively merge plain objects: `over` wins on conflicts, nested objects
 * merge deep, arrays and scalars replace wholesale. Returns a new object;
 * neither input is mutated.
 * @param {Record<string, unknown>} base
 * @param {Record<string, unknown>} over
 */
function merge(base, over) {
  const out = { ...base };
  for (const [k, v] of Object.entries(over)) {
    // Config files are parsed JSON: skip keys that could pollute prototypes.
    if (k === '__proto__' || k === 'constructor' || k === 'prototype') continue;
    if (v !== undefined && v !== null && typeof v === 'object' && !Array.isArray(v)) {
      out[k] = merge(
        typeof base[k] === 'object' && base[k] !== null ? /** @type {Record<string, unknown>} */ (base[k]) : {},
        /** @type {Record<string, unknown>} */ (v)
      );
    } else {
      out[k] = v;
    }
  }
  return out;
}
|
|
34
|
+
|
|
35
|
+
/**
 * Locate the first default config file that exists in `cwd`.
 * @param {string} [cwd]
 * @returns {string | null} absolute path, or null when none exists
 */
export function discoverConfigPath(cwd = process.cwd()) {
  const found = DEFAULT_NAMES
    .map((name) => resolve(cwd, name))
    .find((candidate) => existsSync(candidate));
  return found ?? null;
}
|
|
46
|
+
|
|
47
|
+
/**
 * Read and parse a JSON config file. Malformed JSON yields an empty config:
 * the config file is optional, so parse failures are intentionally non-fatal.
 * @param {string} filePath
 * @returns {DbSyncConfig}
 */
export function readConfigFile(filePath) {
  const contents = readFileSync(filePath, 'utf8');
  try {
    return JSON.parse(contents);
  } catch {
    return {};
  }
}
|
|
59
|
+
|
|
60
|
+
/**
 * Load the optional config file. An explicit path wins when it exists;
 * otherwise fall back to the first default name discovered in `cwd`.
 * @param {{ configPath?: string, cwd?: string }} opts
 * @returns {{ config: DbSyncConfig, path: string | null }}
 */
export function loadConfig(opts = {}) {
  const cwd = opts.cwd ?? process.cwd();
  let path = null;
  if (opts.configPath) {
    const explicit = resolve(cwd, opts.configPath);
    if (existsSync(explicit)) path = explicit;
  }
  path = path ?? discoverConfigPath(cwd);
  return path ? { config: readConfigFile(path), path } : { config: {}, path: null };
}
|
|
74
|
+
|
|
75
|
+
/**
 * Layer settings in precedence order: built-in defaults < file config < CLI.
 * @param {DbSyncConfig} fileConfig
 * @param {Record<string, unknown>} cliOverrides
 */
export function mergeWithCli(fileConfig, cliOverrides) {
  const defaults = {
    backupDir: './backups',
    compress: true,
    compressLevel: 6,
    keepLast: 30,
    timestampFormat: 'YYYYMMDD_HHMMSS',
  };
  return merge({ ...defaults, ...fileConfig }, cliOverrides);
}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import { existsSync, readFileSync } from 'node:fs';
|
|
2
|
+
import { resolve } from 'node:path';
|
|
3
|
+
import dotenv from 'dotenv';
|
|
4
|
+
import { ConfigError } from './errors.js';
|
|
5
|
+
|
|
6
|
+
/**
 * Load environment variables from a .env file (resolved against cwd).
 * Existing process.env values win unless `opts.override` is true.
 * @param {string} [envFilePath]
 * @param {{ override?: boolean }} [opts]
 * @returns {string} the resolved path that was loaded
 * @throws {ConfigError} when the file does not exist
 */
export function loadEnvFile(envFilePath = '.env', opts = {}) {
  const path = resolve(process.cwd(), envFilePath);
  if (!existsSync(path)) {
    throw new ConfigError(
      'No .env file found',
      `Create ${envFilePath} with DATABASE_URL or pass --env-file <path>`
    );
  }
  const override = opts.override ?? false;
  const entries = Object.entries(dotenv.parse(readFileSync(path, 'utf8')));
  for (const [key, value] of entries) {
    const alreadySet = process.env[key] !== undefined;
    if (override || !alreadySet) {
      process.env[key] = value;
    }
  }
  return path;
}
|
|
29
|
+
|
|
30
|
+
/**
 * Best-effort .env load for commands that may not need DB access:
 * returns null instead of throwing when the file is missing.
 * Never overrides values already present in process.env.
 * @param {string} envFilePath
 * @returns {string | null} the resolved path, or null when absent
 */
export function tryLoadEnvFile(envFilePath = '.env') {
  const path = resolve(process.cwd(), envFilePath);
  if (!existsSync(path)) return null;
  const parsed = dotenv.parse(readFileSync(path, 'utf8'));
  for (const [key, value] of Object.entries(parsed)) {
    // ??= only assigns when the variable is currently undefined.
    process.env[key] ??= value;
  }
  return path;
}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
/**
 * Base class for application errors, carrying an optional suggested fix
 * (printed under the message) and a process exit code for the CLI.
 */
export class AppError extends Error {
  /**
   * @param {string} message
   * @param {object} [opts]
   * @param {string} [opts.suggestion] human-readable remediation hint
   * @param {number} [opts.exitCode] process exit code (defaults to 1)
   */
  constructor(message, { suggestion, exitCode } = {}) {
    super(message);
    this.name = 'AppError';
    this.suggestion = suggestion;
    this.exitCode = exitCode ?? 1;
  }
}
|
|
18
|
+
|
|
19
|
+
/** Raised for configuration problems (missing files, invalid values). */
export class ConfigError extends AppError {
  /**
   * @param {string} message
   * @param {string} [suggestion]
   */
  constructor(message, suggestion) {
    super(message, { suggestion });
    this.name = 'ConfigError';
  }
}
|
|
25
|
+
|
|
26
|
+
/** Raised when the database cannot be reached or authenticated against. */
export class ConnectionError extends AppError {
  /**
   * @param {string} message
   * @param {string} [suggestion]
   */
  constructor(message, suggestion) {
    super(message, { suggestion });
    this.name = 'ConnectionError';
  }
}
|
|
32
|
+
|
|
33
|
+
/** Raised for file-system access failures (permissions, missing paths). */
export class FileSystemError extends AppError {
  /**
   * @param {string} message
   * @param {string} [suggestion]
   */
  constructor(message, suggestion) {
    super(message, { suggestion });
    this.name = 'FileSystemError';
  }
}
|
|
39
|
+
|
|
40
|
+
/** Raised when a backup file is missing, corrupt, or fails validation. */
export class BackupError extends AppError {
  /**
   * @param {string} message
   * @param {string} [suggestion]
   */
  constructor(message, suggestion) {
    super(message, { suggestion });
    this.name = 'BackupError';
  }
}
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
import { spawn } from 'node:child_process';
|
|
2
|
+
import { AppError } from './errors.js';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* @typedef {object} SpawnStreamOptions
|
|
6
|
+
* @property {NodeJS.ReadableStream} [stdin]
|
|
7
|
+
* @property {NodeJS.WritableStream} [stdout]
|
|
8
|
+
* @property {NodeJS.WritableStream} [stderr]
|
|
9
|
+
* @property {Record<string, string>} [env]
|
|
10
|
+
* @property {string} [cwd]
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
/**
 * Spawn a child process, optionally wiring caller streams to its stdin and
 * stdout. stderr is always captured into a string. Resolves when the child
 * closes; rejects on spawn failure or when the provided stdin errors.
 * @param {string} command
 * @param {string[]} args
 * @param {SpawnStreamOptions} opts
 * @returns {Promise<{ code: number, stderr: string }>}
 */
export function spawnProcess(command, args, opts = {}) {
  return new Promise((resolve, reject) => {
    const child = spawn(command, args, {
      stdio: ['pipe', 'pipe', 'pipe'],
      env: { ...process.env, ...opts.env },
      cwd: opts.cwd,
    });

    let stderr = '';
    child.stderr?.on('data', (c) => {
      stderr += c.toString();
    });

    if (opts.stdin) {
      // Swallow EPIPE on the child's stdin: if the child exits before the
      // pipe drains, the unhandled 'error' would crash the whole process.
      child.stdin?.on('error', () => {});
      opts.stdin.pipe(child.stdin);
      opts.stdin.on('error', (e) => {
        child.stdin?.destroy();
        reject(e);
      });
    } else {
      child.stdin?.end();
    }

    if (opts.stdout) {
      child.stdout?.pipe(opts.stdout);
    }

    child.on('error', reject);
    child.on('close', (code, signal) => {
      // A signal-killed child reports code === null; surface that as a
      // failure (1) instead of silently mapping it to success (0).
      resolve({ code: code ?? (signal ? 1 : 0), stderr });
    });
  });
}
|
|
53
|
+
|
|
54
|
+
/**
 * Run a process via spawnProcess and throw an AppError on non-zero exit,
 * using the captured stderr as the suggestion when it is non-empty.
 * @param {string} command
 * @param {string[]} args
 * @param {SpawnStreamOptions} opts
 * @throws {AppError} when the process exits with a non-zero code
 */
export async function spawnProcessOrThrow(command, args, opts = {}) {
  const { code, stderr } = await spawnProcess(command, args, opts);
  if (code === 0) return;
  const detail = stderr.trim();
  throw new AppError(`${command} exited with code ${code}`, {
    suggestion: detail || 'Check database client tools are installed and DATABASE_URL is correct',
  });
}
|
|
67
|
+
|
|
68
|
+
/**
 * Wait for a writable stream to finish flushing. Resolves immediately for
 * non-stream values and for streams that have already finished.
 * NOTE(review): a stream that already emitted 'error' before this call will
 * never settle this promise — callers should attach error handlers early.
 * @param {import('node:stream').Readable | import('node:stream').Writable} stream
 */
export async function finishedWrite(stream) {
  if (!stream || typeof stream.end !== 'function') return;
  // Already flushed: return without attaching listeners that would leak.
  if ('writableFinished' in stream && stream.writableFinished) return;
  await new Promise((resolve, reject) => {
    // `once` so the listeners are removed as soon as the promise settles.
    stream.once('finish', resolve);
    stream.once('error', reject);
  });
}
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
import cliProgress from 'cli-progress';
|
|
2
|
+
import chalk from 'chalk';
|
|
3
|
+
import ora from 'ora';
|
|
4
|
+
|
|
5
|
+
/**
 * Render a byte count as a human-readable string (B/KB/MB/GB, one decimal).
 * @param {number} bytes
 * @returns {string}
 */
export function formatBytes(bytes) {
  const KB = 1024;
  const MB = KB * 1024;
  const GB = MB * 1024;
  if (bytes < KB) return `${bytes} B`;
  if (bytes < MB) return `${(bytes / KB).toFixed(1)} KB`;
  if (bytes < GB) return `${(bytes / MB).toFixed(1)} MB`;
  return `${(bytes / GB).toFixed(1)} GB`;
}
|
|
14
|
+
|
|
15
|
+
/**
 * Progress reporter for streams of unknown total size (e.g. a pg_dump pipe):
 * an ora spinner whose text tracks the running byte count. In verbose mode
 * the spinner is skipped and only the byte counter is kept.
 * @param {string} text
 * @param {{ verbose?: boolean }} ctx
 */
export function createStreamProgress(text, ctx = {}) {
  let bytes = 0;

  if (ctx.verbose) {
    // Verbose mode: no spinner, just count bytes and echo the final line.
    return {
      start() {},
      /** @param {number} n */
      increment(n) {
        bytes += n;
      },
      succeed(finalText) {
        if (finalText) console.log(finalText);
      },
      fail() {},
      getBytes: () => bytes,
    };
  }

  const spinner = ora({ text, spinner: 'dots' }).start();
  return {
    start() {},
    /** @param {number} n */
    increment(n) {
      bytes += n;
      spinner.text = `${text} ${formatBytes(bytes)}`;
    },
    succeed(finalText) {
      spinner.succeed(finalText ?? `${text} ${formatBytes(bytes)}`);
    },
    fail(msg) {
      spinner.fail(msg);
    },
    getBytes: () => bytes,
  };
}
|
|
56
|
+
|
|
57
|
+
/**
 * Byte-based progress bar for operations with a known total size (e.g. a
 * restore from file). Verbose mode or an unknown total degrades to a silent
 * byte counter so callers can still report how much was transferred.
 * @param {string} label
 * @param {number} totalBytes
 * @param {{ verbose?: boolean }} ctx
 */
export function createFileProgressBar(label, totalBytes, ctx = {}) {
  let transferred = 0;

  if (ctx.verbose || totalBytes <= 0) {
    // No bar: just accumulate bytes.
    return {
      start() {},
      /** @param {number} n */
      increment(n) {
        transferred += n;
      },
      stop() {},
      getTransferred() {
        return transferred;
      },
    };
  }

  const bar = new cliProgress.SingleBar(
    {
      format: `${label} |${chalk.cyan('{bar}')}| {percentage}% | {transferred} | {duration}s`,
      barCompleteChar: '\u2588',
      barIncompleteChar: '\u2591',
      hideCursor: true,
    },
    cliProgress.Presets.shades_classic
  );

  const startTime = Date.now();
  return {
    start() {
      bar.start(totalBytes, 0, { transferred: '0 B', duration: '0.0' });
    },
    /** @param {number} n */
    increment(n) {
      transferred += n;
      const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
      bar.update(transferred, { transferred: formatBytes(transferred), duration: elapsed });
    },
    stop() {
      bar.stop();
    },
    getTransferred() {
      return transferred;
    },
  };
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import * as readline from 'node:readline/promises';
|
|
2
|
+
import { stdin as input, stdout as output } from 'node:process';
|
|
3
|
+
|
|
4
|
+
/**
 * Prompt the user on stdin/stdout and return the raw answer.
 * The readline interface is always closed, even if the prompt fails.
 * @param {string} question
 * @returns {Promise<string>}
 */
export async function ask(question) {
  const rl = readline.createInterface({ input, output });
  try {
    const answer = await rl.question(question);
    return answer;
  } finally {
    rl.close();
  }
}
|
|
15
|
+
|
|
16
|
+
/**
 * Yes/no prompt: true only for "y" or "yes" (case-insensitive, trimmed).
 * @param {string} question
 * @returns {Promise<boolean>}
 */
export async function confirm(question) {
  const answer = await ask(question);
  const normalized = answer.trim().toLowerCase();
  return normalized === 'y' || normalized === 'yes';
}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @typedef {import('./backup-scanner.js').BackupEntry} BackupEntry
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
/**
 * Union keep policy: an entry is kept when it is within the newest `keepLast`
 * entries OR newer than the `keepDays` cutoff.
 * When NO policy is supplied at all, everything is kept — previously an empty
 * policy planned the removal of every backup, which is a dangerous default
 * for a cleanup tool.
 * @param {BackupEntry[]} sortedNewestFirst entries ordered newest → oldest
 * @param {{ keepLast?: number, keepDays?: number, now?: number }} opts
 * @returns {{ keep: BackupEntry[], remove: BackupEntry[] }}
 */
export function planRetention(sortedNewestFirst, opts) {
  const { keepLast, keepDays } = opts;
  // Safety guard: no retention criteria means "remove nothing".
  if (keepLast == null && keepDays == null) {
    return { keep: [...sortedNewestFirst], remove: [] };
  }

  const now = opts.now ?? Date.now();
  const cutoff = keepDays != null ? now - keepDays * 86400000 : null;

  /** @type {BackupEntry[]} */
  const keep = [];
  /** @type {BackupEntry[]} */
  const remove = [];

  sortedNewestFirst.forEach((entry, index) => {
    // Prefer the timestamp parsed from the filename; fall back to mtime.
    const when = (entry.parsedDate ?? entry.mtime).getTime();
    const keptByCount = keepLast != null && index < keepLast;
    const keptByAge = cutoff != null && when >= cutoff;
    if (keptByCount || keptByAge) keep.push(entry);
    else remove.push(entry);
  });

  return { keep, remove };
}
|
package/src/utils/ui.js
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
import chalk from 'chalk';
|
|
2
|
+
|
|
3
|
+
/** @typedef {{ verbose?: boolean }} LogContext */
|
|
4
|
+
|
|
5
|
+
/**
 * Print a debug line to stderr, but only when verbose mode is enabled.
 * @param {LogContext} ctx
 * @param {...unknown} args
 */
export function debug(ctx, ...args) {
  if (!ctx?.verbose) return;
  console.error(chalk.gray('[debug]'), ...args);
}
|
|
14
|
+
|
|
15
|
+
/**
 * Emoji prefixes shared by all CLI output helpers.
 * Frozen so no command can mutate the shared table at runtime.
 */
export const icons = Object.freeze({
  ok: '✅',
  err: '❌',
  warn: '⚠️',
  pkg: '📦',
  save: '💾',
  link: '🔗',
  chart: '📊',
  empty: '📭',
  spin: '🔄',
});
|
|
26
|
+
|
|
27
|
+
/**
 * Print a success line (✅ prefix) to stdout.
 * @param {string} msg
 */
export function success(msg) {
  const line = `${icons.ok} ${msg}`;
  console.log(line);
}
|
|
33
|
+
|
|
34
|
+
/**
 * Print an error line (❌ prefix) to stderr.
 * @param {string} msg
 */
export function errorLine(msg) {
  const line = `${icons.err} ${msg}`;
  console.error(line);
}
|
|
40
|
+
|
|
41
|
+
/**
 * Print a warning line (⚠️ prefix) to stderr.
 * @param {string} msg
 */
export function warnLine(msg) {
  const line = `${icons.warn} ${msg}`;
  console.error(line);
}
|
|
47
|
+
|
|
48
|
+
/**
 * Print a plain informational line to stdout (no icon prefix).
 * @param {string} msg
 */
export function infoLine(msg) {
  console.log(msg);
}
|
|
54
|
+
|
|
55
|
+
/**
 * Print an AppError to stderr: the message line, then the dimmed
 * suggestion (if any) indented beneath it.
 * @param {import('./errors.js').AppError} err
 */
export function printErrorWithSuggestion(err) {
  errorLine(err.message);
  if (!err.suggestion) return;
  console.error(chalk.dim(` ${err.suggestion}`));
}
|