universal-db-backup 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +187 -0
- package/bin/cli.js +104 -0
- package/package.json +37 -0
- package/src/backup/mongodb.js +62 -0
- package/src/backup/mysql.js +92 -0
- package/src/backup/postgres.js +88 -0
- package/src/index.js +101 -0
- package/src/utils/logger.js +59 -0
- package/src/utils/zip.js +47 -0
package/README.md
ADDED
@@ -0,0 +1,187 @@
# universal-db-backup

A universal database backup tool for Node.js that supports MongoDB, MySQL, and PostgreSQL using native dump utilities (`mongodump`, `mysqldump`, `pg_dump`).

## Features

- **Multi-database support**: MongoDB, MySQL, PostgreSQL
- **Parameter-based configuration**: Simple object config for each database type
- **Promise-based API**: Clean async/await support
- **Optional ZIP compression**: Compress backups after creation
- **Scheduling**: Cron-based recurring backups via `node-cron`
- **CLI support**: Run backups from the command line

## Prerequisites

Install the appropriate dump utility for your database:

- **MongoDB**: [mongodump](https://docs.mongodb.com/database-tools/mongodump/) (included with MongoDB tools)
- **MySQL**: [mysqldump](https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html) (included with MySQL client)
- **PostgreSQL**: [pg_dump](https://www.postgresql.org/docs/current/app-pgdump.html) (included with PostgreSQL)

## Installation

```bash
npm install universal-db-backup
```

## API Usage

### MongoDB

```javascript
const { backup } = require('universal-db-backup');

backup({
  dbType: 'mongodb',
  uri: 'mongodb://localhost:27017/mydb',
  output: './backup',
  zip: true,  // optional: compress to ZIP
  gzip: true, // optional: use gzip for mongodump
}).then((result) => {
  console.log('Backup path:', result.path);
  if (result.zipPath) console.log('ZIP path:', result.zipPath);
}).catch(console.error);
```

### MySQL

```javascript
const { backup } = require('universal-db-backup');

backup({
  dbType: 'mysql',
  host: 'localhost',
  port: 3306,
  user: 'root',
  password: '123',
  database: 'mydb',
  output: './backup',
  zip: true, // optional
}).then((result) => {
  console.log('Backup path:', result.path);
}).catch(console.error);
```

### PostgreSQL

```javascript
const { backup } = require('universal-db-backup');

backup({
  dbType: 'postgres',
  host: 'localhost',
  port: 5432,
  user: 'postgres',
  password: 'secret',
  database: 'mydb',
  output: './backup',
  zip: true, // optional
  format: 'plain', // 'plain' | 'custom' | 'directory' | 'tar'
}).then((result) => {
  console.log('Backup path:', result.path);
}).catch(console.error);
```

### Scheduled Backups

```javascript
const { schedule } = require('universal-db-backup');

const job = schedule(
  {
    dbType: 'mongodb',
    uri: 'mongodb://localhost:27017/mydb',
    output: './backup',
    zip: true,
  },
  '0 2 * * *', // Every day at 2:00 AM
  { zip: true }
);

// Stop scheduler when needed
// job.stop();
```

### async/await

```javascript
const { backup } = require('universal-db-backup');

async function runBackup() {
  try {
    const result = await backup({
      dbType: 'mysql',
      host: 'localhost',
      user: 'root',
      password: '123',
      database: 'mydb',
      output: './backup',
    });
    console.log('Success:', result.path);
  } catch (err) {
    console.error('Failed:', err.message);
  }
}

runBackup();
```

## CLI Usage

Install globally or use via `npx`:

```bash
npm install -g universal-db-backup
```

### One-time backup

```bash
# MongoDB
universal-db-backup backup -t mongodb -u "mongodb://localhost:27017/mydb" -o ./backup

# MySQL
universal-db-backup backup -t mysql -H localhost -U root -p 123 -d mydb -o ./backup

# PostgreSQL
universal-db-backup backup -t postgres -H localhost -U postgres -p secret -d mydb -o ./backup

# With ZIP compression
universal-db-backup backup -t mongodb -u "mongodb://localhost:27017/mydb" -o ./backup -z
```

### Scheduled backup

```bash
# Daily at 2:00 AM
universal-db-backup schedule -t mysql -H localhost -U root -p 123 -d mydb -o ./backup -c "0 2 * * *"

# Every 6 hours
universal-db-backup schedule -t mongodb -u "mongodb://localhost:27017/mydb" -o ./backup -c "0 */6 * * *"
```

### Short alias

```bash
udb-backup backup -t mysql -U root -d mydb -o ./backup
```

### CLI Options

| Option | Short | Description |
|--------|-------|-------------|
| --type | -t | Database type: mongodb, mysql, postgres |
| --output | -o | Output directory |
| --uri | -u | MongoDB connection URI |
| --host | -H | Database host (default: localhost) |
| --port | -P | Database port |
| --user | -U | Database user |
| --password | -p | Database password |
| --database | -d | Database name |
| --zip | -z | Compress backup to ZIP |
| --cron | -c | Cron expression (schedule command only) |

## License

MIT
package/bin/cli.js
ADDED
@@ -0,0 +1,104 @@
#!/usr/bin/env node

/**
 * universal-db-backup CLI
 * Usage: universal-db-backup [options] or udb-backup [options]
 */

const path = require('path');
const { program } = require('commander');
const { backup, schedule, logger } = require('../src/index');

const pkg = require(path.join(__dirname, '../package.json'));

program
  .name('universal-db-backup')
  .description('Universal database backup tool for MongoDB, MySQL, and PostgreSQL')
  .version(pkg.version);

program
  .command('backup')
  .description('Run a one-time backup')
  .requiredOption('-t, --type <dbType>', 'Database type: mongodb, mysql, postgres')
  .requiredOption('-o, --output <path>', 'Output directory for backup')
  .option('-u, --uri <uri>', 'MongoDB connection URI (for mongodb)')
  .option('-H, --host <host>', 'Database host (for mysql/postgres)', 'localhost')
  .option('-P, --port <port>', 'Database port', '')
  .option('-U, --user <user>', 'Database user (for mysql/postgres)')
  .option('-p, --password <password>', 'Database password')
  .option('-d, --database <name>', 'Database name (for mysql/postgres)')
  .option('-z, --zip', 'Compress backup to ZIP after completion')
  .option('--gzip', 'Use gzip for MongoDB dump')
  .action(async (options) => {
    try {
      const config = buildConfig(options);
      const result = await backup(config);
      logger.success(`Backup completed: ${result.path}`);
      if (result.zipPath) {
        logger.success(`Compressed: ${result.zipPath}`);
      }
      process.exit(0);
    } catch (err) {
      logger.error(err.message);
      process.exit(1);
    }
  });

program
  .command('schedule')
  .description('Schedule recurring backups with cron')
  .requiredOption('-t, --type <dbType>', 'Database type: mongodb, mysql, postgres')
  .requiredOption('-o, --output <path>', 'Output directory for backup')
  .requiredOption('-c, --cron <expression>', 'Cron expression (e.g., "0 2 * * *" for daily at 2 AM)')
  .option('-u, --uri <uri>', 'MongoDB connection URI (for mongodb)')
  .option('-H, --host <host>', 'Database host (for mysql/postgres)', 'localhost')
  .option('-P, --port <port>', 'Database port', '')
  .option('-U, --user <user>', 'Database user (for mysql/postgres)')
  .option('-p, --password <password>', 'Database password')
  .option('-d, --database <name>', 'Database name (for mysql/postgres)')
  .option('-z, --zip', 'Compress backup to ZIP after completion')
  .action(async (options) => {
    try {
      const config = buildConfig(options);
      const job = schedule(config, options.cron, { zip: options.zip });
      logger.success(`Scheduled backup: ${options.cron} (${config.dbType})`);
      logger.info('Press Ctrl+C to stop scheduler');
    } catch (err) {
      logger.error(err.message);
      process.exit(1);
    }
  });

/**
 * Build backup config from CLI options
 */
function buildConfig(options) {
  const config = {
    dbType: options.type,
    output: options.output,
    zip: options.zip,
  };

  if (options.type === 'mongodb') {
    if (!options.uri) {
      throw new Error('MongoDB requires --uri');
    }
    config.uri = options.uri;
    if (options.gzip) config.gzip = true;
  } else if (options.type === 'mysql' || options.type === 'postgres') {
    config.host = options.host;
    config.user = options.user;
    config.password = options.password;
    config.database = options.database;
    if (options.port) {
      config.port = parseInt(options.port, 10);
    }
    if (!config.user || !config.database) {
      throw new Error('MySQL/Postgres requires --user and --database');
    }
  }

  return config;
}

program.parse();
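For reference, a minimal sketch of the config object `buildConfig` above assembles for one of the README's sample invocations; the flag values are the README's placeholders, and the shape follows directly from the option handling shown:

```javascript
// Sketch: the object buildConfig() returns for
//   universal-db-backup backup -t mysql -H localhost -U root -p 123 -d mydb -o ./backup -z
// (values are illustrative; port is only set when -P is passed, parsed with parseInt(..., 10))
const expectedConfig = {
  dbType: 'mysql',
  output: './backup',
  zip: true,
  host: 'localhost',
  user: 'root',
  password: '123',
  database: 'mydb',
};
```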
package/package.json
ADDED
@@ -0,0 +1,37 @@
{
  "name": "universal-db-backup",
  "version": "1.0.0",
  "description": "Universal database backup tool supporting MongoDB, MySQL, and PostgreSQL",
  "main": "src/index.js",
  "bin": {
    "universal-db-backup": "bin/cli.js",
    "udb-backup": "bin/cli.js"
  },
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [
    "database",
    "backup",
    "mongodb",
    "mysql",
    "postgresql",
    "mongodump",
    "mysqldump",
    "pg_dump"
  ],
  "author": "",
  "license": "MIT",
  "dependencies": {
    "node-cron": "^3.0.3",
    "archiver": "^7.0.1",
    "commander": "^12.1.0"
  },
  "engines": {
    "node": ">=14.0.0"
  },
  "files": [
    "src/",
    "bin/"
  ]
}
package/src/backup/mongodb.js
ADDED
@@ -0,0 +1,62 @@
/**
 * MongoDB backup handler using mongodump
 * Requires mongodump to be installed and in PATH
 */

const { exec } = require('child_process');
const path = require('path');
const fs = require('fs');
const { promisify } = require('util');
const logger = require('../utils/logger');

const execAsync = promisify(exec);

/**
 * Execute MongoDB backup using mongodump
 * @param {Object} config - Backup configuration
 * @param {string} config.uri - MongoDB connection URI (e.g., mongodb://localhost:27017/mydb)
 * @param {string} config.output - Output directory for backup
 * @param {boolean} [config.gzip] - Use gzip compression for dump
 * @returns {Promise<string>} - Path to backup directory
 */
async function backupMongoDB(config) {
  const { uri, output, gzip = false } = config;

  if (!uri) {
    throw new Error('MongoDB backup requires "uri" in config');
  }

  if (!output) {
    throw new Error('Backup requires "output" directory in config');
  }

  // Create output directory if it doesn't exist
  const outputPath = path.resolve(output);
  const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
  const backupDir = path.join(outputPath, `mongodb-backup-${timestamp}`);

  if (!fs.existsSync(outputPath)) {
    fs.mkdirSync(outputPath, { recursive: true });
  }

  let command = `mongodump --uri="${uri}" --out="${backupDir}"`;
  if (gzip) {
    command += ' --gzip';
  }

  logger.info(`Starting MongoDB backup to ${backupDir}`);
  try {
    const { stdout, stderr } = await execAsync(command);
    if (stdout) logger.info(stdout.trim());
    if (stderr) logger.warn(stderr.trim());
    logger.success(`MongoDB backup completed: ${backupDir}`);
    return backupDir;
  } catch (err) {
    logger.error(`MongoDB backup failed: ${err.message}`);
    throw new Error(`MongoDB backup failed: ${err.message}. Ensure mongodump is installed.`);
  }
}

module.exports = {
  backupMongoDB,
};
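A minimal sketch of calling this handler directly rather than going through the `backup()` router, assuming the package's internal `src/` layout remains requireable (it is published via the `files` field); the URI and output path are placeholders:

```javascript
// Sketch: using the MongoDB handler directly (URI and output are placeholders).
const { backupMongoDB } = require('universal-db-backup/src/backup/mongodb');

backupMongoDB({
  uri: 'mongodb://localhost:27017/mydb',
  output: './backup',
  gzip: true, // adds --gzip to the mongodump command
})
  .then((dir) => console.log('Dump written to', dir))
  .catch((err) => console.error(err.message));
```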
package/src/backup/mysql.js
ADDED
@@ -0,0 +1,92 @@
/**
 * MySQL backup handler using mysqldump
 * Requires mysqldump to be installed and in PATH
 */

const { exec } = require('child_process');
const path = require('path');
const fs = require('fs');
const { promisify } = require('util');
const logger = require('../utils/logger');

const execAsync = promisify(exec);

/**
 * Build mysqldump command with proper escaping
 * @param {Object} config - Backup configuration
 * @returns {{command: string, backupFile: string}} - mysqldump command and target backup file path
 */
function buildMysqldumpConfig(config) {
  const {
    host = 'localhost',
    port = 3306,
    user,
    password,
    database,
    output,
  } = config;

  if (!user || !database) {
    throw new Error('MySQL backup requires "user" and "database" in config');
  }

  if (!output) {
    throw new Error('Backup requires "output" directory in config');
  }

  const outputPath = path.resolve(output);
  const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
  const backupFile = path.join(outputPath, `mysql-${database}-${timestamp}.sql`);

  if (!fs.existsSync(outputPath)) {
    fs.mkdirSync(outputPath, { recursive: true });
  }

  // Use --result-file for cross-platform support (no shell redirect needed)
  const args = [
    '-h', host,
    '-P', String(port),
    '-u', user,
    '--single-transaction',
    '--routines',
    '--triggers',
    '--result-file', backupFile,
    database,
  ];
  const command = 'mysqldump ' + args.map((a) => `"${String(a).replace(/"/g, '\\"')}"`).join(' ');

  return { command, backupFile };
}

/**
 * Execute MySQL backup using mysqldump
 * @param {Object} config - Backup configuration
 * @param {string} config.host - MySQL host (default: localhost)
 * @param {number} config.port - MySQL port (default: 3306)
 * @param {string} config.user - MySQL user
 * @param {string} [config.password] - MySQL password
 * @param {string} config.database - Database name
 * @param {string} config.output - Output directory for backup
 * @returns {Promise<string>} - Path to backup file
 */
async function backupMySQL(config) {
  const { command, backupFile } = buildMysqldumpConfig(config);

  logger.info(`Starting MySQL backup to ${backupFile}`);
  try {
    await execAsync(command, {
      shell: true,
      env: { ...process.env, MYSQL_PWD: config.password || '' },
    });
    logger.success(`MySQL backup completed: ${backupFile}`);
    return backupFile;
  } catch (err) {
    logger.error(`MySQL backup failed: ${err.message}`);
    throw new Error(`MySQL backup failed: ${err.message}. Ensure mysqldump is installed.`);
  }
}

module.exports = {
  backupMySQL,
  buildMysqldumpConfig,
};
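Because `buildMysqldumpConfig` is exported alongside `backupMySQL`, the command it assembles can be inspected without running mysqldump. A short sketch, assuming a deep require into the package's `src/` tree; the timestamp in the file name will differ on every run:

```javascript
// Sketch: inspecting the mysqldump invocation without executing it.
// Note: this also creates the output directory if it does not exist yet.
const { buildMysqldumpConfig } = require('universal-db-backup/src/backup/mysql');

const { command, backupFile } = buildMysqldumpConfig({
  user: 'root',
  database: 'mydb',
  output: './backup',
});

console.log(backupFile); // <resolved ./backup>/mysql-mydb-<timestamp>.sql
console.log(command);    // mysqldump "-h" "localhost" "-P" "3306" "-u" "root" "--single-transaction" ...
```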
package/src/backup/postgres.js
ADDED
@@ -0,0 +1,88 @@
/**
 * PostgreSQL backup handler using pg_dump
 * Requires pg_dump to be installed and in PATH
 */

const { exec } = require('child_process');
const path = require('path');
const fs = require('fs');
const { promisify } = require('util');
const logger = require('../utils/logger');

const execAsync = promisify(exec);

/**
 * Execute PostgreSQL backup using pg_dump
 * @param {Object} config - Backup configuration
 * @param {string} [config.host] - PostgreSQL host (default: localhost)
 * @param {number} [config.port] - PostgreSQL port (default: 5432)
 * @param {string} [config.user] - PostgreSQL user
 * @param {string} [config.password] - PostgreSQL password (set via PGPASSWORD env)
 * @param {string} config.database - Database name
 * @param {string} config.output - Output directory for backup
 * @param {string} [config.format] - pg_dump format: 'plain' | 'custom' | 'directory' | 'tar' (default: plain)
 * @returns {Promise<string>} - Path to backup file
 */
async function backupPostgres(config) {
  const {
    host = 'localhost',
    port = 5432,
    user,
    password,
    database,
    output,
    format = 'plain',
  } = config;

  if (!database) {
    throw new Error('PostgreSQL backup requires "database" in config');
  }

  if (!output) {
    throw new Error('Backup requires "output" directory in config');
  }

  const outputPath = path.resolve(output);
  const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
  const extension = format === 'plain' ? 'sql' : format === 'custom' ? 'dump' : format;
  const backupFile = path.join(outputPath, `postgres-${database}-${timestamp}.${extension}`);

  if (!fs.existsSync(outputPath)) {
    fs.mkdirSync(outputPath, { recursive: true });
  }

  const args = [
    '-h', host,
    '-p', port,
    '-U', user || process.env.USER || 'postgres',
    '-d', database,
    '-f', backupFile,
  ];

  if (format !== 'plain') {
    args.push('-F', format === 'custom' ? 'c' : format === 'directory' ? 'd' : 't');
  }

  const command = 'pg_dump ' + args.map((a) => `"${String(a).replace(/"/g, '\\"')}"`).join(' ');
  const env = { ...process.env };
  if (password) {
    env.PGPASSWORD = password;
  }

  logger.info(`Starting PostgreSQL backup to ${backupFile}`);
  try {
    await execAsync(command, {
      shell: true,
      env,
    });
    logger.success(`PostgreSQL backup completed: ${backupFile}`);
    return backupFile;
  } catch (err) {
    logger.error(`PostgreSQL backup failed: ${err.message}`);
    throw new Error(`PostgreSQL backup failed: ${err.message}. Ensure pg_dump is installed.`);
  }
}

module.exports = {
  backupPostgres,
};
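The `format` option maps onto pg_dump's `-F` flag (`custom` → `c`, `directory` → `d`, `tar` → `t`) and also picks the output file extension. A small sketch of a custom-format dump, with placeholder credentials and a deep require into the package's `src/` tree:

```javascript
// Sketch: custom-format dump; writes postgres-mydb-<timestamp>.dump and passes -F c to pg_dump.
const { backupPostgres } = require('universal-db-backup/src/backup/postgres');

backupPostgres({
  host: 'localhost',
  user: 'postgres',
  password: 'secret', // handed to pg_dump via the PGPASSWORD environment variable
  database: 'mydb',
  output: './backup',
  format: 'custom',
})
  .then((file) => console.log('Backup file:', file))
  .catch((err) => console.error(err.message));
```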
package/src/index.js
ADDED
@@ -0,0 +1,101 @@
/**
 * universal-db-backup
 * Main controller - detects dbType and routes to appropriate backup handler
 */

const path = require('path');
const { backupMongoDB } = require('./backup/mongodb');
const { backupMySQL } = require('./backup/mysql');
const { backupPostgres } = require('./backup/postgres');
const { compressDirectory } = require('./utils/zip');
const cron = require('node-cron');
const logger = require('./utils/logger');

/** Supported database types */
const SUPPORTED_DB_TYPES = ['mongodb', 'mysql', 'postgres', 'postgresql'];

/**
 * Schedule a backup using cron expression
 * @param {Object} config - Backup configuration (same as backup())
 * @param {string} cronExpression - Cron expression (e.g., '0 2 * * *' for daily at 2 AM)
 * @param {Object} [options] - Additional options
 * @param {boolean} [options.zip] - Compress backup after completion
 * @returns {Object} - Cron job object with .stop() method
 */
function schedule(config, cronExpression, options = {}) {
  if (!cron.validate(cronExpression)) {
    throw new Error(`Invalid cron expression: ${cronExpression}`);
  }

  const backupConfig = { ...config, zip: options.zip || config.zip || false };
  const job = cron.schedule(cronExpression, async () => {
    try {
      logger.info(`Scheduled backup started (${config.dbType})`);
      const result = await backup(backupConfig);
      logger.success(`Scheduled backup completed: ${result.path}`);
    } catch (err) {
      logger.error(`Scheduled backup failed: ${err.message}`);
    }
  });

  return job;
}

/**
 * Execute database backup based on configuration
 * @param {Object} config - Backup configuration
 * @param {string} config.dbType - Database type: 'mongodb' | 'mysql' | 'postgres' | 'postgresql'
 * @param {string} config.output - Output directory for backup
 * @param {boolean} [config.zip] - Compress backup to ZIP after completion (default: false)
 * @param {...*} config - Additional db-specific config (uri for MongoDB; host, user, password, database for MySQL/Postgres)
 * @returns {Promise<Object>} - { path: string, zipPath?: string }
 */
async function backup(config) {
  const { dbType, output, zip: shouldZip = false } = config;

  if (!dbType) {
    throw new Error('Backup requires "dbType" in config (mongodb | mysql | postgres)');
  }

  const normalizedType = dbType.toLowerCase();
  if (!SUPPORTED_DB_TYPES.includes(normalizedType)) {
    throw new Error(
      `Unsupported dbType: ${dbType}. Supported: ${SUPPORTED_DB_TYPES.join(', ')}`
    );
  }

  let backupPath;

  switch (normalizedType) {
    case 'mongodb':
      backupPath = await backupMongoDB(config);
      break;
    case 'mysql':
      backupPath = await backupMySQL(config);
      break;
    case 'postgres':
    case 'postgresql':
      backupPath = await backupPostgres(config);
      break;
    default:
      throw new Error(`No handler for dbType: ${dbType}`);
  }

  const result = { path: backupPath };

  if (shouldZip) {
    logger.info('Compressing backup to ZIP...');
    const zipPath = await compressDirectory(backupPath);
    result.zipPath = zipPath;
    logger.success(`Backup compressed: ${zipPath}`);
  }

  return result;
}

module.exports = {
  backup,
  schedule,
  SUPPORTED_DB_TYPES,
  logger,
};
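Since `schedule()` returns the underlying node-cron job, it can be stopped programmatically rather than with Ctrl+C; a minimal sketch using the package's main entry, with placeholder connection details:

```javascript
// Sketch: schedule() validates the cron expression and returns the node-cron job.
const { schedule } = require('universal-db-backup');

const job = schedule(
  { dbType: 'postgres', user: 'postgres', password: 'secret', database: 'mydb', output: './backup' },
  '0 */6 * * *' // every 6 hours
);

// Later, e.g. on shutdown:
process.on('SIGTERM', () => job.stop());
```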
package/src/utils/logger.js
ADDED
@@ -0,0 +1,59 @@
/**
 * Simple logger utility for consistent console output
 * Supports log levels: info, success, warn, error
 */

const LOG_LEVELS = {
  info: '\x1b[36m', // Cyan
  success: '\x1b[32m', // Green
  warn: '\x1b[33m', // Yellow
  error: '\x1b[31m', // Red
};
const RESET = '\x1b[0m';

/**
 * Format and log a message with optional level
 * @param {string} message - Message to log
 * @param {string} level - Log level (info|success|warn|error)
 */
function log(message, level = 'info') {
  const color = LOG_LEVELS[level] || LOG_LEVELS.info;
  const prefix = level === 'success' ? '✓' : level === 'error' ? '✗' : level === 'warn' ? '!' : '→';
  console.log(`${color}${prefix} ${message}${RESET}`);
}

/**
 * Log info message
 */
function info(message) {
  log(message, 'info');
}

/**
 * Log success message
 */
function success(message) {
  log(message, 'success');
}

/**
 * Log warning message
 */
function warn(message) {
  log(message, 'warn');
}

/**
 * Log error message
 */
function error(message) {
  log(message, 'error');
}

module.exports = {
  log,
  info,
  success,
  warn,
  error,
};
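The logger is re-exported from the package's main entry (see `src/index.js`), so the same colored output can be reused in scripts that wrap the backups; a minimal sketch:

```javascript
// Sketch: reusing the package's logger (re-exported from src/index.js).
const { logger } = require('universal-db-backup');

logger.info('Starting nightly backups');   // cyan "→" prefix
logger.warn('Low disk space on /backup');  // yellow "!" prefix
logger.success('All databases backed up'); // green "✓" prefix
logger.error('mysqldump not found');       // red "✗" prefix
```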
package/src/utils/zip.js
ADDED
@@ -0,0 +1,47 @@
/**
 * ZIP compression utility for backup folders
 * Uses archiver to create compressed archives
 */

const path = require('path');
const fs = require('fs');
const archiver = require('archiver');

/**
 * Compress a directory into a ZIP file
 * @param {string} sourceDir - Path to directory to compress
 * @param {string} outputPath - Path for output ZIP file (optional, defaults to sourceDir.zip)
 * @returns {Promise<string>} - Resolves with path to created ZIP file
 */
function compressDirectory(sourceDir, outputPath = null) {
  return new Promise((resolve, reject) => {
    if (!fs.existsSync(sourceDir)) {
      reject(new Error(`Source directory does not exist: ${sourceDir}`));
      return;
    }

    const dirName = path.basename(sourceDir);
    const zipPath = outputPath || path.join(path.dirname(sourceDir), `${dirName}.zip`);

    const output = fs.createWriteStream(zipPath);
    const archive = archiver('zip', {
      zlib: { level: 6 },
    });

    output.on('close', () => {
      resolve(zipPath);
    });

    archive.on('error', (err) => {
      reject(err);
    });

    archive.pipe(output);
    archive.directory(sourceDir, dirName);
    archive.finalize();
  });
}

module.exports = {
  compressDirectory,
};
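The ZIP helper can also be used on its own, for example to compress an older backup directory after the fact; a sketch with a hypothetical, illustrative path and a deep require into the package's `src/` tree:

```javascript
// Sketch: compressing an existing backup directory (the path is a placeholder).
const { compressDirectory } = require('universal-db-backup/src/utils/zip');

compressDirectory('./backup/mongodb-backup-2024-01-01T02-00-00-000Z')
  .then((zipPath) => console.log('Created', zipPath)) // sibling <directory name>.zip
  .catch((err) => console.error(err.message));
```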