smoonb 0.0.84 → 0.0.86
package/package.json CHANGED
@@ -1,28 +1,54 @@
 const chalk = require('chalk');
 const path = require('path');
 const fs = require('fs').promises;
-const {
+const { spawn } = require('child_process');
 const { t } = require('../../../i18n');
 
+function formatBytes(bytes) {
+  if (bytes === 0) return '0 B';
+  const k = 1024;
+  const sizes = ['B', 'KB', 'MB', 'GB'];
+  const i = Math.floor(Math.log(bytes) / Math.log(k));
+  return `${parseFloat((bytes / Math.pow(k, i)).toFixed(2))} ${sizes[i]}`;
+}
+
+function formatDuration(ms) {
+  if (ms < 1000) return `${ms}ms`;
+  const s = Math.floor(ms / 1000);
+  const m = Math.floor(s / 60);
+  const h = Math.floor(m / 60);
+  if (h > 0) return `${h}h ${m % 60}m ${s % 60}s`;
+  if (m > 0) return `${m}m ${s % 60}s`;
+  return `${s}s`;
+}
+
+async function exists(filePath) {
+  try {
+    await fs.access(filePath);
+    return true;
+  } catch {
+    return false;
+  }
+}
+
 /**
  * Step 1: Database backup via pg_dumpall in Docker (identical to the Dashboard)
+ * With progress feedback: file size, speed, and elapsed time.
  */
 module.exports = async ({ databaseUrl, backupDir }) => {
   try {
     const getT = global.smoonbI18n?.t || t;
     console.log(chalk.white(`  - ${getT('backup.steps.database.creating')}`));
-
-    // Extract the credentials from databaseUrl
+
     const urlMatch = databaseUrl.match(/postgresql:\/\/([^:]+):([^@]+)@([^:]+):(\d+)\/(.+)/);
-
+
     if (!urlMatch) {
       const getT = global.smoonbI18n?.t || t;
       throw new Error(getT('error.databaseUrlInvalidSimple'));
     }
-
+
     const [, username, password, host, port] = urlMatch;
-
-    // Generate the file name the same way the dashboard does
+
     const now = new Date();
     const day = String(now.getDate()).padStart(2, '0');
     const month = String(now.getMonth() + 1).padStart(2, '0');
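
A few worked examples, traced by hand from the helpers and the credential regex above (illustrative values only, not output from the package):

    // formatBytes / formatDuration, as defined in this hunk
    formatBytes(0);          // '0 B'
    formatBytes(1536);       // '1.5 KB'
    formatDuration(500);     // '500ms'
    formatDuration(65000);   // '1m 5s'
    formatDuration(3725000); // '1h 2m 5s'

    // The credential regex applied to a hypothetical connection string
    const url = 'postgresql://postgres:s3cret@db.example.com:5432/postgres';
    const m = url.match(/postgresql:\/\/([^:]+):([^@]+)@([^:]+):(\d+)\/(.+)/);
    // m[1..5] → 'postgres', 's3cret', 'db.example.com', '5432', 'postgres'

Because the password group is ([^@]+), a password that itself contains '@' will not match and raises error.databaseUrlInvalidSimple, and percent-encoded credentials are not decoded.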
@@ -30,42 +56,124 @@ module.exports = async ({ databaseUrl, backupDir }) => {
     const hours = String(now.getHours()).padStart(2, '0');
     const minutes = String(now.getMinutes()).padStart(2, '0');
     const seconds = String(now.getSeconds()).padStart(2, '0');
-
+
     const fileName = `db_cluster-${day}-${month}-${year}@${hours}-${minutes}-${seconds}.backup`;
-
-    // Use an absolute path, same as the Edge Functions step
     const backupDirAbs = path.resolve(backupDir);
-
-
-    const
-    '
-
-
-    'postgres:17 pg_dumpall',
-
-
-
-
-    ]
-
+    const outputPath = path.join(backupDirAbs, fileName);
+
+    const dockerArgs = [
+      'run', '--rm', '--network', 'host',
+      '-v', `${backupDirAbs}:/host`,
+      '-e', `PGPASSWORD=${password}`,
+      'postgres:17', 'pg_dumpall',
+      '-h', host,
+      '-p', port,
+      '-U', username,
+      '-f', `/host/${fileName}`
+    ];
+
     console.log(chalk.white(`  - ${getT('backup.steps.database.executing')}`));
-
-
-
-
-
-
-
-
-
-
-
+
+    const startTime = Date.now();
+    let lastSize = 0;
+    let lastTime = startTime;
+    let ticker = null;
+
+    const runDump = () => new Promise((resolve, reject) => {
+      const proc = spawn('docker', dockerArgs, { stdio: ['ignore', 'pipe', 'pipe'] });
+
+      proc.stderr.on('data', (chunk) => process.stderr.write(chunk));
+
+      const pollFile = async () => {
+        if (!(await exists(outputPath))) return;
+        const stat = await fs.stat(outputPath).catch(() => null);
+        if (!stat) return;
+        const size = stat.size;
+        const elapsed = Date.now() - startTime;
+        const deltaTime = (Date.now() - lastTime) / 1000;
+        const speed = deltaTime > 0 ? (size - lastSize) / deltaTime : 0;
+        lastSize = size;
+        lastTime = Date.now();
+        const line = `    📦 ${formatBytes(size)} | ${formatDuration(elapsed)} | ${formatBytes(speed)}/s`;
+        process.stdout.write(`\r${line}`);
+      };
+
+      ticker = setInterval(pollFile, 500);
+
+      proc.on('close', (code) => {
+        if (ticker) {
+          clearInterval(ticker);
+          ticker = null;
+        }
+        process.stdout.write('\r' + ' '.repeat(80) + '\r');
+        if (code !== 0) {
+          reject(new Error(`pg_dumpall exited with code ${code}`));
+        } else {
+          resolve();
+        }
+      });
+
+      proc.on('error', (err) => {
+        if (ticker) clearInterval(ticker);
+        reject(err);
+      });
+    });
+
+    await runDump();
+
+    const gzipArgs = [
+      'run', '--rm',
+      '-v', `${backupDirAbs}:/host`,
+      'postgres:17', 'gzip', `/host/${fileName}`
+    ];
+
+    const gzipStart = Date.now();
+    let gzipTicker = null;
     const finalFileName = `${fileName}.gz`;
+    const gzipOutputPath = path.join(backupDirAbs, finalFileName);
+
+    const runGzip = () => new Promise((resolve, reject) => {
+      const proc = spawn('docker', gzipArgs, { stdio: ['ignore', 'pipe', 'pipe'] });
+
+      proc.stderr.on('data', (chunk) => process.stderr.write(chunk));
+
+      const pollGzip = async () => {
+        if (!(await exists(gzipOutputPath))) return;
+        const stat = await fs.stat(gzipOutputPath).catch(() => null);
+        if (!stat) return;
+        const size = stat.size;
+        const elapsed = Date.now() - gzipStart;
+        process.stdout.write(`\r    📦 ${formatBytes(size)} | ${formatDuration(elapsed)}\r`);
+      };
+
+      gzipTicker = setInterval(pollGzip, 300);
+
+      proc.on('close', (code) => {
+        if (gzipTicker) {
+          clearInterval(gzipTicker);
+          gzipTicker = null;
+        }
+        process.stdout.write('\r' + ' '.repeat(80) + '\r');
+        if (code !== 0) {
+          reject(new Error(`gzip exited with code ${code}`));
+        } else {
+          resolve();
+        }
+      });
+
+      proc.on('error', (err) => {
+        if (gzipTicker) clearInterval(gzipTicker);
+        reject(err);
+      });
+    });
+
+    await runGzip();
+
     const stats = await fs.stat(path.join(backupDir, finalFileName));
     const sizeKB = (stats.size / 1024).toFixed(1);
-
+
     console.log(chalk.green(`    ✅ Database backup: ${finalFileName} (${sizeKB} KB)`));
-
+
     return { success: true, size: sizeKB, fileName: finalFileName };
   } catch (error) {
     const getT = global.smoonbI18n?.t || t;
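
For orientation: joined into one shell line, the spawn arguments above are equivalent to "docker run --rm --network host -v <backupDirAbs>:/host -e PGPASSWORD=<password> postgres:17 pg_dumpall -h <host> -p <port> -U <username> -f /host/<fileName>", and the second container then runs "gzip /host/<fileName>", which compresses the dump in place into <fileName>.gz; that is why the poller watches gzipOutputPath and the final stat targets finalFileName. The progress reporting is a reusable pattern on its own. A minimal standalone sketch, assuming a hypothetical long-running command that writes /tmp/out.bin:

    // Sketch only: spawn a child process and poll the growing output file's
    // size on an interval, redrawing a single status line with '\r'.
    const { spawn } = require('child_process');
    const fs = require('fs').promises;

    function watchFileSize(filePath, intervalMs = 500) {
      return setInterval(async () => {
        const stat = await fs.stat(filePath).catch(() => null); // file may not exist yet
        if (stat) process.stdout.write(`\r${stat.size} bytes`);
      }, intervalMs);
    }

    const proc = spawn('some-long-command', ['--out', '/tmp/out.bin']); // hypothetical command
    const ticker = watchFileSize('/tmp/out.bin');
    proc.on('close', () => {
      clearInterval(ticker);                              // stop polling
      process.stdout.write('\r' + ' '.repeat(80) + '\r'); // clear the status line
    });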
@@ -73,4 +181,3 @@ module.exports = async ({ databaseUrl, backupDir }) => {
     return { success: false };
   }
 };
-
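
The line numbers reset below: the remaining hunks come from other files in the package. Next is the Step 2 module (separate SQL dumps for troubleshooting):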
@@ -1,9 +1,32 @@
 const chalk = require('chalk');
 const path = require('path');
 const fs = require('fs').promises;
-const {
+const { spawn } = require('child_process');
 const { t } = require('../../../i18n');
 
+function runWithElapsedTicker(command, args, env, label) {
+  return new Promise((resolve, reject) => {
+    const start = Date.now();
+    const ticker = setInterval(() => {
+      const elapsed = Math.floor((Date.now() - start) / 1000);
+      process.stdout.write(`\r    ⏱ ${label} ${elapsed}s`);
+    }, 1000);
+    const proc = spawn(command, args, {
+      stdio: ['ignore', 'pipe', 'pipe'],
+      shell: true,
+      env: { ...process.env, ...env }
+    });
+    proc.stderr.on('data', (chunk) => process.stderr.write(chunk));
+    proc.on('close', (code) => {
+      clearInterval(ticker);
+      process.stdout.write('\r' + ' '.repeat(60) + '\r');
+      if (code !== 0) reject(new Error(`Exited with code ${code}`));
+      else resolve();
+    });
+    proc.on('error', reject);
+  });
+}
+
 /**
  * Step 2: Separate database backup (SQL files for troubleshooting)
  */
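
A note on the design: runWithElapsedTicker takes the whole supabase command as a single string and spawns it with shell: true, so the args array is unused (always []) at the call sites below, and the quoting embedded in the command string is what protects the db-url and file paths. Unlike the pg_dumpall step, the ticker here reports only elapsed seconds, with no file-size polling.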
@@ -21,10 +44,12 @@ module.exports = async ({ databaseUrl, backupDir, accessToken }) => {
     const schemaFile = path.join(backupDir, 'schema.sql');
 
     try {
-
-
-
-
+      await runWithElapsedTicker(
+        `supabase db dump --db-url "${dbUrl}" -f "${schemaFile}"`,
+        [],
+        { SUPABASE_ACCESS_TOKEN: accessToken || '' },
+        getT('backup.steps.database.separated.exportingSchema')
+      );
       const stats = await fs.stat(schemaFile);
       const sizeKB = (stats.size / 1024).toFixed(1);
       files.push({ filename: 'schema.sql', sizeKB });
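
At runtime the template above expands to a single shell line along the lines of: supabase db dump --db-url "postgresql://…" -f "/path/to/backup/schema.sql" (illustrative values). The next two hunks are the same call shape with --data-only and --role-only and different output files.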
@@ -39,10 +64,12 @@ module.exports = async ({ databaseUrl, backupDir, accessToken }) => {
     const dataFile = path.join(backupDir, 'data.sql');
 
     try {
-
-
-
-
+      await runWithElapsedTicker(
+        `supabase db dump --db-url "${dbUrl}" --data-only -f "${dataFile}"`,
+        [],
+        { SUPABASE_ACCESS_TOKEN: accessToken || '' },
+        getT('backup.steps.database.separated.exportingData')
+      );
       const stats = await fs.stat(dataFile);
       const sizeKB = (stats.size / 1024).toFixed(1);
       files.push({ filename: 'data.sql', sizeKB });
@@ -57,10 +84,12 @@ module.exports = async ({ databaseUrl, backupDir, accessToken }) => {
     const rolesFile = path.join(backupDir, 'roles.sql');
 
     try {
-
-
-
-
+      await runWithElapsedTicker(
+        `supabase db dump --db-url "${dbUrl}" --role-only -f "${rolesFile}"`,
+        [],
+        { SUPABASE_ACCESS_TOKEN: accessToken || '' },
+        getT('backup.steps.database.separated.exportingRoles')
+      );
       const stats = await fs.stat(rolesFile);
       const sizeKB = (stats.size / 1024).toFixed(1);
       files.push({ filename: 'roles.sql', sizeKB });
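
The next hunks come from a third file, the Step 7 module (custom roles):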
@@ -1,11 +1,31 @@
 const chalk = require('chalk');
 const path = require('path');
 const fs = require('fs').promises;
-const {
-const { exec } = require('child_process');
+const { spawn } = require('child_process');
 const { t } = require('../../../i18n');
 
-
+function runWithElapsedTicker(command, env, label) {
+  return new Promise((resolve, reject) => {
+    const start = Date.now();
+    const ticker = setInterval(() => {
+      const elapsed = Math.floor((Date.now() - start) / 1000);
+      process.stdout.write(`\r    ⏱ ${label} ${elapsed}s`);
+    }, 1000);
+    const proc = spawn(command, [], {
+      stdio: ['ignore', 'pipe', 'pipe'],
+      shell: true,
+      env: { ...process.env, ...env }
+    });
+    proc.stderr.on('data', (chunk) => process.stderr.write(chunk));
+    proc.on('close', (code) => {
+      clearInterval(ticker);
+      process.stdout.write('\r' + ' '.repeat(60) + '\r');
+      if (code !== 0) reject(new Error(`Exited with code ${code}`));
+      else resolve();
+    });
+    proc.on('error', reject);
+  });
+}
 
 /**
  * Step 7: Custom roles backup via SQL
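
Note that this file carries its own copy of runWithElapsedTicker, with the unused args parameter dropped: it takes (command, env, label) and spawns the command string directly.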
@@ -16,12 +36,10 @@ module.exports = async ({ databaseUrl, backupDir, accessToken }) => {
     console.log(chalk.white(`  - ${getT('backup.steps.roles.exporting')}`));
 
     const customRolesFile = path.join(backupDir, 'custom-roles.sql');
+    const cmd = `supabase db dump --db-url "${databaseUrl}" --role-only -f "${customRolesFile}"`;
 
     try {
-
-      await execAsync(`supabase db dump --db-url "${databaseUrl}" --role-only -f "${customRolesFile}"`, {
-        env: { ...process.env, SUPABASE_ACCESS_TOKEN: accessToken || '' }
-      });
+      await runWithElapsedTicker(cmd, { SUPABASE_ACCESS_TOKEN: accessToken || '' }, getT('backup.steps.roles.exporting'));
 
       const stats = await fs.stat(customRolesFile);
       const sizeKB = (stats.size / 1024).toFixed(1);
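
The replaced execAsync call buffered all output and reported nothing until the dump finished; the spawn-based helper streams stderr as it arrives and keeps an elapsed-seconds line alive during long dumps. The final hunk comes from a fourth file, the Edge Functions backup step: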
@@ -89,12 +89,17 @@ module.exports = async (context) => {
     const downloadedFunctions = [];
     let successCount = 0;
     let errorCount = 0;
+    const totalFuncs = functions.length;
+    const stepStart = Date.now();
 
     // Download each Edge Function via the Supabase CLI
     // Note: the CLI ignores the cwd and always downloads to supabase/functions
-    for (
+    for (let idx = 0; idx < functions.length; idx++) {
+      const func = functions[idx];
+      const current = idx + 1;
+      const elapsed = Math.floor((Date.now() - stepStart) / 1000);
       try {
-        console.log(chalk.white(`  - Downloading: ${func.name}
+        console.log(chalk.white(`  - Downloading ${current}/${totalFuncs}: ${func.name}... (${elapsed}s)`));
 
         // Create the function's directory IN THE BACKUP
         const functionTargetDir = path.join(functionsDir, func.name);