smoonb 0.0.87 → 0.0.89
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/commands/backup/index.js +39 -33
- package/src/commands/backup/steps/01-postgres-version.js +54 -0
- package/src/commands/backup/steps/02-database.js +365 -0
- package/src/commands/backup/utils.js +42 -2
- package/src/i18n/locales/en.json +14 -1
- package/src/i18n/locales/pt-BR.json +14 -1
- package/src/commands/backup/steps/01-database.js +0 -183
- package/src/commands/backup/steps/{02-database-separated.js → 03-database-separated.js} +0 -0
- package/src/commands/backup/steps/{03-database-settings.js → 04-database-settings.js} +0 -0
- package/src/commands/backup/steps/{04-auth-settings.js → 05-auth-settings.js} +0 -0
- package/src/commands/backup/steps/{05-realtime-settings.js → 06-realtime-settings.js} +0 -0
- package/src/commands/backup/steps/{06-storage.js → 07-storage.js} +0 -0
- package/src/commands/backup/steps/{07-custom-roles.js → 08-custom-roles.js} +0 -0
- package/src/commands/backup/steps/{08-edge-functions.js → 09-edge-functions.js} +0 -0
- package/src/commands/backup/steps/{09-supabase-temp.js → 10-supabase-temp.js} +0 -0
- package/src/commands/backup/steps/{10-migrations.js → 11-migrations.js} +0 -0
package/package.json
CHANGED

package/src/commands/backup/index.js
CHANGED

@@ -9,18 +9,19 @@ const { saveEnvMap } = require('../../utils/envMap');
 const { mapEnvVariablesInteractively, askComponentsFlags } = require('../../interactive/envMapper');
 const { confirm } = require('../../utils/prompt');

-// Importar todas as etapas
+// Importar todas as etapas (arquivos 00..11)
 const step00DockerValidation = require('./steps/00-docker-validation');
-const step01Database = require('./steps/01-database');
-const step02DatabaseSeparated = require('./steps/02-database-separated');
-const step03DatabaseSettings = require('./steps/03-database-settings');
-const step04AuthSettings = require('./steps/04-auth-settings');
-const step05RealtimeSettings = require('./steps/05-realtime-settings');
-const step06Storage = require('./steps/06-storage');
-const step07CustomRoles = require('./steps/07-custom-roles');
-const step08EdgeFunctions = require('./steps/08-edge-functions');
-const step09SupabaseTemp = require('./steps/09-supabase-temp');
-const step10Migrations = require('./steps/10-migrations');
+const step01PostgresVersion = require('./steps/01-postgres-version');
+const step02Database = require('./steps/02-database');
+const step03DatabaseSeparated = require('./steps/03-database-separated');
+const step04DatabaseSettings = require('./steps/04-database-settings');
+const step05AuthSettings = require('./steps/05-auth-settings');
+const step06RealtimeSettings = require('./steps/06-realtime-settings');
+const step07Storage = require('./steps/07-storage');
+const step08CustomRoles = require('./steps/08-custom-roles');
+const step09EdgeFunctions = require('./steps/09-edge-functions');
+const step10SupabaseTemp = require('./steps/10-supabase-temp');
+const step11Migrations = require('./steps/11-migrations');

 // Exportar FUNÇÃO em vez de objeto Command
 module.exports = async (options) => {

@@ -218,21 +219,26 @@ module.exports = async (options) => {
   console.log(chalk.white(`🐳 ${getT('backup.start.docker')}`));

   // Contar etapas totais para numeração
-  // Etapas fixas: Database, Database Separado, Database Settings, Custom Roles (4)
+  // Etapas fixas: Postgres version, Database, Database Separado, Database Settings, Custom Roles (5)
   // Etapas condicionais: Auth, Realtime, Storage, Functions, Temp, Migrations
   let stepNumber = 0;
-  const totalSteps = 4 + (flags?.includeAuth ? 1 : 0) + (flags?.includeRealtime ? 1 : 0) + (flags?.includeStorage ? 1 : 0) + (flags?.includeFunctions ? 1 : 0) + (flags?.includeTemp ? 1 : 0) + (flags?.includeMigrations ? 1 : 0);
+  const totalSteps = 5 + (flags?.includeAuth ? 1 : 0) + (flags?.includeRealtime ? 1 : 0) + (flags?.includeStorage ? 1 : 0) + (flags?.includeFunctions ? 1 : 0) + (flags?.includeTemp ? 1 : 0) + (flags?.includeMigrations ? 1 : 0);

-  // 1. Backup Database via pg_dumpall Docker
+  // 1. Postgres version (detect + optional override)
+  stepNumber++;
+  console.log(chalk.blue(`\n📊 ${stepNumber}/${totalSteps} - ${getT('backup.steps.postgresVersion.title')}`));
+  await step01PostgresVersion(context);
+
+  // 2. Backup Database via pg_dumpall Docker
   stepNumber++;
   console.log(chalk.blue(`\n📊 ${stepNumber}/${totalSteps} - ${getT('backup.steps.database.title')}`));
-  const databaseResult = await step01Database(context);
+  const databaseResult = await step02Database(context);
   manifest.components.database = databaseResult;

-  // 2. Backup Database Separado
+  // 3. Backup Database Separado
   stepNumber++;
   console.log(chalk.blue(`\n📊 ${stepNumber}/${totalSteps} - ${getT('backup.steps.database.separated.title')}`));
-  const dbSeparatedResult = await step02DatabaseSeparated(context);
+  const dbSeparatedResult = await step03DatabaseSeparated(context);
   manifest.components.database_separated = {
     success: dbSeparatedResult.success,
     method: 'supabase-cli',

@@ -240,63 +246,63 @@ module.exports = async (options) => {
     total_size_kb: dbSeparatedResult.totalSizeKB || '0.0'
   };

-  // 3. Backup Database Settings
+  // 4. Backup Database Settings
   stepNumber++;
   console.log(chalk.blue(`\n🔧 ${stepNumber}/${totalSteps} - ${getT('backup.steps.databaseSettings.title')}`));
-  const databaseSettingsResult = await step03DatabaseSettings(context);
+  const databaseSettingsResult = await step04DatabaseSettings(context);
   manifest.components.database_settings = databaseSettingsResult;

-  // 4. Backup Auth Settings
+  // 5. Backup Auth Settings
   if (flags?.includeAuth) {
     stepNumber++;
     console.log(chalk.blue(`\n🔐 ${stepNumber}/${totalSteps} - ${getT('backup.steps.auth.title')}`));
-    const authResult = await step04AuthSettings(context);
+    const authResult = await step05AuthSettings(context);
     manifest.components.auth_settings = authResult;
   }

-  // 5. Backup Realtime Settings
+  // 6. Backup Realtime Settings
   if (flags?.includeRealtime) {
     stepNumber++;
     console.log(chalk.blue(`\n🔄 ${stepNumber}/${totalSteps} - ${getT('backup.steps.realtime.title')}`));
-    const realtimeResult = await step05RealtimeSettings(context);
+    const realtimeResult = await step06RealtimeSettings(context);
     manifest.components.realtime = realtimeResult;
   }

-  // 6. Backup Storage
+  // 7. Backup Storage
   if (flags?.includeStorage) {
     stepNumber++;
     console.log(chalk.blue(`\n📦 ${stepNumber}/${totalSteps} - ${getT('backup.steps.storage.title')}`));
-    const storageResult = await step06Storage(context);
+    const storageResult = await step07Storage(context);
     manifest.components.storage = storageResult;
   }

-  // 7. Backup Custom Roles
+  // 8. Backup Custom Roles
   stepNumber++;
   console.log(chalk.blue(`\n👥 ${stepNumber}/${totalSteps} - ${getT('backup.steps.roles.title')}`));
-  const rolesResult = await step07CustomRoles(context);
+  const rolesResult = await step08CustomRoles(context);
   manifest.components.custom_roles = rolesResult;

-  // 8. Backup Edge Functions
+  // 9. Backup Edge Functions
   if (flags?.includeFunctions) {
     stepNumber++;
     console.log(chalk.blue(`\n⚡ ${stepNumber}/${totalSteps} - ${getT('backup.steps.functions.title')}`));
-    const functionsResult = await step08EdgeFunctions(context);
+    const functionsResult = await step09EdgeFunctions(context);
     manifest.components.edge_functions = functionsResult;
   }

-  // 9. Backup Supabase .temp
+  // 10. Backup Supabase .temp
   if (flags?.includeTemp) {
     stepNumber++;
     console.log(chalk.blue(`\n📁 ${stepNumber}/${totalSteps} - ${getT('backup.steps.temp.title')}`));
-    const supabaseTempResult = await step09SupabaseTemp(context);
+    const supabaseTempResult = await step10SupabaseTemp(context);
     manifest.components.supabase_temp = supabaseTempResult;
   }

-  // 10. Backup Migrations
+  // 11. Backup Migrations
   if (flags?.includeMigrations) {
     stepNumber++;
     console.log(chalk.blue(`\n📋 ${stepNumber}/${totalSteps} - ${getT('backup.steps.migrations.title')}`));
-    const migrationsResult = await step10Migrations(context);
+    const migrationsResult = await step11Migrations(context);
     manifest.components.migrations = migrationsResult;
   }

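Note how the step counter works: the total is fixed up front from the component flags, so the `n/total` labels stay stable even when optional steps are skipped. A worked example of the formula above, with a hypothetical flag selection:

    // Hypothetical flags; the formula is the one from the diff above.
    const flags = { includeAuth: true, includeStorage: true };
    const totalSteps = 5
      + (flags?.includeAuth ? 1 : 0)
      + (flags?.includeRealtime ? 1 : 0)
      + (flags?.includeStorage ? 1 : 0)
      + (flags?.includeFunctions ? 1 : 0)
      + (flags?.includeTemp ? 1 : 0)
      + (flags?.includeMigrations ? 1 : 0);
    console.log(totalSteps); // 7 -> steps print as "1/7" ... "7/7"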
package/src/commands/backup/steps/01-postgres-version.js
ADDED

@@ -0,0 +1,54 @@
+const chalk = require('chalk');
+const inquirer = require('inquirer');
+const { getPostgresServerMajor } = require('../utils');
+const { t } = require('../../../i18n');
+
+const POSSIBLE_MAJORS = [15, 16, 17, 18];
+
+/**
+ * Step: Mostra a versão do Postgres detectada e permite ao usuário sobrescrever com um menu.
+ * Preenche context.postgresMajor para o step 02-database usar.
+ */
+module.exports = async (context) => {
+  const getT = global.smoonbI18n?.t || t;
+  const { databaseUrl } = context;
+
+  if (!databaseUrl) {
+    context.postgresMajor = 17;
+    return { success: true, postgresMajor: 17 };
+  }
+
+  let detectedMajor = null;
+  try {
+    detectedMajor = await getPostgresServerMajor(databaseUrl);
+  } catch (err) {
+    console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.postgresVersion.detectError', { message: err.message })}`));
+    console.log(chalk.white(` - ${getT('backup.steps.postgresVersion.usingDefault')}`));
+    context.postgresMajor = 17;
+    return { success: true, postgresMajor: 17 };
+  }
+
+  const major = detectedMajor != null ? detectedMajor : 17;
+  const choices = [
+    { name: getT('backup.steps.postgresVersion.useDetected', { major, image: `postgres:${major}` }), value: major },
+    ...POSSIBLE_MAJORS.filter(m => m !== major).map(m => ({
+      name: `postgres:${m}`,
+      value: m
+    }))
+  ];
+
+  console.log(chalk.white(` - ${getT('backup.steps.postgresVersion.found', { major, image: `postgres:${major}` })}`));
+  console.log(chalk.white(` - ${getT('backup.steps.postgresVersion.proceedWith')}`));
+
+  const { postgresMajor } = await inquirer.prompt([{
+    type: 'list',
+    name: 'postgresMajor',
+    message: getT('backup.steps.postgresVersion.selectVersion'),
+    choices,
+    default: major
+  }]);
+
+  context.postgresMajor = postgresMajor;
+  console.log(chalk.green(` ✅ ${getT('backup.steps.postgresVersion.selected', { image: `postgres:${postgresMajor}` })}`));
+  return { success: true, postgresMajor };
+};
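The only output of this step that later steps consume is `context.postgresMajor`; the return value just feeds the manifest. A minimal sketch of the hand-off, assuming the same shared-context wiring the command uses (paths and env var are hypothetical):

    const step01PostgresVersion = require('./steps/01-postgres-version');
    const step02Database = require('./steps/02-database');

    const context = { databaseUrl: process.env.DATABASE_URL, backupDir: './backup' };

    (async () => {
      await step01PostgresVersion(context); // prompts, then sets context.postgresMajor (e.g. 17)
      await step02Database(context);        // destructures postgresMajor to pick postgres:<major>
    })();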
package/src/commands/backup/steps/02-database.js
ADDED

@@ -0,0 +1,365 @@
+const chalk = require('chalk');
+const path = require('path');
+const fs = require('fs').promises;
+const readline = require('readline');
+const { spawn } = require('child_process');
+const { t } = require('../../../i18n');
+const { getPostgresServerMajor } = require('../utils');
+
+const DUMP_SIZE_FACTOR_DEFAULT = 1.4;
+const BAR_WIDTH = 24;
+const EMA_ALPHA = 0.25;
+const ETA_MIN_TICKS = 4;
+const ESTIMATE_TIMEOUT_MS = 10000;
+
+function formatBytes(bytes) {
+  if (bytes === 0) return '0 B';
+  const k = 1024;
+  const sizes = ['B', 'KB', 'MB', 'GB'];
+  const i = Math.floor(Math.log(bytes) / Math.log(k));
+  return `${parseFloat((bytes / Math.pow(k, i)).toFixed(2))} ${sizes[i]}`;
+}
+
+function formatDuration(ms) {
+  if (ms < 1000) return `${ms}ms`;
+  const s = Math.floor(ms / 1000);
+  const m = Math.floor(s / 60);
+  const h = Math.floor(m / 60);
+  if (h > 0) return `${h}h ${m % 60}m ${s % 60}s`;
+  if (m > 0) return `${m}m ${s % 60}s`;
+  return `${s}s`;
+}
+
+async function exists(filePath) {
+  try {
+    await fs.access(filePath);
+    return true;
+  } catch {
+    return false;
+  }
+}
+
+/**
+ * Estima tamanho total do cluster (soma dos bancos) via SQL. Timeout 10s; falha = null (fallback sem estimativa).
+ * @returns {Promise<number|null>} bytes ou null
+ */
+async function estimateClusterBytes({ postgresImage, username, password, host, port }) {
+  const query = "SELECT COALESCE(sum(pg_database_size(datname)),0) FROM pg_database WHERE datistemplate = false;";
+  const args = [
+    'run', '--rm', '--network', 'host',
+    '-e', `PGPASSWORD=${password}`,
+    postgresImage, 'psql',
+    '-h', host, '-p', port, '-U', username, '-d', 'postgres',
+    '-t', '-A', '-c', query
+  ];
+  return new Promise((resolve) => {
+    const proc = spawn('docker', args, { stdio: ['ignore', 'pipe', 'pipe'] });
+    let stdout = '';
+    let done = false;
+    const timeout = setTimeout(() => {
+      if (done) return;
+      done = true;
+      try { proc.kill('SIGKILL'); } catch { }
+      resolve(null); // timeout: seguir sem estimativa
+    }, ESTIMATE_TIMEOUT_MS);
+    proc.stdout.on('data', (chunk) => { stdout += chunk.toString(); });
+    proc.on('close', (code) => {
+      if (done) return;
+      done = true;
+      clearTimeout(timeout);
+      if (code !== 0) {
+        resolve(null);
+        return;
+      }
+      const trimmed = stdout.trim();
+      const bytes = parseInt(trimmed, 10);
+      if (Number.isNaN(bytes) || bytes < 0) {
+        resolve(null);
+        return;
+      }
+      resolve(bytes);
+    });
+    proc.on('error', () => {
+      if (done) return;
+      done = true;
+      clearTimeout(timeout);
+      resolve(null);
+    });
+  });
+}
+
+/**
+ * Monta uma linha de progresso: barra opcional + tamanho, tempo, velocidade, ETA (quando estimado).
+ * percent/etaSeconds null = modo indeterminado (sem % nem ETA).
+ */
+function renderProgressLine({ percent, width, sizeBytes, elapsedMs, speedBps, etaSeconds, estimated, getT }) {
+  const sizeStr = formatBytes(sizeBytes);
+  const elapsedStr = formatDuration(elapsedMs);
+  const speedStr = formatBytes(Math.max(0, speedBps)) + '/s';
+  let bar = '';
+  if (estimated && percent != null && percent >= 0) {
+    const filled = Math.round((percent / 100) * width);
+    const n = Math.min(filled, width);
+    bar = `[${'#'.repeat(n)}${'-'.repeat(width - n)}] ${Math.min(99, Math.floor(percent))}% (~) `;
+  } else {
+    bar = ' … ';
+  }
+  let etaStr = '';
+  if (estimated && etaSeconds != null && etaSeconds > 0) {
+    const etaLabel = getT ? getT('backup.steps.database.progress.eta') : 'ETA';
+    etaStr = ` | ${etaLabel} ~${formatDuration(etaSeconds * 1000)}`;
+  }
+  return ` ${bar}| ${sizeStr} | ${elapsedStr} | ${speedStr}${etaStr}`;
+}
+
+/**
+ * Etapa 2: Backup Database via pg_dumpall Docker (idêntico ao Dashboard)
+ * Com feedback de progresso: tamanho do arquivo, velocidade e tempo decorrido.
+ * Usa context.postgresMajor (definido no step Postgres version) ou detecta se não definido.
+ */
+module.exports = async ({ databaseUrl, backupDir, postgresMajor: contextMajor }) => {
+  try {
+    const getT = global.smoonbI18n?.t || t;
+    console.log(chalk.white(` - ${getT('backup.steps.database.creating')}`));
+
+    const urlMatch = databaseUrl.match(/postgresql:\/\/([^:]+):([^@]+)@([^:]+):(\d+)\/(.+)/);
+
+    if (!urlMatch) {
+      const getT = global.smoonbI18n?.t || t;
+      throw new Error(getT('error.databaseUrlInvalidSimple'));
+    }
+
+    const [, username, password, host, port] = urlMatch;
+
+    let postgresMajor = contextMajor;
+    if (postgresMajor == null && databaseUrl) {
+      try {
+        postgresMajor = await getPostgresServerMajor(databaseUrl);
+      } catch {
+        postgresMajor = 17;
+      }
+    }
+    if (postgresMajor == null) postgresMajor = 17;
+    const postgresImage = `postgres:${postgresMajor}`;
+    console.log(chalk.white(` - ${getT('backup.steps.database.postgresImage', { image: postgresImage, major: postgresMajor })}`));
+
+    const now = new Date();
+    const day = String(now.getDate()).padStart(2, '0');
+    const month = String(now.getMonth() + 1).padStart(2, '0');
+    const year = now.getFullYear();
+    const hours = String(now.getHours()).padStart(2, '0');
+    const minutes = String(now.getMinutes()).padStart(2, '0');
+    const seconds = String(now.getSeconds()).padStart(2, '0');
+
+    const fileName = `db_cluster-${day}-${month}-${year}@${hours}-${minutes}-${seconds}.backup`;
+    const backupDirAbs = path.resolve(backupDir);
+    const outputPath = path.join(backupDirAbs, fileName);
+
+    // Estimativa opcional: tamanho do cluster * fator (dump lógico costuma ser maior). Falha = sem %/ETA.
+    let expectedBytes = null;
+    try {
+      const clusterBytes = await estimateClusterBytes({ postgresImage, username, password, host, port });
+      if (clusterBytes != null && clusterBytes > 0) {
+        const factor = parseFloat(process.env.SMOONB_DUMP_SIZE_FACTOR || '', 10) || DUMP_SIZE_FACTOR_DEFAULT;
+        expectedBytes = Math.floor(clusterBytes * factor);
+      }
+    } catch {
+      expectedBytes = null;
+    }
+
+    const dockerArgs = [
+      'run', '--rm', '--network', 'host',
+      '-v', `${backupDirAbs}:/host`,
+      '-e', `PGPASSWORD=${password}`,
+      postgresImage, 'pg_dumpall',
+      '-h', host,
+      '-p', port,
+      '-U', username,
+      '-f', `/host/${fileName}`
+    ];
+
+    console.log(chalk.white(` - ${getT('backup.steps.database.executing')}`));
+
+    const startTime = Date.now();
+    let lastSize = 0;
+    let lastTime = startTime;
+    let ticker = null;
+    let tickCount = 0;
+    let smoothedSpeed = 0;
+    const useTty = Boolean(process.stdout.isTTY);
+    let lastProgressLine = '';
+
+    const runDump = () => new Promise((resolve, reject) => {
+      const proc = spawn('docker', dockerArgs, { stdio: ['ignore', 'pipe', 'pipe'] });
+
+      proc.stderr.on('data', (chunk) => {
+        process.stderr.write(chunk);
+        if (useTty && lastProgressLine) {
+          readline.cursorTo(process.stdout, 0);
+          process.stdout.write(lastProgressLine);
+        }
+      });
+
+      const pollFile = async () => {
+        if (!(await exists(outputPath))) return;
+        const stat = await fs.stat(outputPath).catch(() => null);
+        if (!stat) return;
+        tickCount++;
+        const currentSize = stat.size;
+        const now = Date.now();
+        const elapsed = now - startTime;
+        const deltaTime = (now - lastTime) / 1000;
+        const speed = deltaTime > 0 ? (currentSize - lastSize) / deltaTime : 0;
+        if (speed > 0) {
+          smoothedSpeed = tickCount === 1 ? speed : EMA_ALPHA * speed + (1 - EMA_ALPHA) * smoothedSpeed;
+        }
+        lastSize = currentSize;
+        lastTime = now;
+
+        const estimated = expectedBytes != null && expectedBytes > 0;
+        let percent = null;
+        let etaSeconds = null;
+        if (estimated) {
+          percent = Math.min(99, Math.floor((currentSize / expectedBytes) * 100));
+          if (smoothedSpeed > 0 && currentSize < expectedBytes && tickCount >= ETA_MIN_TICKS) {
+            etaSeconds = (expectedBytes - currentSize) / smoothedSpeed;
+          }
+        }
+
+        const line = renderProgressLine({
+          percent,
+          width: BAR_WIDTH,
+          sizeBytes: currentSize,
+          elapsedMs: elapsed,
+          speedBps: smoothedSpeed || speed,
+          etaSeconds,
+          estimated,
+          getT
+        });
+        lastProgressLine = line;
+
+        if (useTty) {
+          readline.clearLine(process.stdout, 0);
+          readline.cursorTo(process.stdout, 0);
+          process.stdout.write(chalk.white(line));
+        } else if (tickCount % 30 === 1 && elapsed >= 15000) {
+          process.stdout.write(chalk.white(line) + '\n');
+        }
+      };
+
+      ticker = setInterval(pollFile, 500);
+
+      proc.on('close', async (code) => {
+        if (ticker) {
+          clearInterval(ticker);
+          ticker = null;
+        }
+        if (useTty) {
+          readline.clearLine(process.stdout, 0);
+          readline.cursorTo(process.stdout, 0);
+        }
+        if (code !== 0) {
+          reject(new Error(`pg_dumpall exited with code ${code}`));
+          return;
+        }
+        // Sucesso: mostrar 100% uma vez (estimativa) com tamanho final real
+        if (expectedBytes != null && expectedBytes > 0) {
+          let finalSize = lastSize;
+          try {
+            const stat = await fs.stat(outputPath).catch(() => null);
+            if (stat) finalSize = stat.size;
+          } catch { }
+          const finalLine = renderProgressLine({
+            percent: 100,
+            width: BAR_WIDTH,
+            sizeBytes: finalSize,
+            elapsedMs: Date.now() - startTime,
+            speedBps: smoothedSpeed,
+            etaSeconds: null,
+            estimated: true,
+            getT
+          });
+          process.stdout.write(chalk.white(finalLine) + '\n');
+        }
+        if (useTty) {
+          readline.cursorTo(process.stdout, 0);
+        }
+        resolve();
+      });
+
+      proc.on('error', (err) => {
+        if (ticker) clearInterval(ticker);
+        reject(err);
+      });
+    });
+
+    await runDump();
+
+    const gzipArgs = [
+      'run', '--rm',
+      '-v', `${backupDirAbs}:/host`,
+      postgresImage, 'gzip', `/host/${fileName}`
+    ];
+
+    const gzipStart = Date.now();
+    let gzipTicker = null;
+    const finalFileName = `${fileName}.gz`;
+    const gzipOutputPath = path.join(backupDirAbs, finalFileName);
+
+    const runGzip = () => new Promise((resolve, reject) => {
+      const proc = spawn('docker', gzipArgs, { stdio: ['ignore', 'pipe', 'pipe'] });
+
+      proc.stderr.on('data', (chunk) => process.stderr.write(chunk));
+
+      const pollGzip = async () => {
+        if (!(await exists(gzipOutputPath))) return;
+        const stat = await fs.stat(gzipOutputPath).catch(() => null);
+        if (!stat) return;
+        const size = stat.size;
+        const elapsed = Date.now() - gzipStart;
+        if (process.stdout.isTTY) {
+          readline.clearLine(process.stdout, 0);
+          readline.cursorTo(process.stdout, 0);
+        }
+        process.stdout.write(` 📦 ${formatBytes(size)} | ${formatDuration(elapsed)}`);
+        if (process.stdout.isTTY) process.stdout.write('\r');
+      };
+
+      gzipTicker = setInterval(pollGzip, 300);
+
+      proc.on('close', (code) => {
+        if (gzipTicker) {
+          clearInterval(gzipTicker);
+          gzipTicker = null;
+        }
+        if (process.stdout.isTTY) {
+          readline.clearLine(process.stdout, 0);
+          readline.cursorTo(process.stdout, 0);
+        }
+        if (code !== 0) {
+          reject(new Error(`gzip exited with code ${code}`));
+        } else {
+          resolve();
+        }
+      });
+
+      proc.on('error', (err) => {
+        if (gzipTicker) clearInterval(gzipTicker);
+        reject(err);
+      });
+    });
+
+    await runGzip();
+
+    const stats = await fs.stat(path.join(backupDir, finalFileName));
+    const sizeKB = (stats.size / 1024).toFixed(1);
+
+    console.log(chalk.green(` ✅ Database backup: ${finalFileName} (${sizeKB} KB)`));
+
+    return { success: true, size: sizeKB, fileName: finalFileName };
+  } catch (error) {
+    const getT = global.smoonbI18n?.t || t;
+    console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.database.error', { message: error.message })}`));
+    return { success: false };
+  }
+};
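The progress math in this file is compact enough to restate: the target size is the live cluster size scaled by a factor (a logical dump is usually larger than the `pg_database_size` total), the percent is capped at 99 until the process actually exits, and the ETA divides the remaining bytes by an exponentially smoothed speed. A standalone sketch of the same arithmetic, using the constants from the diff (the sample numbers are hypothetical):

    const EMA_ALPHA = 0.25;                            // same smoothing weight as the diff
    const factor = 1.4;                                // DUMP_SIZE_FACTOR_DEFAULT
    const expectedBytes = Math.floor(300e6 * factor);  // 300 MB cluster -> 420 MB target

    let smoothedSpeed = 0;
    let first = true;
    function onTick(written, deltaBytes, deltaSeconds) {
      const speed = deltaSeconds > 0 ? deltaBytes / deltaSeconds : 0;
      if (speed > 0) {
        smoothedSpeed = first ? speed : EMA_ALPHA * speed + (1 - EMA_ALPHA) * smoothedSpeed;
        first = false;
      }
      const percent = Math.min(99, Math.floor((written / expectedBytes) * 100)); // never 100% early
      const etaSeconds = smoothedSpeed > 0 ? (expectedBytes - written) / smoothedSpeed : null;
      return { percent, etaSeconds };
    }
    console.log(onTick(210e6, 5e6, 0.5)); // -> { percent: 50, etaSeconds: 21 }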
package/src/commands/backup/utils.js
CHANGED

@@ -1,6 +1,43 @@
 const chalk = require('chalk');
+const { spawn } = require('child_process');
 const { t } = require('../../i18n');

+/**
+ * Obtém a versão major do Postgres no servidor (ex: 17) via psql em container.
+ * @param {string} databaseUrl
+ * @returns {Promise<number|null>} major (15, 17, 18, ...) ou null
+ */
+async function getPostgresServerMajor(databaseUrl) {
+  const urlMatch = databaseUrl.match(/postgresql:\/\/([^:]+):([^@]+)@([^:]+):(\d+)\/(.+)/);
+  if (!urlMatch) return null;
+  const [, username, password, host, port, database] = urlMatch;
+  const bootstrapImage = 'postgres:17';
+  const args = [
+    'run', '--rm', '--network', 'host',
+    '-e', `PGPASSWORD=${password}`,
+    bootstrapImage, 'psql',
+    '-h', host, '-p', port, '-U', username, '-d', (database && database.trim()) || 'postgres',
+    '-t', '-A', '-c', "SELECT current_setting('server_version_num')"
+  ];
+  const stdout = await new Promise((resolve, reject) => {
+    const proc = spawn('docker', args, { stdio: ['ignore', 'pipe', 'pipe'] });
+    let out = '';
+    let err = '';
+    proc.stdout.on('data', (chunk) => { out += chunk.toString(); });
+    proc.stderr.on('data', (chunk) => { err += chunk.toString(); });
+    proc.on('close', (code) => {
+      if (code !== 0) reject(new Error(err.trim() || `psql exited with code ${code}`));
+      else resolve(out.trim());
+    });
+    proc.on('error', reject);
+  });
+  const versionNum = parseInt(stdout, 10);
+  if (Number.isNaN(versionNum) || versionNum < 10000) {
+    throw new Error(`Invalid server_version_num: ${stdout}`);
+  }
+  return Math.floor(versionNum / 10000);
+}
+
 /**
  * Função para mostrar mensagens educativas e encerrar elegantemente
  * @param {string} reason - Motivo do bloqueio

@@ -54,7 +91,9 @@ function showDockerMessagesAndExit(reason, data = {}) {
       console.log(chalk.red(`❌ ${getT('supabase.cliOutdated', { version: data.supabaseCliVersion || '?', latest: data.supabaseCliLatest || '?' })}`));
       console.log('');
       console.log(chalk.yellow(`📋 ${getT('supabase.cliUpdateInstructions')}`));
-      console.log(chalk.cyan(` ${getT('supabase.
+      console.log(chalk.cyan(` ${getT('supabase.cliUpdateCommandExamples')}`));
+      console.log(chalk.cyan(` ${getT('supabase.cliUpdateCommandGlobal')}`));
+      console.log(chalk.cyan(` ${getT('supabase.cliUpdateCommandLocal')}`));
       console.log('');
       console.log(chalk.gray(`💡 ${getT('supabase.cliUpdateLink')}`));
       break;

@@ -78,6 +117,7 @@ function showDockerMessagesAndExit(reason, data = {}) {
 }

 module.exports = {
-  showDockerMessagesAndExit
+  showDockerMessagesAndExit,
+  getPostgresServerMajor
 };

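Postgres exposes `server_version_num` as an integer (on Postgres 10+, major × 10000 + minor, e.g. 170004 for 17.4), which is why the helper can recover the major with integer division. The parsing step in isolation, as a sketch that returns null where the helper above throws:

    // e.g. psql -t -A -c "SELECT current_setting('server_version_num')" prints "170004"
    function majorFromVersionNum(raw) {
      const versionNum = parseInt(raw, 10);
      if (Number.isNaN(versionNum) || versionNum < 10000) return null; // mirrors the guard above
      return Math.floor(versionNum / 10000);                           // 170004 -> 17
    }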
package/src/i18n/locales/en.json
CHANGED

@@ -75,7 +75,9 @@
   "supabase.cliUpdateRecommended": "Supabase CLI {version} detected. We recommend v2.72 or newer for new features and bug fixes: https://supabase.com/docs/guides/cli/getting-started#updating-the-supabase-cli",
   "supabase.cliOutdated": "Supabase CLI {version} is not the latest ({latest}). Update to the latest version.",
   "supabase.cliUpdateInstructions": "Update the Supabase CLI and run smoonb again:",
-  "supabase.
+  "supabase.cliUpdateCommandExamples": "Examples (depending on how you installed):",
+  "supabase.cliUpdateCommandGlobal": "npm install -g supabase@latest (global)",
+  "supabase.cliUpdateCommandLocal": "npm install supabase@latest (local/project)",
   "supabase.cliUpdateLink": "Docs: https://supabase.com/docs/guides/cli/getting-started#updating-the-supabase-cli",
   "supabase.cliLatestUnknown": "Could not fetch the latest Supabase CLI version. Cannot continue.",
   "supabase.cliLatestErrorLabel": "Error:",

@@ -319,9 +321,20 @@
   "backup.start.directory": "Directory: {path}",
   "backup.start.docker": "Backup via Docker Desktop",

+  "backup.steps.postgresVersion.title": "Postgres version",
+  "backup.steps.postgresVersion.found": "Postgres version found: major {major} (image {image}).",
+  "backup.steps.postgresVersion.proceedWith": "We will proceed with this version. You can override below.",
+  "backup.steps.postgresVersion.selectVersion": "Select Postgres image version to use for dump:",
+  "backup.steps.postgresVersion.useDetected": "Use detected (postgres:{major})",
+  "backup.steps.postgresVersion.selected": "Using image: {image}",
+  "backup.steps.postgresVersion.detectError": "Could not detect server version: {message}",
+  "backup.steps.postgresVersion.usingDefault": "Using default postgres:17",
+
   "backup.steps.database.title": "PostgreSQL Database Backup via pg_dumpall Docker...",
   "backup.steps.database.creating": "Creating full backup via pg_dumpall...",
+  "backup.steps.database.postgresImage": "Postgres server major {major} detected. Using image: {image}",
   "backup.steps.database.executing": "Executing pg_dumpall via Docker...",
+  "backup.steps.database.progress.eta": "ETA",
   "backup.steps.database.separated.title": "PostgreSQL Database Backup (separate SQL files)...",
   "backup.steps.database.separated.creating": "Creating separate SQL backups via Supabase CLI...",
   "backup.steps.database.separated.exportingSchema": "Exporting schema...",
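The new keys follow the same `{placeholder}` convention as the existing ones. The locale library's internals are not part of this diff, but a template like `backup.steps.database.postgresImage` is filled along these lines (a hypothetical interpolator, not smoonb's actual implementation):

    function interpolate(template, vars = {}) {
      // Replace each {name} with the matching value; leave unknown names intact.
      return template.replace(/\{(\w+)\}/g, (match, key) => (key in vars ? String(vars[key]) : match));
    }
    interpolate('Postgres server major {major} detected. Using image: {image}',
      { major: 17, image: 'postgres:17' });
    // -> 'Postgres server major 17 detected. Using image: postgres:17'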
package/src/i18n/locales/pt-BR.json
CHANGED

@@ -75,7 +75,9 @@
   "supabase.cliUpdateRecommended": "Supabase CLI {version} detectado. Recomendamos v2.72 ou mais recente para novos recursos e correções: https://supabase.com/docs/guides/cli/getting-started#updating-the-supabase-cli",
   "supabase.cliOutdated": "Supabase CLI {version} não é a versão mais recente ({latest}). Atualize para a última versão.",
   "supabase.cliUpdateInstructions": "Atualize o Supabase CLI e execute o smoonb novamente:",
-  "supabase.
+  "supabase.cliUpdateCommandExamples": "Exemplos (conforme a forma de instalação):",
+  "supabase.cliUpdateCommandGlobal": "npm install -g supabase@latest (global)",
+  "supabase.cliUpdateCommandLocal": "npm install supabase@latest (local/projeto)",
   "supabase.cliUpdateLink": "Documentação: https://supabase.com/docs/guides/cli/getting-started#updating-the-supabase-cli",
   "supabase.cliLatestUnknown": "Não foi possível obter a última versão do Supabase CLI. Não é possível continuar.",
   "supabase.cliLatestErrorLabel": "Erro:",

@@ -319,9 +321,20 @@
   "backup.start.directory": "Diretório: {path}",
   "backup.start.docker": "Backup via Docker Desktop",

+  "backup.steps.postgresVersion.title": "Versão do Postgres",
+  "backup.steps.postgresVersion.found": "Versão do Postgres encontrada: major {major} (imagem {image}).",
+  "backup.steps.postgresVersion.proceedWith": "Seguiremos com esta versão. Você pode sobrescrever abaixo.",
+  "backup.steps.postgresVersion.selectVersion": "Selecione a versão da imagem Postgres para o dump:",
+  "backup.steps.postgresVersion.useDetected": "Usar detectada (postgres:{major})",
+  "backup.steps.postgresVersion.selected": "Usando imagem: {image}",
+  "backup.steps.postgresVersion.detectError": "Não foi possível detectar a versão do servidor: {message}",
+  "backup.steps.postgresVersion.usingDefault": "Usando padrão postgres:17",
+
   "backup.steps.database.title": "Backup da Database PostgreSQL via pg_dumpall Docker...",
   "backup.steps.database.creating": "Criando backup completo via pg_dumpall...",
+  "backup.steps.database.postgresImage": "Postgres servidor major {major} detectado. Usando imagem: {image}",
   "backup.steps.database.executing": "Executando pg_dumpall via Docker...",
+  "backup.steps.database.progress.eta": "ETA",
   "backup.steps.database.separated.title": "Backup da Database PostgreSQL (arquivos SQL separados)...",
   "backup.steps.database.separated.creating": "Criando backups SQL separados via Supabase CLI...",
   "backup.steps.database.separated.exportingSchema": "Exportando schema...",
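Both locale files are resolved through the same lookup that appears in every step: a process-wide i18n instance wins when one has been installed, with the module-level `t` as fallback. In isolation (the `global.smoonbI18n` shape is assumed from its usage throughout the diff, and the require path is illustrative):

    const { t } = require('./src/i18n');          // module-level fallback
    const getT = global.smoonbI18n?.t || t;       // prefer the app-wide instance when present
    getT('backup.steps.postgresVersion.title');   // "Postgres version" / "Versão do Postgres"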
package/src/commands/backup/steps/01-database.js
DELETED

@@ -1,183 +0,0 @@
-const chalk = require('chalk');
-const path = require('path');
-const fs = require('fs').promises;
-const { spawn } = require('child_process');
-const { t } = require('../../../i18n');
-
-function formatBytes(bytes) {
-  if (bytes === 0) return '0 B';
-  const k = 1024;
-  const sizes = ['B', 'KB', 'MB', 'GB'];
-  const i = Math.floor(Math.log(bytes) / Math.log(k));
-  return `${parseFloat((bytes / Math.pow(k, i)).toFixed(2))} ${sizes[i]}`;
-}
-
-function formatDuration(ms) {
-  if (ms < 1000) return `${ms}ms`;
-  const s = Math.floor(ms / 1000);
-  const m = Math.floor(s / 60);
-  const h = Math.floor(m / 60);
-  if (h > 0) return `${h}h ${m % 60}m ${s % 60}s`;
-  if (m > 0) return `${m}m ${s % 60}s`;
-  return `${s}s`;
-}
-
-async function exists(filePath) {
-  try {
-    await fs.access(filePath);
-    return true;
-  } catch {
-    return false;
-  }
-}
-
-/**
- * Etapa 1: Backup Database via pg_dumpall Docker (idêntico ao Dashboard)
- * Com feedback de progresso: tamanho do arquivo, velocidade e tempo decorrido.
- */
-module.exports = async ({ databaseUrl, backupDir }) => {
-  try {
-    const getT = global.smoonbI18n?.t || t;
-    console.log(chalk.white(` - ${getT('backup.steps.database.creating')}`));
-
-    const urlMatch = databaseUrl.match(/postgresql:\/\/([^:]+):([^@]+)@([^:]+):(\d+)\/(.+)/);
-
-    if (!urlMatch) {
-      const getT = global.smoonbI18n?.t || t;
-      throw new Error(getT('error.databaseUrlInvalidSimple'));
-    }
-
-    const [, username, password, host, port] = urlMatch;
-
-    const now = new Date();
-    const day = String(now.getDate()).padStart(2, '0');
-    const month = String(now.getMonth() + 1).padStart(2, '0');
-    const year = now.getFullYear();
-    const hours = String(now.getHours()).padStart(2, '0');
-    const minutes = String(now.getMinutes()).padStart(2, '0');
-    const seconds = String(now.getSeconds()).padStart(2, '0');
-
-    const fileName = `db_cluster-${day}-${month}-${year}@${hours}-${minutes}-${seconds}.backup`;
-    const backupDirAbs = path.resolve(backupDir);
-    const outputPath = path.join(backupDirAbs, fileName);
-
-    const dockerArgs = [
-      'run', '--rm', '--network', 'host',
-      '-v', `${backupDirAbs}:/host`,
-      '-e', `PGPASSWORD=${password}`,
-      'postgres:17', 'pg_dumpall',
-      '-h', host,
-      '-p', port,
-      '-U', username,
-      '-f', `/host/${fileName}`
-    ];
-
-    console.log(chalk.white(` - ${getT('backup.steps.database.executing')}`));
-
-    const startTime = Date.now();
-    let lastSize = 0;
-    let lastTime = startTime;
-    let ticker = null;
-
-    const runDump = () => new Promise((resolve, reject) => {
-      const proc = spawn('docker', dockerArgs, { stdio: ['ignore', 'pipe', 'pipe'] });
-
-      proc.stderr.on('data', (chunk) => process.stderr.write(chunk));
-
-      const pollFile = async () => {
-        if (!(await exists(outputPath))) return;
-        const stat = await fs.stat(outputPath).catch(() => null);
-        if (!stat) return;
-        const size = stat.size;
-        const elapsed = Date.now() - startTime;
-        const deltaTime = (Date.now() - lastTime) / 1000;
-        const speed = deltaTime > 0 ? (size - lastSize) / deltaTime : 0;
-        lastSize = size;
-        lastTime = Date.now();
-        const line = ` 📦 ${formatBytes(size)} | ${formatDuration(elapsed)} | ${formatBytes(speed)}/s`;
-        process.stdout.write(`\r${line}`);
-      };
-
-      ticker = setInterval(pollFile, 500);
-
-      proc.on('close', (code) => {
-        if (ticker) {
-          clearInterval(ticker);
-          ticker = null;
-        }
-        process.stdout.write('\r' + ' '.repeat(80) + '\r');
-        if (code !== 0) {
-          reject(new Error(`pg_dumpall exited with code ${code}`));
-        } else {
-          resolve();
-        }
-      });
-
-      proc.on('error', (err) => {
-        if (ticker) clearInterval(ticker);
-        reject(err);
-      });
-    });
-
-    await runDump();
-
-    const gzipArgs = [
-      'run', '--rm',
-      '-v', `${backupDirAbs}:/host`,
-      'postgres:17', 'gzip', `/host/${fileName}`
-    ];
-
-    const gzipStart = Date.now();
-    let gzipTicker = null;
-    const finalFileName = `${fileName}.gz`;
-    const gzipOutputPath = path.join(backupDirAbs, finalFileName);
-
-    const runGzip = () => new Promise((resolve, reject) => {
-      const proc = spawn('docker', gzipArgs, { stdio: ['ignore', 'pipe', 'pipe'] });
-
-      proc.stderr.on('data', (chunk) => process.stderr.write(chunk));
-
-      const pollGzip = async () => {
-        if (!(await exists(gzipOutputPath))) return;
-        const stat = await fs.stat(gzipOutputPath).catch(() => null);
-        if (!stat) return;
-        const size = stat.size;
-        const elapsed = Date.now() - gzipStart;
-        process.stdout.write(`\r 📦 ${formatBytes(size)} | ${formatDuration(elapsed)}\r`);
-      };
-
-      gzipTicker = setInterval(pollGzip, 300);
-
-      proc.on('close', (code) => {
-        if (gzipTicker) {
-          clearInterval(gzipTicker);
-          gzipTicker = null;
-        }
-        process.stdout.write('\r' + ' '.repeat(80) + '\r');
-        if (code !== 0) {
-          reject(new Error(`gzip exited with code ${code}`));
-        } else {
-          resolve();
-        }
-      });
-
-      proc.on('error', (err) => {
-        if (gzipTicker) clearInterval(gzipTicker);
-        reject(err);
-      });
-    });
-
-    await runGzip();
-
-    const stats = await fs.stat(path.join(backupDir, finalFileName));
-    const sizeKB = (stats.size / 1024).toFixed(1);
-
-    console.log(chalk.green(` ✅ Database backup: ${finalFileName} (${sizeKB} KB)`));
-
-    return { success: true, size: sizeKB, fileName: finalFileName };
-  } catch (error) {
-    const getT = global.smoonbI18n?.t || t;
-    console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.database.error', { message: error.message })}`));
-    return { success: false };
-  }
-};
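Comparing this deleted file with the new 02-database.js shows the main terminal-handling fix besides the version detection: the old code erased its status line by overwriting it with 80 spaces and always animated, which garbles lines wider than 80 columns and spams non-TTY logs, while the new code uses `readline` cursor control and only animates when stdout is a TTY. The idiom, distilled from the new file's code:

    const readline = require('readline');

    // Redraw a single status line in place on a TTY; append plain lines otherwise.
    function drawStatus(line) {
      if (process.stdout.isTTY) {
        readline.clearLine(process.stdout, 0);  // wipe whatever width the line had
        readline.cursorTo(process.stdout, 0);
        process.stdout.write(line);
      } else {
        process.stdout.write(line + '\n');      // pipes/CI get append-only logs
      }
    }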
package/src/commands/backup/steps/{02-database-separated.js → 03-database-separated.js}
File without changes

package/src/commands/backup/steps/{03-database-settings.js → 04-database-settings.js}
File without changes

package/src/commands/backup/steps/{04-auth-settings.js → 05-auth-settings.js}
File without changes

package/src/commands/backup/steps/{05-realtime-settings.js → 06-realtime-settings.js}
File without changes

package/src/commands/backup/steps/{06-storage.js → 07-storage.js}
File without changes

package/src/commands/backup/steps/{07-custom-roles.js → 08-custom-roles.js}
File without changes

package/src/commands/backup/steps/{08-edge-functions.js → 09-edge-functions.js}
File without changes

package/src/commands/backup/steps/{09-supabase-temp.js → 10-supabase-temp.js}
File without changes

package/src/commands/backup/steps/{10-migrations.js → 11-migrations.js}
File without changes