smoonb 0.0.47 → 0.0.49
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/README.md +339 -87
- package/bin/smoonb.js +2 -2
- package/package.json +1 -1
- package/src/commands/backup/index.js +316 -0
- package/src/commands/backup/steps/00-docker-validation.js +24 -0
- package/src/commands/backup/steps/01-database.js +72 -0
- package/src/commands/backup/steps/02-database-separated.js +82 -0
- package/src/commands/backup/steps/03-database-settings.js +178 -0
- package/src/commands/backup/steps/04-auth-settings.js +43 -0
- package/src/commands/backup/steps/05-realtime-settings.js +26 -0
- package/src/commands/backup/steps/06-storage.js +90 -0
- package/src/commands/backup/steps/07-custom-roles.js +39 -0
- package/src/commands/backup/steps/08-edge-functions.js +153 -0
- package/src/commands/backup/steps/09-supabase-temp.js +42 -0
- package/src/commands/backup/steps/10-migrations.js +74 -0
- package/src/commands/backup/utils.js +69 -0
- package/src/commands/check.js +0 -1
- package/src/commands/config.js +0 -1
- package/src/commands/functions.js +1 -1
- package/src/commands/restore/index.js +206 -0
- package/src/commands/restore/steps/00-backup-selection.js +38 -0
- package/src/commands/restore/steps/01-components-selection.js +71 -0
- package/src/commands/restore/steps/02-confirmation.js +14 -0
- package/src/commands/restore/steps/03-database.js +81 -0
- package/src/commands/restore/steps/04-edge-functions.js +112 -0
- package/src/commands/restore/steps/05-auth-settings.js +51 -0
- package/src/commands/restore/steps/06-storage.js +58 -0
- package/src/commands/restore/steps/07-database-settings.js +65 -0
- package/src/commands/restore/steps/08-realtime-settings.js +50 -0
- package/src/commands/restore/utils.js +139 -0
- package/src/index.js +3 -3
- package/src/interactive/envMapper.js +38 -14
- package/src/utils/cli.js +1 -1
- package/src/utils/config.js +1 -3
- package/src/utils/docker.js +3 -3
- package/src/utils/env.js +2 -3
- package/src/utils/envMap.js +1 -1
- package/src/utils/fsExtra.js +98 -0
- package/src/utils/fsx.js +2 -2
- package/src/utils/prompt.js +34 -0
- package/src/utils/realtime-settings.js +2 -2
- package/src/utils/supabase.js +10 -10
- package/src/utils/supabaseLink.js +82 -0
- package/src/utils/validation.js +2 -2
- package/src/commands/backup.js +0 -939
- package/src/commands/restore.js +0 -786
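The headline change in this release is that the monolithic backup.js and restore.js commands (removed at the bottom of the list) were split into numbered step modules under backup/steps and restore/steps. Each step shown below exports an async function that receives a shared context object and reports its outcome as { success, ... } instead of throwing, so the orchestrator in backup/index.js can run the steps in order and keep going when one of them fails. A minimal sketch of that pattern — the runner below is illustrative, not the package's actual index.js:

// Illustrative orchestration of the numbered step modules (not the real backup/index.js).
const steps = [
  require('./steps/04-auth-settings'),
  require('./steps/05-realtime-settings'),
  require('./steps/06-storage')
];

async function runBackup(context) {
  // context carries projectId, accessToken, databaseUrl, backupDir, cleanupFlags, ...
  const results = [];
  for (const step of steps) {
    results.push(await step(context)); // each step resolves to { success, ... }
  }
  return results;
}

module.exports = runBackup;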

package/src/commands/backup/steps/04-auth-settings.js
@@ -0,0 +1,43 @@
+const chalk = require('chalk');
+const path = require('path');
+const { writeJson } = require('../../../utils/fsx');
+
+/**
+ * Etapa 4: Backup Auth Settings via Management API
+ */
+module.exports = async ({ projectId, accessToken, backupDir }) => {
+  try {
+    console.log(chalk.gray(' - Exportando configurações de Auth via Management API...'));
+
+    // Usar fetch direto para Management API com Personal Access Token
+    const authResponse = await fetch(`https://api.supabase.com/v1/projects/${projectId}/config/auth`, {
+      headers: {
+        'Authorization': `Bearer ${accessToken}`,
+        'Content-Type': 'application/json'
+      }
+    });
+
+    if (!authResponse.ok) {
+      console.log(chalk.yellow(`   ⚠️ Erro ao obter Auth Settings: ${authResponse.status} ${authResponse.statusText}`));
+      return { success: false };
+    }
+
+    const authSettings = await authResponse.json();
+
+    // Salvar configurações de Auth
+    const authSettingsPath = path.join(backupDir, 'auth-settings.json');
+    await writeJson(authSettingsPath, {
+      project_id: projectId,
+      timestamp: new Date().toISOString(),
+      settings: authSettings
+    });
+
+    console.log(chalk.green(`✅ Auth Settings exportadas: ${path.basename(authSettingsPath)}`));
+    return { success: true };
+
+  } catch (error) {
+    console.log(chalk.yellow(`   ⚠️ Erro no backup das Auth Settings: ${error.message}`));
+    return { success: false };
+  }
+};
+
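For reference, a step like the one above can be exercised on its own, since it only depends on the context fields it destructures. A sketch of a standalone call (the require path, project ref, and backup path below are placeholders, not values from the package):

// Hypothetical standalone invocation of the auth-settings step; values are placeholders.
const backupAuthSettings = require('smoonb/src/commands/backup/steps/04-auth-settings');

(async () => {
  const result = await backupAuthSettings({
    projectId: 'abcdefghijklmnop',                  // placeholder project ref
    accessToken: process.env.SUPABASE_ACCESS_TOKEN, // personal access token
    backupDir: './backups/backup-example'           // placeholder backup directory
  });
  // On success, <backupDir>/auth-settings.json holds { project_id, timestamp, settings }.
  console.log(result.success);
})();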

package/src/commands/backup/steps/05-realtime-settings.js
@@ -0,0 +1,26 @@
+const chalk = require('chalk');
+const path = require('path');
+const fs = require('fs').promises;
+const { captureRealtimeSettings } = require('../../../utils/realtime-settings');
+
+/**
+ * Etapa 5: Backup Realtime Settings via Captura Interativa
+ */
+module.exports = async ({ projectId, backupDir, options }) => {
+  try {
+    console.log(chalk.gray(' - Capturando Realtime Settings interativamente...'));
+
+    const result = await captureRealtimeSettings(projectId, backupDir, options?.skipRealtime);
+
+    const stats = await fs.stat(path.join(backupDir, 'realtime-settings.json'));
+    const sizeKB = (stats.size / 1024).toFixed(1);
+
+    console.log(chalk.green(`   ✅ Realtime Settings capturadas: ${sizeKB} KB`));
+
+    return { success: true, settings: result };
+  } catch (error) {
+    console.log(chalk.yellow(`   ⚠️ Erro ao capturar Realtime Settings: ${error.message}`));
+    return { success: false };
+  }
+};
+

package/src/commands/backup/steps/06-storage.js
@@ -0,0 +1,90 @@
+const chalk = require('chalk');
+const path = require('path');
+const { ensureDir, writeJson } = require('../../../utils/fsx');
+
+/**
+ * Etapa 6: Backup Storage via Supabase API
+ */
+module.exports = async ({ projectId, accessToken, backupDir }) => {
+  try {
+    const storageDir = path.join(backupDir, 'storage');
+    await ensureDir(storageDir);
+
+    console.log(chalk.gray(' - Listando buckets de Storage via Management API...'));
+
+    // Usar fetch direto para Management API com Personal Access Token
+    const storageResponse = await fetch(`https://api.supabase.com/v1/projects/${projectId}/storage/buckets`, {
+      headers: {
+        'Authorization': `Bearer ${accessToken}`,
+        'Content-Type': 'application/json'
+      }
+    });
+
+    if (!storageResponse.ok) {
+      console.log(chalk.yellow(`   ⚠️ Erro ao listar buckets: ${storageResponse.status} ${storageResponse.statusText}`));
+      return { success: false, buckets: [] };
+    }
+
+    const buckets = await storageResponse.json();
+
+    if (!buckets || buckets.length === 0) {
+      console.log(chalk.gray(' - Nenhum bucket encontrado'));
+      await writeJson(path.join(storageDir, 'README.md'), {
+        message: 'Nenhum bucket de Storage encontrado neste projeto'
+      });
+      return { success: true, buckets: [] };
+    }
+
+    console.log(chalk.gray(`   - Encontrados ${buckets.length} buckets`));
+
+    const processedBuckets = [];
+
+    for (const bucket of buckets || []) {
+      try {
+        console.log(chalk.gray(`   - Processando bucket: ${bucket.name}`));
+
+        // Listar objetos do bucket via Management API com Personal Access Token
+        const objectsResponse = await fetch(`https://api.supabase.com/v1/projects/${projectId}/storage/buckets/${bucket.name}/objects`, {
+          headers: {
+            'Authorization': `Bearer ${accessToken}`,
+            'Content-Type': 'application/json'
+          }
+        });
+
+        let objects = [];
+        if (objectsResponse.ok) {
+          objects = await objectsResponse.json();
+        }
+
+        const bucketInfo = {
+          id: bucket.id,
+          name: bucket.name,
+          public: bucket.public,
+          file_size_limit: bucket.file_size_limit,
+          allowed_mime_types: bucket.allowed_mime_types,
+          objects: objects || []
+        };
+
+        // Salvar informações do bucket
+        const bucketPath = path.join(storageDir, `${bucket.name}.json`);
+        await writeJson(bucketPath, bucketInfo);
+
+        processedBuckets.push({
+          name: bucket.name,
+          objectCount: objects?.length || 0
+        });
+
+        console.log(chalk.green(`   ✅ Bucket ${bucket.name}: ${objects?.length || 0} objetos`));
+      } catch (error) {
+        console.log(chalk.yellow(`   ⚠️ Erro ao processar bucket ${bucket.name}: ${error.message}`));
+      }
+    }
+
+    console.log(chalk.green(`✅ Storage backupado: ${processedBuckets.length} buckets`));
+    return { success: true, buckets: processedBuckets };
+  } catch (error) {
+    console.log(chalk.yellow(`⚠️ Erro no backup do Storage: ${error.message}`));
+    return { success: false, buckets: [] };
+  }
+};
+
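Each bucket processed by the step above ends up in its own JSON file under <backupDir>/storage. Roughly, one of those files would look like this (field names come from the bucketInfo object above; the values are placeholders):

// Illustrative contents of storage/<bucket-name>.json written by the storage step.
const exampleBucketBackup = {
  id: 'avatars',              // placeholder bucket id
  name: 'avatars',            // placeholder bucket name
  public: false,
  file_size_limit: null,
  allowed_mime_types: null,
  objects: []                 // object listing from the Management API, when the call succeeds
};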

package/src/commands/backup/steps/07-custom-roles.js
@@ -0,0 +1,39 @@
+const chalk = require('chalk');
+const path = require('path');
+const fs = require('fs').promises;
+const { promisify } = require('util');
+const { exec } = require('child_process');
+
+const execAsync = promisify(exec);
+
+/**
+ * Etapa 7: Backup Custom Roles via SQL
+ */
+module.exports = async ({ databaseUrl, backupDir, accessToken }) => {
+  try {
+    console.log(chalk.gray(' - Exportando Custom Roles via Docker...'));
+
+    const customRolesFile = path.join(backupDir, 'custom-roles.sql');
+
+    try {
+      // Usar Supabase CLI via Docker para roles
+      await execAsync(`supabase db dump --db-url "${databaseUrl}" --role-only -f "${customRolesFile}"`, {
+        env: { ...process.env, SUPABASE_ACCESS_TOKEN: accessToken || '' }
+      });
+
+      const stats = await fs.stat(customRolesFile);
+      const sizeKB = (stats.size / 1024).toFixed(1);
+
+      console.log(chalk.green(`   ✅ Custom Roles exportados via Docker: ${sizeKB} KB`));
+
+      return { success: true, roles: [{ filename: 'custom-roles.sql', sizeKB }] };
+    } catch (error) {
+      console.log(chalk.yellow(`   ⚠️ Erro ao exportar Custom Roles via Docker: ${error.message}`));
+      return { success: false, roles: [] };
+    }
+  } catch (error) {
+    console.log(chalk.yellow(`   ⚠️ Erro no backup dos Custom Roles: ${error.message}`));
+    return { success: false, roles: [] };
+  }
+};
+

package/src/commands/backup/steps/08-edge-functions.js
@@ -0,0 +1,153 @@
+const chalk = require('chalk');
+const path = require('path');
+const fs = require('fs').promises;
+const { ensureDir, writeJson } = require('../../../utils/fsx');
+const { extractPasswordFromDbUrl, ensureCleanLink } = require('../../../utils/supabaseLink');
+const { cleanDir } = require('../../../utils/fsExtra');
+
+/**
+ * Etapa 8: Backup Edge Functions via Docker (reset link + limpeza opcional)
+ */
+module.exports = async (context) => {
+  const { projectId, accessToken, databaseUrl, backupDir } = context;
+  try {
+    const functionsDir = path.join(backupDir, 'edge-functions');
+    await ensureDir(functionsDir);
+
+    // Reset de link ao projeto de ORIGEM
+    const dbPassword = extractPasswordFromDbUrl(databaseUrl);
+    await ensureCleanLink(projectId, accessToken, dbPassword);
+
+    // Limpar pasta supabase/functions antes do backup
+    const supabaseFunctionsDir = path.join(process.cwd(), 'supabase', 'functions');
+    await cleanDir(supabaseFunctionsDir);
+    console.log(chalk.gray(' - Pasta supabase/functions limpa.'));
+
+    console.log(chalk.gray(' - Listando Edge Functions via Management API...'));
+
+    // Usar fetch direto para Management API com Personal Access Token
+    const functionsResponse = await fetch(`https://api.supabase.com/v1/projects/${projectId}/functions`, {
+      headers: {
+        'Authorization': `Bearer ${accessToken}`,
+        'Content-Type': 'application/json'
+      }
+    });
+
+    if (!functionsResponse.ok) {
+      console.log(chalk.yellow(`   ⚠️ Erro ao listar Edge Functions: ${functionsResponse.status} ${functionsResponse.statusText}`));
+      return { success: false, reason: 'api_error', functions: [] };
+    }
+
+    const functions = await functionsResponse.json();
+
+    if (!functions || functions.length === 0) {
+      console.log(chalk.gray(' - Nenhuma Edge Function encontrada'));
+      await writeJson(path.join(functionsDir, 'README.md'), {
+        message: 'Nenhuma Edge Function encontrada neste projeto'
+      });
+      return { success: true, reason: 'no_functions', functions: [] };
+    }
+
+    console.log(chalk.gray(`   - Encontradas ${functions.length} Edge Function(s)`));
+
+    const downloadedFunctions = [];
+    let successCount = 0;
+    let errorCount = 0;
+
+    // Baixar cada Edge Function via Supabase CLI
+    // Nota: O CLI ignora o cwd e sempre baixa para supabase/functions
+    for (const func of functions) {
+      try {
+        console.log(chalk.gray(`   - Baixando: ${func.name}...`));
+
+        // Criar diretório da função NO BACKUP
+        const functionTargetDir = path.join(functionsDir, func.name);
+        await ensureDir(functionTargetDir);
+
+        // Diretório temporário onde o supabase CLI irá baixar (supabase/functions)
+        const tempDownloadDir = path.join(process.cwd(), 'supabase', 'functions', func.name);
+
+        // Baixar Edge Function via Supabase CLI (sempre vai para supabase/functions)
+        const { execSync } = require('child_process');
+
+        execSync(`supabase functions download ${func.name}`, {
+          timeout: 60000,
+          stdio: 'pipe'
+        });
+
+        // COPIAR arquivos de supabase/functions para o backup
+        try {
+          const stat = await fs.stat(tempDownloadDir);
+          if (stat.isDirectory()) {
+            const files = await fs.readdir(tempDownloadDir);
+            for (const file of files) {
+              const srcPath = path.join(tempDownloadDir, file);
+              const dstPath = path.join(functionTargetDir, file);
+
+              const fileStats = await fs.stat(srcPath);
+              if (fileStats.isDirectory()) {
+                // Copiar diretórios recursivamente
+                await fs.cp(srcPath, dstPath, { recursive: true });
+              } else {
+                // Copiar arquivos
+                await fs.copyFile(srcPath, dstPath);
+              }
+            }
+          }
+        } catch {
+          // Arquivos não foram baixados, continuar
+          console.log(chalk.yellow(`   ⚠️ Nenhum arquivo encontrado em ${tempDownloadDir}`));
+        }
+
+        // LIMPAR supabase/functions após copiar
+        try {
+          await fs.rm(tempDownloadDir, { recursive: true, force: true });
+        } catch {
+          // Ignorar erro de limpeza
+        }
+
+        console.log(chalk.green(`   ✅ ${func.name} baixada com sucesso`));
+        successCount++;
+
+        downloadedFunctions.push({
+          name: func.name,
+          slug: func.name,
+          version: func.version || 'unknown',
+          files: await fs.readdir(functionTargetDir).catch(() => [])
+        });
+
+      } catch (error) {
+        console.log(chalk.yellow(`   ⚠️ Erro ao baixar ${func.name}: ${error.message}`));
+        errorCount++;
+      }
+    }
+
+    console.log(chalk.green(`📊 Backup de Edge Functions concluído:`));
+    console.log(chalk.green(`   ✅ Sucessos: ${successCount}`));
+    console.log(chalk.green(`   ❌ Erros: ${errorCount}`));
+
+    // Usar flag de limpeza do contexto (já foi perguntado no início)
+    const shouldClean = context?.cleanupFlags?.cleanFunctions || false;
+
+    if (shouldClean) {
+      await cleanDir(supabaseFunctionsDir);
+      console.log(chalk.gray(' - supabase/functions limpo.'));
+    }
+
+    return {
+      success: true,
+      reason: 'success',
+      functions: downloadedFunctions,
+      functions_count: functions.length,
+      success_count: successCount,
+      error_count: errorCount,
+      method: 'docker'
+    };
+
+  } catch (error) {
+    console.log(chalk.yellow(`   ⚠️ Erro durante backup de Edge Functions: ${error.message}`));
+    console.log('⏭️ Continuando com outros componentes...');
+    return { success: false, reason: 'download_error', error: error.message, functions: [] };
+  }
+};
+

package/src/commands/backup/steps/09-supabase-temp.js
@@ -0,0 +1,42 @@
+const chalk = require('chalk');
+const path = require('path');
+const { copyDirSafe } = require('../../../utils/fsExtra');
+const { cleanDir } = require('../../../utils/fsExtra');
+
+/**
+ * Etapa 9: Backup Supabase .temp (NOVA ETAPA INDEPENDENTE)
+ */
+module.exports = async (context) => {
+  const { backupDir } = context;
+  try {
+    const tempDir = path.join(process.cwd(), 'supabase', '.temp');
+    const backupTempDir = path.join(backupDir, 'supabase-temp');
+
+    const fileCount = await copyDirSafe(tempDir, backupTempDir);
+
+    console.log(chalk.gray(`   - Copiando supabase/.temp → backups/backup-${path.basename(backupDir)}/supabase-temp (${fileCount} arquivos)...`));
+
+    if (fileCount === 0) {
+      console.log(chalk.gray(' - Nenhum arquivo encontrado em supabase/.temp'));
+    } else {
+      console.log(chalk.green(`   ✅ ${fileCount} arquivo(s) copiado(s)`));
+    }
+
+    // Usar flag de limpeza do contexto (já foi perguntado no início)
+    const shouldClean = context?.cleanupFlags?.cleanTemp || false;
+
+    if (shouldClean) {
+      await cleanDir(tempDir);
+      console.log(chalk.gray(' - supabase/.temp apagado.'));
+    }
+
+    return {
+      success: true,
+      file_count: fileCount
+    };
+  } catch (error) {
+    console.log(chalk.yellow(`   ⚠️ Erro no backup do supabase/.temp: ${error.message}`));
+    return { success: false };
+  }
+};
+

package/src/commands/backup/steps/10-migrations.js
@@ -0,0 +1,74 @@
+const chalk = require('chalk');
+const path = require('path');
+const { execSync } = require('child_process');
+const { extractPasswordFromDbUrl, ensureCleanLink } = require('../../../utils/supabaseLink');
+const { cleanDir, countFiles, copyDirSafe } = require('../../../utils/fsExtra');
+
+/**
+ * Etapa 10: Backup Migrations (NOVA ETAPA INDEPENDENTE)
+ */
+module.exports = async (context) => {
+  const { projectId, accessToken, databaseUrl, backupDir } = context;
+  try {
+    // Reset de link ao projeto de ORIGEM
+    const dbPassword = extractPasswordFromDbUrl(databaseUrl);
+    await ensureCleanLink(projectId, accessToken, dbPassword);
+
+    // Limpar migrations local (opcional, mas recomendado para garantir servidor como fonte da verdade)
+    const migrationsDir = path.join(process.cwd(), 'supabase', 'migrations');
+    await cleanDir(migrationsDir);
+    console.log(chalk.gray(' - Limpando supabase/migrations...'));
+
+    // Baixar todas as migrations do servidor usando migration fetch
+    console.log(chalk.gray(' - Baixando todas as migrations do servidor usando migration fetch...'));
+
+    const env = {
+      ...process.env,
+      SUPABASE_ACCESS_TOKEN: accessToken
+    };
+
+    try {
+      execSync('supabase migration fetch', {
+        cwd: process.cwd(),
+        stdio: 'pipe',
+        encoding: 'utf8',
+        timeout: 120000,
+        env
+      });
+    } catch (error) {
+      console.log(chalk.yellow(`   ⚠️ Erro ao executar migration fetch: ${error.message}`));
+      console.log(chalk.yellow('   💡 Verifique se o projeto está linkado corretamente e se o token está válido.'));
+      return { success: false };
+    }
+
+    // Contar arquivos baixados
+    const fileCount = await countFiles(migrationsDir);
+    console.log(chalk.gray(`   - Arquivos baixados: ${fileCount} migrations`));
+
+    // Copiar migrations para o backup
+    const backupMigrationsDir = path.join(backupDir, 'migrations');
+    const copiedCount = await copyDirSafe(migrationsDir, backupMigrationsDir);
+    console.log(chalk.gray(`   - Copiando supabase/migrations → backups/backup-${path.basename(backupDir)}/migrations (${copiedCount} arquivos)...`));
+
+    if (copiedCount > 0) {
+      console.log(chalk.green(`   ✅ ${copiedCount} migration(s) copiada(s)`));
+    }
+
+    // Usar flag de limpeza do contexto (já foi perguntado no início)
+    const shouldClean = context?.cleanupFlags?.cleanMigrations || false;
+
+    if (shouldClean) {
+      await cleanDir(migrationsDir);
+      console.log(chalk.gray(' - supabase/migrations apagado.'));
+    }
+
+    return {
+      success: true,
+      file_count: copiedCount
+    };
+  } catch (error) {
+    console.log(chalk.yellow(`   ⚠️ Erro no backup das migrations: ${error.message}`));
+    return { success: false };
+  }
+};
+
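Taken together, steps 04-10 above populate a backup folder roughly like this (paths inferred from the code in each step; steps 00-03 — Docker validation and the database dumps — are not reproduced in this excerpt, so their output files are omitted):

  <backupDir>/
    auth-settings.json        (step 04)
    realtime-settings.json    (step 05)
    storage/<bucket>.json     (step 06)
    custom-roles.sql          (step 07)
    edge-functions/<name>/    (step 08)
    supabase-temp/            (step 09)
    migrations/               (step 10)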

package/src/commands/backup/utils.js
@@ -0,0 +1,69 @@
+const chalk = require('chalk');
+
+/**
+ * Função para mostrar mensagens educativas e encerrar elegantemente
+ */
+function showDockerMessagesAndExit(reason) {
+  console.log('');
+
+  switch (reason) {
+    case 'docker_not_installed':
+      console.log(chalk.red('❌ DOCKER DESKTOP NÃO ENCONTRADO'));
+      console.log('');
+      console.log(chalk.yellow('📋 Para fazer backup completo do Supabase, você precisa:'));
+      console.log(chalk.yellow('   1. Instalar Docker Desktop'));
+      console.log(chalk.yellow('   2. Executar Docker Desktop'));
+      console.log(chalk.yellow('   3. Repetir o comando de backup'));
+      console.log('');
+      console.log(chalk.blue('🔗 Download: https://docs.docker.com/desktop/install/'));
+      console.log('');
+      console.log(chalk.gray('💡 O Docker Desktop é obrigatório para backup completo do Supabase'));
+      console.log(chalk.gray('   - Database PostgreSQL'));
+      console.log(chalk.gray('   - Edge Functions'));
+      console.log(chalk.gray('   - Todos os componentes via Supabase CLI'));
+      break;
+
+    case 'docker_not_running':
+      console.log(chalk.red('❌ DOCKER DESKTOP NÃO ESTÁ EXECUTANDO'));
+      console.log('');
+      console.log(chalk.yellow('📋 Para fazer backup completo do Supabase, você precisa:'));
+      console.log(chalk.yellow('   1. Abrir Docker Desktop'));
+      console.log(chalk.yellow('   2. Aguardar inicialização completa'));
+      console.log(chalk.yellow('   3. Repetir o comando de backup'));
+      console.log('');
+      console.log(chalk.blue('💡 Dica: Docker Desktop deve estar rodando em segundo plano'));
+      console.log('');
+      console.log(chalk.gray('💡 O Docker Desktop é obrigatório para backup completo do Supabase'));
+      console.log(chalk.gray('   - Database PostgreSQL'));
+      console.log(chalk.gray('   - Edge Functions'));
+      console.log(chalk.gray('   - Todos os componentes via Supabase CLI'));
+      break;
+
+    case 'supabase_cli_not_found':
+      console.log(chalk.red('❌ SUPABASE CLI NÃO ENCONTRADO'));
+      console.log('');
+      console.log(chalk.yellow('📋 Para fazer backup completo do Supabase, você precisa:'));
+      console.log(chalk.yellow('   1. Instalar Supabase CLI'));
+      console.log(chalk.yellow('   2. Repetir o comando de backup'));
+      console.log('');
+      console.log(chalk.blue('🔗 Instalação: npm install -g supabase'));
+      console.log('');
+      console.log(chalk.gray('💡 O Supabase CLI é obrigatório para backup completo do Supabase'));
+      console.log(chalk.gray('   - Database PostgreSQL'));
+      console.log(chalk.gray('   - Edge Functions'));
+      console.log(chalk.gray('   - Todos os componentes via Docker'));
+      break;
+  }
+
+  console.log('');
+  console.log(chalk.red('🚫 Backup cancelado - Pré-requisitos não atendidos'));
+  console.log(chalk.gray('   Instale os componentes necessários e tente novamente'));
+  console.log('');
+
+  process.exit(1);
+}
+
+module.exports = {
+  showDockerMessagesAndExit
+};
+
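showDockerMessagesAndExit is presumably what the new steps/00-docker-validation.js (listed above but not reproduced in this excerpt) calls when a prerequisite is missing. A hedged sketch of how such a step might wire it up — the specific checks below are assumptions, not the package's actual code:

// Hypothetical sketch of steps/00-docker-validation.js; the checks are assumptions.
const { execSync } = require('child_process');
const { showDockerMessagesAndExit } = require('../utils');

module.exports = async () => {
  try {
    execSync('docker --version', { stdio: 'pipe' });   // is Docker installed at all?
  } catch {
    showDockerMessagesAndExit('docker_not_installed');
  }

  try {
    execSync('docker info', { stdio: 'pipe' });        // is the Docker daemon running?
  } catch {
    showDockerMessagesAndExit('docker_not_running');
  }

  try {
    execSync('supabase --version', { stdio: 'pipe' }); // is the Supabase CLI available?
  } catch {
    showDockerMessagesAndExit('supabase_cli_not_found');
  }

  return { success: true };
};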
package/src/commands/check.js
CHANGED
package/src/commands/config.js
CHANGED

@@ -4,7 +4,7 @@ const { readConfig, validateFor } = require('../utils/config');
 const { showBetaBanner } = require('../utils/banner');
 
 // Exportar FUNÇÃO em vez de objeto Command
-module.exports = async (
+module.exports = async (_options) => {
   showBetaBanner();
 
   try {