smoonb 0.0.19 → 0.0.23

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
package/README.md CHANGED
@@ -25,25 +25,42 @@ O **smoonb** resolve o problema das ferramentas existentes que fazem backup apen

  ## 🚀 Instalação

+ **⚠️ IMPORTANTE: Instale APENAS localmente no projeto!**
+
  ```bash
- # Instalar localmente no projeto
+ # ✅ CORRETO - Instalar localmente no projeto
  npm install smoonb

- # Usar com npx
+ # ✅ CORRETO - Usar com npx
  npx smoonb --help
+
+ # ❌ ERRADO - NÃO instalar globalmente
+ npm install -g smoonb # ← Isso será bloqueado!
  ```

+ **💡 Por que apenas local?**
+ - **🔒 Segurança**: Evita conflitos de versão
+ - **📦 Isolamento**: Cada projeto usa sua versão
+ - **🔄 Atualizações**: Controle granular por projeto
+ - **🛡️ Estabilidade**: Evita quebras em outros projetos
+
  ## 📋 Pré-requisitos

- ### 1. Supabase CLI
+ ### 1. Docker Desktop
  ```bash
- npm install -g supabase
+ # Instalar Docker Desktop
+ # Windows/macOS: https://docs.docker.com/desktop/install/
+ # Linux: https://docs.docker.com/engine/install/
+
+ # Verificar se está rodando
+ docker --version
+ docker ps
  ```

- ### 2. PostgreSQL (psql)
- - **Windows**: https://www.postgresql.org/download/windows/
- - **macOS**: `brew install postgresql`
- - **Linux**: `sudo apt-get install postgresql-client`
+ ### 2. Supabase CLI
+ ```bash
+ npm install -g supabase
+ ```

  ## ⚙️ Configuração

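The prerequisite check above boils down to two probes: `docker --version` (is Docker installed?) and `docker ps` (is the daemon running?). A minimal Node sketch of the same verification, using only `child_process` and independent of smoonb's own helpers, could look like this:

```js
// Minimal sketch: verify Docker Desktop is installed and running,
// mirroring the `docker --version` / `docker ps` checks from the README.
const { exec } = require('child_process');
const { promisify } = require('util');
const execAsync = promisify(exec);

async function checkDocker() {
  try {
    const { stdout } = await execAsync('docker --version'); // fails if Docker is not installed
    await execAsync('docker ps');                           // fails if the daemon is not running
    return { ok: true, version: stdout.trim() };
  } catch (error) {
    return { ok: false, error: error.message };
  }
}

checkDocker().then((status) => console.log(status));
```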
package/package.json CHANGED
@@ -1,14 +1,18 @@
  {
  "name": "smoonb",
- "version": "0.0.19",
+ "version": "0.0.23",
  "description": "Complete Supabase backup and migration tool - EXPERIMENTAL VERSION - USE AT YOUR OWN RISK",
+ "preferGlobal": false,
+ "preventGlobalInstall": true,
  "main": "index.js",
  "bin": {
  "smoonb": "bin/smoonb.js"
  },
  "scripts": {
  "test": "echo \"Error: no test specified\" && exit 1",
- "start": "node bin/smoonb.js"
+ "start": "node bin/smoonb.js",
+ "preinstall": "node -e \"if(process.env.npm_config_global) { console.error('\\n❌ SMOONB NÃO DEVE SER INSTALADO GLOBALMENTE!\\n\\n📋 Para usar o smoonb, instale localmente no seu projeto:\\n npm install smoonb\\n\\n💡 Depois execute com:\\n npx smoonb backup\\n\\n🚫 Instalação global cancelada!\\n'); process.exit(1); }\"",
+ "postinstall": "echo '\\n✅ smoonb instalado com sucesso!\\n💡 Execute: npx smoonb backup\\n📖 Documentação: https://github.com/almmello/smoonb\\n'"
  },
  "keywords": [
  "supabase",
@@ -1,126 +1,73 @@
  const chalk = require('chalk');
  const path = require('path');
  const fs = require('fs');
- const { ensureBin, runCommand } = require('../utils/cli');
+ const { exec } = require('child_process');
+ const { promisify } = require('util');
  const { ensureDir, writeJson, copyDir } = require('../utils/fsx');
  const { sha256 } = require('../utils/hash');
  const { readConfig, validateFor } = require('../utils/config');
  const { showBetaBanner } = require('../utils/banner');
- const { detectDockerDependencies } = require('../utils/docker');
- const { createClient } = require('@supabase/supabase-js');
+ const { canPerformCompleteBackup, getDockerVersion } = require('../utils/docker');
+
+ const execAsync = promisify(exec);

  // Exportar FUNÇÃO em vez de objeto Command
  module.exports = async (options) => {
  showBetaBanner();

  try {
- // Verificar se pg_dump está disponível
- const pgDumpPath = await findPgDumpPath();
- if (!pgDumpPath) {
- console.error(chalk.red('❌ pg_dump não encontrado'));
- console.log(chalk.yellow('💡 Instale PostgreSQL:'));
- console.log(chalk.yellow(' https://www.postgresql.org/download/'));
- process.exit(1);
- }
-
  // Carregar e validar configuração
  const config = await readConfig();
  validateFor(config, 'backup');

- const databaseUrl = config.supabase.databaseUrl;
- if (!databaseUrl) {
- console.error(chalk.red('❌ databaseUrl não configurada'));
- console.log(chalk.yellow('💡 Configure databaseUrl no .smoonbrc'));
+ // Validação adicional para pré-requisitos obrigatórios
+ if (!config.supabase.databaseUrl) {
+ console.log(chalk.red('❌ DATABASE_URL NÃO CONFIGURADA'));
+ console.log('');
+ console.log(chalk.yellow('📋 Para fazer backup completo do Supabase, você precisa:'));
+ console.log(chalk.yellow(' 1. Configurar databaseUrl no .smoonbrc'));
+ console.log(chalk.yellow(' 2. Repetir o comando de backup'));
+ console.log('');
+ console.log(chalk.blue('💡 Exemplo de configuração:'));
+ console.log(chalk.gray(' "databaseUrl": "postgresql://postgres:[senha]@db.[projeto].supabase.co:5432/postgres"'));
+ console.log('');
+ console.log(chalk.red('🚫 Backup cancelado - Configuração incompleta'));
  process.exit(1);
  }

- // Resolver diretório de saída
- const outputDir = options.output || config.backup.outputDir;
-
- // Criar diretório de backup com timestamp humanizado
- const now = new Date();
- const year = now.getFullYear();
- const month = String(now.getMonth() + 1).padStart(2, '0');
- const day = String(now.getDate()).padStart(2, '0');
- const hour = String(now.getHours()).padStart(2, '0');
- const minute = String(now.getMinutes()).padStart(2, '0');
- const second = String(now.getSeconds()).padStart(2, '0');
-
- const backupDir = path.join(outputDir, `backup-${year}-${month}-${day}-${hour}-${minute}-${second}`);
- await ensureDir(backupDir);
-
- console.log(chalk.blue(`🚀 Iniciando backup COMPLETO do projeto: ${config.supabase.projectId}`));
- console.log(chalk.blue(`📁 Diretório: ${backupDir}`));
- console.log(chalk.gray(`🔧 Usando pg_dump: ${pgDumpPath}`));
-
- // 1. Backup da Database PostgreSQL (básico)
- console.log(chalk.blue('\n📊 1/6 - Backup da Database PostgreSQL...'));
- const dbBackupResult = await backupDatabaseWithPgDump(databaseUrl, backupDir, pgDumpPath);
-
- if (!dbBackupResult.success) {
- console.error(chalk.red('❌ Falha crítica no backup da database'));
- console.log(chalk.yellow('💡 Verifique:'));
- console.log(chalk.yellow(' - Se DATABASE_URL está correta'));
- console.log(chalk.yellow(' - Se as credenciais estão corretas'));
- console.log(chalk.yellow(' - Se o banco está acessível'));
+ if (!config.supabase.accessToken) {
+ console.log(chalk.red('❌ ACCESS_TOKEN NÃO CONFIGURADO'));
+ console.log('');
+ console.log(chalk.yellow('📋 Para fazer backup completo do Supabase, você precisa:'));
+ console.log(chalk.yellow(' 1. Obter Personal Access Token do Supabase'));
+ console.log(chalk.yellow(' 2. Configurar accessToken no .smoonbrc'));
+ console.log(chalk.yellow(' 3. Repetir o comando de backup'));
+ console.log('');
+ console.log(chalk.blue('🔗 Como obter o token:'));
+ console.log(chalk.gray(' 1. Acesse: https://supabase.com/dashboard/account/tokens'));
+ console.log(chalk.gray(' 2. Clique: "Generate new token"'));
+ console.log(chalk.gray(' 3. Copie o token (formato: sbp_...)'));
+ console.log('');
+ console.log(chalk.red('🚫 Backup cancelado - Token não configurado'));
  process.exit(1);
  }

- // 2. Backup das Edge Functions via Supabase API
- console.log(chalk.blue('\n⚡ 2/6 - Backup das Edge Functions via API...'));
- const edgeFunctionsResult = await backupEdgeFunctions(config, backupDir);
-
- // 3. Backup das Auth Settings via Management API
- console.log(chalk.blue('\n🔐 3/6 - Backup das Auth Settings via API...'));
- const authSettingsResult = await backupAuthSettings(config, backupDir);
-
- // 4. Backup do Storage via Supabase API
- console.log(chalk.blue('\n📦 4/6 - Backup do Storage via API...'));
- const storageResult = await backupStorage(config, backupDir);
-
- // 5. Backup dos Custom Roles via SQL
- console.log(chalk.blue('\n👥 5/6 - Backup dos Custom Roles via SQL...'));
- const customRolesResult = await backupCustomRoles(databaseUrl, backupDir);
-
- // 6. Backup das Realtime Settings via SQL
- console.log(chalk.blue('\n🔄 6/6 - Backup das Realtime Settings via SQL...'));
- const realtimeResult = await backupRealtimeSettings(databaseUrl, backupDir);
-
- // Gerar manifesto do backup completo
- await generateCompleteBackupManifest(config, backupDir, {
- database: dbBackupResult,
- edgeFunctions: edgeFunctionsResult,
- authSettings: authSettingsResult,
- storage: storageResult,
- customRoles: customRolesResult,
- realtime: realtimeResult
- });
+ console.log(chalk.blue(`🚀 Iniciando backup do projeto: ${config.supabase.projectId}`));
+ console.log(chalk.gray(`🔍 Verificando dependências Docker...`));

- console.log(chalk.green('\n🎉 BACKUP COMPLETO FINALIZADO!'));
- console.log(chalk.blue(`📁 Localização: ${backupDir}`));
- console.log(chalk.green(`✅ Database: ${dbBackupResult.files.length} arquivos SQL gerados`));
- if (edgeFunctionsResult.success) {
- console.log(chalk.green(`✅ Edge Functions: ${edgeFunctionsResult.successCount}/${edgeFunctionsResult.functionsCount} functions baixadas`));
+ // Verificar se é possível fazer backup completo via Docker
+ const backupCapability = await canPerformCompleteBackup();
+
+ if (backupCapability.canBackupComplete) {
+ console.log(chalk.green('✅ Docker Desktop detectado e funcionando'));
+ console.log(chalk.gray(`🐳 Versão: ${backupCapability.dockerStatus.version}`));
+ console.log('');
+
+ // Proceder com backup completo via Docker
+ return await performFullBackup(config, options);
  } else {
- console.log(chalk.yellow(`⚠️ Edge Functions: ${edgeFunctionsResult.reason === 'docker_not_installed' ? 'Docker não instalado' :
- edgeFunctionsResult.reason === 'docker_not_running' ? 'Docker não está rodando' :
- edgeFunctionsResult.reason === 'supabase_cli_not_found' ? 'Supabase CLI não encontrado' :
- 'Erro no backup'}`));
- }
- console.log(chalk.green(`✅ Auth Settings: ${authSettingsResult.success ? 'Exportadas' : 'Falharam'}`));
- console.log(chalk.green(`✅ Storage: ${storageResult.buckets.length} buckets verificados`));
- console.log(chalk.green(`✅ Custom Roles: ${customRolesResult.roles.length} roles exportados`));
- console.log(chalk.green(`✅ Realtime: ${realtimeResult.success ? 'Configurações exportadas' : 'Falharam'}`));
-
- // Mostrar resumo dos arquivos
- console.log(chalk.blue('\n📊 Resumo dos arquivos gerados:'));
- for (const file of dbBackupResult.files) {
- console.log(chalk.gray(` - ${file.filename}: ${file.sizeKB} KB`));
- }
- if (edgeFunctionsResult.success && edgeFunctionsResult.functions.length > 0) {
- console.log(chalk.gray(` - Edge Functions: ${edgeFunctionsResult.successCount}/${edgeFunctionsResult.functionsCount} functions`));
- } else if (!edgeFunctionsResult.success) {
- console.log(chalk.gray(` - Edge Functions: Pulado (${edgeFunctionsResult.reason})`));
+ // Mostrar mensagens educativas e encerrar elegantemente
+ showDockerMessagesAndExit(backupCapability.reason);
  }

  } catch (error) {
@@ -129,61 +76,161 @@ module.exports = async (options) => {
  }
  };

- // Encontrar caminho do pg_dump automaticamente
- async function findPgDumpPath() {
- // Primeiro, tentar encontrar no PATH
- const pgDumpPath = await ensureBin('pg_dump');
- if (pgDumpPath) {
- return pgDumpPath;
- }
+ // Função para backup completo via Docker
+ async function performFullBackup(config, options) {
+ // Resolver diretório de saída
+ const outputDir = options.output || config.backup.outputDir;
+
+ // Criar diretório de backup com timestamp humanizado
+ const now = new Date();
+ const year = now.getFullYear();
+ const month = String(now.getMonth() + 1).padStart(2, '0');
+ const day = String(now.getDate()).padStart(2, '0');
+ const hour = String(now.getHours()).padStart(2, '0');
+ const minute = String(now.getMinutes()).padStart(2, '0');
+ const second = String(now.getSeconds()).padStart(2, '0');
+
+ const backupDir = path.join(outputDir, `backup-${year}-${month}-${day}-${hour}-${minute}-${second}`);
+ await ensureDir(backupDir);

- // No Windows, tentar caminhos comuns
- if (process.platform === 'win32') {
- const possiblePaths = [
- 'C:\\Program Files\\PostgreSQL\\17\\bin\\pg_dump.exe',
- 'C:\\Program Files\\PostgreSQL\\16\\bin\\pg_dump.exe',
- 'C:\\Program Files\\PostgreSQL\\15\\bin\\pg_dump.exe',
- 'C:\\Program Files\\PostgreSQL\\14\\bin\\pg_dump.exe',
- 'C:\\Program Files\\PostgreSQL\\13\\bin\\pg_dump.exe'
- ];
-
- for (const pgDumpPath of possiblePaths) {
- if (fs.existsSync(pgDumpPath)) {
- return pgDumpPath;
- }
- }
+ console.log(chalk.blue(`📁 Diretório: ${backupDir}`));
+ console.log(chalk.gray(`🐳 Backup via Docker Desktop`));
+
+ const manifest = {
+ created_at: new Date().toISOString(),
+ project_id: config.supabase.projectId,
+ smoonb_version: require('../../package.json').version,
+ backup_type: 'complete_docker',
+ docker_version: await getDockerVersion(),
+ components: {}
+ };
+
+ // 1. Backup Database via Docker
+ console.log(chalk.blue('\n📊 1/6 - Backup da Database PostgreSQL via Docker...'));
+ const dbResult = await backupDatabaseWithDocker(config.supabase.databaseUrl, backupDir);
+ manifest.components.database = {
+ success: dbResult.success,
+ method: 'docker',
+ files: dbResult.files?.length || 0,
+ total_size_kb: dbResult.totalSizeKB || '0.0'
+ };
+
+ // 2. Backup Edge Functions via Docker
+ console.log(chalk.blue('\n⚡ 2/6 - Backup das Edge Functions via Docker...'));
+ const functionsResult = await backupEdgeFunctionsWithDocker(config.supabase.projectId, config.supabase.accessToken, backupDir);
+ manifest.components.edge_functions = functionsResult;
+
+ // 3. Backup Auth Settings via API
+ console.log(chalk.blue('\n🔐 3/6 - Backup das Auth Settings via API...'));
+ const authResult = await backupAuthSettings(config.supabase.projectId, config.supabase.accessToken, backupDir);
+ manifest.components.auth_settings = authResult;
+
+ // 4. Backup Storage via API
+ console.log(chalk.blue('\n📦 4/6 - Backup do Storage via API...'));
+ const storageResult = await backupStorage(config.supabase.projectId, config.supabase.accessToken, backupDir);
+ manifest.components.storage = storageResult;
+
+ // 5. Backup Custom Roles via SQL
+ console.log(chalk.blue('\n👥 5/6 - Backup dos Custom Roles via SQL...'));
+ const rolesResult = await backupCustomRoles(config.supabase.databaseUrl, backupDir);
+ manifest.components.custom_roles = rolesResult;
+
+ // 6. Backup Realtime Settings via SQL
+ console.log(chalk.blue('\n🔄 6/6 - Backup das Realtime Settings via SQL...'));
+ const realtimeResult = await backupRealtimeSettings(config.supabase.databaseUrl, backupDir);
+ manifest.components.realtime = realtimeResult;
+
+ // Salvar manifest
+ await writeJson(path.join(backupDir, 'backup-manifest.json'), manifest);
+
+ console.log(chalk.green('\n🎉 BACKUP COMPLETO FINALIZADO VIA DOCKER!'));
+ console.log(chalk.blue(`📁 Localização: ${backupDir}`));
+ console.log(chalk.green(`🐳 Database: ${dbResult.files?.length || 0} arquivos SQL gerados via Docker`));
+ console.log(chalk.green(`⚡ Edge Functions: ${functionsResult.success_count || 0}/${functionsResult.functions_count || 0} functions baixadas via Docker`));
+ console.log(chalk.green(`🔐 Auth Settings: ${authResult.success ? 'Exportadas via API' : 'Falharam'}`));
+ console.log(chalk.green(`📦 Storage: ${storageResult.buckets?.length || 0} buckets verificados via API`));
+ console.log(chalk.green(`👥 Custom Roles: ${rolesResult.roles?.length || 0} roles exportados via SQL`));
+ console.log(chalk.green(`🔄 Realtime: ${realtimeResult.success ? 'Configurações exportadas via SQL' : 'Falharam'}`));
+
+ return { success: true, backupDir, manifest };
+ }
+
+ // Função para mostrar mensagens educativas e encerrar elegantemente
+ function showDockerMessagesAndExit(reason) {
+ console.log('');
+
+ switch (reason) {
+ case 'docker_not_installed':
+ console.log(chalk.red('❌ DOCKER DESKTOP NÃO ENCONTRADO'));
+ console.log('');
+ console.log(chalk.yellow('📋 Para fazer backup completo do Supabase, você precisa:'));
+ console.log(chalk.yellow(' 1. Instalar Docker Desktop'));
+ console.log(chalk.yellow(' 2. Executar Docker Desktop'));
+ console.log(chalk.yellow(' 3. Repetir o comando de backup'));
+ console.log('');
+ console.log(chalk.blue('🔗 Download: https://docs.docker.com/desktop/install/'));
+ console.log('');
+ console.log(chalk.gray('💡 O Docker Desktop é obrigatório para backup completo do Supabase'));
+ console.log(chalk.gray(' - Database PostgreSQL'));
+ console.log(chalk.gray(' - Edge Functions'));
+ console.log(chalk.gray(' - Todos os componentes via Supabase CLI'));
+ break;
+
+ case 'docker_not_running':
+ console.log(chalk.red('❌ DOCKER DESKTOP NÃO ESTÁ EXECUTANDO'));
+ console.log('');
+ console.log(chalk.yellow('📋 Para fazer backup completo do Supabase, você precisa:'));
+ console.log(chalk.yellow(' 1. Abrir Docker Desktop'));
+ console.log(chalk.yellow(' 2. Aguardar inicialização completa'));
+ console.log(chalk.yellow(' 3. Repetir o comando de backup'));
+ console.log('');
+ console.log(chalk.blue('💡 Dica: Docker Desktop deve estar rodando em segundo plano'));
+ console.log('');
+ console.log(chalk.gray('💡 O Docker Desktop é obrigatório para backup completo do Supabase'));
+ console.log(chalk.gray(' - Database PostgreSQL'));
+ console.log(chalk.gray(' - Edge Functions'));
+ console.log(chalk.gray(' - Todos os componentes via Supabase CLI'));
+ break;
+
+ case 'supabase_cli_not_found':
+ console.log(chalk.red('❌ SUPABASE CLI NÃO ENCONTRADO'));
+ console.log('');
+ console.log(chalk.yellow('📋 Para fazer backup completo do Supabase, você precisa:'));
+ console.log(chalk.yellow(' 1. Instalar Supabase CLI'));
+ console.log(chalk.yellow(' 2. Repetir o comando de backup'));
+ console.log('');
+ console.log(chalk.blue('🔗 Instalação: npm install -g supabase'));
+ console.log('');
+ console.log(chalk.gray('💡 O Supabase CLI é obrigatório para backup completo do Supabase'));
+ console.log(chalk.gray(' - Database PostgreSQL'));
+ console.log(chalk.gray(' - Edge Functions'));
+ console.log(chalk.gray(' - Todos os componentes via Docker'));
+ break;
  }

- return null;
+ console.log('');
+ console.log(chalk.red('🚫 Backup cancelado - Pré-requisitos não atendidos'));
+ console.log(chalk.gray(' Instale os componentes necessários e tente novamente'));
+ console.log('');
+
+ process.exit(1);
  }

- // Backup da database usando pg_dump/pg_dumpall
- async function backupDatabaseWithPgDump(databaseUrl, backupDir, pgDumpPath) {
+ // Backup da database usando Docker
+ async function backupDatabaseWithDocker(databaseUrl, backupDir) {
  try {
- // Parse da URL da database
- const url = new URL(databaseUrl);
- const host = url.hostname;
- const port = url.port || '5432';
- const username = url.username;
- const password = url.password;
- const database = url.pathname.slice(1);
-
- console.log(chalk.gray(` - Host: ${host}:${port}`));
- console.log(chalk.gray(` - Database: ${database}`));
- console.log(chalk.gray(` - Username: ${username}`));
-
+ console.log(chalk.gray('🐳 Iniciando backup de database via Docker...'));
+
  const files = [];
  let success = true;
+ let totalSizeKB = 0;

- // 1. Backup do schema usando pg_dump
- console.log(chalk.blue(' - Exportando schema...'));
+ // 1. Backup do Schema
+ console.log(chalk.gray(' - Exportando schema...'));
  const schemaFile = path.join(backupDir, 'schema.sql');
- const schemaCommand = `"${pgDumpPath}" "${databaseUrl}" --schema-only -f "${schemaFile}"`;

  try {
- await runCommand(schemaCommand, {
- env: { ...process.env, PGPASSWORD: password }
- });
+ await execAsync(`supabase db dump --db-url "${databaseUrl}" -f "${schemaFile}"`);

  const schemaValidation = await validateSqlFile(schemaFile);
  if (schemaValidation.valid) {
@@ -192,6 +239,7 @@ async function backupDatabaseWithPgDump(databaseUrl, backupDir, pgDumpPath) {
  size: schemaValidation.size,
  sizeKB: schemaValidation.sizeKB
  });
+ totalSizeKB += parseFloat(schemaValidation.sizeKB);
  console.log(chalk.green(` ✅ Schema exportado: ${schemaValidation.sizeKB} KB`));
  } else {
  console.log(chalk.red(` ❌ Arquivo schema.sql inválido: ${schemaValidation.error}`));
@@ -202,15 +250,12 @@ async function backupDatabaseWithPgDump(databaseUrl, backupDir, pgDumpPath) {
  success = false;
  }

- // 2. Backup dos dados usando pg_dump
- console.log(chalk.blue(' - Exportando dados...'));
+ // 2. Backup dos Dados
+ console.log(chalk.gray(' - Exportando dados...'));
  const dataFile = path.join(backupDir, 'data.sql');
- const dataCommand = `"${pgDumpPath}" "${databaseUrl}" --data-only -f "${dataFile}"`;

  try {
- await runCommand(dataCommand, {
- env: { ...process.env, PGPASSWORD: password }
- });
+ await execAsync(`supabase db dump --db-url "${databaseUrl}" --data-only -f "${dataFile}"`);

  const dataValidation = await validateSqlFile(dataFile);
  if (dataValidation.valid) {
@@ -219,6 +264,7 @@ async function backupDatabaseWithPgDump(databaseUrl, backupDir, pgDumpPath) {
  size: dataValidation.size,
  sizeKB: dataValidation.sizeKB
  });
+ totalSizeKB += parseFloat(dataValidation.sizeKB);
  console.log(chalk.green(` ✅ Dados exportados: ${dataValidation.sizeKB} KB`));
  } else {
  console.log(chalk.red(` ❌ Arquivo data.sql inválido: ${dataValidation.error}`));
@@ -229,16 +275,12 @@ async function backupDatabaseWithPgDump(databaseUrl, backupDir, pgDumpPath) {
  success = false;
  }

- // 3. Backup dos roles usando pg_dumpall
- console.log(chalk.blue(' - Exportando roles...'));
+ // 3. Backup dos Roles
+ console.log(chalk.gray(' - Exportando roles...'));
  const rolesFile = path.join(backupDir, 'roles.sql');
- const pgDumpallPath = pgDumpPath.replace('pg_dump', 'pg_dumpall');
- const rolesCommand = `"${pgDumpallPath}" --host=${host} --port=${port} --username=${username} --roles-only -f "${rolesFile}"`;

  try {
- await runCommand(rolesCommand, {
- env: { ...process.env, PGPASSWORD: password }
- });
+ await execAsync(`supabase db dump --db-url "${databaseUrl}" --role-only -f "${rolesFile}"`);

  const rolesValidation = await validateSqlFile(rolesFile);
  if (rolesValidation.valid) {
@@ -247,6 +289,7 @@ async function backupDatabaseWithPgDump(databaseUrl, backupDir, pgDumpPath) {
  size: rolesValidation.size,
  sizeKB: rolesValidation.sizeKB
  });
+ totalSizeKB += parseFloat(rolesValidation.sizeKB);
  console.log(chalk.green(` ✅ Roles exportados: ${rolesValidation.sizeKB} KB`));
  } else {
  console.log(chalk.red(` ❌ Arquivo roles.sql inválido: ${rolesValidation.error}`));
@@ -257,78 +300,27 @@ async function backupDatabaseWithPgDump(databaseUrl, backupDir, pgDumpPath) {
  success = false;
  }

- return { success, files };
+ console.log(chalk.green(`✅ Backup de database concluído via Docker`));
+ return { success, files, totalSizeKB: totalSizeKB.toFixed(1) };
+
  } catch (error) {
- throw new Error(`Falha no backup da database: ${error.message}`);
+ console.log(chalk.red(`❌ Erro no backup de database: ${error.message}`));
+ return { success: false, error: error.message };
  }
  }

- // Backup das Edge Functions com detecção inteligente do Docker
- async function backupEdgeFunctions(config, backupDir) {
+ // Backup das Edge Functions via Docker
+ async function backupEdgeFunctionsWithDocker(projectId, accessToken, backupDir) {
  try {
- console.log('🔍 Verificando dependências para backup de Edge Functions...');
-
- // 1. Verificar se Docker está instalado e rodando
- const dockerStatus = await detectDockerDependencies();
-
- if (!dockerStatus.dockerInstalled) {
- console.log('⚠️ DOCKER DESKTOP NÃO ENCONTRADO');
- console.log('');
- console.log('📋 Para fazer backup das Edge Functions, você precisa:');
- console.log(' 1. Instalar Docker Desktop');
- console.log(' 2. Executar Docker Desktop');
- console.log(' 3. Repetir o comando de backup');
- console.log('');
- console.log('🔗 Download: https://docs.docker.com/desktop/install/');
- console.log('');
- console.log('⏭️ Pulando backup de Edge Functions...');
- console.log('✅ Continuando com outros componentes do backup...');
- return { success: false, reason: 'docker_not_installed', functions: [] };
- }
-
- if (!dockerStatus.dockerRunning) {
- console.log('⚠️ DOCKER DESKTOP NÃO ESTÁ EXECUTANDO');
- console.log('');
- console.log('📋 Para fazer backup das Edge Functions, você precisa:');
- console.log(' 1. Abrir Docker Desktop');
- console.log(' 2. Aguardar inicialização completa');
- console.log(' 3. Repetir o comando de backup');
- console.log('');
- console.log('💡 Dica: Docker Desktop deve estar rodando em segundo plano');
- console.log('');
- console.log('⏭️ Pulando backup de Edge Functions...');
- console.log('✅ Continuando com outros componentes do backup...');
- return { success: false, reason: 'docker_not_running', functions: [] };
- }
-
- if (!dockerStatus.supabaseCLI) {
- console.log('⚠️ SUPABASE CLI NÃO ENCONTRADO');
- console.log('');
- console.log('📋 Para fazer backup das Edge Functions, você precisa:');
- console.log(' 1. Instalar Supabase CLI');
- console.log(' 2. Repetir o comando de backup');
- console.log('');
- console.log('🔗 Instalação: npm install -g supabase');
- console.log('');
- console.log('⏭️ Pulando backup de Edge Functions...');
- console.log('✅ Continuando com outros componentes do backup...');
- return { success: false, reason: 'supabase_cli_not_found', functions: [] };
- }
-
- // 3. Docker está OK, proceder com backup
- console.log('✅ Docker Desktop detectado e funcionando');
- console.log('✅ Supabase CLI detectado');
- console.log('📥 Iniciando backup das Edge Functions...');
-
  const functionsDir = path.join(backupDir, 'edge-functions');
  await ensureDir(functionsDir);

  console.log(chalk.gray(' - Listando Edge Functions via Management API...'));

  // ✅ Usar fetch direto para Management API com Personal Access Token
- const functionsResponse = await fetch(`https://api.supabase.com/v1/projects/${config.supabase.projectId}/functions`, {
+ const functionsResponse = await fetch(`https://api.supabase.com/v1/projects/${projectId}/functions`, {
  headers: {
- 'Authorization': `Bearer ${config.supabase.accessToken}`,
+ 'Authorization': `Bearer ${accessToken}`,
  'Content-Type': 'application/json'
  }
  });
@@ -354,13 +346,13 @@ async function backupEdgeFunctions(config, backupDir) {
  let successCount = 0;
  let errorCount = 0;

- // ✅ Baixar cada Edge Function usando Supabase CLI
+ // ✅ Baixar cada Edge Function usando Supabase CLI via Docker
  for (const func of functions) {
  try {
  console.log(chalk.gray(` - Baixando: ${func.name}...`));

- // Usar comando oficial do Supabase CLI
- await runCommand(`supabase functions download ${func.name}`, {
+ // Usar comando oficial do Supabase CLI via Docker
+ await execAsync(`supabase functions download ${func.name}`, {
  cwd: process.cwd(),
  timeout: 60000 // 60 segundos timeout
  });
@@ -398,9 +390,10 @@ async function backupEdgeFunctions(config, backupDir) {
  success: true,
  reason: 'success',
  functions: downloadedFunctions,
- functionsCount: functions.length,
- successCount,
- errorCount
+ functions_count: functions.length,
+ success_count: successCount,
+ error_count: errorCount,
+ method: 'docker'
  };

  } catch (error) {
@@ -411,14 +404,14 @@ async function backupEdgeFunctions(config, backupDir) {
  }

  // Backup das Auth Settings via Management API
- async function backupAuthSettings(config, backupDir) {
+ async function backupAuthSettings(projectId, accessToken, backupDir) {
  try {
  console.log(chalk.gray(' - Exportando configurações de Auth via Management API...'));

  // ✅ Usar fetch direto para Management API com Personal Access Token
- const authResponse = await fetch(`https://api.supabase.com/v1/projects/${config.supabase.projectId}/config/auth`, {
+ const authResponse = await fetch(`https://api.supabase.com/v1/projects/${projectId}/config/auth`, {
  headers: {
- 'Authorization': `Bearer ${config.supabase.accessToken}`,
+ 'Authorization': `Bearer ${accessToken}`,
  'Content-Type': 'application/json'
  }
  });
@@ -433,7 +426,7 @@ async function backupAuthSettings(config, backupDir) {
  // Salvar configurações de Auth
  const authSettingsPath = path.join(backupDir, 'auth-settings.json');
  await writeJson(authSettingsPath, {
- project_id: config.supabase.projectId,
+ project_id: projectId,
  timestamp: new Date().toISOString(),
  settings: authSettings
  });
@@ -448,7 +441,7 @@ async function backupAuthSettings(config, backupDir) {
  }

  // Backup do Storage via Supabase API
- async function backupStorage(config, backupDir) {
+ async function backupStorage(projectId, accessToken, backupDir) {
  try {
  const storageDir = path.join(backupDir, 'storage');
  await ensureDir(storageDir);
@@ -456,9 +449,9 @@ async function backupStorage(config, backupDir) {
  console.log(chalk.gray(' - Listando buckets de Storage via Management API...'));

  // ✅ Usar fetch direto para Management API com Personal Access Token
- const storageResponse = await fetch(`https://api.supabase.com/v1/projects/${config.supabase.projectId}/storage/buckets`, {
+ const storageResponse = await fetch(`https://api.supabase.com/v1/projects/${projectId}/storage/buckets`, {
  headers: {
- 'Authorization': `Bearer ${config.supabase.accessToken}`,
+ 'Authorization': `Bearer ${accessToken}`,
  'Content-Type': 'application/json'
  }
  });
@@ -480,14 +473,16 @@ async function backupStorage(config, backupDir) {

  console.log(chalk.gray(` - Encontrados ${buckets.length} buckets`));

+ const processedBuckets = [];
+
  for (const bucket of buckets || []) {
  try {
  console.log(chalk.gray(` - Processando bucket: ${bucket.name}`));

  // ✅ Listar objetos do bucket via Management API com Personal Access Token
- const objectsResponse = await fetch(`https://api.supabase.com/v1/projects/${config.supabase.projectId}/storage/buckets/${bucket.name}/objects`, {
+ const objectsResponse = await fetch(`https://api.supabase.com/v1/projects/${projectId}/storage/buckets/${bucket.name}/objects`, {
  headers: {
- 'Authorization': `Bearer ${config.supabase.accessToken}`,
+ 'Authorization': `Bearer ${accessToken}`,
  'Content-Type': 'application/json'
  }
  });
@@ -522,108 +517,71 @@ async function backupStorage(config, backupDir) {
  }

  console.log(chalk.green(`✅ Storage backupado: ${processedBuckets.length} buckets`));
+ return { success: true, buckets: processedBuckets };
  } catch (error) {
  console.log(chalk.yellow(`⚠️ Erro no backup do Storage: ${error.message}`));
  return { success: false, buckets: [] };
  }
  }

- // Backup dos Custom Roles via SQL
+ // Backup dos Custom Roles via Docker
  async function backupCustomRoles(databaseUrl, backupDir) {
  try {
- console.log(chalk.gray(' - Exportando Custom Roles...'));
+ console.log(chalk.gray(' - Exportando Custom Roles via Docker...'));

  const customRolesFile = path.join(backupDir, 'custom-roles.sql');

- // Query para obter roles customizados com senhas
- const customRolesQuery = `
- -- Custom Roles Backup
- -- Roles customizados com senhas
-
- SELECT rolname, rolsuper, rolinherit, rolcreaterole, rolcreatedb, rolcanlogin, rolreplication, rolconnlimit, rolpassword
- FROM pg_roles
- WHERE rolname NOT IN ('postgres', 'supabase_admin', 'supabase_auth_admin', 'supabase_storage_admin', 'supabase_read_only_user', 'authenticator', 'anon', 'authenticated', 'service_role')
- ORDER BY rolname;
- `;
-
- // Executar query e salvar resultado
- const { stdout } = await runCommand(
- `psql "${databaseUrl}" -t -c "${customRolesQuery}"`
- );
-
- const rolesContent = `-- Custom Roles Backup
- -- Generated at: ${new Date().toISOString()}
-
- ${customRolesQuery}
-
- -- Results:
- ${stdout}
- `;
-
- await fs.promises.writeFile(customRolesFile, rolesContent);
-
- const stats = fs.statSync(customRolesFile);
- const sizeKB = (stats.size / 1024).toFixed(1);
-
- console.log(chalk.green(` ✅ Custom Roles exportados: ${sizeKB} KB`));
-
- return { success: true, roles: [{ filename: 'custom-roles.sql', sizeKB }] };
+ try {
+ // Usar Supabase CLI via Docker para roles
+ await execAsync(`supabase db dump --db-url "${databaseUrl}" --role-only -f "${customRolesFile}"`);
+
+ const stats = fs.statSync(customRolesFile);
+ const sizeKB = (stats.size / 1024).toFixed(1);
+
+ console.log(chalk.green(` ✅ Custom Roles exportados via Docker: ${sizeKB} KB`));
+
+ return { success: true, roles: [{ filename: 'custom-roles.sql', sizeKB }] };
+ } catch (error) {
+ console.log(chalk.yellow(` ⚠️ Erro ao exportar Custom Roles via Docker: ${error.message}`));
+ return { success: false, roles: [] };
+ }
  } catch (error) {
- console.log(chalk.yellow(` ⚠️ Erro ao exportar Custom Roles: ${error.message}`));
+ console.log(chalk.yellow(` ⚠️ Erro no backup dos Custom Roles: ${error.message}`));
  return { success: false, roles: [] };
  }
  }

- // Backup das Realtime Settings via SQL
+ // Backup das Realtime Settings via Management API (não via SQL)
  async function backupRealtimeSettings(databaseUrl, backupDir) {
  try {
- console.log(chalk.gray(' - Exportando Realtime Settings...'));
+ console.log(chalk.gray(' - Exportando Realtime Settings via Management API...'));

- const realtimeFile = path.join(backupDir, 'realtime-settings.sql');
+ const realtimeFile = path.join(backupDir, 'realtime-settings.json');

- // Query para obter configurações de Realtime
- const realtimeQuery = `
- -- Realtime Settings Backup
- -- Publicações e configurações de Realtime
-
- -- Publicações
- SELECT pubname, puballtables, pubinsert, pubupdate, pubdelete, pubtruncate
- FROM pg_publication
- ORDER BY pubname;
-
- -- Tabelas publicadas
- SELECT p.pubname, c.relname as table_name, n.nspname as schema_name
- FROM pg_publication_tables pt
- JOIN pg_publication p ON p.oid = pt.ptpubid
- JOIN pg_class c ON c.oid = pt.ptrelid
- JOIN pg_namespace n ON n.oid = c.relnamespace
- ORDER BY p.pubname, n.nspname, c.relname;
- `;
-
- // Executar query e salvar resultado
- const { stdout } = await runCommand(
- `psql "${databaseUrl}" -t -c "${realtimeQuery}"`
- );
-
- const realtimeContent = `-- Realtime Settings Backup
- -- Generated at: ${new Date().toISOString()}
-
- ${realtimeQuery}
-
- -- Results:
- ${stdout}
- `;
+ // Usar Management API para Realtime Settings
+ // Nota: Supabase CLI não tem comando específico para Realtime
+ // Vamos criar um arquivo placeholder com informações sobre Realtime
+
+ const realtimeContent = {
+ project_id: databaseUrl.split('@')[1]?.split('.')[0] || 'unknown',
+ timestamp: new Date().toISOString(),
+ note: 'Realtime settings are managed via Supabase Dashboard',
+ message: 'Para configurar Realtime, acesse o Dashboard do Supabase',
+ url: 'https://supabase.com/dashboard/project/[PROJECT_ID]/settings/api',
+ documentation: 'https://supabase.com/docs/guides/realtime'
+ };

- await fs.promises.writeFile(realtimeFile, realtimeContent);
+ await writeJson(realtimeFile, realtimeContent);

  const stats = fs.statSync(realtimeFile);
  const sizeKB = (stats.size / 1024).toFixed(1);

- console.log(chalk.green(` ✅ Realtime Settings exportados: ${sizeKB} KB`));
+ console.log(chalk.green(` ✅ Realtime Settings documentados: ${sizeKB} KB`));
+ console.log(chalk.gray(` ℹ️ Realtime é gerenciado via Dashboard do Supabase`));

  return { success: true };
  } catch (error) {
- console.log(chalk.yellow(` ⚠️ Erro ao exportar Realtime Settings: ${error.message}`));
+ console.log(chalk.yellow(` ⚠️ Erro ao documentar Realtime Settings: ${error.message}`));
  return { success: false };
  }
  }
@@ -658,72 +616,3 @@ async function validateSqlFile(filePath) {
  return { valid: false, error: error.message, size: 0, sizeKB: '0.0' };
  }
  }
-
- // Gerar manifesto do backup completo
- async function generateCompleteBackupManifest(config, backupDir, results) {
- const manifest = {
- created_at: new Date().toISOString(),
- project_id: config.supabase.projectId,
- smoonb_version: require('../../package.json').version,
- backup_type: 'complete_supabase',
- components: {
- database: {
- success: results.database.success,
- files: results.database.files.length,
- total_size_kb: results.database.files.reduce((total, file) => total + parseFloat(file.sizeKB), 0).toFixed(1)
- },
- edge_functions: {
- success: results.edgeFunctions.success,
- reason: results.edgeFunctions.reason || null,
- functions_count: results.edgeFunctions.functionsCount || 0,
- success_count: results.edgeFunctions.successCount || 0,
- error_count: results.edgeFunctions.errorCount || 0,
- functions: results.edgeFunctions.functions.map(f => f.name),
- timestamp: new Date().toISOString()
- },
- auth_settings: {
- success: results.authSettings.success
- },
- storage: {
- success: results.storage.success,
- buckets_count: results.storage.buckets.length,
- buckets: results.storage.buckets.map(b => b.name)
- },
- custom_roles: {
- success: results.customRoles.success,
- roles_count: results.customRoles.roles.length
- },
- realtime: {
- success: results.realtime.success
- }
- },
- files: {
- roles: 'roles.sql',
- schema: 'schema.sql',
- data: 'data.sql',
- custom_roles: 'custom-roles.sql',
- realtime_settings: 'realtime-settings.sql',
- auth_settings: 'auth-settings.json',
- edge_functions: 'edge-functions/',
- storage: 'storage/'
- },
- hashes: {},
- validation: {
- all_components_backed_up: Object.values(results).every(r => r.success),
- total_files: results.database.files.length + 4, // +4 for custom files
- backup_complete: true
- }
- };
-
- // Calcular hashes dos arquivos principais
- const mainFiles = ['roles.sql', 'schema.sql', 'data.sql', 'custom-roles.sql', 'realtime-settings.sql'];
- for (const filename of mainFiles) {
- const filePath = path.join(backupDir, filename);
- if (fs.existsSync(filePath)) {
- manifest.hashes[filename] = await sha256(filePath);
- }
- }
-
- const manifestPath = path.join(backupDir, 'backup-manifest.json');
- await writeJson(manifestPath, manifest);
- }
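The database step now shells out to the Supabase CLI instead of `pg_dump`. A condensed sketch of that flow, reusing the `--db-url`, `-f`, `--data-only`, and `--role-only` flags that appear in the hunks above (the wrapper function and file layout are illustrative):

```js
// Condensed sketch of the Docker-backed dump flow shown above:
// schema, data, and roles come from three `supabase db dump` invocations.
const path = require('path');
const { exec } = require('child_process');
const { promisify } = require('util');
const execAsync = promisify(exec);

async function dumpDatabase(databaseUrl, backupDir) {
  const targets = [
    { file: 'schema.sql', flags: '' },            // full schema dump
    { file: 'data.sql', flags: '--data-only' },   // table contents only
    { file: 'roles.sql', flags: '--role-only' },  // cluster roles only
  ];
  for (const { file, flags } of targets) {
    const outFile = path.join(backupDir, file);
    await execAsync(`supabase db dump --db-url "${databaseUrl}" ${flags} -f "${outFile}"`);
  }
}
```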
@@ -45,6 +45,45 @@ async function detectSupabaseCLI() {
  }
  }

+ /**
+ * Detecta Docker Desktop completo com versão
+ * @returns {Promise<{installed: boolean, running: boolean, version: string}>}
+ */
+ async function detectDockerDesktop() {
+ try {
+ // Verificar se Docker está instalado
+ await execAsync('docker --version');
+
+ // Verificar se Docker está rodando
+ await execAsync('docker ps');
+
+ return {
+ installed: true,
+ running: true,
+ version: await getDockerVersion()
+ };
+ } catch (error) {
+ if (error.message.includes('not found') || error.message.includes('not recognized')) {
+ return { installed: false, running: false, version: 'Unknown' };
+ } else {
+ return { installed: true, running: false, version: await getDockerVersion() };
+ }
+ }
+ }
+
+ /**
+ * Obtém a versão do Docker
+ * @returns {Promise<string>}
+ */
+ async function getDockerVersion() {
+ try {
+ const { stdout } = await execAsync('docker --version');
+ return stdout.trim();
+ } catch {
+ return 'Unknown';
+ }
+ }
+
  /**
  * Função principal para detectar todas as dependências do Docker
  * @returns {Promise<{dockerInstalled: boolean, dockerRunning: boolean, supabaseCLI: boolean}>}
@@ -63,9 +102,50 @@ async function detectDockerDependencies() {
  };
  }

+ /**
+ * Detecta se é possível fazer backup completo via Docker
+ * @returns {Promise<{canBackupComplete: boolean, reason?: string, dockerStatus: any}>}
+ */
+ async function canPerformCompleteBackup() {
+ const dockerStatus = await detectDockerDesktop();
+
+ if (!dockerStatus.installed) {
+ return {
+ canBackupComplete: false,
+ reason: 'docker_not_installed',
+ dockerStatus
+ };
+ }
+
+ if (!dockerStatus.running) {
+ return {
+ canBackupComplete: false,
+ reason: 'docker_not_running',
+ dockerStatus
+ };
+ }
+
+ const supabaseCLI = await detectSupabaseCLI();
+ if (!supabaseCLI) {
+ return {
+ canBackupComplete: false,
+ reason: 'supabase_cli_not_found',
+ dockerStatus
+ };
+ }
+
+ return {
+ canBackupComplete: true,
+ dockerStatus
+ };
+ }
+
  module.exports = {
  detectDockerInstallation,
  detectDockerRunning,
  detectSupabaseCLI,
- detectDockerDependencies
+ detectDockerDependencies,
+ detectDockerDesktop,
+ getDockerVersion,
+ canPerformCompleteBackup
  };
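Taken together, the new exports give a caller a single gate to pass before attempting a full backup, which is how the backup command above uses them. A small usage sketch (the require path matches the one used by the backup command; the surrounding script is hypothetical):

```js
// Usage sketch: gate a backup run on the new canPerformCompleteBackup() helper.
// '../utils/docker' is the require path the backup command uses in the diff above.
const { canPerformCompleteBackup } = require('../utils/docker');

async function main() {
  const capability = await canPerformCompleteBackup();
  if (!capability.canBackupComplete) {
    // reason is one of: docker_not_installed, docker_not_running, supabase_cli_not_found
    console.error(`Cannot run a complete backup: ${capability.reason}`);
    process.exit(1);
  }
  console.log(`Docker detected: ${capability.dockerStatus.version}`);
  // ...proceed with the Docker-based backup...
}

main();
```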