smoonb 0.0.11 → 0.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "smoonb",
-  "version": "0.0.11",
+  "version": "0.0.13",
   "description": "Complete Supabase backup and migration tool - EXPERIMENTAL VERSION - USE AT YOUR OWN RISK",
   "main": "index.js",
   "bin": {
@@ -1,10 +1,10 @@
 const chalk = require('chalk');
 const path = require('path');
+const fs = require('fs');
 const { ensureBin, runCommand } = require('../utils/cli');
 const { ensureDir, writeJson, copyDir } = require('../utils/fsx');
 const { sha256 } = require('../utils/hash');
 const { readConfig, validateFor } = require('../utils/config');
-const { IntrospectionService } = require('../services/introspect');
 const { showBetaBanner } = require('../utils/banner');
 
 // Export a FUNCTION instead of a Command object
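
Note: as the comment says, the module exports a plain async function rather than a commander-style `Command` object, so the CLI entry point wires it up itself. A minimal sketch of what that wiring could look like (the `backup` command name, the option, and the `./commands/backup.js` path are illustrative assumptions, not taken from the package):

```js
// Hypothetical entry point; assumes the diffed file lives at ./commands/backup.js
const { Command } = require('commander');

const program = new Command();
program
  .command('backup')
  .option('-o, --output <dir>', 'backup directory')
  .action(async (options) => {
    // The exported function receives the parsed options object directly.
    const runBackup = require('./commands/backup');
    await runBackup(options);
  });

program.parseAsync(process.argv);
```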
@@ -12,13 +12,12 @@ module.exports = async (options) => {
   showBetaBanner();
 
   try {
-    // Check that the Supabase CLI is available
-    const supabasePath = await ensureBin('supabase');
-    if (!supabasePath) {
-      console.error(chalk.red('❌ Supabase CLI not found'));
-      console.log(chalk.yellow('💡 Install the Supabase CLI:'));
-      console.log(chalk.yellow(' npm install -g supabase'));
-      console.log(chalk.yellow(' or visit: https://supabase.com/docs/guides/cli'));
+    // Check that pg_dump is available
+    const pgDumpPath = await findPgDumpPath();
+    if (!pgDumpPath) {
+      console.error(chalk.red('❌ pg_dump not found'));
+      console.log(chalk.yellow('💡 Install PostgreSQL:'));
+      console.log(chalk.yellow(' https://www.postgresql.org/download/'));
       process.exit(1);
     }
 
@@ -43,24 +42,40 @@ module.exports = async (options) => {
 
     console.log(chalk.blue(`🚀 Starting backup of project: ${config.supabase.projectId}`));
     console.log(chalk.blue(`📁 Directory: ${backupDir}`));
+    console.log(chalk.gray(`🔧 Using pg_dump: ${pgDumpPath}`));
 
-    // 1. Database backup using the Supabase CLI
-    console.log(chalk.blue('\n📊 1/3 - PostgreSQL database backup...'));
-    await backupDatabaseWithSupabaseCLI(databaseUrl, backupDir);
-
-    // 2. Generate the real inventory
-    console.log(chalk.blue('\n🔍 2/3 - Generating full inventory...'));
-    await generateInventory(config, backupDir);
+    // 1. Database backup using ONLY pg_dump/pg_dumpall
+    console.log(chalk.blue('\n📊 1/2 - PostgreSQL database backup...'));
+    const dbBackupResult = await backupDatabaseWithPgDump(databaseUrl, backupDir, pgDumpPath);
+
+    if (!dbBackupResult.success) {
+      console.error(chalk.red(' Critical failure in database backup'));
+      console.log(chalk.yellow('💡 Check:'));
+      console.log(chalk.yellow(' - that DATABASE_URL is correct'));
+      console.log(chalk.yellow(' - that the credentials are correct'));
+      console.log(chalk.yellow(' - that the database is reachable'));
+      process.exit(1);
+    }
 
-    // 3. Back up local Edge Functions
-    console.log(chalk.blue('\n⚡ 3/3 - Backing up local Edge Functions...'));
+    // 2. Back up local Edge Functions (if any)
+    console.log(chalk.blue('\n⚡ 2/2 - Backing up local Edge Functions...'));
     await backupLocalFunctions(backupDir);
 
     // Generate the backup manifest
-    await generateBackupManifest(config, backupDir);
+    await generateBackupManifest(config, backupDir, dbBackupResult.files);
 
     console.log(chalk.green('\n🎉 Full backup finished!'));
     console.log(chalk.blue(`📁 Location: ${backupDir}`));
+    console.log(chalk.green(`✅ Database: ${dbBackupResult.files.length} SQL files generated`));
+
+    // Show a summary of the generated files
+    console.log(chalk.blue('\n📊 Summary of generated files:'));
+    for (const file of dbBackupResult.files) {
+      const filePath = path.join(backupDir, file.filename);
+      const stats = fs.statSync(filePath);
+      const sizeKB = (stats.size / 1024).toFixed(1);
+      console.log(chalk.gray(` - ${file.filename}: ${sizeKB} KB`));
+    }
 
   } catch (error) {
     console.error(chalk.red(`❌ Backup error: ${error.message}`));
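
Note: the summary loop above re-stats every file with `fs.statSync`, even though `backupDatabaseWithPgDump` (next hunk) already records `size` and `sizeKB` on each entry it pushes. A sketch of an equivalent summary that reuses the recorded sizes instead of touching the filesystem again:

```js
const chalk = require('chalk');

// Assumes each entry in `files` carries { filename, size, sizeKB },
// as pushed by backupDatabaseWithPgDump below.
function printBackupSummary(files) {
  console.log(chalk.blue('\n📊 Summary of generated files:'));
  for (const file of files) {
    console.log(chalk.gray(` - ${file.filename}: ${file.sizeKB} KB`));
  }
}
```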
@@ -68,57 +83,177 @@ module.exports = async (options) => {
   }
 };
 
-// Back up the database using the Supabase CLI
-async function backupDatabaseWithSupabaseCLI(databaseUrl, backupDir) {
-  try {
-    console.log(chalk.blue(' - Exporting roles...'));
-    const { stdout: rolesOutput } = await runCommand(
-      `supabase db dump --db-url "${databaseUrl}" -f roles.sql --role-only`
-    );
+// Find the pg_dump path automatically
+async function findPgDumpPath() {
+  // First, try to find it on the PATH
+  const pgDumpPath = await ensureBin('pg_dump');
+  if (pgDumpPath) {
+    return pgDumpPath;
+  }
+
+  // On Windows, try common install paths
+  if (process.platform === 'win32') {
+    const possiblePaths = [
+      'C:\\Program Files\\PostgreSQL\\17\\bin\\pg_dump.exe',
+      'C:\\Program Files\\PostgreSQL\\16\\bin\\pg_dump.exe',
+      'C:\\Program Files\\PostgreSQL\\15\\bin\\pg_dump.exe',
+      'C:\\Program Files\\PostgreSQL\\14\\bin\\pg_dump.exe',
+      'C:\\Program Files\\PostgreSQL\\13\\bin\\pg_dump.exe'
+    ];
 
+    for (const pgDumpPath of possiblePaths) {
+      if (fs.existsSync(pgDumpPath)) {
+        return pgDumpPath;
+      }
+    }
+  }
+
+  return null;
+}
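
Note: the Windows fallback only knows PostgreSQL 13 through 17, so a newer major version installed in the default location would not be found. One way to avoid the hard-coded list is to scan the default install root for version directories. A sketch, assuming the stock `C:\Program Files\PostgreSQL\<version>\bin` layout (custom install paths are not covered):

```js
const fs = require('fs');
const path = require('path');

// Scan the default PostgreSQL install root and return the newest
// pg_dump.exe found, or null if none exists.
function findPgDumpOnWindows() {
  const root = 'C:\\Program Files\\PostgreSQL';
  if (!fs.existsSync(root)) return null;
  const versions = fs.readdirSync(root)
    .filter((name) => /^\d+(\.\d+)?$/.test(name))
    .sort((a, b) => parseFloat(b) - parseFloat(a)); // newest first
  for (const version of versions) {
    const candidate = path.join(root, version, 'bin', 'pg_dump.exe');
    if (fs.existsSync(candidate)) return candidate;
  }
  return null;
}
```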
+
+// Back up the database using ONLY pg_dump/pg_dumpall
+async function backupDatabaseWithPgDump(databaseUrl, backupDir, pgDumpPath) {
+  try {
+    // Parse the database URL
+    const url = new URL(databaseUrl);
+    const host = url.hostname;
+    const port = url.port || '5432';
+    const username = url.username;
+    const password = url.password;
+    const database = url.pathname.slice(1);
+
+    console.log(chalk.gray(` - Host: ${host}:${port}`));
+    console.log(chalk.gray(` - Database: ${database}`));
+    console.log(chalk.gray(` - Username: ${username}`));
+
+    const files = [];
+    let success = true;
+
+    // 1. Schema backup using pg_dump (VALIDATED COMMAND)
     console.log(chalk.blue(' - Exporting schema...'));
-    const { stdout: schemaOutput } = await runCommand(
-      `supabase db dump --db-url "${databaseUrl}" -f schema.sql`
-    );
+    const schemaFile = path.join(backupDir, 'schema.sql');
+    const schemaCommand = `"${pgDumpPath}" "${databaseUrl}" --schema-only -f "${schemaFile}"`;
 
+    try {
+      await runCommand(schemaCommand, {
+        env: { ...process.env, PGPASSWORD: password }
+      });
+
+      const schemaValidation = await validateSqlFile(schemaFile);
+      if (schemaValidation.valid) {
+        files.push({
+          filename: 'schema.sql',
+          size: schemaValidation.size,
+          sizeKB: schemaValidation.sizeKB
+        });
+        console.log(chalk.green(` ✅ Schema exported: ${schemaValidation.sizeKB} KB`));
+      } else {
+        console.log(chalk.red(` ❌ Invalid schema.sql file: ${schemaValidation.error}`));
+        success = false;
+      }
+    } catch (error) {
+      console.log(chalk.red(` ❌ Error exporting schema: ${error.message}`));
+      success = false;
+    }
+
+    // 2. Data backup using pg_dump (VALIDATED COMMAND)
     console.log(chalk.blue(' - Exporting data...'));
-    const { stdout: dataOutput } = await runCommand(
-      `supabase db dump --db-url "${databaseUrl}" -f data.sql --use-copy --data-only`
-    );
+    const dataFile = path.join(backupDir, 'data.sql');
+    const dataCommand = `"${pgDumpPath}" "${databaseUrl}" --data-only -f "${dataFile}"`;
+
+    try {
+      await runCommand(dataCommand, {
+        env: { ...process.env, PGPASSWORD: password }
+      });
+
+      const dataValidation = await validateSqlFile(dataFile);
+      if (dataValidation.valid) {
+        files.push({
+          filename: 'data.sql',
+          size: dataValidation.size,
+          sizeKB: dataValidation.sizeKB
+        });
+        console.log(chalk.green(` ✅ Data exported: ${dataValidation.sizeKB} KB`));
+      } else {
+        console.log(chalk.red(` ❌ Invalid data.sql file: ${dataValidation.error}`));
+        success = false;
+      }
+    } catch (error) {
+      console.log(chalk.red(` ❌ Error exporting data: ${error.message}`));
+      success = false;
+    }
+
+    // 3. Roles backup using pg_dumpall (VALIDATED COMMAND)
+    console.log(chalk.blue(' - Exporting roles...'));
+    const rolesFile = path.join(backupDir, 'roles.sql');
+    const pgDumpallPath = pgDumpPath.replace('pg_dump', 'pg_dumpall');
+    const rolesCommand = `"${pgDumpallPath}" --host=${host} --port=${port} --username=${username} --roles-only -f "${rolesFile}"`;
+
+    try {
+      await runCommand(rolesCommand, {
+        env: { ...process.env, PGPASSWORD: password }
+      });
+
+      const rolesValidation = await validateSqlFile(rolesFile);
+      if (rolesValidation.valid) {
+        files.push({
+          filename: 'roles.sql',
+          size: rolesValidation.size,
+          sizeKB: rolesValidation.sizeKB
+        });
+        console.log(chalk.green(` ✅ Roles exported: ${rolesValidation.sizeKB} KB`));
+      } else {
+        console.log(chalk.red(` ❌ Invalid roles.sql file: ${rolesValidation.error}`));
+        success = false;
+      }
+    } catch (error) {
+      console.log(chalk.red(` ❌ Error exporting roles: ${error.message}`));
+      success = false;
+    }
 
-    console.log(chalk.green('✅ Database exported successfully'));
+    return { success, files };
   } catch (error) {
     throw new Error(`Database backup failed: ${error.message}`);
   }
 }
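
Note: the three commands are assembled by string interpolation and handed to a shell, so a password or path containing quotes or shell metacharacters can break the invocation. (For the two pg_dump calls, `PGPASSWORD` is belt-and-braces, since the connection URL already embeds the password; it does matter for the pg_dumpall call, which passes host/port/username separately.) A sketch of a shell-free alternative using `child_process.execFile` with an argument array — not the package's `runCommand` helper, just one safer option:

```js
const { execFile } = require('child_process');
const { promisify } = require('util');

const execFileAsync = promisify(execFile);

// Invoke pg_dump without a shell, so paths and connection strings
// need no quoting. `args` is e.g. ['--schema-only', '-f', schemaFile].
async function runPgDump(pgDumpPath, databaseUrl, args, password) {
  return execFileAsync(pgDumpPath, [databaseUrl, ...args], {
    env: { ...process.env, PGPASSWORD: password },
  });
}
```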
 
-// Generate the full inventory
-async function generateInventory(config, backupDir) {
+// Validate an SQL file (non-empty, with valid content)
+async function validateSqlFile(filePath) {
   try {
-    const introspection = new IntrospectionService(config);
-    const inventory = await introspection.generateFullInventory();
+    if (!fs.existsSync(filePath)) {
+      return { valid: false, error: 'File does not exist', size: 0, sizeKB: '0.0' };
+    }
 
-    // Save the inventory as separate files
-    const inventoryDir = path.join(backupDir, 'inventory');
-    await ensureDir(inventoryDir);
+    const stats = fs.statSync(filePath);
+    const sizeKB = (stats.size / 1024).toFixed(1);
+
+    if (stats.size === 0) {
+      return { valid: false, error: 'Empty file', size: 0, sizeKB: '0.0' };
+    }
 
-    for (const [component, data] of Object.entries(inventory.components)) {
-      const filePath = path.join(inventoryDir, `${component}.json`);
-      await writeJson(filePath, data);
+    const content = fs.readFileSync(filePath, 'utf8');
+
+    // Check that it contains valid SQL content
+    const sqlKeywords = ['CREATE', 'INSERT', 'COPY', 'ALTER', 'DROP', 'GRANT', 'REVOKE'];
+    const hasValidContent = sqlKeywords.some(keyword =>
+      content.toUpperCase().includes(keyword)
+    );
+
+    if (!hasValidContent) {
+      return { valid: false, error: 'No valid SQL content', size: stats.size, sizeKB };
     }
 
-    console.log(chalk.green('✅ Full inventory generated'));
+    return { valid: true, error: null, size: stats.size, sizeKB };
   } catch (error) {
-    console.log(chalk.yellow(`⚠️ Error generating inventory: ${error.message}`));
+    return { valid: false, error: error.message, size: 0, sizeKB: '0.0' };
   }
 }
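
Note: `validateSqlFile` reads the whole dump into memory to look for keywords, which is wasteful for multi-gigabyte `data.sql` files. A sketch of a cheaper check that scans only the first 64 KB, relying on pg_dump/pg_dumpall emitting `SET`, `CREATE`, or `COPY` statements near the top of the file:

```js
const fs = require('fs');

// Read only the head of the file and look for SQL keywords there.
function quickSqlCheck(filePath, bytes = 64 * 1024) {
  const fd = fs.openSync(filePath, 'r');
  try {
    const buf = Buffer.alloc(bytes);
    const read = fs.readSync(fd, buf, 0, bytes, 0);
    const head = buf.toString('utf8', 0, read).toUpperCase();
    return ['CREATE', 'INSERT', 'COPY', 'ALTER', 'SET'].some((kw) => head.includes(kw));
  } finally {
    fs.closeSync(fd);
  }
}
```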
 
-// Back up local Edge Functions
+// Back up local Edge Functions (if any)
 async function backupLocalFunctions(backupDir) {
   const localFunctionsPath = 'supabase/functions';
 
   try {
-    const fs = require('fs');
     if (fs.existsSync(localFunctionsPath)) {
       const functionsBackupDir = path.join(backupDir, 'functions');
       await copyDir(localFunctionsPath, functionsBackupDir);
@@ -132,23 +267,26 @@ async function backupLocalFunctions(backupDir) {
 }
 
 // Generate the backup manifest
-async function generateBackupManifest(config, backupDir) {
+async function generateBackupManifest(config, backupDir, sqlFiles) {
   const manifest = {
     created_at: new Date().toISOString(),
     project_id: config.supabase.projectId,
     smoonb_version: require('../../package.json').version,
-    backup_type: 'complete',
+    backup_type: 'postgresql_native',
     files: {
       roles: 'roles.sql',
       schema: 'schema.sql',
       data: 'data.sql'
     },
     hashes: {},
-    inventory: {}
+    validation: {
+      sql_files_created: sqlFiles.length,
+      sql_files_valid: sqlFiles.length === 3,
+      total_size_kb: sqlFiles.reduce((total, file) => total + parseFloat(file.sizeKB), 0).toFixed(1)
+    }
   };
 
   // Compute hashes of the SQL files
-  const fs = require('fs');
   for (const [type, filename] of Object.entries(manifest.files)) {
     const filePath = path.join(backupDir, filename);
     if (fs.existsSync(filePath)) {
@@ -156,17 +294,6 @@ async function generateBackupManifest(config, backupDir) {
     }
   }
 
-  // Add references to the inventory
-  const inventoryDir = path.join(backupDir, 'inventory');
-  if (fs.existsSync(inventoryDir)) {
-    const inventoryFiles = fs.readdirSync(inventoryDir);
-    manifest.inventory = inventoryFiles.reduce((acc, file) => {
-      const component = path.basename(file, '.json');
-      acc[component] = `inventory/${file}`;
-      return acc;
-    }, {});
-  }
-
   const manifestPath = path.join(backupDir, 'backup-manifest.json');
   await writeJson(manifestPath, manifest);
 }
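
For reference, a backup-manifest.json produced by the new code would look roughly like this (all values are illustrative; the hash digests are truncated placeholders for the hex output of the `sha256` helper):

```json
{
  "created_at": "2025-01-01T12:00:00.000Z",
  "project_id": "your-project-id",
  "smoonb_version": "0.0.13",
  "backup_type": "postgresql_native",
  "files": {
    "roles": "roles.sql",
    "schema": "schema.sql",
    "data": "data.sql"
  },
  "hashes": {
    "roles": "4f2a…",
    "schema": "9b1c…",
    "data": "d7e0…"
  },
  "validation": {
    "sql_files_created": 3,
    "sql_files_valid": true,
    "total_size_kb": "1243.5"
  }
}
```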