smoonb 0.0.12 → 0.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "smoonb",
-  "version": "0.0.12",
+  "version": "0.0.13",
   "description": "Complete Supabase backup and migration tool - EXPERIMENTAL VERSION - USE AT YOUR OWN RISK",
   "main": "index.js",
   "bin": {
@@ -5,7 +5,6 @@ const { ensureBin, runCommand } = require('../utils/cli');
 const { ensureDir, writeJson, copyDir } = require('../utils/fsx');
 const { sha256 } = require('../utils/hash');
 const { readConfig, validateFor } = require('../utils/config');
-const { IntrospectionService } = require('../services/introspect');
 const { showBetaBanner } = require('../utils/banner');
 
 // Export a FUNCTION instead of a Command object
@@ -45,8 +44,8 @@ module.exports = async (options) => {
     console.log(chalk.blue(`📁 Directory: ${backupDir}`));
     console.log(chalk.gray(`🔧 Using pg_dump: ${pgDumpPath}`));
 
-    // 1. Database backup using pg_dump/pg_dumpall
-    console.log(chalk.blue('\n📊 1/3 - PostgreSQL database backup...'));
+    // 1. Database backup using ONLY pg_dump/pg_dumpall
+    console.log(chalk.blue('\n📊 1/2 - PostgreSQL database backup...'));
     const dbBackupResult = await backupDatabaseWithPgDump(databaseUrl, backupDir, pgDumpPath);
 
     if (!dbBackupResult.success) {
@@ -58,12 +57,8 @@ module.exports = async (options) => {
       process.exit(1);
     }
 
-    // 2. Generate the real inventory
-    console.log(chalk.blue('\n🔍 2/3 - Generating the full inventory...'));
-    await generateInventory(config, backupDir);
-
-    // 3. Back up local Edge Functions
-    console.log(chalk.blue('\n⚡ 3/3 - Backing up local Edge Functions...'));
+    // 2. Back up local Edge Functions (if any exist)
+    console.log(chalk.blue('\n⚡ 2/2 - Backing up local Edge Functions...'));
     await backupLocalFunctions(backupDir);
 
     // Generate the backup manifest
@@ -72,6 +67,15 @@ module.exports = async (options) => {
     console.log(chalk.green('\n🎉 Full backup finished!'));
     console.log(chalk.blue(`📁 Location: ${backupDir}`));
     console.log(chalk.green(`✅ Database: ${dbBackupResult.files.length} SQL files generated`));
+
+    // Show a summary of the generated files
+    console.log(chalk.blue('\n📊 Summary of generated files:'));
+    for (const file of dbBackupResult.files) {
+      const filePath = path.join(backupDir, file.filename);
+      const stats = fs.statSync(filePath);
+      const sizeKB = (stats.size / 1024).toFixed(1);
+      console.log(chalk.gray(`   - ${file.filename}: ${sizeKB} KB`));
+    }
 
   } catch (error) {
     console.error(chalk.red(`❌ Backup error: ${error.message}`));
@@ -107,7 +111,7 @@ async function findPgDumpPath() {
   return null;
 }
 
-// Back up the database using pg_dump/pg_dumpall
+// Back up the database using ONLY pg_dump/pg_dumpall
 async function backupDatabaseWithPgDump(databaseUrl, backupDir, pgDumpPath) {
   try {
     // Parse the database URL
@@ -125,69 +129,85 @@ async function backupDatabaseWithPgDump(databaseUrl, backupDir, pgDumpPath) {
     const files = [];
     let success = true;
 
-    // 1. Back up roles using pg_dumpall
-    console.log(chalk.blue('   - Exporting roles...'));
-    const rolesFile = path.join(backupDir, 'roles.sql');
-    const rolesCommand = `"${pgDumpPath.replace('pg_dump', 'pg_dumpall')}" --host=${host} --port=${port} --username=${username} --roles-only -f "${rolesFile}"`;
+    // 1. Back up the schema using pg_dump (VALIDATED COMMAND)
+    console.log(chalk.blue('   - Exporting schema...'));
+    const schemaFile = path.join(backupDir, 'schema.sql');
+    const schemaCommand = `"${pgDumpPath}" "${databaseUrl}" --schema-only -f "${schemaFile}"`;
 
     try {
-      await runCommand(rolesCommand, {
+      await runCommand(schemaCommand, {
         env: { ...process.env, PGPASSWORD: password }
       });
 
-      if (await validateSqlFile(rolesFile)) {
-        files.push('roles.sql');
-        console.log(chalk.green('   ✅ Roles exported successfully'));
+      const schemaValidation = await validateSqlFile(schemaFile);
+      if (schemaValidation.valid) {
+        files.push({
+          filename: 'schema.sql',
+          size: schemaValidation.size,
+          sizeKB: schemaValidation.sizeKB
+        });
+        console.log(chalk.green(`   ✅ Schema exported: ${schemaValidation.sizeKB} KB`));
       } else {
-        console.log(chalk.yellow('   ⚠️ roles.sql is empty'));
+        console.log(chalk.red(`   ❌ schema.sql is invalid: ${schemaValidation.error}`));
         success = false;
       }
     } catch (error) {
-      console.log(chalk.red(`   ❌ Error exporting roles: ${error.message}`));
+      console.log(chalk.red(`   ❌ Error exporting schema: ${error.message}`));
       success = false;
     }
 
-    // 2. Back up the schema using pg_dump
-    console.log(chalk.blue('   - Exporting schema...'));
-    const schemaFile = path.join(backupDir, 'schema.sql');
-    const schemaCommand = `"${pgDumpPath}" --host=${host} --port=${port} --username=${username} --schema-only -f "${schemaFile}" ${database}`;
+    // 2. Back up the data using pg_dump (VALIDATED COMMAND)
+    console.log(chalk.blue('   - Exporting data...'));
+    const dataFile = path.join(backupDir, 'data.sql');
+    const dataCommand = `"${pgDumpPath}" "${databaseUrl}" --data-only -f "${dataFile}"`;
 
     try {
-      await runCommand(schemaCommand, {
+      await runCommand(dataCommand, {
         env: { ...process.env, PGPASSWORD: password }
       });
 
-      if (await validateSqlFile(schemaFile)) {
-        files.push('schema.sql');
-        console.log(chalk.green('   ✅ Schema exported successfully'));
+      const dataValidation = await validateSqlFile(dataFile);
+      if (dataValidation.valid) {
+        files.push({
+          filename: 'data.sql',
+          size: dataValidation.size,
+          sizeKB: dataValidation.sizeKB
+        });
+        console.log(chalk.green(`   ✅ Data exported: ${dataValidation.sizeKB} KB`));
       } else {
-        console.log(chalk.yellow('   ⚠️ schema.sql is empty'));
+        console.log(chalk.red(`   ❌ data.sql is invalid: ${dataValidation.error}`));
         success = false;
       }
     } catch (error) {
-      console.log(chalk.red(`   ❌ Error exporting schema: ${error.message}`));
+      console.log(chalk.red(`   ❌ Error exporting data: ${error.message}`));
       success = false;
     }
 
-    // 3. Back up the data using pg_dump
-    console.log(chalk.blue('   - Exporting data...'));
-    const dataFile = path.join(backupDir, 'data.sql');
-    const dataCommand = `"${pgDumpPath}" --host=${host} --port=${port} --username=${username} --data-only --use-copy -f "${dataFile}" ${database}`;
+    // 3. Back up roles using pg_dumpall (VALIDATED COMMAND)
+    console.log(chalk.blue('   - Exporting roles...'));
+    const rolesFile = path.join(backupDir, 'roles.sql');
+    const pgDumpallPath = pgDumpPath.replace('pg_dump', 'pg_dumpall');
+    const rolesCommand = `"${pgDumpallPath}" --host=${host} --port=${port} --username=${username} --roles-only -f "${rolesFile}"`;
 
     try {
-      await runCommand(dataCommand, {
+      await runCommand(rolesCommand, {
         env: { ...process.env, PGPASSWORD: password }
       });
 
-      if (await validateSqlFile(dataFile)) {
-        files.push('data.sql');
-        console.log(chalk.green('   ✅ Data exported successfully'));
+      const rolesValidation = await validateSqlFile(rolesFile);
+      if (rolesValidation.valid) {
+        files.push({
+          filename: 'roles.sql',
+          size: rolesValidation.size,
+          sizeKB: rolesValidation.sizeKB
+        });
+        console.log(chalk.green(`   ✅ Roles exported: ${rolesValidation.sizeKB} KB`));
       } else {
-        console.log(chalk.yellow('   ⚠️ data.sql is empty'));
+        console.log(chalk.red(`   ❌ roles.sql is invalid: ${rolesValidation.error}`));
         success = false;
       }
     } catch (error) {
-      console.log(chalk.red(`   ❌ Error exporting data: ${error.message}`));
+      console.log(chalk.red(`   ❌ Error exporting roles: ${error.message}`));
       success = false;
     }
 
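The substantive change in this hunk is how pg_dump is invoked: 0.0.13 passes the full connection URI as pg_dump's dbname argument instead of assembling --host/--port/--username flags plus a trailing database name. PostgreSQL client tools accept a connection string anywhere a database name is expected, so the two forms below are interchangeable; a minimal sketch, with hypothetical connection values and pgDumpPath assumed to resolve as in findPgDumpPath above:

    // Hypothetical connection values, for illustration only.
    const pgDumpPath = '/usr/bin/pg_dump';
    const databaseUrl = 'postgresql://postgres:secret@db.example.co:5432/postgres';

    // 0.0.13 form: the connection URI doubles as the dbname argument.
    const uriForm = `"${pgDumpPath}" "${databaseUrl}" --schema-only -f "schema.sql"`;

    // 0.0.12 form: individual flags; the password travels via PGPASSWORD.
    const flagForm = `"${pgDumpPath}" --host=db.example.co --port=5432 ` +
      `--username=postgres --schema-only -f "schema.sql" postgres`;

Note also that the removed 0.0.12 data command passed --use-copy, which does not appear to be a standard pg_dump option (COPY is already pg_dump's default output format); that would likely have made the old command fail and may be what the new "(VALIDATED COMMAND)" markers refer to.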
@@ -201,12 +221,14 @@ async function backupDatabaseWithPgDump(databaseUrl, backupDir, pgDumpPath) {
 async function validateSqlFile(filePath) {
   try {
     if (!fs.existsSync(filePath)) {
-      return false;
+      return { valid: false, error: 'File does not exist', size: 0, sizeKB: '0.0' };
     }
 
     const stats = fs.statSync(filePath);
+    const sizeKB = (stats.size / 1024).toFixed(1);
+
     if (stats.size === 0) {
-      return false;
+      return { valid: false, error: 'File is empty', size: 0, sizeKB: '0.0' };
     }
 
     const content = fs.readFileSync(filePath, 'utf8');
@@ -217,34 +239,17 @@ async function validateSqlFile(filePath) {
       content.toUpperCase().includes(keyword)
     );
 
-    return hasValidContent;
-  } catch (error) {
-    return false;
-  }
-}
-
-// Generate the full inventory
-async function generateInventory(config, backupDir) {
-  try {
-    const introspection = new IntrospectionService(config);
-    const inventory = await introspection.generateFullInventory();
-
-    // Save the inventory as separate files
-    const inventoryDir = path.join(backupDir, 'inventory');
-    await ensureDir(inventoryDir);
-
-    for (const [component, data] of Object.entries(inventory.components)) {
-      const filePath = path.join(inventoryDir, `${component}.json`);
-      await writeJson(filePath, data);
+    if (!hasValidContent) {
+      return { valid: false, error: 'No valid SQL content', size: stats.size, sizeKB };
     }
 
-    console.log(chalk.green('✅ Full inventory generated'));
+    return { valid: true, error: null, size: stats.size, sizeKB };
   } catch (error) {
-    console.log(chalk.yellow(`⚠️ Error generating inventory: ${error.message}`));
+    return { valid: false, error: error.message, size: 0, sizeKB: '0.0' };
   }
 }
 
-// Back up local Edge Functions
+// Back up local Edge Functions (if they exist)
 async function backupLocalFunctions(backupDir) {
   const localFunctionsPath = 'supabase/functions';
 
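validateSqlFile now returns a structured result instead of a bare boolean, which is what lets the export steps above print a reason on failure and a size on success. A minimal sketch of the new contract from a caller's point of view (the file path is hypothetical):

    // Result shape: { valid: boolean, error: string | null, size: number, sizeKB: string }
    const validation = await validateSqlFile('backups/latest/schema.sql');
    if (validation.valid) {
      console.log(`schema.sql ok: ${validation.sizeKB} KB`);
    } else {
      console.error(`schema.sql is invalid: ${validation.error}`);
    }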
@@ -267,17 +272,17 @@ async function generateBackupManifest(config, backupDir, sqlFiles) {
     created_at: new Date().toISOString(),
     project_id: config.supabase.projectId,
     smoonb_version: require('../../package.json').version,
-    backup_type: 'complete',
+    backup_type: 'postgresql_native',
     files: {
       roles: 'roles.sql',
       schema: 'schema.sql',
       data: 'data.sql'
     },
     hashes: {},
-    inventory: {},
     validation: {
       sql_files_created: sqlFiles.length,
-      sql_files_valid: sqlFiles.length === 3
+      sql_files_valid: sqlFiles.length === 3,
+      total_size_kb: sqlFiles.reduce((total, file) => total + parseFloat(file.sizeKB), 0).toFixed(1)
     }
   };
 
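The new total_size_kb field relies on each entry of sqlFiles carrying the sizeKB string ((size / 1024).toFixed(1)) recorded during validation; the manifest simply re-parses and sums those strings. A small worked example under that assumption, with hypothetical sizes:

    // Entries shaped as pushed by backupDatabaseWithPgDump (sizes hypothetical).
    const sqlFiles = [
      { filename: 'schema.sql', size: 46284, sizeKB: '45.2' },
      { filename: 'data.sql', size: 1261158, sizeKB: '1231.6' },
      { filename: 'roles.sql', size: 2150, sizeKB: '2.1' }
    ];

    const totalSizeKb = sqlFiles
      .reduce((total, file) => total + parseFloat(file.sizeKB), 0)
      .toFixed(1); // '1278.9'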
@@ -289,17 +294,6 @@ async function generateBackupManifest(config, backupDir, sqlFiles) {
     }
   }
 
-  // Add inventory references
-  const inventoryDir = path.join(backupDir, 'inventory');
-  if (fs.existsSync(inventoryDir)) {
-    const inventoryFiles = fs.readdirSync(inventoryDir);
-    manifest.inventory = inventoryFiles.reduce((acc, file) => {
-      const component = path.basename(file, '.json');
-      acc[component] = `inventory/${file}`;
-      return acc;
-    }, {});
-  }
-
   const manifestPath = path.join(backupDir, 'backup-manifest.json');
   await writeJson(manifestPath, manifest);
 }