smoonb 0.0.74 → 0.0.76

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. package/package.json +1 -1
  2. package/src/commands/backup/index.js +62 -65
  3. package/src/commands/backup/steps/00-docker-validation.js +6 -4
  4. package/src/commands/backup/steps/01-database.js +8 -4
  5. package/src/commands/backup/steps/02-database-separated.js +11 -8
  6. package/src/commands/backup/steps/03-database-settings.js +8 -4
  7. package/src/commands/backup/steps/04-auth-settings.js +6 -3
  8. package/src/commands/backup/steps/05-realtime-settings.js +5 -2
  9. package/src/commands/backup/steps/06-storage.js +29 -26
  10. package/src/commands/backup/steps/07-custom-roles.js +6 -3
  11. package/src/commands/backup/steps/08-edge-functions.js +15 -11
  12. package/src/commands/backup/steps/09-supabase-temp.js +9 -6
  13. package/src/commands/backup/steps/10-migrations.js +14 -10
  14. package/src/commands/check.js +5 -3
  15. package/src/commands/restore/index.js +51 -46
  16. package/src/commands/restore/steps/00-backup-selection.js +6 -4
  17. package/src/commands/restore/steps/01-components-selection.js +30 -28
  18. package/src/commands/restore/steps/03-database.js +21 -17
  19. package/src/commands/restore/steps/04-edge-functions.js +16 -13
  20. package/src/commands/restore/steps/05-auth-settings.js +10 -7
  21. package/src/commands/restore/steps/06-storage.js +50 -42
  22. package/src/commands/restore/steps/07-database-settings.js +10 -7
  23. package/src/commands/restore/steps/08-realtime-settings.js +10 -7
  24. package/src/commands/restore/utils.js +15 -13
  25. package/src/i18n/locales/en.json +427 -1
  26. package/src/i18n/locales/pt-BR.json +426 -1
  27. package/src/interactive/envMapper.js +30 -25
  28. package/src/utils/realtime-settings.js +15 -9
  29. package/src/utils/supabaseLink.js +11 -10
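Across these files the release replaces hard-coded Portuguese console strings with i18n lookups: each step resolves a translator with const getT = global.smoonbI18n?.t || t; and passes interpolation values such as { count: buckets.length }, with the new keys landing in src/i18n/locales/en.json and pt-BR.json. A minimal sketch of a compatible t(key, vars) helper follows; the real module in package/src/i18n is not part of this diff, so the locale table and {placeholder} interpolation syntax below are assumptions.

// Hypothetical sketch only; the real helper lives in package/src/i18n and is not shown in this diff.
// Locale data would come from src/i18n/locales/*.json; the key and message below are illustrative.
const en = { 'backup.steps.storage.found': 'Found {count} bucket(s)' };

function t(key, vars = {}) {
  const template = en[key] ?? key; // fall back to the raw key if untranslated
  return template.replace(/\{(\w+)\}/g, (_, name) =>
    name in vars ? String(vars[name]) : `{${name}}`);
}

// Pattern repeated in every step below: prefer a globally registered translator, fall back to t.
const getT = global.smoonbI18n?.t || t;
console.log(` - ${getT('backup.steps.storage.found', { count: 3 })}`);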
package/src/commands/backup/steps/06-storage.js
@@ -5,6 +5,7 @@ const AdmZip = require('adm-zip');
  const { createClient } = require('@supabase/supabase-js');
  const { ensureDir, writeJson } = require('../../../utils/fsx');
  const { confirm } = require('../../../utils/prompt');
+ const { t } = require('../../../i18n');

  /**
  * Etapa 6: Backup Storage via Supabase API
@@ -12,10 +13,11 @@ const { confirm } = require('../../../utils/prompt');
  */
  module.exports = async ({ projectId, accessToken, backupDir, supabaseUrl, supabaseServiceKey }) => {
  try {
+ const getT = global.smoonbI18n?.t || t;
  const storageDir = path.join(backupDir, 'storage');
  await ensureDir(storageDir);

- console.log(chalk.white(' - Listando buckets de Storage via Management API...'));
+ console.log(chalk.white(` - ${getT('backup.steps.storage.listing')}`));

  // Usar fetch direto para Management API com Personal Access Token
  const storageResponse = await fetch(`https://api.supabase.com/v1/projects/${projectId}/storage/buckets`, {
@@ -26,25 +28,25 @@ module.exports = async ({ projectId, accessToken, backupDir, supabaseUrl, supaba
  });

  if (!storageResponse.ok) {
- console.log(chalk.yellow(` ⚠️ Erro ao listar buckets: ${storageResponse.status} ${storageResponse.statusText}`));
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.storage.listBucketsError', { status: storageResponse.status, statusText: storageResponse.statusText })}`));
  return { success: false, buckets: [] };
  }

  const buckets = await storageResponse.json();

  if (!buckets || buckets.length === 0) {
- console.log(chalk.white(' - Nenhum bucket encontrado'));
+ console.log(chalk.white(` - ${getT('backup.steps.storage.noBuckets')}`));
  await writeJson(path.join(storageDir, 'README.md'), {
- message: 'Nenhum bucket de Storage encontrado neste projeto'
+ message: getT('backup.steps.storage.noBucketsMessage')
  });
  return { success: true, buckets: [] };
  }

- console.log(chalk.white(` - Encontrados ${buckets.length} buckets`));
+ console.log(chalk.white(` - ${getT('backup.steps.storage.found', { count: buckets.length })}`));

  // Validar credenciais do Supabase para download de arquivos
  if (!supabaseUrl || !supabaseServiceKey) {
- console.log(chalk.yellow(' ⚠️ Credenciais do Supabase não disponíveis. Fazendo backup apenas de metadados...'));
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.storage.credentialsNotAvailable')}`));
  return await backupMetadataOnly(buckets, storageDir, projectId, accessToken);
  }

@@ -64,7 +66,7 @@ module.exports = async ({ projectId, accessToken, backupDir, supabaseUrl, supaba

  for (const bucket of buckets || []) {
  try {
- console.log(chalk.white(` - Processando bucket: ${bucket.name}`));
+ console.log(chalk.white(` - ${getT('backup.steps.storage.processing', { bucketName: bucket.name })}`));

  // Listar objetos do bucket via Management API com Personal Access Token
  const objectsResponse = await fetch(`https://api.supabase.com/v1/projects/${projectId}/storage/buckets/${bucket.name}/objects`, {
@@ -97,12 +99,12 @@ module.exports = async ({ projectId, accessToken, backupDir, supabaseUrl, supaba
  await ensureDir(bucketDir);

  // Listar todos os arquivos recursivamente usando Supabase client
- console.log(chalk.white(` - Listando arquivos do bucket ${bucket.name}...`));
+ console.log(chalk.white(` - ${getT('backup.steps.storage.listingFiles', { bucketName: bucket.name })}`));
  const allFiles = await listAllFilesRecursively(supabase, bucket.name, '');

  let filesDownloaded = 0;
  if (allFiles.length > 0) {
- console.log(chalk.white(` - Baixando ${allFiles.length} arquivo(s) do bucket ${bucket.name}...`));
+ console.log(chalk.white(` - ${getT('backup.steps.storage.downloading', { count: allFiles.length, bucketName: bucket.name })}`));

  for (const filePath of allFiles) {
  try {
@@ -112,7 +114,7 @@ module.exports = async ({ projectId, accessToken, backupDir, supabaseUrl, supaba
  .download(filePath);

  if (downloadError) {
- console.log(chalk.yellow(` ⚠️ Erro ao baixar ${filePath}: ${downloadError.message}`));
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.storage.downloadError', { path: filePath, message: downloadError.message })}`));
  continue;
  }

@@ -129,10 +131,10 @@ module.exports = async ({ projectId, accessToken, backupDir, supabaseUrl, supaba

  // Mostrar progresso a cada 10 arquivos ou se for o último
  if (filesDownloaded % 10 === 0 || filesDownloaded === allFiles.length) {
- console.log(chalk.white(` - Baixados ${filesDownloaded}/${allFiles.length} arquivo(s)...`));
+ console.log(chalk.white(` - ${getT('backup.steps.storage.downloaded', { current: filesDownloaded, total: allFiles.length })}`));
  }
  } catch (fileError) {
- console.log(chalk.yellow(` ⚠️ Erro ao processar arquivo ${filePath}: ${fileError.message}`));
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.storage.processFileError', { path: filePath, message: fileError.message })}`));
  }
  }
  }
@@ -145,14 +147,14 @@ module.exports = async ({ projectId, accessToken, backupDir, supabaseUrl, supaba
  totalFiles: allFiles.length
  });

- console.log(chalk.green(` ✅ Bucket ${bucket.name}: ${filesDownloaded}/${allFiles.length} arquivo(s) baixado(s)`));
+ console.log(chalk.green(` ✅ ${getT('backup.steps.storage.bucketDone', { bucketName: bucket.name, downloaded: filesDownloaded, total: allFiles.length })}`));
  } catch (error) {
- console.log(chalk.yellow(` ⚠️ Erro ao processar bucket ${bucket.name}: ${error.message}`));
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.storage.processBucketError', { bucketName: bucket.name, message: error.message })}`));
  }
  }

  // Criar ZIP no padrão do Dashboard: {project-id}.storage.zip
- console.log(chalk.white('\n - Criando arquivo ZIP no padrão do Dashboard...'));
+ console.log(chalk.white(`\n - ${getT('backup.steps.storage.creatingZip')}`));
  const zipFileName = `${projectId}.storage.zip`;
  const zipFilePath = path.join(backupDir, zipFileName);

@@ -167,25 +169,24 @@ module.exports = async ({ projectId, accessToken, backupDir, supabaseUrl, supaba
  const zipStats = await fs.stat(zipFilePath);
  const zipSizeMB = (zipStats.size / (1024 * 1024)).toFixed(2);

- console.log(chalk.green(` ✅ Arquivo ZIP criado: ${zipFileName} (${zipSizeMB} MB)`));
+ console.log(chalk.green(` ✅ ${getT('backup.steps.storage.zipCreated', { fileName: zipFileName, size: zipSizeMB })}`));

  // Perguntar ao usuário se deseja limpar a estrutura temporária
- const tempDirName = path.basename(tempStorageDir);
- const shouldCleanup = await confirm(` Deseja limpar ${tempDirName} após o backup`, false);
+ const shouldCleanup = await confirm(` ${getT('backup.steps.storage.cleanup')}`, false);

  if (shouldCleanup) {
- console.log(chalk.white(` - Limpando estrutura temporária...`));
+ console.log(chalk.white(` - ${getT('backup.steps.storage.cleanupRemoving')}`));
  try {
  await fs.rm(tempStorageDir, { recursive: true, force: true });
- console.log(chalk.green(` ✅ Estrutura temporária removida`));
+ console.log(chalk.green(` ✅ ${getT('backup.steps.storage.cleanupRemoved')}`));
  } catch (cleanupError) {
- console.log(chalk.yellow(` ⚠️ Erro ao limpar estrutura temporária: ${cleanupError.message}`));
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.storage.cleanupError', { message: cleanupError.message })}`));
  }
  } else {
- console.log(chalk.white(` ℹ️ Estrutura temporária mantida em: ${path.relative(process.cwd(), tempStorageDir)}`));
+ console.log(chalk.white(` ℹ️ ${getT('backup.steps.storage.tempKept', { path: path.relative(process.cwd(), tempStorageDir) })}`));
  }

- console.log(chalk.green(`✅ Storage backupado: ${processedBuckets.length} buckets, ${totalFilesDownloaded} arquivo(s) baixado(s)`));
+ console.log(chalk.green(`✅ ${getT('backup.steps.storage.done', { buckets: processedBuckets.length, files: totalFilesDownloaded })}`));
  return {
  success: true,
  buckets: processedBuckets,
@@ -195,7 +196,8 @@ module.exports = async ({ projectId, accessToken, backupDir, supabaseUrl, supaba
  tempDirCleaned: shouldCleanup
  };
  } catch (error) {
- console.log(chalk.yellow(`⚠️ Erro no backup do Storage: ${error.message}`));
+ const getT = global.smoonbI18n?.t || t;
+ console.log(chalk.yellow(`⚠️ ${getT('backup.steps.storage.error', { message: error.message })}`));
  return { success: false, buckets: [] };
  }
  };
@@ -254,6 +256,7 @@ async function backupMetadataOnly(buckets, storageDir, projectId, accessToken) {
  */
  async function listAllFilesRecursively(supabase, bucketName, folderPath = '') {
  const allFiles = [];
+ const getT = global.smoonbI18n?.t || t;

  try {
  // Listar arquivos e pastas no caminho atual
@@ -265,7 +268,7 @@ async function listAllFilesRecursively(supabase, bucketName, folderPath = '') {
  });

  if (error) {
- console.log(chalk.yellow(` ⚠️ Erro ao listar ${folderPath || 'raiz'}: ${error.message}`));
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.storage.listError', { path: folderPath || 'raiz', message: error.message })}`));
  return allFiles;
  }

@@ -286,7 +289,7 @@ async function listAllFilesRecursively(supabase, bucketName, folderPath = '') {
  }
  }
  } catch (error) {
- console.log(chalk.yellow(` ⚠️ Erro ao processar ${folderPath || 'raiz'}: ${error.message}`));
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.storage.processError', { path: folderPath || 'raiz', message: error.message })}`));
  }

  return allFiles;
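For context, listAllFilesRecursively (touched above only for its log lines) walks a bucket with the supabase-js storage client. The sketch below shows one way such a traversal can work, assuming folders are the list() entries whose id is null; the helper's actual list() options and folder check are not fully visible in these hunks.

// Sketch of a recursive bucket walk with @supabase/supabase-js v2.
// Assumption: list() entries with a null id are folders; the real helper may differ.
async function listAllFilesRecursivelySketch(supabase, bucketName, folderPath = '') {
  const allFiles = [];
  const { data, error } = await supabase.storage
    .from(bucketName)
    .list(folderPath, { limit: 1000 });
  if (error) return allFiles;
  for (const entry of data || []) {
    const fullPath = folderPath ? `${folderPath}/${entry.name}` : entry.name;
    if (entry.id === null) {
      // Folder: recurse one level deeper.
      allFiles.push(...(await listAllFilesRecursivelySketch(supabase, bucketName, fullPath)));
    } else {
      allFiles.push(fullPath);
    }
  }
  return allFiles;
}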
package/src/commands/backup/steps/07-custom-roles.js
@@ -3,6 +3,7 @@ const path = require('path');
  const fs = require('fs').promises;
  const { promisify } = require('util');
  const { exec } = require('child_process');
+ const { t } = require('../../../i18n');

  const execAsync = promisify(exec);

@@ -11,7 +12,8 @@ const execAsync = promisify(exec);
  */
  module.exports = async ({ databaseUrl, backupDir, accessToken }) => {
  try {
- console.log(chalk.white(' - Exportando Custom Roles via Docker...'));
+ const getT = global.smoonbI18n?.t || t;
+ console.log(chalk.white(` - ${getT('backup.steps.roles.exporting')}`));

  const customRolesFile = path.join(backupDir, 'custom-roles.sql');

@@ -28,11 +30,12 @@ module.exports = async ({ databaseUrl, backupDir, accessToken }) => {

  return { success: true, roles: [{ filename: 'custom-roles.sql', sizeKB }] };
  } catch (error) {
- console.log(chalk.yellow(` ⚠️ Erro ao exportar Custom Roles via Docker: ${error.message}`));
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.roles.exportError', { message: error.message })}`));
  return { success: false, roles: [] };
  }
  } catch (error) {
- console.log(chalk.yellow(` ⚠️ Erro no backup dos Custom Roles: ${error.message}`));
+ const getT = global.smoonbI18n?.t || t;
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.roles.error', { message: error.message })}`));
  return { success: false, roles: [] };
  }
  };
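Only the log strings change in this step; the export itself sits outside the visible hunks. As a rough illustration of the kind of command 07-custom-roles.js describes (a roles-only dump run through Docker into custom-roles.sql), assuming pg_dumpall in a postgres container; the package's actual image, flags, and any role filtering are not shown in this diff.

const { promisify } = require('util');
const { exec } = require('child_process');
const execAsync = promisify(exec);

// Assumption: a roles-only dump via a throwaway postgres container.
// The real command used by 07-custom-roles.js is not part of this diff.
async function exportCustomRolesSketch(databaseUrl, outFile) {
  const cmd = `docker run --rm postgres:15 pg_dumpall --roles-only --dbname="${databaseUrl}" > "${outFile}"`;
  await execAsync(cmd, { maxBuffer: 64 * 1024 * 1024 });
}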
package/src/commands/backup/steps/08-edge-functions.js
@@ -4,6 +4,7 @@ const fs = require('fs').promises;
  const { ensureDir, writeJson } = require('../../../utils/fsx');
  const { extractPasswordFromDbUrl, ensureCleanLink } = require('../../../utils/supabaseLink');
  const { cleanDir } = require('../../../utils/fsExtra');
+ const { t } = require('../../../i18n');

  /**
  * Etapa 8: Backup Edge Functions via Docker (reset link + limpeza opcional)
@@ -42,21 +43,23 @@ module.exports = async (context) => {

  // Se o usuário escolheu limpar APÓS, podemos limpar ANTES também para garantir ambiente limpo
  // Mas se escolheu NÃO limpar, preservamos o que já existe
+ const getT = global.smoonbI18n?.t || t;
+
  if (shouldCleanAfter) {
  // Limpar antes se o usuário escolheu limpar após (garante ambiente limpo)
  await cleanDir(supabaseFunctionsDir);
- console.log(chalk.white(' - Pasta supabase/functions limpa antes do backup.'));
+ console.log(chalk.white(` - ${getT('backup.steps.functions.cleanBefore')}`));
  } else {
  // Apenas garantir que o diretório existe
  await fs.mkdir(supabaseFunctionsDir, { recursive: true });
  if (existingFunctionsBefore.length > 0) {
- console.log(chalk.white(` - Preservando ${existingFunctionsBefore.length} função(ões) existente(s) na pasta supabase/functions.`));
+ console.log(chalk.white(` - ${getT('backup.steps.functions.preserving', { count: existingFunctionsBefore.length })}`));
  } else {
- console.log(chalk.white(' - Pasta supabase/functions preparada (será preservada após backup).'));
+ console.log(chalk.white(` - ${getT('backup.steps.functions.prepared')}`));
  }
  }

- console.log(chalk.white(' - Listando Edge Functions via Management API...'));
+ console.log(chalk.white(` - ${getT('backup.steps.functions.listing')}`));

  // Usar fetch direto para Management API com Personal Access Token
  const functionsResponse = await fetch(`https://api.supabase.com/v1/projects/${projectId}/functions`, {
@@ -67,16 +70,16 @@ module.exports = async (context) => {
  });

  if (!functionsResponse.ok) {
- console.log(chalk.yellow(` ⚠️ Erro ao listar Edge Functions: ${functionsResponse.status} ${functionsResponse.statusText}`));
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.functions.listError', { status: functionsResponse.status, statusText: functionsResponse.statusText })}`));
  return { success: false, reason: 'api_error', functions: [] };
  }

  const functions = await functionsResponse.json();

  if (!functions || functions.length === 0) {
- console.log(chalk.white(' - Nenhuma Edge Function encontrada'));
+ console.log(chalk.white(` - ${getT('backup.steps.functions.noneFound')}`));
  await writeJson(path.join(functionsDir, 'README.md'), {
- message: 'Nenhuma Edge Function encontrada neste projeto'
+ message: getT('backup.steps.functions.noFunctionsMessage')
  });
  return { success: true, reason: 'no_functions', functions: [] };
  }
@@ -154,7 +157,7 @@ module.exports = async (context) => {
  });

  } catch (error) {
- console.log(chalk.yellow(` ⚠️ Erro ao baixar ${func.name}: ${error.message}`));
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.functions.downloadError', { funcName: func.name, message: error.message })}`));
  errorCount++;
  }
  }
@@ -167,11 +170,11 @@ module.exports = async (context) => {
  // Nota: shouldCleanAfter já foi definido acima
  if (shouldCleanAfter) {
  await cleanDir(supabaseFunctionsDir);
- console.log(chalk.white(' - supabase/functions limpo após o backup.'));
+ console.log(chalk.white(` - ${getT('backup.steps.functions.cleanAfter')}`));
  } else {
  // Preservar tudo: tanto as funções que já existiam quanto as que foram baixadas
  // As funções baixadas não foram removidas individualmente (linha acima foi ajustada)
- console.log(chalk.white(' - supabase/functions preservada conforme solicitado.'));
+ console.log(chalk.white(` - ${getT('backup.steps.functions.preserved')}`));
  }

  return {
@@ -185,7 +188,8 @@ module.exports = async (context) => {
  };

  } catch (error) {
- console.log(chalk.yellow(` ⚠️ Erro durante backup de Edge Functions: ${error.message}`));
+ const getT = global.smoonbI18n?.t || t;
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.functions.error', { message: error.message })}`));
  console.log('⏭️ Continuando com outros componentes...');
  return { success: false, reason: 'download_error', error: error.message, functions: [] };
  }
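The listing call above goes straight to the Supabase Management API with a Personal Access Token. Pulled out of the step for clarity, a self-contained version of that request looks roughly like this (error handling simplified; the per-function download that follows in 08-edge-functions.js is outside the visible hunks):

// Sketch: list Edge Functions for a project via the Management API,
// authenticated with a Personal Access Token as in the step above.
async function listEdgeFunctions(projectId, accessToken) {
  const res = await fetch(`https://api.supabase.com/v1/projects/${projectId}/functions`, {
    headers: {
      Authorization: `Bearer ${accessToken}`,
      'Content-Type': 'application/json',
    },
  });
  if (!res.ok) {
    throw new Error(`Management API returned ${res.status} ${res.statusText}`);
  }
  return res.json(); // array of function descriptors (each has at least a name, used above)
}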
package/src/commands/backup/steps/09-supabase-temp.js
@@ -2,6 +2,7 @@ const chalk = require('chalk');
  const path = require('path');
  const { copyDirSafe } = require('../../../utils/fsExtra');
  const { cleanDir } = require('../../../utils/fsExtra');
+ const { t } = require('../../../i18n');

  /**
  * Etapa 9: Backup Supabase .temp (NOVA ETAPA INDEPENDENTE)
@@ -9,17 +10,18 @@ const { cleanDir } = require('../../../utils/fsExtra');
  module.exports = async (context) => {
  const { backupDir } = context;
  try {
+ const getT = global.smoonbI18n?.t || t;
  const tempDir = path.join(process.cwd(), 'supabase', '.temp');
  const backupTempDir = path.join(backupDir, 'supabase-temp');

  const fileCount = await copyDirSafe(tempDir, backupTempDir);
-
- console.log(chalk.white(` - Copiando supabase/.temp backups/backup-${path.basename(backupDir)}/supabase-temp (${fileCount} arquivos)...`));
+ const relativePath = path.relative(process.cwd(), backupTempDir);
+ console.log(chalk.white(` - ${getT('backup.steps.temp.copying', { path: relativePath, count: fileCount })}`));

  if (fileCount === 0) {
- console.log(chalk.white(' - Nenhum arquivo encontrado em supabase/.temp'));
+ console.log(chalk.white(` - ${getT('backup.steps.temp.noFiles')}`));
  } else {
- console.log(chalk.green(` ✅ ${fileCount} arquivo(s) copiado(s)`));
+ console.log(chalk.green(` ✅ ${getT('backup.steps.temp.copied', { count: fileCount })}`));
  }

  // Usar flag de limpeza do contexto (já foi perguntado no início)
@@ -27,7 +29,7 @@ module.exports = async (context) => {

  if (shouldClean) {
  await cleanDir(tempDir);
- console.log(chalk.white(' - supabase/.temp apagado.'));
+ console.log(chalk.white(` - ${getT('backup.steps.temp.cleaned')}`));
  }

  return {
@@ -35,7 +37,8 @@ module.exports = async (context) => {
  file_count: fileCount
  };
  } catch (error) {
- console.log(chalk.yellow(` ⚠️ Erro no backup do supabase/.temp: ${error.message}`));
+ const getT = global.smoonbI18n?.t || t;
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.temp.error', { message: error.message })}`));
  return { success: false };
  }
  };
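copyDirSafe and cleanDir come from utils/fsExtra, which this diff does not touch. Judging only by how the step uses it (it returns the number of files copied and appears to tolerate a missing source directory), a helper of that shape could look like the sketch below; the real implementation may differ.

const fs = require('fs').promises;
const path = require('path');

// Hypothetical shape of copyDirSafe: recursively copy src into dest,
// return how many files were copied, and return 0 if src does not exist.
async function copyDirSafeSketch(src, dest) {
  let entries;
  try {
    entries = await fs.readdir(src, { withFileTypes: true });
  } catch {
    return 0; // source missing: nothing to copy
  }
  await fs.mkdir(dest, { recursive: true });
  let count = 0;
  for (const entry of entries) {
    const from = path.join(src, entry.name);
    const to = path.join(dest, entry.name);
    if (entry.isDirectory()) {
      count += await copyDirSafeSketch(from, to);
    } else {
      await fs.copyFile(from, to);
      count += 1;
    }
  }
  return count;
}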
package/src/commands/backup/steps/10-migrations.js
@@ -3,13 +3,15 @@ const path = require('path');
  const { execSync } = require('child_process');
  const { extractPasswordFromDbUrl, ensureCleanLink } = require('../../../utils/supabaseLink');
  const { cleanDir, countFiles, copyDirSafe } = require('../../../utils/fsExtra');
+ const { t } = require('../../../i18n');

  /**
- * Etapa 10: Backup Migrations (NOVA ETAPA INDEPENDENTE)
+ * Etapa 10: Migrations Backup (NOVA ETAPA INDEPENDENTE)
  */
  module.exports = async (context) => {
  const { projectId, accessToken, databaseUrl, backupDir } = context;
  try {
+ const getT = global.smoonbI18n?.t || t;
  // Reset de link ao projeto de ORIGEM
  const dbPassword = extractPasswordFromDbUrl(databaseUrl);
  await ensureCleanLink(projectId, accessToken, dbPassword);
@@ -17,10 +19,10 @@ module.exports = async (context) => {
  // Limpar migrations local (opcional, mas recomendado para garantir servidor como fonte da verdade)
  const migrationsDir = path.join(process.cwd(), 'supabase', 'migrations');
  await cleanDir(migrationsDir);
- console.log(chalk.white(' - Limpando supabase/migrations...'));
+ console.log(chalk.white(` - ${getT('backup.steps.migrations.cleaning')}`));

  // Baixar todas as migrations do servidor usando migration fetch
- console.log(chalk.white(' - Baixando todas as migrations do servidor usando migration fetch...'));
+ console.log(chalk.white(` - ${getT('backup.steps.migrations.downloading')}`));

  const env = {
  ...process.env,
@@ -36,22 +38,23 @@ module.exports = async (context) => {
  env
  });
  } catch (error) {
- console.log(chalk.yellow(` ⚠️ Erro ao executar migration fetch: ${error.message}`));
- console.log(chalk.yellow(' 💡 Verifique se o projeto está linkado corretamente e se o token está válido.'));
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.migrations.fetchError', { message: error.message })}`));
+ console.log(chalk.yellow(` 💡 ${getT('backup.steps.migrations.fetchTip')}`));
  return { success: false };
  }

  // Contar arquivos baixados
  const fileCount = await countFiles(migrationsDir);
- console.log(chalk.white(` - Arquivos baixados: ${fileCount} migrations`));
+ console.log(chalk.white(` - ${getT('backup.steps.migrations.downloaded', { count: fileCount })}`));

  // Copiar migrations para o backup
  const backupMigrationsDir = path.join(backupDir, 'migrations');
  const copiedCount = await copyDirSafe(migrationsDir, backupMigrationsDir);
- console.log(chalk.white(` - Copiando supabase/migrations → backups/backup-${path.basename(backupDir)}/migrations (${copiedCount} arquivos)...`));
+ const relativePath = path.relative(process.cwd(), backupMigrationsDir);
+ console.log(chalk.white(` - ${getT('backup.steps.migrations.copying', { path: relativePath, count: copiedCount })}`));

  if (copiedCount > 0) {
- console.log(chalk.green(` ✅ ${copiedCount} migration(s) copiada(s)`));
+ console.log(chalk.green(` ✅ ${getT('backup.steps.migrations.copied', { count: copiedCount })}`));
  }

  // Usar flag de limpeza do contexto (já foi perguntado no início)
@@ -59,7 +62,7 @@ module.exports = async (context) => {

  if (shouldClean) {
  await cleanDir(migrationsDir);
- console.log(chalk.gray(' - supabase/migrations apagado.'));
+ console.log(chalk.gray(` - ${getT('backup.steps.migrations.cleaned')}`));
  }

  return {
@@ -67,7 +70,8 @@ module.exports = async (context) => {
  file_count: copiedCount
  };
  } catch (error) {
- console.log(chalk.yellow(` ⚠️ Erro no backup das migrations: ${error.message}`));
+ const getT = global.smoonbI18n?.t || t;
+ console.log(chalk.yellow(` ⚠️ ${getT('backup.steps.migrations.error', { message: error.message })}`));
  return { success: false };
  }
  };
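The execSync call whose error handling appears above shells out to the Supabase CLI; the command string itself sits outside the visible hunks. A sketch of that invocation, assuming the CLI is run through npx and authenticated via the SUPABASE_ACCESS_TOKEN environment variable:

const { execSync } = require('child_process');

// Assumption: the step runs `supabase migration fetch` against the linked project,
// passing the Personal Access Token through the environment. Flags may differ in the real code.
function fetchMigrationsSketch(accessToken) {
  const env = {
    ...process.env,
    SUPABASE_ACCESS_TOKEN: accessToken,
  };
  execSync('npx supabase migration fetch', {
    stdio: 'inherit', // stream CLI output to the console
    env,
  });
}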
package/src/commands/check.js
@@ -98,13 +98,15 @@ async function checkDatabaseConnection(databaseUrl) {
  `psql "${databaseUrl}" -t -c "SELECT 1 as test_connection;"`
  );

+ const getT = global.smoonbI18n?.t || t;
  if (stdout.trim() === '1') {
- return { status: 'ok', message: 'Conexão estabelecida com sucesso' };
+ return { status: 'ok', message: getT('check.connectionSuccess') };
  } else {
- return { status: 'error', message: 'Resposta inesperada da database' };
+ return { status: 'error', message: getT('check.unexpectedResponse') };
  }
  } catch (error) {
- return { status: 'error', message: `Falha na conexão: ${error.message}` };
+ const getT = global.smoonbI18n?.t || t;
+ return { status: 'error', message: getT('check.connectionError', { message: error.message }) };
  }
  }
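Since the hunk is small, here is checkDatabaseConnection as it reads after this change, assembled for readability; the imports and the require path for t are assumptions, as they are not part of the visible hunk.

const { promisify } = require('util');
const { exec } = require('child_process');
const execAsync = promisify(exec);
const { t } = require('../i18n'); // assumed: the import is not visible in the hunk above

async function checkDatabaseConnection(databaseUrl) {
  try {
    const { stdout } = await execAsync(
      `psql "${databaseUrl}" -t -c "SELECT 1 as test_connection;"`
    );
    const getT = global.smoonbI18n?.t || t;
    if (stdout.trim() === '1') {
      return { status: 'ok', message: getT('check.connectionSuccess') };
    } else {
      return { status: 'error', message: getT('check.unexpectedResponse') };
    }
  } catch (error) {
    const getT = global.smoonbI18n?.t || t;
    return { status: 'error', message: getT('check.connectionError', { message: error.message }) };
  }
}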