@deinossrl/dgp-agent 1.4.39 → 1.4.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +23 -0
- package/index.mjs +318 -11
- package/package.json +4 -1
package/CHANGELOG.md
ADDED
@@ -0,0 +1,23 @@
+# Changelog - DGP Agent
+
+## [1.4.41] - 2026-01-12
+
+### Added
+- Full support for `pg_dump` commands issued from the web platform
+- Supabase Storage integration so backups are uploaded automatically
+- New dependency: `@supabase/supabase-js` v2.39.0
+
+### Fixed
+- Fixed the command schema: lookups now correctly target `dgp.agent_commands`
+- Added `Accept-Profile` and `Content-Profile` headers for access to the dgp schema
+- Commands URL updated from `dgp.agent_commands` to `agent_commands` plus schema headers
+
+### Changed
+- Improved command handling with support for multiple schemas in Supabase
+
+## [1.4.40] - 2026-01-10
+
+### Previous Release
+- Base Git status reporting functionality
+- deploy, git_commit_push and test_connection commands
+- AI (Claude) integration for automated tasks
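The `Accept-Profile` / `Content-Profile` fix called out above is the standard PostgREST mechanism for reaching tables outside the API's default schema: the URL names the bare table (`agent_commands`) and the headers select the `dgp` schema. Below is a minimal sketch of a request built that way, assuming Node 18's global fetch and a hypothetical `status=eq.pending` filter (neither the filter nor the agent's exact polling query appears in this diff):

    // Poll dgp.agent_commands through the /rest/v1/agent_commands endpoint
    const res = await fetch(`${CONFIG.commandsUrl}?status=eq.pending`, {  // filter is illustrative
      headers: {
        'apikey': CONFIG.supabaseKey,
        'Authorization': `Bearer ${CONFIG.supabaseKey}`,
        'Accept-Profile': 'dgp',   // read from the dgp schema
        'Content-Profile': 'dgp',  // write to the dgp schema
      },
    });
    const pendingCommands = await res.json();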
package/index.mjs
CHANGED
@@ -25,9 +25,10 @@
 
 import { execSync, spawn, spawnSync } from 'child_process';
 import { hostname, homedir } from 'os';
-import { existsSync, readFileSync, writeFileSync, mkdirSync, chmodSync, createWriteStream } from 'fs';
+import { existsSync, readFileSync, writeFileSync, mkdirSync, chmodSync, createWriteStream, statSync, unlinkSync } from 'fs';
 import { join, dirname } from 'path';
 import https from 'https';
+import { createClient } from '@supabase/supabase-js';
 
 // ============================================
 // CONFIG FILE MANAGEMENT
@@ -352,13 +353,13 @@ function getPlinkPath() {
 }
 
 // Agent version
-const AGENT_VERSION = '1.4.39';
+const AGENT_VERSION = '1.4.41';
 let AGENT_MODE = 'smart'; // Always intelligent
 
 // Configuration (priority: env vars > config file > platform config > defaults)
 const CONFIG = {
   apiUrl: process.env.DGP_API_URL || fileConfig.apiUrl || 'https://asivayhbrqennwiwttds.supabase.co/functions/v1/dgp-agent-status',
-  commandsUrl: process.env.DGP_COMMANDS_URL || fileConfig.commandsUrl || 'https://asivayhbrqennwiwttds.supabase.co/rest/v1/dgp.agent_commands',
+  commandsUrl: process.env.DGP_COMMANDS_URL || fileConfig.commandsUrl || 'https://asivayhbrqennwiwttds.supabase.co/rest/v1/agent_commands',
   supabaseKey: process.env.DGP_SUPABASE_KEY || fileConfig.supabaseKey || 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImFzaXZheWhicnFlbm53aXd0dGRzIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NjczMDAwOTcsImV4cCI6MjA4Mjg3NjA5N30.s3a7dR-dPkEXI7B2lUTUXU69923hhuX6meheNeo5EKA',
   interval: parseInt(process.env.DGP_INTERVAL || fileConfig.interval || '30', 10),
   commandPollInterval: parseInt(process.env.DGP_COMMAND_POLL_INTERVAL || fileConfig.commandPollInterval || '10', 10),
@@ -783,6 +784,8 @@ async function runAIMode() {
     headers: {
       'apikey': CONFIG.supabaseKey,
       'Authorization': `Bearer ${CONFIG.supabaseKey}`,
+      'Accept-Profile': 'dgp',
+      'Content-Profile': 'dgp',
     },
   });
 
@@ -887,6 +890,61 @@ function shell(command, options = {}) {
 // Alias for compatibility
 const shellSync = shell;
 
+/**
+ * Checks whether an error is recoverable (timeouts, connection problems, etc.)
+ */
+function isRecoverableError(error) {
+  const msg = error.message.toLowerCase();
+  return (
+    msg.includes('timeout') ||
+    msg.includes('timed out') ||
+    msg.includes('connection refused') ||
+    msg.includes('network') ||
+    msg.includes('temporarily unavailable') ||
+    (error.code === 255 && msg.includes('ssh'))
+  );
+}
+
+/**
+ * Runs a shell command asynchronously with automatic retries
+ */
+async function shellAsyncWithRetry(command, options = {}, maxRetries = 3, baseDelay = 2000) {
+  let lastError;
+
+  for (let attempt = 1; attempt <= maxRetries; attempt++) {
+    try {
+      logInfo(`[Attempt ${attempt}/${maxRetries}] Executing: ${command.substring(0, 80)}...`);
+      const result = await shellAsync(command, options);
+      if (attempt > 1) {
+        logSuccess(`✓ Command succeeded after ${attempt} attempts`);
+      }
+      return result;
+    } catch (error) {
+      lastError = error;
+
+      // If the error is not recoverable, fail immediately
+      if (!isRecoverableError(error)) {
+        throw error;
+      }
+
+      // If this was the last attempt, fail
+      if (attempt === maxRetries) {
+        logError(`✗ Command failed after ${maxRetries} attempts`);
+        throw error;
+      }
+
+      // Compute the delay with exponential backoff
+      const delay = baseDelay * Math.pow(2, attempt - 1);
+      log(`⚠️ Recoverable error (${error.message}), retrying in ${delay}ms...`, 'yellow');
+
+      // Wait before retrying
+      await new Promise(resolve => setTimeout(resolve, delay));
+    }
+  }
+
+  throw lastError;
+}
+
 /**
  * Runs a shell command asynchronously with real-time output
  */
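As a usage note, the deploy steps further down call this new helper with per-step retry budgets; a sketch of the resulting behaviour with the defaults shown above (the ssh command string here is purely illustrative):

    // With maxRetries = 3 and baseDelay = 2000 the waits between attempts are 2000 ms, then 4000 ms.
    // With maxRetries = 5 and baseDelay = 3000 (as the git steps use) they grow to 3 s, 6 s, 12 s, 24 s.
    const output = await shellAsyncWithRetry('ssh deploy@203.0.113.7 "uptime"', {}, 3, 2000);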
@@ -1064,6 +1122,8 @@ async function getPendingCommands() {
     headers: {
       'apikey': CONFIG.supabaseKey,
       'Authorization': `Bearer ${CONFIG.supabaseKey}`,
+      'Accept-Profile': 'dgp',
+      'Content-Profile': 'dgp',
     },
   });
 
@@ -1115,6 +1175,8 @@ async function updateCommandLogs(commandId, logs) {
       'Authorization': `Bearer ${CONFIG.supabaseKey}`,
       'Content-Type': 'application/json',
       'Prefer': 'return=minimal',
+      'Accept-Profile': 'dgp',
+      'Content-Profile': 'dgp',
     },
     body: JSON.stringify({
       result: { logs: logs },
@@ -1225,10 +1287,10 @@ async function executeDeploy(command) {
       ? '/var/www/tenminuteia-prod/'
       : '/var/www/tenminuteia-staging/');
 
-    // Build the SSH command base
+    // Build the SSH command base with a long timeout for slow connections
     const sshBase = sshKeyPath
-      ? `ssh -i ${sshKeyPath} -o StrictHostKeyChecking=no ${ssh_user}@${server_host}`
-      : `ssh ${ssh_user}@${server_host}`;
+      ? `ssh -i ${sshKeyPath} -o StrictHostKeyChecking=no -o ConnectTimeout=60 -o ServerAliveInterval=10 ${ssh_user}@${server_host}`
+      : `ssh -o ConnectTimeout=60 -o ServerAliveInterval=10 ${ssh_user}@${server_host}`;
 
     // Step 1: Git clone/pull on server
     currentStep = 'git_setup_server';
@@ -1240,7 +1302,7 @@ async function executeDeploy(command) {
     try {
       logInfo(`Attempting to update existing repository...`);
       const gitPullCmd = `${sshBase} "cd ${projectFolder} && git fetch origin && git checkout ${branch} && git pull origin ${branch}"`;
-      await
+      await shellAsyncWithRetry(gitPullCmd, {}, 5, 3000); // 5 retries, 3s base delay
       logSuccess(`Code updated on server`);
     } catch (pullError) {
       // If the pull fails, the repository probably does not exist yet
@@ -1260,7 +1322,7 @@ async function executeDeploy(command) {
       // Create the parent directory and clone
       const parentDir = projectFolder.substring(0, projectFolder.lastIndexOf('/'));
       const cloneCmd = `${sshBase} "mkdir -p ${parentDir} && git clone ${repoUrl} ${projectFolder} && cd ${projectFolder} && git checkout ${branch}"`;
-      await
+      await shellAsyncWithRetry(cloneCmd, {}, 5, 3000); // 5 retries, 3s base delay
 
       logSuccess(`Repository cloned successfully`);
     }
@@ -1273,7 +1335,7 @@ async function executeDeploy(command) {
     steps.push({ step: currentStep, status: 'running' });
 
     const npmInstallCmd = `${sshBase} "cd ${projectFolder} && npm ci"`;
-    await
+    await shellAsyncWithRetry(npmInstallCmd, {}, 3, 2000); // 3 retries, 2s base delay
 
     steps[steps.length - 1].status = 'success';
     logSuccess(`Dependencies installed on server`);
@@ -1284,7 +1346,7 @@ async function executeDeploy(command) {
     steps.push({ step: currentStep, status: 'running' });
 
     const npmBuildCmd = `${sshBase} "cd ${projectFolder} && npm run build"`;
-    await
+    await shellAsyncWithRetry(npmBuildCmd, {}, 3, 2000); // 3 retries, 2s base delay
 
     steps[steps.length - 1].status = 'success';
     logSuccess(`Build completed on server`);
@@ -1295,7 +1357,7 @@ async function executeDeploy(command) {
     steps.push({ step: currentStep, status: 'running' });
 
     const reloadNginxCmd = `${sshBase} "sudo nginx -t && sudo systemctl reload nginx"`;
-    await
+    await shellAsyncWithRetry(reloadNginxCmd, {}, 3, 2000); // 3 retries, 2s base delay
 
     steps[steps.length - 1].status = 'success';
     logSuccess(`Nginx reloaded`);
@@ -1354,6 +1416,246 @@ async function executeDeploy(command) {
   }
 }
 
+/**
+ * Runs pg_dump to create a PostgreSQL backup
+ */
+async function executePgDump(command) {
+  const { id, params } = command;
+  const backupId = params.backup_id;
+  const environmentId = params.environment_id;
+
+  logCommand(`=== Ejecutando pg_dump (Backup) ===`);
+  logInfo(`Backup ID: ${backupId}`);
+  logInfo(`Environment ID: ${environmentId}`);
+
+  // Start log tracking
+  currentCommandId = id;
+  commandLogs = [];
+
+  // Create the backups directory if it does not exist
+  const backupsDir = join(CONFIG_DIR, 'backups');
+  if (!existsSync(backupsDir)) {
+    mkdirSync(backupsDir, { recursive: true });
+  }
+
+  let localFilePath = null;
+  let storagePath = null;
+
+  try {
+    await updateCommandStatus(id, 'running', {});
+    await addCommandLog('info', 'Iniciando backup de base de datos...');
+
+    // Determine the file extension from the format
+    const format = params.format || 'custom';
+    const extension = format === 'custom' ? 'dump' : format === 'tar' ? 'tar' : 'sql';
+
+    // Generate the local file name
+    const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, -5);
+    const fileName = `backup_${environmentId}_${timestamp}.${extension}`;
+    localFilePath = join(backupsDir, fileName);
+
+    // Build the pg_dump command
+    let pgDumpCmd = 'pg_dump';
+    const pgDumpArgs = [];
+
+    // Configure the connection according to its type
+    if (params.connection_type === 'postgresql') {
+      // Generic PostgreSQL
+      pgDumpArgs.push(`--host=${params.host}`);
+      pgDumpArgs.push(`--port=${params.port || 5432}`);
+      pgDumpArgs.push(`--username=${params.username}`);
+      pgDumpArgs.push(`--dbname=${params.database}`);
+
+      // If there is a password, set PGPASSWORD
+      if (params.password_encrypted) {
+        // TODO: decrypt the password if it is encrypted
+        process.env.PGPASSWORD = params.password_encrypted;
+      }
+    } else {
+      // Supabase
+      const projectRef = params.project_ref;
+      pgDumpArgs.push(`--host=db.${projectRef}.supabase.co`);
+      pgDumpArgs.push(`--port=5432`);
+      pgDumpArgs.push(`--username=postgres`);
+      pgDumpArgs.push(`--dbname=postgres`);
+
+      // For Supabase, the service_role_key is needed as the password
+      if (params.service_role_key) {
+        process.env.PGPASSWORD = params.service_role_key;
+      }
+    }
+
+    // Format options
+    if (format === 'custom') {
+      pgDumpArgs.push('--format=custom');
+    } else if (format === 'tar') {
+      pgDumpArgs.push('--format=tar');
+    } else {
+      pgDumpArgs.push('--format=plain');
+    }
+
+    // Content options
+    if (params.backup_type === 'schema') {
+      pgDumpArgs.push('--schema-only');
+    } else if (params.backup_type === 'data') {
+      pgDumpArgs.push('--data-only');
+    }
+
+    // Additional options
+    if (params.include_privileges) {
+      pgDumpArgs.push('--no-privileges');
+    }
+    if (params.include_owner) {
+      pgDumpArgs.push('--no-owner');
+    }
+
+    // Specific schemas
+    if (params.schemas && params.schemas.length > 0) {
+      params.schemas.forEach(schema => {
+        pgDumpArgs.push(`--schema=${schema}`);
+      });
+    }
+
+    // Specific tables
+    if (params.tables && params.tables.length > 0) {
+      params.tables.forEach(table => {
+        pgDumpArgs.push(`--table=${table}`);
+      });
+    }
+
+    // Output file
+    pgDumpArgs.push(`--file=${localFilePath}`);
+
+    // Run pg_dump
+    const fullCommand = `${pgDumpCmd} ${pgDumpArgs.join(' ')}`;
+    logCommand(`Ejecutando: pg_dump [conexión oculta]`);
+    await addCommandLog('command', 'Ejecutando pg_dump...');
+
+    await shellAsync(fullCommand);
+
+    // Verify that the file was created
+    if (!existsSync(localFilePath)) {
+      throw new Error('El archivo de backup no se generó correctamente');
+    }
+
+    const fileSize = statSync(localFilePath).size;
+    logSuccess(`Backup creado localmente (${(fileSize / 1024 / 1024).toFixed(2)} MB)`);
+    await addCommandLog('success', `Backup creado: ${(fileSize / 1024 / 1024).toFixed(2)} MB`);
+
+    // Upload to Supabase Storage
+    logInfo('Subiendo backup a Supabase Storage...');
+    await addCommandLog('info', 'Subiendo a Storage...');
+
+    const supabase = createClient(
+      CONFIG.apiUrl.replace('/functions/v1/dgp-agent-status', ''),
+      CONFIG.supabaseKey
+    );
+
+    // Use the storage_path from params (already defined in the DB)
+    storagePath = params.storage_path;
+    if (!storagePath) {
+      throw new Error('storage_path no definido en los parámetros');
+    }
+
+    // Read the file and upload it
+    const fileBuffer = readFileSync(localFilePath);
+    const { data: uploadData, error: uploadError } = await supabase.storage
+      .from('database-backups')
+      .upload(storagePath, fileBuffer, {
+        contentType: format === 'sql' ? 'application/sql' : 'application/octet-stream',
+        upsert: false
+      });
+
+    if (uploadError) {
+      throw new Error(`Error subiendo a Storage: ${uploadError.message}`);
+    }
+
+    logSuccess(`Archivo subido a Storage: ${storagePath}`);
+    await addCommandLog('success', `Subido a: ${storagePath}`);
+
+    // Update the record in db_backups
+    logInfo('Actualizando registro de backup...');
+    const { error: updateError } = await supabase
+      .schema('dgp')
+      .from('db_backups')
+      .update({
+        status: 'completed',
+        storage_path: storagePath,
+        size_bytes: fileSize,
+        completed_at: new Date().toISOString()
+      })
+      .eq('id', backupId);
+
+    if (updateError) {
+      logError(`Error actualizando registro: ${updateError.message}`);
+      await addCommandLog('error', `Error actualizando DB: ${updateError.message}`);
+    } else {
+      logSuccess('Registro de backup actualizado');
+      await addCommandLog('success', 'Backup registrado en DB');
+    }
+
+    // Remove the local file (optional)
+    try {
+      unlinkSync(localFilePath);
+      logInfo('Archivo local eliminado');
+    } catch (e) {
+      // Ignore deletion errors
+    }
+
+    // Mark the command as completed
+    await updateCommandStatus(id, 'completed', {
+      backup_id: backupId,
+      storage_path: storagePath,
+      size_bytes: fileSize,
+      local_path: localFilePath
+    });
+
+    console.log('');
+    logSuccess(`=== Backup completado exitosamente ===`);
+    console.log('');
+
+    return { success: true };
+
+  } catch (error) {
+    logError(`Backup failed: ${error.message}`);
+    await addCommandLog('error', error.message);
+
+    // Clean up the local file if it exists
+    if (localFilePath && existsSync(localFilePath)) {
+      try {
+        unlinkSync(localFilePath);
+      } catch (e) {
+        // Ignore
+      }
+    }
+
+    // Update the backup status in the DB
+    try {
+      const supabase = createClient(
+        CONFIG.apiUrl.replace('/functions/v1/dgp-agent-status', ''),
+        CONFIG.supabaseKey
+      );
+
+      await supabase
+        .schema('dgp')
+        .from('db_backups')
+        .update({
+          status: 'failed',
+          error_message: error.message
+        })
+        .eq('id', backupId);
+    } catch (e) {
+      // Ignore update errors
+    }
+
+    await updateCommandStatus(id, 'failed', {}, error.message);
+    return { success: false, error: error.message };
+  } finally {
+    // Clean up environment variables
+    delete process.env.PGPASSWORD;
+  }
+}
+
 /**
  * Runs a received command
  * @param {Object} command - The command to execute
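For context, this is roughly the `params` payload a platform-created command row would need for the new `executePgDump` function above to run; the key names come from the code, while every value is a hypothetical example:

    // Hypothetical params for a command routed to the 'pg_dump' case
    const params = {
      backup_id: 'b7e1c2d4',                // id of the dgp.db_backups row the agent updates
      environment_id: 'env-123',
      connection_type: 'postgresql',        // anything else is treated as a Supabase connection (project_ref + service_role_key)
      host: 'db.example.com',
      port: 5432,
      username: 'postgres',
      database: 'app',
      format: 'custom',                     // 'custom' | 'tar' | anything else produces plain SQL
      backup_type: 'full',                  // 'schema' | 'data' | anything else dumps schema and data
      schemas: ['public'],
      storage_path: 'env-123/backup.dump'   // destination inside the database-backups bucket
    };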
@@ -1377,6 +1679,9 @@ async function executeCommand(command, useAI = false) {
     case 'test_connection':
       return await executeTestConnection(command);
 
+    case 'pg_dump':
+      return await executePgDump(command);
+
     case 'rollback':
       logError('Rollback not implemented yet');
       await updateCommandStatus(command.id, 'failed', {}, 'Rollback not implemented');
@@ -1907,6 +2212,8 @@ async function runAgent() {
     headers: {
       'apikey': CONFIG.supabaseKey,
       'Authorization': `Bearer ${CONFIG.supabaseKey}`,
+      'Accept-Profile': 'dgp',
+      'Content-Profile': 'dgp',
     },
   });
 
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@deinossrl/dgp-agent",
-  "version": "1.4.39",
+  "version": "1.4.41",
   "description": "Agente local para Despliegue-GPT - Reporta el estado del repositorio Git a la plataforma TenMinute IA",
   "main": "index.mjs",
   "bin": {
@@ -29,5 +29,8 @@
   "homepage": "https://github.com/DEINOS-SRL/tenminuteia#readme",
   "engines": {
     "node": ">=18.0.0"
+  },
+  "dependencies": {
+    "@supabase/supabase-js": "^2.39.0"
   }
 }
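Because `@supabase/supabase-js` is now a runtime dependency, a quick post-upgrade sanity check can mirror the client setup used in `executePgDump`; this is only a sketch, and it assumes the anon key is allowed to read `dgp.db_backups`, which row-level security may well forbid:

    import { createClient } from '@supabase/supabase-js';

    const supabase = createClient(
      'https://asivayhbrqennwiwttds.supabase.co',  // CONFIG.apiUrl with the /functions/v1/... suffix stripped
      process.env.DGP_SUPABASE_KEY                 // the same key the agent already uses
    );

    // One small read against the dgp schema confirms the dependency resolves and the schema is reachable
    const { error } = await supabase.schema('dgp').from('db_backups').select('id').limit(1);
    console.log(error ? `check failed: ${error.message}` : 'supabase-js OK');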