@runa-ai/runa-cli 0.6.0 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{build-BXUJKYHC.js → build-V66FAQXB.js} +152 -163
- package/dist/{cache-H63JKFYH.js → cache-N7WNPEYF.js} +2 -3
- package/dist/check-LOMVIRHX.js +12 -0
- package/dist/{chunk-HPYJPB5Y.js → chunk-2APB25TT.js} +44 -10
- package/dist/chunk-3WDV32GA.js +33 -0
- package/dist/chunk-5FT3F36G.js +59 -0
- package/dist/{chunk-7QV7U6NI.js → chunk-6FAU4IGR.js} +2 -1
- package/dist/{chunk-CE3DEYFT.js → chunk-7B5C6U2K.js} +2 -208
- package/dist/{chunk-GOGRLQNP.js → chunk-AIP6MR42.js} +1 -1
- package/dist/chunk-FHG3ILE4.js +2011 -0
- package/dist/{chunk-22CS6EMA.js → chunk-H2AHNI75.js} +1 -1
- package/dist/{chunk-UU55OH7P.js → chunk-KE6QJBZG.js} +2 -3
- package/dist/{check-6AB5NGWK.js → chunk-QM53IQHM.js} +14 -12
- package/dist/{chunk-RRGQCUKT.js → chunk-WJXC4MVY.js} +30 -3
- package/dist/chunk-XDCHRVE3.js +215 -0
- package/dist/{chunk-P7U52PBY.js → chunk-Z4Z5DNW4.js} +49 -2
- package/dist/{ci-V3PIG2GI.js → ci-ZWRVWNFX.js} +1108 -132
- package/dist/cli/contract-output.d.ts +1 -0
- package/dist/{cli-GFRZCJQR.js → cli-2JNBJUBB.js} +216 -173
- package/dist/commands/build/actors/validate.d.ts +2 -0
- package/dist/commands/check/commands/check.d.ts +8 -3
- package/dist/commands/ci/machine/actors/db/collect-schema-stats.d.ts +9 -6
- package/dist/commands/ci/machine/actors/db/schema-canonical-diff.d.ts +55 -0
- package/dist/commands/ci/machine/actors/db/schema-stats.d.ts +11 -0
- package/dist/commands/ci/machine/actors/db/sync-schema.d.ts +9 -1
- package/dist/commands/ci/machine/formatters/sections/schema-matrix.d.ts +3 -3
- package/dist/commands/ci/machine/types.d.ts +2 -0
- package/dist/commands/ci/utils/execa-helpers.d.ts +1 -0
- package/dist/commands/db/commands/db-sync/error-classifier.d.ts +9 -0
- package/dist/commands/dev/actors/index.d.ts +5 -0
- package/dist/commands/dev/actors/tables-manifest.d.ts +16 -0
- package/dist/commands/dev/contract.d.ts +1 -1
- package/dist/commands/dev/guards.d.ts +24 -0
- package/dist/commands/dev/machine.d.ts +22 -3
- package/dist/commands/dev/types.d.ts +2 -0
- package/dist/commands/doctor.d.ts +9 -0
- package/dist/commands/inject-test-attrs/defaults.d.ts +9 -0
- package/dist/commands/utils/machine-state-logging.d.ts +20 -0
- package/dist/commands/utils/repo-root.d.ts +2 -0
- package/dist/{db-HR7CREX2.js → db-XULCILOU.js} +440 -2216
- package/dist/{dev-A7RW6XQV.js → dev-5YXNPTCJ.js} +168 -49
- package/dist/doctor-MZLOA53G.js +44 -0
- package/dist/{env-B47Z4747.js → env-SS66PZ4B.js} +6 -7
- package/dist/{env-files-K2C7O7L5.js → env-files-2UIUYLLR.js} +2 -2
- package/dist/{error-handler-4EYSDOSE.js → error-handler-HEXBRNVV.js} +2 -2
- package/dist/{hotfix-CULKKMGS.js → hotfix-YA3DGLOM.js} +3 -3
- package/dist/index.js +4 -4
- package/dist/{init-ELK5QCWR.js → init-ZIL6LRFO.js} +5 -6
- package/dist/{inject-test-attrs-Y5UD5P7Q.js → inject-test-attrs-P44BVTQS.js} +5 -18
- package/dist/{link-C43JRZWY.js → link-VSNDVZZD.js} +2 -3
- package/dist/manifest-TMFLESHW.js +19 -0
- package/dist/{risk-detector-BXUY2WKS.js → risk-detector-4U6ZJ2G5.js} +1 -1
- package/dist/{risk-detector-core-O7I7SPR7.js → risk-detector-core-TK4OAI3N.js} +2 -2
- package/dist/{risk-detector-plpgsql-SGMVKYJP.js → risk-detector-plpgsql-HWKS4OLR.js} +37 -7
- package/dist/{status-IJ4ZWHMX.js → status-UTKS63AB.js} +2 -3
- package/dist/{telemetry-FN7V727Y.js → telemetry-P56UBLZ2.js} +2 -3
- package/dist/{template-check-PNG5NQ5H.js → template-check-3P4HZXVY.js} +40 -29
- package/dist/{test-QYXE5UVW.js → test-V4KQL574.js} +34 -10
- package/dist/{test-gen-QPWOIEHU.js → test-gen-FS4CEY3P.js} +2 -3
- package/dist/{upgrade-3SLWVNAC.js → upgrade-NUK3ZBCL.js} +18 -6
- package/dist/{validate-SM4PXPS7.js → validate-CAAW4Y44.js} +2 -3
- package/dist/{vuln-check-TYQNEFS7.js → vuln-check-2W7N5TA2.js} +3 -4
- package/dist/{vuln-checker-2QXGN5YT.js → vuln-checker-IQJ56RUV.js} +413 -140
- package/dist/{watch-UCDVOQAH.js → watch-PNTKZYFB.js} +1 -1
- package/dist/{workflow-ZB5Q2PFY.js → workflow-H75N4BXX.js} +3 -4
- package/package.json +2 -2
- package/dist/chunk-JT5SUTWE.js +0 -9
- package/dist/chunk-M47WJJVS.js +0 -71
- package/dist/manifest-2NOQ2IMK.js +0 -32
- package/dist/{chunk-MNPMZERI.js → chunk-644FVGIQ.js} +1 -1
- package/dist/{chunk-XJBQINSA.js → chunk-SGJG3BKD.js} +1 -1
|
@@ -1,47 +1,43 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
import { createRequire } from 'module';
|
|
3
|
+
import { detectDatabaseStack, getStackPaths } from './chunk-CCKG5R4Y.js';
|
|
3
4
|
import './chunk-ZZOXM6Q4.js';
|
|
4
5
|
import { createError } from './chunk-JQXOVCOP.js';
|
|
5
6
|
import { resolveDatabaseUrl, tryResolveDatabaseUrl } from './chunk-KWX3JHCY.js';
|
|
6
7
|
export { resolveDatabaseUrl, tryResolveDatabaseUrl } from './chunk-KWX3JHCY.js';
|
|
7
|
-
import {
|
|
8
|
-
import { detectAppSchemas, normalizeDatabaseUrlForDdl, parsePostgresUrl, buildPsqlEnv, buildPsqlArgs, psqlSyncQuery, formatSchemasForSql, blankDollarQuotedBodies, stripSqlComments, psqlQuery, psqlSyncFile, psqlExec } from './chunk-CE3DEYFT.js';
|
|
8
|
+
import { detectAppSchemas, normalizeDatabaseUrlForDdl, formatSchemasForSql } from './chunk-XDCHRVE3.js';
|
|
9
9
|
import './chunk-NPSRD26F.js';
|
|
10
|
-
import { categorizeRisks, detectSchemaRisks } from './chunk-
|
|
11
|
-
import {
|
|
12
|
-
import { loadEnvFiles } from './chunk-MNPMZERI.js';
|
|
13
|
-
import { findWorkspaceRoot } from './chunk-JMJP4A47.js';
|
|
10
|
+
import { categorizeRisks, detectSchemaRisks } from './chunk-H2AHNI75.js';
|
|
11
|
+
import { loadEnvFiles } from './chunk-644FVGIQ.js';
|
|
14
12
|
import { diagnoseSupabaseStart } from './chunk-AAIE4F2U.js';
|
|
13
|
+
import { validateUserFilePath, filterSafePaths, resolveSafePath } from './chunk-DRSUEMAK.js';
|
|
15
14
|
import { runMachine } from './chunk-QDF7QXBL.js';
|
|
16
15
|
import './chunk-IBVVGH6X.js';
|
|
16
|
+
import { extractSchemaTablesAndEnums, fetchDbTablesAndEnums, extractTablesFromIdempotentSql, diffSchema, generateTablesManifest, writeEnvLocalBridge, removeEnvLocalBridge } from './chunk-FHG3ILE4.js';
|
|
17
|
+
import { parsePostgresUrl, buildPsqlEnv, buildPsqlArgs, psqlSyncQuery, blankDollarQuotedBodies, stripSqlComments, psqlSyncFile, psqlExec, psqlQuery } from './chunk-7B5C6U2K.js';
|
|
17
18
|
import { redactSecrets } from './chunk-II7VYQEM.js';
|
|
18
|
-
import { writeEnvLocalBridge, removeEnvLocalBridge } from './chunk-M47WJJVS.js';
|
|
19
19
|
import { init_local_supabase, init_constants, detectLocalSupabasePorts, buildLocalDatabaseUrl, DATABASE_DEFAULTS, SEED_DEFAULTS, SCRIPT_LOCATIONS } from './chunk-VM3IWOT5.js';
|
|
20
20
|
export { DATABASE_DEFAULTS, SCRIPT_LOCATIONS, SEED_DEFAULTS } from './chunk-VM3IWOT5.js';
|
|
21
21
|
import { secureSupabase, secureDocker } from './chunk-RZLYEO4U.js';
|
|
22
|
-
import {
|
|
23
|
-
import
|
|
24
|
-
import './chunk-RRGQCUKT.js';
|
|
25
|
-
import './chunk-JT5SUTWE.js';
|
|
22
|
+
import { emitJsonSuccess } from './chunk-KE6QJBZG.js';
|
|
23
|
+
import './chunk-WJXC4MVY.js';
|
|
26
24
|
import { getOutputFormatFromEnv } from './chunk-HKUWEGUX.js';
|
|
25
|
+
import { getDatabasePackagePath, loadRunaConfig, getSDKScriptsPath } from './chunk-5NKWR4FF.js';
|
|
26
|
+
import { findWorkspaceRoot } from './chunk-JMJP4A47.js';
|
|
27
27
|
import { init_esm_shims } from './chunk-VRXHCR5K.js';
|
|
28
28
|
import { Command } from 'commander';
|
|
29
|
-
import { createCLILogger,
|
|
29
|
+
import { createCLILogger, CLIError, dbGenerateDiagram, DbDiagramGenerateOutputSchema, createDbSnapshot, syncDatabase, emitDbPushFailureCapsule, emitDbAnnotations, writeDbPushStepSummary, exportDbReportJson, DbSyncOutputSchema, databasePaths, detectRequiredServices, formatDetectionResults, dbStart, DbLifecycleStartOutputSchema, dbStop, DbLifecycleStopOutputSchema, dbReset, DbLifecycleResetOutputSchema, dbValidateSchemas, DbSchemaValidateOutputSchema, DbSchemaRisksOutputSchema, dbApplySchemas, DbSchemaApplyOutputSchema, dbGenerateTypes, DbSchemaGenerateOutputSchema, extractSchemaFilter, dbSeedInit, DbSeedInitOutputSchema, dbSeedValidate, DbSeedValidateOutputSchema, dbSeedGenerate, DbSeedGenerateOutputSchema, dbVerifySeeds, DbSeedVerifyOutputSchema, DbSnapshotCreateOutputSchema, restoreDbSnapshot, DbSnapshotRestoreOutputSchema, listDbSnapshots, DbSnapshotListOutputSchema, dbGeneratePgTapTests, DbTestGenOutputSchema, dbUpdateGoldenRecord, DbTestUpdateGoldenOutputSchema, recordSchemaAudit, RecordSchemaAuditOutputSchema, createBackup, CreateBackupOutputSchema, listBackups, ListBackupsOutputSchema, getBackupMetadata, restoreBackup, RestoreBackupOutputSchema, deleteBackup, DeleteBackupOutputSchema, loadRunaConfigOrThrow, dbDetectSchemaRisks, dbSeedApply, writeDbSeedStepSummary, DbSeedApplyOutputSchema, emitDbSeedFailureCapsule, checkExtensionConfig, resolveAvailablePorts, calculatePortOffset, getPortsWithOffset, formatExtensionWarnings } from '@runa-ai/runa';
|
|
30
30
|
import { fromPromise, setup, assign, createActor } from 'xstate';
|
|
31
31
|
import { spawn, spawnSync, execFileSync } from 'child_process';
|
|
32
32
|
import { z } from 'zod';
|
|
33
|
-
import { existsSync, mkdirSync, copyFileSync, readdirSync, mkdtempSync, readFileSync, writeFileSync, rmSync, lstatSync,
|
|
34
|
-
import
|
|
33
|
+
import { existsSync, mkdirSync, copyFileSync, readdirSync, mkdtempSync, readFileSync, writeFileSync, rmSync, lstatSync, unlinkSync, statSync } from 'fs';
|
|
34
|
+
import path6, { join, dirname, resolve, basename, relative } from 'path';
|
|
35
35
|
import crypto, { randomBytes, randomUUID } from 'crypto';
|
|
36
36
|
import { tmpdir } from 'os';
|
|
37
|
-
import postgres2 from 'postgres';
|
|
38
|
-
import { isTable, getTableUniqueName, getTableName } from 'drizzle-orm';
|
|
39
|
-
import { isPgEnum } from 'drizzle-orm/pg-core';
|
|
40
|
-
import { createJiti } from 'jiti';
|
|
41
|
-
import { introspectDatabase } from '@runa-ai/runa/test-generators';
|
|
42
37
|
import { writeFile, appendFile, readFile, stat, realpath } from 'fs/promises';
|
|
43
38
|
import chalk from 'chalk';
|
|
44
39
|
import { execa } from 'execa';
|
|
40
|
+
import postgres from 'postgres';
|
|
45
41
|
import { fileURLToPath } from 'url';
|
|
46
42
|
import { Project } from 'ts-morph';
|
|
47
43
|
import ora from 'ora';
|
|
@@ -1446,6 +1442,46 @@ function cleanupPlanFile(planFile) {
|
|
|
1446
1442
|
} catch {
|
|
1447
1443
|
}
|
|
1448
1444
|
}
|
|
1445
|
+
function validatePreparedPlan(config, attempt, totalWaitMs, planSql, verbose) {
|
|
1446
|
+
const plan = parsePlanOutput(planSql);
|
|
1447
|
+
if (config?.allowedHazardTypes) {
|
|
1448
|
+
validatePlanForExecution(plan, config.allowedHazardTypes);
|
|
1449
|
+
}
|
|
1450
|
+
if (plan.totalStatements === 0) {
|
|
1451
|
+
return {
|
|
1452
|
+
currentPlanSql: plan.rawSql,
|
|
1453
|
+
result: { success: true, attempts: attempt, totalWaitMs }
|
|
1454
|
+
};
|
|
1455
|
+
}
|
|
1456
|
+
validateStatementTypes(plan);
|
|
1457
|
+
if (verbose) {
|
|
1458
|
+
logger6.debug(`Plan validated: ${plan.totalStatements} statement(s)`);
|
|
1459
|
+
}
|
|
1460
|
+
return { currentPlanSql: plan.rawSql };
|
|
1461
|
+
}
|
|
1462
|
+
function writePlanFile(planSql, attempt) {
|
|
1463
|
+
const planFile = join(tmpdir(), `runa-plan-${randomUUID()}-${attempt}.sql`);
|
|
1464
|
+
const wrappedSql = wrapPlanSql(planSql);
|
|
1465
|
+
writeFileSync(planFile, wrappedSql, "utf-8");
|
|
1466
|
+
return planFile;
|
|
1467
|
+
}
|
|
1468
|
+
function runPlanAttempt(context, planFile) {
|
|
1469
|
+
const execution = runPlanExecution(context.dbUrl, planFile, context.verbose);
|
|
1470
|
+
if (execution.kind !== "retry") {
|
|
1471
|
+
return execution;
|
|
1472
|
+
}
|
|
1473
|
+
logRetryableExecution(context.attempt, context.maxRetries, execution.errorOutput);
|
|
1474
|
+
return execution;
|
|
1475
|
+
}
|
|
1476
|
+
function buildRetryFailure(maxRetries, totalWaitMs, lastError) {
|
|
1477
|
+
logger6.error(`Migration failed after ${maxRetries} attempts (total wait: ${totalWaitMs}ms)`);
|
|
1478
|
+
return {
|
|
1479
|
+
success: false,
|
|
1480
|
+
error: lastError || new Error("Migration failed after max retries"),
|
|
1481
|
+
attempts: maxRetries,
|
|
1482
|
+
totalWaitMs
|
|
1483
|
+
};
|
|
1484
|
+
}
|
|
1449
1485
|
async function prepareRetryIteration(params) {
|
|
1450
1486
|
if (params.attempt === 0) {
|
|
1451
1487
|
return {
|
|
@@ -1497,44 +1533,31 @@ async function executePlanSqlWithRetry(dbUrl, initialPlanSql, verbose, config) {
|
|
|
1497
1533
|
return preparedPlan.result;
|
|
1498
1534
|
}
|
|
1499
1535
|
currentPlanSql = preparedPlan.currentPlanSql;
|
|
1500
|
-
|
|
1501
|
-
|
|
1502
|
-
|
|
1503
|
-
|
|
1504
|
-
|
|
1505
|
-
|
|
1506
|
-
|
|
1507
|
-
|
|
1508
|
-
|
|
1509
|
-
`Filtered ${removedStatements.length} DROP statement(s) targeting idempotent-managed tables`
|
|
1510
|
-
);
|
|
1511
|
-
for (const stmt of removedStatements) {
|
|
1512
|
-
logger6.warn(` Skipped: ${stmt.sql.split("\n")[0]}`);
|
|
1513
|
-
}
|
|
1514
|
-
plan = filteredPlan;
|
|
1515
|
-
currentPlanSql = filteredPlan.rawSql;
|
|
1516
|
-
}
|
|
1517
|
-
}
|
|
1518
|
-
if (plan.totalStatements === 0) {
|
|
1519
|
-
return { success: true, attempts: attempt, totalWaitMs };
|
|
1520
|
-
}
|
|
1521
|
-
if (config?.allowedHazardTypes) {
|
|
1522
|
-
validatePlanForExecution(plan, config.allowedHazardTypes);
|
|
1523
|
-
}
|
|
1524
|
-
validateStatementTypes(plan);
|
|
1525
|
-
if (verbose) {
|
|
1526
|
-
logger6.debug(`Plan validated: ${plan.totalStatements} statement(s)`);
|
|
1536
|
+
const validatedPlan = validatePreparedPlan(
|
|
1537
|
+
config,
|
|
1538
|
+
attempt,
|
|
1539
|
+
totalWaitMs,
|
|
1540
|
+
currentPlanSql,
|
|
1541
|
+
verbose
|
|
1542
|
+
);
|
|
1543
|
+
if (validatedPlan.result) {
|
|
1544
|
+
return validatedPlan.result;
|
|
1527
1545
|
}
|
|
1528
|
-
|
|
1529
|
-
const
|
|
1530
|
-
|
|
1546
|
+
currentPlanSql = validatedPlan.currentPlanSql;
|
|
1547
|
+
const planFile = writePlanFile(currentPlanSql, attempt);
|
|
1548
|
+
const context = {
|
|
1549
|
+
dbUrl,
|
|
1550
|
+
attempt,
|
|
1551
|
+
maxRetries,
|
|
1552
|
+
verbose,
|
|
1553
|
+
totalWaitMs
|
|
1554
|
+
};
|
|
1531
1555
|
try {
|
|
1532
|
-
const execution =
|
|
1556
|
+
const execution = runPlanAttempt(context, planFile);
|
|
1533
1557
|
if (execution.kind === "success") {
|
|
1534
1558
|
return { success: true, attempts: attempt, totalWaitMs };
|
|
1535
1559
|
}
|
|
1536
1560
|
if (execution.kind === "retry") {
|
|
1537
|
-
logRetryableExecution(attempt, maxRetries, execution.errorOutput);
|
|
1538
1561
|
lastError = execution.error;
|
|
1539
1562
|
continue;
|
|
1540
1563
|
}
|
|
@@ -1548,13 +1571,7 @@ async function executePlanSqlWithRetry(dbUrl, initialPlanSql, verbose, config) {
|
|
|
1548
1571
|
cleanupPlanFile(planFile);
|
|
1549
1572
|
}
|
|
1550
1573
|
}
|
|
1551
|
-
|
|
1552
|
-
return {
|
|
1553
|
-
success: false,
|
|
1554
|
-
error: lastError || new Error("Migration failed after max retries"),
|
|
1555
|
-
attempts: maxRetries,
|
|
1556
|
-
totalWaitMs
|
|
1557
|
-
};
|
|
1574
|
+
return buildRetryFailure(maxRetries, totalWaitMs, lastError);
|
|
1558
1575
|
}
|
|
1559
1576
|
|
|
1560
1577
|
// src/commands/db/apply/helpers/shadow-db-manager.ts
|
|
@@ -3234,7 +3251,7 @@ function logIdempotentRiskSummary(summary) {
|
|
|
3234
3251
|
async function detectIdempotentRiskSummary(schemasDir, files, verbose) {
|
|
3235
3252
|
const summary = emptyRiskSummary();
|
|
3236
3253
|
try {
|
|
3237
|
-
const { detectSchemaRisks: detectSchemaRisks2 } = await import('./risk-detector-
|
|
3254
|
+
const { detectSchemaRisks: detectSchemaRisks2 } = await import('./risk-detector-4U6ZJ2G5.js');
|
|
3238
3255
|
for (const file of files) {
|
|
3239
3256
|
const filePath = join(schemasDir, file);
|
|
3240
3257
|
const risks = await detectSchemaRisks2(filePath);
|
|
@@ -3523,2088 +3540,154 @@ function buildCheckModeResult(input, planOutput, hazards, protectedTables, prote
|
|
|
3523
3540
|
applied: false,
|
|
3524
3541
|
dataViolations: dataViolationCount > 0 ? dataViolationCount : void 0
|
|
3525
3542
|
};
|
|
3526
|
-
}
|
|
3527
|
-
function backupProtectedTablesForProduction(dbUrl, protectedTables, input) {
|
|
3528
|
-
if (input.env !== "production") {
|
|
3529
|
-
return;
|
|
3530
|
-
}
|
|
3531
|
-
const { backupPath } = backupIdempotentTables(dbUrl, protectedTables, input.verbose);
|
|
3532
|
-
if (backupPath) {
|
|
3533
|
-
logger13.info(`Recovery: pg_restore -d <DATABASE_URL> ${backupPath}`);
|
|
3534
|
-
return;
|
|
3535
|
-
}
|
|
3536
|
-
if (protectedTables.length > 0 && !input.allowDataLoss) {
|
|
3537
|
-
throw new Error(
|
|
3538
|
-
"Pre-apply backup failed for production deployment.\n Protected tables exist but could not be backed up.\n Use --allow-data-loss to proceed without backup (emergency only)."
|
|
3539
|
-
);
|
|
3540
|
-
}
|
|
3541
|
-
}
|
|
3542
|
-
async function cleanupApplyResources(params) {
|
|
3543
|
-
if (params.shadowDb) {
|
|
3544
|
-
try {
|
|
3545
|
-
await params.shadowDb.cleanup();
|
|
3546
|
-
if (params.verbose) {
|
|
3547
|
-
logger13.debug("Shadow DB cleaned up");
|
|
3548
|
-
}
|
|
3549
|
-
} catch (cleanupError) {
|
|
3550
|
-
logger13.warn(`Failed to cleanup shadow DB: ${cleanupError}`);
|
|
3551
|
-
}
|
|
3552
|
-
}
|
|
3553
|
-
if (params.prefilter) {
|
|
3554
|
-
try {
|
|
3555
|
-
rmSync(params.prefilter.filteredDir, { recursive: true, force: true });
|
|
3556
|
-
} catch {
|
|
3557
|
-
}
|
|
3558
|
-
}
|
|
3559
|
-
try {
|
|
3560
|
-
rmSync(params.tmpDir, { recursive: true, force: true });
|
|
3561
|
-
} catch {
|
|
3562
|
-
}
|
|
3563
|
-
}
|
|
3564
|
-
var applyPgSchemaDiff = fromPromise(async ({ input: { input, targetDir } }) => {
|
|
3565
|
-
const schemasDir = join(targetDir, "supabase/schemas/declarative");
|
|
3566
|
-
if (!existsSync(schemasDir)) {
|
|
3567
|
-
logger13.info("No declarative schemas found");
|
|
3568
|
-
return { sql: "", hazards: [], applied: false };
|
|
3569
|
-
}
|
|
3570
|
-
const dbUrl = getDbUrl(input);
|
|
3571
|
-
const configState = loadPgSchemaDiffConfigState(targetDir, input.verbose);
|
|
3572
|
-
const prefilterState = createPrefilterState(
|
|
3573
|
-
schemasDir,
|
|
3574
|
-
input.verbose,
|
|
3575
|
-
configState.configExclusions
|
|
3576
|
-
);
|
|
3577
|
-
const freshDbResult = handleFreshDbCase(input, dbUrl, targetDir, prefilterState.pgSchemaDiffDir);
|
|
3578
|
-
if (freshDbResult) return freshDbResult;
|
|
3579
|
-
const schemaFiles = collectSchemaFiles(schemasDir);
|
|
3580
|
-
if (schemaFiles.length === 0) {
|
|
3581
|
-
logger13.info("No schema files to apply");
|
|
3582
|
-
return { sql: "", hazards: [], applied: false };
|
|
3583
|
-
}
|
|
3584
|
-
const tmpDir = createCombinedSchemaBundle(schemaFiles, input.verbose);
|
|
3585
|
-
logger13.step("Running pg-schema-diff (incremental changes)...");
|
|
3586
|
-
let shadowDb = null;
|
|
3587
|
-
try {
|
|
3588
|
-
verifyPgSchemaDiffBinary({ strictVersion: input.env === "production" });
|
|
3589
|
-
await verifyDatabaseConnection(dbUrl);
|
|
3590
|
-
shadowDb = await createShadowDbForRun(dbUrl, configState.shadowExtensions, input.verbose);
|
|
3591
|
-
const includeSchemas = detectAppSchemas(schemasDir, input.verbose);
|
|
3592
|
-
cleanPartitionAclsForPgSchemaDiff(dbUrl, includeSchemas, input.verbose);
|
|
3593
|
-
const { planOutput } = executePgSchemaDiffPlan(
|
|
3594
|
-
dbUrl,
|
|
3595
|
-
prefilterState.pgSchemaDiffDir,
|
|
3596
|
-
includeSchemas,
|
|
3597
|
-
input.verbose,
|
|
3598
|
-
{ tempDbDsn: shadowDb?.dsn }
|
|
3599
|
-
);
|
|
3600
|
-
const noChangesResult = buildNoChangesResult(planOutput);
|
|
3601
|
-
if (noChangesResult) return noChangesResult;
|
|
3602
|
-
const { hazards } = handleHazardsWithContext(planOutput, input, schemasDir);
|
|
3603
|
-
const droppedTables = detectDropTableStatements(planOutput);
|
|
3604
|
-
enforceDropSafety(input, droppedTables);
|
|
3605
|
-
const dataViolationCount = runPreApplyDataCompatibility(dbUrl, planOutput, input);
|
|
3606
|
-
const protectedTables = getIdempotentProtectedTables(
|
|
3607
|
-
schemasDir,
|
|
3608
|
-
prefilterState.configExclusions
|
|
3609
|
-
);
|
|
3610
|
-
const protectedObjects = getIdempotentProtectedObjects(
|
|
3611
|
-
schemasDir,
|
|
3612
|
-
prefilterState.configExclusions
|
|
3613
|
-
);
|
|
3614
|
-
const checkModeResult = buildCheckModeResult(
|
|
3615
|
-
input,
|
|
3616
|
-
planOutput,
|
|
3617
|
-
hazards,
|
|
3618
|
-
protectedTables,
|
|
3619
|
-
protectedObjects,
|
|
3620
|
-
dataViolationCount
|
|
3621
|
-
);
|
|
3622
|
-
if (checkModeResult) return checkModeResult;
|
|
3623
|
-
backupProtectedTablesForProduction(dbUrl, protectedTables, input);
|
|
3624
|
-
const preApplyCounts = getTableRowEstimates(dbUrl, schemasDir, input.verbose);
|
|
3625
|
-
const applyResult = await applyWithRetry({
|
|
3626
|
-
dbUrl,
|
|
3627
|
-
schemasDir,
|
|
3628
|
-
includeSchemas,
|
|
3629
|
-
input,
|
|
3630
|
-
planOutput,
|
|
3631
|
-
hazards,
|
|
3632
|
-
protectedTables,
|
|
3633
|
-
protectedObjects,
|
|
3634
|
-
tempDbDsn: shadowDb?.dsn,
|
|
3635
|
-
pgSchemaDiffDir: prefilterState.pgSchemaDiffDir
|
|
3636
|
-
});
|
|
3637
|
-
if (applyResult.applied) {
|
|
3638
|
-
verifyDataIntegrity(dbUrl, schemasDir, preApplyCounts, input.verbose, input.allowDataLoss);
|
|
3639
|
-
}
|
|
3640
|
-
return {
|
|
3641
|
-
...applyResult,
|
|
3642
|
-
dataViolations: dataViolationCount > 0 ? dataViolationCount : void 0
|
|
3643
|
-
};
|
|
3644
|
-
} finally {
|
|
3645
|
-
await cleanupApplyResources({
|
|
3646
|
-
shadowDb,
|
|
3647
|
-
prefilter: prefilterState.prefilter,
|
|
3648
|
-
tmpDir,
|
|
3649
|
-
verbose: input.verbose
|
|
3650
|
-
});
|
|
3651
|
-
}
|
|
3652
|
-
});
|
|
3653
|
-
var validatePartitions = fromPromise(async ({ input: { input, targetDir } }) => {
|
|
3654
|
-
if (input.check) return { warnings: [] };
|
|
3655
|
-
const idempotentDir = join(targetDir, "supabase/schemas/idempotent");
|
|
3656
|
-
if (!existsSync(idempotentDir)) return { warnings: [] };
|
|
3657
|
-
const expected = parseExpectedPartitions(idempotentDir);
|
|
3658
|
-
if (expected.length === 0) return { warnings: [] };
|
|
3659
|
-
const dbUrl = getDbUrl(input);
|
|
3660
|
-
const schemas = [...new Set(expected.map((e) => e.parent.split(".")[0] ?? ""))];
|
|
3661
|
-
const actual = queryActualPartitions(dbUrl, schemas);
|
|
3662
|
-
const drift = detectPartitionDrift(expected, actual);
|
|
3663
|
-
if (drift.missing.length === 0) {
|
|
3664
|
-
logger13.success(`All ${expected.length} expected partition(s) verified`);
|
|
3665
|
-
return { warnings: [] };
|
|
3666
|
-
}
|
|
3667
|
-
const warnings = formatPartitionWarnings(drift);
|
|
3668
|
-
for (const w of warnings) logger13.warn(w);
|
|
3669
|
-
return { warnings };
|
|
3670
|
-
});
|
|
3671
|
-
|
|
3672
|
-
// src/commands/db/apply/actors/seed-actors.ts
|
|
3673
|
-
init_esm_shims();
|
|
3674
|
-
|
|
3675
|
-
// src/commands/db/utils/table-registry.ts
|
|
3676
|
-
init_esm_shims();
|
|
3677
|
-
|
|
3678
|
-
// src/commands/db/utils/semantic-mapper.ts
|
|
3679
|
-
init_esm_shims();
|
|
3680
|
-
function snakeToCamel(str) {
|
|
3681
|
-
return str.replace(/_([a-z])/g, (_, c) => c.toUpperCase());
|
|
3682
|
-
}
|
|
3683
|
-
function generateSemanticName(schema, tableName, useSchemaPrefix = false) {
|
|
3684
|
-
const baseName = snakeToCamel(tableName);
|
|
3685
|
-
if (useSchemaPrefix) {
|
|
3686
|
-
const schemaPrefix = snakeToCamel(schema);
|
|
3687
|
-
return schemaPrefix + baseName.charAt(0).toUpperCase() + baseName.slice(1);
|
|
3688
|
-
}
|
|
3689
|
-
return baseName;
|
|
3690
|
-
}
|
|
3691
|
-
function groupBySemanticName(tables) {
|
|
3692
|
-
const bySemanticName = /* @__PURE__ */ new Map();
|
|
3693
|
-
for (const table of tables) {
|
|
3694
|
-
const baseName = snakeToCamel(table.name);
|
|
3695
|
-
const existing = bySemanticName.get(baseName) ?? [];
|
|
3696
|
-
existing.push(table);
|
|
3697
|
-
bySemanticName.set(baseName, existing);
|
|
3698
|
-
}
|
|
3699
|
-
return bySemanticName;
|
|
3700
|
-
}
|
|
3701
|
-
function collectConflicts(bySemanticName) {
|
|
3702
|
-
const conflicts = [];
|
|
3703
|
-
for (const [semanticName, tables] of bySemanticName) {
|
|
3704
|
-
if (tables.length <= 1) continue;
|
|
3705
|
-
conflicts.push({
|
|
3706
|
-
semanticName,
|
|
3707
|
-
tables: tables.map((table) => table.qualifiedName)
|
|
3708
|
-
});
|
|
3709
|
-
}
|
|
3710
|
-
return conflicts;
|
|
3711
|
-
}
|
|
3712
|
-
function getSchemaPriorityRank(schema, prioritySchemas) {
|
|
3713
|
-
const index = prioritySchemas.indexOf(schema);
|
|
3714
|
-
return index === -1 ? Number.POSITIVE_INFINITY : index;
|
|
3715
|
-
}
|
|
3716
|
-
function compareTablesByPriorityAndName(left, right, prioritySchemas) {
|
|
3717
|
-
const leftRank = getSchemaPriorityRank(left.schema, prioritySchemas);
|
|
3718
|
-
const rightRank = getSchemaPriorityRank(right.schema, prioritySchemas);
|
|
3719
|
-
if (leftRank !== rightRank) {
|
|
3720
|
-
return leftRank - rightRank;
|
|
3721
|
-
}
|
|
3722
|
-
return left.qualifiedName.localeCompare(right.qualifiedName);
|
|
3723
|
-
}
|
|
3724
|
-
function applyOverride(table, overrides, mapping) {
|
|
3725
|
-
const overrideName = overrides[table.qualifiedName];
|
|
3726
|
-
if (!overrideName) return false;
|
|
3727
|
-
mapping[overrideName] = table.qualifiedName;
|
|
3728
|
-
return true;
|
|
3729
|
-
}
|
|
3730
|
-
function resolveFirstStrategy(table, baseName, tablesWithSameName, prioritySchemas) {
|
|
3731
|
-
const sorted = [...tablesWithSameName].sort(
|
|
3732
|
-
(left, right) => compareTablesByPriorityAndName(left, right, prioritySchemas)
|
|
3733
|
-
);
|
|
3734
|
-
if (sorted[0]?.qualifiedName === table.qualifiedName) {
|
|
3735
|
-
return { mappedName: baseName };
|
|
3736
|
-
}
|
|
3737
|
-
return { skipped: true };
|
|
3738
|
-
}
|
|
3739
|
-
function resolveConflict(table, baseName, tablesWithSameName, conflictStrategy, prioritySchemas) {
|
|
3740
|
-
switch (conflictStrategy) {
|
|
3741
|
-
case "prefix":
|
|
3742
|
-
return { mappedName: generateSemanticName(table.schema, table.name, true) };
|
|
3743
|
-
case "error":
|
|
3744
|
-
throw new Error(
|
|
3745
|
-
`Semantic name conflict: '${baseName}' maps to multiple tables: ${tablesWithSameName.map((candidate) => candidate.qualifiedName).join(", ")}`
|
|
3746
|
-
);
|
|
3747
|
-
case "first":
|
|
3748
|
-
return resolveFirstStrategy(table, baseName, tablesWithSameName, prioritySchemas);
|
|
3749
|
-
}
|
|
3750
|
-
}
|
|
3751
|
-
function generateMapping(tables, options = {}) {
|
|
3752
|
-
const { conflictStrategy = "prefix", prioritySchemas = [], overrides = {} } = options;
|
|
3753
|
-
const bySemanticName = groupBySemanticName(tables);
|
|
3754
|
-
const conflicts = collectConflicts(bySemanticName);
|
|
3755
|
-
const mapping = {};
|
|
3756
|
-
const skipped = [];
|
|
3757
|
-
for (const table of tables) {
|
|
3758
|
-
if (applyOverride(table, overrides, mapping)) {
|
|
3759
|
-
continue;
|
|
3760
|
-
}
|
|
3761
|
-
const baseName = snakeToCamel(table.name);
|
|
3762
|
-
const tablesWithSameName = bySemanticName.get(baseName) ?? [];
|
|
3763
|
-
if (tablesWithSameName.length === 1) {
|
|
3764
|
-
mapping[baseName] = table.qualifiedName;
|
|
3765
|
-
continue;
|
|
3766
|
-
}
|
|
3767
|
-
const resolution = resolveConflict(
|
|
3768
|
-
table,
|
|
3769
|
-
baseName,
|
|
3770
|
-
tablesWithSameName,
|
|
3771
|
-
conflictStrategy,
|
|
3772
|
-
prioritySchemas
|
|
3773
|
-
);
|
|
3774
|
-
if (resolution.mappedName) {
|
|
3775
|
-
mapping[resolution.mappedName] = table.qualifiedName;
|
|
3776
|
-
} else if (resolution.skipped) {
|
|
3777
|
-
skipped.push(table.qualifiedName);
|
|
3778
|
-
}
|
|
3779
|
-
}
|
|
3780
|
-
return { mapping, conflicts, skipped };
|
|
3781
|
-
}
|
|
3782
|
-
function applyMappingToTables(tables, mapping) {
|
|
3783
|
-
const reverseMapping = /* @__PURE__ */ new Map();
|
|
3784
|
-
for (const [semantic, qualified] of Object.entries(mapping)) {
|
|
3785
|
-
reverseMapping.set(qualified, semantic);
|
|
3786
|
-
}
|
|
3787
|
-
return tables.map((table) => ({
|
|
3788
|
-
...table,
|
|
3789
|
-
semanticName: reverseMapping.get(table.qualifiedName) || table.semanticName
|
|
3790
|
-
}));
|
|
3791
|
-
}
|
|
3792
|
-
|
|
3793
|
-
// src/commands/db/utils/schema-sync.ts
|
|
3794
|
-
init_esm_shims();
|
|
3795
|
-
var VALID_PG_IDENTIFIER_PATTERN = /^[a-zA-Z_][a-zA-Z0-9_]{0,62}$/;
|
|
3796
|
-
function validatePgIdentifier(name, context) {
|
|
3797
|
-
if (!name || typeof name !== "string") {
|
|
3798
|
-
throw new Error(`Invalid ${context}: empty or not a string`);
|
|
3799
|
-
}
|
|
3800
|
-
if (!VALID_PG_IDENTIFIER_PATTERN.test(name)) {
|
|
3801
|
-
throw new Error(
|
|
3802
|
-
`Invalid ${context} "${name}": must start with letter/underscore and contain only alphanumeric/underscore characters`
|
|
3803
|
-
);
|
|
3804
|
-
}
|
|
3805
|
-
}
|
|
3806
|
-
function escapePgStringLiteral(value) {
|
|
3807
|
-
if (typeof value !== "string") {
|
|
3808
|
-
throw new Error("Value must be a string");
|
|
3809
|
-
}
|
|
3810
|
-
return value.replace(/\\/g, "\\\\").replace(/'/g, "''");
|
|
3811
|
-
}
|
|
3812
|
-
function buildSafeSchemaInClause(schemas) {
|
|
3813
|
-
if (schemas.length === 0) {
|
|
3814
|
-
throw new Error("No schemas provided for IN clause");
|
|
3815
|
-
}
|
|
3816
|
-
const safeSchemas = [];
|
|
3817
|
-
for (const schema of schemas) {
|
|
3818
|
-
validatePgIdentifier(schema, "schema name");
|
|
3819
|
-
safeSchemas.push(`'${escapePgStringLiteral(schema)}'`);
|
|
3820
|
-
}
|
|
3821
|
-
return safeSchemas.join(",");
|
|
3822
|
-
}
|
|
3823
|
-
var ERROR_MESSAGES = {
|
|
3824
|
-
PATH_TRAVERSAL: "Schema path validation failed",
|
|
3825
|
-
SCHEMA_NOT_FOUND: "Schema file not found"
|
|
3826
|
-
};
|
|
3827
|
-
function containsPathTraversal(inputPath) {
|
|
3828
|
-
const normalized = path.normalize(inputPath);
|
|
3829
|
-
return normalized.includes("..") || inputPath.includes("\0");
|
|
3830
|
-
}
|
|
3831
|
-
function isPathWithinBase(filePath, baseDir) {
|
|
3832
|
-
try {
|
|
3833
|
-
const resolvedFile = path.resolve(filePath);
|
|
3834
|
-
const resolvedBase = path.resolve(baseDir);
|
|
3835
|
-
const normalizedFile = path.normalize(resolvedFile);
|
|
3836
|
-
const normalizedBase = path.normalize(resolvedBase);
|
|
3837
|
-
return normalizedFile === normalizedBase || normalizedFile.startsWith(normalizedBase + path.sep);
|
|
3838
|
-
} catch {
|
|
3839
|
-
return false;
|
|
3840
|
-
}
|
|
3841
|
-
}
|
|
3842
|
-
function validateSchemaPath(dbPackagePath, projectRoot = process.cwd()) {
|
|
3843
|
-
if (containsPathTraversal(dbPackagePath)) {
|
|
3844
|
-
throw new Error(ERROR_MESSAGES.PATH_TRAVERSAL);
|
|
3845
|
-
}
|
|
3846
|
-
const schemaEntry = path.join(dbPackagePath, "src", "schema", "index.ts");
|
|
3847
|
-
const absoluteSchemaPath = path.resolve(projectRoot, schemaEntry);
|
|
3848
|
-
let resolvedProjectRoot;
|
|
3849
|
-
try {
|
|
3850
|
-
resolvedProjectRoot = realpathSync(projectRoot);
|
|
3851
|
-
} catch {
|
|
3852
|
-
resolvedProjectRoot = path.resolve(projectRoot);
|
|
3853
|
-
}
|
|
3854
|
-
if (!isPathWithinBase(absoluteSchemaPath, resolvedProjectRoot)) {
|
|
3855
|
-
throw new Error(ERROR_MESSAGES.PATH_TRAVERSAL);
|
|
3856
|
-
}
|
|
3857
|
-
if (!existsSync(absoluteSchemaPath)) {
|
|
3858
|
-
throw new Error(ERROR_MESSAGES.SCHEMA_NOT_FOUND);
|
|
3859
|
-
}
|
|
3860
|
-
return absoluteSchemaPath;
|
|
3861
|
-
}
|
|
3862
|
-
function uniqueSorted(values) {
|
|
3863
|
-
return [...new Set(values)].sort((a, b) => a.localeCompare(b));
|
|
3864
|
-
}
|
|
3865
|
-
/**
 * Loads the Drizzle schema module (via jiti, so TypeScript sources work at
 * runtime) and collects the qualified table names and pg enum definitions
 * it exports.
 *
 * @param {string} dbPackagePath - Database package path (validated first).
 * @param {string} [projectRoot=process.cwd()]
 * @returns {Promise<{expectedTables: Set<string>, expectedEnums: Map<string, {name: string, values: string[]}>}>}
 * @throws {Error} when the schema module fails to import (with a targeted
 *   hint for the known "unknown is not defined" drizzle-orm pitfall).
 */
async function extractSchemaTablesAndEnums(dbPackagePath, projectRoot = process.cwd()) {
  const validatedSchemaPath = validateSchemaPath(dbPackagePath, projectRoot);
  const jiti = createJiti(projectRoot, { interopDefault: true });
  let schemaModule;
  try {
    schemaModule = await jiti.import(validatedSchemaPath);
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    const hint = errorMessage.includes("unknown is not defined") ? "\n\nHint: Add 'unknown' to drizzle-orm/pg-core imports:\n import { unknown, ... } from 'drizzle-orm/pg-core'" : "";
    throw new Error(`Failed to load schema from ${validatedSchemaPath}: ${errorMessage}${hint}`);
  }
  const expectedTables = /* @__PURE__ */ new Set();
  const expectedEnums = /* @__PURE__ */ new Map();
  for (const value of Object.values(schemaModule)) {
    if (isTable(value)) {
      const unique = String(getTableUniqueName(value));
      // Drizzle yields "undefined.<table>" when no pgSchema was set; treat
      // those tables as living in the default "public" schema.
      if (unique.startsWith("undefined.")) {
        expectedTables.add(`public.${getTableName(value)}`);
      } else {
        expectedTables.add(unique);
      }
      continue;
    }
    if (isPgEnum(value)) {
      expectedEnums.set(value.enumName, {
        name: value.enumName,
        values: uniqueSorted(value.enumValues)
      });
    }
  }
  return { expectedTables, expectedEnums };
}
|
|
3897
|
-
/**
 * Queries the live database (through psql) for the tables and enums in the
 * managed schemas.
 *
 * @param {string} databaseUrl - Connection string passed to psql.
 * @param {{schemaDir?: string, additionalSystemSchemas?: string[]}} [options]
 * @returns {Promise<{dbTables: Set<string>, dbEnums: Map<string, {name: string, values: string[]}>}>}
 *   dbTables holds "schema.table" strings; dbEnums maps enum name to its
 *   sorted value list. NOTE(review): enums are only read from the `public`
 *   namespace while tables span all managed schemas — confirm intentional.
 */
async function fetchDbTablesAndEnums(databaseUrl, options) {
  const schemaDir = options?.schemaDir ?? "packages/database/src/schema";
  const managedSchemas = detectSchemaNames(schemaDir, process.cwd());
  // Schemas owned by the platform (plus caller-supplied extras) are never diffed.
  const systemSchemas = /* @__PURE__ */ new Set([
    ...SUPABASE_SYSTEM_SCHEMAS,
    ...options?.additionalSystemSchemas ?? []
  ]);
  const filteredManagedSchemas = managedSchemas.filter((s) => !systemSchemas.has(s));
  // buildSafeSchemaInClause quotes/escapes the names for the IN (...) list.
  const schemaList = buildSafeSchemaInClause(filteredManagedSchemas);
  const tablesSql = `
    SELECT schemaname || '.' || tablename
    FROM pg_tables
    WHERE schemaname IN (${schemaList})
    ORDER BY schemaname, tablename;`.trim();
  const enumsSql = `
    SELECT t.typname AS enum_name, string_agg(e.enumlabel, ',' ORDER BY e.enumsortorder) AS values
    FROM pg_type t
    JOIN pg_enum e ON t.oid = e.enumtypid
    JOIN pg_namespace n ON n.oid = t.typnamespace
    WHERE n.nspname = 'public'
    GROUP BY t.typname
    ORDER BY t.typname;`.trim();
  const tablesOut = await psqlQuery({ databaseUrl, sql: tablesSql, mode: "table" });
  const dbTables = /* @__PURE__ */ new Set();
  for (const line of tablesOut.split("\n")) {
    const v = line.trim();
    if (v.length > 0) dbTables.add(v);
  }
  const enumsOut = await psqlQuery({ databaseUrl, sql: enumsSql, mode: "table" });
  const dbEnums = /* @__PURE__ */ new Map();
  for (const line of enumsOut.split("\n")) {
    const trimmed = line.trim();
    if (trimmed.length === 0) continue;
    // psql table mode emits "name | v1,v2,..." rows.
    const [enumName, valuesCsv] = trimmed.split("|").map((s) => s.trim());
    const values = valuesCsv ? valuesCsv.split(",").map((s) => s.trim()) : [];
    dbEnums.set(enumName, { name: enumName, values: uniqueSorted(values) });
  }
  return { dbTables, dbEnums };
}
|
|
3936
|
-
/**
 * Computes the difference between schema-declared tables/enums and the
 * state observed in the database.
 *
 * @param {object} params
 * @param {Set<string>} params.expectedTables - qualified names from the schema.
 * @param {Map<string, {name: string, values: string[]}>} params.expectedEnums
 * @param {Set<string>} params.dbTables - qualified names found in the DB.
 * @param {Map<string, {name: string, values: string[]}>} params.dbEnums
 * @param {string[]} [params.excludeFromOrphanDetection] - exact names or
 *   `*` glob patterns of DB tables to ignore when reporting orphans.
 * @returns diff report echoing the inputs plus missing/orphan tables,
 *   missing/extra enums, and per-enum value mismatches.
 */
function diffSchema(params) {
  const missingTables = uniqueSorted(
    [...params.expectedTables].filter((t) => !params.dbTables.has(t))
  );
  const exclusions = [...new Set(params.excludeFromOrphanDetection ?? [])];
  // Fix: precompile glob exclusions once. The previous version rebuilt every
  // RegExp inside isExcluded, i.e. one compilation per pattern per DB table.
  const exclusionRegexes = exclusions
    .filter((e) => e.includes("*"))
    .map((pattern) => {
      const escaped = pattern.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
      return new RegExp(`^${escaped.replace(/\\\*/g, ".*")}$`);
    });
  const exactExclusions = new Set(exclusions.filter((e) => !e.includes("*")));
  const isExcluded = (table) => exactExclusions.has(table) || exclusionRegexes.some((regex) => regex.test(table));
  const orphanTables = uniqueSorted(
    [...params.dbTables].filter((t) => !params.expectedTables.has(t) && !isExcluded(t))
  );
  const expectedEnumNames = new Set(params.expectedEnums.keys());
  const dbEnumNames = new Set(params.dbEnums.keys());
  const missingEnums = uniqueSorted([...expectedEnumNames].filter((n) => !dbEnumNames.has(n)));
  const extraEnums = uniqueSorted([...dbEnumNames].filter((n) => !expectedEnumNames.has(n)));
  const enumValueMismatches = [];
  // Compare value lists only for enums present on both sides.
  for (const name of uniqueSorted([...expectedEnumNames].filter((n) => dbEnumNames.has(n)))) {
    const s = params.expectedEnums.get(name);
    const d = params.dbEnums.get(name);
    if (!s || !d) continue;
    const schemaValues = uniqueSorted(s.values);
    const dbValues = uniqueSorted(d.values);
    const same = schemaValues.length === dbValues.length && schemaValues.every((v, i) => v === dbValues[i]);
    if (same) continue;
    const added = schemaValues.filter((v) => !dbValues.includes(v));
    const removed = dbValues.filter((v) => !schemaValues.includes(v));
    enumValueMismatches.push({ name, dbValues, schemaValues, added, removed });
  }
  return {
    expectedTables: params.expectedTables,
    expectedEnums: params.expectedEnums,
    dbTables: params.dbTables,
    dbEnums: params.dbEnums,
    missingTables,
    orphanTables,
    missingEnums,
    extraEnums,
    enumValueMismatches
  };
}
|
|
3984
|
-
/**
 * Scans every .sql file in `idempotentDir` (resolved against `projectRoot`)
 * for CREATE TABLE statements and returns the unique, sorted list of
 * "schema.table" names. Comments are stripped first; unqualified tables
 * default to the "public" schema. Returns [] when the directory is missing
 * or any filesystem access fails (best-effort by design).
 * @param {string} idempotentDir
 * @param {string} [projectRoot=process.cwd()]
 * @returns {string[]}
 */
function extractTablesFromIdempotentSql(idempotentDir, projectRoot = process.cwd()) {
  const dirPath = path.resolve(projectRoot, idempotentDir);
  if (!existsSync(dirPath)) {
    return [];
  }
  const found = new Set();
  const createTablePattern = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?\.)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?)/gi;
  try {
    for (const file of readdirSync(dirPath)) {
      if (!file.endsWith(".sql")) continue;
      const raw = readFileSync(path.join(dirPath, file), "utf-8");
      // Drop line and block comments so commented-out DDL is ignored.
      const sql = raw.replace(/--.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
      for (const m of sql.matchAll(createTablePattern)) {
        if (m[2]) {
          found.add(`${m[1] || "public"}.${m[2]}`);
        }
      }
    }
  } catch {
    return [];
  }
  return [...found].sort();
}
|
|
4010
|
-
|
|
4011
|
-
// src/commands/db/utils/sql-table-extractor.ts
// NOTE(review): init_esm_shims() is bundler-emitted per-module setup
// (presumably installs ESM/CJS interop shims) — behavior defined elsewhere.
init_esm_shims();

// src/commands/db/utils/sql-table-extractor-regex.ts
init_esm_shims();
|
|
4016
|
-
/**
 * Scans SQL source for CREATE TABLE statements using the shared regex and
 * returns one descriptor per match: schema, name, 1-based line number, and
 * the raw table body text.
 * @param {{content: string}} ctx
 */
function findTablesRegex(ctx) {
  const found = [];
  const createTableRegex = new RegExp(SQL_PATTERNS.createTable.source, "gi");
  for (const match of ctx.content.matchAll(createTableRegex)) {
    const ref = parseTableReference(match[1] ?? "");
    if (!ref) continue;
    const offset = match.index ?? 0;
    found.push({
      schema: ref.schema,
      name: ref.name,
      lineNumber: getLineNumber(ctx.content, offset),
      tableBody: extractTableBody(ctx.content, offset)
    });
  }
  return found;
}
|
|
4028
|
-
/**
 * True for fragments that are not column declarations: blanks, SQL line
 * comments, and table-level constraint clauses.
 * @param {string} trimmed - Pre-trimmed line from a CREATE TABLE body.
 */
function shouldSkipColumnLine(trimmed) {
  if (!trimmed) return true;
  if (trimmed.startsWith("--")) return true;
  return /^(?:PRIMARY|FOREIGN|UNIQUE|CHECK|CONSTRAINT)\s/i.test(trimmed);
}
|
|
4031
|
-
/**
 * True when `name` is exactly one of the constraint-introducing keywords
 * (case-insensitive), i.e. it cannot be a column name.
 * @param {string} name
 */
function isReservedKeyword(name) {
  const reserved = /^(?:PRIMARY|FOREIGN|UNIQUE|CHECK|CONSTRAINT)$/i;
  return reserved.test(name);
}
|
|
4034
|
-
// Keywords that terminate a column's type expression when scanned at
// paren-depth 0 (see findConstraintStart). Compared case-insensitively
// against the upper-cased declaration text.
var COLUMN_CONSTRAINT_KEYWORDS = [
  "NOT NULL",
  "DEFAULT",
  "REFERENCES",
  "PRIMARY",
  "UNIQUE",
  "CHECK",
  "CONSTRAINT"
];
|
|
4043
|
-
/**
 * Checks that `keyword` occurs at `index` in `source` as a whole word:
 * delimited on both sides by start/end of string, whitespace, parens, or a
 * comma, and matching case-insensitively.
 * @param {string} source
 * @param {number} index
 * @param {string} keyword - Upper-case keyword (may contain spaces).
 */
function isBoundaryAtTopLevel(source, index, keyword) {
  const end = index + keyword.length;
  const isDelimiter = (ch) => /\s|[(),]/.test(ch);
  const beforeOk = index === 0 || isDelimiter(source[index - 1] ?? "");
  const afterOk = end === source.length || isDelimiter(source[end] ?? "");
  if (!beforeOk || !afterOk) {
    return false;
  }
  return source.substring(index, end).toUpperCase() === keyword;
}
|
|
4053
|
-
/**
 * Fresh scanner state for findConstraintStart: paren depth plus the three
 * mutually-exclusive quote modes (single, double, dollar-tagged).
 */
function createConstraintScanState() {
  const initialState = {
    depth: 0,
    dollarTag: "",
    inDollarQuote: false,
    inDoubleQuote: false,
    inSingleQuote: false
  };
  return initialState;
}
|
|
4062
|
-
/**
 * Handles a `$tag$ ... $tag$` dollar-quote boundary at `index`.
 * Opens or closes the dollar-quote in `state` and returns the index of the
 * boundary's last character (so the caller's loop skips it), or null when
 * no dollar boundary applies here.
 * @param {string} line
 * @param {number} index
 * @param {object} state - Mutated in place.
 * @returns {number | null}
 */
function consumeConstraintDollarQuote(line, index, state) {
  const notDollar = (line[index] ?? "") !== "$";
  if (notDollar || state.inSingleQuote || state.inDoubleQuote) {
    return null;
  }
  if (state.inDollarQuote) {
    // Only the exact matching close tag ends the quote.
    const closer = `$${state.dollarTag}$`;
    if (!line.slice(index).startsWith(closer)) {
      return null;
    }
    state.inDollarQuote = false;
    state.dollarTag = "";
    return index + closer.length - 1;
  }
  const opener = line.slice(index).match(/^\$([a-zA-Z_][a-zA-Z0-9_]*)?\$/);
  if (!opener) {
    return null;
  }
  state.inDollarQuote = true;
  state.dollarTag = opener[1] ?? "";
  return index + opener[0].length - 1;
}
|
|
4083
|
-
/**
 * Handles a single-quote character at `index`: toggles single-quote mode,
 * or skips an escaped `''` pair inside a literal. Returns the index to
 * resume after, or null when this position is not a single-quote event.
 * @param {string} line
 * @param {number} index
 * @param {object} state - Mutated in place.
 * @returns {number | null}
 */
function consumeConstraintSingleQuote(line, index, state) {
  const char = line[index] ?? "";
  if (char !== "'" || state.inDoubleQuote || state.inDollarQuote) {
    return null;
  }
  const escapedPair = state.inSingleQuote && (line[index + 1] ?? "") === "'";
  if (escapedPair) {
    return index + 1;
  }
  state.inSingleQuote = !state.inSingleQuote;
  return index;
}
|
|
4093
|
-
/**
 * Handles a double-quote character at `index`: toggles double-quote mode,
 * or skips an escaped `""` pair inside a quoted identifier. Returns the
 * index to resume after, or null when this position is not a double-quote
 * event.
 * @param {string} line
 * @param {number} index
 * @param {object} state - Mutated in place.
 * @returns {number | null}
 */
function consumeConstraintDoubleQuote(line, index, state) {
  const char = line[index] ?? "";
  if (char !== '"' || state.inSingleQuote || state.inDollarQuote) {
    return null;
  }
  const escapedPair = state.inDoubleQuote && (line[index + 1] ?? "") === '"';
  if (escapedPair) {
    return index + 1;
  }
  state.inDoubleQuote = !state.inDoubleQuote;
  return index;
}
|
|
4103
|
-
/**
 * Dispatches the character at `index` to the dollar-, single-, then
 * double-quote consumers (in that priority order). Returns the first
 * non-null resume index, or null when no quote handling applied.
 * @returns {number | null}
 */
function consumeConstraintQuote(line, index, state) {
  const consumers = [
    consumeConstraintDollarQuote,
    consumeConstraintSingleQuote,
    consumeConstraintDoubleQuote
  ];
  for (const consume of consumers) {
    const resumeAt = consume(line, index, state);
    if (resumeAt !== null) {
      return resumeAt;
    }
  }
  return null;
}
|
|
4114
|
-
/**
 * True while the scanner is inside any quoted region (single, double, or
 * dollar-quoted).
 * @param {object} state
 */
function isInsideConstraintQuote(state) {
  if (state.inSingleQuote) return state.inSingleQuote;
  if (state.inDoubleQuote) return state.inDoubleQuote;
  return state.inDollarQuote;
}
|
|
4117
|
-
/**
 * Adjusts paren nesting depth for one character; depth never goes below 0
 * (a stray close paren is ignored).
 * @param {string} char
 * @param {object} state - Mutated in place.
 */
function updateConstraintDepth(char, state) {
  switch (char) {
    case "(":
      state.depth += 1;
      break;
    case ")":
      if (state.depth > 0) {
        state.depth -= 1;
      }
      break;
    default:
      break;
  }
}
|
|
4126
|
-
/**
 * True when any of `keywords` starts at `index` in the upper-cased source
 * as a whole, delimiter-bounded word.
 * @param {string} upper - Upper-cased source line.
 * @param {number} index
 * @param {string[]} keywords
 */
function findKeywordAtTopLevel(upper, index, keywords) {
  for (const keyword of keywords) {
    if (!upper.startsWith(keyword, index)) continue;
    if (isBoundaryAtTopLevel(upper, index, keyword)) {
      return true;
    }
  }
  return false;
}
|
|
4131
|
-
/**
 * Finds the first top-level occurrence (paren depth 0, outside any quoted
 * region) of one of `keywords` in `line`, case-insensitively.
 * @param {string} line
 * @param {string[]} [keywords=COLUMN_CONSTRAINT_KEYWORDS]
 * @returns {number} index of the keyword, or -1 when not found.
 */
function findConstraintStart(line, keywords = COLUMN_CONSTRAINT_KEYWORDS) {
  const upper = line.toUpperCase();
  const state = createConstraintScanState();
  for (let i = 0; i < upper.length; i++) {
    // Quote handlers return the index of the last consumed character so the
    // loop resumes after the quote token.
    const adjustedIndex = consumeConstraintQuote(line, i, state);
    if (adjustedIndex !== null) {
      i = adjustedIndex;
      continue;
    }
    if (isInsideConstraintQuote(state)) continue;
    const char = upper[i] ?? "";
    updateConstraintDepth(char, state);
    if (char === "(" || char === ")") continue;
    // Keywords only count at the top level of the declaration.
    if (state.depth !== 0) continue;
    if (findKeywordAtTopLevel(upper, i, keywords)) return i;
  }
  return -1;
}
|
|
4149
|
-
/**
 * Parses an inline `REFERENCES schema.table(column)` clause from the
 * constraint tail of a column declaration.
 * @param {string} constraintSource - Text after the column's type.
 * @returns {{table: string, column: string} | null} Qualified target
 *   ("schema.name") and referenced column, or null when absent/malformed.
 */
function parseInlineReference(constraintSource) {
  const referenceStart = findConstraintStart(constraintSource, ["REFERENCES"]);
  if (referenceStart === -1) {
    return null;
  }
  const afterConstraint = constraintSource.slice(referenceStart);
  // Double-check the clause really starts with the keyword before the
  // heavier regex parse below.
  if (!/^\s*REFERENCES\s+/i.test(afterConstraint)) {
    return null;
  }
  const refMatch = afterConstraint.match(
    new RegExp(`^\\s*REFERENCES\\s+(${TABLE_REFERENCE})\\s*\\(\\s*(${SQL_IDENTIFIER})\\s*\\)`, "i")
  );
  if (!refMatch) {
    return null;
  }
  const ref = parseTableReference(refMatch[1] ?? "");
  const refColumn = unquoteIdentifier(refMatch[2] ?? "");
  if (!ref || !refColumn) {
    return null;
  }
  return { table: `${ref.schema}.${ref.name}`, column: refColumn };
}
|
|
4171
|
-
/**
 * Splits a single column declaration line into its parts: name, SQL type,
 * constraint flags (NOT NULL / DEFAULT / PRIMARY KEY) and any inline
 * REFERENCES target.
 * @param {string} line - One top-level fragment of a CREATE TABLE body.
 * @returns {object | null} parsed declaration, or null for non-column lines.
 */
function splitColumnDeclaration(line) {
  // Leading identifier (quoted or bare) followed by the rest of the
  // declaration; an optional trailing comma is discarded.
  const columnMatch = line.match(
    /^((?:"(?:[^"]|"")*"|[a-zA-Z_][a-zA-Z0-9_]*))\s+(.+?)(?:\s*,\s*)?$/
  );
  if (!columnMatch) {
    return null;
  }
  const name = unquoteIdentifier(columnMatch[1] ?? "");
  const rest = (columnMatch[2] ?? "").trim();
  // Lines starting with PRIMARY/FOREIGN/... are table constraints, not columns.
  if (!name || isReservedKeyword(name)) {
    return null;
  }
  // The type runs until the first top-level constraint keyword (if any).
  const typeEndIndex = (() => {
    const withParens = rest;
    const constraintsIndex = findConstraintStart(withParens);
    if (constraintsIndex === -1) {
      return withParens.length;
    }
    return constraintsIndex;
  })();
  const type = rest.slice(0, typeEndIndex).trim();
  const remaining = rest.slice(typeEndIndex);
  const inlineReference = parseInlineReference(remaining);
  const hasDefault = findConstraintStart(remaining, ["DEFAULT"]) !== -1;
  const notNull = findConstraintStart(remaining, ["NOT NULL"]) !== -1;
  const isPrimaryKey = findConstraintStart(remaining, ["PRIMARY KEY"]) !== -1;
  return {
    name,
    type,
    hasDefault,
    notNull,
    isPrimaryKey,
    inlineReferenceTable: inlineReference?.table,
    inlineReferenceColumn: inlineReference?.column
  };
}
|
|
4207
|
-
/**
 * Parses all column declarations out of a CREATE TABLE body, skipping
 * comments, constraint clauses, and duplicate names.
 * @param {string} tableBody
 * @returns {Array<{name, type, notNull, hasDefault, isPrimaryKey}>}
 */
function parseColumnsRegex(tableBody) {
  const parsed = [];
  const seenNames = new Set();
  for (const fragment of splitTopLevelSqlStatements(tableBody)) {
    const trimmed = fragment.trim();
    if (shouldSkipColumnLine(trimmed)) continue;
    const declaration = splitColumnDeclaration(trimmed);
    if (!declaration) continue;
    const { name } = declaration;
    if (!name || seenNames.has(name) || isReservedKeyword(name)) continue;
    seenNames.add(name);
    parsed.push({
      name,
      type: normalizeType(declaration.type),
      // A primary-key column is implicitly NOT NULL in PostgreSQL.
      notNull: declaration.notNull || declaration.isPrimaryKey,
      hasDefault: declaration.hasDefault,
      isPrimaryKey: declaration.isPrimaryKey
    });
  }
  return parsed;
}
|
|
4228
|
-
/**
 * Extracts table-level `PRIMARY KEY (col, ...)` column names from a CREATE
 * TABLE body, preserving first-seen order and dropping duplicates.
 * @param {string} tableBody
 * @returns {string[]} unquoted column names.
 */
function parsePrimaryKeyRegex(tableBody) {
  const regex = new RegExp(SQL_PATTERNS.primaryKey.source, "i");
  const found = [];
  for (const line of splitTopLevelSqlStatements(tableBody)) {
    const match = line.match(regex);
    if (!match || !match[1]) {
      continue;
    }
    // match[1] is truthy here, so no optional chaining is needed.
    for (const col of match[1].split(",")) {
      const normalized = unquoteIdentifier(col.trim());
      if (normalized && !found.includes(normalized)) {
        found.push(normalized);
      }
    }
    // Fix: removed the dead `regex.lastIndex = 0` reset — the regex is
    // built with only the "i" flag, and String.prototype.match ignores
    // lastIndex for non-global regexes.
  }
  return found;
}
|
|
4246
|
-
/**
 * Parses explicit `FOREIGN KEY (col) REFERENCES schema.table(col)` clauses
 * from a CREATE TABLE body, including optional ON DELETE / ON UPDATE
 * actions.
 * @param {string} tableBody
 */
function parseExplicitForeignKeys(tableBody) {
  const results = [];
  const fkRegex = new RegExp(SQL_PATTERNS.foreignKey.source, "gi");
  for (const fragment of splitTopLevelSqlStatements(tableBody)) {
    for (const match of fragment.matchAll(fkRegex)) {
      const column = unquoteIdentifier(match[1] ?? "");
      const target = parseTableReference(match[2] ?? "");
      const targetColumn = unquoteIdentifier(match[3] ?? "");
      if (!column || !target || !targetColumn) continue;
      results.push({
        column,
        referencesTable: `${target.schema}.${target.name}`,
        referencesColumn: targetColumn,
        onDelete: normalizeOnAction(match[4]),
        onUpdate: normalizeOnAction(match[5])
      });
    }
  }
  return results;
}
|
|
4266
|
-
/**
 * Collects foreign keys declared inline on column definitions
 * (`col type REFERENCES t(c)`), skipping columns already covered by an
 * explicit FOREIGN KEY clause.
 * @param {string} tableBody
 * @param {Set<string>} existingColumns - Columns with explicit FKs.
 */
function parseInlineForeignKeys(tableBody, existingColumns) {
  const inlineFks = [];
  for (const fragment of splitTopLevelSqlStatements(tableBody)) {
    const declaration = splitColumnDeclaration(fragment);
    const target = declaration?.inlineReferenceTable;
    const targetColumn = declaration?.inlineReferenceColumn;
    if (!declaration || !target || !targetColumn) continue;
    if (existingColumns.has(declaration.name)) continue;
    inlineFks.push({
      column: declaration.name,
      referencesTable: target,
      referencesColumn: targetColumn
    });
  }
  return inlineFks;
}
|
|
4281
|
-
/**
 * Merges explicit FOREIGN KEY clauses with inline column REFERENCES;
 * explicit declarations win when a column has both.
 * @param {string} tableBody
 */
function parseForeignKeysRegex(tableBody) {
  const explicitFks = parseExplicitForeignKeys(tableBody);
  const coveredColumns = new Set(explicitFks.map((fk) => fk.column));
  return [...explicitFks, ...parseInlineForeignKeys(tableBody, coveredColumns)];
}
|
|
4287
|
-
/**
 * Finds CREATE [UNIQUE] INDEX statements in `content` that target
 * `schema`.`tableName` and returns their name, column list, and uniqueness.
 * @param {string} content - Full SQL source.
 * @param {string} schema
 * @param {string} tableName
 */
function parseIndexesRegex(content, schema, tableName) {
  const collected = [];
  const indexRegex = new RegExp(SQL_PATTERNS.createIndex.source, "gi");
  for (const match of content.matchAll(indexRegex)) {
    const targetRef = parseTableReference(match[3] ?? "");
    if (!targetRef) continue;
    const isTarget = targetRef.schema === schema && targetRef.name === tableName;
    if (!isTarget) continue;
    const indexName = unquoteIdentifier(match[2] ?? "");
    if (!indexName) continue;
    collected.push({
      name: indexName,
      columns: parseIndexColumns(match[4] ?? ""),
      isUnique: !!match[1]
    });
  }
  return collected;
}
|
|
4308
|
-
/**
 * True when `content` contains an ENABLE ROW LEVEL SECURITY statement for
 * `schema`.`tableName`.
 * @param {string} content - Full SQL source.
 * @param {string} schema
 * @param {string} tableName
 */
function hasRlsEnabledRegex(content, schema, tableName) {
  const rlsRegex = new RegExp(SQL_PATTERNS.enableRls.source, "gi");
  for (const match of content.matchAll(rlsRegex)) {
    const ref = parseTableReference(match[1] ?? "");
    if (!ref) continue;
    if (ref.schema === schema && ref.name === tableName) {
      return true;
    }
  }
  return false;
}
|
|
4319
|
-
|
|
4320
|
-
// src/commands/db/utils/sql-table-extractor-rls.ts
// NOTE(review): bundler-emitted per-module setup; see init_esm_shims.
init_esm_shims();
|
|
4322
|
-
/**
 * Reads a dollar-quote opener (`$$` or `$tag$`) starting at `index`.
 * @param {string} content
 * @param {number} index
 * @returns {string | undefined} the full opener token, or undefined.
 */
function readDollarTagAt(content, index) {
  if (content.charAt(index) !== "$") {
    return undefined;
  }
  const opener = /^\$([a-zA-Z_][a-zA-Z0-9_]*)?\$/.exec(content.slice(index));
  return opener ? opener[0] : undefined;
}
|
|
4327
|
-
/**
 * Advances the cursor while inside a single-quoted SQL literal: skips an
 * escaped `''` pair, or leaves the literal on a closing quote.
 * @param {string} content
 * @param {object} state - {cursor, inSingleQuote, ...}; mutated in place.
 * @returns {boolean} true when this consumer handled the position.
 */
function consumePolicySingleQuote(content, state) {
  if (!state.inSingleQuote) {
    return false;
  }
  const current = content[state.cursor] ?? "";
  const lookahead = content[state.cursor + 1] ?? "";
  if (current === "'" && lookahead === "'") {
    state.cursor += 2; // escaped quote stays inside the literal
    return true;
  }
  if (current === "'") {
    state.inSingleQuote = false;
  }
  state.cursor += 1;
  return true;
}
|
|
4341
|
-
/**
 * Advances the cursor while inside a double-quoted SQL identifier: skips an
 * escaped `""` pair, or leaves the identifier on a closing quote.
 * @param {string} content
 * @param {object} state - Mutated in place.
 * @returns {boolean} true when this consumer handled the position.
 */
function consumePolicyDoubleQuote(content, state) {
  if (!state.inDoubleQuote) {
    return false;
  }
  const current = content[state.cursor] ?? "";
  const lookahead = content[state.cursor + 1] ?? "";
  if (current === '"' && lookahead === '"') {
    state.cursor += 2; // escaped quote stays inside the identifier
    return true;
  }
  if (current === '"') {
    state.inDoubleQuote = false;
  }
  state.cursor += 1;
  return true;
}
|
|
4355
|
-
/**
 * Advances the cursor while inside a dollar-quoted body; only the exact
 * matching `$tag$` closer ends the quote.
 * @param {string} content
 * @param {object} state - Mutated in place.
 * @returns {boolean} true when this consumer handled the position.
 */
function consumePolicyDollarQuote(content, state) {
  if (!state.inDollarQuote) {
    return false;
  }
  const closer = `$${state.dollarTag}$`;
  if (content.startsWith(closer, state.cursor)) {
    state.inDollarQuote = false;
    state.dollarTag = "";
    state.cursor += closer.length;
  } else {
    state.cursor += 1;
  }
  return true;
}
|
|
4367
|
-
/**
 * Skips a `--` line comment (through the newline) when not inside any
 * quoted region.
 * @param {string} content
 * @param {object} state - Mutated in place.
 * @returns {boolean} true when a comment was skipped.
 */
function trySkipPolicyLineComment(content, state) {
  const quoted = state.inSingleQuote || state.inDoubleQuote || state.inDollarQuote;
  if (quoted || !content.startsWith("--", state.cursor)) {
    return false;
  }
  const newlineIndex = content.indexOf("\n", state.cursor);
  state.cursor = newlineIndex === -1 ? content.length : newlineIndex + 1;
  return true;
}
|
|
4376
|
-
/**
 * Skips a `/* ... *​/` block comment when not inside any quoted region; an
 * unterminated comment consumes the rest of the content.
 * @param {string} content
 * @param {object} state - Mutated in place.
 * @returns {boolean} true when a comment was skipped.
 */
function trySkipPolicyBlockComment(content, state) {
  const quoted = state.inSingleQuote || state.inDoubleQuote || state.inDollarQuote;
  if (quoted || !content.startsWith("/*", state.cursor)) {
    return false;
  }
  const closeIndex = content.indexOf("*/", state.cursor + 2);
  state.cursor = closeIndex === -1 ? content.length : closeIndex + 2;
  return true;
}
|
|
4385
|
-
/**
 * Opens a dollar-quoted region when the cursor sits on a `$tag$` opener and
 * no other quote mode is active.
 * @param {string} content
 * @param {object} state - Mutated in place.
 * @returns {boolean} true when a dollar quote was opened.
 */
function tryStartPolicyDollarQuote(content, state) {
  if (state.inSingleQuote || state.inDoubleQuote) {
    return false;
  }
  const openerToken = readDollarTagAt(content, state.cursor);
  if (!openerToken) {
    return false;
  }
  state.inDollarQuote = true;
  state.dollarTag = openerToken.slice(1, -1); // strip surrounding "$"
  state.cursor += openerToken.length;
  return true;
}
|
|
4394
|
-
/**
 * Opens a single-quoted literal when the cursor sits on `'` and no other
 * quote mode is active.
 * @param {string} content
 * @param {object} state - Mutated in place.
 * @returns {boolean} true when a single quote was opened.
 */
function tryStartPolicySingleQuote(content, state) {
  const blocked = state.inDoubleQuote || state.inDollarQuote;
  if (blocked || content[state.cursor] !== "'") {
    return false;
  }
  state.inSingleQuote = true;
  state.cursor += 1;
  return true;
}
|
|
4400
|
-
/**
 * Opens a double-quoted identifier when the cursor sits on `"` and no other
 * quote mode is active.
 * @param {string} content
 * @param {object} state - Mutated in place.
 * @returns {boolean} true when a double quote was opened.
 */
function tryStartPolicyDoubleQuote(content, state) {
  const blocked = state.inSingleQuote || state.inDollarQuote;
  if (blocked || content[state.cursor] !== '"') {
    return false;
  }
  state.inDoubleQuote = true;
  state.cursor += 1;
  return true;
}
|
|
4406
|
-
/**
 * Runs the SQL trivia handlers (in-quote consumers, comment skippers, quote
 * openers) in priority order; short-circuits on the first that advances.
 * @param {string} content
 * @param {object} state - Mutated in place by whichever handler fires.
 * @returns {boolean} true when some trivia was consumed.
 */
function consumePolicySqlTrivia(content, state) {
  const handlers = [
    consumePolicySingleQuote,
    consumePolicyDoubleQuote,
    consumePolicyDollarQuote,
    trySkipPolicyLineComment,
    trySkipPolicyBlockComment,
    tryStartPolicyDollarQuote,
    tryStartPolicySingleQuote,
    tryStartPolicyDoubleQuote
  ];
  return handlers.some((handler) => handler(content, state));
}
|
|
4416
|
-
/**
 * Walks `content` from `startIndex`, skipping SQL string literals, quoted
 * identifiers, dollar-quoted bodies and comments, and returns the index of
 * the first character outside all of those for which `predicate` is true.
 * @param {string} content
 * @param {number} startIndex
 * @param {(char: string, index: number) => boolean} predicate
 * @returns {number | undefined} matching index, or undefined when none.
 */
function findOutsideSqlCharIndex(content, startIndex, predicate) {
  const state = {
    cursor: startIndex,
    inSingleQuote: false,
    inDoubleQuote: false,
    inDollarQuote: false,
    dollarTag: ""
  };
  while (state.cursor < content.length) {
    // Trivia handlers advance state.cursor themselves when they match.
    if (consumePolicySqlTrivia(content, state)) continue;
    const char = content[state.cursor] ?? "";
    if (predicate(char, state.cursor)) return state.cursor;
    state.cursor += 1;
  }
  return void 0;
}
|
|
4432
|
-
/**
 * Extracts every CREATE POLICY statement from `content`, each running from
 * the keyword to its terminating semicolon (quote/comment aware) or to end
 * of input.
 * @param {string} content
 * @returns {string[]} trimmed statement texts.
 */
function extractCreatePolicyStatements(content) {
  const statements = [];
  const startRegex = /\bCREATE\s+POLICY\b/gi;
  for (let match = startRegex.exec(content); match !== null; match = startRegex.exec(content)) {
    const begin = match.index ?? 0;
    const end = findSqlStatementEndForPolicy(content, begin);
    statements.push(content.slice(begin, end).trim());
  }
  return statements;
}
|
|
4443
|
-
/**
 * Returns the exclusive end index of the statement starting at
 * `startIndex`: one past the first semicolon outside quotes/comments, or
 * the content length when unterminated.
 * @param {string} content
 * @param {number} startIndex
 */
function findSqlStatementEndForPolicy(content, startIndex) {
  const semicolonIndex = findOutsideSqlCharIndex(content, startIndex, (char) => char === ";");
  if (semicolonIndex === void 0) {
    return content.length;
  }
  return semicolonIndex + 1;
}
|
|
4447
|
-
/**
 * Extracts the contents of the first balanced parenthesized group at or
 * after `startIndex` in `statement`, ignoring parens inside SQL quotes and
 * comments.
 * @param {string} statement
 * @param {number} startIndex
 * @returns {string | undefined} trimmed inner text, or undefined when no
 *   balanced group is found.
 */
function extractBalancedClause(statement, startIndex) {
  const openParenIndex = statement.indexOf("(", startIndex);
  if (openParenIndex === -1) return void 0;
  // Depth tracking lives in the closure; the predicate fires on the close
  // paren that returns depth to zero.
  let depth = 0;
  let clauseStart = -1;
  const closeParenIndex = findOutsideSqlCharIndex(statement, openParenIndex, (char, index) => {
    if (char === "(") {
      if (depth === 0) clauseStart = index + 1; // body starts after the opener
      depth++;
      return false;
    }
    if (char === ")") {
      depth--;
      return depth === 0 && clauseStart !== -1;
    }
    return false;
  });
  if (closeParenIndex === void 0 || clauseStart === -1) return void 0;
  return statement.slice(clauseStart, closeParenIndex).trim();
}
|
|
4467
|
-
/**
 * Parses one CREATE POLICY statement into a policy descriptor, but only
 * when it targets `schema`.`tableName`.
 * @param {string} statement - Full CREATE POLICY statement text.
 * @param {RegExp} headerRegex - Header matcher; group 1/2 = quoted/bare
 *   policy name, group 3 = table reference, group 4 = optional FOR command.
 * @param {string} schema
 * @param {string} tableName
 * @returns {object | undefined} {name, command, using, withCheck} or
 *   undefined when the statement does not match or targets another table.
 */
function parsePolicyDefinitionFromStatement(statement, headerRegex, schema, tableName) {
  const match = statement.match(headerRegex);
  if (!match) return void 0;
  const policyName = unquoteIdentifier(match[1] ?? match[2] ?? "");
  const policyTableRef = parseTableReference(match[3] ?? "");
  if (!policyName || !policyTableRef) return void 0;
  if (policyTableRef.schema !== schema || policyTableRef.name !== tableName) return void 0;
  const usingIndex = statement.search(/\bUSING\s*\(/i);
  const withCheckIndex = statement.search(/\bWITH\s+CHECK\s*\(/i);
  return {
    name: policyName,
    // A policy with no FOR clause applies to ALL commands.
    command: (match[4] || "ALL").toUpperCase(),
    using: usingIndex !== -1 ? extractBalancedClause(statement, usingIndex) : void 0,
    withCheck: withCheckIndex !== -1 ? extractBalancedClause(statement, withCheckIndex) : void 0
  };
}
|
|
4483
|
-
/**
 * Collects the RLS policies in `content` that target `schema`.`tableName`.
 * @param {string} content - Full SQL source.
 * @param {string} schema
 * @param {string} tableName
 */
function parsePoliciesRegex(content, schema, tableName) {
  const headerRegex = new RegExp(
    `^\\s*CREATE\\s+POLICY\\s+(?:"((?:[^"]|"")*)"|([a-zA-Z_][a-zA-Z0-9_]*))\\s+ON\\s+(${TABLE_REFERENCE})(?:\\s+AS\\s+\\w+)?(?:\\s+FOR\\s+(\\w+))?`,
    "i"
  );
  const policies = [];
  for (const statement of extractCreatePolicyStatements(content)) {
    const parsed = parsePolicyDefinitionFromStatement(statement, headerRegex, schema, tableName);
    if (parsed) {
      policies.push(parsed);
    }
  }
  return policies;
}
|
|
4498
|
-
|
|
4499
|
-
// src/commands/db/utils/sql-table-extractor-ast.ts
|
|
4500
|
-
init_esm_shims();
|
|
4501
|
-
/**
 * Projects AST column records onto the plain output shape, or undefined
 * when column output is disabled.
 * @param {Array<object>} columns
 * @param {boolean} include
 */
function convertAstColumns(columns, include) {
  if (!include) return void 0;
  return columns.map(({ name, type, notNull, hasDefault, isPrimaryKey }) => ({
    name,
    type,
    notNull,
    hasDefault,
    isPrimaryKey
  }));
}
|
|
4511
|
-
/**
 * Projects AST foreign-key records onto the plain output shape; returns
 * undefined when disabled or when there are no foreign keys.
 * @param {Array<object>} fks
 * @param {boolean} include
 */
function convertAstForeignKeys(fks, include) {
  if (!include) return void 0;
  const converted = fks.map(({ column, referencesTable, referencesColumn, onDelete, onUpdate }) => ({
    column,
    referencesTable,
    referencesColumn,
    onDelete,
    onUpdate
  }));
  if (converted.length === 0) return void 0;
  return converted;
}
|
|
4522
|
-
/**
 * Projects AST index records onto the plain output shape; returns
 * undefined when disabled or when there are no indexes.
 * @param {Array<object>} indexes
 * @param {boolean} include
 */
function convertAstIndexes(indexes, include) {
  if (!include || indexes.length === 0) return void 0;
  return indexes.map(({ name, columns, isUnique }) => ({
    name,
    columns,
    isUnique
  }));
}
|
|
4530
|
-
/**
 * Extracts table metadata from SQL `content` using the optional AST parser,
 * enriching each table with regex-derived RLS information.
 * Returns [] when the AST parser is unavailable.
 *
 * @param {string} content - SQL source text.
 * @param {string} filePath - Recorded as each table's sourceFile.
 * @param {{includeColumns?: boolean, includeForeignKeys?: boolean,
 *          includeIndexes?: boolean, includeRlsPolicies?: boolean}} opts
 * @returns {Promise<Array<object>>}
 */
async function extractTablesWithAst(content, filePath, opts) {
  const parser = await getSqlParserUtils();
  if (!parser) return [];
  const astTables = await parser.parseCreateTables(content);
  const tables = [];
  for (const astTable of astTables) {
    if (opts.includeIndexes) {
      // NOTE(review): appears to populate astTable.indexes in place — the
      // return value is unused; confirm against the parser utils.
      await parser.parseIndexesForTables(content, [astTable]);
    }
    const hasRls = hasRlsEnabledRegex(content, astTable.schema, astTable.name);
    // Policies are only parsed when RLS is actually enabled for the table.
    const rlsPolicies = opts.includeRlsPolicies && hasRls ? parsePoliciesRegex(content, astTable.schema, astTable.name) : void 0;
    tables.push({
      schema: astTable.schema,
      name: astTable.name,
      qualifiedName: astTable.qualifiedName,
      semanticName: snakeToCamel2(astTable.name),
      sourceFile: filePath,
      lineNumber: astTable.lineNumber ?? 1,
      columns: convertAstColumns(astTable.columns, opts.includeColumns),
      primaryKey: astTable.primaryKey,
      foreignKeys: convertAstForeignKeys(astTable.foreignKeys, opts.includeForeignKeys),
      indexes: convertAstIndexes(astTable.indexes, opts.includeIndexes),
      hasRls,
      rlsPolicies: rlsPolicies?.length ? rlsPolicies : void 0
    });
  }
  return tables;
}
|
|
4558
|
-
|
|
4559
|
-
// src/commands/db/utils/sql-table-extractor.ts
// Lazily-loaded SQL parser utilities plus a memoized availability flag.
var sqlParserUtils = null;
var astAvailable = null;
// Probe once for the optional '@runa-ai/runa/ast' parser; any failure (package
// missing, loader error) is cached as "unavailable".
async function isAstParserAvailable() {
  if (astAvailable !== null) return astAvailable;
  try {
    const { loadSqlParserUtils } = await import('@runa-ai/runa/ast');
    sqlParserUtils = await loadSqlParserUtils();
    astAvailable = await sqlParserUtils.isSqlParserAvailable();
  } catch {
    astAvailable = false;
  }
  return astAvailable;
}
|
|
4575
|
-
// Return the cached parser utils, triggering the availability probe on first
// use; null when the parser cannot be loaded.
async function getSqlParserUtils() {
  if (!sqlParserUtils) {
    await isAstParserAvailable();
  }
  return sqlParserUtils;
}
|
|
4580
|
-
// Matches a single SQL identifier: double-quoted (with "" escapes) or bare.
var SQL_IDENTIFIER = '(?:"(?:[^"]|"")*"|[a-zA-Z_][a-zA-Z0-9_]*)';
// Optionally schema-qualified reference: ident or ident.ident.
var TABLE_REFERENCE = `${SQL_IDENTIFIER}(?:\\s*\\.\\s*${SQL_IDENTIFIER})?`;
// Global tokenizer form of SQL_IDENTIFIER; consumers re-wrap its .source in a
// fresh RegExp, so the stateful /g lastIndex is never shared between calls.
var TABLE_IDENTIFIER = /(?:"(?:[^"]|"")*"|[a-zA-Z_][a-zA-Z0-9_]*)/g;
// Case-insensitive scanners for the DDL constructs the regex fallback understands.
var SQL_PATTERNS = {
  // CREATE TABLE [IF NOT EXISTS] schema.table_name (
  createTable: new RegExp(
    `CREATE\\s+TABLE\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(${TABLE_REFERENCE})\\s*\\(`,
    "gi"
  ),
  // PRIMARY KEY (columns)
  primaryKey: /PRIMARY\s+KEY\s*\(([^)]+)\)/gi,
  // FOREIGN KEY (column) REFERENCES schema.table(column) [ON DELETE|UPDATE ...]
  foreignKey: new RegExp(
    `FOREIGN\\s+KEY\\s*\\((${SQL_IDENTIFIER})\\)\\s*REFERENCES\\s+(${TABLE_REFERENCE})\\s*\\((${SQL_IDENTIFIER})\\)(?:\\s+ON\\s+DELETE\\s+(\\w+(?:\\s+\\w+)?))?(?:\\s+ON\\s+UPDATE\\s+(\\w+(?:\\s+\\w+)?))?`,
    "gi"
  ),
  // CREATE [UNIQUE] INDEX name ON schema.table (columns)
  createIndex: new RegExp(
    `CREATE\\s+(UNIQUE\\s+)?INDEX\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(${SQL_IDENTIFIER})\\s+ON\\s+(${TABLE_REFERENCE})(?:\\s+USING\\s+\\w+)?\\s*\\(([^)]+)\\)`,
    "gi"
  ),
  // ALTER TABLE ... ENABLE ROW LEVEL SECURITY
  enableRls: new RegExp(
    `ALTER\\s+TABLE\\s+(${TABLE_REFERENCE})\\s+ENABLE\\s+ROW\\s+LEVEL\\s+SECURITY`,
    "gi"
  )
};
|
|
4607
|
-
// Fresh scanner state for splitByTopLevelComma: paren depth, the three SQL
// quote modes, and the start offset of the chunk being accumulated.
function createTopLevelSplitState() {
  const state = {
    depth: 0,
    dollarTag: "",
    inDollarQuote: false,
    inDoubleQuote: false,
    inSingleQuote: false,
    start: 0
  };
  return state;
}
|
|
4617
|
-
// Handle a possible dollar-quote boundary ($tag$ ... $tag$) at `index`.
// Returns the index of the token's last character (the caller resumes after
// it), or null when this position is not a dollar-quote boundary.
function consumeDollarQuoteToken(content, index, state) {
  const ch = content[index] ?? "";
  if (ch !== "$" || state.inSingleQuote || state.inDoubleQuote) return null;
  const rest = content.slice(index);
  if (state.inDollarQuote) {
    // Only the exact opening tag closes the quote.
    const closer = `$${state.dollarTag}$`;
    if (!rest.startsWith(closer)) return null;
    state.inDollarQuote = false;
    state.dollarTag = "";
    return index + closer.length - 1;
  }
  const opener = rest.match(/^\$([a-zA-Z_][a-zA-Z0-9_]*)?\$/);
  if (!opener) return null;
  state.inDollarQuote = true;
  state.dollarTag = opener[1] ?? "";
  return index + opener[0].length - 1;
}
|
|
4638
|
-
// Handle a possible single-quote boundary at `index`; '' inside a string is an
// escaped quote and is skipped as one token. Returns the last consumed index,
// or null when the character is not a relevant quote here.
function consumeSingleQuoteToken(content, index, state) {
  const isQuoteChar = (content[index] ?? "") === "'";
  if (!isQuoteChar || state.inDoubleQuote || state.inDollarQuote) return null;
  const escaped = state.inSingleQuote && (content[index + 1] ?? "") === "'";
  if (escaped) return index + 1;
  state.inSingleQuote = !state.inSingleQuote;
  return index;
}
|
|
4648
|
-
// Handle a possible double-quote boundary at `index`; "" inside a quoted
// identifier is an escaped quote consumed as one token.
function consumeDoubleQuoteToken(content, index, state) {
  if ((content[index] ?? "") !== '"') return null;
  if (state.inSingleQuote || state.inDollarQuote) return null;
  if (state.inDoubleQuote && (content[index + 1] ?? "") === '"') {
    return index + 1;
  }
  state.inDoubleQuote = !state.inDoubleQuote;
  return index;
}
|
|
4658
|
-
// Try each quoting mechanism in turn (dollar, single, double); the first
// handler that consumes the position wins.
function consumeQuoteToken(content, index, state) {
  const handlers = [consumeDollarQuoteToken, consumeSingleQuoteToken, consumeDoubleQuoteToken];
  for (const handler of handlers) {
    const consumedUpTo = handler(content, index, state);
    if (consumedUpTo !== null) return consumedUpTo;
  }
  return null;
}
|
|
4669
|
-
// True while the scanner is inside any SQL quoting construct.
function isInQuotedScope(state) {
  return [state.inSingleQuote, state.inDoubleQuote, state.inDollarQuote].some(Boolean);
}
|
|
4672
|
-
// Track parenthesis depth and, at depth 0, cut a chunk at each comma.
// Unmatched ')' below depth 0 is ignored rather than going negative.
function processStructuralToken(content, index, state, chunks) {
  switch (content[index] ?? "") {
    case "(":
      state.depth += 1;
      break;
    case ")":
      if (state.depth > 0) state.depth -= 1;
      break;
    case ",":
      if (state.depth === 0) {
        chunks.push(content.slice(state.start, index));
        state.start = index + 1;
      }
      break;
    default:
      break;
  }
}
|
|
4687
|
-
// Split `content` on commas that sit outside every paren group and quote form;
// chunks are trimmed and empties dropped.
function splitByTopLevelComma(content) {
  const chunks = [];
  const state = createTopLevelSplitState();
  let i = 0;
  while (i < content.length) {
    const consumedUpTo = consumeQuoteToken(content, i, state);
    if (consumedUpTo !== null) {
      i = consumedUpTo + 1;
      continue;
    }
    if (!isInQuotedScope(state)) {
      processStructuralToken(content, i, state, chunks);
    }
    i += 1;
  }
  chunks.push(content.slice(state.start));
  const trimmed = chunks.map((chunk) => chunk.trim());
  return trimmed.filter((chunk) => chunk.length > 0);
}
|
|
4702
|
-
// Alias kept for call sites that think in statements. NOTE(review): despite
// the name this splits on top-level commas, not semicolons — confirm intent.
function splitTopLevelSqlStatements(content) {
  return splitByTopLevelComma(content);
}
|
|
4705
|
-
// CSV-flavoured alias of the top-level comma splitter (used for column lists).
function splitTopLevelCsv(content) {
  return splitByTopLevelComma(content);
}
|
|
4708
|
-
// Recursively gather every *.sql file underneath `dir` (depth-first).
function collectSqlFiles(dir) {
  const found = [];
  for (const entry of readdirSync(dir, { withFileTypes: true })) {
    const entryPath = join(dir, entry.name);
    if (entry.isDirectory()) {
      for (const nested of collectSqlFiles(entryPath)) {
        found.push(nested);
      }
    } else if (entry.isFile() && entryPath.endsWith(".sql")) {
      found.push(entryPath);
    }
  }
  return found;
}
|
|
4721
|
-
// snake_case -> camelCase (only lowercase letters directly after "_" convert).
function snakeToCamel2(str) {
  return str.replace(/_([a-z])/g, (_match, letter) => letter.toUpperCase());
}
|
|
4724
|
-
// 1-based line number of character offset `position` within `content`.
function getLineNumber(content, position) {
  let line = 1;
  for (let i = 0; i < position; i++) {
    if (content[i] === "\n") line += 1;
  }
  return line;
}
|
|
4727
|
-
// Return the text between the first balanced ( ... ) pair at or after
// `startPos`, or "" when no balanced pair is found.
function extractTableBody(content, startPos) {
  let depth = 0;
  let open = -1;
  for (let i = startPos; i < content.length; i++) {
    const ch = content[i];
    if (ch === "(") {
      if (depth === 0) open = i + 1;
      depth += 1;
    } else if (ch === ")") {
      depth -= 1;
      if (depth === 0) {
        // Matched the opening paren recorded at `open`.
        return content.substring(open, i);
      }
    }
  }
  return "";
}
|
|
4751
|
-
// Canonicalize a SQL type string: collapse whitespace, lowercase, and shorten
// the verbose Postgres spellings (first occurrence only, as before).
function normalizeType(type) {
  let normalized = type.trim().replace(/\s+/g, " ").toLowerCase();
  normalized = normalized.replace("character varying", "varchar");
  normalized = normalized.replace("timestamp with time zone", "timestamptz");
  normalized = normalized.replace("timestamp without time zone", "timestamp");
  return normalized;
}
|
|
4754
|
-
// Strip surrounding double quotes and collapse doubled quotes; unquoted
// identifiers pass through trimmed.
function unquoteIdentifier(identifier) {
  const candidate = identifier.trim();
  const quoted = candidate.startsWith('"') && candidate.endsWith('"');
  if (!quoted) return candidate;
  return candidate.slice(1, -1).split('""').join('"');
}
|
|
4761
|
-
// Resolve a (possibly schema-qualified, possibly quoted) table reference into
// { schema, name }; schema defaults to "public"; null for other token counts.
function parseTableReference(ref) {
  const pattern = new RegExp(TABLE_IDENTIFIER.source, "g");
  const tokens = [];
  for (const match of ref.matchAll(pattern)) {
    tokens.push(unquoteIdentifier(match[0] ?? ""));
  }
  switch (tokens.length) {
    case 1:
      return { schema: "public", name: tokens[0] ?? "" };
    case 2:
      return { schema: tokens[0] ?? "", name: tokens[1] ?? "" };
    default:
      return null;
  }
}
|
|
4773
|
-
// Extract the leading column name from each top-level CSV item of an index
// column list; supports quoted and bare identifiers, drops expression items.
function parseIndexColumns(rawColumns) {
  const names = [];
  for (const item of splitTopLevelCsv(rawColumns)) {
    const trimmed = item.trim();
    const quoted = trimmed.match(/^"([^"]+)"/);
    if (quoted) {
      names.push(quoted[1]);
      continue;
    }
    const bare = trimmed.match(/^(\w+)/);
    names.push(bare ? bare[1] : "");
  }
  return names.filter(Boolean);
}
|
|
4787
|
-
// Normalize an FK referential action to its canonical uppercase form;
// unrecognized or missing actions become undefined.
function normalizeOnAction(action) {
  if (!action) return void 0;
  const canonical = action.toUpperCase().replace(/\s+/g, " ").trim();
  const allowed = ["CASCADE", "SET NULL", "SET DEFAULT", "RESTRICT", "NO ACTION"];
  return allowed.includes(canonical) ? canonical : void 0;
}
|
|
4805
|
-
// Fill extraction options with defaults: everything included unless the caller
// explicitly disables it.
function resolveOptions(options) {
  const defaults = {
    includeColumns: true,
    includeForeignKeys: true,
    includeIndexes: true,
    includeRlsPolicies: true
  };
  return {
    includeColumns: options.includeColumns ?? defaults.includeColumns,
    includeForeignKeys: options.includeForeignKeys ?? defaults.includeForeignKeys,
    includeIndexes: options.includeIndexes ?? defaults.includeIndexes,
    includeRlsPolicies: options.includeRlsPolicies ?? defaults.includeRlsPolicies
  };
}
|
|
4813
|
-
// Build a registry TableEntry from a regex-parsed CREATE TABLE match.
// `table` carries schema/name/tableBody/lineNumber (per the usages below);
// `content` is the comment-stripped file text used for index/RLS lookups.
function buildTableEntryRegex(table, content, filePath, opts) {
  const qualifiedName = `${table.schema}.${table.name}`;
  // Table-level PRIMARY KEY (...) columns parsed from the body.
  const pkFromBody = parsePrimaryKeyRegex(table.tableBody);
  let columns;
  if (opts.includeColumns) {
    columns = parseColumnsRegex(table.tableBody);
    // Flag columns that participate in the table-level primary key.
    for (const col of columns) {
      if (pkFromBody.includes(col.name)) {
        col.isPrimaryKey = true;
      }
    }
  }
  const foreignKeys = opts.includeForeignKeys ? parseForeignKeysRegex(table.tableBody) : void 0;
  const indexes = opts.includeIndexes ? parseIndexesRegex(content, table.schema, table.name) : void 0;
  const hasRls = hasRlsEnabledRegex(content, table.schema, table.name);
  const rlsPolicies = opts.includeRlsPolicies && hasRls ? parsePoliciesRegex(content, table.schema, table.name) : void 0;
  return {
    schema: table.schema,
    name: table.name,
    qualifiedName,
    semanticName: snakeToCamel2(table.name),
    sourceFile: filePath,
    lineNumber: table.lineNumber,
    columns,
    // Empty collections collapse to undefined so the manifest stays sparse.
    primaryKey: pkFromBody.length > 0 ? pkFromBody : void 0,
    foreignKeys: foreignKeys?.length ? foreignKeys : void 0,
    indexes: indexes?.length ? indexes : void 0,
    hasRls,
    rlsPolicies: rlsPolicies?.length ? rlsPolicies : void 0
  };
}
|
|
4844
|
-
// Regex fallback path: parse one SQL file into table entries, de-duplicating
// against the shared `seen` set of qualified names.
function processTablesFromFileRegex(filePath, opts, seen) {
  const raw = readFileSync(filePath, "utf-8");
  const content = stripSqlComments(raw);
  const ctx = { content, lines: content.split("\n") };
  const entries = [];
  for (const table of findTablesRegex(ctx)) {
    const qualifiedName = `${table.schema}.${table.name}`;
    if (seen?.has(qualifiedName)) continue;
    seen?.add(qualifiedName);
    entries.push(buildTableEntryRegex(table, content, filePath, opts));
  }
  return entries;
}
|
|
4858
|
-
// Prefer the AST extractor when the parser is available, falling back to the
// regex path when it is missing or yields no tables for this file.
async function processTablesFromFile(filePath, opts, seen) {
  const content = readFileSync(filePath, "utf-8");
  if (await isAstParserAvailable()) {
    const astTables = await extractTablesWithAst(content, filePath, opts);
    if (astTables.length > 0) {
      const fresh = [];
      for (const table of astTables) {
        if (!seen?.has(table.qualifiedName)) {
          seen?.add(table.qualifiedName);
          fresh.push(table);
        }
      }
      return fresh;
    }
  }
  return processTablesFromFileRegex(filePath, opts, seen);
}
|
|
4874
|
-
// Top-level API: walk a schema SQL directory (sorted for determinism) and
// extract table entries, de-duplicating tables seen across files.
async function extractTablesFromSqlDir(sqlDir, options = {}) {
  if (!existsSync(sqlDir)) return [];
  const opts = resolveOptions(options);
  const seen = /* @__PURE__ */ new Set();
  const collected = [];
  for (const filePath of collectSqlFiles(sqlDir).sort()) {
    const entries = await processTablesFromFile(filePath, opts, seen);
    collected.push(...entries);
  }
  return collected;
}
|
|
4887
|
-
|
|
4888
|
-
// src/commands/db/utils/table-source-classifier.ts
|
|
4889
|
-
init_esm_shims();
|
|
4890
|
-
// Split "schema.table" into its parts. NOTE(review): an unqualified input
// lands entirely in `schema` with an empty `table` — confirm callers always
// pass qualified names.
function splitQualifiedName(qualifiedName) {
  const parts = qualifiedName.split(".", 2);
  return { schema: parts[0] ?? "", table: parts[1] ?? "" };
}
|
|
4894
|
-
// Escape regex metacharacters so `value` matches itself literally in a RegExp.
function escapeRegexLiteral(value) {
  return value.replace(/[.*+?^${}()|[\]\\]/g, (ch) => `\\${ch}`);
}
|
|
4897
|
-
// Compile glob-ish patterns ("*" wildcard) into a predicate over qualified
// table names. Patterns containing "." match the full qualified name;
// patterns without match only the bare table name.
function buildTablePatternMatcher(patterns) {
  const compiled = [];
  for (const raw of patterns) {
    const pattern = raw.trim();
    if (pattern.length === 0) continue;
    compiled.push({
      target: pattern.includes(".") ? "qualified" : "table",
      regex: new RegExp(`^${escapeRegexLiteral(pattern).replace(/\\\*/g, ".*")}$`)
    });
  }
  return (qualifiedName) => {
    const { table } = splitQualifiedName(qualifiedName);
    return compiled.some(
      (entry) => entry.regex.test(entry.target === "qualified" ? qualifiedName : table)
    );
  };
}
|
|
4914
|
-
// Walk the partition-parent chain from `table` upward until a table managed by
// idempotent SQL is found; a visited set guards against parent-map cycles.
// Returns the managed table/ancestor name, or null.
function findIdempotentAncestor(table, partitionParentMap, idempotentManagedTables) {
  const visited = /* @__PURE__ */ new Set();
  let node = table;
  while (true) {
    if (idempotentManagedTables.has(node)) return node;
    if (visited.has(node)) return null;
    visited.add(node);
    const parent = partitionParentMap.get(node);
    if (!parent) return null;
    node = parent;
  }
}
|
|
4933
|
-
// A table is system-managed when its schema is reserved or its fully-qualified
// name appears in the known system-table set.
function isSystemManagedTable(params) {
  const { schema } = splitQualifiedName(params.qualifiedName);
  if (params.systemSchemas.has(schema)) return true;
  return params.knownSystemTables.has(params.qualifiedName);
}
|
|
4937
|
-
// Classify a sourceless table as idempotent-managed when it (or a partition
// ancestor) is created by idempotent dynamic DDL; null otherwise.
function classifyIdempotentManagedTable(params) {
  const ancestor = findIdempotentAncestor(
    params.qualifiedName,
    params.partitionParentMap,
    params.idempotentManagedTables
  );
  if (!ancestor) return null;
  const detail = ancestor === params.qualifiedName
    ? "matched CREATE TABLE in idempotent SQL"
    : `partition child of ${ancestor}`;
  return { qualifiedName: params.qualifiedName, detail };
}
|
|
4951
|
-
// Attribute a sourceless table to an extension, the system catalog, or an
// explicit allowlist entry — in that precedence order; null when none apply.
function classifyExtensionSystemOrAllowlistedTable(params) {
  const { qualifiedName } = params;
  const extensionName = params.extensionManagedTables.get(qualifiedName);
  if (extensionName) {
    return { qualifiedName, detail: `managed by extension "${extensionName}"` };
  }
  const system = isSystemManagedTable({
    qualifiedName,
    systemSchemas: params.systemSchemas,
    knownSystemTables: params.knownSystemTables
  });
  if (system) {
    return { qualifiedName, detail: "system-managed schema/table" };
  }
  if (params.exclusionMatcher(qualifiedName)) {
    return {
      qualifiedName,
      detail: "allowlisted by database.pgSchemaDiff.excludeFromOrphanDetection"
    };
  }
  return null;
}
|
|
4977
|
-
// Bucket every table that has no SQL source file into one of three categories:
// created by idempotent dynamic DDL, owned by an extension/system/allowlist,
// or truly orphaned (needs operator attention).
function classifyMissingSourceTables(params) {
  // Optional inputs default to empty collections so callers can omit them.
  const extensionManagedTables = params.extensionManagedTables ?? /* @__PURE__ */ new Map();
  const partitionParentMap = params.partitionParentMap ?? /* @__PURE__ */ new Map();
  const exclusionMatcher = buildTablePatternMatcher(params.excludeFromOrphanDetection ?? []);
  const systemSchemas = new Set(params.systemSchemas ?? []);
  const knownSystemTables = new Set(params.knownSystemTables ?? []);
  const classified = {
    definedInIdempotentDynamicDdl: [],
    extensionManagedOrSystemTable: [],
    trulyOrphaned: []
  };
  for (const qualifiedName of params.tablesWithoutSource) {
    // Idempotent-DDL attribution takes precedence over extension/system/allowlist.
    const idempotentManagedItem = classifyIdempotentManagedTable({
      qualifiedName,
      partitionParentMap,
      idempotentManagedTables: params.idempotentManagedTables
    });
    if (idempotentManagedItem) {
      classified.definedInIdempotentDynamicDdl.push(idempotentManagedItem);
      continue;
    }
    const extensionSystemOrAllowlistedItem = classifyExtensionSystemOrAllowlistedTable({
      qualifiedName,
      extensionManagedTables,
      systemSchemas,
      knownSystemTables,
      exclusionMatcher
    });
    if (extensionSystemOrAllowlistedItem) {
      classified.extensionManagedOrSystemTable.push(extensionSystemOrAllowlistedItem);
      continue;
    }
    // Nothing claimed ownership: report as orphaned.
    classified.trulyOrphaned.push(qualifiedName);
  }
  return classified;
}
|
|
5013
|
-
|
|
5014
|
-
// src/commands/db/utils/table-registry-introspection.ts
|
|
5015
|
-
init_esm_shims();
|
|
5016
|
-
// Unquoted Postgres identifier: letter/underscore start, max 63 chars total.
var VALID_PG_IDENTIFIER = /^[a-zA-Z_][a-zA-Z0-9_]{0,62}$/;
// Throw unless `name` is a syntactically valid unquoted Postgres identifier;
// `context` labels the error message (e.g. "schema name").
function validatePgIdentifier2(name, context) {
  const missing = !name || typeof name !== "string";
  if (missing) {
    throw new Error(`Invalid ${context}: empty or not a string`);
  }
  if (VALID_PG_IDENTIFIER.test(name)) return;
  throw new Error(
    `Invalid ${context} "${name}": must start with letter/underscore and contain only alphanumeric/underscore characters`
  );
}
|
|
5027
|
-
// Build a single-quoted, comma-joined IN-clause body from schema names,
// validating each one first. The quote-doubling replace is defensive: the
// identifier check already rejects quotes.
function buildSafeSchemaInClause2(schemas) {
  if (schemas.length === 0) {
    throw new Error("No schemas provided for IN clause");
  }
  return schemas
    .map((schema) => {
      validatePgIdentifier2(schema, "schema name");
      return `'${schema.replace(/'/g, "''")}'`;
    })
    .join(",");
}
|
|
5038
|
-
// Introspect live-database tables for the given schemas, translating common
// low-level failures (refused connection, auth, permissions) into actionable
// CLI error messages. The password portion of the URL is masked before display.
async function introspectTablesFromDb(databaseUrl, schemas) {
  try {
    const result = await introspectDatabase(databaseUrl, { schemas });
    return convertIntrospectionToTableEntries(result);
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    // Classify by substring matching on the driver's error message.
    if (message.includes("ECONNREFUSED") || message.includes("connection refused")) {
      throw new Error(
        `[DB Introspection] Cannot connect to database.
URL: ${databaseUrl.replace(/:[^:@]+@/, ":***@")}
Cause: ${message}

Solutions:
1. Ensure database is running: runa check --fix
2. Check DATABASE_URL in .env.development`
      );
    }
    if (message.includes("authentication") || message.includes("password")) {
      throw new Error(
        `[DB Introspection] Database authentication failed.
Cause: ${message}

Solutions:
1. Check DATABASE_URL credentials
2. Verify database user has SELECT on pg_catalog`
      );
    }
    if (message.includes("permission") || message.includes("denied")) {
      throw new Error(
        `[DB Introspection] Permission denied.
Cause: ${message}

Solutions:
1. Ensure database user has SELECT on information_schema
2. Ensure database user has SELECT on pg_catalog`
      );
    }
    // Fallback for anything unrecognized.
    throw new Error(
      `[DB Introspection] Failed to introspect database.
Cause: ${message}

Solutions:
1. Run: runa check --verbose
2. Verify database is running and accessible`
    );
  }
}
|
|
5085
|
-
// Group introspected foreign-key rows by "schema.table".
function buildForeignKeyMap(foreignKeys) {
  const fksByTable = /* @__PURE__ */ new Map();
  for (const fk of foreignKeys) {
    const key = `${fk.schemaName}.${fk.tableName}`;
    const entry = {
      column: fk.columnName,
      referencesTable: `${fk.referencedSchema}.${fk.referencedTable}`,
      referencesColumn: fk.referencedColumn,
      onDelete: normalizeOnAction2(fk.onDelete),
      onUpdate: normalizeOnAction2(fk.onUpdate)
    };
    const bucket = fksByTable.get(key);
    if (bucket) {
      bucket.push(entry);
    } else {
      fksByTable.set(key, [entry]);
    }
  }
  return fksByTable;
}
|
|
5100
|
-
// Group introspected index rows by "schema.table".
function buildIndexMap(indexes) {
  const indexesByTable = /* @__PURE__ */ new Map();
  for (const idx of indexes) {
    const key = `${idx.schemaName}.${idx.tableName}`;
    const entry = { name: idx.indexName, columns: idx.columns, isUnique: idx.isUnique };
    const bucket = indexesByTable.get(key);
    if (bucket) {
      bucket.push(entry);
    } else {
      indexesByTable.set(key, [entry]);
    }
  }
  return indexesByTable;
}
|
|
5113
|
-
// Map "schema.table" -> whether row level security is enabled.
function buildRlsEnabledMap(rlsTables) {
  const rlsByTable = /* @__PURE__ */ new Map();
  for (const { schemaName, tableName, rlsEnabled } of rlsTables) {
    rlsByTable.set(`${schemaName}.${tableName}`, rlsEnabled);
  }
  return rlsByTable;
}
|
|
5120
|
-
// Group RLS policy rows by "schema.table"; null expressions become undefined.
function buildRlsPoliciesMap(rlsPolicies) {
  const rlsPoliciesByTable = /* @__PURE__ */ new Map();
  for (const policy of rlsPolicies) {
    const key = `${policy.schemaName}.${policy.tableName}`;
    const entry = {
      name: policy.policyName,
      command: policy.command,
      using: policy.usingExpr ?? void 0,
      withCheck: policy.withCheckExpr ?? void 0
    };
    const bucket = rlsPoliciesByTable.get(key);
    if (bucket) {
      bucket.push(entry);
    } else {
      rlsPoliciesByTable.set(key, [entry]);
    }
  }
  return rlsPoliciesByTable;
}
|
|
5134
|
-
// Group CHECK constraints by table, warning about and skipping rows missing a
// name or definition.
function buildCheckConstraintsMap(checkConstraints) {
  const checksByTable = /* @__PURE__ */ new Map();
  for (const check of checkConstraints) {
    if (!check.name || !check.definition) {
      console.warn(
        `[DB Introspection] Skipping CHECK constraint with missing name or definition in ${check.schemaName}.${check.tableName}`
      );
      continue;
    }
    const key = `${check.schemaName}.${check.tableName}`;
    const entry = {
      name: check.name,
      definition: check.definition,
      columns: check.columns ?? []
    };
    const bucket = checksByTable.get(key);
    if (bucket) {
      bucket.push(entry);
    } else {
      checksByTable.set(key, [entry]);
    }
  }
  return checksByTable;
}
|
|
5153
|
-
// Group triggers by table, extracting the invoked function name from the
// EXECUTE FUNCTION/PROCEDURE action statement (raw statement as fallback).
// Rows with no action statement are warned about and skipped.
function buildTriggersMap(triggers) {
  const triggersByTable = /* @__PURE__ */ new Map();
  for (const trigger of triggers) {
    if (!trigger.actionStatement) {
      console.warn(
        `[DB Introspection] Skipping trigger ${trigger.triggerName} with missing actionStatement`
      );
      continue;
    }
    const key = `${trigger.schemaName}.${trigger.tableName}`;
    const functionMatch = trigger.actionStatement.match(
      /EXECUTE\s+(?:FUNCTION|PROCEDURE)\s+([^\s(]+)/i
    );
    const entry = {
      name: trigger.triggerName,
      event: trigger.eventManipulation,
      timing: trigger.actionTiming,
      function: functionMatch?.[1] ?? trigger.actionStatement
    };
    const bucket = triggersByTable.get(key);
    if (bucket) {
      bucket.push(entry);
    } else {
      triggersByTable.set(key, [entry]);
    }
  }
  return triggersByTable;
}
|
|
5177
|
-
// Heuristic PK map: treat any index named *_pkey as the table's primary key
// and record its columns under "schema.table".
function buildPrimaryKeyMap(indexes) {
  const pkByTable = /* @__PURE__ */ new Map();
  for (const idx of indexes) {
    if (!idx.indexName.endsWith("_pkey")) continue;
    pkByTable.set(`${idx.schemaName}.${idx.tableName}`, idx.columns);
  }
  return pkByTable;
}
|
|
5187
|
-
// Convert a raw introspection result into registry TableEntry objects by
// joining the per-table FK/index/RLS/check/trigger lookup maps built above.
function convertIntrospectionToTableEntries(result) {
  const fksByTable = buildForeignKeyMap(result.foreignKeys);
  const indexesByTable = buildIndexMap(result.indexes);
  const rlsByTable = buildRlsEnabledMap(result.rlsTables);
  const rlsPoliciesByTable = buildRlsPoliciesMap(result.rlsPolicies);
  const checksByTable = buildCheckConstraintsMap(result.checkConstraints);
  const triggersByTable = buildTriggersMap(result.triggers);
  // NOTE(review): PK detection uses the "*_pkey" index-name heuristic — may
  // miss custom-named PK constraints; confirm acceptable.
  const pkByTable = buildPrimaryKeyMap(result.indexes);
  return result.tables.map((table) => {
    const qualifiedName = `${table.schema}.${table.name}`;
    const primaryKeyColumns = pkByTable.get(qualifiedName) ?? [];
    const columns = table.columns.map((col) => ({
      name: col.name,
      type: col.type,
      notNull: !col.nullable,
      hasDefault: col.default !== null && col.default !== void 0,
      isPrimaryKey: primaryKeyColumns.includes(col.name)
    }));
    return {
      schema: table.schema,
      name: table.name,
      qualifiedName,
      semanticName: "",
      // Will be set by semantic mapper
      sourceFile: "",
      // Will be set by SQL file mapping
      columns,
      // Prefer the heuristic PK; fall back to the introspected primaryKey field.
      primaryKey: pkByTable.get(qualifiedName) ?? table.primaryKey,
      foreignKeys: fksByTable.get(qualifiedName) ?? [],
      indexes: indexesByTable.get(qualifiedName) ?? [],
      hasRls: rlsByTable.get(qualifiedName) ?? false,
      rlsPolicies: rlsPoliciesByTable.get(qualifiedName) ?? [],
      checkConstraints: checksByTable.get(qualifiedName) ?? [],
      triggers: triggersByTable.get(qualifiedName) ?? []
    };
  });
}
|
|
5224
|
-
// Bundler-duplicated copy of normalizeOnAction: canonicalize an FK referential
// action to uppercase, or undefined when unrecognized/missing.
function normalizeOnAction2(action) {
  if (!action) return void 0;
  const canonical = action.toUpperCase().replace(/\s+/g, " ").trim();
  const recognized = new Set(["CASCADE", "SET NULL", "SET DEFAULT", "RESTRICT", "NO ACTION"]);
  return recognized.has(canonical) ? canonical : void 0;
}
|
|
5242
|
-
// Skip non-table exports from a drizzle schema module (validation schemas,
// enums, relations) when scanning for table objects.
function shouldSkipDrizzleExport(exportName) {
  return ["Schema", "Enum", "Relations"].some((suffix) => exportName.endsWith(suffix));
}
|
|
5245
|
-
// Collect (optionally schema-qualified) table names from a drizzle schema
// module by inspecting the internal `_` metadata of each exported object.
function extractDrizzleTableNames(schemaModule) {
  const drizzleTables = /* @__PURE__ */ new Set();
  for (const [exportName, exportValue] of Object.entries(schemaModule)) {
    if (shouldSkipDrizzleExport(exportName)) continue;
    const meta = exportValue?._;
    if (!meta?.name) continue;
    drizzleTables.add(meta.schema ? `${meta.schema}.${meta.name}` : meta.name);
  }
  return drizzleTables;
}
|
|
5257
|
-
// Partition SQL tables into drizzle-matched and SQL-only sets, and list
// drizzle table names with no SQL counterpart.
function compareTables(sqlTables, drizzleTables) {
  const matched = sqlTables.filter((t) => drizzleTables.has(t.qualifiedName));
  const sqlOnly = sqlTables.filter((t) => !drizzleTables.has(t.qualifiedName));
  const sqlFullNames = new Set(sqlTables.map((t) => t.qualifiedName));
  const drizzleOnly = [];
  for (const name of drizzleTables) {
    if (!sqlFullNames.has(name)) drizzleOnly.push(name);
  }
  return { matched, sqlOnly, drizzleOnly };
}
|
|
5271
|
-
// Cross-check SQL tables against a drizzle schema module. A missing file or a
// failed module load degrades to "everything is SQL-only" (deliberate
// best-effort behavior).
async function crossCheckWithDrizzle(sqlTables, drizzleSchemaPath) {
  if (!existsSync(drizzleSchemaPath)) {
    return { matched: [], sqlOnly: sqlTables, drizzleOnly: [] };
  }
  try {
    const schemaModule = await import(drizzleSchemaPath);
    return compareTables(sqlTables, extractDrizzleTableNames(schemaModule));
  } catch {
    return { matched: [], sqlOnly: sqlTables, drizzleOnly: [] };
  }
}
|
|
5283
|
-
|
|
5284
|
-
// src/commands/db/utils/table-registry.ts
// Manifest format version; bump when the on-disk registry shape changes.
var MANIFEST_VERSION = 2;
var GENERATOR_VERSION = "1.0.0";
// Default location of idempotent (dynamic DDL) schema SQL, relative to project root.
var DEFAULT_IDEMPOTENT_SQL_DIR = "supabase/schemas/idempotent";
// Extension-owned tables (e.g. PostGIS metadata) that never have SQL sources.
var KNOWN_EXTENSION_SYSTEM_TABLES = /* @__PURE__ */ new Set([
  "public.spatial_ref_sys",
  "public.geometry_columns",
  "public.geography_columns"
]);
var SUPABASE_SYSTEM_SCHEMA_SET = new Set(SUPABASE_SYSTEM_SCHEMAS);
|
|
5294
|
-
function toRelativeSourcePath(projectRoot, sourceFile) {
|
|
5295
|
-
let relativeSource = relative(projectRoot, sourceFile);
|
|
5296
|
-
if (relativeSource.startsWith("/") || relativeSource.startsWith("..")) {
|
|
5297
|
-
const schemaMatch = sourceFile.match(/supabase\/schemas\/[^/]+\/[^/]+$/);
|
|
5298
|
-
relativeSource = schemaMatch ? schemaMatch[0] : sourceFile;
|
|
5299
|
-
}
|
|
5300
|
-
return relativeSource;
|
|
5301
|
-
}
|
|
5302
|
-
function resolveSourceConfig(projectRoot, options) {
|
|
5303
|
-
let idempotentSqlDir = options.idempotentSqlDir ?? DEFAULT_IDEMPOTENT_SQL_DIR;
|
|
5304
|
-
const exclusions = new Set(options.excludeFromOrphanDetection ?? []);
|
|
5305
|
-
try {
|
|
5306
|
-
const config = loadRunaConfig(projectRoot);
|
|
5307
|
-
const pgSchemaDiff = config.database?.pgSchemaDiff;
|
|
5308
|
-
if (!options.idempotentSqlDir && pgSchemaDiff?.idempotentSqlDir) {
|
|
5309
|
-
idempotentSqlDir = pgSchemaDiff.idempotentSqlDir;
|
|
5310
|
-
}
|
|
5311
|
-
if (pgSchemaDiff?.excludeFromOrphanDetection) {
|
|
5312
|
-
for (const pattern of pgSchemaDiff.excludeFromOrphanDetection) {
|
|
5313
|
-
exclusions.add(pattern);
|
|
5314
|
-
}
|
|
5315
|
-
}
|
|
5316
|
-
} catch {
|
|
5317
|
-
}
|
|
5318
|
-
return {
|
|
5319
|
-
idempotentSqlDir: isAbsolute(idempotentSqlDir) ? idempotentSqlDir : join(projectRoot, idempotentSqlDir),
|
|
5320
|
-
excludeFromOrphanDetection: [...exclusions].sort((a, b) => a.localeCompare(b))
|
|
5321
|
-
};
|
|
5322
|
-
}
|
|
5323
|
-
async function fetchMissingSourceMetadata(params) {
|
|
5324
|
-
const { databaseUrl, schemas } = params;
|
|
5325
|
-
if (schemas.length === 0) {
|
|
5326
|
-
return {
|
|
5327
|
-
extensionManagedTables: /* @__PURE__ */ new Map(),
|
|
5328
|
-
partitionParentMap: /* @__PURE__ */ new Map()
|
|
5329
|
-
};
|
|
5330
|
-
}
|
|
5331
|
-
const isRemoteSupabase = databaseUrl.includes(".supabase.co");
|
|
5332
|
-
const sql = postgres2(databaseUrl, {
|
|
5333
|
-
...isRemoteSupabase && { ssl: "require" }
|
|
5334
|
-
});
|
|
5335
|
-
try {
|
|
5336
|
-
const schemaList = buildSafeSchemaInClause2(schemas);
|
|
5337
|
-
const [extensionRows, partitionRows] = await Promise.all([
|
|
5338
|
-
sql`
|
|
5339
|
-
SELECT
|
|
5340
|
-
n.nspname AS schema_name,
|
|
5341
|
-
c.relname AS table_name,
|
|
5342
|
-
ext.extname AS extension_name
|
|
5343
|
-
FROM pg_class c
|
|
5344
|
-
JOIN pg_namespace n ON n.oid = c.relnamespace
|
|
5345
|
-
JOIN pg_depend d
|
|
5346
|
-
ON d.classid = 'pg_class'::regclass
|
|
5347
|
-
AND d.objid = c.oid
|
|
5348
|
-
AND d.refclassid = 'pg_extension'::regclass
|
|
5349
|
-
AND d.deptype = 'e'
|
|
5350
|
-
JOIN pg_extension ext ON ext.oid = d.refobjid
|
|
5351
|
-
WHERE c.relkind IN ('r', 'p')
|
|
5352
|
-
AND n.nspname IN (${sql.unsafe(schemaList)})
|
|
5353
|
-
`,
|
|
5354
|
-
sql`
|
|
5355
|
-
SELECT
|
|
5356
|
-
child_ns.nspname AS child_schema,
|
|
5357
|
-
child.relname AS child_table,
|
|
5358
|
-
parent_ns.nspname AS parent_schema,
|
|
5359
|
-
parent.relname AS parent_table
|
|
5360
|
-
FROM pg_inherits i
|
|
5361
|
-
JOIN pg_class child ON child.oid = i.inhrelid
|
|
5362
|
-
JOIN pg_namespace child_ns ON child_ns.oid = child.relnamespace
|
|
5363
|
-
JOIN pg_class parent ON parent.oid = i.inhparent
|
|
5364
|
-
JOIN pg_namespace parent_ns ON parent_ns.oid = parent.relnamespace
|
|
5365
|
-
WHERE child.relkind IN ('r', 'p')
|
|
5366
|
-
AND child_ns.nspname IN (${sql.unsafe(schemaList)})
|
|
5367
|
-
`
|
|
5368
|
-
]);
|
|
5369
|
-
const extensionManagedTables = /* @__PURE__ */ new Map();
|
|
5370
|
-
for (const row of extensionRows) {
|
|
5371
|
-
extensionManagedTables.set(
|
|
5372
|
-
`${String(row.schema_name)}.${String(row.table_name)}`,
|
|
5373
|
-
String(row.extension_name)
|
|
5374
|
-
);
|
|
5375
|
-
}
|
|
5376
|
-
const partitionParentMap = /* @__PURE__ */ new Map();
|
|
5377
|
-
for (const row of partitionRows) {
|
|
5378
|
-
partitionParentMap.set(
|
|
5379
|
-
`${String(row.child_schema)}.${String(row.child_table)}`,
|
|
5380
|
-
`${String(row.parent_schema)}.${String(row.parent_table)}`
|
|
5381
|
-
);
|
|
5382
|
-
}
|
|
5383
|
-
return { extensionManagedTables, partitionParentMap };
|
|
5384
|
-
} finally {
|
|
5385
|
-
await sql.end();
|
|
5386
|
-
}
|
|
5387
|
-
}
|
|
5388
|
-
function formatMissingSourceItems(items) {
|
|
5389
|
-
return items.map((item) => item.detail ? `${item.qualifiedName} (${item.detail})` : item.qualifiedName).join(", ");
|
|
5390
|
-
}
|
|
5391
|
-
function logMissingSourceClassification(classification) {
|
|
5392
|
-
const total = classification.definedInIdempotentDynamicDdl.length + classification.extensionManagedOrSystemTable.length + classification.trulyOrphaned.length;
|
|
5393
|
-
if (total === 0) return;
|
|
5394
|
-
console.warn(`[tables-manifest] \u26A0 ${total} table(s) exist in DB but not in SQL files.`);
|
|
5395
|
-
if (classification.definedInIdempotentDynamicDdl.length > 0) {
|
|
5396
|
-
console.log(
|
|
5397
|
-
`[tables-manifest] info: defined_in_idempotent_dynamic_ddl (${classification.definedInIdempotentDynamicDdl.length})`
|
|
5398
|
-
);
|
|
5399
|
-
console.log(` ${formatMissingSourceItems(classification.definedInIdempotentDynamicDdl)}`);
|
|
3543
|
+
}
|
|
3544
|
+
function backupProtectedTablesForProduction(dbUrl, protectedTables, input) {
|
|
3545
|
+
if (input.env !== "production") {
|
|
3546
|
+
return;
|
|
5400
3547
|
}
|
|
5401
|
-
|
|
5402
|
-
|
|
5403
|
-
|
|
5404
|
-
|
|
5405
|
-
console.log(` ${formatMissingSourceItems(classification.extensionManagedOrSystemTable)}`);
|
|
3548
|
+
const { backupPath } = backupIdempotentTables(dbUrl, protectedTables, input.verbose);
|
|
3549
|
+
if (backupPath) {
|
|
3550
|
+
logger13.info(`Recovery: pg_restore -d <DATABASE_URL> ${backupPath}`);
|
|
3551
|
+
return;
|
|
5406
3552
|
}
|
|
5407
|
-
if (
|
|
5408
|
-
|
|
5409
|
-
|
|
5410
|
-
console.warn(
|
|
5411
|
-
" \u2192 Add declarative/idempotent SQL definitions or allowlist via database.pgSchemaDiff.excludeFromOrphanDetection."
|
|
3553
|
+
if (protectedTables.length > 0 && !input.allowDataLoss) {
|
|
3554
|
+
throw new Error(
|
|
3555
|
+
"Pre-apply backup failed for production deployment.\n Protected tables exist but could not be backed up.\n Use --allow-data-loss to proceed without backup (emergency only)."
|
|
5412
3556
|
);
|
|
5413
|
-
} else {
|
|
5414
|
-
console.log("[tables-manifest] info: no truly_orphaned tables detected.");
|
|
5415
3557
|
}
|
|
5416
3558
|
}
|
|
5417
|
-
async function
|
|
5418
|
-
|
|
5419
|
-
|
|
5420
|
-
|
|
5421
|
-
|
|
5422
|
-
|
|
3559
|
+
async function cleanupApplyResources(params) {
|
|
3560
|
+
if (params.shadowDb) {
|
|
3561
|
+
try {
|
|
3562
|
+
await params.shadowDb.cleanup();
|
|
3563
|
+
if (params.verbose) {
|
|
3564
|
+
logger13.debug("Shadow DB cleaned up");
|
|
3565
|
+
}
|
|
3566
|
+
} catch (cleanupError) {
|
|
3567
|
+
logger13.warn(`Failed to cleanup shadow DB: ${cleanupError}`);
|
|
3568
|
+
}
|
|
5423
3569
|
}
|
|
5424
|
-
if (
|
|
5425
|
-
|
|
3570
|
+
if (params.prefilter) {
|
|
3571
|
+
try {
|
|
3572
|
+
rmSync(params.prefilter.filteredDir, { recursive: true, force: true });
|
|
3573
|
+
} catch {
|
|
3574
|
+
}
|
|
5426
3575
|
}
|
|
5427
|
-
|
|
5428
|
-
|
|
5429
|
-
|
|
5430
|
-
console.warn("[tables-manifest] Semantic name conflicts detected:");
|
|
5431
|
-
for (const conflict of conflicts) {
|
|
5432
|
-
console.warn(` '${conflict.semanticName}': ${conflict.tables.join(", ")}`);
|
|
3576
|
+
try {
|
|
3577
|
+
rmSync(params.tmpDir, { recursive: true, force: true });
|
|
3578
|
+
} catch {
|
|
5433
3579
|
}
|
|
5434
3580
|
}
|
|
5435
|
-
|
|
5436
|
-
const
|
|
5437
|
-
|
|
5438
|
-
|
|
5439
|
-
|
|
5440
|
-
|
|
5441
|
-
|
|
5442
|
-
|
|
5443
|
-
|
|
5444
|
-
|
|
5445
|
-
|
|
5446
|
-
|
|
5447
|
-
const sourceConfig = resolveSourceConfig(projectRoot, options);
|
|
5448
|
-
let tables = [];
|
|
5449
|
-
const source = "introspection";
|
|
5450
|
-
const declarativeTables = await extractTablesFromSqlDir(sqlDir, {
|
|
5451
|
-
includeColumns: false,
|
|
5452
|
-
// Don't need columns from SQL (DB introspection is more accurate)
|
|
5453
|
-
includeForeignKeys: false,
|
|
5454
|
-
includeIndexes: false,
|
|
5455
|
-
includeRlsPolicies: false
|
|
5456
|
-
});
|
|
5457
|
-
const idempotentTablesForSource = await extractTablesFromSqlDir(sourceConfig.idempotentSqlDir, {
|
|
5458
|
-
includeColumns: false,
|
|
5459
|
-
includeForeignKeys: false,
|
|
5460
|
-
includeIndexes: false,
|
|
5461
|
-
includeRlsPolicies: false
|
|
5462
|
-
});
|
|
5463
|
-
const idempotentTablesFromRegex = extractTablesFromIdempotentSql(
|
|
5464
|
-
sourceConfig.idempotentSqlDir,
|
|
5465
|
-
projectRoot
|
|
3581
|
+
var applyPgSchemaDiff = fromPromise(async ({ input: { input, targetDir } }) => {
|
|
3582
|
+
const schemasDir = join(targetDir, "supabase/schemas/declarative");
|
|
3583
|
+
if (!existsSync(schemasDir)) {
|
|
3584
|
+
logger13.info("No declarative schemas found");
|
|
3585
|
+
return { sql: "", hazards: [], applied: false };
|
|
3586
|
+
}
|
|
3587
|
+
const dbUrl = getDbUrl(input);
|
|
3588
|
+
const configState = loadPgSchemaDiffConfigState(targetDir, input.verbose);
|
|
3589
|
+
const prefilterState = createPrefilterState(
|
|
3590
|
+
schemasDir,
|
|
3591
|
+
input.verbose,
|
|
3592
|
+
configState.configExclusions
|
|
5466
3593
|
);
|
|
5467
|
-
const
|
|
5468
|
-
|
|
5469
|
-
|
|
5470
|
-
|
|
5471
|
-
|
|
5472
|
-
|
|
5473
|
-
for (const t of sourceTables) {
|
|
5474
|
-
if (sourceFileMap.has(t.qualifiedName)) {
|
|
5475
|
-
continue;
|
|
5476
|
-
}
|
|
5477
|
-
sourceFileMap.set(t.qualifiedName, {
|
|
5478
|
-
sourceFile: toRelativeSourcePath(projectRoot, t.sourceFile),
|
|
5479
|
-
lineNumber: t.lineNumber
|
|
5480
|
-
});
|
|
3594
|
+
const freshDbResult = handleFreshDbCase(input, dbUrl, targetDir, prefilterState.pgSchemaDiffDir);
|
|
3595
|
+
if (freshDbResult) return freshDbResult;
|
|
3596
|
+
const schemaFiles = collectSchemaFiles(schemasDir);
|
|
3597
|
+
if (schemaFiles.length === 0) {
|
|
3598
|
+
logger13.info("No schema files to apply");
|
|
3599
|
+
return { sql: "", hazards: [], applied: false };
|
|
5481
3600
|
}
|
|
5482
|
-
|
|
5483
|
-
|
|
5484
|
-
|
|
3601
|
+
const tmpDir = createCombinedSchemaBundle(schemaFiles, input.verbose);
|
|
3602
|
+
logger13.step("Running pg-schema-diff (incremental changes)...");
|
|
3603
|
+
let shadowDb = null;
|
|
3604
|
+
try {
|
|
3605
|
+
verifyPgSchemaDiffBinary({ strictVersion: input.env === "production" });
|
|
3606
|
+
await verifyDatabaseConnection(dbUrl);
|
|
3607
|
+
shadowDb = await createShadowDbForRun(dbUrl, configState.shadowExtensions, input.verbose);
|
|
3608
|
+
const includeSchemas = detectAppSchemas(schemasDir, input.verbose);
|
|
3609
|
+
cleanPartitionAclsForPgSchemaDiff(dbUrl, includeSchemas, input.verbose);
|
|
3610
|
+
const { planOutput } = executePgSchemaDiffPlan(
|
|
3611
|
+
dbUrl,
|
|
3612
|
+
prefilterState.pgSchemaDiffDir,
|
|
3613
|
+
includeSchemas,
|
|
3614
|
+
input.verbose,
|
|
3615
|
+
{ tempDbDsn: shadowDb?.dsn }
|
|
5485
3616
|
);
|
|
5486
|
-
|
|
5487
|
-
|
|
5488
|
-
|
|
5489
|
-
|
|
5490
|
-
|
|
3617
|
+
const noChangesResult = buildNoChangesResult(planOutput);
|
|
3618
|
+
if (noChangesResult) return noChangesResult;
|
|
3619
|
+
const { hazards } = handleHazardsWithContext(planOutput, input, schemasDir);
|
|
3620
|
+
const droppedTables = detectDropTableStatements(planOutput);
|
|
3621
|
+
enforceDropSafety(input, droppedTables);
|
|
3622
|
+
const dataViolationCount = runPreApplyDataCompatibility(dbUrl, planOutput, input);
|
|
3623
|
+
const protectedTables = getIdempotentProtectedTables(
|
|
3624
|
+
schemasDir,
|
|
3625
|
+
prefilterState.configExclusions
|
|
3626
|
+
);
|
|
3627
|
+
const protectedObjects = getIdempotentProtectedObjects(
|
|
3628
|
+
schemasDir,
|
|
3629
|
+
prefilterState.configExclusions
|
|
3630
|
+
);
|
|
3631
|
+
const checkModeResult = buildCheckModeResult(
|
|
3632
|
+
input,
|
|
3633
|
+
planOutput,
|
|
3634
|
+
hazards,
|
|
3635
|
+
protectedTables,
|
|
3636
|
+
protectedObjects,
|
|
3637
|
+
dataViolationCount
|
|
3638
|
+
);
|
|
3639
|
+
if (checkModeResult) return checkModeResult;
|
|
3640
|
+
backupProtectedTablesForProduction(dbUrl, protectedTables, input);
|
|
3641
|
+
const preApplyCounts = getTableRowEstimates(dbUrl, schemasDir, input.verbose);
|
|
3642
|
+
const applyResult = await applyWithRetry({
|
|
3643
|
+
dbUrl,
|
|
3644
|
+
schemasDir,
|
|
3645
|
+
includeSchemas,
|
|
3646
|
+
input,
|
|
3647
|
+
planOutput,
|
|
3648
|
+
hazards,
|
|
3649
|
+
protectedTables,
|
|
3650
|
+
protectedObjects,
|
|
3651
|
+
tempDbDsn: shadowDb?.dsn,
|
|
3652
|
+
pgSchemaDiffDir: prefilterState.pgSchemaDiffDir
|
|
3653
|
+
});
|
|
3654
|
+
if (applyResult.applied) {
|
|
3655
|
+
verifyDataIntegrity(dbUrl, schemasDir, preApplyCounts, input.verbose, input.allowDataLoss);
|
|
3656
|
+
}
|
|
5491
3657
|
return {
|
|
5492
|
-
...
|
|
5493
|
-
|
|
5494
|
-
lineNumber: fileInfo?.lineNumber
|
|
3658
|
+
...applyResult,
|
|
3659
|
+
dataViolations: dataViolationCount > 0 ? dataViolationCount : void 0
|
|
5495
3660
|
};
|
|
5496
|
-
}
|
|
5497
|
-
|
|
5498
|
-
|
|
5499
|
-
|
|
5500
|
-
|
|
5501
|
-
|
|
5502
|
-
let extensionManagedTables = /* @__PURE__ */ new Map();
|
|
5503
|
-
let partitionParentMap = /* @__PURE__ */ new Map();
|
|
5504
|
-
try {
|
|
5505
|
-
const metadata = await fetchMissingSourceMetadata({
|
|
5506
|
-
databaseUrl,
|
|
5507
|
-
schemas: missingSchemas
|
|
5508
|
-
});
|
|
5509
|
-
extensionManagedTables = metadata.extensionManagedTables;
|
|
5510
|
-
partitionParentMap = metadata.partitionParentMap;
|
|
5511
|
-
} catch (error) {
|
|
5512
|
-
const message = error instanceof Error ? error.message : String(error);
|
|
5513
|
-
console.warn(`[tables-manifest] Failed to classify extension/partition metadata: ${message}`);
|
|
5514
|
-
}
|
|
5515
|
-
const classification = classifyMissingSourceTables({
|
|
5516
|
-
tablesWithoutSource: missingSourceQualifiedNames,
|
|
5517
|
-
idempotentManagedTables,
|
|
5518
|
-
extensionManagedTables,
|
|
5519
|
-
partitionParentMap,
|
|
5520
|
-
excludeFromOrphanDetection: sourceConfig.excludeFromOrphanDetection,
|
|
5521
|
-
systemSchemas: SUPABASE_SYSTEM_SCHEMA_SET,
|
|
5522
|
-
knownSystemTables: KNOWN_EXTENSION_SYSTEM_TABLES
|
|
3661
|
+
} finally {
|
|
3662
|
+
await cleanupApplyResources({
|
|
3663
|
+
shadowDb,
|
|
3664
|
+
prefilter: prefilterState.prefilter,
|
|
3665
|
+
tmpDir,
|
|
3666
|
+
verbose: input.verbose
|
|
5523
3667
|
});
|
|
5524
|
-
logMissingSourceClassification(classification);
|
|
5525
|
-
}
|
|
5526
|
-
if (crossCheck && existsSync(drizzleSchemaPath)) {
|
|
5527
|
-
await logDrizzleCrossCheck(tables, drizzleSchemaPath);
|
|
5528
|
-
}
|
|
5529
|
-
const mappingResult = generateMapping(tables, mappingOptions);
|
|
5530
|
-
logMappingConflicts(mappingResult.conflicts);
|
|
5531
|
-
tables = applyMappingToTables(tables, mappingResult.mapping);
|
|
5532
|
-
const now = /* @__PURE__ */ new Date();
|
|
5533
|
-
const jstOffset = 9 * 60 * 60 * 1e3;
|
|
5534
|
-
const jst = new Date(now.getTime() + jstOffset);
|
|
5535
|
-
const generatedAtJST = `${jst.getUTCFullYear()}-${String(jst.getUTCMonth() + 1).padStart(2, "0")}-${String(jst.getUTCDate()).padStart(2, "0")}T${String(jst.getUTCHours()).padStart(2, "0")}:${String(jst.getUTCMinutes()).padStart(2, "0")}:${String(jst.getUTCSeconds()).padStart(2, "0")}+09:00`;
|
|
5536
|
-
const manifest = {
|
|
5537
|
-
version: MANIFEST_VERSION,
|
|
5538
|
-
source,
|
|
5539
|
-
generatedAt: generatedAtJST,
|
|
5540
|
-
generatorVersion: GENERATOR_VERSION,
|
|
5541
|
-
tables,
|
|
5542
|
-
mapping: mappingResult.mapping
|
|
5543
|
-
};
|
|
5544
|
-
const outputDir = join(outputPath, "..");
|
|
5545
|
-
if (!existsSync(outputDir)) {
|
|
5546
|
-
mkdirSync(outputDir, { recursive: true });
|
|
5547
|
-
}
|
|
5548
|
-
writeFileSync(outputPath, `${JSON.stringify(manifest, null, 2)}
|
|
5549
|
-
`);
|
|
5550
|
-
logManifestSummary(manifest, mappingResult.conflicts);
|
|
5551
|
-
return manifest;
|
|
5552
|
-
}
|
|
5553
|
-
function logManifestSummary(manifest, conflicts) {
|
|
5554
|
-
const tableCount = manifest.tables.length;
|
|
5555
|
-
const schemas = [...new Set(manifest.tables.map((t) => t.schema))];
|
|
5556
|
-
const mappingCount = Object.keys(manifest.mapping).length;
|
|
5557
|
-
const checkCount = manifest.tables.reduce(
|
|
5558
|
-
(sum, t) => sum + (t.checkConstraints?.length ?? 0),
|
|
5559
|
-
0
|
|
5560
|
-
);
|
|
5561
|
-
const triggerCount = manifest.tables.reduce(
|
|
5562
|
-
(sum, t) => sum + (t.triggers?.length ?? 0),
|
|
5563
|
-
0
|
|
5564
|
-
);
|
|
5565
|
-
console.log("\n\u2713 Tables manifest generated");
|
|
5566
|
-
console.log(` - Source: DB introspection (PostgreSQL system catalogs)`);
|
|
5567
|
-
console.log(` - ${tableCount} tables extracted`);
|
|
5568
|
-
console.log(` - ${schemas.length} schemas: ${schemas.join(", ")}`);
|
|
5569
|
-
console.log(` - ${mappingCount} semantic names mapped`);
|
|
5570
|
-
if (checkCount > 0) {
|
|
5571
|
-
console.log(` - ${checkCount} CHECK constraints detected`);
|
|
5572
|
-
}
|
|
5573
|
-
if (triggerCount > 0) {
|
|
5574
|
-
console.log(` - ${triggerCount} triggers detected`);
|
|
5575
3668
|
}
|
|
5576
|
-
|
|
5577
|
-
|
|
5578
|
-
|
|
5579
|
-
|
|
5580
|
-
|
|
5581
|
-
|
|
5582
|
-
}
|
|
5583
|
-
const
|
|
5584
|
-
|
|
5585
|
-
|
|
5586
|
-
|
|
5587
|
-
|
|
5588
|
-
|
|
5589
|
-
|
|
5590
|
-
const missingTables = expectedTables.filter((name) => !manifest.mapping[name]);
|
|
5591
|
-
if (missingTables.length > 0) {
|
|
5592
|
-
warnings.push(
|
|
5593
|
-
`Optional SDK tables not found: ${missingTables.join(", ")}
|
|
5594
|
-
(This is OK if your schema uses different names)`
|
|
5595
|
-
);
|
|
5596
|
-
}
|
|
5597
|
-
if (warnings.length > 0) {
|
|
5598
|
-
console.log("\n\u26A0\uFE0F Warnings:");
|
|
5599
|
-
for (const warning of warnings) {
|
|
5600
|
-
console.log(` - ${warning}`);
|
|
5601
|
-
}
|
|
3669
|
+
});
|
|
3670
|
+
var validatePartitions = fromPromise(async ({ input: { input, targetDir } }) => {
|
|
3671
|
+
if (input.check) return { warnings: [] };
|
|
3672
|
+
const idempotentDir = join(targetDir, "supabase/schemas/idempotent");
|
|
3673
|
+
if (!existsSync(idempotentDir)) return { warnings: [] };
|
|
3674
|
+
const expected = parseExpectedPartitions(idempotentDir);
|
|
3675
|
+
if (expected.length === 0) return { warnings: [] };
|
|
3676
|
+
const dbUrl = getDbUrl(input);
|
|
3677
|
+
const schemas = [...new Set(expected.map((e) => e.parent.split(".")[0] ?? ""))];
|
|
3678
|
+
const actual = queryActualPartitions(dbUrl, schemas);
|
|
3679
|
+
const drift = detectPartitionDrift(expected, actual);
|
|
3680
|
+
if (drift.missing.length === 0) {
|
|
3681
|
+
logger13.success(`All ${expected.length} expected partition(s) verified`);
|
|
3682
|
+
return { warnings: [] };
|
|
5602
3683
|
}
|
|
5603
|
-
|
|
5604
|
-
|
|
5605
|
-
}
|
|
3684
|
+
const warnings = formatPartitionWarnings(drift);
|
|
3685
|
+
for (const w of warnings) logger13.warn(w);
|
|
3686
|
+
return { warnings };
|
|
3687
|
+
});
|
|
5606
3688
|
|
|
5607
3689
|
// src/commands/db/apply/actors/seed-actors.ts
|
|
3690
|
+
init_esm_shims();
|
|
5608
3691
|
var DESTRUCTIVE_SEED_PATTERNS = [
|
|
5609
3692
|
{ pattern: /\bDELETE\s+FROM\b/i, description: "DELETE FROM" },
|
|
5610
3693
|
{ pattern: /\bTRUNCATE\b/i, description: "TRUNCATE" },
|
|
@@ -5637,9 +3720,12 @@ function isUnsafeProductionSeed(input, seedFile) {
|
|
|
5637
3720
|
return true;
|
|
5638
3721
|
}
|
|
5639
3722
|
function parseSeedErrorDiagnostics(stderr) {
|
|
3723
|
+
const psqlLocation = stderr.match(/psql:([^:]+):(\d+):\s*ERROR:/);
|
|
3724
|
+
const locationInfo = psqlLocation ? { file: psqlLocation[1], line: Number(psqlLocation[2]) } : {};
|
|
5640
3725
|
const columnMissing = stderr.match(/column "([^"]+)" of relation "([^"]+)" does not exist/);
|
|
5641
3726
|
if (columnMissing) {
|
|
5642
3727
|
return {
|
|
3728
|
+
...locationInfo,
|
|
5643
3729
|
table: columnMissing[2],
|
|
5644
3730
|
errorType: "missing_column",
|
|
5645
3731
|
hint: `Column "${columnMissing[1]}" missing from ${columnMissing[2]}. Schema may have changed. Regenerate seeds: pnpm generate:seeds ci`
|
|
@@ -5648,6 +3734,7 @@ function parseSeedErrorDiagnostics(stderr) {
|
|
|
5648
3734
|
const relationMissing = stderr.match(/relation "([^"]+)" does not exist/);
|
|
5649
3735
|
if (relationMissing) {
|
|
5650
3736
|
return {
|
|
3737
|
+
...locationInfo,
|
|
5651
3738
|
table: relationMissing[1],
|
|
5652
3739
|
errorType: "missing_relation",
|
|
5653
3740
|
hint: `Table ${relationMissing[1]} does not exist. Regenerate seeds: pnpm generate:seeds ci`
|
|
@@ -5658,6 +3745,7 @@ function parseSeedErrorDiagnostics(stderr) {
|
|
|
5658
3745
|
);
|
|
5659
3746
|
if (fkViolation) {
|
|
5660
3747
|
return {
|
|
3748
|
+
...locationInfo,
|
|
5661
3749
|
table: fkViolation[1],
|
|
5662
3750
|
errorType: "fk_violation",
|
|
5663
3751
|
hint: `FK constraint failed on ${fkViolation[1]}. Check seed insertion order or missing parent records.`
|
|
@@ -5666,6 +3754,7 @@ function parseSeedErrorDiagnostics(stderr) {
|
|
|
5666
3754
|
const checkViolation = stderr.match(/new row for relation "([^"]+)" violates check constraint/);
|
|
5667
3755
|
if (checkViolation) {
|
|
5668
3756
|
return {
|
|
3757
|
+
...locationInfo,
|
|
5669
3758
|
table: checkViolation[1],
|
|
5670
3759
|
errorType: "check_violation",
|
|
5671
3760
|
hint: `CHECK constraint failed on ${checkViolation[1]}. Seed data may not match column constraints. Regenerate seeds: pnpm generate:seeds ci`
|
|
@@ -5676,12 +3765,47 @@ function parseSeedErrorDiagnostics(stderr) {
|
|
|
5676
3765
|
);
|
|
5677
3766
|
if (uniqueViolation) {
|
|
5678
3767
|
return {
|
|
3768
|
+
...locationInfo,
|
|
5679
3769
|
table: uniqueViolation[1],
|
|
5680
3770
|
errorType: "unique_violation",
|
|
5681
3771
|
hint: `Duplicate key on ${uniqueViolation[1]}. Seeds may have been partially applied. Try: runa db reset`
|
|
5682
3772
|
};
|
|
5683
3773
|
}
|
|
5684
|
-
return {};
|
|
3774
|
+
return { ...locationInfo };
|
|
3775
|
+
}
|
|
3776
|
+
function createSeedFailureMessage(diagnostics, locationSuffix) {
|
|
3777
|
+
if (diagnostics.table) {
|
|
3778
|
+
const summary = `Seed apply failed on table: ${diagnostics.table} (${diagnostics.errorType})${locationSuffix}`;
|
|
3779
|
+
return {
|
|
3780
|
+
summary,
|
|
3781
|
+
hint: diagnostics.hint
|
|
3782
|
+
};
|
|
3783
|
+
}
|
|
3784
|
+
if (locationSuffix) {
|
|
3785
|
+
return { summary: `Seed apply failed${locationSuffix} (non-blocking)` };
|
|
3786
|
+
}
|
|
3787
|
+
return { summary: "Seed apply failed (non-blocking)" };
|
|
3788
|
+
}
|
|
3789
|
+
function logSeedFailureSummary(seedFailure) {
|
|
3790
|
+
logger13.warn(seedFailure.summary);
|
|
3791
|
+
if (seedFailure.hint) {
|
|
3792
|
+
logger13.info(` Hint: ${seedFailure.hint}`);
|
|
3793
|
+
}
|
|
3794
|
+
}
|
|
3795
|
+
function buildSeedLocationSuffix(diagnostics) {
|
|
3796
|
+
if (diagnostics.line == null) return "";
|
|
3797
|
+
const fileSuffix = diagnostics.file ? ` of ${diagnostics.file}` : "";
|
|
3798
|
+
return ` at line ${diagnostics.line}${fileSuffix}`;
|
|
3799
|
+
}
|
|
3800
|
+
function handleFailedSeed(result, verbose) {
|
|
3801
|
+
const errorMsg = result.stderr ? maskDbCredentials(result.stderr) : "";
|
|
3802
|
+
const diagnostics = parseSeedErrorDiagnostics(result.stderr);
|
|
3803
|
+
const failure = createSeedFailureMessage(diagnostics, buildSeedLocationSuffix(diagnostics));
|
|
3804
|
+
logSeedFailureSummary(failure);
|
|
3805
|
+
if (errorMsg && !verbose) {
|
|
3806
|
+
logger13.debug(` Error: ${errorMsg.split("\n")[0]}`);
|
|
3807
|
+
}
|
|
3808
|
+
return false;
|
|
5685
3809
|
}
|
|
5686
3810
|
function applySeedFile(dbUrl, seedFile, verbose) {
|
|
5687
3811
|
logger13.step("Applying seeds...");
|
|
@@ -5696,24 +3820,11 @@ function applySeedFile(dbUrl, seedFile, verbose) {
|
|
|
5696
3820
|
if (stdout) process.stdout.write(stdout);
|
|
5697
3821
|
if (stderr) process.stderr.write(stderr);
|
|
5698
3822
|
}
|
|
5699
|
-
if (result.status
|
|
5700
|
-
|
|
5701
|
-
|
|
5702
|
-
if (diagnostics.table) {
|
|
5703
|
-
logger13.warn(`Seed apply failed on table: ${diagnostics.table} (${diagnostics.errorType})`);
|
|
5704
|
-
if (diagnostics.hint) {
|
|
5705
|
-
logger13.info(` Hint: ${diagnostics.hint}`);
|
|
5706
|
-
}
|
|
5707
|
-
} else {
|
|
5708
|
-
logger13.warn(`Seed apply failed (non-blocking)`);
|
|
5709
|
-
}
|
|
5710
|
-
if (errorMsg && !verbose) {
|
|
5711
|
-
logger13.debug(` Error: ${errorMsg.split("\n")[0]}`);
|
|
5712
|
-
}
|
|
5713
|
-
return false;
|
|
3823
|
+
if (result.status === 0) {
|
|
3824
|
+
logger13.success("Seeds applied");
|
|
3825
|
+
return true;
|
|
5714
3826
|
}
|
|
5715
|
-
|
|
5716
|
-
return true;
|
|
3827
|
+
return handleFailedSeed(result, verbose);
|
|
5717
3828
|
}
|
|
5718
3829
|
function runSeeds(input, targetDir, dbUrl) {
|
|
5719
3830
|
if (input.noSeed) {
|
|
@@ -6995,11 +5106,11 @@ async function validateGitHubOutputPath(filePath) {
|
|
|
6995
5106
|
if (!filePath || filePath.trim().length === 0) {
|
|
6996
5107
|
return invalidOutputPath("Empty file path");
|
|
6997
5108
|
}
|
|
6998
|
-
const normalizedPath =
|
|
5109
|
+
const normalizedPath = path6.normalize(filePath);
|
|
6999
5110
|
if (normalizedPath.includes("..")) {
|
|
7000
5111
|
return invalidOutputPath("Path traversal detected (..) in file path");
|
|
7001
5112
|
}
|
|
7002
|
-
const absolutePath =
|
|
5113
|
+
const absolutePath = path6.resolve(normalizedPath);
|
|
7003
5114
|
const forbiddenPath = findForbiddenPath(absolutePath);
|
|
7004
5115
|
if (forbiddenPath) {
|
|
7005
5116
|
return invalidOutputPath(`Forbidden path: ${forbiddenPath}`);
|
|
@@ -7393,7 +5504,7 @@ async function analyzeSchemaChanges() {
|
|
|
7393
5504
|
try {
|
|
7394
5505
|
const { getDatabasePackagePath: getDatabasePackagePath2 } = await import('./config-loader-GT3HAQ7U.js');
|
|
7395
5506
|
const dbPath = await getDatabasePackagePath2();
|
|
7396
|
-
const schemaPath =
|
|
5507
|
+
const schemaPath = path6.join(dbPath, "src/schema/");
|
|
7397
5508
|
const { stdout } = await execa("git", ["diff", "--cached", "--", schemaPath]);
|
|
7398
5509
|
const lines = stdout.split("\n");
|
|
7399
5510
|
return parseDiffLines(lines);
|
|
@@ -7550,7 +5661,7 @@ async function testDatabaseConnection(projectRoot) {
|
|
|
7550
5661
|
const dbPort = detectLocalSupabasePorts(resolvedRoot).db;
|
|
7551
5662
|
let sql = null;
|
|
7552
5663
|
try {
|
|
7553
|
-
sql =
|
|
5664
|
+
sql = postgres(connectionUrl, {
|
|
7554
5665
|
connect_timeout: 5,
|
|
7555
5666
|
idle_timeout: 5,
|
|
7556
5667
|
max: 1
|
|
@@ -7659,7 +5770,7 @@ function getDeclarativeSqlFiles(sqlDir, logger15) {
|
|
|
7659
5770
|
async function collectSchemaRisks(sqlDir, sqlFiles) {
|
|
7660
5771
|
const allRisks = [];
|
|
7661
5772
|
for (const sqlFile of sqlFiles) {
|
|
7662
|
-
const filePath =
|
|
5773
|
+
const filePath = path6.join(sqlDir, sqlFile);
|
|
7663
5774
|
const risks = await detectSchemaRisks(filePath);
|
|
7664
5775
|
for (const risk of risks) {
|
|
7665
5776
|
allRisks.push({ ...risk, file: sqlFile });
|
|
@@ -7710,7 +5821,7 @@ function reportRiskGuidance(logger15, highRiskCount, lowRiskCount) {
|
|
|
7710
5821
|
async function runSqlSchemaRiskCheck(result, logger15, step) {
|
|
7711
5822
|
logger15.step("Checking SQL schema for risky patterns", step.next());
|
|
7712
5823
|
const cwd = process.cwd();
|
|
7713
|
-
const sqlDir =
|
|
5824
|
+
const sqlDir = path6.join(cwd, "supabase", "schemas", "declarative");
|
|
7714
5825
|
const sqlFiles = getDeclarativeSqlFiles(sqlDir, logger15);
|
|
7715
5826
|
if (!sqlFiles) return;
|
|
7716
5827
|
try {
|
|
@@ -7764,7 +5875,7 @@ async function runOrphanCheck(env, dbPackagePath, result, logger15, step) {
|
|
|
7764
5875
|
const { expectedTables, expectedEnums } = await extractSchemaTablesAndEnums(dbPackagePath);
|
|
7765
5876
|
const databaseUrl = tryResolveDatabaseUrl("local") || buildLocalDatabaseUrl(process.cwd());
|
|
7766
5877
|
const { dbTables, dbEnums } = await fetchDbTablesAndEnums(databaseUrl, {
|
|
7767
|
-
schemaDir:
|
|
5878
|
+
schemaDir: path6.join(dbPackagePath, "src", "schema")
|
|
7768
5879
|
});
|
|
7769
5880
|
let excludeFromOrphanDetection = [];
|
|
7770
5881
|
let idempotentSqlDir = "supabase/schemas/idempotent";
|
|
@@ -7858,21 +5969,21 @@ function parseSqlFilename(filename) {
|
|
|
7858
5969
|
// src/commands/db/utils/preflight-checks/domain-naming-checks.ts
|
|
7859
5970
|
var FILE_SIZE_THRESHOLD = 2e3;
|
|
7860
5971
|
var IDENTIFIER = "[A-Za-z_][A-Za-z0-9_]{0,127}";
|
|
7861
|
-
var
|
|
5972
|
+
var SQL_IDENTIFIER = `(?:"[^"]*(?:""[^"]*)*"|${IDENTIFIER})`;
|
|
7862
5973
|
var CREATE_SCHEMA_RE = new RegExp(
|
|
7863
|
-
`CREATE\\s+SCHEMA\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(${
|
|
5974
|
+
`CREATE\\s+SCHEMA\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(${SQL_IDENTIFIER})`,
|
|
7864
5975
|
"gi"
|
|
7865
5976
|
);
|
|
7866
5977
|
var CREATE_TABLE_RE = new RegExp(
|
|
7867
|
-
`CREATE\\s+TABLE\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(?:(${
|
|
5978
|
+
`CREATE\\s+TABLE\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(?:(${SQL_IDENTIFIER})\\s*\\.\\s*)?(${SQL_IDENTIFIER})`,
|
|
7868
5979
|
"gi"
|
|
7869
5980
|
);
|
|
7870
5981
|
var REFERENCES_RE = new RegExp(
|
|
7871
|
-
`REFERENCES\\s+(?:(${
|
|
5982
|
+
`REFERENCES\\s+(?:(${SQL_IDENTIFIER})\\s*\\.\\s*)?(${SQL_IDENTIFIER})`,
|
|
7872
5983
|
"gi"
|
|
7873
5984
|
);
|
|
7874
5985
|
var CREATE_POLICY_RE = new RegExp(
|
|
7875
|
-
`CREATE\\s+POLICY\\s+${
|
|
5986
|
+
`CREATE\\s+POLICY\\s+${SQL_IDENTIFIER}\\s+ON\\s+(?:(${SQL_IDENTIFIER})\\s*\\.\\s*)?(${SQL_IDENTIFIER})`,
|
|
7876
5987
|
"gi"
|
|
7877
5988
|
);
|
|
7878
5989
|
var SCHEMA_DOT_TABLE_RE = new RegExp(`\\b(${IDENTIFIER})\\s*\\.\\s*(${IDENTIFIER})\\b`, "gi");
|
|
@@ -7944,7 +6055,7 @@ function extractFkRefs(strippedContent, rawContent) {
|
|
|
7944
6055
|
function analyzeFile(sqlDir, filename) {
|
|
7945
6056
|
const parsed = parseSqlFilename(filename);
|
|
7946
6057
|
if (!parsed) return null;
|
|
7947
|
-
const filePath =
|
|
6058
|
+
const filePath = path6.join(sqlDir, filename);
|
|
7948
6059
|
try {
|
|
7949
6060
|
const content = readFileSync(filePath, "utf-8");
|
|
7950
6061
|
const strippedContent = blankDollarQuotedBodies(stripSqlComments(content));
|
|
@@ -8141,34 +6252,55 @@ function checkPolicyCrossSchemaRef(analysis, managedSchemas) {
|
|
|
8141
6252
|
"schema",
|
|
8142
6253
|
"function"
|
|
8143
6254
|
]);
|
|
8144
|
-
|
|
8145
|
-
|
|
8146
|
-
|
|
8147
|
-
|
|
8148
|
-
|
|
8149
|
-
|
|
8150
|
-
|
|
8151
|
-
|
|
8152
|
-
|
|
6255
|
+
function isPolicyNoisePart(token) {
|
|
6256
|
+
return token.length === 0 || SQL_NOISE.has(token);
|
|
6257
|
+
}
|
|
6258
|
+
function toPolicyLine(matchIndex) {
|
|
6259
|
+
return lineNumberAt(content, matchIndex);
|
|
6260
|
+
}
|
|
6261
|
+
function buildPolicyIssue(file, policySchema, refSchema, refTable, policyLine) {
|
|
6262
|
+
return {
|
|
6263
|
+
file,
|
|
6264
|
+
line: policyLine,
|
|
6265
|
+
rule: "DOMAIN_POLICY_CROSS_SCHEMA_REF",
|
|
6266
|
+
severity: "warning",
|
|
6267
|
+
message: `RLS policy in ${file} on '${policySchema}' schema references '${refSchema}.${refTable}'. pg-schema-diff does not track cross-schema references in USING/WITH CHECK. Use a PL/pgSQL wrapper function with dynamic SQL instead. See: database-known-limitations.md #11`
|
|
6268
|
+
};
|
|
6269
|
+
}
|
|
6270
|
+
function collectPolicyRefs(stmtText) {
|
|
8153
6271
|
const refs = collectRegexMatches(SCHEMA_DOT_TABLE_RE, stmtText);
|
|
8154
6272
|
const seenRefs = /* @__PURE__ */ new Set();
|
|
8155
6273
|
for (const ref of refs) {
|
|
8156
6274
|
const refSchema = ref[1] ? ref[1].toLowerCase() : "";
|
|
8157
6275
|
const refTable = ref[2] ? ref[2].toLowerCase() : "";
|
|
8158
|
-
if (
|
|
8159
|
-
|
|
8160
|
-
|
|
8161
|
-
|
|
8162
|
-
|
|
8163
|
-
|
|
8164
|
-
|
|
8165
|
-
|
|
8166
|
-
|
|
8167
|
-
|
|
8168
|
-
|
|
8169
|
-
|
|
8170
|
-
|
|
8171
|
-
|
|
6276
|
+
if (isPolicyNoisePart(refSchema) || isPolicyNoisePart(refTable)) {
|
|
6277
|
+
continue;
|
|
6278
|
+
}
|
|
6279
|
+
seenRefs.add(`${refSchema}.${refTable}`);
|
|
6280
|
+
}
|
|
6281
|
+
return seenRefs;
|
|
6282
|
+
}
|
|
6283
|
+
function isRelevantRef(policySchema, ref, managedSchemas2) {
|
|
6284
|
+
const [refSchema] = ref.split(".");
|
|
6285
|
+
return !!refSchema && refSchema !== policySchema && managedSchemas2.has(refSchema) && !SQL_NOISE.has(refSchema);
|
|
6286
|
+
}
|
|
6287
|
+
function buildStatementSlice(startIndex) {
|
|
6288
|
+
const stmtEnd = strippedContent.indexOf(";", startIndex);
|
|
6289
|
+
return strippedContent.slice(startIndex, stmtEnd >= 0 ? stmtEnd : strippedContent.length);
|
|
6290
|
+
}
|
|
6291
|
+
const policyMatches = collectRegexMatches(CREATE_POLICY_RE, strippedContent);
|
|
6292
|
+
for (const policyMatch of policyMatches) {
|
|
6293
|
+
const policySchema = policyMatch[1] ? unquote(policyMatch[1]) : "public";
|
|
6294
|
+
const policyLine = toPolicyLine(policyMatch.index);
|
|
6295
|
+
const stmtText = buildStatementSlice(policyMatch.index);
|
|
6296
|
+
for (const ref of collectPolicyRefs(stmtText)) {
|
|
6297
|
+
if (!isRelevantRef(policySchema, ref, managedSchemas)) {
|
|
6298
|
+
continue;
|
|
6299
|
+
}
|
|
6300
|
+
const [refSchema, refTable] = ref.split(".");
|
|
6301
|
+
issues.push(
|
|
6302
|
+
buildPolicyIssue(parsed.raw, policySchema, refSchema || "", refTable || "", policyLine)
|
|
6303
|
+
);
|
|
8172
6304
|
}
|
|
8173
6305
|
}
|
|
8174
6306
|
return issues;
|
|
@@ -8214,7 +6346,7 @@ function reportIssues(issues, result, logger15) {
|
|
|
8214
6346
|
async function runDomainNamingCheck(result, logger15, step) {
|
|
8215
6347
|
logger15.step("Checking domain naming consistency", step.next());
|
|
8216
6348
|
const cwd = process.cwd();
|
|
8217
|
-
const sqlDir =
|
|
6349
|
+
const sqlDir = path6.join(cwd, "supabase", "schemas", "declarative");
|
|
8218
6350
|
if (!existsSync(sqlDir)) {
|
|
8219
6351
|
logger15.success("No declarative SQL directory found (skipped)");
|
|
8220
6352
|
return;
|
|
@@ -8303,12 +6435,12 @@ function logSummary(result, logger15) {
|
|
|
8303
6435
|
function checkDatabasePackage() {
|
|
8304
6436
|
const cwd = process.cwd();
|
|
8305
6437
|
const locations = [
|
|
8306
|
-
|
|
8307
|
-
|
|
8308
|
-
|
|
6438
|
+
path6.join(cwd, "packages", "database"),
|
|
6439
|
+
path6.join(cwd, "packages", "db"),
|
|
6440
|
+
path6.join(cwd, "db")
|
|
8309
6441
|
];
|
|
8310
6442
|
for (const location of locations) {
|
|
8311
|
-
const configPath =
|
|
6443
|
+
const configPath = path6.join(location, "drizzle.config.ts");
|
|
8312
6444
|
if (existsSync(configPath)) {
|
|
8313
6445
|
return { exists: true, path: location };
|
|
8314
6446
|
}
|
|
@@ -8323,7 +6455,7 @@ function countTsFilesRecursive(dir) {
|
|
|
8323
6455
|
try {
|
|
8324
6456
|
const entries = readdirSync(dir, { withFileTypes: true });
|
|
8325
6457
|
for (const entry of entries) {
|
|
8326
|
-
const fullPath =
|
|
6458
|
+
const fullPath = path6.join(dir, entry.name);
|
|
8327
6459
|
if (entry.isDirectory() && !entry.isSymbolicLink()) {
|
|
8328
6460
|
count += countTsFilesRecursive(fullPath);
|
|
8329
6461
|
} else if (entry.isFile() && entry.name.endsWith(".ts")) {
|
|
@@ -8335,7 +6467,7 @@ function countTsFilesRecursive(dir) {
|
|
|
8335
6467
|
return count;
|
|
8336
6468
|
}
|
|
8337
6469
|
function checkSchemaFiles(dbPackagePath) {
|
|
8338
|
-
const schemaDir =
|
|
6470
|
+
const schemaDir = path6.join(dbPackagePath, "src", "schema");
|
|
8339
6471
|
if (!existsSync(schemaDir)) {
|
|
8340
6472
|
return { exists: false, count: 0 };
|
|
8341
6473
|
}
|
|
@@ -8386,7 +6518,7 @@ function runDatabasePackageChecks(result, logger15, step) {
|
|
|
8386
6518
|
logger15.error("Database package path is missing");
|
|
8387
6519
|
return null;
|
|
8388
6520
|
}
|
|
8389
|
-
logger15.success(`Found database package: ${
|
|
6521
|
+
logger15.success(`Found database package: ${path6.basename(dbPackagePath)}`);
|
|
8390
6522
|
logger15.step("Checking schema files", step.next());
|
|
8391
6523
|
const schemaCheck = checkSchemaFiles(dbPackagePath);
|
|
8392
6524
|
if (!schemaCheck.exists) {
|
|
@@ -10135,7 +8267,7 @@ function getSupabasePorts(projectPath) {
|
|
|
10135
8267
|
return getPortsWithOffset(offset);
|
|
10136
8268
|
}
|
|
10137
8269
|
async function updateSupabaseConfigPortsSafe(projectPath) {
|
|
10138
|
-
const configPath =
|
|
8270
|
+
const configPath = path6.join(projectPath, "supabase", "config.toml");
|
|
10139
8271
|
const resolved = await resolveAvailablePorts(projectPath);
|
|
10140
8272
|
if (!resolved) {
|
|
10141
8273
|
const ports = getSupabasePorts(projectPath);
|
|
@@ -10178,7 +8310,7 @@ function getPortAllocationSummary(projectPath) {
|
|
|
10178
8310
|
const ports = getSupabasePorts(projectPath);
|
|
10179
8311
|
const offset = calculatePortOffset(projectPath);
|
|
10180
8312
|
return [
|
|
10181
|
-
`Port allocation for: ${
|
|
8313
|
+
`Port allocation for: ${path6.basename(projectPath)}`,
|
|
10182
8314
|
` Slot: ${offset / 10} (hash-based, offset=${offset})`,
|
|
10183
8315
|
` API: ${ports.api}`,
|
|
10184
8316
|
` DB: ${ports.db}`,
|
|
@@ -10212,7 +8344,7 @@ function parseSeedPaths(configPath) {
|
|
|
10212
8344
|
}
|
|
10213
8345
|
}
|
|
10214
8346
|
async function applySeedFile2(seedPath, dbUrl) {
|
|
10215
|
-
const supabaseDir =
|
|
8347
|
+
const supabaseDir = path6.join(process.cwd(), "supabase");
|
|
10216
8348
|
const absolutePath = resolveSafePath(supabaseDir, seedPath);
|
|
10217
8349
|
if (!existsSync(absolutePath)) {
|
|
10218
8350
|
return;
|
|
@@ -10223,12 +8355,12 @@ async function applySeedFile2(seedPath, dbUrl) {
|
|
|
10223
8355
|
});
|
|
10224
8356
|
}
|
|
10225
8357
|
async function applySeeds2(configPath) {
|
|
10226
|
-
const configFile = configPath ||
|
|
8358
|
+
const configFile = configPath || path6.join(process.cwd(), "supabase", "config.toml");
|
|
10227
8359
|
const seedPaths = parseSeedPaths(configFile);
|
|
10228
8360
|
if (seedPaths.length === 0) {
|
|
10229
8361
|
return;
|
|
10230
8362
|
}
|
|
10231
|
-
const supabaseDir =
|
|
8363
|
+
const supabaseDir = path6.join(process.cwd(), "supabase");
|
|
10232
8364
|
const safePaths = filterSafePaths(seedPaths, supabaseDir);
|
|
10233
8365
|
if (safePaths.length === 0) {
|
|
10234
8366
|
return;
|
|
@@ -10751,7 +8883,7 @@ var validateCommand = new Command("validate").description("Validate schema files
|
|
|
10751
8883
|
const logger15 = createCLILogger("db:validate");
|
|
10752
8884
|
try {
|
|
10753
8885
|
logger15.section("Schema Validation");
|
|
10754
|
-
const schemasPath =
|
|
8886
|
+
const schemasPath = path6.join(process.cwd(), "packages", "database", "src", "schema");
|
|
10755
8887
|
if (!existsSync(schemasPath)) {
|
|
10756
8888
|
throw new CLIError("Schema directory not found", "SCHEMA_DIR_NOT_FOUND", [
|
|
10757
8889
|
`Expected location: ${schemasPath}`,
|
|
@@ -10885,8 +9017,8 @@ var generateCommand = new Command("generate").description("Generate TypeScript t
|
|
|
10885
9017
|
var listCommand = new Command("list").description("List managed schemas from drizzle.config.ts").option("--sql", "Output as SQL-compatible string for IN clauses").option("--json", "Output as JSON array").action(async (options) => {
|
|
10886
9018
|
const logger15 = createCLILogger("db:schema:list");
|
|
10887
9019
|
try {
|
|
10888
|
-
const dbPackagePath =
|
|
10889
|
-
if (!existsSync(
|
|
9020
|
+
const dbPackagePath = path6.join(process.cwd(), "packages", "database");
|
|
9021
|
+
if (!existsSync(path6.join(dbPackagePath, "drizzle.config.ts"))) {
|
|
10890
9022
|
throw new CLIError("drizzle.config.ts not found", "CONFIG_NOT_FOUND", [
|
|
10891
9023
|
`Expected location: ${dbPackagePath}/drizzle.config.ts`,
|
|
10892
9024
|
"Ensure you are in the project root",
|
|
@@ -12789,7 +10921,7 @@ function loadPolicyFromFile(policyFile) {
|
|
|
12789
10921
|
}
|
|
12790
10922
|
}
|
|
12791
10923
|
function loadBoundaryPolicy(projectRoot, policyPath) {
|
|
12792
|
-
const policyFile = policyPath ??
|
|
10924
|
+
const policyFile = policyPath ?? path6.join(projectRoot, "supabase", "schemas", BOUNDARY_POLICY_FILENAME);
|
|
12793
10925
|
if (!existsSync(policyFile)) {
|
|
12794
10926
|
return {
|
|
12795
10927
|
version: DEFAULT_POLICY_VERSION,
|
|
@@ -12817,7 +10949,7 @@ init_esm_shims();
|
|
|
12817
10949
|
var riskDetectorLoader = null;
|
|
12818
10950
|
function loadRiskDetectorModule() {
|
|
12819
10951
|
if (!riskDetectorLoader) {
|
|
12820
|
-
riskDetectorLoader = import('./risk-detector-
|
|
10952
|
+
riskDetectorLoader = import('./risk-detector-4U6ZJ2G5.js').then((module) => ({
|
|
12821
10953
|
detectSchemaRisks: module.detectSchemaRisks
|
|
12822
10954
|
})).catch((error) => {
|
|
12823
10955
|
riskDetectorLoader = null;
|
|
@@ -12831,7 +10963,7 @@ function loadRiskDetectorModule() {
|
|
|
12831
10963
|
init_esm_shims();
|
|
12832
10964
|
var boundaryPolicyCache = /* @__PURE__ */ new Map();
|
|
12833
10965
|
function getBoundaryPolicy(cwd = process.cwd()) {
|
|
12834
|
-
const resolved =
|
|
10966
|
+
const resolved = path6.resolve(cwd);
|
|
12835
10967
|
let cached = boundaryPolicyCache.get(resolved);
|
|
12836
10968
|
if (!cached) {
|
|
12837
10969
|
cached = loadBoundaryPolicy(cwd);
|
|
@@ -12978,7 +11110,7 @@ function shouldAbortSchemaPrecheckForBudget(state, filePath) {
|
|
|
12978
11110
|
)}`;
|
|
12979
11111
|
}
|
|
12980
11112
|
if (state.scannedFiles + 1 > state.maxFiles) {
|
|
12981
|
-
return `Schema file scan budget exceeds ${state.maxFiles} files at ${
|
|
11113
|
+
return `Schema file scan budget exceeds ${state.maxFiles} files at ${path6.basename(filePath)}.`;
|
|
12982
11114
|
}
|
|
12983
11115
|
const projectedBytes = state.scannedBytes + size;
|
|
12984
11116
|
if (projectedBytes > state.maxBytes) {
|
|
@@ -13000,7 +11132,7 @@ function* collectSqlFilesRecursively(baseDir) {
|
|
|
13000
11132
|
try {
|
|
13001
11133
|
const entries = readdirSync(currentDir, { withFileTypes: true });
|
|
13002
11134
|
for (const entry of entries) {
|
|
13003
|
-
const fullPath =
|
|
11135
|
+
const fullPath = path6.join(currentDir, entry.name);
|
|
13004
11136
|
if (entry.isDirectory()) {
|
|
13005
11137
|
queue.push(fullPath);
|
|
13006
11138
|
continue;
|
|
@@ -14233,7 +12365,7 @@ function classifyIdempotentMisplacementRisk(file, content, boundaryPolicy) {
|
|
|
14233
12365
|
}
|
|
14234
12366
|
function classifyFileMisplacementRisks(params) {
|
|
14235
12367
|
const risks = [];
|
|
14236
|
-
const
|
|
12368
|
+
const relative2 = path6.relative(process.cwd(), params.file);
|
|
14237
12369
|
const normalized = normalizeSqlForPlacementCheck(params.content);
|
|
14238
12370
|
const statements = splitSqlStatements(normalized);
|
|
14239
12371
|
const seenMessages = /* @__PURE__ */ new Set();
|
|
@@ -14241,13 +12373,13 @@ function classifyFileMisplacementRisks(params) {
|
|
|
14241
12373
|
const candidates = collectRuleBasedCandidates({
|
|
14242
12374
|
statement,
|
|
14243
12375
|
line,
|
|
14244
|
-
file:
|
|
12376
|
+
file: relative2,
|
|
14245
12377
|
rules: params.rules
|
|
14246
12378
|
});
|
|
14247
12379
|
const unknownObjectRisk = maybeCollectUnknownObjectBoundaryRisk({
|
|
14248
12380
|
statement,
|
|
14249
12381
|
line,
|
|
14250
|
-
file:
|
|
12382
|
+
file: relative2,
|
|
14251
12383
|
fileType: params.fileType,
|
|
14252
12384
|
boundaryPolicy: params.boundaryPolicy,
|
|
14253
12385
|
resolver: params.resolver,
|
|
@@ -14313,6 +12445,93 @@ function classifyPlanStatementHazards(statement) {
|
|
|
14313
12445
|
return risks;
|
|
14314
12446
|
}
|
|
14315
12447
|
|
|
12448
|
+
// src/commands/db/commands/db-sync/error-classifier.ts
|
|
12449
|
+
init_esm_shims();
|
|
12450
|
+
var DNS_RESOLUTION_PATTERNS = [
|
|
12451
|
+
"could not translate host name",
|
|
12452
|
+
"could not resolve host",
|
|
12453
|
+
"name or service not known",
|
|
12454
|
+
"nodename nor servname provided",
|
|
12455
|
+
"temporary failure in name resolution"
|
|
12456
|
+
];
|
|
12457
|
+
var CONNECTION_PATTERNS = [
|
|
12458
|
+
"econnrefused",
|
|
12459
|
+
"connection refused",
|
|
12460
|
+
"could not connect to server",
|
|
12461
|
+
"connection timed out",
|
|
12462
|
+
"timeout expired",
|
|
12463
|
+
"no pg_hba.conf entry",
|
|
12464
|
+
"the database system is starting up",
|
|
12465
|
+
"the database system is shutting down"
|
|
12466
|
+
];
|
|
12467
|
+
var FALLBACK_SUGGESTIONS = [
|
|
12468
|
+
"Check database connectivity",
|
|
12469
|
+
"Verify psql is installed and accessible",
|
|
12470
|
+
"Ensure packages/database exists"
|
|
12471
|
+
];
|
|
12472
|
+
function getEnvironmentLabel(environment) {
|
|
12473
|
+
switch (environment) {
|
|
12474
|
+
case "production":
|
|
12475
|
+
return "production";
|
|
12476
|
+
case "main":
|
|
12477
|
+
return "main";
|
|
12478
|
+
case "preview":
|
|
12479
|
+
return "preview";
|
|
12480
|
+
case "local":
|
|
12481
|
+
return "local";
|
|
12482
|
+
}
|
|
12483
|
+
}
|
|
12484
|
+
function buildCombinedMessage(error) {
|
|
12485
|
+
if (isExecaError(error)) {
|
|
12486
|
+
return [error.message, error.stderr, error.stdout].filter(Boolean).join("\n");
|
|
12487
|
+
}
|
|
12488
|
+
if (error instanceof Error) {
|
|
12489
|
+
return error.message;
|
|
12490
|
+
}
|
|
12491
|
+
return String(error);
|
|
12492
|
+
}
|
|
12493
|
+
function findRelevantLine(message, patterns) {
|
|
12494
|
+
const lines = message.split("\n").map((line) => line.trim()).filter((line) => line.length > 0);
|
|
12495
|
+
const matched = lines.find((line) => {
|
|
12496
|
+
const lower = line.toLowerCase();
|
|
12497
|
+
return patterns.some((pattern) => lower.includes(pattern));
|
|
12498
|
+
});
|
|
12499
|
+
return matched ?? null;
|
|
12500
|
+
}
|
|
12501
|
+
function classifyDbSyncCommandFailure(error, environment) {
|
|
12502
|
+
const message = buildCombinedMessage(error);
|
|
12503
|
+
const messageLower = message.toLowerCase();
|
|
12504
|
+
const environmentLabel = getEnvironmentLabel(environment);
|
|
12505
|
+
if (DNS_RESOLUTION_PATTERNS.some((pattern) => messageLower.includes(pattern))) {
|
|
12506
|
+
const detail = findRelevantLine(message, DNS_RESOLUTION_PATTERNS);
|
|
12507
|
+
return {
|
|
12508
|
+
code: "DB_HOST_RESOLUTION_FAILED",
|
|
12509
|
+
message: detail ? `Could not resolve the ${environmentLabel} database host: ${detail}` : `Could not resolve the ${environmentLabel} database host`,
|
|
12510
|
+
suggestions: [
|
|
12511
|
+
"Verify the database host in DATABASE_URL / DATABASE_URL_ADMIN",
|
|
12512
|
+
"Check DNS resolution and outbound network access to the database host",
|
|
12513
|
+
"Re-run from an environment that can reach the target database"
|
|
12514
|
+
]
|
|
12515
|
+
};
|
|
12516
|
+
}
|
|
12517
|
+
if (CONNECTION_PATTERNS.some((pattern) => messageLower.includes(pattern))) {
|
|
12518
|
+
const detail = findRelevantLine(message, CONNECTION_PATTERNS);
|
|
12519
|
+
return {
|
|
12520
|
+
code: "DB_CONNECTION_FAILED",
|
|
12521
|
+
message: detail ? `Could not connect to the ${environmentLabel} database: ${detail}` : `Could not connect to the ${environmentLabel} database`,
|
|
12522
|
+
suggestions: [
|
|
12523
|
+
"Verify the database is reachable from this environment",
|
|
12524
|
+
"Check DATABASE_URL / DATABASE_URL_ADMIN credentials and firewall settings",
|
|
12525
|
+
"Retry after confirming the target database is accepting connections"
|
|
12526
|
+
]
|
|
12527
|
+
};
|
|
12528
|
+
}
|
|
12529
|
+
return null;
|
|
12530
|
+
}
|
|
12531
|
+
function getDbSyncFallbackSuggestions() {
|
|
12532
|
+
return [...FALLBACK_SUGGESTIONS];
|
|
12533
|
+
}
|
|
12534
|
+
|
|
14316
12535
|
// src/commands/db/commands/db-sync.ts
|
|
14317
12536
|
var SHOW_ALLOWLIST_REPORT = process.env.RUNA_DB_PRECHECK_ALLOWLIST_REPORT === "1";
|
|
14318
12537
|
var DIRECTORY_PLACEMENT_WARNING_PREFIX = " [misplacement] ";
|
|
@@ -14586,8 +12805,8 @@ async function collectPlanBoundaryReconciliationReport(planSql) {
|
|
|
14586
12805
|
}
|
|
14587
12806
|
async function collectDirectoryPlacementReport() {
|
|
14588
12807
|
const boundaryPolicy = getBoundaryPolicy();
|
|
14589
|
-
const declarativeDir =
|
|
14590
|
-
const idempotentDir =
|
|
12808
|
+
const declarativeDir = path6.join(process.cwd(), "supabase", "schemas", "declarative");
|
|
12809
|
+
const idempotentDir = path6.join(process.cwd(), "supabase", "schemas", "idempotent");
|
|
14591
12810
|
const blockers = [];
|
|
14592
12811
|
const warnings = [];
|
|
14593
12812
|
const allowlist = [];
|
|
@@ -14871,7 +13090,7 @@ async function processDeclarativeRiskFile(params) {
|
|
|
14871
13090
|
report: { ...detectorResult.report, allowlist: detectorResult.report.allowlist ?? [] }
|
|
14872
13091
|
};
|
|
14873
13092
|
}
|
|
14874
|
-
const relPath =
|
|
13093
|
+
const relPath = path6.relative(process.cwd(), params.file);
|
|
14875
13094
|
const risks = await detectorResult.detector(params.file);
|
|
14876
13095
|
const scopedRisks = risks.map((risk) => ({ ...risk, file: relPath }));
|
|
14877
13096
|
collectDeclarativeRiskItemsForFile({
|
|
@@ -14883,7 +13102,7 @@ async function processDeclarativeRiskFile(params) {
|
|
|
14883
13102
|
return { kind: "ok", detector: detectorResult.detector };
|
|
14884
13103
|
}
|
|
14885
13104
|
async function collectDeclarativeRiskReport() {
|
|
14886
|
-
const declarativeDir =
|
|
13105
|
+
const declarativeDir = path6.join(process.cwd(), "supabase", "schemas", "declarative");
|
|
14887
13106
|
if (!existsSync(declarativeDir)) {
|
|
14888
13107
|
return {
|
|
14889
13108
|
blockers: [],
|
|
@@ -14917,7 +13136,7 @@ async function collectDeclarativeRiskReport() {
|
|
|
14917
13136
|
return formatCollectedDeclarativeRisks(collected, allowlist);
|
|
14918
13137
|
}
|
|
14919
13138
|
async function collectIdempotentRiskReport() {
|
|
14920
|
-
const idempotentDir =
|
|
13139
|
+
const idempotentDir = path6.join(process.cwd(), "supabase", "schemas", "idempotent");
|
|
14921
13140
|
if (!existsSync(idempotentDir)) {
|
|
14922
13141
|
return { blockers: [], warnings: [] };
|
|
14923
13142
|
}
|
|
@@ -14945,7 +13164,7 @@ async function collectIdempotentRiskReport() {
|
|
|
14945
13164
|
detectSchemaRisks2 = detectorResult.detector;
|
|
14946
13165
|
const risks = await detectSchemaRisks2(file);
|
|
14947
13166
|
if (risks.length === 0) continue;
|
|
14948
|
-
const relPath =
|
|
13167
|
+
const relPath = path6.relative(process.cwd(), file);
|
|
14949
13168
|
for (const risk of risks) {
|
|
14950
13169
|
const level = correctIdempotentRiskLevel(risk.level, risk.reasonCode);
|
|
14951
13170
|
collected.push({ ...risk, level, file: relPath });
|
|
@@ -15375,14 +13594,19 @@ async function runSyncAction(env, options) {
|
|
|
15375
13594
|
throwSchemaOutOfSyncError();
|
|
15376
13595
|
} catch (error) {
|
|
15377
13596
|
if (error instanceof CLIError) throw error;
|
|
13597
|
+
const classifiedFailure = classifyDbSyncCommandFailure(error, runaEnv);
|
|
13598
|
+
if (classifiedFailure) {
|
|
13599
|
+
throw new CLIError(
|
|
13600
|
+
classifiedFailure.message,
|
|
13601
|
+
classifiedFailure.code,
|
|
13602
|
+
classifiedFailure.suggestions,
|
|
13603
|
+
error instanceof Error ? error : void 0
|
|
13604
|
+
);
|
|
13605
|
+
}
|
|
15378
13606
|
throw new CLIError(
|
|
15379
13607
|
"Schema sync check failed",
|
|
15380
13608
|
"DB_SYNC_FAILED",
|
|
15381
|
-
|
|
15382
|
-
"Check database connectivity",
|
|
15383
|
-
"Verify psql is installed and accessible",
|
|
15384
|
-
"Ensure packages/database exists"
|
|
15385
|
-
],
|
|
13609
|
+
getDbSyncFallbackSuggestions(),
|
|
15386
13610
|
error instanceof Error ? error : void 0
|
|
15387
13611
|
);
|
|
15388
13612
|
}
|
|
@@ -15406,7 +13630,7 @@ var testGenCommand = new Command("test:gen").description("Generate pgTAP behavio
|
|
|
15406
13630
|
const databaseUrl = options.db || process.env.DATABASE_URL || getLocalDbUrl();
|
|
15407
13631
|
const schemas = options.schemas ? options.schemas.split(",") : void 0;
|
|
15408
13632
|
const dbPackage = databasePaths.package();
|
|
15409
|
-
const defaultOutputPath =
|
|
13633
|
+
const defaultOutputPath = path6.join(dbPackage, "tests/00_behavior.generated.test.sql");
|
|
15410
13634
|
const outputPath = options.output || defaultOutputPath;
|
|
15411
13635
|
spinner.text = "Generating pgTAP behavior tests...";
|
|
15412
13636
|
const result = await dbGeneratePgTapTests({
|
|
@@ -15802,15 +14026,15 @@ async function findScriptPath(scriptKey) {
|
|
|
15802
14026
|
const scriptConfig = SCRIPT_LOCATIONS[scriptKey];
|
|
15803
14027
|
const relativePath = scriptConfig.relativePath;
|
|
15804
14028
|
const workspaceRoot = findWorkspaceRoot() || process.cwd();
|
|
15805
|
-
const directPath =
|
|
14029
|
+
const directPath = path6.join(workspaceRoot, relativePath);
|
|
15806
14030
|
if (existsSync(directPath)) {
|
|
15807
14031
|
return directPath;
|
|
15808
14032
|
}
|
|
15809
14033
|
if (relativePath.includes("sdk/scripts") || relativePath.includes("packages/sdk")) {
|
|
15810
14034
|
try {
|
|
15811
14035
|
const sdkScriptsPath = await getSDKScriptsPath();
|
|
15812
|
-
const scriptName =
|
|
15813
|
-
const sdkScriptPath =
|
|
14036
|
+
const scriptName = path6.basename(relativePath);
|
|
14037
|
+
const sdkScriptPath = path6.join(sdkScriptsPath, scriptName);
|
|
15814
14038
|
if (existsSync(sdkScriptPath)) {
|
|
15815
14039
|
return sdkScriptPath;
|
|
15816
14040
|
}
|
|
@@ -15827,7 +14051,7 @@ async function runSupabaseScript(scriptKey, args = [], options = {}) {
|
|
|
15827
14051
|
}
|
|
15828
14052
|
try {
|
|
15829
14053
|
const result = await execa(scriptPath, args, {
|
|
15830
|
-
cwd: options.cwd ||
|
|
14054
|
+
cwd: options.cwd || path6.dirname(scriptPath),
|
|
15831
14055
|
stdio: options.stdio || "pipe",
|
|
15832
14056
|
timeout: options.timeout,
|
|
15833
14057
|
env: {
|