@runa-ai/runa-cli 0.6.0 → 0.7.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{build-BXUJKYHC.js → build-HUDIP6KU.js} +153 -164
- package/dist/{cache-H63JKFYH.js → cache-N7WNPEYF.js} +2 -3
- package/dist/check-LOMVIRHX.js +12 -0
- package/dist/{chunk-HPYJPB5Y.js → chunk-2APB25TT.js} +44 -10
- package/dist/chunk-3WDV32GA.js +33 -0
- package/dist/chunk-5FT3F36G.js +59 -0
- package/dist/{chunk-7QV7U6NI.js → chunk-6FAU4IGR.js} +2 -1
- package/dist/{chunk-CE3DEYFT.js → chunk-7B5C6U2K.js} +2 -208
- package/dist/{chunk-GOGRLQNP.js → chunk-AFY3TX4I.js} +1 -1
- package/dist/{chunk-KWX3JHCY.js → chunk-AKZAN4BC.js} +6 -1
- package/dist/{chunk-XJBQINSA.js → chunk-CCW3PLQY.js} +2 -2
- package/dist/{chunk-IBVVGH6X.js → chunk-EMB6IZFT.js} +17 -4
- package/dist/chunk-FHG3ILE4.js +2011 -0
- package/dist/{chunk-22CS6EMA.js → chunk-H2AHNI75.js} +1 -1
- package/dist/{chunk-UU55OH7P.js → chunk-KE6QJBZG.js} +2 -3
- package/dist/{check-6AB5NGWK.js → chunk-QM53IQHM.js} +14 -12
- package/dist/{chunk-RRGQCUKT.js → chunk-WJXC4MVY.js} +30 -3
- package/dist/chunk-XDCHRVE3.js +215 -0
- package/dist/{chunk-P7U52PBY.js → chunk-Z4Z5DNW4.js} +49 -2
- package/dist/{ci-V3PIG2GI.js → ci-XY6IKEDC.js} +1938 -238
- package/dist/cli/contract-output.d.ts +1 -0
- package/dist/{cli-GFRZCJQR.js → cli-UZA4RBNQ.js} +216 -173
- package/dist/commands/build/actors/validate.d.ts +2 -0
- package/dist/commands/check/commands/check.d.ts +8 -3
- package/dist/commands/ci/machine/actors/db/collect-schema-stats.d.ts +12 -6
- package/dist/commands/ci/machine/actors/db/production-preview.d.ts +10 -0
- package/dist/commands/ci/machine/actors/db/schema-canonical-diff.d.ts +77 -0
- package/dist/commands/ci/machine/actors/db/schema-stats.d.ts +11 -0
- package/dist/commands/ci/machine/actors/db/sync-schema.d.ts +9 -1
- package/dist/commands/ci/machine/commands/machine-runner.d.ts +2 -0
- package/dist/commands/ci/machine/formatters/sections/production-schema-status.d.ts +30 -0
- package/dist/commands/ci/machine/formatters/sections/schema-matrix.d.ts +3 -3
- package/dist/commands/ci/machine/helpers.d.ts +8 -0
- package/dist/commands/ci/machine/machine.d.ts +57 -4
- package/dist/commands/ci/machine/types.d.ts +2 -0
- package/dist/commands/ci/utils/execa-helpers.d.ts +1 -0
- package/dist/commands/db/commands/db-sync/error-classifier.d.ts +9 -0
- package/dist/commands/dev/actors/index.d.ts +5 -0
- package/dist/commands/dev/actors/tables-manifest.d.ts +16 -0
- package/dist/commands/dev/contract.d.ts +1 -1
- package/dist/commands/dev/guards.d.ts +24 -0
- package/dist/commands/dev/machine.d.ts +22 -3
- package/dist/commands/dev/types.d.ts +2 -0
- package/dist/commands/doctor.d.ts +9 -0
- package/dist/commands/inject-test-attrs/defaults.d.ts +9 -0
- package/dist/commands/template-check/commands/template-check.d.ts +1 -0
- package/dist/commands/template-check/contract.d.ts +1 -0
- package/dist/commands/utils/machine-state-logging.d.ts +20 -0
- package/dist/commands/utils/repo-root.d.ts +2 -0
- package/dist/constants/versions.d.ts +1 -1
- package/dist/{db-HR7CREX2.js → db-Q3GF7JWP.js} +518 -2234
- package/dist/{dev-A7RW6XQV.js → dev-5YXNPTCJ.js} +168 -49
- package/dist/doctor-MZLOA53G.js +44 -0
- package/dist/{env-B47Z4747.js → env-GMB3THRG.js} +6 -7
- package/dist/{env-files-K2C7O7L5.js → env-files-2UIUYLLR.js} +2 -2
- package/dist/{error-handler-4EYSDOSE.js → error-handler-HEXBRNVV.js} +2 -2
- package/dist/{hotfix-CULKKMGS.js → hotfix-NDTPY2T4.js} +4 -4
- package/dist/index.js +4 -4
- package/dist/{init-ELK5QCWR.js → init-U4VCRHTD.js} +5 -6
- package/dist/{inject-test-attrs-Y5UD5P7Q.js → inject-test-attrs-P44BVTQS.js} +5 -18
- package/dist/{link-C43JRZWY.js → link-VSNDVZZD.js} +2 -3
- package/dist/manifest-TMFLESHW.js +19 -0
- package/dist/{risk-detector-BXUY2WKS.js → risk-detector-4U6ZJ2G5.js} +1 -1
- package/dist/{risk-detector-core-O7I7SPR7.js → risk-detector-core-TK4OAI3N.js} +2 -2
- package/dist/{risk-detector-plpgsql-SGMVKYJP.js → risk-detector-plpgsql-HWKS4OLR.js} +37 -7
- package/dist/{status-IJ4ZWHMX.js → status-UTKS63AB.js} +2 -3
- package/dist/{telemetry-FN7V727Y.js → telemetry-P56UBLZ2.js} +2 -3
- package/dist/{template-check-PNG5NQ5H.js → template-check-FFJVDLBF.js} +63 -35
- package/dist/{test-QYXE5UVW.js → test-V4KQL574.js} +34 -10
- package/dist/{test-gen-QPWOIEHU.js → test-gen-FS4CEY3P.js} +2 -3
- package/dist/{upgrade-3SLWVNAC.js → upgrade-7TWORWBV.js} +18 -6
- package/dist/{validate-SM4PXPS7.js → validate-CAAW4Y44.js} +2 -3
- package/dist/{vuln-check-TYQNEFS7.js → vuln-check-6CMNPSBR.js} +3 -4
- package/dist/{vuln-checker-2QXGN5YT.js → vuln-checker-EJJTNDNE.js} +413 -140
- package/dist/{watch-UCDVOQAH.js → watch-PNTKZYFB.js} +1 -1
- package/dist/{workflow-ZB5Q2PFY.js → workflow-H75N4BXX.js} +3 -4
- package/package.json +2 -2
- package/dist/chunk-JT5SUTWE.js +0 -9
- package/dist/chunk-M47WJJVS.js +0 -71
- package/dist/manifest-2NOQ2IMK.js +0 -32
- package/dist/{chunk-MNPMZERI.js → chunk-644FVGIQ.js} +1 -1
|
@@ -1,47 +1,43 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
import { createRequire } from 'module';
|
|
3
|
+
import { detectDatabaseStack, getStackPaths } from './chunk-CCKG5R4Y.js';
|
|
3
4
|
import './chunk-ZZOXM6Q4.js';
|
|
4
5
|
import { createError } from './chunk-JQXOVCOP.js';
|
|
5
|
-
import { resolveDatabaseUrl, tryResolveDatabaseUrl } from './chunk-
|
|
6
|
-
export { resolveDatabaseUrl, tryResolveDatabaseUrl } from './chunk-
|
|
7
|
-
import {
|
|
8
|
-
import { detectAppSchemas, normalizeDatabaseUrlForDdl, parsePostgresUrl, buildPsqlEnv, buildPsqlArgs, psqlSyncQuery, formatSchemasForSql, blankDollarQuotedBodies, stripSqlComments, psqlQuery, psqlSyncFile, psqlExec } from './chunk-CE3DEYFT.js';
|
|
6
|
+
import { resolveDatabaseUrl, tryResolveDatabaseUrl } from './chunk-AKZAN4BC.js';
|
|
7
|
+
export { resolveDatabaseUrl, tryResolveDatabaseUrl } from './chunk-AKZAN4BC.js';
|
|
8
|
+
import { detectAppSchemas, normalizeDatabaseUrlForDdl, formatSchemasForSql } from './chunk-XDCHRVE3.js';
|
|
9
9
|
import './chunk-NPSRD26F.js';
|
|
10
|
-
import { categorizeRisks, detectSchemaRisks } from './chunk-
|
|
11
|
-
import {
|
|
12
|
-
import { loadEnvFiles } from './chunk-MNPMZERI.js';
|
|
13
|
-
import { findWorkspaceRoot } from './chunk-JMJP4A47.js';
|
|
10
|
+
import { categorizeRisks, detectSchemaRisks } from './chunk-H2AHNI75.js';
|
|
11
|
+
import { loadEnvFiles } from './chunk-644FVGIQ.js';
|
|
14
12
|
import { diagnoseSupabaseStart } from './chunk-AAIE4F2U.js';
|
|
13
|
+
import { validateUserFilePath, filterSafePaths, resolveSafePath } from './chunk-DRSUEMAK.js';
|
|
15
14
|
import { runMachine } from './chunk-QDF7QXBL.js';
|
|
16
|
-
import './chunk-
|
|
15
|
+
import './chunk-EMB6IZFT.js';
|
|
16
|
+
import { extractSchemaTablesAndEnums, fetchDbTablesAndEnums, extractTablesFromIdempotentSql, diffSchema, generateTablesManifest, writeEnvLocalBridge, removeEnvLocalBridge } from './chunk-FHG3ILE4.js';
|
|
17
|
+
import { parsePostgresUrl, buildPsqlEnv, buildPsqlArgs, psqlSyncQuery, blankDollarQuotedBodies, stripSqlComments, psqlSyncFile, psqlExec, psqlQuery } from './chunk-7B5C6U2K.js';
|
|
17
18
|
import { redactSecrets } from './chunk-II7VYQEM.js';
|
|
18
|
-
import { writeEnvLocalBridge, removeEnvLocalBridge } from './chunk-M47WJJVS.js';
|
|
19
19
|
import { init_local_supabase, init_constants, detectLocalSupabasePorts, buildLocalDatabaseUrl, DATABASE_DEFAULTS, SEED_DEFAULTS, SCRIPT_LOCATIONS } from './chunk-VM3IWOT5.js';
|
|
20
20
|
export { DATABASE_DEFAULTS, SCRIPT_LOCATIONS, SEED_DEFAULTS } from './chunk-VM3IWOT5.js';
|
|
21
21
|
import { secureSupabase, secureDocker } from './chunk-RZLYEO4U.js';
|
|
22
|
-
import {
|
|
23
|
-
import
|
|
24
|
-
import './chunk-RRGQCUKT.js';
|
|
25
|
-
import './chunk-JT5SUTWE.js';
|
|
22
|
+
import { emitJsonSuccess } from './chunk-KE6QJBZG.js';
|
|
23
|
+
import './chunk-WJXC4MVY.js';
|
|
26
24
|
import { getOutputFormatFromEnv } from './chunk-HKUWEGUX.js';
|
|
25
|
+
import { getDatabasePackagePath, loadRunaConfig, getSDKScriptsPath } from './chunk-5NKWR4FF.js';
|
|
26
|
+
import { findWorkspaceRoot } from './chunk-JMJP4A47.js';
|
|
27
27
|
import { init_esm_shims } from './chunk-VRXHCR5K.js';
|
|
28
28
|
import { Command } from 'commander';
|
|
29
|
-
import { createCLILogger,
|
|
29
|
+
import { createCLILogger, CLIError, dbGenerateDiagram, DbDiagramGenerateOutputSchema, createDbSnapshot, syncDatabase, emitDbPushFailureCapsule, emitDbAnnotations, writeDbPushStepSummary, exportDbReportJson, DbSyncOutputSchema, databasePaths, detectRequiredServices, formatDetectionResults, dbStart, DbLifecycleStartOutputSchema, dbStop, DbLifecycleStopOutputSchema, dbReset, DbLifecycleResetOutputSchema, dbValidateSchemas, DbSchemaValidateOutputSchema, DbSchemaRisksOutputSchema, dbApplySchemas, DbSchemaApplyOutputSchema, dbGenerateTypes, DbSchemaGenerateOutputSchema, extractSchemaFilter, dbSeedInit, DbSeedInitOutputSchema, dbSeedValidate, DbSeedValidateOutputSchema, dbSeedGenerate, DbSeedGenerateOutputSchema, dbVerifySeeds, DbSeedVerifyOutputSchema, DbSnapshotCreateOutputSchema, restoreDbSnapshot, DbSnapshotRestoreOutputSchema, listDbSnapshots, DbSnapshotListOutputSchema, dbGeneratePgTapTests, DbTestGenOutputSchema, dbUpdateGoldenRecord, DbTestUpdateGoldenOutputSchema, recordSchemaAudit, RecordSchemaAuditOutputSchema, createBackup, CreateBackupOutputSchema, listBackups, ListBackupsOutputSchema, getBackupMetadata, restoreBackup, RestoreBackupOutputSchema, deleteBackup, DeleteBackupOutputSchema, loadRunaConfigOrThrow, dbDetectSchemaRisks, dbSeedApply, writeDbSeedStepSummary, DbSeedApplyOutputSchema, emitDbSeedFailureCapsule, checkExtensionConfig, resolveAvailablePorts, calculatePortOffset, getPortsWithOffset, formatExtensionWarnings } from '@runa-ai/runa';
|
|
30
30
|
import { fromPromise, setup, assign, createActor } from 'xstate';
|
|
31
31
|
import { spawn, spawnSync, execFileSync } from 'child_process';
|
|
32
32
|
import { z } from 'zod';
|
|
33
|
-
import { existsSync, mkdirSync, copyFileSync, readdirSync, mkdtempSync, readFileSync, writeFileSync, rmSync, lstatSync,
|
|
34
|
-
import
|
|
33
|
+
import { existsSync, mkdirSync, copyFileSync, readdirSync, mkdtempSync, readFileSync, writeFileSync, rmSync, lstatSync, unlinkSync, statSync } from 'fs';
|
|
34
|
+
import path6, { join, dirname, resolve, basename, relative } from 'path';
|
|
35
35
|
import crypto, { randomBytes, randomUUID } from 'crypto';
|
|
36
36
|
import { tmpdir } from 'os';
|
|
37
|
-
import postgres2 from 'postgres';
|
|
38
|
-
import { isTable, getTableUniqueName, getTableName } from 'drizzle-orm';
|
|
39
|
-
import { isPgEnum } from 'drizzle-orm/pg-core';
|
|
40
|
-
import { createJiti } from 'jiti';
|
|
41
|
-
import { introspectDatabase } from '@runa-ai/runa/test-generators';
|
|
42
37
|
import { writeFile, appendFile, readFile, stat, realpath } from 'fs/promises';
|
|
43
38
|
import chalk from 'chalk';
|
|
44
39
|
import { execa } from 'execa';
|
|
40
|
+
import postgres from 'postgres';
|
|
45
41
|
import { fileURLToPath } from 'url';
|
|
46
42
|
import { Project } from 'ts-morph';
|
|
47
43
|
import ora from 'ora';
|
|
@@ -227,11 +223,39 @@ function flushStatement(current, results) {
|
|
|
227
223
|
if (!sql) return;
|
|
228
224
|
results.push({ index: current.index, sql, hazards: current.hazards });
|
|
229
225
|
}
|
|
226
|
+
function stripBlockComments(line, inBlockComment) {
|
|
227
|
+
let index = 0;
|
|
228
|
+
let text = "";
|
|
229
|
+
let isInsideBlockComment = inBlockComment;
|
|
230
|
+
while (index < line.length) {
|
|
231
|
+
if (isInsideBlockComment) {
|
|
232
|
+
const endIndex = line.indexOf("*/", index);
|
|
233
|
+
if (endIndex === -1) {
|
|
234
|
+
return { text, inBlockComment: true };
|
|
235
|
+
}
|
|
236
|
+
index = endIndex + 2;
|
|
237
|
+
isInsideBlockComment = false;
|
|
238
|
+
continue;
|
|
239
|
+
}
|
|
240
|
+
const startIndex = line.indexOf("/*", index);
|
|
241
|
+
if (startIndex === -1) {
|
|
242
|
+
text += line.slice(index);
|
|
243
|
+
break;
|
|
244
|
+
}
|
|
245
|
+
text += line.slice(index, startIndex);
|
|
246
|
+
index = startIndex + 2;
|
|
247
|
+
isInsideBlockComment = true;
|
|
248
|
+
}
|
|
249
|
+
return { text, inBlockComment: isInsideBlockComment };
|
|
250
|
+
}
|
|
230
251
|
function parseWithStatementMarkers(lines) {
|
|
231
252
|
const results = [];
|
|
232
253
|
let current = null;
|
|
254
|
+
let inBlockComment = false;
|
|
233
255
|
for (const line of lines) {
|
|
234
|
-
const
|
|
256
|
+
const stripped = stripBlockComments(line, inBlockComment);
|
|
257
|
+
inBlockComment = stripped.inBlockComment;
|
|
258
|
+
const trimmed = stripped.text.trim();
|
|
235
259
|
const idxMatch = trimmed.match(STATEMENT_IDX_REGEX);
|
|
236
260
|
if (idxMatch) {
|
|
237
261
|
flushStatement(current, results);
|
|
@@ -244,7 +268,7 @@ function parseWithStatementMarkers(lines) {
|
|
|
244
268
|
continue;
|
|
245
269
|
}
|
|
246
270
|
if (current && trimmed && !trimmed.startsWith("--")) {
|
|
247
|
-
current.sqlLines.push(
|
|
271
|
+
current.sqlLines.push(stripped.text);
|
|
248
272
|
}
|
|
249
273
|
}
|
|
250
274
|
flushStatement(current, results);
|
|
@@ -253,15 +277,18 @@ function parseWithStatementMarkers(lines) {
|
|
|
253
277
|
function parseAsSingleStatement(lines) {
|
|
254
278
|
const sqlLines = [];
|
|
255
279
|
const hazards = [];
|
|
280
|
+
let inBlockComment = false;
|
|
256
281
|
for (const line of lines) {
|
|
257
|
-
const
|
|
282
|
+
const stripped = stripBlockComments(line, inBlockComment);
|
|
283
|
+
inBlockComment = stripped.inBlockComment;
|
|
284
|
+
const trimmed = stripped.text.trim();
|
|
258
285
|
const hazardMatch = trimmed.match(HAZARD_REGEX);
|
|
259
286
|
if (hazardMatch) {
|
|
260
287
|
hazards.push({ type: hazardMatch[1], message: hazardMatch[2] });
|
|
261
288
|
continue;
|
|
262
289
|
}
|
|
263
290
|
if (trimmed && !trimmed.startsWith("--")) {
|
|
264
|
-
sqlLines.push(
|
|
291
|
+
sqlLines.push(stripped.text);
|
|
265
292
|
}
|
|
266
293
|
}
|
|
267
294
|
const sql = sqlLines.join("\n").trim();
|
|
@@ -1446,6 +1473,46 @@ function cleanupPlanFile(planFile) {
|
|
|
1446
1473
|
} catch {
|
|
1447
1474
|
}
|
|
1448
1475
|
}
|
|
1476
|
+
function validatePreparedPlan(config, attempt, totalWaitMs, planSql, verbose) {
|
|
1477
|
+
const plan = parsePlanOutput(planSql);
|
|
1478
|
+
if (config?.allowedHazardTypes) {
|
|
1479
|
+
validatePlanForExecution(plan, config.allowedHazardTypes);
|
|
1480
|
+
}
|
|
1481
|
+
if (plan.totalStatements === 0) {
|
|
1482
|
+
return {
|
|
1483
|
+
currentPlanSql: plan.rawSql,
|
|
1484
|
+
result: { success: true, attempts: attempt, totalWaitMs }
|
|
1485
|
+
};
|
|
1486
|
+
}
|
|
1487
|
+
validateStatementTypes(plan);
|
|
1488
|
+
if (verbose) {
|
|
1489
|
+
logger6.debug(`Plan validated: ${plan.totalStatements} statement(s)`);
|
|
1490
|
+
}
|
|
1491
|
+
return { currentPlanSql: plan.rawSql };
|
|
1492
|
+
}
|
|
1493
|
+
function writePlanFile(planSql, attempt) {
|
|
1494
|
+
const planFile = join(tmpdir(), `runa-plan-${randomUUID()}-${attempt}.sql`);
|
|
1495
|
+
const wrappedSql = wrapPlanSql(planSql);
|
|
1496
|
+
writeFileSync(planFile, wrappedSql, "utf-8");
|
|
1497
|
+
return planFile;
|
|
1498
|
+
}
|
|
1499
|
+
function runPlanAttempt(context, planFile) {
|
|
1500
|
+
const execution = runPlanExecution(context.dbUrl, planFile, context.verbose);
|
|
1501
|
+
if (execution.kind !== "retry") {
|
|
1502
|
+
return execution;
|
|
1503
|
+
}
|
|
1504
|
+
logRetryableExecution(context.attempt, context.maxRetries, execution.errorOutput);
|
|
1505
|
+
return execution;
|
|
1506
|
+
}
|
|
1507
|
+
function buildRetryFailure(maxRetries, totalWaitMs, lastError) {
|
|
1508
|
+
logger6.error(`Migration failed after ${maxRetries} attempts (total wait: ${totalWaitMs}ms)`);
|
|
1509
|
+
return {
|
|
1510
|
+
success: false,
|
|
1511
|
+
error: lastError || new Error("Migration failed after max retries"),
|
|
1512
|
+
attempts: maxRetries,
|
|
1513
|
+
totalWaitMs
|
|
1514
|
+
};
|
|
1515
|
+
}
|
|
1449
1516
|
async function prepareRetryIteration(params) {
|
|
1450
1517
|
if (params.attempt === 0) {
|
|
1451
1518
|
return {
|
|
@@ -1497,44 +1564,31 @@ async function executePlanSqlWithRetry(dbUrl, initialPlanSql, verbose, config) {
|
|
|
1497
1564
|
return preparedPlan.result;
|
|
1498
1565
|
}
|
|
1499
1566
|
currentPlanSql = preparedPlan.currentPlanSql;
|
|
1500
|
-
|
|
1501
|
-
|
|
1502
|
-
|
|
1503
|
-
|
|
1504
|
-
|
|
1505
|
-
|
|
1506
|
-
|
|
1507
|
-
|
|
1508
|
-
|
|
1509
|
-
`Filtered ${removedStatements.length} DROP statement(s) targeting idempotent-managed tables`
|
|
1510
|
-
);
|
|
1511
|
-
for (const stmt of removedStatements) {
|
|
1512
|
-
logger6.warn(` Skipped: ${stmt.sql.split("\n")[0]}`);
|
|
1513
|
-
}
|
|
1514
|
-
plan = filteredPlan;
|
|
1515
|
-
currentPlanSql = filteredPlan.rawSql;
|
|
1516
|
-
}
|
|
1517
|
-
}
|
|
1518
|
-
if (plan.totalStatements === 0) {
|
|
1519
|
-
return { success: true, attempts: attempt, totalWaitMs };
|
|
1520
|
-
}
|
|
1521
|
-
if (config?.allowedHazardTypes) {
|
|
1522
|
-
validatePlanForExecution(plan, config.allowedHazardTypes);
|
|
1523
|
-
}
|
|
1524
|
-
validateStatementTypes(plan);
|
|
1525
|
-
if (verbose) {
|
|
1526
|
-
logger6.debug(`Plan validated: ${plan.totalStatements} statement(s)`);
|
|
1567
|
+
const validatedPlan = validatePreparedPlan(
|
|
1568
|
+
config,
|
|
1569
|
+
attempt,
|
|
1570
|
+
totalWaitMs,
|
|
1571
|
+
currentPlanSql,
|
|
1572
|
+
verbose
|
|
1573
|
+
);
|
|
1574
|
+
if (validatedPlan.result) {
|
|
1575
|
+
return validatedPlan.result;
|
|
1527
1576
|
}
|
|
1528
|
-
|
|
1529
|
-
const
|
|
1530
|
-
|
|
1577
|
+
currentPlanSql = validatedPlan.currentPlanSql;
|
|
1578
|
+
const planFile = writePlanFile(currentPlanSql, attempt);
|
|
1579
|
+
const context = {
|
|
1580
|
+
dbUrl,
|
|
1581
|
+
attempt,
|
|
1582
|
+
maxRetries,
|
|
1583
|
+
verbose,
|
|
1584
|
+
totalWaitMs
|
|
1585
|
+
};
|
|
1531
1586
|
try {
|
|
1532
|
-
const execution =
|
|
1587
|
+
const execution = runPlanAttempt(context, planFile);
|
|
1533
1588
|
if (execution.kind === "success") {
|
|
1534
1589
|
return { success: true, attempts: attempt, totalWaitMs };
|
|
1535
1590
|
}
|
|
1536
1591
|
if (execution.kind === "retry") {
|
|
1537
|
-
logRetryableExecution(attempt, maxRetries, execution.errorOutput);
|
|
1538
1592
|
lastError = execution.error;
|
|
1539
1593
|
continue;
|
|
1540
1594
|
}
|
|
@@ -1548,13 +1602,7 @@ async function executePlanSqlWithRetry(dbUrl, initialPlanSql, verbose, config) {
|
|
|
1548
1602
|
cleanupPlanFile(planFile);
|
|
1549
1603
|
}
|
|
1550
1604
|
}
|
|
1551
|
-
|
|
1552
|
-
return {
|
|
1553
|
-
success: false,
|
|
1554
|
-
error: lastError || new Error("Migration failed after max retries"),
|
|
1555
|
-
attempts: maxRetries,
|
|
1556
|
-
totalWaitMs
|
|
1557
|
-
};
|
|
1605
|
+
return buildRetryFailure(maxRetries, totalWaitMs, lastError);
|
|
1558
1606
|
}
|
|
1559
1607
|
|
|
1560
1608
|
// src/commands/db/apply/helpers/shadow-db-manager.ts
|
|
@@ -3234,7 +3282,7 @@ function logIdempotentRiskSummary(summary) {
|
|
|
3234
3282
|
async function detectIdempotentRiskSummary(schemasDir, files, verbose) {
|
|
3235
3283
|
const summary = emptyRiskSummary();
|
|
3236
3284
|
try {
|
|
3237
|
-
const { detectSchemaRisks: detectSchemaRisks2 } = await import('./risk-detector-
|
|
3285
|
+
const { detectSchemaRisks: detectSchemaRisks2 } = await import('./risk-detector-4U6ZJ2G5.js');
|
|
3238
3286
|
for (const file of files) {
|
|
3239
3287
|
const filePath = join(schemasDir, file);
|
|
3240
3288
|
const risks = await detectSchemaRisks2(filePath);
|
|
@@ -3528,2083 +3576,149 @@ function backupProtectedTablesForProduction(dbUrl, protectedTables, input) {
|
|
|
3528
3576
|
if (input.env !== "production") {
|
|
3529
3577
|
return;
|
|
3530
3578
|
}
|
|
3531
|
-
const { backupPath } = backupIdempotentTables(dbUrl, protectedTables, input.verbose);
|
|
3532
|
-
if (backupPath) {
|
|
3533
|
-
logger13.info(`Recovery: pg_restore -d <DATABASE_URL> ${backupPath}`);
|
|
3534
|
-
return;
|
|
3535
|
-
}
|
|
3536
|
-
if (protectedTables.length > 0 && !input.allowDataLoss) {
|
|
3537
|
-
throw new Error(
|
|
3538
|
-
"Pre-apply backup failed for production deployment.\n Protected tables exist but could not be backed up.\n Use --allow-data-loss to proceed without backup (emergency only)."
|
|
3539
|
-
);
|
|
3540
|
-
}
|
|
3541
|
-
}
|
|
3542
|
-
async function cleanupApplyResources(params) {
|
|
3543
|
-
if (params.shadowDb) {
|
|
3544
|
-
try {
|
|
3545
|
-
await params.shadowDb.cleanup();
|
|
3546
|
-
if (params.verbose) {
|
|
3547
|
-
logger13.debug("Shadow DB cleaned up");
|
|
3548
|
-
}
|
|
3549
|
-
} catch (cleanupError) {
|
|
3550
|
-
logger13.warn(`Failed to cleanup shadow DB: ${cleanupError}`);
|
|
3551
|
-
}
|
|
3552
|
-
}
|
|
3553
|
-
if (params.prefilter) {
|
|
3554
|
-
try {
|
|
3555
|
-
rmSync(params.prefilter.filteredDir, { recursive: true, force: true });
|
|
3556
|
-
} catch {
|
|
3557
|
-
}
|
|
3558
|
-
}
|
|
3559
|
-
try {
|
|
3560
|
-
rmSync(params.tmpDir, { recursive: true, force: true });
|
|
3561
|
-
} catch {
|
|
3562
|
-
}
|
|
3563
|
-
}
|
|
3564
|
-
var applyPgSchemaDiff = fromPromise(async ({ input: { input, targetDir } }) => {
|
|
3565
|
-
const schemasDir = join(targetDir, "supabase/schemas/declarative");
|
|
3566
|
-
if (!existsSync(schemasDir)) {
|
|
3567
|
-
logger13.info("No declarative schemas found");
|
|
3568
|
-
return { sql: "", hazards: [], applied: false };
|
|
3569
|
-
}
|
|
3570
|
-
const dbUrl = getDbUrl(input);
|
|
3571
|
-
const configState = loadPgSchemaDiffConfigState(targetDir, input.verbose);
|
|
3572
|
-
const prefilterState = createPrefilterState(
|
|
3573
|
-
schemasDir,
|
|
3574
|
-
input.verbose,
|
|
3575
|
-
configState.configExclusions
|
|
3576
|
-
);
|
|
3577
|
-
const freshDbResult = handleFreshDbCase(input, dbUrl, targetDir, prefilterState.pgSchemaDiffDir);
|
|
3578
|
-
if (freshDbResult) return freshDbResult;
|
|
3579
|
-
const schemaFiles = collectSchemaFiles(schemasDir);
|
|
3580
|
-
if (schemaFiles.length === 0) {
|
|
3581
|
-
logger13.info("No schema files to apply");
|
|
3582
|
-
return { sql: "", hazards: [], applied: false };
|
|
3583
|
-
}
|
|
3584
|
-
const tmpDir = createCombinedSchemaBundle(schemaFiles, input.verbose);
|
|
3585
|
-
logger13.step("Running pg-schema-diff (incremental changes)...");
|
|
3586
|
-
let shadowDb = null;
|
|
3587
|
-
try {
|
|
3588
|
-
verifyPgSchemaDiffBinary({ strictVersion: input.env === "production" });
|
|
3589
|
-
await verifyDatabaseConnection(dbUrl);
|
|
3590
|
-
shadowDb = await createShadowDbForRun(dbUrl, configState.shadowExtensions, input.verbose);
|
|
3591
|
-
const includeSchemas = detectAppSchemas(schemasDir, input.verbose);
|
|
3592
|
-
cleanPartitionAclsForPgSchemaDiff(dbUrl, includeSchemas, input.verbose);
|
|
3593
|
-
const { planOutput } = executePgSchemaDiffPlan(
|
|
3594
|
-
dbUrl,
|
|
3595
|
-
prefilterState.pgSchemaDiffDir,
|
|
3596
|
-
includeSchemas,
|
|
3597
|
-
input.verbose,
|
|
3598
|
-
{ tempDbDsn: shadowDb?.dsn }
|
|
3599
|
-
);
|
|
3600
|
-
const noChangesResult = buildNoChangesResult(planOutput);
|
|
3601
|
-
if (noChangesResult) return noChangesResult;
|
|
3602
|
-
const { hazards } = handleHazardsWithContext(planOutput, input, schemasDir);
|
|
3603
|
-
const droppedTables = detectDropTableStatements(planOutput);
|
|
3604
|
-
enforceDropSafety(input, droppedTables);
|
|
3605
|
-
const dataViolationCount = runPreApplyDataCompatibility(dbUrl, planOutput, input);
|
|
3606
|
-
const protectedTables = getIdempotentProtectedTables(
|
|
3607
|
-
schemasDir,
|
|
3608
|
-
prefilterState.configExclusions
|
|
3609
|
-
);
|
|
3610
|
-
const protectedObjects = getIdempotentProtectedObjects(
|
|
3611
|
-
schemasDir,
|
|
3612
|
-
prefilterState.configExclusions
|
|
3613
|
-
);
|
|
3614
|
-
const checkModeResult = buildCheckModeResult(
|
|
3615
|
-
input,
|
|
3616
|
-
planOutput,
|
|
3617
|
-
hazards,
|
|
3618
|
-
protectedTables,
|
|
3619
|
-
protectedObjects,
|
|
3620
|
-
dataViolationCount
|
|
3621
|
-
);
|
|
3622
|
-
if (checkModeResult) return checkModeResult;
|
|
3623
|
-
backupProtectedTablesForProduction(dbUrl, protectedTables, input);
|
|
3624
|
-
const preApplyCounts = getTableRowEstimates(dbUrl, schemasDir, input.verbose);
|
|
3625
|
-
const applyResult = await applyWithRetry({
|
|
3626
|
-
dbUrl,
|
|
3627
|
-
schemasDir,
|
|
3628
|
-
includeSchemas,
|
|
3629
|
-
input,
|
|
3630
|
-
planOutput,
|
|
3631
|
-
hazards,
|
|
3632
|
-
protectedTables,
|
|
3633
|
-
protectedObjects,
|
|
3634
|
-
tempDbDsn: shadowDb?.dsn,
|
|
3635
|
-
pgSchemaDiffDir: prefilterState.pgSchemaDiffDir
|
|
3636
|
-
});
|
|
3637
|
-
if (applyResult.applied) {
|
|
3638
|
-
verifyDataIntegrity(dbUrl, schemasDir, preApplyCounts, input.verbose, input.allowDataLoss);
|
|
3639
|
-
}
|
|
3640
|
-
return {
|
|
3641
|
-
...applyResult,
|
|
3642
|
-
dataViolations: dataViolationCount > 0 ? dataViolationCount : void 0
|
|
3643
|
-
};
|
|
3644
|
-
} finally {
|
|
3645
|
-
await cleanupApplyResources({
|
|
3646
|
-
shadowDb,
|
|
3647
|
-
prefilter: prefilterState.prefilter,
|
|
3648
|
-
tmpDir,
|
|
3649
|
-
verbose: input.verbose
|
|
3650
|
-
});
|
|
3651
|
-
}
|
|
3652
|
-
});
|
|
3653
|
-
var validatePartitions = fromPromise(async ({ input: { input, targetDir } }) => {
|
|
3654
|
-
if (input.check) return { warnings: [] };
|
|
3655
|
-
const idempotentDir = join(targetDir, "supabase/schemas/idempotent");
|
|
3656
|
-
if (!existsSync(idempotentDir)) return { warnings: [] };
|
|
3657
|
-
const expected = parseExpectedPartitions(idempotentDir);
|
|
3658
|
-
if (expected.length === 0) return { warnings: [] };
|
|
3659
|
-
const dbUrl = getDbUrl(input);
|
|
3660
|
-
const schemas = [...new Set(expected.map((e) => e.parent.split(".")[0] ?? ""))];
|
|
3661
|
-
const actual = queryActualPartitions(dbUrl, schemas);
|
|
3662
|
-
const drift = detectPartitionDrift(expected, actual);
|
|
3663
|
-
if (drift.missing.length === 0) {
|
|
3664
|
-
logger13.success(`All ${expected.length} expected partition(s) verified`);
|
|
3665
|
-
return { warnings: [] };
|
|
3666
|
-
}
|
|
3667
|
-
const warnings = formatPartitionWarnings(drift);
|
|
3668
|
-
for (const w of warnings) logger13.warn(w);
|
|
3669
|
-
return { warnings };
|
|
3670
|
-
});
|
|
3671
|
-
|
|
3672
|
-
// src/commands/db/apply/actors/seed-actors.ts
|
|
3673
|
-
init_esm_shims();
|
|
3674
|
-
|
|
3675
|
-
// src/commands/db/utils/table-registry.ts
|
|
3676
|
-
init_esm_shims();
|
|
3677
|
-
|
|
3678
|
-
// src/commands/db/utils/semantic-mapper.ts
|
|
3679
|
-
init_esm_shims();
|
|
3680
|
-
function snakeToCamel(str) {
|
|
3681
|
-
return str.replace(/_([a-z])/g, (_, c) => c.toUpperCase());
|
|
3682
|
-
}
|
|
3683
|
-
function generateSemanticName(schema, tableName, useSchemaPrefix = false) {
|
|
3684
|
-
const baseName = snakeToCamel(tableName);
|
|
3685
|
-
if (useSchemaPrefix) {
|
|
3686
|
-
const schemaPrefix = snakeToCamel(schema);
|
|
3687
|
-
return schemaPrefix + baseName.charAt(0).toUpperCase() + baseName.slice(1);
|
|
3688
|
-
}
|
|
3689
|
-
return baseName;
|
|
3690
|
-
}
|
|
3691
|
-
function groupBySemanticName(tables) {
|
|
3692
|
-
const bySemanticName = /* @__PURE__ */ new Map();
|
|
3693
|
-
for (const table of tables) {
|
|
3694
|
-
const baseName = snakeToCamel(table.name);
|
|
3695
|
-
const existing = bySemanticName.get(baseName) ?? [];
|
|
3696
|
-
existing.push(table);
|
|
3697
|
-
bySemanticName.set(baseName, existing);
|
|
3698
|
-
}
|
|
3699
|
-
return bySemanticName;
|
|
3700
|
-
}
|
|
3701
|
-
function collectConflicts(bySemanticName) {
|
|
3702
|
-
const conflicts = [];
|
|
3703
|
-
for (const [semanticName, tables] of bySemanticName) {
|
|
3704
|
-
if (tables.length <= 1) continue;
|
|
3705
|
-
conflicts.push({
|
|
3706
|
-
semanticName,
|
|
3707
|
-
tables: tables.map((table) => table.qualifiedName)
|
|
3708
|
-
});
|
|
3709
|
-
}
|
|
3710
|
-
return conflicts;
|
|
3711
|
-
}
|
|
3712
|
-
function getSchemaPriorityRank(schema, prioritySchemas) {
|
|
3713
|
-
const index = prioritySchemas.indexOf(schema);
|
|
3714
|
-
return index === -1 ? Number.POSITIVE_INFINITY : index;
|
|
3715
|
-
}
|
|
3716
|
-
function compareTablesByPriorityAndName(left, right, prioritySchemas) {
|
|
3717
|
-
const leftRank = getSchemaPriorityRank(left.schema, prioritySchemas);
|
|
3718
|
-
const rightRank = getSchemaPriorityRank(right.schema, prioritySchemas);
|
|
3719
|
-
if (leftRank !== rightRank) {
|
|
3720
|
-
return leftRank - rightRank;
|
|
3721
|
-
}
|
|
3722
|
-
return left.qualifiedName.localeCompare(right.qualifiedName);
|
|
3723
|
-
}
|
|
3724
|
-
function applyOverride(table, overrides, mapping) {
|
|
3725
|
-
const overrideName = overrides[table.qualifiedName];
|
|
3726
|
-
if (!overrideName) return false;
|
|
3727
|
-
mapping[overrideName] = table.qualifiedName;
|
|
3728
|
-
return true;
|
|
3729
|
-
}
|
|
3730
|
-
function resolveFirstStrategy(table, baseName, tablesWithSameName, prioritySchemas) {
|
|
3731
|
-
const sorted = [...tablesWithSameName].sort(
|
|
3732
|
-
(left, right) => compareTablesByPriorityAndName(left, right, prioritySchemas)
|
|
3733
|
-
);
|
|
3734
|
-
if (sorted[0]?.qualifiedName === table.qualifiedName) {
|
|
3735
|
-
return { mappedName: baseName };
|
|
3736
|
-
}
|
|
3737
|
-
return { skipped: true };
|
|
3738
|
-
}
|
|
3739
|
-
function resolveConflict(table, baseName, tablesWithSameName, conflictStrategy, prioritySchemas) {
|
|
3740
|
-
switch (conflictStrategy) {
|
|
3741
|
-
case "prefix":
|
|
3742
|
-
return { mappedName: generateSemanticName(table.schema, table.name, true) };
|
|
3743
|
-
case "error":
|
|
3744
|
-
throw new Error(
|
|
3745
|
-
`Semantic name conflict: '${baseName}' maps to multiple tables: ${tablesWithSameName.map((candidate) => candidate.qualifiedName).join(", ")}`
|
|
3746
|
-
);
|
|
3747
|
-
case "first":
|
|
3748
|
-
return resolveFirstStrategy(table, baseName, tablesWithSameName, prioritySchemas);
|
|
3749
|
-
}
|
|
3750
|
-
}
|
|
3751
|
-
function generateMapping(tables, options = {}) {
|
|
3752
|
-
const { conflictStrategy = "prefix", prioritySchemas = [], overrides = {} } = options;
|
|
3753
|
-
const bySemanticName = groupBySemanticName(tables);
|
|
3754
|
-
const conflicts = collectConflicts(bySemanticName);
|
|
3755
|
-
const mapping = {};
|
|
3756
|
-
const skipped = [];
|
|
3757
|
-
for (const table of tables) {
|
|
3758
|
-
if (applyOverride(table, overrides, mapping)) {
|
|
3759
|
-
continue;
|
|
3760
|
-
}
|
|
3761
|
-
const baseName = snakeToCamel(table.name);
|
|
3762
|
-
const tablesWithSameName = bySemanticName.get(baseName) ?? [];
|
|
3763
|
-
if (tablesWithSameName.length === 1) {
|
|
3764
|
-
mapping[baseName] = table.qualifiedName;
|
|
3765
|
-
continue;
|
|
3766
|
-
}
|
|
3767
|
-
const resolution = resolveConflict(
|
|
3768
|
-
table,
|
|
3769
|
-
baseName,
|
|
3770
|
-
tablesWithSameName,
|
|
3771
|
-
conflictStrategy,
|
|
3772
|
-
prioritySchemas
|
|
3773
|
-
);
|
|
3774
|
-
if (resolution.mappedName) {
|
|
3775
|
-
mapping[resolution.mappedName] = table.qualifiedName;
|
|
3776
|
-
} else if (resolution.skipped) {
|
|
3777
|
-
skipped.push(table.qualifiedName);
|
|
3778
|
-
}
|
|
3779
|
-
}
|
|
3780
|
-
return { mapping, conflicts, skipped };
|
|
3781
|
-
}
|
|
3782
|
-
function applyMappingToTables(tables, mapping) {
|
|
3783
|
-
const reverseMapping = /* @__PURE__ */ new Map();
|
|
3784
|
-
for (const [semantic, qualified] of Object.entries(mapping)) {
|
|
3785
|
-
reverseMapping.set(qualified, semantic);
|
|
3786
|
-
}
|
|
3787
|
-
return tables.map((table) => ({
|
|
3788
|
-
...table,
|
|
3789
|
-
semanticName: reverseMapping.get(table.qualifiedName) || table.semanticName
|
|
3790
|
-
}));
|
|
3791
|
-
}
|
|
3792
|
-
|
|
3793
|
-
// src/commands/db/utils/schema-sync.ts
init_esm_shims();
// Unquoted Postgres identifier: starts with a letter/underscore, then
// alphanumerics/underscores, at most 63 characters total.
var VALID_PG_IDENTIFIER_PATTERN = /^[a-zA-Z_][a-zA-Z0-9_]{0,62}$/;
|
|
3796
|
-
// Validates `name` as a safe, unquoted Postgres identifier; `context` labels
// the error message. Throws on empty/non-string input or on any name that
// does not match the identifier pattern (letter/underscore start, <= 63 chars).
function validatePgIdentifier(name, context) {
  if (!name || typeof name !== "string") {
    throw new Error(`Invalid ${context}: empty or not a string`);
  }
  const identifierPattern = /^[a-zA-Z_][a-zA-Z0-9_]{0,62}$/;
  if (identifierPattern.test(name)) {
    return;
  }
  throw new Error(
    `Invalid ${context} "${name}": must start with letter/underscore and contain only alphanumeric/underscore characters`
  );
}
|
|
3806
|
-
// Escapes a value for embedding inside a single-quoted Postgres string
// literal: backslashes are doubled and single quotes become ''.
// Throws when given a non-string.
function escapePgStringLiteral(value) {
  if (typeof value !== "string") {
    throw new Error("Value must be a string");
  }
  let escaped = value.replaceAll("\\", "\\\\");
  escaped = escaped.replaceAll("'", "''");
  return escaped;
}
|
|
3812
|
-
// Builds the comma-separated, quoted body of a SQL IN (...) clause from
// schema names. Every name is validated as a Postgres identifier and escaped
// before quoting; throws on an empty input list.
function buildSafeSchemaInClause(schemas) {
  if (schemas.length === 0) {
    throw new Error("No schemas provided for IN clause");
  }
  return schemas
    .map((schema) => {
      validatePgIdentifier(schema, "schema name");
      return `'${escapePgStringLiteral(schema)}'`;
    })
    .join(",");
}
|
|
3823
|
-
// Deliberately generic error text for path validation failures, so error
// messages do not echo attacker-influenced paths back to the caller.
var ERROR_MESSAGES = {
  PATH_TRAVERSAL: "Schema path validation failed",
  SCHEMA_NOT_FOUND: "Schema file not found"
};
|
|
3827
|
-
// True when `inputPath` contains a NUL byte or still contains ".." after
// normalization (i.e. it could escape its base directory).
function containsPathTraversal(inputPath) {
  if (inputPath.includes("\0")) {
    return true;
  }
  return path.normalize(inputPath).includes("..");
}
|
|
3831
|
-
// True when `filePath` resolves to `baseDir` itself or to a path strictly
// inside it. Comparison is done on resolved + normalized absolute paths with
// a separator guard so "/a/bc" is not treated as inside "/a/b".
// Any resolution error is treated as "not within".
function isPathWithinBase(filePath, baseDir) {
  try {
    const file = path.normalize(path.resolve(filePath));
    const base = path.normalize(path.resolve(baseDir));
    if (file === base) {
      return true;
    }
    return file.startsWith(base + path.sep);
  } catch {
    return false;
  }
}
|
|
3842
|
-
// Resolves and validates the schema entry point (src/schema/index.ts) under
// `dbPackagePath`, relative to `projectRoot`. Defense in depth: rejects
// traversal sequences up front, then confirms the resolved file stays inside
// the (realpath'd) project root, then confirms it exists on disk.
// Throws with intentionally generic messages (see ERROR_MESSAGES).
function validateSchemaPath(dbPackagePath, projectRoot = process.cwd()) {
  if (containsPathTraversal(dbPackagePath)) {
    throw new Error(ERROR_MESSAGES.PATH_TRAVERSAL);
  }
  const schemaEntry = path.join(dbPackagePath, "src", "schema", "index.ts");
  const absoluteSchemaPath = path.resolve(projectRoot, schemaEntry);
  let resolvedProjectRoot;
  try {
    // realpath defeats symlink tricks; fall back to resolve() when the root
    // itself cannot be realpath'd (e.g. does not exist yet).
    resolvedProjectRoot = realpathSync(projectRoot);
  } catch {
    resolvedProjectRoot = path.resolve(projectRoot);
  }
  if (!isPathWithinBase(absoluteSchemaPath, resolvedProjectRoot)) {
    throw new Error(ERROR_MESSAGES.PATH_TRAVERSAL);
  }
  if (!existsSync(absoluteSchemaPath)) {
    throw new Error(ERROR_MESSAGES.SCHEMA_NOT_FOUND);
  }
  return absoluteSchemaPath;
}
|
|
3862
|
-
// Returns a new array of the distinct values, sorted with localeCompare.
function uniqueSorted(values) {
  const deduped = Array.from(new Set(values));
  deduped.sort((a, b) => a.localeCompare(b));
  return deduped;
}
|
|
3865
|
-
// Loads the Drizzle schema entry point (via jiti, so TypeScript sources work
// at runtime) and collects the table and enum definitions it exports.
// Returns { expectedTables: Set<"schema.table">, expectedEnums: Map<name, {name, values}> }.
// Throws with a descriptive message — plus an import hint for a known
// drizzle-orm footgun — when the schema module fails to load.
async function extractSchemaTablesAndEnums(dbPackagePath, projectRoot = process.cwd()) {
  const validatedSchemaPath = validateSchemaPath(dbPackagePath, projectRoot);
  const jiti = createJiti(projectRoot, { interopDefault: true });
  let schemaModule;
  try {
    schemaModule = await jiti.import(validatedSchemaPath);
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    const hint = errorMessage.includes("unknown is not defined") ? "\n\nHint: Add 'unknown' to drizzle-orm/pg-core imports:\n import { unknown, ... } from 'drizzle-orm/pg-core'" : "";
    throw new Error(`Failed to load schema from ${validatedSchemaPath}: ${errorMessage}${hint}`);
  }
  const expectedTables = /* @__PURE__ */ new Set();
  const expectedEnums = /* @__PURE__ */ new Map();
  for (const value of Object.values(schemaModule)) {
    if (isTable(value)) {
      const unique = String(getTableUniqueName(value));
      // Drizzle reports "undefined.<table>" for tables declared without an
      // explicit schema; treat those as living in "public".
      if (unique.startsWith("undefined.")) {
        expectedTables.add(`public.${getTableName(value)}`);
      } else {
        expectedTables.add(unique);
      }
      continue;
    }
    if (isPgEnum(value)) {
      expectedEnums.set(value.enumName, {
        name: value.enumName,
        values: uniqueSorted(value.enumValues)
      });
    }
  }
  return { expectedTables, expectedEnums };
}
|
|
3897
|
-
// Queries a live database (via psql) for the tables in the managed schemas
// and for the enum types defined in "public".
// options.schemaDir overrides where managed schema names are detected from;
// options.additionalSystemSchemas extends the set of schemas to ignore.
// Returns { dbTables: Set<"schema.table">, dbEnums: Map<name, {name, values}> }.
async function fetchDbTablesAndEnums(databaseUrl, options) {
  const schemaDir = options?.schemaDir ?? "packages/database/src/schema";
  const managedSchemas = detectSchemaNames(schemaDir, process.cwd());
  const systemSchemas = /* @__PURE__ */ new Set([
    ...SUPABASE_SYSTEM_SCHEMAS,
    ...options?.additionalSystemSchemas ?? []
  ]);
  const filteredManagedSchemas = managedSchemas.filter((s) => !systemSchemas.has(s));
  // Schema names are validated and escaped before being interpolated into SQL.
  const schemaList = buildSafeSchemaInClause(filteredManagedSchemas);
  const tablesSql = `
SELECT schemaname || '.' || tablename
FROM pg_tables
WHERE schemaname IN (${schemaList})
ORDER BY schemaname, tablename;`.trim();
  // Enum values aggregated in declaration order (enumsortorder), one row per
  // enum type, values joined with commas.
  const enumsSql = `
SELECT t.typname AS enum_name, string_agg(e.enumlabel, ',' ORDER BY e.enumsortorder) AS values
FROM pg_type t
JOIN pg_enum e ON t.oid = e.enumtypid
JOIN pg_namespace n ON n.oid = t.typnamespace
WHERE n.nspname = 'public'
GROUP BY t.typname
ORDER BY t.typname;`.trim();
  const tablesOut = await psqlQuery({ databaseUrl, sql: tablesSql, mode: "table" });
  const dbTables = /* @__PURE__ */ new Set();
  for (const line of tablesOut.split("\n")) {
    const v = line.trim();
    if (v.length > 0) dbTables.add(v);
  }
  const enumsOut = await psqlQuery({ databaseUrl, sql: enumsSql, mode: "table" });
  const dbEnums = /* @__PURE__ */ new Map();
  for (const line of enumsOut.split("\n")) {
    const trimmed = line.trim();
    if (trimmed.length === 0) continue;
    // psql "table" mode separates columns with "|".
    // NOTE(review): enum labels containing "," or "|" would be split
    // incorrectly here — assumed not to occur in managed schemas.
    const [enumName, valuesCsv] = trimmed.split("|").map((s) => s.trim());
    const values = valuesCsv ? valuesCsv.split(",").map((s) => s.trim()) : [];
    dbEnums.set(enumName, { name: enumName, values: uniqueSorted(values) });
  }
  return { dbTables, dbEnums };
}
|
|
3936
|
-
// Compares the schema-defined tables/enums against what the database actually
// contains. Orphan detection (tables present in the DB but absent from the
// schema) honors an exclusion list that may contain "*" glob patterns.
// Returns the inputs plus missingTables, orphanTables, missingEnums,
// extraEnums and enumValueMismatches (all sorted and deduplicated).
function diffSchema(params) {
  const missingTables = uniqueSorted(
    [...params.expectedTables].filter((t) => !params.dbTables.has(t))
  );
  const exclusions = new Set(params.excludeFromOrphanDetection ?? []);
  const exclusionPatterns = [...exclusions].filter((e) => e.includes("*"));
  const exactExclusions = [...exclusions].filter((e) => !e.includes("*"));
  const isExcluded = (table) => {
    if (exactExclusions.includes(table)) return true;
    for (const pattern of exclusionPatterns) {
      // Escape all regex metacharacters, then turn the (escaped) "*" back
      // into a ".*" wildcard, anchored to the full table name.
      const escaped = pattern.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
      const regex = new RegExp(`^${escaped.replace(/\\\*/g, ".*")}$`);
      if (regex.test(table)) return true;
    }
    return false;
  };
  const orphanTables = uniqueSorted(
    [...params.dbTables].filter((t) => !params.expectedTables.has(t) && !isExcluded(t))
  );
  const expectedEnumNames = new Set(params.expectedEnums.keys());
  const dbEnumNames = new Set(params.dbEnums.keys());
  const missingEnums = uniqueSorted([...expectedEnumNames].filter((n) => !dbEnumNames.has(n)));
  const extraEnums = uniqueSorted([...dbEnumNames].filter((n) => !expectedEnumNames.has(n)));
  const enumValueMismatches = [];
  // For enums present on both sides, compare the sorted value lists and
  // record what was added (schema-only) and removed (db-only).
  for (const name of uniqueSorted([...expectedEnumNames].filter((n) => dbEnumNames.has(n)))) {
    const s = params.expectedEnums.get(name);
    const d = params.dbEnums.get(name);
    if (!s || !d) continue;
    const schemaValues = uniqueSorted(s.values);
    const dbValues = uniqueSorted(d.values);
    const same = schemaValues.length === dbValues.length && schemaValues.every((v, i) => v === dbValues[i]);
    if (same) continue;
    const added = schemaValues.filter((v) => !dbValues.includes(v));
    const removed = dbValues.filter((v) => !schemaValues.includes(v));
    enumValueMismatches.push({ name, dbValues, schemaValues, added, removed });
  }
  return {
    expectedTables: params.expectedTables,
    expectedEnums: params.expectedEnums,
    dbTables: params.dbTables,
    dbEnums: params.dbEnums,
    missingTables,
    orphanTables,
    missingEnums,
    extraEnums,
    enumValueMismatches
  };
}
|
|
3984
|
-
// Scans the .sql files in `idempotentDir` (resolved against `projectRoot`)
// for CREATE TABLE statements and returns the sorted, deduplicated
// "schema.table" names found. Unqualified tables default to "public".
// Missing directory or any read error yields an empty array.
function extractTablesFromIdempotentSql(idempotentDir, projectRoot = process.cwd()) {
  const fullPath = path.resolve(projectRoot, idempotentDir);
  if (!existsSync(fullPath)) {
    return [];
  }
  const createTablePattern = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?\.)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?)/gi;
  const found = new Set();
  try {
    for (const file of readdirSync(fullPath)) {
      if (!file.endsWith(".sql")) continue;
      const sql = readFileSync(path.join(fullPath, file), "utf-8");
      // Strip -- line comments and /* ... */ block comments before matching.
      const stripped = sql.replace(/--.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
      for (const match of stripped.matchAll(createTablePattern)) {
        if (match[2]) {
          found.add(`${match[1] || "public"}.${match[2]}`);
        }
      }
    }
  } catch {
    return [];
  }
  return [...found].sort();
}
|
|
4010
|
-
|
|
4011
|
-
// src/commands/db/utils/sql-table-extractor.ts
|
|
4012
|
-
init_esm_shims();
|
|
4013
|
-
|
|
4014
|
-
// src/commands/db/utils/sql-table-extractor-regex.ts
|
|
4015
|
-
init_esm_shims();
|
|
4016
|
-
// Regex-based CREATE TABLE discovery: for each statement in ctx.content,
// yields the parsed schema/table name, its line number (per getLineNumber),
// and the raw parenthesized table body text.
function findTablesRegex(ctx) {
  const tables = [];
  // Fresh RegExp instance so the shared pattern's lastIndex state is never
  // carried across calls.
  const regex = new RegExp(SQL_PATTERNS.createTable.source, "gi");
  for (const match of ctx.content.matchAll(regex)) {
    const reference = parseTableReference(match[1] ?? "");
    if (!reference) continue;
    const lineNumber = getLineNumber(ctx.content, match.index ?? 0);
    const tableBody = extractTableBody(ctx.content, match.index ?? 0);
    tables.push({ schema: reference.schema, name: reference.name, lineNumber, tableBody });
  }
  return tables;
}
|
|
4028
|
-
// True for lines inside a table body that are not column declarations:
// blanks, -- comments, and table-level constraint clauses.
function shouldSkipColumnLine(trimmed) {
  if (!trimmed) return true;
  if (trimmed.startsWith("--")) return true;
  return /^(?:PRIMARY|FOREIGN|UNIQUE|CHECK|CONSTRAINT)\s/i.test(trimmed);
}
|
|
4031
|
-
// True when `name` (case-insensitively) is one of the constraint keywords
// that can never be a column name in this parser.
function isReservedKeyword(name) {
  const upper = name.toUpperCase();
  return ["PRIMARY", "FOREIGN", "UNIQUE", "CHECK", "CONSTRAINT"].includes(upper);
}
|
|
4034
|
-
// Keywords that, when found at paren depth 0 outside quotes, mark the end of
// a column's type and the start of its constraint clauses (used as the
// default keyword set for findConstraintStart).
var COLUMN_CONSTRAINT_KEYWORDS = [
  "NOT NULL",
  "DEFAULT",
  "REFERENCES",
  "PRIMARY",
  "UNIQUE",
  "CHECK",
  "CONSTRAINT"
];
|
|
4043
|
-
// True when `keyword` occurs at `index` in `source` as a whole word: the
// characters on both sides (if any) are whitespace, parens, or commas, and
// the span matches the keyword case-insensitively.
function isBoundaryAtTopLevel(source, index, keyword) {
  const isBoundaryChar = (ch) => /\s|[(),]/.test(ch);
  const end = index + keyword.length;
  const beforeOk = index === 0 || isBoundaryChar(source[index - 1] ?? "");
  const afterOk = end === source.length || isBoundaryChar(source[end] ?? "");
  return beforeOk && afterOk && source.substring(index, end).toUpperCase() === keyword;
}
|
|
4053
|
-
// Fresh scanner state for constraint scanning: at parenthesis depth zero and
// outside single-, double-, and dollar-quoted regions.
function createConstraintScanState() {
  const state = {};
  state.depth = 0;
  state.dollarTag = "";
  state.inDollarQuote = false;
  state.inDoubleQuote = false;
  state.inSingleQuote = false;
  return state;
}
|
|
4062
|
-
// Handles a "$" at `index`: opens a dollar-quoted region (recording its tag)
// or closes the current one when the exact matching close tag is present.
// Returns the index the caller's loop should resume from (the last consumed
// character), or null when no dollar-quote transition applies here.
function consumeConstraintDollarQuote(line, index, state) {
  const insideOtherQuote = state.inSingleQuote || state.inDoubleQuote;
  if (insideOtherQuote || (line[index] ?? "") !== "$") {
    return null;
  }
  if (state.inDollarQuote) {
    // Only the exact matching close tag ends the dollar-quoted region.
    const closeTag = `$${state.dollarTag}$`;
    if (!line.startsWith(closeTag, index)) {
      return null;
    }
    state.inDollarQuote = false;
    state.dollarTag = "";
    return index + closeTag.length - 1;
  }
  const opened = line.slice(index).match(/^\$([a-zA-Z_][a-zA-Z0-9_]*)?\$/);
  if (!opened) {
    return null;
  }
  state.inDollarQuote = true;
  state.dollarTag = opened[1] ?? "";
  return index + opened[0].length - 1;
}
|
|
4083
|
-
// Handles a "'" at `index`: toggles single-quote state, treating a doubled
// '' inside a string as an escaped quote (consumes both characters).
// Returns the resume index, or null when this position is not a single-quote
// transition (wrong char, or inside a double-/dollar-quoted region).
function consumeConstraintSingleQuote(line, index, state) {
  if (state.inDoubleQuote || state.inDollarQuote) {
    return null;
  }
  if ((line[index] ?? "") !== "'") {
    return null;
  }
  if (state.inSingleQuote && (line[index + 1] ?? "") === "'") {
    // Escaped '' — stay inside the string, skip both quotes.
    return index + 1;
  }
  state.inSingleQuote = !state.inSingleQuote;
  return index;
}
|
|
4093
|
-
// Handles a '"' at `index`: toggles double-quote (quoted identifier) state,
// treating a doubled "" inside a quoted identifier as an escaped quote.
// Returns the resume index, or null when no double-quote transition applies.
function consumeConstraintDoubleQuote(line, index, state) {
  if (state.inSingleQuote || state.inDollarQuote) {
    return null;
  }
  if ((line[index] ?? "") !== '"') {
    return null;
  }
  if (state.inDoubleQuote && (line[index + 1] ?? "") === '"') {
    // Escaped "" — stay inside the identifier, skip both quotes.
    return index + 1;
  }
  state.inDoubleQuote = !state.inDoubleQuote;
  return index;
}
|
|
4103
|
-
// Tries each quote flavor in turn (dollar, single, double); the first
// consumer that applies wins. Returns the resume index or null when the
// character at `index` is not a quote transition.
function consumeConstraintQuote(line, index, state) {
  return (
    consumeConstraintDollarQuote(line, index, state) ??
    consumeConstraintSingleQuote(line, index, state) ??
    consumeConstraintDoubleQuote(line, index, state)
  );
}
|
|
4114
|
-
// True when the scanner is currently inside any quoted region.
function isInsideConstraintQuote(state) {
  const { inSingleQuote, inDoubleQuote, inDollarQuote } = state;
  return inSingleQuote || inDoubleQuote || inDollarQuote;
}
|
|
4117
|
-
// Adjusts parenthesis depth for one character; depth never goes negative.
function updateConstraintDepth(char, state) {
  if (char === "(") {
    state.depth += 1;
  } else if (char === ")" && state.depth > 0) {
    state.depth -= 1;
  }
}
|
|
4126
|
-
// True when any keyword in `keywords` starts at `index` in the uppercased
// source as a whole word (per isBoundaryAtTopLevel).
function findKeywordAtTopLevel(upper, index, keywords) {
  for (const keyword of keywords) {
    if (upper.startsWith(keyword, index) && isBoundaryAtTopLevel(upper, index, keyword)) {
      return true;
    }
  }
  return false;
}
|
|
4131
|
-
// Scans a column-declaration fragment for the first occurrence of any of
// `keywords` that sits at top level — outside all quoting and at parenthesis
// depth zero. Returns its index, or -1 when none is found.
function findConstraintStart(line, keywords = COLUMN_CONSTRAINT_KEYWORDS) {
  const upper = line.toUpperCase();
  const state = createConstraintScanState();
  for (let i = 0; i < upper.length; i++) {
    // Quote consumers advance i past quoted characters/regions.
    const adjustedIndex = consumeConstraintQuote(line, i, state);
    if (adjustedIndex !== null) {
      i = adjustedIndex;
      continue;
    }
    if (isInsideConstraintQuote(state)) continue;
    const char = upper[i] ?? "";
    updateConstraintDepth(char, state);
    if (char === "(" || char === ")") continue;
    // Keywords only count at depth 0 (e.g. not inside a type's "(n)" args).
    if (state.depth !== 0) continue;
    if (findKeywordAtTopLevel(upper, i, keywords)) return i;
  }
  return -1;
}
|
|
4149
|
-
// Parses an inline "REFERENCES <table>(<column>)" clause out of the
// constraint portion of a column declaration.
// Returns { table: "schema.name", column } or null when no well-formed
// reference is present at top level.
function parseInlineReference(constraintSource) {
  const referenceStart = findConstraintStart(constraintSource, ["REFERENCES"]);
  if (referenceStart === -1) {
    return null;
  }
  const afterConstraint = constraintSource.slice(referenceStart);
  if (!/^\s*REFERENCES\s+/i.test(afterConstraint)) {
    return null;
  }
  const refMatch = afterConstraint.match(
    new RegExp(`^\\s*REFERENCES\\s+(${TABLE_REFERENCE})\\s*\\(\\s*(${SQL_IDENTIFIER})\\s*\\)`, "i")
  );
  if (!refMatch) {
    return null;
  }
  const ref = parseTableReference(refMatch[1] ?? "");
  const refColumn = unquoteIdentifier(refMatch[2] ?? "");
  if (!ref || !refColumn) {
    return null;
  }
  return { table: `${ref.schema}.${ref.name}`, column: refColumn };
}
|
|
4171
|
-
// Splits one column declaration line into its parts:
// { name, type, hasDefault, notNull, isPrimaryKey,
//   inlineReferenceTable?, inlineReferenceColumn? }.
// Returns null for lines that are not column declarations (unparseable text
// or table-level constraint keywords in name position).
function splitColumnDeclaration(line) {
  // Column name is either a quoted identifier or a bare identifier, followed
  // by the rest of the declaration; a trailing comma is tolerated.
  const columnMatch = line.match(
    /^((?:"(?:[^"]|"")*"|[a-zA-Z_][a-zA-Z0-9_]*))\s+(.+?)(?:\s*,\s*)?$/
  );
  if (!columnMatch) {
    return null;
  }
  const name = unquoteIdentifier(columnMatch[1] ?? "");
  const rest = (columnMatch[2] ?? "").trim();
  if (!name || isReservedKeyword(name)) {
    return null;
  }
  // The type runs until the first top-level constraint keyword, if any.
  const typeEndIndex = (() => {
    const withParens = rest;
    const constraintsIndex = findConstraintStart(withParens);
    if (constraintsIndex === -1) {
      return withParens.length;
    }
    return constraintsIndex;
  })();
  const type = rest.slice(0, typeEndIndex).trim();
  const remaining = rest.slice(typeEndIndex);
  const inlineReference = parseInlineReference(remaining);
  const hasDefault = findConstraintStart(remaining, ["DEFAULT"]) !== -1;
  const notNull = findConstraintStart(remaining, ["NOT NULL"]) !== -1;
  const isPrimaryKey = findConstraintStart(remaining, ["PRIMARY KEY"]) !== -1;
  return {
    name,
    type,
    hasDefault,
    notNull,
    isPrimaryKey,
    inlineReferenceTable: inlineReference?.table,
    inlineReferenceColumn: inlineReference?.column
  };
}
|
|
4207
|
-
// Parses column declarations out of a CREATE TABLE body: skips comments and
// table-level constraints, deduplicates by column name (first wins), and
// normalizes each column's type.
function parseColumnsRegex(tableBody) {
  const columns = [];
  const seen = /* @__PURE__ */ new Set();
  const lines = splitTopLevelSqlStatements(tableBody);
  for (const line of lines) {
    const trimmed = line.trim();
    if (shouldSkipColumnLine(trimmed)) continue;
    const column = splitColumnDeclaration(trimmed);
    if (!column) continue;
    if (!column.name || seen.has(column.name) || isReservedKeyword(column.name)) continue;
    seen.add(column.name);
    columns.push({
      name: column.name,
      type: normalizeType(column.type),
      // PRIMARY KEY implies NOT NULL.
      notNull: column.notNull || column.isPrimaryKey,
      hasDefault: column.hasDefault,
      isPrimaryKey: column.isPrimaryKey
    });
  }
  return columns;
}
|
|
4228
|
-
// Extracts the column names of a table-level PRIMARY KEY (...) clause from a
// CREATE TABLE body. Preserves first-seen order, deduplicated.
function parsePrimaryKeyRegex(tableBody) {
  const regex = new RegExp(SQL_PATTERNS.primaryKey.source, "i");
  const found = [];
  for (const line of splitTopLevelSqlStatements(tableBody)) {
    const match = line.match(regex);
    if (!match || !match[1]) {
      continue;
    }
    for (const col of match[1]?.split(",") ?? []) {
      const normalized = unquoteIdentifier(col.trim());
      if (normalized && !found.includes(normalized)) {
        found.push(normalized);
      }
    }
    // NOTE(review): the regex is compiled without /g, so lastIndex never
    // advances — this reset is a defensive no-op.
    regex.lastIndex = 0;
  }
  return found;
}
|
|
4246
|
-
// Parses explicit FOREIGN KEY (...) REFERENCES ... constraint clauses from a
// CREATE TABLE body. Each result carries the local column, the referenced
// "schema.table"/column, and normalized ON DELETE / ON UPDATE actions.
function parseExplicitForeignKeys(tableBody) {
  const fks = [];
  const fkRegex = new RegExp(SQL_PATTERNS.foreignKey.source, "gi");
  for (const line of splitTopLevelSqlStatements(tableBody)) {
    for (const match of line.matchAll(fkRegex)) {
      const column = unquoteIdentifier(match[1] ?? "");
      const ref = parseTableReference(match[2] ?? "");
      const refColumn = unquoteIdentifier(match[3] ?? "");
      if (!column || !ref || !refColumn) continue;
      fks.push({
        column,
        referencesTable: `${ref.schema}.${ref.name}`,
        referencesColumn: refColumn,
        onDelete: normalizeOnAction(match[4]),
        onUpdate: normalizeOnAction(match[5])
      });
    }
  }
  return fks;
}
|
|
4266
|
-
// Derives foreign keys from inline "col type REFERENCES tbl(col)" column
// declarations. Columns already covered by explicit FK constraints
// (`existingColumns`) are skipped so they are not reported twice.
function parseInlineForeignKeys(tableBody, existingColumns) {
  const fks = [];
  for (const fragment of splitTopLevelSqlStatements(tableBody)) {
    const declaration = splitColumnDeclaration(fragment);
    if (!declaration) continue;
    if (!declaration.inlineReferenceTable || !declaration.inlineReferenceColumn) continue;
    if (existingColumns.has(declaration.name)) continue;
    fks.push({
      column: declaration.name,
      referencesTable: declaration.inlineReferenceTable,
      referencesColumn: declaration.inlineReferenceColumn
    });
  }
  return fks;
}
|
|
4281
|
-
// All foreign keys of a table body: explicit FOREIGN KEY constraints first,
// then inline REFERENCES declarations for columns not already covered.
function parseForeignKeysRegex(tableBody) {
  const fromConstraints = parseExplicitForeignKeys(tableBody);
  const alreadyCovered = new Set(fromConstraints.map((fk) => fk.column));
  return fromConstraints.concat(parseInlineForeignKeys(tableBody, alreadyCovered));
}
|
|
4287
|
-
// Collects CREATE [UNIQUE] INDEX statements in `content` that target
// schema.tableName. Returns { name, columns, isUnique } for each.
function parseIndexesRegex(content, schema, tableName) {
  const indexes = [];
  const regex = new RegExp(SQL_PATTERNS.createIndex.source, "gi");
  for (const match of content.matchAll(regex)) {
    const indexTableRef = parseTableReference(match[3] ?? "");
    if (!indexTableRef) continue;
    const indexSchema = indexTableRef.schema;
    const indexTable = indexTableRef.name;
    if (indexSchema === schema && indexTable === tableName) {
      const indexName = unquoteIdentifier(match[2] ?? "");
      if (!indexName) continue;
      const rawColumns = match[4] ?? "";
      indexes.push({
        name: indexName,
        columns: parseIndexColumns(rawColumns),
        // Capture group 1 is the optional UNIQUE keyword.
        isUnique: !!match[1]
      });
    }
  }
  return indexes;
}
|
|
4308
|
-
// True when `content` contains an ENABLE ROW LEVEL SECURITY statement
// (per SQL_PATTERNS.enableRls) targeting schema.tableName.
function hasRlsEnabledRegex(content, schema, tableName) {
  const regex = new RegExp(SQL_PATTERNS.enableRls.source, "gi");
  for (const match of content.matchAll(regex)) {
    const matchTable = parseTableReference(match[1] ?? "");
    if (!matchTable) continue;
    if (matchTable.schema === schema && matchTable.name === tableName) {
      return true;
    }
  }
  return false;
}
|
|
4319
|
-
|
|
4320
|
-
// src/commands/db/utils/sql-table-extractor-rls.ts
|
|
4321
|
-
init_esm_shims();
|
|
4322
|
-
// Reads a dollar-quote open tag ("$$" or "$tag$") starting exactly at
// `index`; returns the full tag text, or undefined when none starts there.
function readDollarTagAt(content, index) {
  if (content[index] !== "$") {
    return void 0;
  }
  const tag = /^\$([a-zA-Z_][a-zA-Z0-9_]*)?\$/.exec(content.slice(index));
  return tag ? tag[0] : void 0;
}
|
|
4327
|
-
// While inside a single-quoted string, consumes one unit at the cursor:
// a doubled '' (escaped quote, two chars), a closing ' (leaves the string),
// or any other character. Returns false when not inside a single quote.
function consumePolicySingleQuote(content, state) {
  if (!state.inSingleQuote) {
    return false;
  }
  const here = content[state.cursor] ?? "";
  if (here === "'" && (content[state.cursor + 1] ?? "") === "'") {
    // Escaped '' stays inside the string.
    state.cursor += 2;
    return true;
  }
  if (here === "'") {
    state.inSingleQuote = false;
  }
  state.cursor += 1;
  return true;
}
|
|
4341
|
-
// While inside a double-quoted identifier, consumes one unit at the cursor:
// a doubled "" (escaped quote, two chars), a closing " (leaves the
// identifier), or any other character. Returns false when not inside one.
function consumePolicyDoubleQuote(content, state) {
  if (!state.inDoubleQuote) {
    return false;
  }
  const here = content[state.cursor] ?? "";
  if (here === '"' && (content[state.cursor + 1] ?? "") === '"') {
    // Escaped "" stays inside the quoted identifier.
    state.cursor += 2;
    return true;
  }
  if (here === '"') {
    state.inDoubleQuote = false;
  }
  state.cursor += 1;
  return true;
}
|
|
4355
|
-
// While inside a dollar-quoted region, consumes either the exact matching
// close tag (ending the region) or a single character. Returns false when
// not inside a dollar quote.
function consumePolicyDollarQuote(content, state) {
  if (!state.inDollarQuote) {
    return false;
  }
  const closeTag = `$${state.dollarTag}$`;
  if (!content.startsWith(closeTag, state.cursor)) {
    state.cursor += 1;
    return true;
  }
  state.inDollarQuote = false;
  state.dollarTag = "";
  state.cursor += closeTag.length;
  return true;
}
|
|
4367
|
-
// Outside all quoting, skips a "--" line comment (through its newline, or to
// end of content). Returns false when not at a line comment.
function trySkipPolicyLineComment(content, state) {
  if (state.inSingleQuote || state.inDoubleQuote || state.inDollarQuote) {
    return false;
  }
  if (!content.startsWith("--", state.cursor)) {
    return false;
  }
  const newlineIndex = content.indexOf("\n", state.cursor);
  state.cursor = newlineIndex === -1 ? content.length : newlineIndex + 1;
  return true;
}
|
|
4376
|
-
// Outside all quoting, skips a "/* ... */" block comment (or everything to
// end of content when unterminated). Returns false when not at one.
function trySkipPolicyBlockComment(content, state) {
  if (state.inSingleQuote || state.inDoubleQuote || state.inDollarQuote) {
    return false;
  }
  if (!content.startsWith("/*", state.cursor)) {
    return false;
  }
  const closeIndex = content.indexOf("*/", state.cursor + 2);
  state.cursor = closeIndex === -1 ? content.length : closeIndex + 2;
  return true;
}
|
|
4385
|
-
// Outside single/double quotes, opens a dollar-quoted region when a "$tag$"
// open tag starts at the cursor. Returns false when none does.
function tryStartPolicyDollarQuote(content, state) {
  if (state.inSingleQuote || state.inDoubleQuote) {
    return false;
  }
  const tag = readDollarTagAt(content, state.cursor);
  if (tag === undefined) {
    return false;
  }
  state.inDollarQuote = true;
  state.dollarTag = tag.slice(1, -1);
  state.cursor += tag.length;
  return true;
}
|
|
4394
|
-
// Outside double/dollar quotes, opens a single-quoted string when the cursor
// sits on a "'". Returns false otherwise.
function tryStartPolicySingleQuote(content, state) {
  const blocked = state.inDoubleQuote || state.inDollarQuote;
  if (blocked || content[state.cursor] !== "'") {
    return false;
  }
  state.inSingleQuote = true;
  state.cursor += 1;
  return true;
}
|
|
4400
|
-
// Outside single/dollar quotes, opens a double-quoted identifier when the
// cursor sits on a '"'. Returns false otherwise.
function tryStartPolicyDoubleQuote(content, state) {
  const blocked = state.inSingleQuote || state.inDollarQuote;
  if (blocked || content[state.cursor] !== '"') {
    return false;
  }
  state.inDoubleQuote = true;
  state.cursor += 1;
  return true;
}
|
|
4406
|
-
// Tries every quote/comment consumer in priority order; the first one that
// applies advances the cursor and wins. Returns false when the character at
// the cursor is plain SQL text.
function consumePolicySqlTrivia(content, state) {
  const consumers = [
    consumePolicySingleQuote,
    consumePolicyDoubleQuote,
    consumePolicyDollarQuote,
    trySkipPolicyLineComment,
    trySkipPolicyBlockComment,
    tryStartPolicyDollarQuote,
    tryStartPolicySingleQuote,
    tryStartPolicyDoubleQuote
  ];
  return consumers.some((consume) => consume(content, state));
}
|
|
4416
|
-
// Walks `content` from `startIndex`, skipping SQL strings, quoted
// identifiers, dollar quotes, and comments, and returns the index of the
// first character for which `predicate(char, index)` is true — or undefined
// when no such character exists outside quoted/commented regions.
function findOutsideSqlCharIndex(content, startIndex, predicate) {
  const state = {
    cursor: startIndex,
    inSingleQuote: false,
    inDoubleQuote: false,
    inDollarQuote: false,
    dollarTag: ""
  };
  while (state.cursor < content.length) {
    if (consumePolicySqlTrivia(content, state)) {
      continue;
    }
    if (predicate(content[state.cursor] ?? "", state.cursor)) {
      return state.cursor;
    }
    state.cursor += 1;
  }
  return void 0;
}
|
|
4432
|
-
// Finds every CREATE POLICY statement in `content` and returns each one
// trimmed, from its CREATE keyword through its (quote-aware) terminating
// semicolon or end of content.
function extractCreatePolicyStatements(content) {
  const statements = [];
  const startRegex = /\bCREATE\s+POLICY\b/gi;
  for (let match = startRegex.exec(content); match !== null; match = startRegex.exec(content)) {
    const startIndex = match.index ?? 0;
    const endIndex = findSqlStatementEndForPolicy(content, startIndex);
    statements.push(content.slice(startIndex, endIndex).trim());
  }
  return statements;
}
|
|
4443
|
-
// End index (exclusive) of the statement starting at `startIndex`: one past
// its first semicolon outside quotes/comments, or the content length.
function findSqlStatementEndForPolicy(content, startIndex) {
  const semicolonIndex = findOutsideSqlCharIndex(content, startIndex, (char) => char === ";");
  if (semicolonIndex === void 0) {
    return content.length;
  }
  return semicolonIndex + 1;
}
|
|
4447
|
-
// Returns the text inside the first balanced "(...)" group found at/after
// `startIndex`, ignoring parens inside SQL strings/comments (via the
// quote-aware scanner). Returns undefined when no balanced group exists.
function extractBalancedClause(statement, startIndex) {
  const openParenIndex = statement.indexOf("(", startIndex);
  if (openParenIndex === -1) return void 0;
  let depth = 0;
  let clauseStart = -1;
  // The predicate mutates depth/clauseStart as the scanner walks; it returns
  // true only on the ")" that closes the outermost group.
  const closeParenIndex = findOutsideSqlCharIndex(statement, openParenIndex, (char, index) => {
    if (char === "(") {
      if (depth === 0) clauseStart = index + 1;
      depth++;
      return false;
    }
    if (char === ")") {
      depth--;
      return depth === 0 && clauseStart !== -1;
    }
    return false;
  });
  if (closeParenIndex === void 0 || clauseStart === -1) return void 0;
  return statement.slice(clauseStart, closeParenIndex).trim();
}
|
|
4467
|
-
// Parses a single CREATE POLICY statement. Returns undefined unless the
// policy targets schema.tableName. USING and WITH CHECK bodies are extracted
// with quote-aware paren balancing.
function parsePolicyDefinitionFromStatement(statement, headerRegex, schema, tableName) {
  const match = statement.match(headerRegex);
  if (!match) return void 0;
  // Group 1 is a quoted policy name, group 2 a bare one.
  const policyName = unquoteIdentifier(match[1] ?? match[2] ?? "");
  const policyTableRef = parseTableReference(match[3] ?? "");
  if (!policyName || !policyTableRef) return void 0;
  if (policyTableRef.schema !== schema || policyTableRef.name !== tableName) return void 0;
  const usingIndex = statement.search(/\bUSING\s*\(/i);
  const withCheckIndex = statement.search(/\bWITH\s+CHECK\s*\(/i);
  return {
    name: policyName,
    // A policy declared without FOR defaults to ALL.
    command: (match[4] || "ALL").toUpperCase(),
    using: usingIndex !== -1 ? extractBalancedClause(statement, usingIndex) : void 0,
    withCheck: withCheckIndex !== -1 ? extractBalancedClause(statement, withCheckIndex) : void 0
  };
}
|
|
4483
|
-
// Collects all RLS policies declared in `content` for the given table by
// scanning each CREATE POLICY statement with a header regex.
function parsePoliciesRegex(content, schema, tableName) {
  const headerRegex = new RegExp(
    `^\\s*CREATE\\s+POLICY\\s+(?:"((?:[^"]|"")*)"|([a-zA-Z_][a-zA-Z0-9_]*))\\s+ON\\s+(${TABLE_REFERENCE})(?:\\s+AS\\s+\\w+)?(?:\\s+FOR\\s+(\\w+))?`,
    "i"
  );
  const policies = [];
  for (const statement of extractCreatePolicyStatements(content)) {
    const parsed = parsePolicyDefinitionFromStatement(statement, headerRegex, schema, tableName);
    if (parsed) policies.push(parsed);
  }
  return policies;
}
|
|
4498
|
-
|
|
4499
|
-
// src/commands/db/utils/sql-table-extractor-ast.ts
// Module-level ESM shim initialization injected by the bundler.
init_esm_shims();
|
|
4501
|
-
// Maps AST column records to the table-entry column shape. Returns undefined
// when column output was not requested.
function convertAstColumns(columns, include) {
  if (!include) return void 0;
  const converted = [];
  for (const column of columns) {
    converted.push({
      name: column.name,
      type: column.type,
      notNull: column.notNull,
      hasDefault: column.hasDefault,
      isPrimaryKey: column.isPrimaryKey
    });
  }
  return converted;
}
|
|
4511
|
-
// Maps AST foreign-key records to the table-entry FK shape. Returns undefined
// when FK output was not requested or when there are no foreign keys.
function convertAstForeignKeys(fks, include) {
  if (!include) return void 0;
  const converted = [];
  for (const fk of fks) {
    converted.push({
      column: fk.column,
      referencesTable: fk.referencesTable,
      referencesColumn: fk.referencesColumn,
      onDelete: fk.onDelete,
      onUpdate: fk.onUpdate
    });
  }
  // An empty FK list is represented as "absent" rather than [].
  return converted.length > 0 ? converted : void 0;
}
|
|
4522
|
-
// Maps AST index records to the table-entry index shape. Returns undefined
// when index output was not requested or when there are no indexes.
function convertAstIndexes(indexes, include) {
  if (!include) return void 0;
  if (indexes.length === 0) return void 0;
  return indexes.map((index) => {
    const { name, columns, isUnique } = index;
    return { name, columns, isUnique };
  });
}
|
|
4530
|
-
// Builds table entries from `content` using the optional AST SQL parser.
// Returns [] when the parser could not be loaded. RLS detection and policy
// parsing still use the regex helpers, since the AST parser does not cover
// ALTER TABLE ... ENABLE ROW LEVEL SECURITY / CREATE POLICY.
async function extractTablesWithAst(content, filePath, opts) {
  const parser = await getSqlParserUtils();
  if (!parser) return [];
  const entries = [];
  for (const astTable of await parser.parseCreateTables(content)) {
    if (opts.includeIndexes) {
      // Mutates astTable in place, attaching its indexes.
      await parser.parseIndexesForTables(content, [astTable]);
    }
    const hasRls = hasRlsEnabledRegex(content, astTable.schema, astTable.name);
    let rlsPolicies;
    if (opts.includeRlsPolicies && hasRls) {
      rlsPolicies = parsePoliciesRegex(content, astTable.schema, astTable.name);
    }
    entries.push({
      schema: astTable.schema,
      name: astTable.name,
      qualifiedName: astTable.qualifiedName,
      semanticName: snakeToCamel2(astTable.name),
      sourceFile: filePath,
      lineNumber: astTable.lineNumber ?? 1,
      columns: convertAstColumns(astTable.columns, opts.includeColumns),
      primaryKey: astTable.primaryKey,
      foreignKeys: convertAstForeignKeys(astTable.foreignKeys, opts.includeForeignKeys),
      indexes: convertAstIndexes(astTable.indexes, opts.includeIndexes),
      hasRls,
      rlsPolicies: rlsPolicies?.length ? rlsPolicies : void 0
    });
  }
  return entries;
}
|
|
4558
|
-
|
|
4559
|
-
// src/commands/db/utils/sql-table-extractor.ts
// Module-level cache of the lazily loaded SQL parser utilities; populated by
// isAstParserAvailable() and read by getSqlParserUtils().
var sqlParserUtils = null;
// Tri-state availability flag: null = not yet probed, true/false = cached result.
var astAvailable = null;
|
|
4562
|
-
// Probes (once) whether the optional AST SQL parser can be loaded, caching
// both the loaded utilities and the availability flag in module state.
// Any failure (missing package, loader error) is treated as "unavailable".
async function isAstParserAvailable() {
  if (astAvailable !== null) return astAvailable;
  try {
    const astModule = await import('@runa-ai/runa/ast');
    sqlParserUtils = await astModule.loadSqlParserUtils();
    astAvailable = await sqlParserUtils.isSqlParserAvailable();
  } catch {
    // Optional dependency not installed or failed to load — fall back.
    astAvailable = false;
  }
  return astAvailable;
}
|
|
4575
|
-
// Returns the cached SQL parser utilities, triggering the availability probe
// on first use. May return null when the parser is unavailable.
async function getSqlParserUtils() {
  if (!sqlParserUtils) {
    await isAstParserAvailable();
  }
  return sqlParserUtils;
}
|
|
4580
|
-
// Regex fragment matching one SQL identifier: either a double-quoted
// identifier (with "" as the escaped quote) or a bare identifier.
var SQL_IDENTIFIER = '(?:"(?:[^"]|"")*"|[a-zA-Z_][a-zA-Z0-9_]*)';
// Optionally schema-qualified table reference: ident or ident.ident.
var TABLE_REFERENCE = `${SQL_IDENTIFIER}(?:\\s*\\.\\s*${SQL_IDENTIFIER})?`;
// Same identifier pattern as SQL_IDENTIFIER, as a global RegExp. NOTE: the
// /g flag makes test/exec stateful via lastIndex; callers here clone it via
// `new RegExp(TABLE_IDENTIFIER.source, "g")` before use.
var TABLE_IDENTIFIER = /(?:"(?:[^"]|"")*"|[a-zA-Z_][a-zA-Z0-9_]*)/g;
// Shared global regexes for the regex-based SQL table extractor. All are /g —
// each use site must either reset lastIndex or iterate to exhaustion.
var SQL_PATTERNS = {
  // CREATE TABLE [IF NOT EXISTS] schema.table_name (
  createTable: new RegExp(
    `CREATE\\s+TABLE\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(${TABLE_REFERENCE})\\s*\\(`,
    "gi"
  ),
  // PRIMARY KEY (columns)
  primaryKey: /PRIMARY\s+KEY\s*\(([^)]+)\)/gi,
  // FOREIGN KEY (column) REFERENCES schema.table(column) [ON DELETE|UPDATE ...]
  foreignKey: new RegExp(
    `FOREIGN\\s+KEY\\s*\\((${SQL_IDENTIFIER})\\)\\s*REFERENCES\\s+(${TABLE_REFERENCE})\\s*\\((${SQL_IDENTIFIER})\\)(?:\\s+ON\\s+DELETE\\s+(\\w+(?:\\s+\\w+)?))?(?:\\s+ON\\s+UPDATE\\s+(\\w+(?:\\s+\\w+)?))?`,
    "gi"
  ),
  // CREATE [UNIQUE] INDEX name ON schema.table (columns)
  createIndex: new RegExp(
    `CREATE\\s+(UNIQUE\\s+)?INDEX\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(${SQL_IDENTIFIER})\\s+ON\\s+(${TABLE_REFERENCE})(?:\\s+USING\\s+\\w+)?\\s*\\(([^)]+)\\)`,
    "gi"
  ),
  // ALTER TABLE ... ENABLE ROW LEVEL SECURITY
  enableRls: new RegExp(
    `ALTER\\s+TABLE\\s+(${TABLE_REFERENCE})\\s+ENABLE\\s+ROW\\s+LEVEL\\s+SECURITY`,
    "gi"
  )
};
|
|
4607
|
-
// Creates a fresh scanner state for top-level comma splitting: zero paren
// depth, outside every quoting construct, current chunk starting at offset 0.
function createTopLevelSplitState() {
  const initialState = {
    depth: 0,
    dollarTag: "",
    inDollarQuote: false,
    inDoubleQuote: false,
    inSingleQuote: false,
    start: 0
  };
  return initialState;
}
|
|
4617
|
-
// Handles a PostgreSQL dollar-quote token ($tag$ ... $tag$) at `index`.
// Returns the last index of the consumed token (so the caller's loop resumes
// after it), or null when the character is not a dollar-quote boundary here.
// Mutates `state.inDollarQuote` / `state.dollarTag` on open/close.
function consumeDollarQuoteToken(content, index, state) {
  const current = content[index] ?? "";
  // '$' inside single/double quotes is literal text, not a dollar-quote.
  if (current !== "$" || state.inSingleQuote || state.inDoubleQuote) {
    return null;
  }
  if (state.inDollarQuote) {
    const closingTag = `$${state.dollarTag}$`;
    if (!content.startsWith(closingTag, index)) {
      return null;
    }
    state.inDollarQuote = false;
    state.dollarTag = "";
    return index + closingTag.length - 1;
  }
  // Opening tag: $$ or $identifier$.
  const openingTag = content.slice(index).match(/^\$([a-zA-Z_][a-zA-Z0-9_]*)?\$/);
  if (!openingTag) {
    return null;
  }
  state.inDollarQuote = true;
  state.dollarTag = openingTag[1] ?? "";
  return index + openingTag[0].length - 1;
}
|
|
4638
|
-
// Handles a single-quote token at `index`. Returns the last consumed index
// ('' inside a string counts as one escaped quote, consuming two chars), or
// null when the character is not a single-quote boundary here. Toggles
// `state.inSingleQuote` on an unescaped quote.
function consumeSingleQuoteToken(content, index, state) {
  if ((content[index] ?? "") !== "'") return null;
  // A quote inside double-quote or dollar-quote text is literal.
  if (state.inDoubleQuote || state.inDollarQuote) return null;
  const isEscapedPair = state.inSingleQuote && (content[index + 1] ?? "") === "'";
  if (isEscapedPair) {
    return index + 1;
  }
  state.inSingleQuote = !state.inSingleQuote;
  return index;
}
|
|
4648
|
-
// Handles a double-quote token at `index`. Mirrors consumeSingleQuoteToken:
// "" inside a quoted identifier is an escaped quote consuming two chars;
// otherwise toggles `state.inDoubleQuote`. Returns null when not applicable.
function consumeDoubleQuoteToken(content, index, state) {
  if ((content[index] ?? "") !== '"') return null;
  if (state.inSingleQuote || state.inDollarQuote) return null;
  const isEscapedPair = state.inDoubleQuote && (content[index + 1] ?? "") === '"';
  if (isEscapedPair) {
    return index + 1;
  }
  state.inDoubleQuote = !state.inDoubleQuote;
  return index;
}
|
|
4658
|
-
// Tries each quote-token consumer in precedence order (dollar, single,
// double) and returns the first non-null adjusted index, or null when the
// character at `index` is not a quote boundary.
function consumeQuoteToken(content, index, state) {
  for (const consumer of [consumeDollarQuoteToken, consumeSingleQuoteToken, consumeDoubleQuoteToken]) {
    const adjusted = consumer(content, index, state);
    if (adjusted !== null) {
      return adjusted;
    }
  }
  return null;
}
|
|
4669
|
-
// True while the scanner is inside any SQL quoting construct.
function isInQuotedScope(state) {
  const { inSingleQuote, inDoubleQuote, inDollarQuote } = state;
  return inSingleQuote || inDoubleQuote || inDollarQuote;
}
|
|
4672
|
-
// Advances the split state for one non-quoted character: tracks paren depth
// and, on a top-level comma, emits the pending chunk into `chunks` and moves
// `state.start` past the comma.
function processStructuralToken(content, index, state, chunks) {
  switch (content[index] ?? "") {
    case "(":
      state.depth += 1;
      break;
    case ")":
      // Ignore unbalanced closers so depth never goes negative.
      if (state.depth > 0) state.depth -= 1;
      break;
    case ",":
      if (state.depth === 0) {
        chunks.push(content.slice(state.start, index));
        state.start = index + 1;
      }
      break;
    default:
      break;
  }
}
|
|
4687
|
-
// Splits `content` on commas that are outside parentheses and outside all
// SQL quoting (single, double, dollar). Chunks are trimmed; empty chunks are
// dropped.
function splitByTopLevelComma(content) {
  const pieces = [];
  const scanState = createTopLevelSplitState();
  let cursor = 0;
  while (cursor < content.length) {
    const skipTo = consumeQuoteToken(content, cursor, scanState);
    if (skipTo !== null) {
      // Quote boundary consumed; resume after it.
      cursor = skipTo + 1;
      continue;
    }
    if (!isInQuotedScope(scanState)) {
      processStructuralToken(content, cursor, scanState, pieces);
    }
    cursor += 1;
  }
  pieces.push(content.slice(scanState.start));
  return pieces.map((piece) => piece.trim()).filter(Boolean);
}
|
|
4702
|
-
// Splits on top-level commas, same as splitTopLevelCsv.
// NOTE(review): the name suggests statement-level (semicolon) splitting, but
// it delegates to the comma splitter — confirm callers rely on this behavior
// before renaming or changing it.
function splitTopLevelSqlStatements(content) {
  return splitByTopLevelComma(content);
}
|
|
4705
|
-
// Splits a comma-separated value list on top-level commas only (quote- and
// paren-aware). Thin alias over splitByTopLevelComma.
function splitTopLevelCsv(content) {
  const values = splitByTopLevelComma(content);
  return values;
}
|
|
4708
|
-
// Recursively collects absolute paths of all *.sql files under `dir`.
function collectSqlFiles(dir) {
  const discovered = [];
  for (const entry of readdirSync(dir, { withFileTypes: true })) {
    const entryPath = join(dir, entry.name);
    if (entry.isDirectory()) {
      discovered.push(...collectSqlFiles(entryPath));
      continue;
    }
    if (entry.isFile() && entryPath.endsWith(".sql")) {
      discovered.push(entryPath);
    }
  }
  return discovered;
}
|
|
4721
|
-
// Converts snake_case to camelCase by upper-casing each letter that follows
// an underscore (the underscore itself is dropped).
function snakeToCamel2(str) {
  return str.replace(/_([a-z])/g, (matched, letter) => letter.toUpperCase());
}
|
|
4724
|
-
// Returns the 1-based line number of byte offset `position` within `content`
// (number of newlines before the offset, plus one).
function getLineNumber(content, position) {
  let line = 1;
  const end = Math.min(position, content.length);
  for (let i = 0; i < end; i++) {
    if (content[i] === "\n") line += 1;
  }
  return line;
}
|
|
4727
|
-
// Returns the text inside the first balanced paren pair found at or after
// `startPos` (the CREATE TABLE column list), or "" when no balanced pair is
// found. Note: this scan is not quote-aware; callers pass comment-stripped
// SQL where that has been acceptable so far.
function extractTableBody(content, startPos) {
  let nesting = 0;
  let openAt = -1;
  let closeAt = -1;
  let cursor = startPos;
  while (cursor < content.length) {
    const ch = content[cursor];
    if (ch === "(") {
      if (nesting === 0) openAt = cursor + 1;
      nesting += 1;
    } else if (ch === ")") {
      nesting -= 1;
      if (nesting === 0) {
        closeAt = cursor;
        break;
      }
    }
    cursor += 1;
  }
  if (openAt === -1 || closeAt === -1) return "";
  return content.substring(openAt, closeAt);
}
|
|
4751
|
-
// Normalizes a SQL type for comparison: collapses whitespace, lower-cases,
// and maps the verbose PostgreSQL spellings to their common short aliases.
function normalizeType(type) {
  const collapsed = type.trim().replace(/\s+/g, " ").toLowerCase();
  return collapsed
    .replace("character varying", "varchar")
    .replace("timestamp with time zone", "timestamptz")
    .replace("timestamp without time zone", "timestamp");
}
|
|
4754
|
-
// Strips surrounding double quotes from a SQL identifier and un-escapes
// embedded "" pairs; unquoted identifiers are returned trimmed.
// Fix: require at least two characters before treating the value as quoted,
// so a degenerate lone `"` is returned as-is instead of collapsing to "".
function unquoteIdentifier(identifier) {
  const trimmed = identifier.trim();
  if (trimmed.length >= 2 && trimmed.startsWith('"') && trimmed.endsWith('"')) {
    return trimmed.slice(1, -1).replace(/""/g, '"');
  }
  return trimmed;
}
|
|
4761
|
-
// Parses a possibly schema-qualified table reference into { schema, name }.
// A bare name defaults to the "public" schema; anything with more than two
// identifier tokens is rejected with null.
function parseTableReference(ref) {
  // Clone the shared /g regex so lastIndex state is never carried over.
  const identifierPattern = new RegExp(TABLE_IDENTIFIER.source, "g");
  const tokens = [];
  for (const match of ref.matchAll(identifierPattern)) {
    tokens.push(unquoteIdentifier(match[0] ?? ""));
  }
  switch (tokens.length) {
    case 1:
      return { schema: "public", name: tokens[0] ?? "" };
    case 2:
      return { schema: tokens[0] ?? "", name: tokens[1] ?? "" };
    default:
      return null;
  }
}
|
|
4773
|
-
// Extracts column names from the parenthesized column list of a CREATE INDEX
// statement. Expression entries that start with neither a quoted nor a bare
// identifier are dropped.
function parseIndexColumns(rawColumns) {
  const names = [];
  for (const rawEntry of splitTopLevelCsv(rawColumns)) {
    const entry = rawEntry.trim();
    const quoted = entry.match(/^"([^"]+)"/);
    if (quoted) {
      names.push(quoted[1]);
      continue;
    }
    const bare = entry.match(/^(\w+)/);
    if (bare) {
      names.push(bare[1]);
    }
  }
  return names;
}
|
|
4787
|
-
// Normalizes an ON DELETE / ON UPDATE action to its canonical form, or
// undefined when the value is absent or unrecognized.
function normalizeOnAction(action) {
  if (!action) return void 0;
  const canonical = action.toUpperCase().replace(/\s+/g, " ").trim();
  const recognized = ["CASCADE", "SET NULL", "SET DEFAULT", "RESTRICT", "NO ACTION"];
  return recognized.includes(canonical) ? canonical : void 0;
}
|
|
4805
|
-
// Fills extractor options with defaults: every include* flag defaults to
// true. `??` (not destructuring defaults) is used deliberately so an explicit
// null also falls back to true, matching the original contract.
function resolveOptions(options) {
  const orTrue = (flag) => flag ?? true;
  return {
    includeColumns: orTrue(options.includeColumns),
    includeForeignKeys: orTrue(options.includeForeignKeys),
    includeIndexes: orTrue(options.includeIndexes),
    includeRlsPolicies: orTrue(options.includeRlsPolicies)
  };
}
|
|
4813
|
-
// Assembles one table entry from a regex-discovered table: columns, primary
// key, foreign keys, indexes, and RLS state, honoring the include* options.
function buildTableEntryRegex(table, content, filePath, opts) {
  const qualifiedName = `${table.schema}.${table.name}`;
  const primaryKeyColumns = parsePrimaryKeyRegex(table.tableBody);
  let parsedColumns;
  if (opts.includeColumns) {
    parsedColumns = parseColumnsRegex(table.tableBody);
    // A table-level PRIMARY KEY (...) clause marks its columns after the fact.
    for (const column of parsedColumns) {
      if (primaryKeyColumns.includes(column.name)) {
        column.isPrimaryKey = true;
      }
    }
  }
  const foreignKeys = opts.includeForeignKeys ? parseForeignKeysRegex(table.tableBody) : void 0;
  const indexes = opts.includeIndexes ? parseIndexesRegex(content, table.schema, table.name) : void 0;
  const hasRls = hasRlsEnabledRegex(content, table.schema, table.name);
  const rlsPolicies = opts.includeRlsPolicies && hasRls ? parsePoliciesRegex(content, table.schema, table.name) : void 0;
  return {
    schema: table.schema,
    name: table.name,
    qualifiedName,
    semanticName: snakeToCamel2(table.name),
    sourceFile: filePath,
    lineNumber: table.lineNumber,
    columns: parsedColumns,
    primaryKey: primaryKeyColumns.length > 0 ? primaryKeyColumns : void 0,
    foreignKeys: foreignKeys?.length ? foreignKeys : void 0,
    indexes: indexes?.length ? indexes : void 0,
    hasRls,
    rlsPolicies: rlsPolicies?.length ? rlsPolicies : void 0
  };
}
|
|
4844
|
-
// Regex-based fallback extractor: reads the SQL file, strips comments, finds
// CREATE TABLE statements, and builds entries. Tables already present in the
// optional `seen` set are skipped (cross-file de-duplication).
function processTablesFromFileRegex(filePath, opts, seen) {
  const content = stripSqlComments(readFileSync(filePath, "utf-8"));
  const ctx = { content, lines: content.split("\n") };
  const results = [];
  for (const table of findTablesRegex(ctx)) {
    const qualifiedName = `${table.schema}.${table.name}`;
    if (seen?.has(qualifiedName)) continue;
    seen?.add(qualifiedName);
    results.push(buildTableEntryRegex(table, content, filePath, opts));
  }
  return results;
}
|
|
4858
|
-
// Extracts table entries from one SQL file, preferring the AST parser and
// falling back to the regex extractor when the parser is unavailable or
// finds no tables. `seen` de-duplicates qualified names across files.
// Fix: the file is now read only when the AST path will actually use it —
// the original read it unconditionally, wasting I/O when the parser is
// unavailable (the regex fallback re-reads the file itself either way).
async function processTablesFromFile(filePath, opts, seen) {
  if (await isAstParserAvailable()) {
    const content = readFileSync(filePath, "utf-8");
    const astTables = await extractTablesWithAst(content, filePath, opts);
    if (astTables.length > 0) {
      const entries = [];
      for (const table of astTables) {
        if (seen?.has(table.qualifiedName)) continue;
        seen?.add(table.qualifiedName);
        entries.push(table);
      }
      return entries;
    }
  }
  return processTablesFromFileRegex(filePath, opts, seen);
}
|
|
4874
|
-
// Walks `sqlDir` for *.sql files (sorted for determinism) and extracts table
// entries from each, de-duplicating by qualified name across files. Returns
// [] when the directory does not exist.
async function extractTablesFromSqlDir(sqlDir, options = {}) {
  if (!existsSync(sqlDir)) return [];
  const opts = resolveOptions(options);
  const seen = new Set();
  const allEntries = [];
  for (const filePath of collectSqlFiles(sqlDir).sort()) {
    const entries = await processTablesFromFile(filePath, opts, seen);
    allEntries.push(...entries);
  }
  return allEntries;
}
|
|
4887
|
-
|
|
4888
|
-
// src/commands/db/utils/table-source-classifier.ts
// Module-level ESM shim initialization injected by the bundler.
init_esm_shims();
|
|
4890
|
-
// Splits "schema.table" into its two parts. With no dot, `table` is "";
// extra dot-separated segments beyond the second are discarded.
function splitQualifiedName(qualifiedName) {
  const parts = qualifiedName.split(".", 2);
  return {
    schema: parts[0] ?? "",
    table: parts[1] ?? ""
  };
}
|
|
4894
|
-
// Escapes every regex metacharacter in `value` so it can be embedded in a
// RegExp as literal text.
function escapeRegexLiteral(value) {
  const metacharacters = /[.*+?^${}()|[\]\\]/g;
  return value.replace(metacharacters, "\\$&");
}
|
|
4897
|
-
// Compiles glob-like table patterns (where `*` is a wildcard) into a matcher
// over qualified names. Patterns containing "." are matched against the full
// "schema.table" string; bare patterns match the table part only.
function buildTablePatternMatcher(patterns) {
  const compiled = [];
  for (const raw of patterns) {
    const pattern = raw.trim();
    if (pattern.length === 0) continue;
    compiled.push({
      target: pattern.includes(".") ? "qualified" : "table",
      // Escape everything, then turn escaped '*' back into a wildcard.
      regex: new RegExp(`^${escapeRegexLiteral(pattern).replace(/\\\*/g, ".*")}$`)
    });
  }
  return (qualifiedName) => {
    const { table } = splitQualifiedName(qualifiedName);
    return compiled.some((entry) => {
      const candidate = entry.target === "qualified" ? qualifiedName : table;
      return entry.regex.test(candidate);
    });
  };
}
|
|
4914
|
-
// Walks the partition-parent chain starting at `table` and returns the first
// table managed by idempotent SQL (the table itself counts). Returns null
// when the chain ends or cycles without hitting a managed table.
function findIdempotentAncestor(table, partitionParentMap, idempotentManagedTables) {
  if (idempotentManagedTables.has(table)) return table;
  const walked = new Set();
  let node = table;
  // `walked` guards against cycles in a malformed parent map.
  while (!walked.has(node)) {
    walked.add(node);
    const parent = partitionParentMap.get(node);
    if (!parent) return null;
    if (idempotentManagedTables.has(parent)) return parent;
    node = parent;
  }
  return null;
}
|
|
4933
|
-
// True when the table lives in a system schema or is an individually known
// system-managed table.
function isSystemManagedTable(params) {
  const { qualifiedName, systemSchemas, knownSystemTables } = params;
  const { schema } = splitQualifiedName(qualifiedName);
  if (systemSchemas.has(schema)) return true;
  return knownSystemTables.has(qualifiedName);
}
|
|
4937
|
-
// Classifies a sourceless table as idempotent-SQL-managed when it (or a
// partition ancestor) appears in the idempotent-managed set; otherwise null.
function classifyIdempotentManagedTable(params) {
  const ancestor = findIdempotentAncestor(
    params.qualifiedName,
    params.partitionParentMap,
    params.idempotentManagedTables
  );
  if (!ancestor) return null;
  const detail =
    ancestor === params.qualifiedName
      ? "matched CREATE TABLE in idempotent SQL"
      : `partition child of ${ancestor}`;
  return { qualifiedName: params.qualifiedName, detail };
}
|
|
4951
|
-
// Classifies a sourceless table as extension-managed, system-managed, or
// explicitly allowlisted — in that precedence order. Returns null when none
// of the three applies.
function classifyExtensionSystemOrAllowlistedTable(params) {
  const { qualifiedName } = params;
  const extensionName = params.extensionManagedTables.get(qualifiedName);
  if (extensionName) {
    return {
      qualifiedName,
      detail: `managed by extension "${extensionName}"`
    };
  }
  const systemManaged = isSystemManagedTable({
    qualifiedName,
    systemSchemas: params.systemSchemas,
    knownSystemTables: params.knownSystemTables
  });
  if (systemManaged) {
    return {
      qualifiedName,
      detail: "system-managed schema/table"
    };
  }
  if (params.exclusionMatcher(qualifiedName)) {
    return {
      qualifiedName,
      detail: "allowlisted by database.pgSchemaDiff.excludeFromOrphanDetection"
    };
  }
  return null;
}
|
|
4977
|
-
// Buckets every table that has no SQL source file into one of three
// categories: defined by idempotent/dynamic DDL, extension/system/allowlisted,
// or truly orphaned. Optional inputs default to empty collections.
function classifyMissingSourceTables(params) {
  const extensionManagedTables = params.extensionManagedTables ?? new Map();
  const partitionParentMap = params.partitionParentMap ?? new Map();
  const exclusionMatcher = buildTablePatternMatcher(params.excludeFromOrphanDetection ?? []);
  const systemSchemas = new Set(params.systemSchemas ?? []);
  const knownSystemTables = new Set(params.knownSystemTables ?? []);
  const result = {
    definedInIdempotentDynamicDdl: [],
    extensionManagedOrSystemTable: [],
    trulyOrphaned: []
  };
  for (const qualifiedName of params.tablesWithoutSource) {
    // Idempotent-DDL classification takes precedence.
    const idempotentItem = classifyIdempotentManagedTable({
      qualifiedName,
      partitionParentMap,
      idempotentManagedTables: params.idempotentManagedTables
    });
    if (idempotentItem) {
      result.definedInIdempotentDynamicDdl.push(idempotentItem);
      continue;
    }
    const managedItem = classifyExtensionSystemOrAllowlistedTable({
      qualifiedName,
      extensionManagedTables,
      systemSchemas,
      knownSystemTables,
      exclusionMatcher
    });
    if (managedItem) {
      result.extensionManagedOrSystemTable.push(managedItem);
      continue;
    }
    result.trulyOrphaned.push(qualifiedName);
  }
  return result;
}
|
|
5013
|
-
|
|
5014
|
-
// src/commands/db/utils/table-registry-introspection.ts
// Module-level ESM shim initialization injected by the bundler.
init_esm_shims();
// PostgreSQL identifier shape: letter/underscore start, alphanumeric or
// underscore body, max 63 characters total (PostgreSQL's NAMEDATALEN - 1).
var VALID_PG_IDENTIFIER = /^[a-zA-Z_][a-zA-Z0-9_]{0,62}$/;
|
|
5017
|
-
// Throws when `name` is not a safe PostgreSQL identifier. `context` names the
// value in the error message (e.g. "schema name").
function validatePgIdentifier2(name, context) {
  const isNonEmptyString = Boolean(name) && typeof name === "string";
  if (!isNonEmptyString) {
    throw new Error(`Invalid ${context}: empty or not a string`);
  }
  if (VALID_PG_IDENTIFIER.test(name)) {
    return;
  }
  throw new Error(
    `Invalid ${context} "${name}": must start with letter/underscore and contain only alphanumeric/underscore characters`
  );
}
|
|
5027
|
-
// Builds a comma-separated list of quoted schema literals for a SQL IN
// clause, validating each name first. Throws on an empty list or on any
// invalid schema name. The '' escaping is defense-in-depth — the validator
// already rejects quotes.
function buildSafeSchemaInClause2(schemas) {
  if (schemas.length === 0) {
    throw new Error("No schemas provided for IN clause");
  }
  return schemas
    .map((schema) => {
      validatePgIdentifier2(schema, "schema name");
      return `'${schema.replace(/'/g, "''")}'`;
    })
    .join(",");
}
|
|
5038
|
-
// Introspects the live database and converts the result into table entries.
// On failure, rethrows with an actionable, categorized message (connection /
// auth / permission / generic). The original error text is embedded in the
// message; note the stack of the underlying error is not preserved as
// `cause`.
async function introspectTablesFromDb(databaseUrl, schemas) {
  try {
    const result = await introspectDatabase(databaseUrl, { schemas });
    return convertIntrospectionToTableEntries(result);
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    // Connection refused: database not running or wrong host/port.
    if (message.includes("ECONNREFUSED") || message.includes("connection refused")) {
      throw new Error(
        `[DB Introspection] Cannot connect to database.
URL: ${databaseUrl.replace(/:[^:@]+@/, ":***@")}
Cause: ${message}

Solutions:
1. Ensure database is running: runa check --fix
2. Check DATABASE_URL in .env.development`
      );
    }
    // Bad credentials.
    if (message.includes("authentication") || message.includes("password")) {
      throw new Error(
        `[DB Introspection] Database authentication failed.
Cause: ${message}

Solutions:
1. Check DATABASE_URL credentials
2. Verify database user has SELECT on pg_catalog`
      );
    }
    // Connected but lacking catalog privileges.
    if (message.includes("permission") || message.includes("denied")) {
      throw new Error(
        `[DB Introspection] Permission denied.
Cause: ${message}

Solutions:
1. Ensure database user has SELECT on information_schema
2. Ensure database user has SELECT on pg_catalog`
      );
    }
    // Fallback for anything uncategorized.
    throw new Error(
      `[DB Introspection] Failed to introspect database.
Cause: ${message}

Solutions:
1. Run: runa check --verbose
2. Verify database is running and accessible`
    );
  }
}
|
|
5085
|
-
// Groups introspected foreign keys by "schema.table", converting each row to
// the table-entry FK shape.
function buildForeignKeyMap(foreignKeys) {
  const fksByTable = new Map();
  for (const fk of foreignKeys) {
    const tableKey = `${fk.schemaName}.${fk.tableName}`;
    const bucket = fksByTable.get(tableKey) ?? [];
    if (bucket.length === 0) fksByTable.set(tableKey, bucket);
    bucket.push({
      column: fk.columnName,
      referencesTable: `${fk.referencedSchema}.${fk.referencedTable}`,
      referencesColumn: fk.referencedColumn,
      onDelete: normalizeOnAction2(fk.onDelete),
      onUpdate: normalizeOnAction2(fk.onUpdate)
    });
  }
  return fksByTable;
}
|
|
5100
|
-
// Groups introspected indexes by "schema.table".
function buildIndexMap(indexes) {
  const indexesByTable = new Map();
  for (const idx of indexes) {
    const tableKey = `${idx.schemaName}.${idx.tableName}`;
    const bucket = indexesByTable.get(tableKey) ?? [];
    if (bucket.length === 0) indexesByTable.set(tableKey, bucket);
    bucket.push({
      name: idx.indexName,
      columns: idx.columns,
      isUnique: idx.isUnique
    });
  }
  return indexesByTable;
}
|
|
5113
|
-
// Maps "schema.table" to its RLS-enabled flag.
function buildRlsEnabledMap(rlsTables) {
  const rlsByTable = new Map();
  for (const rls of rlsTables) {
    const tableKey = `${rls.schemaName}.${rls.tableName}`;
    rlsByTable.set(tableKey, rls.rlsEnabled);
  }
  return rlsByTable;
}
|
|
5120
|
-
// Groups introspected RLS policies by "schema.table", converting null
// expressions to undefined.
function buildRlsPoliciesMap(rlsPolicies) {
  const rlsPoliciesByTable = new Map();
  for (const policy of rlsPolicies) {
    const tableKey = `${policy.schemaName}.${policy.tableName}`;
    const bucket = rlsPoliciesByTable.get(tableKey) ?? [];
    if (bucket.length === 0) rlsPoliciesByTable.set(tableKey, bucket);
    bucket.push({
      name: policy.policyName,
      command: policy.command,
      using: policy.usingExpr ?? void 0,
      withCheck: policy.withCheckExpr ?? void 0
    });
  }
  return rlsPoliciesByTable;
}
|
|
5134
|
-
// Groups CHECK constraints by "schema.table". Rows missing a name or
// definition are skipped with a warning rather than producing bad entries.
function buildCheckConstraintsMap(checkConstraints) {
  const checksByTable = new Map();
  for (const check of checkConstraints) {
    if (!check.name || !check.definition) {
      console.warn(
        `[DB Introspection] Skipping CHECK constraint with missing name or definition in ${check.schemaName}.${check.tableName}`
      );
      continue;
    }
    const tableKey = `${check.schemaName}.${check.tableName}`;
    const bucket = checksByTable.get(tableKey) ?? [];
    if (bucket.length === 0) checksByTable.set(tableKey, bucket);
    bucket.push({
      name: check.name,
      definition: check.definition,
      columns: check.columns ?? []
    });
  }
  return checksByTable;
}
|
|
5153
|
-
// Groups triggers by "schema.table", extracting the invoked function name
// from the EXECUTE FUNCTION/PROCEDURE clause (falling back to the whole
// action statement when no match). Rows with no actionStatement are skipped
// with a warning.
function buildTriggersMap(triggers) {
  const triggersByTable = new Map();
  for (const trigger of triggers) {
    if (!trigger.actionStatement) {
      console.warn(
        `[DB Introspection] Skipping trigger ${trigger.triggerName} with missing actionStatement`
      );
      continue;
    }
    const tableKey = `${trigger.schemaName}.${trigger.tableName}`;
    const bucket = triggersByTable.get(tableKey) ?? [];
    if (bucket.length === 0) triggersByTable.set(tableKey, bucket);
    const fnMatch = trigger.actionStatement.match(
      /EXECUTE\s+(?:FUNCTION|PROCEDURE)\s+([^\s(]+)/i
    );
    bucket.push({
      name: trigger.triggerName,
      event: trigger.eventManipulation,
      timing: trigger.actionTiming,
      function: fnMatch?.[1] ?? trigger.actionStatement
    });
  }
  return triggersByTable;
}
|
|
5177
|
-
// Maps "schema.table" to its primary-key columns, identified by the
// conventional *_pkey index name. NOTE: relies on PostgreSQL's default PK
// index naming; a renamed PK index would be missed.
function buildPrimaryKeyMap(indexes) {
  const pkByTable = new Map();
  for (const idx of indexes) {
    if (!idx.indexName.endsWith("_pkey")) continue;
    pkByTable.set(`${idx.schemaName}.${idx.tableName}`, idx.columns);
  }
  return pkByTable;
}
|
|
5187
|
-
// Converts a raw introspection result into table entries, joining the
// per-aspect lookups (FKs, indexes, RLS, checks, triggers, PKs) onto each
// table by qualified name. semanticName/sourceFile are left empty — they are
// filled in by later pipeline stages.
function convertIntrospectionToTableEntries(result) {
  const fksByTable = buildForeignKeyMap(result.foreignKeys);
  const indexesByTable = buildIndexMap(result.indexes);
  const rlsByTable = buildRlsEnabledMap(result.rlsTables);
  const rlsPoliciesByTable = buildRlsPoliciesMap(result.rlsPolicies);
  const checksByTable = buildCheckConstraintsMap(result.checkConstraints);
  const triggersByTable = buildTriggersMap(result.triggers);
  const pkByTable = buildPrimaryKeyMap(result.indexes);
  const toEntry = (table) => {
    const qualifiedName = `${table.schema}.${table.name}`;
    const pkColumns = pkByTable.get(qualifiedName) ?? [];
    return {
      schema: table.schema,
      name: table.name,
      qualifiedName,
      semanticName: "",
      // Will be set by semantic mapper
      sourceFile: "",
      // Will be set by SQL file mapping
      columns: table.columns.map((col) => ({
        name: col.name,
        type: col.type,
        notNull: !col.nullable,
        hasDefault: col.default !== null && col.default !== void 0,
        isPrimaryKey: pkColumns.includes(col.name)
      })),
      primaryKey: pkByTable.get(qualifiedName) ?? table.primaryKey,
      foreignKeys: fksByTable.get(qualifiedName) ?? [],
      indexes: indexesByTable.get(qualifiedName) ?? [],
      hasRls: rlsByTable.get(qualifiedName) ?? false,
      rlsPolicies: rlsPoliciesByTable.get(qualifiedName) ?? [],
      checkConstraints: checksByTable.get(qualifiedName) ?? [],
      triggers: triggersByTable.get(qualifiedName) ?? []
    };
  };
  return result.tables.map(toEntry);
}
|
|
5224
|
-
function normalizeOnAction2(action) {
|
|
5225
|
-
if (!action) return void 0;
|
|
5226
|
-
const upper = action.toUpperCase().replace(/\s+/g, " ").trim();
|
|
5227
|
-
switch (upper) {
|
|
5228
|
-
case "CASCADE":
|
|
5229
|
-
return "CASCADE";
|
|
5230
|
-
case "SET NULL":
|
|
5231
|
-
return "SET NULL";
|
|
5232
|
-
case "SET DEFAULT":
|
|
5233
|
-
return "SET DEFAULT";
|
|
5234
|
-
case "RESTRICT":
|
|
5235
|
-
return "RESTRICT";
|
|
5236
|
-
case "NO ACTION":
|
|
5237
|
-
return "NO ACTION";
|
|
5238
|
-
default:
|
|
5239
|
-
return void 0;
|
|
5240
|
-
}
|
|
5241
|
-
}
|
|
5242
|
-
function shouldSkipDrizzleExport(exportName) {
|
|
5243
|
-
return exportName.endsWith("Schema") || exportName.endsWith("Enum") || exportName.endsWith("Relations");
|
|
5244
|
-
}
|
|
5245
|
-
function extractDrizzleTableNames(schemaModule) {
|
|
5246
|
-
const drizzleTables = /* @__PURE__ */ new Set();
|
|
5247
|
-
for (const [exportName, exportValue] of Object.entries(schemaModule)) {
|
|
5248
|
-
if (shouldSkipDrizzleExport(exportName)) continue;
|
|
5249
|
-
const maybeTable = exportValue;
|
|
5250
|
-
if (maybeTable?._?.name) {
|
|
5251
|
-
const fullName = maybeTable._.schema ? `${maybeTable._.schema}.${maybeTable._.name}` : maybeTable._.name;
|
|
5252
|
-
drizzleTables.add(fullName);
|
|
5253
|
-
}
|
|
5254
|
-
}
|
|
5255
|
-
return drizzleTables;
|
|
5256
|
-
}
|
|
5257
|
-
function compareTables(sqlTables, drizzleTables) {
|
|
5258
|
-
const matched = [];
|
|
5259
|
-
const sqlOnly = [];
|
|
5260
|
-
const sqlFullNames = new Set(sqlTables.map((t) => t.qualifiedName));
|
|
5261
|
-
for (const table of sqlTables) {
|
|
5262
|
-
if (drizzleTables.has(table.qualifiedName)) {
|
|
5263
|
-
matched.push(table);
|
|
5264
|
-
} else {
|
|
5265
|
-
sqlOnly.push(table);
|
|
5266
|
-
}
|
|
5267
|
-
}
|
|
5268
|
-
const drizzleOnly = [...drizzleTables].filter((name) => !sqlFullNames.has(name));
|
|
5269
|
-
return { matched, sqlOnly, drizzleOnly };
|
|
5270
|
-
}
|
|
5271
|
-
async function crossCheckWithDrizzle(sqlTables, drizzleSchemaPath) {
|
|
5272
|
-
if (!existsSync(drizzleSchemaPath)) {
|
|
5273
|
-
return { matched: [], sqlOnly: sqlTables, drizzleOnly: [] };
|
|
5274
|
-
}
|
|
5275
|
-
try {
|
|
5276
|
-
const schemaModule = await import(drizzleSchemaPath);
|
|
5277
|
-
const drizzleTables = extractDrizzleTableNames(schemaModule);
|
|
5278
|
-
return compareTables(sqlTables, drizzleTables);
|
|
5279
|
-
} catch {
|
|
5280
|
-
return { matched: [], sqlOnly: sqlTables, drizzleOnly: [] };
|
|
5281
|
-
}
|
|
5282
|
-
}
|
|
5283
|
-
|
|
5284
|
-
// src/commands/db/utils/table-registry.ts
|
|
5285
|
-
var MANIFEST_VERSION = 2;
|
|
5286
|
-
var GENERATOR_VERSION = "1.0.0";
|
|
5287
|
-
var DEFAULT_IDEMPOTENT_SQL_DIR = "supabase/schemas/idempotent";
|
|
5288
|
-
var KNOWN_EXTENSION_SYSTEM_TABLES = /* @__PURE__ */ new Set([
|
|
5289
|
-
"public.spatial_ref_sys",
|
|
5290
|
-
"public.geometry_columns",
|
|
5291
|
-
"public.geography_columns"
|
|
5292
|
-
]);
|
|
5293
|
-
var SUPABASE_SYSTEM_SCHEMA_SET = new Set(SUPABASE_SYSTEM_SCHEMAS);
|
|
5294
|
-
function toRelativeSourcePath(projectRoot, sourceFile) {
|
|
5295
|
-
let relativeSource = relative(projectRoot, sourceFile);
|
|
5296
|
-
if (relativeSource.startsWith("/") || relativeSource.startsWith("..")) {
|
|
5297
|
-
const schemaMatch = sourceFile.match(/supabase\/schemas\/[^/]+\/[^/]+$/);
|
|
5298
|
-
relativeSource = schemaMatch ? schemaMatch[0] : sourceFile;
|
|
5299
|
-
}
|
|
5300
|
-
return relativeSource;
|
|
5301
|
-
}
|
|
5302
|
-
function resolveSourceConfig(projectRoot, options) {
|
|
5303
|
-
let idempotentSqlDir = options.idempotentSqlDir ?? DEFAULT_IDEMPOTENT_SQL_DIR;
|
|
5304
|
-
const exclusions = new Set(options.excludeFromOrphanDetection ?? []);
|
|
5305
|
-
try {
|
|
5306
|
-
const config = loadRunaConfig(projectRoot);
|
|
5307
|
-
const pgSchemaDiff = config.database?.pgSchemaDiff;
|
|
5308
|
-
if (!options.idempotentSqlDir && pgSchemaDiff?.idempotentSqlDir) {
|
|
5309
|
-
idempotentSqlDir = pgSchemaDiff.idempotentSqlDir;
|
|
5310
|
-
}
|
|
5311
|
-
if (pgSchemaDiff?.excludeFromOrphanDetection) {
|
|
5312
|
-
for (const pattern of pgSchemaDiff.excludeFromOrphanDetection) {
|
|
5313
|
-
exclusions.add(pattern);
|
|
5314
|
-
}
|
|
5315
|
-
}
|
|
5316
|
-
} catch {
|
|
5317
|
-
}
|
|
5318
|
-
return {
|
|
5319
|
-
idempotentSqlDir: isAbsolute(idempotentSqlDir) ? idempotentSqlDir : join(projectRoot, idempotentSqlDir),
|
|
5320
|
-
excludeFromOrphanDetection: [...exclusions].sort((a, b) => a.localeCompare(b))
|
|
5321
|
-
};
|
|
5322
|
-
}
|
|
5323
|
-
async function fetchMissingSourceMetadata(params) {
|
|
5324
|
-
const { databaseUrl, schemas } = params;
|
|
5325
|
-
if (schemas.length === 0) {
|
|
5326
|
-
return {
|
|
5327
|
-
extensionManagedTables: /* @__PURE__ */ new Map(),
|
|
5328
|
-
partitionParentMap: /* @__PURE__ */ new Map()
|
|
5329
|
-
};
|
|
5330
|
-
}
|
|
5331
|
-
const isRemoteSupabase = databaseUrl.includes(".supabase.co");
|
|
5332
|
-
const sql = postgres2(databaseUrl, {
|
|
5333
|
-
...isRemoteSupabase && { ssl: "require" }
|
|
5334
|
-
});
|
|
5335
|
-
try {
|
|
5336
|
-
const schemaList = buildSafeSchemaInClause2(schemas);
|
|
5337
|
-
const [extensionRows, partitionRows] = await Promise.all([
|
|
5338
|
-
sql`
|
|
5339
|
-
SELECT
|
|
5340
|
-
n.nspname AS schema_name,
|
|
5341
|
-
c.relname AS table_name,
|
|
5342
|
-
ext.extname AS extension_name
|
|
5343
|
-
FROM pg_class c
|
|
5344
|
-
JOIN pg_namespace n ON n.oid = c.relnamespace
|
|
5345
|
-
JOIN pg_depend d
|
|
5346
|
-
ON d.classid = 'pg_class'::regclass
|
|
5347
|
-
AND d.objid = c.oid
|
|
5348
|
-
AND d.refclassid = 'pg_extension'::regclass
|
|
5349
|
-
AND d.deptype = 'e'
|
|
5350
|
-
JOIN pg_extension ext ON ext.oid = d.refobjid
|
|
5351
|
-
WHERE c.relkind IN ('r', 'p')
|
|
5352
|
-
AND n.nspname IN (${sql.unsafe(schemaList)})
|
|
5353
|
-
`,
|
|
5354
|
-
sql`
|
|
5355
|
-
SELECT
|
|
5356
|
-
child_ns.nspname AS child_schema,
|
|
5357
|
-
child.relname AS child_table,
|
|
5358
|
-
parent_ns.nspname AS parent_schema,
|
|
5359
|
-
parent.relname AS parent_table
|
|
5360
|
-
FROM pg_inherits i
|
|
5361
|
-
JOIN pg_class child ON child.oid = i.inhrelid
|
|
5362
|
-
JOIN pg_namespace child_ns ON child_ns.oid = child.relnamespace
|
|
5363
|
-
JOIN pg_class parent ON parent.oid = i.inhparent
|
|
5364
|
-
JOIN pg_namespace parent_ns ON parent_ns.oid = parent.relnamespace
|
|
5365
|
-
WHERE child.relkind IN ('r', 'p')
|
|
5366
|
-
AND child_ns.nspname IN (${sql.unsafe(schemaList)})
|
|
5367
|
-
`
|
|
5368
|
-
]);
|
|
5369
|
-
const extensionManagedTables = /* @__PURE__ */ new Map();
|
|
5370
|
-
for (const row of extensionRows) {
|
|
5371
|
-
extensionManagedTables.set(
|
|
5372
|
-
`${String(row.schema_name)}.${String(row.table_name)}`,
|
|
5373
|
-
String(row.extension_name)
|
|
5374
|
-
);
|
|
5375
|
-
}
|
|
5376
|
-
const partitionParentMap = /* @__PURE__ */ new Map();
|
|
5377
|
-
for (const row of partitionRows) {
|
|
5378
|
-
partitionParentMap.set(
|
|
5379
|
-
`${String(row.child_schema)}.${String(row.child_table)}`,
|
|
5380
|
-
`${String(row.parent_schema)}.${String(row.parent_table)}`
|
|
5381
|
-
);
|
|
5382
|
-
}
|
|
5383
|
-
return { extensionManagedTables, partitionParentMap };
|
|
5384
|
-
} finally {
|
|
5385
|
-
await sql.end();
|
|
5386
|
-
}
|
|
5387
|
-
}
|
|
5388
|
-
function formatMissingSourceItems(items) {
|
|
5389
|
-
return items.map((item) => item.detail ? `${item.qualifiedName} (${item.detail})` : item.qualifiedName).join(", ");
|
|
5390
|
-
}
|
|
5391
|
-
function logMissingSourceClassification(classification) {
|
|
5392
|
-
const total = classification.definedInIdempotentDynamicDdl.length + classification.extensionManagedOrSystemTable.length + classification.trulyOrphaned.length;
|
|
5393
|
-
if (total === 0) return;
|
|
5394
|
-
console.warn(`[tables-manifest] \u26A0 ${total} table(s) exist in DB but not in SQL files.`);
|
|
5395
|
-
if (classification.definedInIdempotentDynamicDdl.length > 0) {
|
|
5396
|
-
console.log(
|
|
5397
|
-
`[tables-manifest] info: defined_in_idempotent_dynamic_ddl (${classification.definedInIdempotentDynamicDdl.length})`
|
|
5398
|
-
);
|
|
5399
|
-
console.log(` ${formatMissingSourceItems(classification.definedInIdempotentDynamicDdl)}`);
|
|
5400
|
-
}
|
|
5401
|
-
if (classification.extensionManagedOrSystemTable.length > 0) {
|
|
5402
|
-
console.log(
|
|
5403
|
-
`[tables-manifest] info: extension_managed/system_table (${classification.extensionManagedOrSystemTable.length})`
|
|
5404
|
-
);
|
|
5405
|
-
console.log(` ${formatMissingSourceItems(classification.extensionManagedOrSystemTable)}`);
|
|
5406
|
-
}
|
|
5407
|
-
if (classification.trulyOrphaned.length > 0) {
|
|
5408
|
-
console.warn(`[tables-manifest] warn: truly_orphaned (${classification.trulyOrphaned.length})`);
|
|
5409
|
-
console.warn(` ${classification.trulyOrphaned.join(", ")}`);
|
|
5410
|
-
console.warn(
|
|
5411
|
-
" \u2192 Add declarative/idempotent SQL definitions or allowlist via database.pgSchemaDiff.excludeFromOrphanDetection."
|
|
5412
|
-
);
|
|
5413
|
-
} else {
|
|
5414
|
-
console.log("[tables-manifest] info: no truly_orphaned tables detected.");
|
|
5415
|
-
}
|
|
5416
|
-
}
|
|
5417
|
-
async function logDrizzleCrossCheck(tables, drizzleSchemaPath) {
|
|
5418
|
-
const result = await crossCheckWithDrizzle(tables, drizzleSchemaPath);
|
|
5419
|
-
if (result.sqlOnly.length === 0 && result.drizzleOnly.length === 0) return;
|
|
5420
|
-
console.warn("[tables-manifest] SQL\u2194Drizzle discrepancies found:");
|
|
5421
|
-
if (result.sqlOnly.length > 0) {
|
|
5422
|
-
console.warn(" SQL only:", result.sqlOnly.map((t) => t.qualifiedName).join(", "));
|
|
5423
|
-
}
|
|
5424
|
-
if (result.drizzleOnly.length > 0) {
|
|
5425
|
-
console.warn(" Drizzle only:", result.drizzleOnly.join(", "));
|
|
5426
|
-
}
|
|
5427
|
-
}
|
|
5428
|
-
function logMappingConflicts(conflicts) {
|
|
5429
|
-
if (conflicts.length === 0) return;
|
|
5430
|
-
console.warn("[tables-manifest] Semantic name conflicts detected:");
|
|
5431
|
-
for (const conflict of conflicts) {
|
|
5432
|
-
console.warn(` '${conflict.semanticName}': ${conflict.tables.join(", ")}`);
|
|
5433
|
-
}
|
|
5434
|
-
}
|
|
5435
|
-
async function generateTablesManifest(projectRoot, options = {}) {
|
|
5436
|
-
const {
|
|
5437
|
-
sqlDir = join(projectRoot, "supabase/schemas/declarative"),
|
|
5438
|
-
drizzleSchemaPath = join(projectRoot, "packages/database/src/schema/index.js"),
|
|
5439
|
-
outputPath = join(projectRoot, ".runa/manifests/tables.json"),
|
|
5440
|
-
crossCheck = true,
|
|
5441
|
-
databaseUrl,
|
|
5442
|
-
mappingOptions = { conflictStrategy: "prefix" },
|
|
5443
|
-
// includeMetadata is defined in options but not yet used
|
|
5444
|
-
// Reserved for future metadata filtering feature
|
|
5445
|
-
includeMetadata: _includeMetadata = true
|
|
5446
|
-
} = options;
|
|
5447
|
-
const sourceConfig = resolveSourceConfig(projectRoot, options);
|
|
5448
|
-
let tables = [];
|
|
5449
|
-
const source = "introspection";
|
|
5450
|
-
const declarativeTables = await extractTablesFromSqlDir(sqlDir, {
|
|
5451
|
-
includeColumns: false,
|
|
5452
|
-
// Don't need columns from SQL (DB introspection is more accurate)
|
|
5453
|
-
includeForeignKeys: false,
|
|
5454
|
-
includeIndexes: false,
|
|
5455
|
-
includeRlsPolicies: false
|
|
5456
|
-
});
|
|
5457
|
-
const idempotentTablesForSource = await extractTablesFromSqlDir(sourceConfig.idempotentSqlDir, {
|
|
5458
|
-
includeColumns: false,
|
|
5459
|
-
includeForeignKeys: false,
|
|
5460
|
-
includeIndexes: false,
|
|
5461
|
-
includeRlsPolicies: false
|
|
5462
|
-
});
|
|
5463
|
-
const idempotentTablesFromRegex = extractTablesFromIdempotentSql(
|
|
5464
|
-
sourceConfig.idempotentSqlDir,
|
|
5465
|
-
projectRoot
|
|
5466
|
-
);
|
|
5467
|
-
const idempotentManagedTables = /* @__PURE__ */ new Set([
|
|
5468
|
-
...idempotentTablesFromRegex,
|
|
5469
|
-
...idempotentTablesForSource.map((t) => t.qualifiedName)
|
|
5470
|
-
]);
|
|
5471
|
-
const sourceFileMap = /* @__PURE__ */ new Map();
|
|
5472
|
-
const sourceTables = [...declarativeTables, ...idempotentTablesForSource];
|
|
5473
|
-
for (const t of sourceTables) {
|
|
5474
|
-
if (sourceFileMap.has(t.qualifiedName)) {
|
|
5475
|
-
continue;
|
|
5476
|
-
}
|
|
5477
|
-
sourceFileMap.set(t.qualifiedName, {
|
|
5478
|
-
sourceFile: toRelativeSourcePath(projectRoot, t.sourceFile),
|
|
5479
|
-
lineNumber: t.lineNumber
|
|
5480
|
-
});
|
|
5481
|
-
}
|
|
5482
|
-
if (!databaseUrl) {
|
|
3579
|
+
const { backupPath } = backupIdempotentTables(dbUrl, protectedTables, input.verbose);
|
|
3580
|
+
if (backupPath) {
|
|
3581
|
+
logger13.info(`Recovery: pg_restore -d <DATABASE_URL> ${backupPath}`);
|
|
3582
|
+
return;
|
|
3583
|
+
}
|
|
3584
|
+
if (protectedTables.length > 0 && !input.allowDataLoss) {
|
|
5483
3585
|
throw new Error(
|
|
5484
|
-
"
|
|
3586
|
+
"Pre-apply backup failed for production deployment.\n Protected tables exist but could not be backed up.\n Use --allow-data-loss to proceed without backup (emergency only)."
|
|
5485
3587
|
);
|
|
5486
3588
|
}
|
|
5487
|
-
|
|
5488
|
-
|
|
5489
|
-
|
|
5490
|
-
const fileInfo = sourceFileMap.get(t.qualifiedName);
|
|
5491
|
-
return {
|
|
5492
|
-
...t,
|
|
5493
|
-
sourceFile: fileInfo?.sourceFile ?? "",
|
|
5494
|
-
lineNumber: fileInfo?.lineNumber
|
|
5495
|
-
};
|
|
5496
|
-
});
|
|
5497
|
-
console.log(`[tables-manifest] \u2713 Introspected ${tables.length} tables from database`);
|
|
5498
|
-
const tablesWithoutSource = tables.filter((t) => !t.sourceFile);
|
|
5499
|
-
if (tablesWithoutSource.length > 0) {
|
|
5500
|
-
const missingSourceQualifiedNames = tablesWithoutSource.map((t) => t.qualifiedName);
|
|
5501
|
-
const missingSchemas = [...new Set(tablesWithoutSource.map((t) => t.schema))];
|
|
5502
|
-
let extensionManagedTables = /* @__PURE__ */ new Map();
|
|
5503
|
-
let partitionParentMap = /* @__PURE__ */ new Map();
|
|
3589
|
+
}
|
|
3590
|
+
async function cleanupApplyResources(params) {
|
|
3591
|
+
if (params.shadowDb) {
|
|
5504
3592
|
try {
|
|
5505
|
-
|
|
5506
|
-
|
|
5507
|
-
|
|
5508
|
-
}
|
|
5509
|
-
|
|
5510
|
-
|
|
5511
|
-
}
|
|
5512
|
-
const message = error instanceof Error ? error.message : String(error);
|
|
5513
|
-
console.warn(`[tables-manifest] Failed to classify extension/partition metadata: ${message}`);
|
|
5514
|
-
}
|
|
5515
|
-
const classification = classifyMissingSourceTables({
|
|
5516
|
-
tablesWithoutSource: missingSourceQualifiedNames,
|
|
5517
|
-
idempotentManagedTables,
|
|
5518
|
-
extensionManagedTables,
|
|
5519
|
-
partitionParentMap,
|
|
5520
|
-
excludeFromOrphanDetection: sourceConfig.excludeFromOrphanDetection,
|
|
5521
|
-
systemSchemas: SUPABASE_SYSTEM_SCHEMA_SET,
|
|
5522
|
-
knownSystemTables: KNOWN_EXTENSION_SYSTEM_TABLES
|
|
5523
|
-
});
|
|
5524
|
-
logMissingSourceClassification(classification);
|
|
5525
|
-
}
|
|
5526
|
-
if (crossCheck && existsSync(drizzleSchemaPath)) {
|
|
5527
|
-
await logDrizzleCrossCheck(tables, drizzleSchemaPath);
|
|
5528
|
-
}
|
|
5529
|
-
const mappingResult = generateMapping(tables, mappingOptions);
|
|
5530
|
-
logMappingConflicts(mappingResult.conflicts);
|
|
5531
|
-
tables = applyMappingToTables(tables, mappingResult.mapping);
|
|
5532
|
-
const now = /* @__PURE__ */ new Date();
|
|
5533
|
-
const jstOffset = 9 * 60 * 60 * 1e3;
|
|
5534
|
-
const jst = new Date(now.getTime() + jstOffset);
|
|
5535
|
-
const generatedAtJST = `${jst.getUTCFullYear()}-${String(jst.getUTCMonth() + 1).padStart(2, "0")}-${String(jst.getUTCDate()).padStart(2, "0")}T${String(jst.getUTCHours()).padStart(2, "0")}:${String(jst.getUTCMinutes()).padStart(2, "0")}:${String(jst.getUTCSeconds()).padStart(2, "0")}+09:00`;
|
|
5536
|
-
const manifest = {
|
|
5537
|
-
version: MANIFEST_VERSION,
|
|
5538
|
-
source,
|
|
5539
|
-
generatedAt: generatedAtJST,
|
|
5540
|
-
generatorVersion: GENERATOR_VERSION,
|
|
5541
|
-
tables,
|
|
5542
|
-
mapping: mappingResult.mapping
|
|
5543
|
-
};
|
|
5544
|
-
const outputDir = join(outputPath, "..");
|
|
5545
|
-
if (!existsSync(outputDir)) {
|
|
5546
|
-
mkdirSync(outputDir, { recursive: true });
|
|
3593
|
+
await params.shadowDb.cleanup();
|
|
3594
|
+
if (params.verbose) {
|
|
3595
|
+
logger13.debug("Shadow DB cleaned up");
|
|
3596
|
+
}
|
|
3597
|
+
} catch (cleanupError) {
|
|
3598
|
+
logger13.warn(`Failed to cleanup shadow DB: ${cleanupError}`);
|
|
3599
|
+
}
|
|
5547
3600
|
}
|
|
5548
|
-
|
|
5549
|
-
|
|
5550
|
-
|
|
5551
|
-
|
|
5552
|
-
}
|
|
5553
|
-
function logManifestSummary(manifest, conflicts) {
|
|
5554
|
-
const tableCount = manifest.tables.length;
|
|
5555
|
-
const schemas = [...new Set(manifest.tables.map((t) => t.schema))];
|
|
5556
|
-
const mappingCount = Object.keys(manifest.mapping).length;
|
|
5557
|
-
const checkCount = manifest.tables.reduce(
|
|
5558
|
-
(sum, t) => sum + (t.checkConstraints?.length ?? 0),
|
|
5559
|
-
0
|
|
5560
|
-
);
|
|
5561
|
-
const triggerCount = manifest.tables.reduce(
|
|
5562
|
-
(sum, t) => sum + (t.triggers?.length ?? 0),
|
|
5563
|
-
0
|
|
5564
|
-
);
|
|
5565
|
-
console.log("\n\u2713 Tables manifest generated");
|
|
5566
|
-
console.log(` - Source: DB introspection (PostgreSQL system catalogs)`);
|
|
5567
|
-
console.log(` - ${tableCount} tables extracted`);
|
|
5568
|
-
console.log(` - ${schemas.length} schemas: ${schemas.join(", ")}`);
|
|
5569
|
-
console.log(` - ${mappingCount} semantic names mapped`);
|
|
5570
|
-
if (checkCount > 0) {
|
|
5571
|
-
console.log(` - ${checkCount} CHECK constraints detected`);
|
|
3601
|
+
if (params.prefilter) {
|
|
3602
|
+
try {
|
|
3603
|
+
rmSync(params.prefilter.filteredDir, { recursive: true, force: true });
|
|
3604
|
+
} catch {
|
|
3605
|
+
}
|
|
5572
3606
|
}
|
|
5573
|
-
|
|
5574
|
-
|
|
3607
|
+
try {
|
|
3608
|
+
rmSync(params.tmpDir, { recursive: true, force: true });
|
|
3609
|
+
} catch {
|
|
5575
3610
|
}
|
|
5576
|
-
|
|
5577
|
-
|
|
5578
|
-
|
|
5579
|
-
|
|
5580
|
-
|
|
5581
|
-
}
|
|
3611
|
+
}
|
|
3612
|
+
var applyPgSchemaDiff = fromPromise(async ({ input: { input, targetDir } }) => {
|
|
3613
|
+
const schemasDir = join(targetDir, "supabase/schemas/declarative");
|
|
3614
|
+
if (!existsSync(schemasDir)) {
|
|
3615
|
+
logger13.info("No declarative schemas found");
|
|
3616
|
+
return { sql: "", hazards: [], applied: false };
|
|
5582
3617
|
}
|
|
5583
|
-
const
|
|
5584
|
-
|
|
5585
|
-
|
|
5586
|
-
|
|
5587
|
-
|
|
3618
|
+
const dbUrl = getDbUrl(input);
|
|
3619
|
+
const configState = loadPgSchemaDiffConfigState(targetDir, input.verbose);
|
|
3620
|
+
const prefilterState = createPrefilterState(
|
|
3621
|
+
schemasDir,
|
|
3622
|
+
input.verbose,
|
|
3623
|
+
configState.configExclusions
|
|
3624
|
+
);
|
|
3625
|
+
const freshDbResult = handleFreshDbCase(input, dbUrl, targetDir, prefilterState.pgSchemaDiffDir);
|
|
3626
|
+
if (freshDbResult) return freshDbResult;
|
|
3627
|
+
const schemaFiles = collectSchemaFiles(schemasDir);
|
|
3628
|
+
if (schemaFiles.length === 0) {
|
|
3629
|
+
logger13.info("No schema files to apply");
|
|
3630
|
+
return { sql: "", hazards: [], applied: false };
|
|
5588
3631
|
}
|
|
5589
|
-
const
|
|
5590
|
-
|
|
5591
|
-
|
|
5592
|
-
|
|
5593
|
-
|
|
5594
|
-
|
|
3632
|
+
const tmpDir = createCombinedSchemaBundle(schemaFiles, input.verbose);
|
|
3633
|
+
logger13.step("Running pg-schema-diff (incremental changes)...");
|
|
3634
|
+
let shadowDb = null;
|
|
3635
|
+
try {
|
|
3636
|
+
verifyPgSchemaDiffBinary({ strictVersion: input.env === "production" });
|
|
3637
|
+
await verifyDatabaseConnection(dbUrl);
|
|
3638
|
+
shadowDb = await createShadowDbForRun(dbUrl, configState.shadowExtensions, input.verbose);
|
|
3639
|
+
const includeSchemas = detectAppSchemas(schemasDir, input.verbose);
|
|
3640
|
+
cleanPartitionAclsForPgSchemaDiff(dbUrl, includeSchemas, input.verbose);
|
|
3641
|
+
const { planOutput } = executePgSchemaDiffPlan(
|
|
3642
|
+
dbUrl,
|
|
3643
|
+
prefilterState.pgSchemaDiffDir,
|
|
3644
|
+
includeSchemas,
|
|
3645
|
+
input.verbose,
|
|
3646
|
+
{ tempDbDsn: shadowDb?.dsn }
|
|
5595
3647
|
);
|
|
5596
|
-
|
|
5597
|
-
|
|
5598
|
-
|
|
5599
|
-
|
|
5600
|
-
|
|
3648
|
+
const noChangesResult = buildNoChangesResult(planOutput);
|
|
3649
|
+
if (noChangesResult) return noChangesResult;
|
|
3650
|
+
const { hazards } = handleHazardsWithContext(planOutput, input, schemasDir);
|
|
3651
|
+
const droppedTables = detectDropTableStatements(planOutput);
|
|
3652
|
+
enforceDropSafety(input, droppedTables);
|
|
3653
|
+
const dataViolationCount = runPreApplyDataCompatibility(dbUrl, planOutput, input);
|
|
3654
|
+
const protectedTables = getIdempotentProtectedTables(
|
|
3655
|
+
schemasDir,
|
|
3656
|
+
prefilterState.configExclusions
|
|
3657
|
+
);
|
|
3658
|
+
const protectedObjects = getIdempotentProtectedObjects(
|
|
3659
|
+
schemasDir,
|
|
3660
|
+
prefilterState.configExclusions
|
|
3661
|
+
);
|
|
3662
|
+
const checkModeResult = buildCheckModeResult(
|
|
3663
|
+
input,
|
|
3664
|
+
planOutput,
|
|
3665
|
+
hazards,
|
|
3666
|
+
protectedTables,
|
|
3667
|
+
protectedObjects,
|
|
3668
|
+
dataViolationCount
|
|
3669
|
+
);
|
|
3670
|
+
if (checkModeResult) return checkModeResult;
|
|
3671
|
+
backupProtectedTablesForProduction(dbUrl, protectedTables, input);
|
|
3672
|
+
const preApplyCounts = getTableRowEstimates(dbUrl, schemasDir, input.verbose);
|
|
3673
|
+
const applyResult = await applyWithRetry({
|
|
3674
|
+
dbUrl,
|
|
3675
|
+
schemasDir,
|
|
3676
|
+
includeSchemas,
|
|
3677
|
+
input,
|
|
3678
|
+
planOutput,
|
|
3679
|
+
hazards,
|
|
3680
|
+
protectedTables,
|
|
3681
|
+
protectedObjects,
|
|
3682
|
+
tempDbDsn: shadowDb?.dsn,
|
|
3683
|
+
pgSchemaDiffDir: prefilterState.pgSchemaDiffDir
|
|
3684
|
+
});
|
|
3685
|
+
if (applyResult.applied) {
|
|
3686
|
+
verifyDataIntegrity(dbUrl, schemasDir, preApplyCounts, input.verbose, input.allowDataLoss);
|
|
5601
3687
|
}
|
|
3688
|
+
return {
|
|
3689
|
+
...applyResult,
|
|
3690
|
+
dataViolations: dataViolationCount > 0 ? dataViolationCount : void 0
|
|
3691
|
+
};
|
|
3692
|
+
} finally {
|
|
3693
|
+
await cleanupApplyResources({
|
|
3694
|
+
shadowDb,
|
|
3695
|
+
prefilter: prefilterState.prefilter,
|
|
3696
|
+
tmpDir,
|
|
3697
|
+
verbose: input.verbose
|
|
3698
|
+
});
|
|
5602
3699
|
}
|
|
5603
|
-
|
|
5604
|
-
|
|
5605
|
-
}
|
|
3700
|
+
});
|
|
3701
|
+
var validatePartitions = fromPromise(async ({ input: { input, targetDir } }) => {
|
|
3702
|
+
if (input.check) return { warnings: [] };
|
|
3703
|
+
const idempotentDir = join(targetDir, "supabase/schemas/idempotent");
|
|
3704
|
+
if (!existsSync(idempotentDir)) return { warnings: [] };
|
|
3705
|
+
const expected = parseExpectedPartitions(idempotentDir);
|
|
3706
|
+
if (expected.length === 0) return { warnings: [] };
|
|
3707
|
+
const dbUrl = getDbUrl(input);
|
|
3708
|
+
const schemas = [...new Set(expected.map((e) => e.parent.split(".")[0] ?? ""))];
|
|
3709
|
+
const actual = queryActualPartitions(dbUrl, schemas);
|
|
3710
|
+
const drift = detectPartitionDrift(expected, actual);
|
|
3711
|
+
if (drift.missing.length === 0) {
|
|
3712
|
+
logger13.success(`All ${expected.length} expected partition(s) verified`);
|
|
3713
|
+
return { warnings: [] };
|
|
3714
|
+
}
|
|
3715
|
+
const warnings = formatPartitionWarnings(drift);
|
|
3716
|
+
for (const w of warnings) logger13.warn(w);
|
|
3717
|
+
return { warnings };
|
|
3718
|
+
});
|
|
5606
3719
|
|
|
5607
3720
|
// src/commands/db/apply/actors/seed-actors.ts
|
|
3721
|
+
init_esm_shims();
|
|
5608
3722
|
var DESTRUCTIVE_SEED_PATTERNS = [
|
|
5609
3723
|
{ pattern: /\bDELETE\s+FROM\b/i, description: "DELETE FROM" },
|
|
5610
3724
|
{ pattern: /\bTRUNCATE\b/i, description: "TRUNCATE" },
|
|
@@ -5637,9 +3751,12 @@ function isUnsafeProductionSeed(input, seedFile) {
|
|
|
5637
3751
|
return true;
|
|
5638
3752
|
}
|
|
5639
3753
|
function parseSeedErrorDiagnostics(stderr) {
|
|
3754
|
+
const psqlLocation = stderr.match(/psql:([^:]+):(\d+):\s*ERROR:/);
|
|
3755
|
+
const locationInfo = psqlLocation ? { file: psqlLocation[1], line: Number(psqlLocation[2]) } : {};
|
|
5640
3756
|
const columnMissing = stderr.match(/column "([^"]+)" of relation "([^"]+)" does not exist/);
|
|
5641
3757
|
if (columnMissing) {
|
|
5642
3758
|
return {
|
|
3759
|
+
...locationInfo,
|
|
5643
3760
|
table: columnMissing[2],
|
|
5644
3761
|
errorType: "missing_column",
|
|
5645
3762
|
hint: `Column "${columnMissing[1]}" missing from ${columnMissing[2]}. Schema may have changed. Regenerate seeds: pnpm generate:seeds ci`
|
|
@@ -5648,6 +3765,7 @@ function parseSeedErrorDiagnostics(stderr) {
|
|
|
5648
3765
|
const relationMissing = stderr.match(/relation "([^"]+)" does not exist/);
|
|
5649
3766
|
if (relationMissing) {
|
|
5650
3767
|
return {
|
|
3768
|
+
...locationInfo,
|
|
5651
3769
|
table: relationMissing[1],
|
|
5652
3770
|
errorType: "missing_relation",
|
|
5653
3771
|
hint: `Table ${relationMissing[1]} does not exist. Regenerate seeds: pnpm generate:seeds ci`
|
|
@@ -5658,6 +3776,7 @@ function parseSeedErrorDiagnostics(stderr) {
|
|
|
5658
3776
|
);
|
|
5659
3777
|
if (fkViolation) {
|
|
5660
3778
|
return {
|
|
3779
|
+
...locationInfo,
|
|
5661
3780
|
table: fkViolation[1],
|
|
5662
3781
|
errorType: "fk_violation",
|
|
5663
3782
|
hint: `FK constraint failed on ${fkViolation[1]}. Check seed insertion order or missing parent records.`
|
|
@@ -5666,6 +3785,7 @@ function parseSeedErrorDiagnostics(stderr) {
|
|
|
5666
3785
|
const checkViolation = stderr.match(/new row for relation "([^"]+)" violates check constraint/);
|
|
5667
3786
|
if (checkViolation) {
|
|
5668
3787
|
return {
|
|
3788
|
+
...locationInfo,
|
|
5669
3789
|
table: checkViolation[1],
|
|
5670
3790
|
errorType: "check_violation",
|
|
5671
3791
|
hint: `CHECK constraint failed on ${checkViolation[1]}. Seed data may not match column constraints. Regenerate seeds: pnpm generate:seeds ci`
|
|
@@ -5676,12 +3796,47 @@ function parseSeedErrorDiagnostics(stderr) {
|
|
|
5676
3796
|
);
|
|
5677
3797
|
if (uniqueViolation) {
|
|
5678
3798
|
return {
|
|
3799
|
+
...locationInfo,
|
|
5679
3800
|
table: uniqueViolation[1],
|
|
5680
3801
|
errorType: "unique_violation",
|
|
5681
3802
|
hint: `Duplicate key on ${uniqueViolation[1]}. Seeds may have been partially applied. Try: runa db reset`
|
|
5682
3803
|
};
|
|
5683
3804
|
}
|
|
5684
|
-
return {};
|
|
3805
|
+
return { ...locationInfo };
|
|
3806
|
+
}
|
|
3807
|
+
function createSeedFailureMessage(diagnostics, locationSuffix) {
|
|
3808
|
+
if (diagnostics.table) {
|
|
3809
|
+
const summary = `Seed apply failed on table: ${diagnostics.table} (${diagnostics.errorType})${locationSuffix}`;
|
|
3810
|
+
return {
|
|
3811
|
+
summary,
|
|
3812
|
+
hint: diagnostics.hint
|
|
3813
|
+
};
|
|
3814
|
+
}
|
|
3815
|
+
if (locationSuffix) {
|
|
3816
|
+
return { summary: `Seed apply failed${locationSuffix} (non-blocking)` };
|
|
3817
|
+
}
|
|
3818
|
+
return { summary: "Seed apply failed (non-blocking)" };
|
|
3819
|
+
}
|
|
3820
|
+
function logSeedFailureSummary(seedFailure) {
|
|
3821
|
+
logger13.warn(seedFailure.summary);
|
|
3822
|
+
if (seedFailure.hint) {
|
|
3823
|
+
logger13.info(` Hint: ${seedFailure.hint}`);
|
|
3824
|
+
}
|
|
3825
|
+
}
|
|
3826
|
+
function buildSeedLocationSuffix(diagnostics) {
|
|
3827
|
+
if (diagnostics.line == null) return "";
|
|
3828
|
+
const fileSuffix = diagnostics.file ? ` of ${diagnostics.file}` : "";
|
|
3829
|
+
return ` at line ${diagnostics.line}${fileSuffix}`;
|
|
3830
|
+
}
|
|
3831
|
+
function handleFailedSeed(result, verbose) {
|
|
3832
|
+
const errorMsg = result.stderr ? maskDbCredentials(result.stderr) : "";
|
|
3833
|
+
const diagnostics = parseSeedErrorDiagnostics(result.stderr);
|
|
3834
|
+
const failure = createSeedFailureMessage(diagnostics, buildSeedLocationSuffix(diagnostics));
|
|
3835
|
+
logSeedFailureSummary(failure);
|
|
3836
|
+
if (errorMsg && !verbose) {
|
|
3837
|
+
logger13.debug(` Error: ${errorMsg.split("\n")[0]}`);
|
|
3838
|
+
}
|
|
3839
|
+
return false;
|
|
5685
3840
|
}
|
|
5686
3841
|
function applySeedFile(dbUrl, seedFile, verbose) {
|
|
5687
3842
|
logger13.step("Applying seeds...");
|
|
@@ -5696,24 +3851,11 @@ function applySeedFile(dbUrl, seedFile, verbose) {
|
|
|
5696
3851
|
if (stdout) process.stdout.write(stdout);
|
|
5697
3852
|
if (stderr) process.stderr.write(stderr);
|
|
5698
3853
|
}
|
|
5699
|
-
if (result.status
|
|
5700
|
-
|
|
5701
|
-
|
|
5702
|
-
if (diagnostics.table) {
|
|
5703
|
-
logger13.warn(`Seed apply failed on table: ${diagnostics.table} (${diagnostics.errorType})`);
|
|
5704
|
-
if (diagnostics.hint) {
|
|
5705
|
-
logger13.info(` Hint: ${diagnostics.hint}`);
|
|
5706
|
-
}
|
|
5707
|
-
} else {
|
|
5708
|
-
logger13.warn(`Seed apply failed (non-blocking)`);
|
|
5709
|
-
}
|
|
5710
|
-
if (errorMsg && !verbose) {
|
|
5711
|
-
logger13.debug(` Error: ${errorMsg.split("\n")[0]}`);
|
|
5712
|
-
}
|
|
5713
|
-
return false;
|
|
3854
|
+
if (result.status === 0) {
|
|
3855
|
+
logger13.success("Seeds applied");
|
|
3856
|
+
return true;
|
|
5714
3857
|
}
|
|
5715
|
-
|
|
5716
|
-
return true;
|
|
3858
|
+
return handleFailedSeed(result, verbose);
|
|
5717
3859
|
}
|
|
5718
3860
|
function runSeeds(input, targetDir, dbUrl) {
|
|
5719
3861
|
if (input.noSeed) {
|
|
@@ -6995,11 +5137,11 @@ async function validateGitHubOutputPath(filePath) {
|
|
|
6995
5137
|
if (!filePath || filePath.trim().length === 0) {
|
|
6996
5138
|
return invalidOutputPath("Empty file path");
|
|
6997
5139
|
}
|
|
6998
|
-
const normalizedPath =
|
|
5140
|
+
const normalizedPath = path6.normalize(filePath);
|
|
6999
5141
|
if (normalizedPath.includes("..")) {
|
|
7000
5142
|
return invalidOutputPath("Path traversal detected (..) in file path");
|
|
7001
5143
|
}
|
|
7002
|
-
const absolutePath =
|
|
5144
|
+
const absolutePath = path6.resolve(normalizedPath);
|
|
7003
5145
|
const forbiddenPath = findForbiddenPath(absolutePath);
|
|
7004
5146
|
if (forbiddenPath) {
|
|
7005
5147
|
return invalidOutputPath(`Forbidden path: ${forbiddenPath}`);
|
|
@@ -7393,7 +5535,7 @@ async function analyzeSchemaChanges() {
|
|
|
7393
5535
|
try {
|
|
7394
5536
|
const { getDatabasePackagePath: getDatabasePackagePath2 } = await import('./config-loader-GT3HAQ7U.js');
|
|
7395
5537
|
const dbPath = await getDatabasePackagePath2();
|
|
7396
|
-
const schemaPath =
|
|
5538
|
+
const schemaPath = path6.join(dbPath, "src/schema/");
|
|
7397
5539
|
const { stdout } = await execa("git", ["diff", "--cached", "--", schemaPath]);
|
|
7398
5540
|
const lines = stdout.split("\n");
|
|
7399
5541
|
return parseDiffLines(lines);
|
|
@@ -7550,7 +5692,7 @@ async function testDatabaseConnection(projectRoot) {
|
|
|
7550
5692
|
const dbPort = detectLocalSupabasePorts(resolvedRoot).db;
|
|
7551
5693
|
let sql = null;
|
|
7552
5694
|
try {
|
|
7553
|
-
sql =
|
|
5695
|
+
sql = postgres(connectionUrl, {
|
|
7554
5696
|
connect_timeout: 5,
|
|
7555
5697
|
idle_timeout: 5,
|
|
7556
5698
|
max: 1
|
|
@@ -7659,7 +5801,7 @@ function getDeclarativeSqlFiles(sqlDir, logger15) {
|
|
|
7659
5801
|
async function collectSchemaRisks(sqlDir, sqlFiles) {
|
|
7660
5802
|
const allRisks = [];
|
|
7661
5803
|
for (const sqlFile of sqlFiles) {
|
|
7662
|
-
const filePath =
|
|
5804
|
+
const filePath = path6.join(sqlDir, sqlFile);
|
|
7663
5805
|
const risks = await detectSchemaRisks(filePath);
|
|
7664
5806
|
for (const risk of risks) {
|
|
7665
5807
|
allRisks.push({ ...risk, file: sqlFile });
|
|
@@ -7710,7 +5852,7 @@ function reportRiskGuidance(logger15, highRiskCount, lowRiskCount) {
|
|
|
7710
5852
|
async function runSqlSchemaRiskCheck(result, logger15, step) {
|
|
7711
5853
|
logger15.step("Checking SQL schema for risky patterns", step.next());
|
|
7712
5854
|
const cwd = process.cwd();
|
|
7713
|
-
const sqlDir =
|
|
5855
|
+
const sqlDir = path6.join(cwd, "supabase", "schemas", "declarative");
|
|
7714
5856
|
const sqlFiles = getDeclarativeSqlFiles(sqlDir, logger15);
|
|
7715
5857
|
if (!sqlFiles) return;
|
|
7716
5858
|
try {
|
|
@@ -7764,7 +5906,7 @@ async function runOrphanCheck(env, dbPackagePath, result, logger15, step) {
|
|
|
7764
5906
|
const { expectedTables, expectedEnums } = await extractSchemaTablesAndEnums(dbPackagePath);
|
|
7765
5907
|
const databaseUrl = tryResolveDatabaseUrl("local") || buildLocalDatabaseUrl(process.cwd());
|
|
7766
5908
|
const { dbTables, dbEnums } = await fetchDbTablesAndEnums(databaseUrl, {
|
|
7767
|
-
schemaDir:
|
|
5909
|
+
schemaDir: path6.join(dbPackagePath, "src", "schema")
|
|
7768
5910
|
});
|
|
7769
5911
|
let excludeFromOrphanDetection = [];
|
|
7770
5912
|
let idempotentSqlDir = "supabase/schemas/idempotent";
|
|
@@ -7858,21 +6000,21 @@ function parseSqlFilename(filename) {
|
|
|
7858
6000
|
// src/commands/db/utils/preflight-checks/domain-naming-checks.ts
|
|
7859
6001
|
var FILE_SIZE_THRESHOLD = 2e3;
|
|
7860
6002
|
var IDENTIFIER = "[A-Za-z_][A-Za-z0-9_]{0,127}";
|
|
7861
|
-
var
|
|
6003
|
+
var SQL_IDENTIFIER = `(?:"[^"]*(?:""[^"]*)*"|${IDENTIFIER})`;
|
|
7862
6004
|
var CREATE_SCHEMA_RE = new RegExp(
|
|
7863
|
-
`CREATE\\s+SCHEMA\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(${
|
|
6005
|
+
`CREATE\\s+SCHEMA\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(${SQL_IDENTIFIER})`,
|
|
7864
6006
|
"gi"
|
|
7865
6007
|
);
|
|
7866
6008
|
var CREATE_TABLE_RE = new RegExp(
|
|
7867
|
-
`CREATE\\s+TABLE\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(?:(${
|
|
6009
|
+
`CREATE\\s+TABLE\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(?:(${SQL_IDENTIFIER})\\s*\\.\\s*)?(${SQL_IDENTIFIER})`,
|
|
7868
6010
|
"gi"
|
|
7869
6011
|
);
|
|
7870
6012
|
var REFERENCES_RE = new RegExp(
|
|
7871
|
-
`REFERENCES\\s+(?:(${
|
|
6013
|
+
`REFERENCES\\s+(?:(${SQL_IDENTIFIER})\\s*\\.\\s*)?(${SQL_IDENTIFIER})`,
|
|
7872
6014
|
"gi"
|
|
7873
6015
|
);
|
|
7874
6016
|
var CREATE_POLICY_RE = new RegExp(
|
|
7875
|
-
`CREATE\\s+POLICY\\s+${
|
|
6017
|
+
`CREATE\\s+POLICY\\s+${SQL_IDENTIFIER}\\s+ON\\s+(?:(${SQL_IDENTIFIER})\\s*\\.\\s*)?(${SQL_IDENTIFIER})`,
|
|
7876
6018
|
"gi"
|
|
7877
6019
|
);
|
|
7878
6020
|
var SCHEMA_DOT_TABLE_RE = new RegExp(`\\b(${IDENTIFIER})\\s*\\.\\s*(${IDENTIFIER})\\b`, "gi");
|
|
@@ -7944,7 +6086,7 @@ function extractFkRefs(strippedContent, rawContent) {
|
|
|
7944
6086
|
function analyzeFile(sqlDir, filename) {
|
|
7945
6087
|
const parsed = parseSqlFilename(filename);
|
|
7946
6088
|
if (!parsed) return null;
|
|
7947
|
-
const filePath =
|
|
6089
|
+
const filePath = path6.join(sqlDir, filename);
|
|
7948
6090
|
try {
|
|
7949
6091
|
const content = readFileSync(filePath, "utf-8");
|
|
7950
6092
|
const strippedContent = blankDollarQuotedBodies(stripSqlComments(content));
|
|
@@ -8141,34 +6283,55 @@ function checkPolicyCrossSchemaRef(analysis, managedSchemas) {
|
|
|
8141
6283
|
"schema",
|
|
8142
6284
|
"function"
|
|
8143
6285
|
]);
|
|
8144
|
-
|
|
8145
|
-
|
|
8146
|
-
|
|
8147
|
-
|
|
8148
|
-
|
|
8149
|
-
|
|
8150
|
-
|
|
8151
|
-
|
|
8152
|
-
|
|
6286
|
+
function isPolicyNoisePart(token) {
|
|
6287
|
+
return token.length === 0 || SQL_NOISE.has(token);
|
|
6288
|
+
}
|
|
6289
|
+
function toPolicyLine(matchIndex) {
|
|
6290
|
+
return lineNumberAt(content, matchIndex);
|
|
6291
|
+
}
|
|
6292
|
+
function buildPolicyIssue(file, policySchema, refSchema, refTable, policyLine) {
|
|
6293
|
+
return {
|
|
6294
|
+
file,
|
|
6295
|
+
line: policyLine,
|
|
6296
|
+
rule: "DOMAIN_POLICY_CROSS_SCHEMA_REF",
|
|
6297
|
+
severity: "warning",
|
|
6298
|
+
message: `RLS policy in ${file} on '${policySchema}' schema references '${refSchema}.${refTable}'. pg-schema-diff does not track cross-schema references in USING/WITH CHECK. Use a PL/pgSQL wrapper function with dynamic SQL instead. See: database-known-limitations.md #11`
|
|
6299
|
+
};
|
|
6300
|
+
}
|
|
6301
|
+
function collectPolicyRefs(stmtText) {
|
|
8153
6302
|
const refs = collectRegexMatches(SCHEMA_DOT_TABLE_RE, stmtText);
|
|
8154
6303
|
const seenRefs = /* @__PURE__ */ new Set();
|
|
8155
6304
|
for (const ref of refs) {
|
|
8156
6305
|
const refSchema = ref[1] ? ref[1].toLowerCase() : "";
|
|
8157
6306
|
const refTable = ref[2] ? ref[2].toLowerCase() : "";
|
|
8158
|
-
if (
|
|
8159
|
-
|
|
8160
|
-
|
|
8161
|
-
|
|
8162
|
-
|
|
8163
|
-
|
|
8164
|
-
|
|
8165
|
-
|
|
8166
|
-
|
|
8167
|
-
|
|
8168
|
-
|
|
8169
|
-
|
|
8170
|
-
|
|
8171
|
-
|
|
6307
|
+
if (isPolicyNoisePart(refSchema) || isPolicyNoisePart(refTable)) {
|
|
6308
|
+
continue;
|
|
6309
|
+
}
|
|
6310
|
+
seenRefs.add(`${refSchema}.${refTable}`);
|
|
6311
|
+
}
|
|
6312
|
+
return seenRefs;
|
|
6313
|
+
}
|
|
6314
|
+
function isRelevantRef(policySchema, ref, managedSchemas2) {
|
|
6315
|
+
const [refSchema] = ref.split(".");
|
|
6316
|
+
return !!refSchema && refSchema !== policySchema && managedSchemas2.has(refSchema) && !SQL_NOISE.has(refSchema);
|
|
6317
|
+
}
|
|
6318
|
+
function buildStatementSlice(startIndex) {
|
|
6319
|
+
const stmtEnd = strippedContent.indexOf(";", startIndex);
|
|
6320
|
+
return strippedContent.slice(startIndex, stmtEnd >= 0 ? stmtEnd : strippedContent.length);
|
|
6321
|
+
}
|
|
6322
|
+
const policyMatches = collectRegexMatches(CREATE_POLICY_RE, strippedContent);
|
|
6323
|
+
for (const policyMatch of policyMatches) {
|
|
6324
|
+
const policySchema = policyMatch[1] ? unquote(policyMatch[1]) : "public";
|
|
6325
|
+
const policyLine = toPolicyLine(policyMatch.index);
|
|
6326
|
+
const stmtText = buildStatementSlice(policyMatch.index);
|
|
6327
|
+
for (const ref of collectPolicyRefs(stmtText)) {
|
|
6328
|
+
if (!isRelevantRef(policySchema, ref, managedSchemas)) {
|
|
6329
|
+
continue;
|
|
6330
|
+
}
|
|
6331
|
+
const [refSchema, refTable] = ref.split(".");
|
|
6332
|
+
issues.push(
|
|
6333
|
+
buildPolicyIssue(parsed.raw, policySchema, refSchema || "", refTable || "", policyLine)
|
|
6334
|
+
);
|
|
8172
6335
|
}
|
|
8173
6336
|
}
|
|
8174
6337
|
return issues;
|
|
@@ -8214,7 +6377,7 @@ function reportIssues(issues, result, logger15) {
|
|
|
8214
6377
|
async function runDomainNamingCheck(result, logger15, step) {
|
|
8215
6378
|
logger15.step("Checking domain naming consistency", step.next());
|
|
8216
6379
|
const cwd = process.cwd();
|
|
8217
|
-
const sqlDir =
|
|
6380
|
+
const sqlDir = path6.join(cwd, "supabase", "schemas", "declarative");
|
|
8218
6381
|
if (!existsSync(sqlDir)) {
|
|
8219
6382
|
logger15.success("No declarative SQL directory found (skipped)");
|
|
8220
6383
|
return;
|
|
@@ -8303,12 +6466,12 @@ function logSummary(result, logger15) {
|
|
|
8303
6466
|
function checkDatabasePackage() {
|
|
8304
6467
|
const cwd = process.cwd();
|
|
8305
6468
|
const locations = [
|
|
8306
|
-
|
|
8307
|
-
|
|
8308
|
-
|
|
6469
|
+
path6.join(cwd, "packages", "database"),
|
|
6470
|
+
path6.join(cwd, "packages", "db"),
|
|
6471
|
+
path6.join(cwd, "db")
|
|
8309
6472
|
];
|
|
8310
6473
|
for (const location of locations) {
|
|
8311
|
-
const configPath =
|
|
6474
|
+
const configPath = path6.join(location, "drizzle.config.ts");
|
|
8312
6475
|
if (existsSync(configPath)) {
|
|
8313
6476
|
return { exists: true, path: location };
|
|
8314
6477
|
}
|
|
@@ -8323,7 +6486,7 @@ function countTsFilesRecursive(dir) {
|
|
|
8323
6486
|
try {
|
|
8324
6487
|
const entries = readdirSync(dir, { withFileTypes: true });
|
|
8325
6488
|
for (const entry of entries) {
|
|
8326
|
-
const fullPath =
|
|
6489
|
+
const fullPath = path6.join(dir, entry.name);
|
|
8327
6490
|
if (entry.isDirectory() && !entry.isSymbolicLink()) {
|
|
8328
6491
|
count += countTsFilesRecursive(fullPath);
|
|
8329
6492
|
} else if (entry.isFile() && entry.name.endsWith(".ts")) {
|
|
@@ -8335,7 +6498,7 @@ function countTsFilesRecursive(dir) {
|
|
|
8335
6498
|
return count;
|
|
8336
6499
|
}
|
|
8337
6500
|
function checkSchemaFiles(dbPackagePath) {
|
|
8338
|
-
const schemaDir =
|
|
6501
|
+
const schemaDir = path6.join(dbPackagePath, "src", "schema");
|
|
8339
6502
|
if (!existsSync(schemaDir)) {
|
|
8340
6503
|
return { exists: false, count: 0 };
|
|
8341
6504
|
}
|
|
@@ -8386,7 +6549,7 @@ function runDatabasePackageChecks(result, logger15, step) {
|
|
|
8386
6549
|
logger15.error("Database package path is missing");
|
|
8387
6550
|
return null;
|
|
8388
6551
|
}
|
|
8389
|
-
logger15.success(`Found database package: ${
|
|
6552
|
+
logger15.success(`Found database package: ${path6.basename(dbPackagePath)}`);
|
|
8390
6553
|
logger15.step("Checking schema files", step.next());
|
|
8391
6554
|
const schemaCheck = checkSchemaFiles(dbPackagePath);
|
|
8392
6555
|
if (!schemaCheck.exists) {
|
|
@@ -10135,7 +8298,7 @@ function getSupabasePorts(projectPath) {
|
|
|
10135
8298
|
return getPortsWithOffset(offset);
|
|
10136
8299
|
}
|
|
10137
8300
|
async function updateSupabaseConfigPortsSafe(projectPath) {
|
|
10138
|
-
const configPath =
|
|
8301
|
+
const configPath = path6.join(projectPath, "supabase", "config.toml");
|
|
10139
8302
|
const resolved = await resolveAvailablePorts(projectPath);
|
|
10140
8303
|
if (!resolved) {
|
|
10141
8304
|
const ports = getSupabasePorts(projectPath);
|
|
@@ -10178,7 +8341,7 @@ function getPortAllocationSummary(projectPath) {
|
|
|
10178
8341
|
const ports = getSupabasePorts(projectPath);
|
|
10179
8342
|
const offset = calculatePortOffset(projectPath);
|
|
10180
8343
|
return [
|
|
10181
|
-
`Port allocation for: ${
|
|
8344
|
+
`Port allocation for: ${path6.basename(projectPath)}`,
|
|
10182
8345
|
` Slot: ${offset / 10} (hash-based, offset=${offset})`,
|
|
10183
8346
|
` API: ${ports.api}`,
|
|
10184
8347
|
` DB: ${ports.db}`,
|
|
@@ -10212,7 +8375,7 @@ function parseSeedPaths(configPath) {
|
|
|
10212
8375
|
}
|
|
10213
8376
|
}
|
|
10214
8377
|
async function applySeedFile2(seedPath, dbUrl) {
|
|
10215
|
-
const supabaseDir =
|
|
8378
|
+
const supabaseDir = path6.join(process.cwd(), "supabase");
|
|
10216
8379
|
const absolutePath = resolveSafePath(supabaseDir, seedPath);
|
|
10217
8380
|
if (!existsSync(absolutePath)) {
|
|
10218
8381
|
return;
|
|
@@ -10223,12 +8386,12 @@ async function applySeedFile2(seedPath, dbUrl) {
|
|
|
10223
8386
|
});
|
|
10224
8387
|
}
|
|
10225
8388
|
async function applySeeds2(configPath) {
|
|
10226
|
-
const configFile = configPath ||
|
|
8389
|
+
const configFile = configPath || path6.join(process.cwd(), "supabase", "config.toml");
|
|
10227
8390
|
const seedPaths = parseSeedPaths(configFile);
|
|
10228
8391
|
if (seedPaths.length === 0) {
|
|
10229
8392
|
return;
|
|
10230
8393
|
}
|
|
10231
|
-
const supabaseDir =
|
|
8394
|
+
const supabaseDir = path6.join(process.cwd(), "supabase");
|
|
10232
8395
|
const safePaths = filterSafePaths(seedPaths, supabaseDir);
|
|
10233
8396
|
if (safePaths.length === 0) {
|
|
10234
8397
|
return;
|
|
@@ -10751,7 +8914,7 @@ var validateCommand = new Command("validate").description("Validate schema files
|
|
|
10751
8914
|
const logger15 = createCLILogger("db:validate");
|
|
10752
8915
|
try {
|
|
10753
8916
|
logger15.section("Schema Validation");
|
|
10754
|
-
const schemasPath =
|
|
8917
|
+
const schemasPath = path6.join(process.cwd(), "packages", "database", "src", "schema");
|
|
10755
8918
|
if (!existsSync(schemasPath)) {
|
|
10756
8919
|
throw new CLIError("Schema directory not found", "SCHEMA_DIR_NOT_FOUND", [
|
|
10757
8920
|
`Expected location: ${schemasPath}`,
|
|
@@ -10885,8 +9048,8 @@ var generateCommand = new Command("generate").description("Generate TypeScript t
|
|
|
10885
9048
|
var listCommand = new Command("list").description("List managed schemas from drizzle.config.ts").option("--sql", "Output as SQL-compatible string for IN clauses").option("--json", "Output as JSON array").action(async (options) => {
|
|
10886
9049
|
const logger15 = createCLILogger("db:schema:list");
|
|
10887
9050
|
try {
|
|
10888
|
-
const dbPackagePath =
|
|
10889
|
-
if (!existsSync(
|
|
9051
|
+
const dbPackagePath = path6.join(process.cwd(), "packages", "database");
|
|
9052
|
+
if (!existsSync(path6.join(dbPackagePath, "drizzle.config.ts"))) {
|
|
10890
9053
|
throw new CLIError("drizzle.config.ts not found", "CONFIG_NOT_FOUND", [
|
|
10891
9054
|
`Expected location: ${dbPackagePath}/drizzle.config.ts`,
|
|
10892
9055
|
"Ensure you are in the project root",
|
|
@@ -11096,8 +9259,8 @@ var seedValidateCommand = new Command("validate").description("Validate seed SQL
|
|
|
11096
9259
|
const output = await dbSeedValidate({});
|
|
11097
9260
|
emitJsonSuccess(seedValidateCommand, DbSeedValidateOutputSchema, output);
|
|
11098
9261
|
if (!output.valid) {
|
|
11099
|
-
const
|
|
11100
|
-
throw new CLIError("Seed validation failed", "SEED_VALIDATION_FAILED",
|
|
9262
|
+
const details = output.missingFiles ? [`Missing: ${output.missingFiles.join(", ")}`] : output.violations && output.violations.length > 0 ? output.violations : ["Invalid SQL content"];
|
|
9263
|
+
throw new CLIError("Seed validation failed", "SEED_VALIDATION_FAILED", details);
|
|
11101
9264
|
}
|
|
11102
9265
|
} catch (error) {
|
|
11103
9266
|
if (error instanceof CLIError) {
|
|
@@ -12789,7 +10952,7 @@ function loadPolicyFromFile(policyFile) {
|
|
|
12789
10952
|
}
|
|
12790
10953
|
}
|
|
12791
10954
|
function loadBoundaryPolicy(projectRoot, policyPath) {
|
|
12792
|
-
const policyFile = policyPath ??
|
|
10955
|
+
const policyFile = policyPath ?? path6.join(projectRoot, "supabase", "schemas", BOUNDARY_POLICY_FILENAME);
|
|
12793
10956
|
if (!existsSync(policyFile)) {
|
|
12794
10957
|
return {
|
|
12795
10958
|
version: DEFAULT_POLICY_VERSION,
|
|
@@ -12817,7 +10980,7 @@ init_esm_shims();
|
|
|
12817
10980
|
var riskDetectorLoader = null;
|
|
12818
10981
|
function loadRiskDetectorModule() {
|
|
12819
10982
|
if (!riskDetectorLoader) {
|
|
12820
|
-
riskDetectorLoader = import('./risk-detector-
|
|
10983
|
+
riskDetectorLoader = import('./risk-detector-4U6ZJ2G5.js').then((module) => ({
|
|
12821
10984
|
detectSchemaRisks: module.detectSchemaRisks
|
|
12822
10985
|
})).catch((error) => {
|
|
12823
10986
|
riskDetectorLoader = null;
|
|
@@ -12831,7 +10994,7 @@ function loadRiskDetectorModule() {
|
|
|
12831
10994
|
init_esm_shims();
|
|
12832
10995
|
var boundaryPolicyCache = /* @__PURE__ */ new Map();
|
|
12833
10996
|
function getBoundaryPolicy(cwd = process.cwd()) {
|
|
12834
|
-
const resolved =
|
|
10997
|
+
const resolved = path6.resolve(cwd);
|
|
12835
10998
|
let cached = boundaryPolicyCache.get(resolved);
|
|
12836
10999
|
if (!cached) {
|
|
12837
11000
|
cached = loadBoundaryPolicy(cwd);
|
|
@@ -12978,7 +11141,7 @@ function shouldAbortSchemaPrecheckForBudget(state, filePath) {
|
|
|
12978
11141
|
)}`;
|
|
12979
11142
|
}
|
|
12980
11143
|
if (state.scannedFiles + 1 > state.maxFiles) {
|
|
12981
|
-
return `Schema file scan budget exceeds ${state.maxFiles} files at ${
|
|
11144
|
+
return `Schema file scan budget exceeds ${state.maxFiles} files at ${path6.basename(filePath)}.`;
|
|
12982
11145
|
}
|
|
12983
11146
|
const projectedBytes = state.scannedBytes + size;
|
|
12984
11147
|
if (projectedBytes > state.maxBytes) {
|
|
@@ -13000,7 +11163,7 @@ function* collectSqlFilesRecursively(baseDir) {
|
|
|
13000
11163
|
try {
|
|
13001
11164
|
const entries = readdirSync(currentDir, { withFileTypes: true });
|
|
13002
11165
|
for (const entry of entries) {
|
|
13003
|
-
const fullPath =
|
|
11166
|
+
const fullPath = path6.join(currentDir, entry.name);
|
|
13004
11167
|
if (entry.isDirectory()) {
|
|
13005
11168
|
queue.push(fullPath);
|
|
13006
11169
|
continue;
|
|
@@ -14233,7 +12396,7 @@ function classifyIdempotentMisplacementRisk(file, content, boundaryPolicy) {
|
|
|
14233
12396
|
}
|
|
14234
12397
|
function classifyFileMisplacementRisks(params) {
|
|
14235
12398
|
const risks = [];
|
|
14236
|
-
const
|
|
12399
|
+
const relative2 = path6.relative(process.cwd(), params.file);
|
|
14237
12400
|
const normalized = normalizeSqlForPlacementCheck(params.content);
|
|
14238
12401
|
const statements = splitSqlStatements(normalized);
|
|
14239
12402
|
const seenMessages = /* @__PURE__ */ new Set();
|
|
@@ -14241,13 +12404,13 @@ function classifyFileMisplacementRisks(params) {
|
|
|
14241
12404
|
const candidates = collectRuleBasedCandidates({
|
|
14242
12405
|
statement,
|
|
14243
12406
|
line,
|
|
14244
|
-
file:
|
|
12407
|
+
file: relative2,
|
|
14245
12408
|
rules: params.rules
|
|
14246
12409
|
});
|
|
14247
12410
|
const unknownObjectRisk = maybeCollectUnknownObjectBoundaryRisk({
|
|
14248
12411
|
statement,
|
|
14249
12412
|
line,
|
|
14250
|
-
file:
|
|
12413
|
+
file: relative2,
|
|
14251
12414
|
fileType: params.fileType,
|
|
14252
12415
|
boundaryPolicy: params.boundaryPolicy,
|
|
14253
12416
|
resolver: params.resolver,
|
|
@@ -14313,6 +12476,93 @@ function classifyPlanStatementHazards(statement) {
|
|
|
14313
12476
|
return risks;
|
|
14314
12477
|
}
|
|
14315
12478
|
|
|
12479
|
+
// src/commands/db/commands/db-sync/error-classifier.ts
|
|
12480
|
+
init_esm_shims();
|
|
12481
|
+
var DNS_RESOLUTION_PATTERNS = [
|
|
12482
|
+
"could not translate host name",
|
|
12483
|
+
"could not resolve host",
|
|
12484
|
+
"name or service not known",
|
|
12485
|
+
"nodename nor servname provided",
|
|
12486
|
+
"temporary failure in name resolution"
|
|
12487
|
+
];
|
|
12488
|
+
var CONNECTION_PATTERNS = [
|
|
12489
|
+
"econnrefused",
|
|
12490
|
+
"connection refused",
|
|
12491
|
+
"could not connect to server",
|
|
12492
|
+
"connection timed out",
|
|
12493
|
+
"timeout expired",
|
|
12494
|
+
"no pg_hba.conf entry",
|
|
12495
|
+
"the database system is starting up",
|
|
12496
|
+
"the database system is shutting down"
|
|
12497
|
+
];
|
|
12498
|
+
var FALLBACK_SUGGESTIONS = [
|
|
12499
|
+
"Check database connectivity",
|
|
12500
|
+
"Verify psql is installed and accessible",
|
|
12501
|
+
"Ensure packages/database exists"
|
|
12502
|
+
];
|
|
12503
|
+
function getEnvironmentLabel(environment) {
|
|
12504
|
+
switch (environment) {
|
|
12505
|
+
case "production":
|
|
12506
|
+
return "production";
|
|
12507
|
+
case "main":
|
|
12508
|
+
return "main";
|
|
12509
|
+
case "preview":
|
|
12510
|
+
return "preview";
|
|
12511
|
+
case "local":
|
|
12512
|
+
return "local";
|
|
12513
|
+
}
|
|
12514
|
+
}
|
|
12515
|
+
function buildCombinedMessage(error) {
|
|
12516
|
+
if (isExecaError(error)) {
|
|
12517
|
+
return [error.message, error.stderr, error.stdout].filter(Boolean).join("\n");
|
|
12518
|
+
}
|
|
12519
|
+
if (error instanceof Error) {
|
|
12520
|
+
return error.message;
|
|
12521
|
+
}
|
|
12522
|
+
return String(error);
|
|
12523
|
+
}
|
|
12524
|
+
function findRelevantLine(message, patterns) {
|
|
12525
|
+
const lines = message.split("\n").map((line) => line.trim()).filter((line) => line.length > 0);
|
|
12526
|
+
const matched = lines.find((line) => {
|
|
12527
|
+
const lower = line.toLowerCase();
|
|
12528
|
+
return patterns.some((pattern) => lower.includes(pattern));
|
|
12529
|
+
});
|
|
12530
|
+
return matched ?? null;
|
|
12531
|
+
}
|
|
12532
|
+
function classifyDbSyncCommandFailure(error, environment) {
|
|
12533
|
+
const message = buildCombinedMessage(error);
|
|
12534
|
+
const messageLower = message.toLowerCase();
|
|
12535
|
+
const environmentLabel = getEnvironmentLabel(environment);
|
|
12536
|
+
if (DNS_RESOLUTION_PATTERNS.some((pattern) => messageLower.includes(pattern))) {
|
|
12537
|
+
const detail = findRelevantLine(message, DNS_RESOLUTION_PATTERNS);
|
|
12538
|
+
return {
|
|
12539
|
+
code: "DB_HOST_RESOLUTION_FAILED",
|
|
12540
|
+
message: detail ? `Could not resolve the ${environmentLabel} database host: ${detail}` : `Could not resolve the ${environmentLabel} database host`,
|
|
12541
|
+
suggestions: [
|
|
12542
|
+
"Verify the database host in DATABASE_URL / DATABASE_URL_ADMIN",
|
|
12543
|
+
"Check DNS resolution and outbound network access to the database host",
|
|
12544
|
+
"Re-run from an environment that can reach the target database"
|
|
12545
|
+
]
|
|
12546
|
+
};
|
|
12547
|
+
}
|
|
12548
|
+
if (CONNECTION_PATTERNS.some((pattern) => messageLower.includes(pattern))) {
|
|
12549
|
+
const detail = findRelevantLine(message, CONNECTION_PATTERNS);
|
|
12550
|
+
return {
|
|
12551
|
+
code: "DB_CONNECTION_FAILED",
|
|
12552
|
+
message: detail ? `Could not connect to the ${environmentLabel} database: ${detail}` : `Could not connect to the ${environmentLabel} database`,
|
|
12553
|
+
suggestions: [
|
|
12554
|
+
"Verify the database is reachable from this environment",
|
|
12555
|
+
"Check DATABASE_URL / DATABASE_URL_ADMIN credentials and firewall settings",
|
|
12556
|
+
"Retry after confirming the target database is accepting connections"
|
|
12557
|
+
]
|
|
12558
|
+
};
|
|
12559
|
+
}
|
|
12560
|
+
return null;
|
|
12561
|
+
}
|
|
12562
|
+
function getDbSyncFallbackSuggestions() {
|
|
12563
|
+
return [...FALLBACK_SUGGESTIONS];
|
|
12564
|
+
}
|
|
12565
|
+
|
|
14316
12566
|
// src/commands/db/commands/db-sync.ts
|
|
14317
12567
|
var SHOW_ALLOWLIST_REPORT = process.env.RUNA_DB_PRECHECK_ALLOWLIST_REPORT === "1";
|
|
14318
12568
|
var DIRECTORY_PLACEMENT_WARNING_PREFIX = " [misplacement] ";
|
|
@@ -14586,8 +12836,8 @@ async function collectPlanBoundaryReconciliationReport(planSql) {
|
|
|
14586
12836
|
}
|
|
14587
12837
|
async function collectDirectoryPlacementReport() {
|
|
14588
12838
|
const boundaryPolicy = getBoundaryPolicy();
|
|
14589
|
-
const declarativeDir =
|
|
14590
|
-
const idempotentDir =
|
|
12839
|
+
const declarativeDir = path6.join(process.cwd(), "supabase", "schemas", "declarative");
|
|
12840
|
+
const idempotentDir = path6.join(process.cwd(), "supabase", "schemas", "idempotent");
|
|
14591
12841
|
const blockers = [];
|
|
14592
12842
|
const warnings = [];
|
|
14593
12843
|
const allowlist = [];
|
|
@@ -14809,9 +13059,9 @@ function formatCollectedDeclarativeRisks(collected, allowlist) {
|
|
|
14809
13059
|
allowlist: dedupeAndSort(allowlist)
|
|
14810
13060
|
};
|
|
14811
13061
|
}
|
|
14812
|
-
function formatCollectedIdempotentRisks(collected) {
|
|
13062
|
+
function formatCollectedIdempotentRisks(collected, allowlist) {
|
|
14813
13063
|
if (collected.length === 0) {
|
|
14814
|
-
return { blockers: [], warnings: [] };
|
|
13064
|
+
return { blockers: [], warnings: [], allowlist: dedupeAndSort(allowlist) };
|
|
14815
13065
|
}
|
|
14816
13066
|
const high = collected.filter((r) => r.level === "high");
|
|
14817
13067
|
const medium = collected.filter((r) => r.level === "medium");
|
|
@@ -14825,7 +13075,8 @@ function formatCollectedIdempotentRisks(collected) {
|
|
|
14825
13075
|
}
|
|
14826
13076
|
return {
|
|
14827
13077
|
blockers: dedupeAndSort(blockers),
|
|
14828
|
-
warnings: dedupeAndSort(warnings)
|
|
13078
|
+
warnings: dedupeAndSort(warnings),
|
|
13079
|
+
allowlist: dedupeAndSort(allowlist)
|
|
14829
13080
|
};
|
|
14830
13081
|
}
|
|
14831
13082
|
function collectDeclarativeRiskItemsForFile(params) {
|
|
@@ -14871,7 +13122,7 @@ async function processDeclarativeRiskFile(params) {
|
|
|
14871
13122
|
report: { ...detectorResult.report, allowlist: detectorResult.report.allowlist ?? [] }
|
|
14872
13123
|
};
|
|
14873
13124
|
}
|
|
14874
|
-
const relPath =
|
|
13125
|
+
const relPath = path6.relative(process.cwd(), params.file);
|
|
14875
13126
|
const risks = await detectorResult.detector(params.file);
|
|
14876
13127
|
const scopedRisks = risks.map((risk) => ({ ...risk, file: relPath }));
|
|
14877
13128
|
collectDeclarativeRiskItemsForFile({
|
|
@@ -14883,7 +13134,7 @@ async function processDeclarativeRiskFile(params) {
|
|
|
14883
13134
|
return { kind: "ok", detector: detectorResult.detector };
|
|
14884
13135
|
}
|
|
14885
13136
|
async function collectDeclarativeRiskReport() {
|
|
14886
|
-
const declarativeDir =
|
|
13137
|
+
const declarativeDir = path6.join(process.cwd(), "supabase", "schemas", "declarative");
|
|
14887
13138
|
if (!existsSync(declarativeDir)) {
|
|
14888
13139
|
return {
|
|
14889
13140
|
blockers: [],
|
|
@@ -14917,13 +13168,15 @@ async function collectDeclarativeRiskReport() {
|
|
|
14917
13168
|
return formatCollectedDeclarativeRisks(collected, allowlist);
|
|
14918
13169
|
}
|
|
14919
13170
|
async function collectIdempotentRiskReport() {
|
|
14920
|
-
const idempotentDir =
|
|
13171
|
+
const idempotentDir = path6.join(process.cwd(), "supabase", "schemas", "idempotent");
|
|
14921
13172
|
if (!existsSync(idempotentDir)) {
|
|
14922
|
-
return { blockers: [], warnings: [] };
|
|
13173
|
+
return { blockers: [], warnings: [], allowlist: [] };
|
|
14923
13174
|
}
|
|
13175
|
+
const policy = getBoundaryPolicy();
|
|
14924
13176
|
const sqlFiles = collectSqlFilesRecursively(idempotentDir);
|
|
14925
13177
|
let hasSqlFile = false;
|
|
14926
13178
|
const collected = [];
|
|
13179
|
+
const allowlist = [];
|
|
14927
13180
|
let detectSchemaRisks2;
|
|
14928
13181
|
const scanBudget = createSchemaPrecheckBudgetState();
|
|
14929
13182
|
for (const file of sqlFiles) {
|
|
@@ -14932,7 +13185,9 @@ async function collectIdempotentRiskReport() {
|
|
|
14932
13185
|
reason: shouldAbortSchemaPrecheckForBudget(scanBudget, file) ?? void 0,
|
|
14933
13186
|
scanBudget
|
|
14934
13187
|
});
|
|
14935
|
-
if (budgetAbortReport)
|
|
13188
|
+
if (budgetAbortReport) {
|
|
13189
|
+
return { ...budgetAbortReport, allowlist: budgetAbortReport.allowlist ?? [] };
|
|
13190
|
+
}
|
|
14936
13191
|
hasSqlFile = true;
|
|
14937
13192
|
const detectorResult = await ensureRiskDetectorOrReport({
|
|
14938
13193
|
detector: detectSchemaRisks2,
|
|
@@ -14940,21 +13195,36 @@ async function collectIdempotentRiskReport() {
|
|
|
14940
13195
|
onUninitialized: buildIdempotentRiskUninitializedReport
|
|
14941
13196
|
});
|
|
14942
13197
|
if (detectorResult.kind === "report") {
|
|
14943
|
-
return detectorResult.report;
|
|
13198
|
+
return { ...detectorResult.report, allowlist: detectorResult.report.allowlist ?? [] };
|
|
14944
13199
|
}
|
|
14945
13200
|
detectSchemaRisks2 = detectorResult.detector;
|
|
14946
13201
|
const risks = await detectSchemaRisks2(file);
|
|
14947
13202
|
if (risks.length === 0) continue;
|
|
14948
|
-
const relPath =
|
|
13203
|
+
const relPath = path6.relative(process.cwd(), file);
|
|
14949
13204
|
for (const risk of risks) {
|
|
14950
13205
|
const level = correctIdempotentRiskLevel(risk.level, risk.reasonCode);
|
|
14951
|
-
|
|
13206
|
+
const scopedRisk = { ...risk, level, file: relPath };
|
|
13207
|
+
const matched = findDeclarativeRiskAllowlistMatch(scopedRisk, policy);
|
|
13208
|
+
if (matched) {
|
|
13209
|
+
if (SHOW_ALLOWLIST_REPORT) {
|
|
13210
|
+
allowlist.push(
|
|
13211
|
+
formatAllowlistReason({
|
|
13212
|
+
label: "idempotent-risk",
|
|
13213
|
+
ruleId: matched.id,
|
|
13214
|
+
rule: matched,
|
|
13215
|
+
reason: `${scopedRisk.file}: ${scopedRisk.description} (${matched.reason})`
|
|
13216
|
+
})
|
|
13217
|
+
);
|
|
13218
|
+
}
|
|
13219
|
+
continue;
|
|
13220
|
+
}
|
|
13221
|
+
collected.push(scopedRisk);
|
|
14952
13222
|
}
|
|
14953
13223
|
}
|
|
14954
13224
|
if (!hasSqlFile || collected.length === 0) {
|
|
14955
|
-
return { blockers: [], warnings: [] };
|
|
13225
|
+
return { blockers: [], warnings: [], allowlist: dedupeAndSort(allowlist) };
|
|
14956
13226
|
}
|
|
14957
|
-
return formatCollectedIdempotentRisks(collected);
|
|
13227
|
+
return formatCollectedIdempotentRisks(collected, allowlist);
|
|
14958
13228
|
}
|
|
14959
13229
|
function countSyncIssues(diff) {
|
|
14960
13230
|
return diff.missingTables.length + diff.orphanTables.length + diff.missingEnums.length + diff.extraEnums.length + diff.enumValueMismatches.length;
|
|
@@ -15375,14 +13645,19 @@ async function runSyncAction(env, options) {
|
|
|
15375
13645
|
throwSchemaOutOfSyncError();
|
|
15376
13646
|
} catch (error) {
|
|
15377
13647
|
if (error instanceof CLIError) throw error;
|
|
13648
|
+
const classifiedFailure = classifyDbSyncCommandFailure(error, runaEnv);
|
|
13649
|
+
if (classifiedFailure) {
|
|
13650
|
+
throw new CLIError(
|
|
13651
|
+
classifiedFailure.message,
|
|
13652
|
+
classifiedFailure.code,
|
|
13653
|
+
classifiedFailure.suggestions,
|
|
13654
|
+
error instanceof Error ? error : void 0
|
|
13655
|
+
);
|
|
13656
|
+
}
|
|
15378
13657
|
throw new CLIError(
|
|
15379
13658
|
"Schema sync check failed",
|
|
15380
13659
|
"DB_SYNC_FAILED",
|
|
15381
|
-
|
|
15382
|
-
"Check database connectivity",
|
|
15383
|
-
"Verify psql is installed and accessible",
|
|
15384
|
-
"Ensure packages/database exists"
|
|
15385
|
-
],
|
|
13660
|
+
getDbSyncFallbackSuggestions(),
|
|
15386
13661
|
error instanceof Error ? error : void 0
|
|
15387
13662
|
);
|
|
15388
13663
|
}
|
|
@@ -15406,7 +13681,7 @@ var testGenCommand = new Command("test:gen").description("Generate pgTAP behavio
|
|
|
15406
13681
|
const databaseUrl = options.db || process.env.DATABASE_URL || getLocalDbUrl();
|
|
15407
13682
|
const schemas = options.schemas ? options.schemas.split(",") : void 0;
|
|
15408
13683
|
const dbPackage = databasePaths.package();
|
|
15409
|
-
const defaultOutputPath =
|
|
13684
|
+
const defaultOutputPath = path6.join(dbPackage, "tests/00_behavior.generated.test.sql");
|
|
15410
13685
|
const outputPath = options.output || defaultOutputPath;
|
|
15411
13686
|
spinner.text = "Generating pgTAP behavior tests...";
|
|
15412
13687
|
const result = await dbGeneratePgTapTests({
|
|
@@ -15651,6 +13926,15 @@ function printResults(result, logger15) {
|
|
|
15651
13926
|
console.log("");
|
|
15652
13927
|
return;
|
|
15653
13928
|
}
|
|
13929
|
+
if (result.error) {
|
|
13930
|
+
console.log("\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501");
|
|
13931
|
+
logger15.error("DATA QUALITY CHECK COULD NOT BE COMPLETED");
|
|
13932
|
+
console.log("\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501");
|
|
13933
|
+
console.log("");
|
|
13934
|
+
logger15.error(result.error);
|
|
13935
|
+
console.log("");
|
|
13936
|
+
return;
|
|
13937
|
+
}
|
|
15654
13938
|
console.log("\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501");
|
|
15655
13939
|
logger15.error("DATA QUALITY CHECK FAILED");
|
|
15656
13940
|
console.log("\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501");
|
|
@@ -15802,15 +14086,15 @@ async function findScriptPath(scriptKey) {
|
|
|
15802
14086
|
const scriptConfig = SCRIPT_LOCATIONS[scriptKey];
|
|
15803
14087
|
const relativePath = scriptConfig.relativePath;
|
|
15804
14088
|
const workspaceRoot = findWorkspaceRoot() || process.cwd();
|
|
15805
|
-
const directPath =
|
|
14089
|
+
const directPath = path6.join(workspaceRoot, relativePath);
|
|
15806
14090
|
if (existsSync(directPath)) {
|
|
15807
14091
|
return directPath;
|
|
15808
14092
|
}
|
|
15809
14093
|
if (relativePath.includes("sdk/scripts") || relativePath.includes("packages/sdk")) {
|
|
15810
14094
|
try {
|
|
15811
14095
|
const sdkScriptsPath = await getSDKScriptsPath();
|
|
15812
|
-
const scriptName =
|
|
15813
|
-
const sdkScriptPath =
|
|
14096
|
+
const scriptName = path6.basename(relativePath);
|
|
14097
|
+
const sdkScriptPath = path6.join(sdkScriptsPath, scriptName);
|
|
15814
14098
|
if (existsSync(sdkScriptPath)) {
|
|
15815
14099
|
return sdkScriptPath;
|
|
15816
14100
|
}
|
|
@@ -15827,7 +14111,7 @@ async function runSupabaseScript(scriptKey, args = [], options = {}) {
|
|
|
15827
14111
|
}
|
|
15828
14112
|
try {
|
|
15829
14113
|
const result = await execa(scriptPath, args, {
|
|
15830
|
-
cwd: options.cwd ||
|
|
14114
|
+
cwd: options.cwd || path6.dirname(scriptPath),
|
|
15831
14115
|
stdio: options.stdio || "pipe",
|
|
15832
14116
|
timeout: options.timeout,
|
|
15833
14117
|
env: {
|