@runa-ai/runa-cli 0.5.53 → 0.5.57
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/build/machine.d.ts +6 -4
- package/dist/commands/build/machine.d.ts.map +1 -1
- package/dist/commands/ci/machine/actors/db/sync-schema.d.ts.map +1 -1
- package/dist/commands/db/commands/db-drizzle.d.ts.map +1 -1
- package/dist/commands/db/commands/db-lifecycle.d.ts.map +1 -1
- package/dist/commands/db/commands/db-seed-metadata.d.ts.map +1 -1
- package/dist/commands/db/sync/actors.d.ts +1 -0
- package/dist/commands/db/sync/actors.d.ts.map +1 -1
- package/dist/commands/db/sync/contract.d.ts +4 -1
- package/dist/commands/db/sync/contract.d.ts.map +1 -1
- package/dist/commands/db/sync/machine.d.ts +1 -0
- package/dist/commands/db/sync/machine.d.ts.map +1 -1
- package/dist/commands/db/types.d.ts +2 -0
- package/dist/commands/db/types.d.ts.map +1 -1
- package/dist/commands/db/utils/table-registry.d.ts +4 -0
- package/dist/commands/db/utils/table-registry.d.ts.map +1 -1
- package/dist/commands/db/utils/table-source-classifier.d.ts +28 -0
- package/dist/commands/db/utils/table-source-classifier.d.ts.map +1 -0
- package/dist/commands/dev/machine.d.ts +5 -4
- package/dist/commands/dev/machine.d.ts.map +1 -1
- package/dist/commands/env/commands/env-pull.d.ts.map +1 -1
- package/dist/commands/env/constants/local-supabase.d.ts +4 -2
- package/dist/commands/env/constants/local-supabase.d.ts.map +1 -1
- package/dist/commands/hotfix/metadata.d.ts.map +1 -1
- package/dist/commands/inject-test-attrs/processor-utils.d.ts.map +1 -1
- package/dist/commands/inject-test-attrs/processor.d.ts.map +1 -1
- package/dist/constants/versions.d.ts +1 -1
- package/dist/errors/catalog.d.ts +8 -0
- package/dist/errors/catalog.d.ts.map +1 -1
- package/dist/index.js +733 -412
- package/dist/utils/path-security.d.ts +18 -0
- package/dist/utils/path-security.d.ts.map +1 -1
- package/dist/utils/port-allocator.d.ts +34 -37
- package/dist/utils/port-allocator.d.ts.map +1 -1
- package/package.json +8 -8
package/dist/index.js
CHANGED
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
import { createRequire } from 'module';
|
|
3
3
|
import * as path11 from 'path';
|
|
4
|
-
import path11__default, { join, dirname, resolve,
|
|
4
|
+
import path11__default, { join, dirname, resolve, isAbsolute, relative, sep, basename, normalize } from 'path';
|
|
5
5
|
import { fileURLToPath } from 'url';
|
|
6
6
|
import * as fs5 from 'fs';
|
|
7
|
-
import fs5__default, { existsSync, readFileSync, readdirSync, mkdtempSync, writeFileSync, mkdirSync, copyFileSync, createWriteStream, statSync,
|
|
7
|
+
import fs5__default, { existsSync, rmSync, readFileSync, readdirSync, mkdtempSync, writeFileSync, mkdirSync, copyFileSync, createWriteStream, statSync, realpathSync, promises, lstatSync, accessSync, constants, chmodSync, unlinkSync } from 'fs';
|
|
8
8
|
import { execSync, spawnSync, execFileSync, exec, spawn } from 'child_process';
|
|
9
|
-
import { createCLILogger, cacheClear, CacheClearOutputSchema, CLIError, cachePrune, CachePruneOutputSchema, cacheStats, CacheStatsOutputSchema, cacheList, CacheListOutputSchema, cacheInvalidate, CacheInvalidateOutputSchema, syncFromProduction, dbGenerateDiagram, DbDiagramGenerateOutputSchema, createDbSnapshot, syncDatabase, emitDbPushFailureCapsule, emitDbAnnotations, writeDbPushStepSummary, exportDbReportJson, DbSyncOutputSchema, databasePaths, detectRequiredServices, formatDetectionResults, dbStart, DbLifecycleStartOutputSchema, dbStop, DbLifecycleStopOutputSchema, dbReset, DbLifecycleResetOutputSchema, dbValidateSchemas, DbSchemaValidateOutputSchema, DbSchemaRisksOutputSchema, dbDetectSchemaRisks, dbApplySchemas, DbSchemaApplyOutputSchema, dbGenerateTypes, DbSchemaGenerateOutputSchema, extractSchemaFilter, dbSeedInit, DbSeedInitOutputSchema, dbSeedValidate, DbSeedValidateOutputSchema, dbSeedGenerate, DbSeedGenerateOutputSchema, dbVerifySeeds, DbSeedVerifyOutputSchema, DbSnapshotCreateOutputSchema, restoreDbSnapshot, DbSnapshotRestoreOutputSchema, listDbSnapshots, DbSnapshotListOutputSchema, dbGeneratePgTapTests, DbTestGenOutputSchema, dbUpdateGoldenRecord, DbTestUpdateGoldenOutputSchema, repairRunaConfig, detectExistingInitConfig, initProject, validateInitResult, linkCliGlobally, LinkCliOutputSchema, unlinkCliGlobally, UnlinkCliOutputSchema, checkRepoStatus, CheckRepoStatusOutputSchema, enableTelemetry, disableTelemetry, getTelemetryStatus, uploadTelemetry, TelemetryUploadOutputSchema, runTest, TestRunOutputSchema, runTestService, TestServiceOutputSchema, runTestIntegration, TestIntegrationOutputSchema, runTestStatic, TestStaticOutputSchema, generateOwaspTop10Tests, TestOwaspGenerateOutputSchema, updateGoldenRecord, generateE2ETests, generateSecurityTests, generateUnitTests, generateApiTests, generateComponentTests, generateE2EScaffold, validateConfig, ValidateConfigOutputSchema, deploySchemaToProduction, WorkflowNotifyOutputSchema, devopsSync, workflowSync, 
validateInfrastructure, emitWorkflowValidateFailureCapsule, emitWorkflowAnnotations, writeWorkflowValidateStepSummary, exportWorkflowReportJson, WorkflowValidateInfrastructureOutputSchema, createSuccessEnvelopeSchema, CLI_CONTRACT_VERSION, runChecks, RunCheckOutputSchema, formatDuration as formatDuration$1, GITHUB_API, loadRunaConfig, getClassificationForProfile, loadRunaConfigOrThrow, recordSchemaAudit, RecordSchemaAuditOutputSchema, createBackup, CreateBackupOutputSchema, listBackups, ListBackupsOutputSchema, getBackupMetadata, restoreBackup, RestoreBackupOutputSchema, deleteBackup, DeleteBackupOutputSchema, detectSchemaNames,
|
|
9
|
+
import { createCLILogger, cacheClear, CacheClearOutputSchema, CLIError, cachePrune, CachePruneOutputSchema, cacheStats, CacheStatsOutputSchema, cacheList, CacheListOutputSchema, cacheInvalidate, CacheInvalidateOutputSchema, syncFromProduction, SUPABASE_SYSTEM_SCHEMAS, dbGenerateDiagram, DbDiagramGenerateOutputSchema, createDbSnapshot, syncDatabase, emitDbPushFailureCapsule, emitDbAnnotations, writeDbPushStepSummary, exportDbReportJson, DbSyncOutputSchema, databasePaths, detectRequiredServices, formatDetectionResults, dbStart, DbLifecycleStartOutputSchema, dbStop, DbLifecycleStopOutputSchema, dbReset, DbLifecycleResetOutputSchema, dbValidateSchemas, DbSchemaValidateOutputSchema, DbSchemaRisksOutputSchema, dbDetectSchemaRisks, dbApplySchemas, DbSchemaApplyOutputSchema, dbGenerateTypes, DbSchemaGenerateOutputSchema, extractSchemaFilter, dbSeedInit, DbSeedInitOutputSchema, dbSeedValidate, DbSeedValidateOutputSchema, dbSeedGenerate, DbSeedGenerateOutputSchema, dbVerifySeeds, DbSeedVerifyOutputSchema, DbSnapshotCreateOutputSchema, restoreDbSnapshot, DbSnapshotRestoreOutputSchema, listDbSnapshots, DbSnapshotListOutputSchema, dbGeneratePgTapTests, DbTestGenOutputSchema, dbUpdateGoldenRecord, DbTestUpdateGoldenOutputSchema, repairRunaConfig, detectExistingInitConfig, initProject, validateInitResult, linkCliGlobally, LinkCliOutputSchema, unlinkCliGlobally, UnlinkCliOutputSchema, checkRepoStatus, CheckRepoStatusOutputSchema, enableTelemetry, disableTelemetry, getTelemetryStatus, uploadTelemetry, TelemetryUploadOutputSchema, runTest, TestRunOutputSchema, runTestService, TestServiceOutputSchema, runTestIntegration, TestIntegrationOutputSchema, runTestStatic, TestStaticOutputSchema, generateOwaspTop10Tests, TestOwaspGenerateOutputSchema, updateGoldenRecord, generateE2ETests, generateSecurityTests, generateUnitTests, generateApiTests, generateComponentTests, generateE2EScaffold, validateConfig, ValidateConfigOutputSchema, deploySchemaToProduction, WorkflowNotifyOutputSchema, 
devopsSync, workflowSync, validateInfrastructure, emitWorkflowValidateFailureCapsule, emitWorkflowAnnotations, writeWorkflowValidateStepSummary, exportWorkflowReportJson, WorkflowValidateInfrastructureOutputSchema, createSuccessEnvelopeSchema, CLI_CONTRACT_VERSION, runChecks, RunCheckOutputSchema, formatDuration as formatDuration$1, GITHUB_API, loadRunaConfig, getClassificationForProfile, BASE_PORTS, loadRunaConfigOrThrow, recordSchemaAudit, RecordSchemaAuditOutputSchema, createBackup, CreateBackupOutputSchema, listBackups, ListBackupsOutputSchema, getBackupMetadata, restoreBackup, RestoreBackupOutputSchema, deleteBackup, DeleteBackupOutputSchema, detectSchemaNames, resolveAvailablePorts, calculatePortOffset, dbSeedApply, writeDbSeedStepSummary, DbSeedApplyOutputSchema, emitDbSeedFailureCapsule, syncEnvironment, EnvSyncOutputSchema, detectDatabasePackage, findProjectRoot as findProjectRoot$1, TelemetryEnableOutputSchema, TelemetryDisableOutputSchema, TelemetryStatusOutputSchema, workflowNotify, DevOpsSyncOutputSchema, WorkflowSyncOutputSchema, formatCLIError, getStatusIcon as getStatusIcon$1, findWorkspaceRoot as findWorkspaceRoot$1, checkExtensionConfig, getPortsWithOffset, UpgradeTransaction, readRunaVersion, syncTemplates, SyncOutputSchema, DATABASE_PACKAGE_CANDIDATES, ErrorEnvelopeSchema, preCheckSync, findConflictFiles, TestUnitGenOutputSchema, TestE2EGenerateOutputSchema, TestSecurityGenOutputSchema, TestApiGenOutputSchema, TestComponentGenOutputSchema } from '@runa-ai/runa';
|
|
10
10
|
import { z } from 'zod';
|
|
11
11
|
import fs9, { mkdir, writeFile, appendFile, readFile, rm, stat, realpath, cp, readdir, lstat } from 'fs/promises';
|
|
12
12
|
import { promisify } from 'util';
|
|
@@ -24,7 +24,7 @@ import { expand } from 'dotenv-expand';
|
|
|
24
24
|
import { resolve4 } from 'dns/promises';
|
|
25
25
|
import { isIP } from 'net';
|
|
26
26
|
import postgres from 'postgres';
|
|
27
|
-
import crypto, { randomBytes
|
|
27
|
+
import crypto, { randomBytes } from 'crypto';
|
|
28
28
|
import os, { tmpdir } from 'os';
|
|
29
29
|
import { introspectDatabase, HonoRouteAnalyzer } from '@runa-ai/runa/test-generators';
|
|
30
30
|
import { isTable, getTableUniqueName, getTableName } from 'drizzle-orm';
|
|
@@ -33,7 +33,7 @@ import { createJiti } from 'jiti';
|
|
|
33
33
|
import ora from 'ora';
|
|
34
34
|
import { stdout, stdin } from 'process';
|
|
35
35
|
import * as readline from 'readline/promises';
|
|
36
|
-
import { clearInjectionRegistry, clearUnifiedRegistry, postProcessRegistries, isPageFile,
|
|
36
|
+
import { clearInjectionRegistry, clearUnifiedRegistry, postProcessRegistries, isPageFile, isLayoutFile, isApiRouteFile, isMiddlewareFile, collectPageInfo, collectLayoutInfo, collectApiRouteInfo, collectAuthBoundaries, hasMachineDefinition, collectMachineDefinition, collectComponentInfo, createReadAndParseFile, createResolveImportPath, transformSync, getInjectionRegistry, buildManifest, getAllMachineDefinitions, generateSelectorTypeScript, getUnifiedRegistry, buildMachineLinks, registerInjection } from '@runa-ai/runa-xstate-test-plugin/standalone';
|
|
37
37
|
import { listSessions, formatDuration as formatDuration$2, cleanupStaleSessions, removeSession, isSessionCheckDisabled, getCurrentSessionId, checkActiveSessions, createSession, addActivity, checkConflicts, formatConflictDetails } from '@runa-ai/runa/session';
|
|
38
38
|
import { render, Box, Text } from 'ink';
|
|
39
39
|
import Spinner from 'ink-spinner';
|
|
@@ -124,7 +124,7 @@ function extractPort(rawUrl) {
|
|
|
124
124
|
}
|
|
125
125
|
}
|
|
126
126
|
function parseTomlPort(content, section, key) {
|
|
127
|
-
const sectionRegex = new RegExp(`\\[${section}\\]([\\s\\S]*?)(?=\\n\\[|$)
|
|
127
|
+
const sectionRegex = new RegExp(`\\[${section}\\]([\\s\\S]*?)(?=\\n\\[|$)`);
|
|
128
128
|
const sectionMatch = sectionRegex.exec(content);
|
|
129
129
|
if (!sectionMatch) return null;
|
|
130
130
|
const sectionContent = sectionMatch[1];
|
|
@@ -287,9 +287,9 @@ var init_local_supabase = __esm({
|
|
|
287
287
|
"src/commands/env/constants/local-supabase.ts"() {
|
|
288
288
|
init_esm_shims();
|
|
289
289
|
DEFAULT_HOST = "127.0.0.1";
|
|
290
|
-
DEFAULT_API_PORT =
|
|
291
|
-
DEFAULT_DB_PORT =
|
|
292
|
-
DEFAULT_STUDIO_PORT =
|
|
290
|
+
DEFAULT_API_PORT = BASE_PORTS.api;
|
|
291
|
+
DEFAULT_DB_PORT = BASE_PORTS.db;
|
|
292
|
+
DEFAULT_STUDIO_PORT = BASE_PORTS.studio;
|
|
293
293
|
LOCAL_SUPABASE_ANON_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0";
|
|
294
294
|
LOCAL_SUPABASE_SERVICE_ROLE_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImV4cCI6MTk4MzgxMjk5Nn0.EGIM96RAZx35lJzdJsyH-qQwv8Hdp7fsn3W0YpN81IU";
|
|
295
295
|
LOCAL_SUPABASE_ENV_VALUES = getLocalSupabaseEnvValues();
|
|
@@ -1161,7 +1161,7 @@ var CLI_VERSION, HAS_ADMIN_COMMAND;
|
|
|
1161
1161
|
var init_version = __esm({
|
|
1162
1162
|
"src/version.ts"() {
|
|
1163
1163
|
init_esm_shims();
|
|
1164
|
-
CLI_VERSION = "0.5.
|
|
1164
|
+
CLI_VERSION = "0.5.57";
|
|
1165
1165
|
HAS_ADMIN_COMMAND = false;
|
|
1166
1166
|
}
|
|
1167
1167
|
});
|
|
@@ -4425,9 +4425,19 @@ var ERROR_CATALOG = {
|
|
|
4425
4425
|
title: "Docker is not running",
|
|
4426
4426
|
template: "Docker daemon is not running or not accessible",
|
|
4427
4427
|
suggestions: [
|
|
4428
|
-
"Start
|
|
4429
|
-
"
|
|
4430
|
-
|
|
4428
|
+
"Start Colima: colima start --cpu 4 --memory 8 --disk 60 --vm-type vz --mount-type virtiofs",
|
|
4429
|
+
"Install Colima: brew install colima docker"
|
|
4430
|
+
],
|
|
4431
|
+
docUrl: "https://runa.dev/docs/errors/docker"
|
|
4432
|
+
},
|
|
4433
|
+
DOCKER_DESKTOP_FORBIDDEN: {
|
|
4434
|
+
code: "ERR_RUNA_DOCKER_DESKTOP_FORBIDDEN",
|
|
4435
|
+
exitCode: EXIT_CODES.EXTERNAL_TOOL_ERROR,
|
|
4436
|
+
title: "Docker Desktop is not supported",
|
|
4437
|
+
template: "Docker Desktop is not supported. runa requires Colima.",
|
|
4438
|
+
suggestions: [
|
|
4439
|
+
"Install Colima: brew install colima docker",
|
|
4440
|
+
"Start Colima: colima start --cpu 4 --memory 8 --disk 60 --vm-type vz --mount-type virtiofs"
|
|
4431
4441
|
],
|
|
4432
4442
|
docUrl: "https://runa.dev/docs/errors/docker"
|
|
4433
4443
|
},
|
|
@@ -4441,7 +4451,7 @@ var ERROR_CATALOG = {
|
|
|
4441
4451
|
supabase: "brew install supabase/tap/supabase",
|
|
4442
4452
|
vercel: "pnpm add -g vercel",
|
|
4443
4453
|
gh: "brew install gh",
|
|
4444
|
-
docker: "Install
|
|
4454
|
+
docker: "Install Colima: brew install colima docker",
|
|
4445
4455
|
dotenvx: "pnpm add -g @dotenvx/dotenvx",
|
|
4446
4456
|
"pg-schema-diff": "brew install pg-schema-diff"
|
|
4447
4457
|
};
|
|
@@ -5290,7 +5300,7 @@ function emitJsonSuccess(cmd, dataSchema, data) {
|
|
|
5290
5300
|
init_esm_shims();
|
|
5291
5301
|
var BuildPhaseSchema = z.enum(["types", "lint", "build", "db", "manifest"]);
|
|
5292
5302
|
var VALID_BUILD_PHASES = ["types", "lint", "build", "db", "manifest"];
|
|
5293
|
-
z.object({
|
|
5303
|
+
var BuildInputSchema = z.object({
|
|
5294
5304
|
/** Enable E2E mode (NEXT_PUBLIC_E2E_TEST=true, TURBO_FORCE=true) */
|
|
5295
5305
|
e2e: z.boolean().default(false),
|
|
5296
5306
|
/** Clear build caches (.next, .turbo, .runa/manifests) */
|
|
@@ -6819,6 +6829,14 @@ var e2eMeta = {
|
|
|
6819
6829
|
nextStates: []
|
|
6820
6830
|
}
|
|
6821
6831
|
};
|
|
6832
|
+
function normalizeBuildMachineInput(input3) {
|
|
6833
|
+
const normalizedInput = BuildInputSchema.parse(input3?.input ?? {});
|
|
6834
|
+
const repoRoot = input3?.repoRoot ?? normalizedInput.targetDir ?? process.cwd();
|
|
6835
|
+
return {
|
|
6836
|
+
input: normalizedInput,
|
|
6837
|
+
repoRoot
|
|
6838
|
+
};
|
|
6839
|
+
}
|
|
6822
6840
|
var buildMachine = setup({
|
|
6823
6841
|
types: {},
|
|
6824
6842
|
actors: {
|
|
@@ -6852,9 +6870,10 @@ var buildMachine = setup({
|
|
|
6852
6870
|
id: "build",
|
|
6853
6871
|
initial: "idle",
|
|
6854
6872
|
context: ({ input: input3 }) => {
|
|
6855
|
-
const
|
|
6873
|
+
const normalizedInput = normalizeBuildMachineInput(input3);
|
|
6874
|
+
const repoRoot = normalizedInput.repoRoot;
|
|
6856
6875
|
return {
|
|
6857
|
-
input:
|
|
6876
|
+
input: normalizedInput.input,
|
|
6858
6877
|
repoRoot,
|
|
6859
6878
|
tmpDir: ".runa/tmp/build",
|
|
6860
6879
|
hasDatabase: detectDatabase(repoRoot),
|
|
@@ -7579,15 +7598,15 @@ function printSummary(logger16, output3) {
|
|
|
7579
7598
|
}
|
|
7580
7599
|
}
|
|
7581
7600
|
function findRepoRoot(startDir) {
|
|
7582
|
-
const { existsSync:
|
|
7601
|
+
const { existsSync: existsSync53, readFileSync: readFileSync29 } = __require("fs");
|
|
7583
7602
|
const { join: join23, dirname: dirname5 } = __require("path");
|
|
7584
7603
|
let current = startDir;
|
|
7585
7604
|
while (current !== dirname5(current)) {
|
|
7586
|
-
if (
|
|
7605
|
+
if (existsSync53(join23(current, "turbo.json"))) {
|
|
7587
7606
|
return current;
|
|
7588
7607
|
}
|
|
7589
7608
|
const pkgPath = join23(current, "package.json");
|
|
7590
|
-
if (
|
|
7609
|
+
if (existsSync53(pkgPath)) {
|
|
7591
7610
|
try {
|
|
7592
7611
|
const pkg = JSON.parse(readFileSync29(pkgPath, "utf-8"));
|
|
7593
7612
|
if (pkg.workspaces) {
|
|
@@ -7778,7 +7797,7 @@ init_esm_shims();
|
|
|
7778
7797
|
|
|
7779
7798
|
// src/commands/dev/contract.ts
|
|
7780
7799
|
init_esm_shims();
|
|
7781
|
-
z.object({
|
|
7800
|
+
var DevInputSchema = z.object({
|
|
7782
7801
|
/** Port for Next.js dev server (default: 3000) */
|
|
7783
7802
|
port: z.number().int().positive().default(3e3),
|
|
7784
7803
|
/** Skip Supabase start */
|
|
@@ -8170,48 +8189,6 @@ function determineAppCommand(mode, isMonorepo2, rootScripts, appScripts, repoRoo
|
|
|
8170
8189
|
useRootScript: false
|
|
8171
8190
|
};
|
|
8172
8191
|
}
|
|
8173
|
-
var NEXT_CRITICAL_FILES = ["routes-manifest.json", "build-manifest.json"];
|
|
8174
|
-
function cleanStaleNextDevState(appDir) {
|
|
8175
|
-
const nextDir = path11__default.join(appDir, ".next");
|
|
8176
|
-
if (!existsSync(nextDir)) {
|
|
8177
|
-
return { cleaned: false };
|
|
8178
|
-
}
|
|
8179
|
-
for (const file of NEXT_CRITICAL_FILES) {
|
|
8180
|
-
if (!existsSync(path11__default.join(nextDir, file))) {
|
|
8181
|
-
cleanNextDir(nextDir, `Missing ${file}`);
|
|
8182
|
-
return { cleaned: true, reason: `Missing ${file}` };
|
|
8183
|
-
}
|
|
8184
|
-
}
|
|
8185
|
-
const serverDir = path11__default.join(nextDir, "server");
|
|
8186
|
-
if (!existsSync(serverDir)) {
|
|
8187
|
-
try {
|
|
8188
|
-
const nextStat = statSync(nextDir);
|
|
8189
|
-
const ageHours = (Date.now() - nextStat.mtimeMs) / (1e3 * 60 * 60);
|
|
8190
|
-
if (ageHours > 1) {
|
|
8191
|
-
cleanNextDir(nextDir, "Stale .next without server directory");
|
|
8192
|
-
return { cleaned: true, reason: "Stale .next without server directory" };
|
|
8193
|
-
}
|
|
8194
|
-
} catch {
|
|
8195
|
-
}
|
|
8196
|
-
}
|
|
8197
|
-
return { cleaned: false };
|
|
8198
|
-
}
|
|
8199
|
-
function cleanNextDir(nextDir, reason) {
|
|
8200
|
-
console.log(`[runa] Stale .next detected: ${reason}`);
|
|
8201
|
-
console.log("[runa] Cleaning up .next directory...");
|
|
8202
|
-
try {
|
|
8203
|
-
rmSync(nextDir, { recursive: true, force: true, maxRetries: 10, retryDelay: 100 });
|
|
8204
|
-
console.log("[runa] Cleanup complete");
|
|
8205
|
-
} catch {
|
|
8206
|
-
const staleDir = `${nextDir}-stale-${Date.now()}`;
|
|
8207
|
-
console.log(`[runa] Could not remove .next, quarantining to ${path11__default.basename(staleDir)}`);
|
|
8208
|
-
try {
|
|
8209
|
-
renameSync(nextDir, staleDir);
|
|
8210
|
-
} catch {
|
|
8211
|
-
console.warn("[runa] Failed to quarantine .next. Run: rm -rf .next");
|
|
8212
|
-
}
|
|
8213
|
-
}
|
|
8214
|
-
}
|
|
8215
8192
|
async function startAppBackground(params) {
|
|
8216
8193
|
const mode = params.mode ?? "start";
|
|
8217
8194
|
const isMonorepo2 = params.appDir !== params.repoRoot;
|
|
@@ -8405,7 +8382,11 @@ var appStartActor = fromPromise(
|
|
|
8405
8382
|
const { repoRoot, appDir, port, tmpDir, stream } = input3;
|
|
8406
8383
|
const fullTmpDir = path11__default.join(repoRoot, tmpDir);
|
|
8407
8384
|
await mkdir(fullTmpDir, { recursive: true });
|
|
8408
|
-
|
|
8385
|
+
const nextDir = path11__default.join(appDir, ".next");
|
|
8386
|
+
if (existsSync(nextDir)) {
|
|
8387
|
+
rmSync(nextDir, { recursive: true, force: true });
|
|
8388
|
+
console.log("[runa dev] Cleaned .next cache for fresh start");
|
|
8389
|
+
}
|
|
8409
8390
|
const result = await startAppBackground({
|
|
8410
8391
|
repoRoot,
|
|
8411
8392
|
appDir,
|
|
@@ -8435,6 +8416,14 @@ var shutdownActor = fromPromise(async ({ input: input3 }) => {
|
|
|
8435
8416
|
});
|
|
8436
8417
|
}
|
|
8437
8418
|
});
|
|
8419
|
+
function normalizeDevMachineInput(input3) {
|
|
8420
|
+
const normalizedInput = DevInputSchema.parse(input3?.input ?? {});
|
|
8421
|
+
const repoRoot = input3?.repoRoot ?? normalizedInput.targetDir ?? process.cwd();
|
|
8422
|
+
return {
|
|
8423
|
+
input: normalizedInput,
|
|
8424
|
+
repoRoot
|
|
8425
|
+
};
|
|
8426
|
+
}
|
|
8438
8427
|
var devMachine = setup({
|
|
8439
8428
|
types: {},
|
|
8440
8429
|
actors: {
|
|
@@ -8452,9 +8441,10 @@ var devMachine = setup({
|
|
|
8452
8441
|
id: "dev",
|
|
8453
8442
|
initial: "idle",
|
|
8454
8443
|
context: ({ input: input3 }) => {
|
|
8455
|
-
const
|
|
8444
|
+
const normalizedInput = normalizeDevMachineInput(input3);
|
|
8445
|
+
const repoRoot = normalizedInput.repoRoot;
|
|
8456
8446
|
return {
|
|
8457
|
-
input:
|
|
8447
|
+
input: normalizedInput.input,
|
|
8458
8448
|
repoRoot,
|
|
8459
8449
|
tmpDir: ".runa/tmp/dev",
|
|
8460
8450
|
hasDatabase: detectDatabase(repoRoot),
|
|
@@ -8861,6 +8851,23 @@ function isPathContained(basePath, targetPath) {
|
|
|
8861
8851
|
const realTarget = safeRealpath(normalizedTarget);
|
|
8862
8852
|
return realTarget === realBase || realTarget.startsWith(realBase + sep);
|
|
8863
8853
|
}
|
|
8854
|
+
function validateUserFilePath(filePath, baseDir) {
|
|
8855
|
+
if (!filePath || filePath.trim() === "") {
|
|
8856
|
+
throw new Error("File path cannot be empty");
|
|
8857
|
+
}
|
|
8858
|
+
if (!hasNoDangerousChars(filePath)) {
|
|
8859
|
+
throw new Error(
|
|
8860
|
+
"File path contains dangerous characters. Shell metacharacters and control characters are not allowed."
|
|
8861
|
+
);
|
|
8862
|
+
}
|
|
8863
|
+
const resolvedPath = isAbsolute(filePath) ? resolve(filePath) : resolve(baseDir, filePath);
|
|
8864
|
+
if (!isPathContained(baseDir, resolvedPath)) {
|
|
8865
|
+
throw new Error(
|
|
8866
|
+
"File path resolves outside the allowed directory. Path must be within the project root."
|
|
8867
|
+
);
|
|
8868
|
+
}
|
|
8869
|
+
return resolvedPath;
|
|
8870
|
+
}
|
|
8864
8871
|
|
|
8865
8872
|
// src/config/env-files.ts
|
|
8866
8873
|
init_workspace_detector();
|
|
@@ -12888,6 +12895,20 @@ function getSchemaGitDiff(repoRoot) {
|
|
|
12888
12895
|
return null;
|
|
12889
12896
|
}
|
|
12890
12897
|
}
|
|
12898
|
+
function extractRolesFromSql(content) {
|
|
12899
|
+
const roles = [];
|
|
12900
|
+
const roleMatches = content.matchAll(/CREATE\s+ROLE\s+(\w+)\s+WITH/gi);
|
|
12901
|
+
for (const match of roleMatches) {
|
|
12902
|
+
if (match[1]) roles.push(match[1].toLowerCase());
|
|
12903
|
+
}
|
|
12904
|
+
const existsMatches = content.matchAll(/rolname\s*=\s*'(\w+)'/gi);
|
|
12905
|
+
for (const match of existsMatches) {
|
|
12906
|
+
if (match[1] && !roles.includes(match[1].toLowerCase())) {
|
|
12907
|
+
roles.push(match[1].toLowerCase());
|
|
12908
|
+
}
|
|
12909
|
+
}
|
|
12910
|
+
return roles;
|
|
12911
|
+
}
|
|
12891
12912
|
function getIdempotentRoleNames2(repoRoot) {
|
|
12892
12913
|
const idempotentDir = path11__default.join(repoRoot, "supabase", "schemas", "idempotent");
|
|
12893
12914
|
const roles = [];
|
|
@@ -12896,17 +12917,12 @@ function getIdempotentRoleNames2(repoRoot) {
|
|
|
12896
12917
|
if (!fs14.existsSync(idempotentDir)) return [];
|
|
12897
12918
|
const files = fs14.readdirSync(idempotentDir).filter((f) => f.endsWith(".sql"));
|
|
12898
12919
|
for (const file of files) {
|
|
12899
|
-
const
|
|
12900
|
-
|
|
12901
|
-
|
|
12902
|
-
if (match[1]) roles.push(match[1].toLowerCase());
|
|
12903
|
-
}
|
|
12904
|
-
const existsMatches = content.matchAll(/rolname\s*=\s*'(\w+)'/gi);
|
|
12905
|
-
for (const match of existsMatches) {
|
|
12906
|
-
if (match[1] && !roles.includes(match[1].toLowerCase())) {
|
|
12907
|
-
roles.push(match[1].toLowerCase());
|
|
12908
|
-
}
|
|
12920
|
+
const filePath = path11__default.join(idempotentDir, file);
|
|
12921
|
+
if (!isPathContained(idempotentDir, filePath)) {
|
|
12922
|
+
continue;
|
|
12909
12923
|
}
|
|
12924
|
+
const content = fs14.readFileSync(filePath, "utf-8");
|
|
12925
|
+
roles.push(...extractRolesFromSql(content));
|
|
12910
12926
|
}
|
|
12911
12927
|
} catch {
|
|
12912
12928
|
}
|
|
@@ -17478,6 +17494,7 @@ function tryResolveDatabaseUrl(environment) {
|
|
|
17478
17494
|
|
|
17479
17495
|
// src/commands/db/utils/table-registry.ts
|
|
17480
17496
|
init_esm_shims();
|
|
17497
|
+
init_config_loader();
|
|
17481
17498
|
|
|
17482
17499
|
// src/commands/db/utils/semantic-mapper.ts
|
|
17483
17500
|
init_esm_shims();
|
|
@@ -17564,6 +17581,223 @@ function applyMappingToTables(tables, mapping) {
|
|
|
17564
17581
|
}));
|
|
17565
17582
|
}
|
|
17566
17583
|
|
|
17584
|
+
// src/commands/db/utils/schema-sync.ts
|
|
17585
|
+
init_esm_shims();
|
|
17586
|
+
var VALID_PG_IDENTIFIER_PATTERN = /^[a-zA-Z_][a-zA-Z0-9_]{0,62}$/;
|
|
17587
|
+
function validatePgIdentifier(name, context) {
|
|
17588
|
+
if (!name || typeof name !== "string") {
|
|
17589
|
+
throw new Error(`Invalid ${context}: empty or not a string`);
|
|
17590
|
+
}
|
|
17591
|
+
if (!VALID_PG_IDENTIFIER_PATTERN.test(name)) {
|
|
17592
|
+
throw new Error(
|
|
17593
|
+
`Invalid ${context} "${name}": must start with letter/underscore and contain only alphanumeric/underscore characters`
|
|
17594
|
+
);
|
|
17595
|
+
}
|
|
17596
|
+
}
|
|
17597
|
+
function escapePgStringLiteral(value) {
|
|
17598
|
+
if (typeof value !== "string") {
|
|
17599
|
+
throw new Error("Value must be a string");
|
|
17600
|
+
}
|
|
17601
|
+
return value.replace(/\\/g, "\\\\").replace(/'/g, "''");
|
|
17602
|
+
}
|
|
17603
|
+
function buildSafeSchemaInClause(schemas) {
|
|
17604
|
+
if (schemas.length === 0) {
|
|
17605
|
+
throw new Error("No schemas provided for IN clause");
|
|
17606
|
+
}
|
|
17607
|
+
const safeSchemas = [];
|
|
17608
|
+
for (const schema of schemas) {
|
|
17609
|
+
validatePgIdentifier(schema, "schema name");
|
|
17610
|
+
safeSchemas.push(`'${escapePgStringLiteral(schema)}'`);
|
|
17611
|
+
}
|
|
17612
|
+
return safeSchemas.join(",");
|
|
17613
|
+
}
|
|
17614
|
+
var ERROR_MESSAGES2 = {
|
|
17615
|
+
PATH_TRAVERSAL: "Schema path validation failed",
|
|
17616
|
+
SCHEMA_NOT_FOUND: "Schema file not found"
|
|
17617
|
+
};
|
|
17618
|
+
function containsPathTraversal2(inputPath) {
|
|
17619
|
+
const normalized = path11__default.normalize(inputPath);
|
|
17620
|
+
return normalized.includes("..") || inputPath.includes("\0");
|
|
17621
|
+
}
|
|
17622
|
+
function isPathWithinBase(filePath, baseDir) {
|
|
17623
|
+
try {
|
|
17624
|
+
const resolvedFile = path11__default.resolve(filePath);
|
|
17625
|
+
const resolvedBase = path11__default.resolve(baseDir);
|
|
17626
|
+
const normalizedFile = path11__default.normalize(resolvedFile);
|
|
17627
|
+
const normalizedBase = path11__default.normalize(resolvedBase);
|
|
17628
|
+
return normalizedFile === normalizedBase || normalizedFile.startsWith(normalizedBase + path11__default.sep);
|
|
17629
|
+
} catch {
|
|
17630
|
+
return false;
|
|
17631
|
+
}
|
|
17632
|
+
}
|
|
17633
|
+
function validateSchemaPath(dbPackagePath, projectRoot = process.cwd()) {
|
|
17634
|
+
if (containsPathTraversal2(dbPackagePath)) {
|
|
17635
|
+
throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
|
|
17636
|
+
}
|
|
17637
|
+
const schemaEntry = path11__default.join(dbPackagePath, "src", "schema", "index.ts");
|
|
17638
|
+
const absoluteSchemaPath = path11__default.resolve(projectRoot, schemaEntry);
|
|
17639
|
+
let resolvedProjectRoot;
|
|
17640
|
+
try {
|
|
17641
|
+
resolvedProjectRoot = realpathSync(projectRoot);
|
|
17642
|
+
} catch {
|
|
17643
|
+
resolvedProjectRoot = path11__default.resolve(projectRoot);
|
|
17644
|
+
}
|
|
17645
|
+
if (!isPathWithinBase(absoluteSchemaPath, resolvedProjectRoot)) {
|
|
17646
|
+
throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
|
|
17647
|
+
}
|
|
17648
|
+
if (!existsSync(absoluteSchemaPath)) {
|
|
17649
|
+
throw new Error(ERROR_MESSAGES2.SCHEMA_NOT_FOUND);
|
|
17650
|
+
}
|
|
17651
|
+
return absoluteSchemaPath;
|
|
17652
|
+
}
|
|
17653
|
+
function uniqueSorted(values) {
|
|
17654
|
+
return [...new Set(values)].sort((a, b) => a.localeCompare(b));
|
|
17655
|
+
}
|
|
17656
|
+
async function extractSchemaTablesAndEnums(dbPackagePath, projectRoot = process.cwd()) {
|
|
17657
|
+
const validatedSchemaPath = validateSchemaPath(dbPackagePath, projectRoot);
|
|
17658
|
+
const jiti = createJiti(projectRoot, { interopDefault: true });
|
|
17659
|
+
let schemaModule;
|
|
17660
|
+
try {
|
|
17661
|
+
schemaModule = await jiti.import(validatedSchemaPath);
|
|
17662
|
+
} catch (error) {
|
|
17663
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
17664
|
+
const hint = errorMessage.includes("unknown is not defined") ? "\n\nHint: Add 'unknown' to drizzle-orm/pg-core imports:\n import { unknown, ... } from 'drizzle-orm/pg-core'" : "";
|
|
17665
|
+
throw new Error(`Failed to load schema from ${validatedSchemaPath}: ${errorMessage}${hint}`);
|
|
17666
|
+
}
|
|
17667
|
+
const expectedTables = /* @__PURE__ */ new Set();
|
|
17668
|
+
const expectedEnums = /* @__PURE__ */ new Map();
|
|
17669
|
+
for (const value of Object.values(schemaModule)) {
|
|
17670
|
+
if (isTable(value)) {
|
|
17671
|
+
const unique = String(getTableUniqueName(value));
|
|
17672
|
+
if (unique.startsWith("undefined.")) {
|
|
17673
|
+
expectedTables.add(`public.${getTableName(value)}`);
|
|
17674
|
+
} else {
|
|
17675
|
+
expectedTables.add(unique);
|
|
17676
|
+
}
|
|
17677
|
+
continue;
|
|
17678
|
+
}
|
|
17679
|
+
if (isPgEnum(value)) {
|
|
17680
|
+
expectedEnums.set(value.enumName, {
|
|
17681
|
+
name: value.enumName,
|
|
17682
|
+
values: uniqueSorted(value.enumValues)
|
|
17683
|
+
});
|
|
17684
|
+
}
|
|
17685
|
+
}
|
|
17686
|
+
return { expectedTables, expectedEnums };
|
|
17687
|
+
}
|
|
17688
|
+
// Queries the live database for user tables and public enums so they can be
// diffed against the schema expectations produced by extractSchemaTablesAndEnums.
//
// databaseUrl - postgres connection string, handed straight to psqlQuery.
// options?.schemaDir - directory scanned for managed schema names
//   (defaults to "packages/database/src/schema").
// options?.additionalSystemSchemas - extra schema names to treat as system-owned.
// Returns { dbTables: Set<"schema.table">, dbEnums: Map<name, {name, values}> }.
async function fetchDbTablesAndEnums(databaseUrl, options) {
  const schemaDir = options?.schemaDir ?? "packages/database/src/schema";
  // Schemas this project manages, discovered from the schema source tree.
  const managedSchemas = detectSchemaNames(schemaDir, process.cwd());
  const systemSchemas = /* @__PURE__ */ new Set([
    ...SUPABASE_SYSTEM_SCHEMAS,
    ...options?.additionalSystemSchemas ?? []
  ]);
  // Never introspect system-owned schemas even if they show up as managed.
  const filteredManagedSchemas = managedSchemas.filter((s) => !systemSchemas.has(s));
  // buildSafeSchemaInClause validates each identifier and quotes it, so the
  // interpolation below is not an injection vector.
  const schemaList = buildSafeSchemaInClause(filteredManagedSchemas);
  const tablesSql = `
SELECT schemaname || '.' || tablename
FROM pg_tables
WHERE schemaname IN (${schemaList})
ORDER BY schemaname, tablename;`.trim();
  // NOTE(review): enums are only collected from the "public" schema while
  // tables come from every managed schema - presumably intentional; confirm.
  const enumsSql = `
SELECT t.typname AS enum_name, string_agg(e.enumlabel, ',' ORDER BY e.enumsortorder) AS values
FROM pg_type t
JOIN pg_enum e ON t.oid = e.enumtypid
JOIN pg_namespace n ON n.oid = t.typnamespace
WHERE n.nspname = 'public'
GROUP BY t.typname
ORDER BY t.typname;`.trim();
  // psqlQuery "table" mode returns plain text, one row per line.
  const tablesOut = await psqlQuery({ databaseUrl, sql: tablesSql, mode: "table" });
  const dbTables = /* @__PURE__ */ new Set();
  for (const line of tablesOut.split("\n")) {
    const v = line.trim();
    if (v.length > 0) dbTables.add(v);
  }
  const enumsOut = await psqlQuery({ databaseUrl, sql: enumsSql, mode: "table" });
  const dbEnums = /* @__PURE__ */ new Map();
  for (const line of enumsOut.split("\n")) {
    const trimmed = line.trim();
    if (trimmed.length === 0) continue;
    // Each row is rendered as "enum_name | v1,v2,..." by psql's column layout.
    const [enumName, valuesCsv] = trimmed.split("|").map((s) => s.trim());
    const values = valuesCsv ? valuesCsv.split(",").map((s) => s.trim()) : [];
    dbEnums.set(enumName, { name: enumName, values: uniqueSorted(values) });
  }
  return { dbTables, dbEnums };
}
|
|
17727
|
+
// Computes the difference between schema expectations and the live database
// state for tables and enums.
//
// params:
//   expectedTables / dbTables: Set<"schema.table">
//   expectedEnums / dbEnums:   Map<name, { name, values }>
//   excludeFromOrphanDetection?: exact table names or "*" wildcard patterns
//     for DB tables that should not be reported as orphans.
// Returns the inputs plus missing/orphan tables, missing/extra enums, and
// per-enum value mismatches (added = schema-only values, removed = DB-only).
function diffSchema(params) {
  const missingTables = uniqueSorted(
    [...params.expectedTables].filter((t) => !params.dbTables.has(t))
  );
  const exclusions = new Set(params.excludeFromOrphanDetection ?? []);
  // Fix: escape regex metacharacters so a pattern like "public.audit_*" only
  // treats "*" as a wildcard (previously the "." matched any character),
  // matching the behavior of buildTablePatternMatcher elsewhere in this file.
  // Also compile each wildcard pattern once instead of per table.
  const escapeForRegex = (value) => value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const wildcardRegexes = [...exclusions]
    .filter((e) => e.includes("*"))
    .map((e) => new RegExp(`^${escapeForRegex(e).replace(/\\\*/g, ".*")}$`));
  const exactExclusions = new Set([...exclusions].filter((e) => !e.includes("*")));
  const isExcluded = (table) =>
    exactExclusions.has(table) || wildcardRegexes.some((regex) => regex.test(table));
  const orphanTables = uniqueSorted(
    [...params.dbTables].filter((t) => !params.expectedTables.has(t) && !isExcluded(t))
  );
  const expectedEnumNames = new Set(params.expectedEnums.keys());
  const dbEnumNames = new Set(params.dbEnums.keys());
  const missingEnums = uniqueSorted([...expectedEnumNames].filter((n) => !dbEnumNames.has(n)));
  const extraEnums = uniqueSorted([...dbEnumNames].filter((n) => !expectedEnumNames.has(n)));
  const enumValueMismatches = [];
  for (const name of uniqueSorted([...expectedEnumNames].filter((n) => dbEnumNames.has(n)))) {
    const s = params.expectedEnums.get(name);
    const d = params.dbEnums.get(name);
    if (!s || !d) continue;
    const schemaValues = uniqueSorted(s.values);
    const dbValues = uniqueSorted(d.values);
    // Both sides are sorted and deduped, so positional comparison suffices.
    const same = schemaValues.length === dbValues.length && schemaValues.every((v, i) => v === dbValues[i]);
    if (same) continue;
    const added = schemaValues.filter((v) => !dbValues.includes(v));
    const removed = dbValues.filter((v) => !schemaValues.includes(v));
    enumValueMismatches.push({ name, dbValues, schemaValues, added, removed });
  }
  return {
    expectedTables: params.expectedTables,
    expectedEnums: params.expectedEnums,
    dbTables: params.dbTables,
    dbEnums: params.dbEnums,
    missingTables,
    orphanTables,
    missingEnums,
    extraEnums,
    enumValueMismatches
  };
}
|
|
17774
|
+
// Scans every .sql file directly inside `idempotentDir` (resolved against
// `projectRoot`) for CREATE TABLE statements and returns the qualified
// "schema.table" names found, deduplicated and sorted. Tables without an
// explicit schema default to "public". Returns [] when the directory is
// missing or any file cannot be read (best-effort).
function extractTablesFromIdempotentSql(idempotentDir, projectRoot = process.cwd()) {
  const resolvedDir = path11__default.resolve(projectRoot, idempotentDir);
  if (!existsSync(resolvedDir)) {
    return [];
  }
  const createTablePattern = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?\.)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?)/gi;
  const found = [];
  try {
    const sqlFiles = readdirSync(resolvedDir).filter((name) => name.endsWith(".sql"));
    for (const name of sqlFiles) {
      const raw = readFileSync(path11__default.join(resolvedDir, name), "utf-8");
      // Strip line (--) and block (/* */) comments so commented-out DDL is ignored.
      const stripped = raw.replace(/--.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
      for (const match of stripped.matchAll(createTablePattern)) {
        const [, schemaName, tableName] = match;
        if (tableName) {
          found.push(`${schemaName || "public"}.${tableName}`);
        }
      }
    }
  } catch {
    // Any read failure discards all results, mirroring the missing-dir case.
    return [];
  }
  return [...new Set(found)].sort();
}
|
|
17800
|
+
|
|
17567
17801
|
// src/commands/db/utils/sql-table-extractor.ts
|
|
17568
17802
|
init_esm_shims();
|
|
17569
17803
|
var sqlParserUtils = null;
|
|
@@ -18225,9 +18459,235 @@ async function extractTablesFromSqlDir(sqlDir, options = {}) {
|
|
|
18225
18459
|
return tableEntries;
|
|
18226
18460
|
}
|
|
18227
18461
|
|
|
18462
|
+
// src/commands/db/utils/table-source-classifier.ts
|
|
18463
|
+
init_esm_shims();
|
|
18464
|
+
// Splits a "schema.table" name into its two parts; missing pieces become "".
// Only the first two dot-separated segments are considered.
function splitQualifiedName(qualifiedName) {
  const parts = qualifiedName.split(".", 2);
  return { schema: parts[0] ?? "", table: parts[1] ?? "" };
}
|
|
18468
|
+
// Backslash-escapes every regex metacharacter so `value` matches literally
// when embedded in a RegExp source string.
function escapeRegexLiteral(value) {
  return value.replace(/[.*+?^${}()|[\]\\]/g, (ch) => `\\${ch}`);
}
|
|
18471
|
+
// Compiles allowlist patterns into a predicate over "schema.table" names.
// A pattern containing "." is matched against the fully qualified name;
// otherwise it is matched against the bare table name. "*" is a wildcard;
// every other character matches literally. Blank patterns are ignored.
function buildTablePatternMatcher(patterns) {
  const matchers = [];
  for (const raw of patterns) {
    const pattern = raw.trim();
    if (pattern.length === 0) continue;
    matchers.push({
      target: pattern.includes(".") ? "qualified" : "table",
      regex: new RegExp(`^${escapeRegexLiteral(pattern).replace(/\\\*/g, ".*")}$`)
    });
  }
  return (qualifiedName) => {
    const { table } = splitQualifiedName(qualifiedName);
    return matchers.some(
      ({ target, regex }) => regex.test(target === "qualified" ? qualifiedName : table)
    );
  };
}
|
|
18488
|
+
// Walks the partition-parent chain starting at `table` and returns the first
// name (the table itself or an ancestor) managed by idempotent SQL, or null
// if no such ancestor exists. A visited set guards against cycles in the
// parent map.
function findIdempotentAncestor(table, partitionParentMap, idempotentManagedTables) {
  if (idempotentManagedTables.has(table)) {
    return table;
  }
  const seen = /* @__PURE__ */ new Set();
  for (let node = table; !seen.has(node); ) {
    seen.add(node);
    const parent = partitionParentMap.get(node);
    if (!parent) return null;
    if (idempotentManagedTables.has(parent)) return parent;
    node = parent;
  }
  return null;
}
|
|
18507
|
+
// True when the table lives in a system schema or is a known system table
// (e.g. PostGIS metadata tables) that should never be flagged as orphaned.
function isSystemManagedTable(params) {
  const { qualifiedName, systemSchemas, knownSystemTables } = params;
  const { schema } = splitQualifiedName(qualifiedName);
  if (systemSchemas.has(schema)) return true;
  return knownSystemTables.has(qualifiedName);
}
|
|
18511
|
+
// Buckets DB tables that lack a declarative SQL source file into three
// categories, evaluated in strict precedence order per table:
//   1. definedInIdempotentDynamicDdl - the table (or a partition ancestor)
//      is created by idempotent/dynamic DDL;
//   2. extensionManagedOrSystemTable - owned by a Postgres extension, living
//      in a system schema / known system table, or allowlisted in config;
//   3. trulyOrphaned - nothing accounts for it; the caller should warn.
function classifyMissingSourceTables(params) {
  const extensionOwners = params.extensionManagedTables ?? /* @__PURE__ */ new Map();
  const parentMap = params.partitionParentMap ?? /* @__PURE__ */ new Map();
  const isAllowlisted = buildTablePatternMatcher(params.excludeFromOrphanDetection ?? []);
  const systemSchemas = new Set(params.systemSchemas ?? []);
  const knownSystemTables = new Set(params.knownSystemTables ?? []);
  const result = {
    definedInIdempotentDynamicDdl: [],
    extensionManagedOrSystemTable: [],
    trulyOrphaned: []
  };
  for (const qualifiedName of params.tablesWithoutSource) {
    const ancestor = findIdempotentAncestor(qualifiedName, parentMap, params.idempotentManagedTables);
    if (ancestor) {
      const detail = ancestor === qualifiedName
        ? "matched CREATE TABLE in idempotent SQL"
        : `partition child of ${ancestor}`;
      result.definedInIdempotentDynamicDdl.push({ qualifiedName, detail });
      continue;
    }
    const extensionName = extensionOwners.get(qualifiedName);
    if (extensionName) {
      result.extensionManagedOrSystemTable.push({
        qualifiedName,
        detail: `managed by extension "${extensionName}"`
      });
      continue;
    }
    if (isSystemManagedTable({ qualifiedName, systemSchemas, knownSystemTables })) {
      result.extensionManagedOrSystemTable.push({
        qualifiedName,
        detail: "system-managed schema/table"
      });
      continue;
    }
    if (isAllowlisted(qualifiedName)) {
      result.extensionManagedOrSystemTable.push({
        qualifiedName,
        detail: "allowlisted by database.pgSchemaDiff.excludeFromOrphanDetection"
      });
      continue;
    }
    result.trulyOrphaned.push(qualifiedName);
  }
  return result;
}
|
|
18561
|
+
|
|
18228
18562
|
// src/commands/db/utils/table-registry.ts
|
|
18229
18563
|
var MANIFEST_VERSION = 2;
|
|
18230
18564
|
var GENERATOR_VERSION = "1.0.0";
|
|
18565
|
+
var DEFAULT_IDEMPOTENT_SQL_DIR = "supabase/schemas/idempotent";
|
|
18566
|
+
var KNOWN_EXTENSION_SYSTEM_TABLES = /* @__PURE__ */ new Set([
|
|
18567
|
+
"public.spatial_ref_sys",
|
|
18568
|
+
"public.geometry_columns",
|
|
18569
|
+
"public.geography_columns"
|
|
18570
|
+
]);
|
|
18571
|
+
var SUPABASE_SYSTEM_SCHEMA_SET = new Set(SUPABASE_SYSTEM_SCHEMAS);
|
|
18572
|
+
var VALID_PG_IDENTIFIER2 = /^[a-zA-Z_][a-zA-Z0-9_]{0,62}$/;
|
|
18573
|
+
// Throws when `name` is not a safe unquoted Postgres identifier (letter or
// underscore first, then alphanumerics/underscores, max 63 characters).
// `context` labels the error message, e.g. "schema name".
function validatePgIdentifier2(name, context) {
  if (!name || typeof name !== "string") {
    throw new Error(`Invalid ${context}: empty or not a string`);
  }
  if (VALID_PG_IDENTIFIER2.test(name)) {
    return;
  }
  throw new Error(
    `Invalid ${context} "${name}": must start with letter/underscore and contain only alphanumeric/underscore characters`
  );
}
|
|
18583
|
+
// Builds the body of a SQL IN (...) clause from schema names, validating each
// identifier and wrapping it as a quoted string literal. Throws on an empty
// list so callers never emit the invalid SQL `IN ()`.
function buildSafeSchemaInClause2(schemas) {
  if (schemas.length === 0) {
    throw new Error("No schemas provided for IN clause");
  }
  return schemas
    .map((schema) => {
      validatePgIdentifier2(schema, "schema name");
      // Quote-doubling is defensive only: validation already rejects quotes.
      return `'${schema.replace(/'/g, "''")}'`;
    })
    .join(",");
}
|
|
18594
|
+
// Converts an absolute schema file path into a project-relative one. When the
// file lies outside the project root (relative() yields ".." or an absolute
// path), fall back to the trailing "supabase/schemas/<dir>/<file>" portion,
// or the original path if that pattern is absent.
function toRelativeSourcePath(projectRoot, sourceFile) {
  const candidate = relative(projectRoot, sourceFile);
  // NOTE(review): the "/" check targets POSIX absolute paths; Windows drive
  // paths ("C:\\...") would not be caught here - confirm if that matters.
  if (!candidate.startsWith("/") && !candidate.startsWith("..")) {
    return candidate;
  }
  const schemaMatch = sourceFile.match(/supabase\/schemas\/[^/]+\/[^/]+$/);
  return schemaMatch ? schemaMatch[0] : sourceFile;
}
|
|
18602
|
+
// Resolves where idempotent SQL lives and which tables are allowlisted for
// orphan detection. Explicit options take precedence over runa config; config
// exclusion patterns are merged into the option patterns. Config load
// failures are deliberately ignored (best-effort defaults).
function resolveSourceConfig(projectRoot, options) {
  let idempotentSqlDir = options.idempotentSqlDir ?? DEFAULT_IDEMPOTENT_SQL_DIR;
  const exclusions = new Set(options.excludeFromOrphanDetection ?? []);
  try {
    const pgSchemaDiff = loadRunaConfig2(projectRoot).database?.pgSchemaDiff;
    if (!options.idempotentSqlDir && pgSchemaDiff?.idempotentSqlDir) {
      idempotentSqlDir = pgSchemaDiff.idempotentSqlDir;
    }
    for (const pattern of pgSchemaDiff?.excludeFromOrphanDetection ?? []) {
      exclusions.add(pattern);
    }
  } catch {
    // Missing or invalid runa config is fine; fall back to defaults.
  }
  const absoluteDir = isAbsolute(idempotentSqlDir)
    ? idempotentSqlDir
    : join(projectRoot, idempotentSqlDir);
  return {
    idempotentSqlDir: absoluteDir,
    excludeFromOrphanDetection: [...exclusions].sort((a, b) => a.localeCompare(b))
  };
}
|
|
18623
|
+
// Fetches, from the live database, metadata needed to classify tables that
// have no SQL source file: which tables are owned by a Postgres extension,
// and the partition child -> parent map.
//
// params.databaseUrl - postgres connection string (postgres.js client).
// params.schemas - schema names to inspect; empty input short-circuits.
// Returns { extensionManagedTables: Map<"schema.table", extName>,
//           partitionParentMap: Map<"schema.table", "schema.table"> }.
// Throws on connection/query failure; the caller is expected to degrade
// gracefully. The connection is always closed via finally.
async function fetchMissingSourceMetadata(params) {
  const { databaseUrl, schemas } = params;
  if (schemas.length === 0) {
    return {
      extensionManagedTables: /* @__PURE__ */ new Map(),
      partitionParentMap: /* @__PURE__ */ new Map()
    };
  }
  // Hosted Supabase requires TLS; local/self-hosted URLs connect plainly.
  const isRemoteSupabase = databaseUrl.includes(".supabase.co");
  const sql = postgres(databaseUrl, {
    ...isRemoteSupabase && { ssl: "require" }
  });
  try {
    // Identifiers are validated and quoted here, which is what makes the
    // sql.unsafe() interpolation below safe.
    const schemaList = buildSafeSchemaInClause2(schemas);
    const [extensionRows, partitionRows] = await Promise.all([
      sql`
        SELECT
          n.nspname AS schema_name,
          c.relname AS table_name,
          ext.extname AS extension_name
        FROM pg_class c
        JOIN pg_namespace n ON n.oid = c.relnamespace
        JOIN pg_depend d
          ON d.classid = 'pg_class'::regclass
          AND d.objid = c.oid
          AND d.refclassid = 'pg_extension'::regclass
          AND d.deptype = 'e'
        JOIN pg_extension ext ON ext.oid = d.refobjid
        WHERE c.relkind IN ('r', 'p')
          AND n.nspname IN (${sql.unsafe(schemaList)})
      `,
      sql`
        SELECT
          child_ns.nspname AS child_schema,
          child.relname AS child_table,
          parent_ns.nspname AS parent_schema,
          parent.relname AS parent_table
        FROM pg_inherits i
        JOIN pg_class child ON child.oid = i.inhrelid
        JOIN pg_namespace child_ns ON child_ns.oid = child.relnamespace
        JOIN pg_class parent ON parent.oid = i.inhparent
        JOIN pg_namespace parent_ns ON parent_ns.oid = parent.relnamespace
        WHERE child.relkind IN ('r', 'p')
          AND child_ns.nspname IN (${sql.unsafe(schemaList)})
      `
    ]);
    const extensionManagedTables = /* @__PURE__ */ new Map();
    for (const row of extensionRows) {
      extensionManagedTables.set(
        `${String(row.schema_name)}.${String(row.table_name)}`,
        String(row.extension_name)
      );
    }
    const partitionParentMap = /* @__PURE__ */ new Map();
    for (const row of partitionRows) {
      partitionParentMap.set(
        `${String(row.child_schema)}.${String(row.child_table)}`,
        `${String(row.parent_schema)}.${String(row.parent_table)}`
      );
    }
    return { extensionManagedTables, partitionParentMap };
  } finally {
    await sql.end();
  }
}
|
|
18688
|
+
// Renders classification items as "name (detail)" entries joined by ", ";
// items without a detail render as the bare qualified name.
function formatMissingSourceItems(items) {
  const parts = [];
  for (const { qualifiedName, detail } of items) {
    parts.push(detail ? `${qualifiedName} (${detail})` : qualifiedName);
  }
  return parts.join(", ");
}
|
|
18231
18691
|
async function introspectTablesFromDb(databaseUrl, schemas) {
|
|
18232
18692
|
try {
|
|
18233
18693
|
const result = await introspectDatabase(databaseUrl, { schemas });
|
|
@@ -18473,14 +18933,31 @@ async function crossCheckWithDrizzle(sqlTables, drizzleSchemaPath) {
|
|
|
18473
18933
|
return { matched: [], sqlOnly: sqlTables, drizzleOnly: [] };
|
|
18474
18934
|
}
|
|
18475
18935
|
}
|
|
18476
|
-
function
|
|
18477
|
-
const
|
|
18478
|
-
if (
|
|
18479
|
-
console.warn(
|
|
18480
|
-
|
|
18481
|
-
|
|
18482
|
-
|
|
18483
|
-
|
|
18936
|
+
// Prints a human-readable summary of DB tables that lack a SQL source file,
// grouped by classification. Benign groups go to console.log; the header and
// the truly-orphaned group go to console.warn. No-op when all groups are empty.
function logMissingSourceClassification(classification) {
  const { definedInIdempotentDynamicDdl, extensionManagedOrSystemTable, trulyOrphaned } = classification;
  const total = definedInIdempotentDynamicDdl.length + extensionManagedOrSystemTable.length + trulyOrphaned.length;
  if (total === 0) return;
  console.warn(`[tables-manifest] \u26A0 ${total} table(s) exist in DB but not in SQL files.`);
  if (definedInIdempotentDynamicDdl.length > 0) {
    console.log(
      `[tables-manifest] info: defined_in_idempotent_dynamic_ddl (${definedInIdempotentDynamicDdl.length})`
    );
    console.log(` ${formatMissingSourceItems(definedInIdempotentDynamicDdl)}`);
  }
  if (extensionManagedOrSystemTable.length > 0) {
    console.log(
      `[tables-manifest] info: extension_managed/system_table (${extensionManagedOrSystemTable.length})`
    );
    console.log(` ${formatMissingSourceItems(extensionManagedOrSystemTable)}`);
  }
  if (trulyOrphaned.length > 0) {
    console.warn(`[tables-manifest] warn: truly_orphaned (${trulyOrphaned.length})`);
    console.warn(` ${trulyOrphaned.join(", ")}`);
    console.warn(
      " \u2192 Add declarative/idempotent SQL definitions or allowlist via database.pgSchemaDiff.excludeFromOrphanDetection."
    );
  } else {
    console.log("[tables-manifest] info: no truly_orphaned tables detected.");
  }
}
|
|
18485
18962
|
async function logDrizzleCrossCheck(tables, drizzleSchemaPath) {
|
|
18486
18963
|
const result = await crossCheckWithDrizzle(tables, drizzleSchemaPath);
|
|
@@ -18512,24 +18989,38 @@ async function generateTablesManifest(projectRoot, options = {}) {
|
|
|
18512
18989
|
// Reserved for future metadata filtering feature
|
|
18513
18990
|
includeMetadata: _includeMetadata = true
|
|
18514
18991
|
} = options;
|
|
18992
|
+
const sourceConfig = resolveSourceConfig(projectRoot, options);
|
|
18515
18993
|
let tables = [];
|
|
18516
18994
|
const source = "introspection";
|
|
18517
|
-
const
|
|
18995
|
+
const declarativeTables = await extractTablesFromSqlDir(sqlDir, {
|
|
18518
18996
|
includeColumns: false,
|
|
18519
18997
|
// Don't need columns from SQL (DB introspection is more accurate)
|
|
18520
18998
|
includeForeignKeys: false,
|
|
18521
18999
|
includeIndexes: false,
|
|
18522
19000
|
includeRlsPolicies: false
|
|
18523
19001
|
});
|
|
19002
|
+
const idempotentTablesForSource = await extractTablesFromSqlDir(sourceConfig.idempotentSqlDir, {
|
|
19003
|
+
includeColumns: false,
|
|
19004
|
+
includeForeignKeys: false,
|
|
19005
|
+
includeIndexes: false,
|
|
19006
|
+
includeRlsPolicies: false
|
|
19007
|
+
});
|
|
19008
|
+
const idempotentTablesFromRegex = extractTablesFromIdempotentSql(
|
|
19009
|
+
sourceConfig.idempotentSqlDir,
|
|
19010
|
+
projectRoot
|
|
19011
|
+
);
|
|
19012
|
+
const idempotentManagedTables = /* @__PURE__ */ new Set([
|
|
19013
|
+
...idempotentTablesFromRegex,
|
|
19014
|
+
...idempotentTablesForSource.map((t) => t.qualifiedName)
|
|
19015
|
+
]);
|
|
18524
19016
|
const sourceFileMap = /* @__PURE__ */ new Map();
|
|
18525
|
-
|
|
18526
|
-
|
|
18527
|
-
if (
|
|
18528
|
-
|
|
18529
|
-
relativeSource = schemaMatch ? schemaMatch[0] : t.sourceFile;
|
|
19017
|
+
const sourceTables = [...declarativeTables, ...idempotentTablesForSource];
|
|
19018
|
+
for (const t of sourceTables) {
|
|
19019
|
+
if (sourceFileMap.has(t.qualifiedName)) {
|
|
19020
|
+
continue;
|
|
18530
19021
|
}
|
|
18531
19022
|
sourceFileMap.set(t.qualifiedName, {
|
|
18532
|
-
sourceFile:
|
|
19023
|
+
sourceFile: toRelativeSourcePath(projectRoot, t.sourceFile),
|
|
18533
19024
|
lineNumber: t.lineNumber
|
|
18534
19025
|
});
|
|
18535
19026
|
}
|
|
@@ -18549,7 +19040,34 @@ async function generateTablesManifest(projectRoot, options = {}) {
|
|
|
18549
19040
|
};
|
|
18550
19041
|
});
|
|
18551
19042
|
console.log(`[tables-manifest] \u2713 Introspected ${tables.length} tables from database`);
|
|
18552
|
-
|
|
19043
|
+
const tablesWithoutSource = tables.filter((t) => !t.sourceFile);
|
|
19044
|
+
if (tablesWithoutSource.length > 0) {
|
|
19045
|
+
const missingSourceQualifiedNames = tablesWithoutSource.map((t) => t.qualifiedName);
|
|
19046
|
+
const missingSchemas = [...new Set(tablesWithoutSource.map((t) => t.schema))];
|
|
19047
|
+
let extensionManagedTables = /* @__PURE__ */ new Map();
|
|
19048
|
+
let partitionParentMap = /* @__PURE__ */ new Map();
|
|
19049
|
+
try {
|
|
19050
|
+
const metadata = await fetchMissingSourceMetadata({
|
|
19051
|
+
databaseUrl,
|
|
19052
|
+
schemas: missingSchemas
|
|
19053
|
+
});
|
|
19054
|
+
extensionManagedTables = metadata.extensionManagedTables;
|
|
19055
|
+
partitionParentMap = metadata.partitionParentMap;
|
|
19056
|
+
} catch (error) {
|
|
19057
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
19058
|
+
console.warn(`[tables-manifest] Failed to classify extension/partition metadata: ${message}`);
|
|
19059
|
+
}
|
|
19060
|
+
const classification = classifyMissingSourceTables({
|
|
19061
|
+
tablesWithoutSource: missingSourceQualifiedNames,
|
|
19062
|
+
idempotentManagedTables,
|
|
19063
|
+
extensionManagedTables,
|
|
19064
|
+
partitionParentMap,
|
|
19065
|
+
excludeFromOrphanDetection: sourceConfig.excludeFromOrphanDetection,
|
|
19066
|
+
systemSchemas: SUPABASE_SYSTEM_SCHEMA_SET,
|
|
19067
|
+
knownSystemTables: KNOWN_EXTENSION_SYSTEM_TABLES
|
|
19068
|
+
});
|
|
19069
|
+
logMissingSourceClassification(classification);
|
|
19070
|
+
}
|
|
18553
19071
|
if (crossCheck && existsSync(drizzleSchemaPath)) {
|
|
18554
19072
|
await logDrizzleCrossCheck(tables, drizzleSchemaPath);
|
|
18555
19073
|
}
|
|
@@ -20594,225 +21112,6 @@ var backupCommand = new Command("backup").description("Manage database backups (
|
|
|
20594
21112
|
// src/commands/db/commands/db-cleanup.ts
|
|
20595
21113
|
init_esm_shims();
|
|
20596
21114
|
init_config_loader();
|
|
20597
|
-
|
|
20598
|
-
// src/commands/db/utils/schema-sync.ts
|
|
20599
|
-
init_esm_shims();
|
|
20600
|
-
var VALID_PG_IDENTIFIER_PATTERN = /^[a-zA-Z_][a-zA-Z0-9_]{0,62}$/;
|
|
20601
|
-
function validatePgIdentifier(name, context) {
|
|
20602
|
-
if (!name || typeof name !== "string") {
|
|
20603
|
-
throw new Error(`Invalid ${context}: empty or not a string`);
|
|
20604
|
-
}
|
|
20605
|
-
if (!VALID_PG_IDENTIFIER_PATTERN.test(name)) {
|
|
20606
|
-
throw new Error(
|
|
20607
|
-
`Invalid ${context} "${name}": must start with letter/underscore and contain only alphanumeric/underscore characters`
|
|
20608
|
-
);
|
|
20609
|
-
}
|
|
20610
|
-
}
|
|
20611
|
-
function escapePgStringLiteral(value) {
|
|
20612
|
-
if (typeof value !== "string") {
|
|
20613
|
-
throw new Error("Value must be a string");
|
|
20614
|
-
}
|
|
20615
|
-
return value.replace(/\\/g, "\\\\").replace(/'/g, "''");
|
|
20616
|
-
}
|
|
20617
|
-
function buildSafeSchemaInClause(schemas) {
|
|
20618
|
-
if (schemas.length === 0) {
|
|
20619
|
-
throw new Error("No schemas provided for IN clause");
|
|
20620
|
-
}
|
|
20621
|
-
const safeSchemas = [];
|
|
20622
|
-
for (const schema of schemas) {
|
|
20623
|
-
validatePgIdentifier(schema, "schema name");
|
|
20624
|
-
safeSchemas.push(`'${escapePgStringLiteral(schema)}'`);
|
|
20625
|
-
}
|
|
20626
|
-
return safeSchemas.join(",");
|
|
20627
|
-
}
|
|
20628
|
-
var ERROR_MESSAGES2 = {
|
|
20629
|
-
PATH_TRAVERSAL: "Schema path validation failed",
|
|
20630
|
-
SCHEMA_NOT_FOUND: "Schema file not found"
|
|
20631
|
-
};
|
|
20632
|
-
function containsPathTraversal2(inputPath) {
|
|
20633
|
-
const normalized = path11__default.normalize(inputPath);
|
|
20634
|
-
return normalized.includes("..") || inputPath.includes("\0");
|
|
20635
|
-
}
|
|
20636
|
-
function isPathWithinBase(filePath, baseDir) {
|
|
20637
|
-
try {
|
|
20638
|
-
const resolvedFile = path11__default.resolve(filePath);
|
|
20639
|
-
const resolvedBase = path11__default.resolve(baseDir);
|
|
20640
|
-
const normalizedFile = path11__default.normalize(resolvedFile);
|
|
20641
|
-
const normalizedBase = path11__default.normalize(resolvedBase);
|
|
20642
|
-
return normalizedFile === normalizedBase || normalizedFile.startsWith(normalizedBase + path11__default.sep);
|
|
20643
|
-
} catch {
|
|
20644
|
-
return false;
|
|
20645
|
-
}
|
|
20646
|
-
}
|
|
20647
|
-
function validateSchemaPath(dbPackagePath, projectRoot = process.cwd()) {
|
|
20648
|
-
if (containsPathTraversal2(dbPackagePath)) {
|
|
20649
|
-
throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
|
|
20650
|
-
}
|
|
20651
|
-
const schemaEntry = path11__default.join(dbPackagePath, "src", "schema", "index.ts");
|
|
20652
|
-
const absoluteSchemaPath = path11__default.resolve(projectRoot, schemaEntry);
|
|
20653
|
-
let resolvedProjectRoot;
|
|
20654
|
-
try {
|
|
20655
|
-
resolvedProjectRoot = realpathSync(projectRoot);
|
|
20656
|
-
} catch {
|
|
20657
|
-
resolvedProjectRoot = path11__default.resolve(projectRoot);
|
|
20658
|
-
}
|
|
20659
|
-
if (!isPathWithinBase(absoluteSchemaPath, resolvedProjectRoot)) {
|
|
20660
|
-
throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
|
|
20661
|
-
}
|
|
20662
|
-
if (!existsSync(absoluteSchemaPath)) {
|
|
20663
|
-
throw new Error(ERROR_MESSAGES2.SCHEMA_NOT_FOUND);
|
|
20664
|
-
}
|
|
20665
|
-
return absoluteSchemaPath;
|
|
20666
|
-
}
|
|
20667
|
-
function uniqueSorted(values) {
|
|
20668
|
-
return [...new Set(values)].sort((a, b) => a.localeCompare(b));
|
|
20669
|
-
}
|
|
20670
|
-
async function extractSchemaTablesAndEnums(dbPackagePath, projectRoot = process.cwd()) {
|
|
20671
|
-
const validatedSchemaPath = validateSchemaPath(dbPackagePath, projectRoot);
|
|
20672
|
-
const jiti = createJiti(projectRoot, { interopDefault: true });
|
|
20673
|
-
let schemaModule;
|
|
20674
|
-
try {
|
|
20675
|
-
schemaModule = await jiti.import(validatedSchemaPath);
|
|
20676
|
-
} catch (error) {
|
|
20677
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
20678
|
-
const hint = errorMessage.includes("unknown is not defined") ? "\n\nHint: Add 'unknown' to drizzle-orm/pg-core imports:\n import { unknown, ... } from 'drizzle-orm/pg-core'" : "";
|
|
20679
|
-
throw new Error(`Failed to load schema from ${validatedSchemaPath}: ${errorMessage}${hint}`);
|
|
20680
|
-
}
|
|
20681
|
-
const expectedTables = /* @__PURE__ */ new Set();
|
|
20682
|
-
const expectedEnums = /* @__PURE__ */ new Map();
|
|
20683
|
-
for (const value of Object.values(schemaModule)) {
|
|
20684
|
-
if (isTable(value)) {
|
|
20685
|
-
const unique = String(getTableUniqueName(value));
|
|
20686
|
-
if (unique.startsWith("undefined.")) {
|
|
20687
|
-
expectedTables.add(`public.${getTableName(value)}`);
|
|
20688
|
-
} else {
|
|
20689
|
-
expectedTables.add(unique);
|
|
20690
|
-
}
|
|
20691
|
-
continue;
|
|
20692
|
-
}
|
|
20693
|
-
if (isPgEnum(value)) {
|
|
20694
|
-
expectedEnums.set(value.enumName, {
|
|
20695
|
-
name: value.enumName,
|
|
20696
|
-
values: uniqueSorted(value.enumValues)
|
|
20697
|
-
});
|
|
20698
|
-
}
|
|
20699
|
-
}
|
|
20700
|
-
return { expectedTables, expectedEnums };
|
|
20701
|
-
}
|
|
20702
|
-
async function fetchDbTablesAndEnums(databaseUrl, options) {
|
|
20703
|
-
const schemaDir = options?.schemaDir ?? "packages/database/src/schema";
|
|
20704
|
-
const managedSchemas = detectSchemaNames(schemaDir, process.cwd());
|
|
20705
|
-
const systemSchemas = /* @__PURE__ */ new Set([
|
|
20706
|
-
...SUPABASE_SYSTEM_SCHEMAS,
|
|
20707
|
-
...options?.additionalSystemSchemas ?? []
|
|
20708
|
-
]);
|
|
20709
|
-
const filteredManagedSchemas = managedSchemas.filter((s) => !systemSchemas.has(s));
|
|
20710
|
-
const schemaList = buildSafeSchemaInClause(filteredManagedSchemas);
|
|
20711
|
-
const tablesSql = `
|
|
20712
|
-
SELECT schemaname || '.' || tablename
|
|
20713
|
-
FROM pg_tables
|
|
20714
|
-
WHERE schemaname IN (${schemaList})
|
|
20715
|
-
ORDER BY schemaname, tablename;`.trim();
|
|
20716
|
-
const enumsSql = `
|
|
20717
|
-
SELECT t.typname AS enum_name, string_agg(e.enumlabel, ',' ORDER BY e.enumsortorder) AS values
|
|
20718
|
-
FROM pg_type t
|
|
20719
|
-
JOIN pg_enum e ON t.oid = e.enumtypid
|
|
20720
|
-
JOIN pg_namespace n ON n.oid = t.typnamespace
|
|
20721
|
-
WHERE n.nspname = 'public'
|
|
20722
|
-
GROUP BY t.typname
|
|
20723
|
-
ORDER BY t.typname;`.trim();
|
|
20724
|
-
const tablesOut = await psqlQuery({ databaseUrl, sql: tablesSql, mode: "table" });
|
|
20725
|
-
const dbTables = /* @__PURE__ */ new Set();
|
|
20726
|
-
for (const line of tablesOut.split("\n")) {
|
|
20727
|
-
const v = line.trim();
|
|
20728
|
-
if (v.length > 0) dbTables.add(v);
|
|
20729
|
-
}
|
|
20730
|
-
const enumsOut = await psqlQuery({ databaseUrl, sql: enumsSql, mode: "table" });
|
|
20731
|
-
const dbEnums = /* @__PURE__ */ new Map();
|
|
20732
|
-
for (const line of enumsOut.split("\n")) {
|
|
20733
|
-
const trimmed = line.trim();
|
|
20734
|
-
if (trimmed.length === 0) continue;
|
|
20735
|
-
const [enumName, valuesCsv] = trimmed.split("|").map((s) => s.trim());
|
|
20736
|
-
const values = valuesCsv ? valuesCsv.split(",").map((s) => s.trim()) : [];
|
|
20737
|
-
dbEnums.set(enumName, { name: enumName, values: uniqueSorted(values) });
|
|
20738
|
-
}
|
|
20739
|
-
return { dbTables, dbEnums };
|
|
20740
|
-
}
|
|
20741
|
-
function diffSchema(params) {
|
|
20742
|
-
const missingTables = uniqueSorted(
|
|
20743
|
-
[...params.expectedTables].filter((t) => !params.dbTables.has(t))
|
|
20744
|
-
);
|
|
20745
|
-
const exclusions = new Set(params.excludeFromOrphanDetection ?? []);
|
|
20746
|
-
const exclusionPatterns = [...exclusions].filter((e) => e.includes("*"));
|
|
20747
|
-
const exactExclusions = [...exclusions].filter((e) => !e.includes("*"));
|
|
20748
|
-
const isExcluded = (table) => {
|
|
20749
|
-
if (exactExclusions.includes(table)) return true;
|
|
20750
|
-
for (const pattern of exclusionPatterns) {
|
|
20751
|
-
const regex = new RegExp(`^${pattern.replace(/\*/g, ".*")}$`);
|
|
20752
|
-
if (regex.test(table)) return true;
|
|
20753
|
-
}
|
|
20754
|
-
return false;
|
|
20755
|
-
};
|
|
20756
|
-
const orphanTables = uniqueSorted(
|
|
20757
|
-
[...params.dbTables].filter((t) => !params.expectedTables.has(t) && !isExcluded(t))
|
|
20758
|
-
);
|
|
20759
|
-
const expectedEnumNames = new Set(params.expectedEnums.keys());
|
|
20760
|
-
const dbEnumNames = new Set(params.dbEnums.keys());
|
|
20761
|
-
const missingEnums = uniqueSorted([...expectedEnumNames].filter((n) => !dbEnumNames.has(n)));
|
|
20762
|
-
const extraEnums = uniqueSorted([...dbEnumNames].filter((n) => !expectedEnumNames.has(n)));
|
|
20763
|
-
const enumValueMismatches = [];
|
|
20764
|
-
for (const name of uniqueSorted([...expectedEnumNames].filter((n) => dbEnumNames.has(n)))) {
|
|
20765
|
-
const s = params.expectedEnums.get(name);
|
|
20766
|
-
const d = params.dbEnums.get(name);
|
|
20767
|
-
if (!s || !d) continue;
|
|
20768
|
-
const schemaValues = uniqueSorted(s.values);
|
|
20769
|
-
const dbValues = uniqueSorted(d.values);
|
|
20770
|
-
const same = schemaValues.length === dbValues.length && schemaValues.every((v, i) => v === dbValues[i]);
|
|
20771
|
-
if (same) continue;
|
|
20772
|
-
const added = schemaValues.filter((v) => !dbValues.includes(v));
|
|
20773
|
-
const removed = dbValues.filter((v) => !schemaValues.includes(v));
|
|
20774
|
-
enumValueMismatches.push({ name, dbValues, schemaValues, added, removed });
|
|
20775
|
-
}
|
|
20776
|
-
return {
|
|
20777
|
-
expectedTables: params.expectedTables,
|
|
20778
|
-
expectedEnums: params.expectedEnums,
|
|
20779
|
-
dbTables: params.dbTables,
|
|
20780
|
-
dbEnums: params.dbEnums,
|
|
20781
|
-
missingTables,
|
|
20782
|
-
orphanTables,
|
|
20783
|
-
missingEnums,
|
|
20784
|
-
extraEnums,
|
|
20785
|
-
enumValueMismatches
|
|
20786
|
-
};
|
|
20787
|
-
}
|
|
20788
|
-
function extractTablesFromIdempotentSql(idempotentDir, projectRoot = process.cwd()) {
|
|
20789
|
-
const fullPath = path11__default.resolve(projectRoot, idempotentDir);
|
|
20790
|
-
if (!existsSync(fullPath)) {
|
|
20791
|
-
return [];
|
|
20792
|
-
}
|
|
20793
|
-
const tables = [];
|
|
20794
|
-
const createTablePattern = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?\.)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?)/gi;
|
|
20795
|
-
try {
|
|
20796
|
-
const files = readdirSync(fullPath).filter((f) => f.endsWith(".sql"));
|
|
20797
|
-
for (const file of files) {
|
|
20798
|
-
const filePath = path11__default.join(fullPath, file);
|
|
20799
|
-
const content = readFileSync(filePath, "utf-8");
|
|
20800
|
-
const contentWithoutComments = content.replace(/--.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
|
|
20801
|
-
for (const match of contentWithoutComments.matchAll(createTablePattern)) {
|
|
20802
|
-
const schema = match[1] || "public";
|
|
20803
|
-
const tableName = match[2];
|
|
20804
|
-
if (tableName) {
|
|
20805
|
-
tables.push(`${schema}.${tableName}`);
|
|
20806
|
-
}
|
|
20807
|
-
}
|
|
20808
|
-
}
|
|
20809
|
-
} catch {
|
|
20810
|
-
return [];
|
|
20811
|
-
}
|
|
20812
|
-
return [...new Set(tables)].sort();
|
|
20813
|
-
}
|
|
20814
|
-
|
|
20815
|
-
// src/commands/db/commands/db-cleanup.ts
|
|
20816
21115
|
function quoteIdentifier(identifier) {
|
|
20817
21116
|
return `"${identifier.replaceAll('"', '""')}"`;
|
|
20818
21117
|
}
|
|
@@ -22223,11 +22522,18 @@ var setupContext = fromPromise(
|
|
|
22223
22522
|
const dbPackagePath = await getDatabasePackagePath();
|
|
22224
22523
|
const databaseUrl = resolveDatabaseUrl(env2);
|
|
22225
22524
|
const tmpDir = `${repoRoot}/.runa-tmp`;
|
|
22525
|
+
let configTimeoutMs;
|
|
22526
|
+
try {
|
|
22527
|
+
const config = loadRunaConfig2();
|
|
22528
|
+
configTimeoutMs = config.database?.sync?.timeoutMs;
|
|
22529
|
+
} catch {
|
|
22530
|
+
}
|
|
22226
22531
|
return {
|
|
22227
22532
|
repoRoot,
|
|
22228
22533
|
tmpDir,
|
|
22229
22534
|
databaseUrl,
|
|
22230
|
-
dbPackagePath
|
|
22535
|
+
dbPackagePath,
|
|
22536
|
+
configTimeoutMs
|
|
22231
22537
|
};
|
|
22232
22538
|
}
|
|
22233
22539
|
);
|
|
@@ -22372,7 +22678,9 @@ var syncSchema = fromPromise(
|
|
|
22372
22678
|
skipCodegen: ctx.skipCodegen,
|
|
22373
22679
|
reportJson: ctx.reportJson,
|
|
22374
22680
|
invokedAs: "runa db sync",
|
|
22375
|
-
fromProduction: ctx.fromProduction
|
|
22681
|
+
fromProduction: ctx.fromProduction,
|
|
22682
|
+
timeoutMs: ctx.timeoutMs,
|
|
22683
|
+
configTimeoutMs: ctx.configTimeoutMs
|
|
22376
22684
|
});
|
|
22377
22685
|
return {
|
|
22378
22686
|
applied: result.applied,
|
|
@@ -22522,7 +22830,11 @@ z.object({
|
|
|
22522
22830
|
skipCodegen: z.boolean(),
|
|
22523
22831
|
fromProduction: z.boolean(),
|
|
22524
22832
|
autoSnapshot: z.boolean(),
|
|
22525
|
-
reportJson: z.string().optional()
|
|
22833
|
+
reportJson: z.string().optional(),
|
|
22834
|
+
/** Subprocess timeout in milliseconds (from CLI flag) */
|
|
22835
|
+
timeoutMs: z.number().int().positive().optional(),
|
|
22836
|
+
/** Config-level timeout from runa.config.ts (lower priority than timeoutMs and env var) */
|
|
22837
|
+
configTimeoutMs: z.number().int().positive().optional()
|
|
22526
22838
|
});
|
|
22527
22839
|
z.object({
|
|
22528
22840
|
/** Target environment */
|
|
@@ -22558,8 +22870,10 @@ z.object({
|
|
|
22558
22870
|
autoSnapshot: z.boolean().optional(),
|
|
22559
22871
|
reportJson: z.string().optional(),
|
|
22560
22872
|
/** Pre-clean orphan empty tables + unused enums before sync */
|
|
22561
|
-
reconcile: z.boolean().optional()
|
|
22562
|
-
|
|
22873
|
+
reconcile: z.boolean().optional(),
|
|
22874
|
+
/** Subprocess timeout in milliseconds */
|
|
22875
|
+
timeoutMs: z.number().int().positive().optional()
|
|
22876
|
+
}).strict();
|
|
22563
22877
|
|
|
22564
22878
|
// src/commands/db/sync/machine.ts
|
|
22565
22879
|
init_esm_shims();
|
|
@@ -22731,7 +23045,9 @@ var dbSyncMachine = setup({
|
|
|
22731
23045
|
skipCodegen: context.input.skipCodegen ?? false,
|
|
22732
23046
|
fromProduction: context.input.fromProduction ?? false,
|
|
22733
23047
|
autoSnapshot: context.input.autoSnapshot ?? false,
|
|
22734
|
-
reportJson: context.input.reportJson
|
|
23048
|
+
reportJson: context.input.reportJson,
|
|
23049
|
+
timeoutMs: context.input.timeoutMs,
|
|
23050
|
+
configTimeoutMs: event.output.configTimeoutMs
|
|
22735
23051
|
})
|
|
22736
23052
|
})
|
|
22737
23053
|
},
|
|
@@ -22751,7 +23067,9 @@ var dbSyncMachine = setup({
|
|
|
22751
23067
|
skipCodegen: context.input.skipCodegen ?? false,
|
|
22752
23068
|
fromProduction: context.input.fromProduction ?? false,
|
|
22753
23069
|
autoSnapshot: context.input.autoSnapshot ?? false,
|
|
22754
|
-
reportJson: context.input.reportJson
|
|
23070
|
+
reportJson: context.input.reportJson,
|
|
23071
|
+
timeoutMs: context.input.timeoutMs,
|
|
23072
|
+
configTimeoutMs: event.output.configTimeoutMs
|
|
22755
23073
|
})
|
|
22756
23074
|
})
|
|
22757
23075
|
}
|
|
@@ -22971,7 +23289,8 @@ function optionsToMachineInput2(env2, options) {
|
|
|
22971
23289
|
fromProduction: options.fromProduction === true || typeof options.fromProduction === "string",
|
|
22972
23290
|
reportJson: options.reportJson,
|
|
22973
23291
|
targetDir: process.cwd(),
|
|
22974
|
-
reconcile: options.reconcile === true
|
|
23292
|
+
reconcile: options.reconcile === true,
|
|
23293
|
+
timeoutMs: options.timeout
|
|
22975
23294
|
};
|
|
22976
23295
|
}
|
|
22977
23296
|
var dbSyncHelpers = {
|
|
@@ -23163,6 +23482,14 @@ var syncCommand = new Command("sync").description("Sync SQL schemas to database
|
|
|
23163
23482
|
).option(
|
|
23164
23483
|
"--bootstrap",
|
|
23165
23484
|
"Bootstrap mode: Auto-start Supabase with --ignore-health-check if not running"
|
|
23485
|
+
).option(
|
|
23486
|
+
"--timeout <ms>",
|
|
23487
|
+
"Subprocess timeout in ms (default: 180000 local, 600000 production)",
|
|
23488
|
+
(val) => {
|
|
23489
|
+
const n = Number.parseInt(val, 10);
|
|
23490
|
+
if (Number.isNaN(n) || n <= 0) throw new Error("--timeout must be a positive integer");
|
|
23491
|
+
return n;
|
|
23492
|
+
}
|
|
23166
23493
|
).action(
|
|
23167
23494
|
async (env2, options) => await runSyncCommandAction(env2, options)
|
|
23168
23495
|
);
|
|
@@ -23841,9 +24168,9 @@ async function checkDocker() {
|
|
|
23841
24168
|
severity: "error",
|
|
23842
24169
|
message: "Docker is not running or not installed",
|
|
23843
24170
|
fixInstructions: [
|
|
23844
|
-
"Start
|
|
23845
|
-
"
|
|
23846
|
-
"
|
|
24171
|
+
"Start Colima: colima start --cpu 4 --memory 8 --disk 60 --vm-type vz --mount-type virtiofs",
|
|
24172
|
+
"Install Colima: brew install colima docker",
|
|
24173
|
+
"Linux: sudo systemctl start docker"
|
|
23847
24174
|
]
|
|
23848
24175
|
};
|
|
23849
24176
|
}
|
|
@@ -23875,24 +24202,24 @@ async function checkPort(port) {
|
|
|
23875
24202
|
}
|
|
23876
24203
|
function detectSupabasePortsFromConfig() {
|
|
23877
24204
|
const configPath = path11__default.join(process.cwd(), "supabase", "config.toml");
|
|
23878
|
-
const
|
|
24205
|
+
const BASE_PORTS2 = { api: 54321, db: 54322, studio: 54323, inbucket: 54324 };
|
|
23879
24206
|
if (!existsSync(configPath)) {
|
|
23880
|
-
return Object.values(
|
|
24207
|
+
return Object.values(BASE_PORTS2);
|
|
23881
24208
|
}
|
|
23882
24209
|
try {
|
|
23883
24210
|
const content = readFileSync(configPath, "utf-8");
|
|
23884
24211
|
const ports = [];
|
|
23885
24212
|
const apiMatch = content.match(/\[api\][\s\S]*?port\s*=\s*(\d+)/);
|
|
23886
|
-
ports.push(apiMatch ? Number.parseInt(apiMatch[1], 10) :
|
|
24213
|
+
ports.push(apiMatch ? Number.parseInt(apiMatch[1], 10) : BASE_PORTS2.api);
|
|
23887
24214
|
const dbMatch = content.match(/\[db\][\s\S]*?port\s*=\s*(\d+)/);
|
|
23888
|
-
ports.push(dbMatch ? Number.parseInt(dbMatch[1], 10) :
|
|
24215
|
+
ports.push(dbMatch ? Number.parseInt(dbMatch[1], 10) : BASE_PORTS2.db);
|
|
23889
24216
|
const studioMatch = content.match(/\[studio\][\s\S]*?port\s*=\s*(\d+)/);
|
|
23890
|
-
ports.push(studioMatch ? Number.parseInt(studioMatch[1], 10) :
|
|
24217
|
+
ports.push(studioMatch ? Number.parseInt(studioMatch[1], 10) : BASE_PORTS2.studio);
|
|
23891
24218
|
const inbucketMatch = content.match(/\[inbucket\][\s\S]*?port\s*=\s*(\d+)/);
|
|
23892
|
-
ports.push(inbucketMatch ? Number.parseInt(inbucketMatch[1], 10) :
|
|
24219
|
+
ports.push(inbucketMatch ? Number.parseInt(inbucketMatch[1], 10) : BASE_PORTS2.inbucket);
|
|
23893
24220
|
return ports;
|
|
23894
24221
|
} catch {
|
|
23895
|
-
return Object.values(
|
|
24222
|
+
return Object.values(BASE_PORTS2);
|
|
23896
24223
|
}
|
|
23897
24224
|
}
|
|
23898
24225
|
async function checkSupabasePorts() {
|
|
@@ -23959,68 +24286,56 @@ function diagnoseInitFailure(errorMessage) {
|
|
|
23959
24286
|
|
|
23960
24287
|
// src/utils/port-allocator.ts
|
|
23961
24288
|
init_esm_shims();
|
|
23962
|
-
var BASE_PORT = 54321;
|
|
23963
|
-
var PORTS_PER_SLOT = 10;
|
|
23964
|
-
var TOTAL_SLOTS = 100;
|
|
23965
|
-
function calculatePortOffset(projectPath) {
|
|
23966
|
-
const normalizedPath = path11__default.resolve(projectPath);
|
|
23967
|
-
const hash = createHash("md5").update(normalizedPath).digest("hex");
|
|
23968
|
-
return parseInt(hash.slice(0, 8), 16) % TOTAL_SLOTS;
|
|
23969
|
-
}
|
|
23970
24289
|
function getSupabasePorts(projectPath) {
|
|
23971
24290
|
const offset = calculatePortOffset(projectPath);
|
|
23972
|
-
|
|
23973
|
-
return {
|
|
23974
|
-
api: basePort + 0,
|
|
23975
|
-
db: basePort + 1,
|
|
23976
|
-
studio: basePort + 2,
|
|
23977
|
-
inbucket: basePort + 3,
|
|
23978
|
-
auth: basePort + 4,
|
|
23979
|
-
rest: basePort + 5,
|
|
23980
|
-
realtime: basePort + 6,
|
|
23981
|
-
storage: basePort + 7,
|
|
23982
|
-
shadow: basePort + 8
|
|
23983
|
-
};
|
|
24291
|
+
return getPortsWithOffset(offset);
|
|
23984
24292
|
}
|
|
23985
|
-
function
|
|
23986
|
-
const ports = getSupabasePorts(projectPath);
|
|
24293
|
+
async function updateSupabaseConfigPortsSafe(projectPath) {
|
|
23987
24294
|
const configPath = path11__default.join(projectPath, "supabase", "config.toml");
|
|
24295
|
+
const resolved = await resolveAvailablePorts(projectPath);
|
|
24296
|
+
if (!resolved) {
|
|
24297
|
+
const ports = getSupabasePorts(projectPath);
|
|
24298
|
+
return { updated: false, ports, configPath, retried: false };
|
|
24299
|
+
}
|
|
23988
24300
|
if (!existsSync(configPath)) {
|
|
23989
|
-
return { updated: false, ports, configPath };
|
|
24301
|
+
return { updated: false, ports: resolved.ports, configPath, retried: resolved.retried };
|
|
23990
24302
|
}
|
|
24303
|
+
const updated = writePortsToConfig(configPath, resolved.ports);
|
|
24304
|
+
return { updated, ports: resolved.ports, configPath, retried: resolved.retried };
|
|
24305
|
+
}
|
|
24306
|
+
function writePortsToConfig(configPath, ports) {
|
|
23991
24307
|
let content = readFileSync(configPath, "utf-8");
|
|
23992
|
-
let
|
|
24308
|
+
let changed = false;
|
|
23993
24309
|
const portMappings = [
|
|
23994
24310
|
{ section: "api", key: "port", value: ports.api },
|
|
23995
24311
|
{ section: "db", key: "port", value: ports.db },
|
|
23996
24312
|
{ section: "db", key: "shadow_port", value: ports.shadow },
|
|
23997
24313
|
{ section: "studio", key: "port", value: ports.studio },
|
|
23998
|
-
{ section: "inbucket", key: "port", value: ports.inbucket }
|
|
23999
|
-
{ section: "auth", key: "port", value: ports.auth }
|
|
24000
|
-
// Note: rest, realtime, storage ports are internal and not in config.toml
|
|
24314
|
+
{ section: "inbucket", key: "port", value: ports.inbucket }
|
|
24001
24315
|
];
|
|
24002
24316
|
for (const { section, key, value } of portMappings) {
|
|
24003
24317
|
const sectionRegex = new RegExp(`(\\[${section}\\][^\\[]*?)(${key}\\s*=\\s*)(\\d+)`, "gs");
|
|
24318
|
+
sectionRegex.lastIndex = 0;
|
|
24004
24319
|
const newContent = content.replace(sectionRegex, (match, prefix, keyPart, oldValue) => {
|
|
24005
24320
|
if (parseInt(oldValue, 10) !== value) {
|
|
24006
|
-
|
|
24321
|
+
changed = true;
|
|
24007
24322
|
return `${prefix}${keyPart}${value}`;
|
|
24008
24323
|
}
|
|
24009
24324
|
return match;
|
|
24010
24325
|
});
|
|
24011
24326
|
content = newContent;
|
|
24012
24327
|
}
|
|
24013
|
-
if (
|
|
24328
|
+
if (changed) {
|
|
24014
24329
|
writeFileSync(configPath, content, "utf-8");
|
|
24015
24330
|
}
|
|
24016
|
-
return
|
|
24331
|
+
return changed;
|
|
24017
24332
|
}
|
|
24018
24333
|
function getPortAllocationSummary(projectPath) {
|
|
24019
24334
|
const ports = getSupabasePorts(projectPath);
|
|
24020
24335
|
const offset = calculatePortOffset(projectPath);
|
|
24021
24336
|
return [
|
|
24022
24337
|
`Port allocation for: ${path11__default.basename(projectPath)}`,
|
|
24023
|
-
` Slot: ${offset} (hash-based)`,
|
|
24338
|
+
` Slot: ${offset / 10} (hash-based, offset=${offset})`,
|
|
24024
24339
|
` API: ${ports.api}`,
|
|
24025
24340
|
` DB: ${ports.db}`,
|
|
24026
24341
|
` Studio: ${ports.studio}`,
|
|
@@ -24178,11 +24493,14 @@ var startCommand = new Command("start").description("Start local Supabase with a
|
|
|
24178
24493
|
logger16.info(output3);
|
|
24179
24494
|
return;
|
|
24180
24495
|
}
|
|
24181
|
-
const portResult =
|
|
24496
|
+
const portResult = await updateSupabaseConfigPortsSafe(projectRoot);
|
|
24182
24497
|
if (portResult.updated) {
|
|
24183
24498
|
logger16.info("\u{1F522} Port allocation updated for this project:");
|
|
24184
24499
|
logger16.info(getPortAllocationSummary(projectRoot));
|
|
24185
24500
|
}
|
|
24501
|
+
if (portResult.retried) {
|
|
24502
|
+
logger16.warn("\u26A0\uFE0F Hash-based port slot was occupied; using fallback slot.");
|
|
24503
|
+
}
|
|
24186
24504
|
logStartModeInfo(logger16, options, detectionResult, finalExcluded);
|
|
24187
24505
|
const result = await dbStart({
|
|
24188
24506
|
exclude: finalExcluded,
|
|
@@ -24269,11 +24587,14 @@ var resetCommand = new Command("reset").description("Reset local database with s
|
|
|
24269
24587
|
try {
|
|
24270
24588
|
logger16.section("Database Reset");
|
|
24271
24589
|
const projectRoot = process.cwd();
|
|
24272
|
-
const portResult =
|
|
24590
|
+
const portResult = await updateSupabaseConfigPortsSafe(projectRoot);
|
|
24273
24591
|
if (portResult.updated) {
|
|
24274
24592
|
logger16.info("\u{1F522} Port allocation updated for this project:");
|
|
24275
24593
|
logger16.info(getPortAllocationSummary(projectRoot));
|
|
24276
24594
|
}
|
|
24595
|
+
if (portResult.retried) {
|
|
24596
|
+
logger16.warn("\u26A0\uFE0F Hash-based port slot was occupied; using fallback slot.");
|
|
24597
|
+
}
|
|
24277
24598
|
logger16.step("Stopping Supabase...", 1);
|
|
24278
24599
|
const resetResult = await dbReset({
|
|
24279
24600
|
env: "local",
|
|
@@ -25123,9 +25444,15 @@ async function readSeedMetadataFile(metadataPath) {
|
|
|
25123
25444
|
}
|
|
25124
25445
|
}
|
|
25125
25446
|
var seedMetadataCommand = new Command("metadata").description("Extract seed metadata (primary root anchor) for CI workflows").option("--file <path>", "Path to metadata JSON file (optional; prefer --from-db)").option("--from-db", "Infer metadata from applied database state (preferred)", false).option("--github-output", "Write outputs to $GITHUB_OUTPUT (GitHub Actions)", false).action(async (options) => {
|
|
25126
|
-
const
|
|
25127
|
-
const shouldUseDb = options.fromDb === true || !
|
|
25128
|
-
|
|
25447
|
+
const rawMetadataPath = options.file?.trim();
|
|
25448
|
+
const shouldUseDb = options.fromDb === true || !rawMetadataPath;
|
|
25449
|
+
let out;
|
|
25450
|
+
if (shouldUseDb) {
|
|
25451
|
+
out = await inferPrimaryIdsFromDatabase();
|
|
25452
|
+
} else {
|
|
25453
|
+
const validatedPath = validateUserFilePath(rawMetadataPath, process.cwd());
|
|
25454
|
+
out = await readSeedMetadataFile(validatedPath);
|
|
25455
|
+
}
|
|
25129
25456
|
if (options.githubOutput === true) {
|
|
25130
25457
|
await writeGitHubOutput({
|
|
25131
25458
|
...out.primary?.root?.id ? { root_id: out.primary.root.id } : {},
|
|
@@ -26908,9 +27235,7 @@ async function getVercelRootDirectory() {
|
|
|
26908
27235
|
init_local_supabase();
|
|
26909
27236
|
var ERROR_MESSAGES3 = {
|
|
26910
27237
|
INVALID_PATH: "Invalid working directory path",
|
|
26911
|
-
PATH_TRAVERSAL: "Working directory path validation failed"
|
|
26912
|
-
APP_NOT_FOUND: "App directory not found"
|
|
26913
|
-
};
|
|
27238
|
+
PATH_TRAVERSAL: "Working directory path validation failed"};
|
|
26914
27239
|
function sanitizeErrorMessage(message) {
|
|
26915
27240
|
if (!message || typeof message !== "string") {
|
|
26916
27241
|
return "Unknown error";
|
|
@@ -26970,23 +27295,6 @@ function validateCustomWorkingDir(cwdPath, projectRoot) {
|
|
|
26970
27295
|
}
|
|
26971
27296
|
return absolutePath;
|
|
26972
27297
|
}
|
|
26973
|
-
function validateAppDirectory2(appName, projectRoot) {
|
|
26974
|
-
if (containsPathTraversal3(appName) || appName.includes("/") || appName.includes("\\")) {
|
|
26975
|
-
throw new CLIError(ERROR_MESSAGES3.PATH_TRAVERSAL, "ENV_PULL_PATH_TRAVERSAL");
|
|
26976
|
-
}
|
|
26977
|
-
const appsDir = resolve(projectRoot, "apps");
|
|
26978
|
-
const appDir = resolve(appsDir, appName);
|
|
26979
|
-
if (!isPathWithinBase2(appDir, appsDir)) {
|
|
26980
|
-
throw new CLIError(ERROR_MESSAGES3.PATH_TRAVERSAL, "ENV_PULL_PATH_TRAVERSAL");
|
|
26981
|
-
}
|
|
26982
|
-
if (!existsSync(appDir)) {
|
|
26983
|
-
throw new CLIError(ERROR_MESSAGES3.APP_NOT_FOUND, "ENV_PULL_APP_NOT_FOUND", [
|
|
26984
|
-
`Available apps: ${getAvailableApps().join(", ") || "none"}`,
|
|
26985
|
-
"Specify full path with --cwd instead"
|
|
26986
|
-
]);
|
|
26987
|
-
}
|
|
26988
|
-
return appDir;
|
|
26989
|
-
}
|
|
26990
27298
|
var LOCAL_BOOTSTRAP_REQUIRED_KEYS = [
|
|
26991
27299
|
"LOCAL_SUPABASE_HOST",
|
|
26992
27300
|
"LOCAL_SUPABASE_API_PORT",
|
|
@@ -27083,22 +27391,18 @@ function resolveVercelAuth(workDir, options, logger16) {
|
|
|
27083
27391
|
}
|
|
27084
27392
|
function resolveWorkingDir(options) {
|
|
27085
27393
|
const projectRoot = process.cwd();
|
|
27394
|
+
if (options.app) {
|
|
27395
|
+
console.warn(
|
|
27396
|
+
`\u26A0\uFE0F --app is deprecated. Environment files are managed at the monorepo root only.
|
|
27397
|
+
Turbo auto-propagates root .env.* files to all workspaces.
|
|
27398
|
+
Writing to project root instead of apps/${options.app}/.`
|
|
27399
|
+
);
|
|
27400
|
+
}
|
|
27086
27401
|
if (options.cwd) {
|
|
27087
27402
|
return validateCustomWorkingDir(options.cwd, projectRoot);
|
|
27088
27403
|
}
|
|
27089
|
-
if (options.app) {
|
|
27090
|
-
return validateAppDirectory2(options.app, projectRoot);
|
|
27091
|
-
}
|
|
27092
27404
|
return projectRoot;
|
|
27093
27405
|
}
|
|
27094
|
-
function getAvailableApps() {
|
|
27095
|
-
const appsDir = resolve(process.cwd(), "apps");
|
|
27096
|
-
if (!existsSync(appsDir)) return [];
|
|
27097
|
-
return readdirSync(appsDir).filter((name) => {
|
|
27098
|
-
const fullPath = resolve(appsDir, name);
|
|
27099
|
-
return statSync(fullPath).isDirectory();
|
|
27100
|
-
});
|
|
27101
|
-
}
|
|
27102
27406
|
function getOutputPath(workDir, environment) {
|
|
27103
27407
|
return resolve(workDir, `.env.${environment}`);
|
|
27104
27408
|
}
|
|
@@ -29331,7 +29635,11 @@ async function listArchivedHotfixes(input3 = {}) {
|
|
|
29331
29635
|
const hotfixes = [];
|
|
29332
29636
|
for (const file of files.filter((f) => f.endsWith(".json"))) {
|
|
29333
29637
|
try {
|
|
29334
|
-
const
|
|
29638
|
+
const filePath = path11__default.join(archiveDir, file);
|
|
29639
|
+
if (!isPathContained(archiveDir, filePath)) {
|
|
29640
|
+
continue;
|
|
29641
|
+
}
|
|
29642
|
+
const content = await readFile(filePath, "utf-8");
|
|
29335
29643
|
hotfixes.push(HotfixMetadataSchema.parse(JSON.parse(content)));
|
|
29336
29644
|
} catch {
|
|
29337
29645
|
}
|
|
@@ -30668,10 +30976,10 @@ Fix:
|
|
|
30668
30976
|
Docker is not running or not installed.
|
|
30669
30977
|
|
|
30670
30978
|
Fix:
|
|
30671
|
-
1. Start
|
|
30672
|
-
2.
|
|
30673
|
-
3.
|
|
30674
|
-
4.
|
|
30979
|
+
1. Start Colima: colima start --cpu 4 --memory 8 --vm-type vz --mount-type virtiofs
|
|
30980
|
+
2. Install Colima: brew install colima docker
|
|
30981
|
+
3. Linux: sudo systemctl start docker
|
|
30982
|
+
4. Verify Docker is running: docker ps
|
|
30675
30983
|
`,
|
|
30676
30984
|
relatedCommands: ["runa check"]
|
|
30677
30985
|
},
|
|
@@ -30797,7 +31105,7 @@ init_esm_shims();
|
|
|
30797
31105
|
|
|
30798
31106
|
// src/constants/versions.ts
|
|
30799
31107
|
init_esm_shims();
|
|
30800
|
-
var COMPATIBLE_TEMPLATES_VERSION = "0.5.
|
|
31108
|
+
var COMPATIBLE_TEMPLATES_VERSION = "0.5.56";
|
|
30801
31109
|
var TEMPLATES_PACKAGE_NAME = "@r06-dev/runa-templates";
|
|
30802
31110
|
var GITHUB_PACKAGES_REGISTRY = "https://npm.pkg.github.com";
|
|
30803
31111
|
|
|
@@ -31761,21 +32069,28 @@ function collectRouteInfo(relativePath, code, _verbose) {
|
|
|
31761
32069
|
if (isExcludedScope(relativePath)) {
|
|
31762
32070
|
return;
|
|
31763
32071
|
}
|
|
31764
|
-
|
|
32072
|
+
const isPage = isPageFile(relativePath);
|
|
32073
|
+
const isLayout = isLayoutFile(relativePath);
|
|
32074
|
+
const isApiRoute = isApiRouteFile(relativePath);
|
|
32075
|
+
const isMiddleware = isMiddlewareFile(relativePath);
|
|
32076
|
+
if (isPage) {
|
|
31765
32077
|
collectPageInfo(relativePath, code);
|
|
31766
32078
|
}
|
|
31767
|
-
if (
|
|
32079
|
+
if (isLayout) {
|
|
31768
32080
|
collectLayoutInfo(relativePath, code);
|
|
31769
32081
|
}
|
|
31770
|
-
if (
|
|
32082
|
+
if (isApiRoute) {
|
|
31771
32083
|
collectApiRouteInfo(relativePath, code);
|
|
31772
32084
|
}
|
|
31773
|
-
if (
|
|
32085
|
+
if (isMiddleware) {
|
|
31774
32086
|
collectAuthBoundaries(relativePath, code);
|
|
31775
32087
|
}
|
|
31776
32088
|
if (hasMachineDefinition(code)) {
|
|
31777
32089
|
collectMachineDefinition(relativePath, code);
|
|
31778
32090
|
}
|
|
32091
|
+
if (!isPage && !isLayout && !isApiRoute && !isMiddleware) {
|
|
32092
|
+
collectComponentInfo(relativePath, code);
|
|
32093
|
+
}
|
|
31779
32094
|
}
|
|
31780
32095
|
function emptyResult(filePath) {
|
|
31781
32096
|
return {
|
|
@@ -32216,6 +32531,12 @@ async function preprocessFile(filePath, repoRoot, options) {
|
|
|
32216
32531
|
const attrs = extractInjectedAttributes(code);
|
|
32217
32532
|
const machineIdMatches = code.matchAll(/data-machine-id="([^"]+)"/g);
|
|
32218
32533
|
const machineIds = [...new Set([...machineIdMatches].map((m) => m[1]).filter(Boolean))];
|
|
32534
|
+
const resolutionDetails = machineIds.map((machineId) => ({
|
|
32535
|
+
filePath: relativePath,
|
|
32536
|
+
machineRef: machineId,
|
|
32537
|
+
resolvedId: machineId,
|
|
32538
|
+
source: "explicit"
|
|
32539
|
+
}));
|
|
32219
32540
|
if (machineIds.length === 0 && mightContainMachineHooks(code)) {
|
|
32220
32541
|
if (options.verbose) {
|
|
32221
32542
|
console.log(` [recovery] Marker found but no attributes in ${relativePath}, re-injecting`);
|
|
@@ -32234,7 +32555,7 @@ async function preprocessFile(filePath, repoRoot, options) {
|
|
|
32234
32555
|
machineIds,
|
|
32235
32556
|
changed: false,
|
|
32236
32557
|
...attrs,
|
|
32237
|
-
resolutionDetails
|
|
32558
|
+
resolutionDetails
|
|
32238
32559
|
}
|
|
32239
32560
|
};
|
|
32240
32561
|
}
|
|
@@ -35374,15 +35695,15 @@ function printActionsNeeded(logger16, actions) {
|
|
|
35374
35695
|
);
|
|
35375
35696
|
}
|
|
35376
35697
|
function findRepoRoot3(startDir) {
|
|
35377
|
-
const { existsSync:
|
|
35698
|
+
const { existsSync: existsSync53, readFileSync: readFileSync29 } = __require("fs");
|
|
35378
35699
|
const { join: join23, dirname: dirname5 } = __require("path");
|
|
35379
35700
|
let current = startDir;
|
|
35380
35701
|
while (current !== dirname5(current)) {
|
|
35381
|
-
if (
|
|
35702
|
+
if (existsSync53(join23(current, "turbo.json"))) {
|
|
35382
35703
|
return current;
|
|
35383
35704
|
}
|
|
35384
35705
|
const pkgPath = join23(current, "package.json");
|
|
35385
|
-
if (
|
|
35706
|
+
if (existsSync53(pkgPath)) {
|
|
35386
35707
|
try {
|
|
35387
35708
|
const pkg = JSON.parse(readFileSync29(pkgPath, "utf-8"));
|
|
35388
35709
|
if (pkg.workspaces) {
|
|
@@ -35450,10 +35771,10 @@ function generateReportOutput(output3, isJsonMode) {
|
|
|
35450
35771
|
};
|
|
35451
35772
|
}
|
|
35452
35773
|
function validateRunaRepo(repoRoot) {
|
|
35453
|
-
const { existsSync:
|
|
35774
|
+
const { existsSync: existsSync53 } = __require("fs");
|
|
35454
35775
|
const { join: join23 } = __require("path");
|
|
35455
35776
|
const templateDir = join23(repoRoot, "packages/runa-templates/templates");
|
|
35456
|
-
if (!
|
|
35777
|
+
if (!existsSync53(templateDir)) {
|
|
35457
35778
|
throw new CLIError("template-check is a runa-repo only command", "NOT_RUNA_REPO", [
|
|
35458
35779
|
"This command compares runa-repo with pj-repo templates",
|
|
35459
35780
|
"It should only be run in the runa repository",
|