@runa-ai/runa-cli 0.5.53 → 0.5.58
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/build/machine.d.ts +6 -4
- package/dist/commands/build/machine.d.ts.map +1 -1
- package/dist/commands/ci/machine/actors/db/sync-schema.d.ts.map +1 -1
- package/dist/commands/ci/utils/app-runtime.d.ts +2 -0
- package/dist/commands/ci/utils/app-runtime.d.ts.map +1 -1
- package/dist/commands/db/commands/db-drizzle.d.ts.map +1 -1
- package/dist/commands/db/commands/db-lifecycle.d.ts.map +1 -1
- package/dist/commands/db/commands/db-seed-metadata.d.ts.map +1 -1
- package/dist/commands/db/sync/actors.d.ts +1 -0
- package/dist/commands/db/sync/actors.d.ts.map +1 -1
- package/dist/commands/db/sync/contract.d.ts +4 -1
- package/dist/commands/db/sync/contract.d.ts.map +1 -1
- package/dist/commands/db/sync/machine.d.ts +1 -0
- package/dist/commands/db/sync/machine.d.ts.map +1 -1
- package/dist/commands/db/types.d.ts +2 -0
- package/dist/commands/db/types.d.ts.map +1 -1
- package/dist/commands/db/utils/table-registry.d.ts +4 -0
- package/dist/commands/db/utils/table-registry.d.ts.map +1 -1
- package/dist/commands/db/utils/table-source-classifier.d.ts +28 -0
- package/dist/commands/db/utils/table-source-classifier.d.ts.map +1 -0
- package/dist/commands/dev/commands/dev.d.ts +2 -0
- package/dist/commands/dev/commands/dev.d.ts.map +1 -1
- package/dist/commands/dev/contract.d.ts +5 -0
- package/dist/commands/dev/contract.d.ts.map +1 -1
- package/dist/commands/dev/machine.d.ts +21 -7
- package/dist/commands/dev/machine.d.ts.map +1 -1
- package/dist/commands/env/commands/env-pull.d.ts.map +1 -1
- package/dist/commands/env/constants/local-supabase.d.ts +4 -2
- package/dist/commands/env/constants/local-supabase.d.ts.map +1 -1
- package/dist/commands/hotfix/metadata.d.ts.map +1 -1
- package/dist/commands/inject-test-attrs/processor-utils.d.ts.map +1 -1
- package/dist/commands/inject-test-attrs/processor.d.ts.map +1 -1
- package/dist/constants/versions.d.ts +1 -1
- package/dist/errors/catalog.d.ts +8 -0
- package/dist/errors/catalog.d.ts.map +1 -1
- package/dist/index.js +857 -432
- package/dist/utils/path-security.d.ts +18 -0
- package/dist/utils/path-security.d.ts.map +1 -1
- package/dist/utils/port-allocator.d.ts +34 -37
- package/dist/utils/port-allocator.d.ts.map +1 -1
- package/package.json +8 -8
package/dist/index.js
CHANGED
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
import { createRequire } from 'module';
|
|
3
3
|
import * as path11 from 'path';
|
|
4
|
-
import path11__default, { join, dirname, resolve,
|
|
4
|
+
import path11__default, { join, dirname, resolve, isAbsolute, relative, sep, basename, normalize } from 'path';
|
|
5
5
|
import { fileURLToPath } from 'url';
|
|
6
6
|
import * as fs5 from 'fs';
|
|
7
|
-
import fs5__default, { existsSync, readFileSync, readdirSync, mkdtempSync, writeFileSync, mkdirSync, copyFileSync, createWriteStream, statSync,
|
|
7
|
+
import fs5__default, { existsSync, readFileSync, unlinkSync, rmSync, readdirSync, mkdtempSync, writeFileSync, mkdirSync, copyFileSync, createWriteStream, statSync, realpathSync, promises, lstatSync, accessSync, constants, chmodSync } from 'fs';
|
|
8
8
|
import { execSync, spawnSync, execFileSync, exec, spawn } from 'child_process';
|
|
9
|
-
import { createCLILogger, cacheClear, CacheClearOutputSchema, CLIError, cachePrune, CachePruneOutputSchema, cacheStats, CacheStatsOutputSchema, cacheList, CacheListOutputSchema, cacheInvalidate, CacheInvalidateOutputSchema, syncFromProduction, dbGenerateDiagram, DbDiagramGenerateOutputSchema, createDbSnapshot, syncDatabase, emitDbPushFailureCapsule, emitDbAnnotations, writeDbPushStepSummary, exportDbReportJson, DbSyncOutputSchema, databasePaths, detectRequiredServices, formatDetectionResults, dbStart, DbLifecycleStartOutputSchema, dbStop, DbLifecycleStopOutputSchema, dbReset, DbLifecycleResetOutputSchema, dbValidateSchemas, DbSchemaValidateOutputSchema, DbSchemaRisksOutputSchema, dbDetectSchemaRisks, dbApplySchemas, DbSchemaApplyOutputSchema, dbGenerateTypes, DbSchemaGenerateOutputSchema, extractSchemaFilter, dbSeedInit, DbSeedInitOutputSchema, dbSeedValidate, DbSeedValidateOutputSchema, dbSeedGenerate, DbSeedGenerateOutputSchema, dbVerifySeeds, DbSeedVerifyOutputSchema, DbSnapshotCreateOutputSchema, restoreDbSnapshot, DbSnapshotRestoreOutputSchema, listDbSnapshots, DbSnapshotListOutputSchema, dbGeneratePgTapTests, DbTestGenOutputSchema, dbUpdateGoldenRecord, DbTestUpdateGoldenOutputSchema, repairRunaConfig, detectExistingInitConfig, initProject, validateInitResult, linkCliGlobally, LinkCliOutputSchema, unlinkCliGlobally, UnlinkCliOutputSchema, checkRepoStatus, CheckRepoStatusOutputSchema, enableTelemetry, disableTelemetry, getTelemetryStatus, uploadTelemetry, TelemetryUploadOutputSchema, runTest, TestRunOutputSchema, runTestService, TestServiceOutputSchema, runTestIntegration, TestIntegrationOutputSchema, runTestStatic, TestStaticOutputSchema, generateOwaspTop10Tests, TestOwaspGenerateOutputSchema, updateGoldenRecord, generateE2ETests, generateSecurityTests, generateUnitTests, generateApiTests, generateComponentTests, generateE2EScaffold, validateConfig, ValidateConfigOutputSchema, deploySchemaToProduction, WorkflowNotifyOutputSchema, devopsSync, workflowSync, 
validateInfrastructure, emitWorkflowValidateFailureCapsule, emitWorkflowAnnotations, writeWorkflowValidateStepSummary, exportWorkflowReportJson, WorkflowValidateInfrastructureOutputSchema, createSuccessEnvelopeSchema, CLI_CONTRACT_VERSION, runChecks, RunCheckOutputSchema, formatDuration as formatDuration$1, GITHUB_API, loadRunaConfig, getClassificationForProfile, loadRunaConfigOrThrow, recordSchemaAudit, RecordSchemaAuditOutputSchema, createBackup, CreateBackupOutputSchema, listBackups, ListBackupsOutputSchema, getBackupMetadata, restoreBackup, RestoreBackupOutputSchema, deleteBackup, DeleteBackupOutputSchema, detectSchemaNames,
|
|
9
|
+
import { createCLILogger, cacheClear, CacheClearOutputSchema, CLIError, cachePrune, CachePruneOutputSchema, cacheStats, CacheStatsOutputSchema, cacheList, CacheListOutputSchema, cacheInvalidate, CacheInvalidateOutputSchema, syncFromProduction, SUPABASE_SYSTEM_SCHEMAS, dbGenerateDiagram, DbDiagramGenerateOutputSchema, createDbSnapshot, syncDatabase, emitDbPushFailureCapsule, emitDbAnnotations, writeDbPushStepSummary, exportDbReportJson, DbSyncOutputSchema, databasePaths, detectRequiredServices, formatDetectionResults, dbStart, DbLifecycleStartOutputSchema, dbStop, DbLifecycleStopOutputSchema, dbReset, DbLifecycleResetOutputSchema, dbValidateSchemas, DbSchemaValidateOutputSchema, DbSchemaRisksOutputSchema, dbDetectSchemaRisks, dbApplySchemas, DbSchemaApplyOutputSchema, dbGenerateTypes, DbSchemaGenerateOutputSchema, extractSchemaFilter, dbSeedInit, DbSeedInitOutputSchema, dbSeedValidate, DbSeedValidateOutputSchema, dbSeedGenerate, DbSeedGenerateOutputSchema, dbVerifySeeds, DbSeedVerifyOutputSchema, DbSnapshotCreateOutputSchema, restoreDbSnapshot, DbSnapshotRestoreOutputSchema, listDbSnapshots, DbSnapshotListOutputSchema, dbGeneratePgTapTests, DbTestGenOutputSchema, dbUpdateGoldenRecord, DbTestUpdateGoldenOutputSchema, repairRunaConfig, detectExistingInitConfig, initProject, validateInitResult, linkCliGlobally, LinkCliOutputSchema, unlinkCliGlobally, UnlinkCliOutputSchema, checkRepoStatus, CheckRepoStatusOutputSchema, enableTelemetry, disableTelemetry, getTelemetryStatus, uploadTelemetry, TelemetryUploadOutputSchema, runTest, TestRunOutputSchema, runTestService, TestServiceOutputSchema, runTestIntegration, TestIntegrationOutputSchema, runTestStatic, TestStaticOutputSchema, generateOwaspTop10Tests, TestOwaspGenerateOutputSchema, updateGoldenRecord, generateE2ETests, generateSecurityTests, generateUnitTests, generateApiTests, generateComponentTests, generateE2EScaffold, validateConfig, ValidateConfigOutputSchema, deploySchemaToProduction, WorkflowNotifyOutputSchema, 
devopsSync, workflowSync, validateInfrastructure, emitWorkflowValidateFailureCapsule, emitWorkflowAnnotations, writeWorkflowValidateStepSummary, exportWorkflowReportJson, WorkflowValidateInfrastructureOutputSchema, createSuccessEnvelopeSchema, CLI_CONTRACT_VERSION, runChecks, RunCheckOutputSchema, formatDuration as formatDuration$1, GITHUB_API, loadRunaConfig, getClassificationForProfile, BASE_PORTS, loadRunaConfigOrThrow, recordSchemaAudit, RecordSchemaAuditOutputSchema, createBackup, CreateBackupOutputSchema, listBackups, ListBackupsOutputSchema, getBackupMetadata, restoreBackup, RestoreBackupOutputSchema, deleteBackup, DeleteBackupOutputSchema, detectSchemaNames, resolveAvailablePorts, calculatePortOffset, dbSeedApply, writeDbSeedStepSummary, DbSeedApplyOutputSchema, emitDbSeedFailureCapsule, syncEnvironment, EnvSyncOutputSchema, detectDatabasePackage, findProjectRoot as findProjectRoot$1, TelemetryEnableOutputSchema, TelemetryDisableOutputSchema, TelemetryStatusOutputSchema, workflowNotify, DevOpsSyncOutputSchema, WorkflowSyncOutputSchema, formatCLIError, getStatusIcon as getStatusIcon$1, findWorkspaceRoot as findWorkspaceRoot$1, checkExtensionConfig, getPortsWithOffset, UpgradeTransaction, readRunaVersion, syncTemplates, SyncOutputSchema, DATABASE_PACKAGE_CANDIDATES, ErrorEnvelopeSchema, preCheckSync, findConflictFiles, TestUnitGenOutputSchema, TestE2EGenerateOutputSchema, TestSecurityGenOutputSchema, TestApiGenOutputSchema, TestComponentGenOutputSchema } from '@runa-ai/runa';
|
|
10
10
|
import { z } from 'zod';
|
|
11
11
|
import fs9, { mkdir, writeFile, appendFile, readFile, rm, stat, realpath, cp, readdir, lstat } from 'fs/promises';
|
|
12
12
|
import { promisify } from 'util';
|
|
@@ -17,14 +17,14 @@ import { minimatch } from 'minimatch';
|
|
|
17
17
|
import { Command } from 'commander';
|
|
18
18
|
import { fromPromise, setup, assign, createActor } from 'xstate';
|
|
19
19
|
import { execa } from 'execa';
|
|
20
|
+
import net, { isIP } from 'net';
|
|
20
21
|
import chalk from 'chalk';
|
|
21
22
|
import { config } from '@dotenvx/dotenvx';
|
|
22
23
|
import { parse } from 'dotenv';
|
|
23
24
|
import { expand } from 'dotenv-expand';
|
|
24
25
|
import { resolve4 } from 'dns/promises';
|
|
25
|
-
import { isIP } from 'net';
|
|
26
26
|
import postgres from 'postgres';
|
|
27
|
-
import crypto, { randomBytes
|
|
27
|
+
import crypto, { randomBytes } from 'crypto';
|
|
28
28
|
import os, { tmpdir } from 'os';
|
|
29
29
|
import { introspectDatabase, HonoRouteAnalyzer } from '@runa-ai/runa/test-generators';
|
|
30
30
|
import { isTable, getTableUniqueName, getTableName } from 'drizzle-orm';
|
|
@@ -33,7 +33,7 @@ import { createJiti } from 'jiti';
|
|
|
33
33
|
import ora from 'ora';
|
|
34
34
|
import { stdout, stdin } from 'process';
|
|
35
35
|
import * as readline from 'readline/promises';
|
|
36
|
-
import { clearInjectionRegistry, clearUnifiedRegistry, postProcessRegistries, isPageFile,
|
|
36
|
+
import { clearInjectionRegistry, clearUnifiedRegistry, postProcessRegistries, isPageFile, isLayoutFile, isApiRouteFile, isMiddlewareFile, collectPageInfo, collectLayoutInfo, collectApiRouteInfo, collectAuthBoundaries, hasMachineDefinition, collectMachineDefinition, collectComponentInfo, createReadAndParseFile, createResolveImportPath, transformSync, getInjectionRegistry, buildManifest, getAllMachineDefinitions, generateSelectorTypeScript, getUnifiedRegistry, buildMachineLinks, registerInjection } from '@runa-ai/runa-xstate-test-plugin/standalone';
|
|
37
37
|
import { listSessions, formatDuration as formatDuration$2, cleanupStaleSessions, removeSession, isSessionCheckDisabled, getCurrentSessionId, checkActiveSessions, createSession, addActivity, checkConflicts, formatConflictDetails } from '@runa-ai/runa/session';
|
|
38
38
|
import { render, Box, Text } from 'ink';
|
|
39
39
|
import Spinner from 'ink-spinner';
|
|
@@ -124,7 +124,7 @@ function extractPort(rawUrl) {
|
|
|
124
124
|
}
|
|
125
125
|
}
|
|
126
126
|
function parseTomlPort(content, section, key) {
|
|
127
|
-
const sectionRegex = new RegExp(`\\[${section}\\]([\\s\\S]*?)(?=\\n\\[|$)
|
|
127
|
+
const sectionRegex = new RegExp(`\\[${section}\\]([\\s\\S]*?)(?=\\n\\[|$)`);
|
|
128
128
|
const sectionMatch = sectionRegex.exec(content);
|
|
129
129
|
if (!sectionMatch) return null;
|
|
130
130
|
const sectionContent = sectionMatch[1];
|
|
@@ -287,9 +287,9 @@ var init_local_supabase = __esm({
|
|
|
287
287
|
"src/commands/env/constants/local-supabase.ts"() {
|
|
288
288
|
init_esm_shims();
|
|
289
289
|
DEFAULT_HOST = "127.0.0.1";
|
|
290
|
-
DEFAULT_API_PORT =
|
|
291
|
-
DEFAULT_DB_PORT =
|
|
292
|
-
DEFAULT_STUDIO_PORT =
|
|
290
|
+
DEFAULT_API_PORT = BASE_PORTS.api;
|
|
291
|
+
DEFAULT_DB_PORT = BASE_PORTS.db;
|
|
292
|
+
DEFAULT_STUDIO_PORT = BASE_PORTS.studio;
|
|
293
293
|
LOCAL_SUPABASE_ANON_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0";
|
|
294
294
|
LOCAL_SUPABASE_SERVICE_ROLE_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImV4cCI6MTk4MzgxMjk5Nn0.EGIM96RAZx35lJzdJsyH-qQwv8Hdp7fsn3W0YpN81IU";
|
|
295
295
|
LOCAL_SUPABASE_ENV_VALUES = getLocalSupabaseEnvValues();
|
|
@@ -1161,7 +1161,7 @@ var CLI_VERSION, HAS_ADMIN_COMMAND;
|
|
|
1161
1161
|
var init_version = __esm({
|
|
1162
1162
|
"src/version.ts"() {
|
|
1163
1163
|
init_esm_shims();
|
|
1164
|
-
CLI_VERSION = "0.5.
|
|
1164
|
+
CLI_VERSION = "0.5.58";
|
|
1165
1165
|
HAS_ADMIN_COMMAND = false;
|
|
1166
1166
|
}
|
|
1167
1167
|
});
|
|
@@ -4425,9 +4425,19 @@ var ERROR_CATALOG = {
|
|
|
4425
4425
|
title: "Docker is not running",
|
|
4426
4426
|
template: "Docker daemon is not running or not accessible",
|
|
4427
4427
|
suggestions: [
|
|
4428
|
-
"Start
|
|
4429
|
-
"
|
|
4430
|
-
|
|
4428
|
+
"Start Colima: colima start --cpu 4 --memory 8 --disk 60 --vm-type vz --mount-type virtiofs",
|
|
4429
|
+
"Install Colima: brew install colima docker"
|
|
4430
|
+
],
|
|
4431
|
+
docUrl: "https://runa.dev/docs/errors/docker"
|
|
4432
|
+
},
|
|
4433
|
+
DOCKER_DESKTOP_FORBIDDEN: {
|
|
4434
|
+
code: "ERR_RUNA_DOCKER_DESKTOP_FORBIDDEN",
|
|
4435
|
+
exitCode: EXIT_CODES.EXTERNAL_TOOL_ERROR,
|
|
4436
|
+
title: "Docker Desktop is not supported",
|
|
4437
|
+
template: "Docker Desktop is not supported. runa requires Colima.",
|
|
4438
|
+
suggestions: [
|
|
4439
|
+
"Install Colima: brew install colima docker",
|
|
4440
|
+
"Start Colima: colima start --cpu 4 --memory 8 --disk 60 --vm-type vz --mount-type virtiofs"
|
|
4431
4441
|
],
|
|
4432
4442
|
docUrl: "https://runa.dev/docs/errors/docker"
|
|
4433
4443
|
},
|
|
@@ -4441,7 +4451,7 @@ var ERROR_CATALOG = {
|
|
|
4441
4451
|
supabase: "brew install supabase/tap/supabase",
|
|
4442
4452
|
vercel: "pnpm add -g vercel",
|
|
4443
4453
|
gh: "brew install gh",
|
|
4444
|
-
docker: "Install
|
|
4454
|
+
docker: "Install Colima: brew install colima docker",
|
|
4445
4455
|
dotenvx: "pnpm add -g @dotenvx/dotenvx",
|
|
4446
4456
|
"pg-schema-diff": "brew install pg-schema-diff"
|
|
4447
4457
|
};
|
|
@@ -5290,7 +5300,7 @@ function emitJsonSuccess(cmd, dataSchema, data) {
|
|
|
5290
5300
|
init_esm_shims();
|
|
5291
5301
|
var BuildPhaseSchema = z.enum(["types", "lint", "build", "db", "manifest"]);
|
|
5292
5302
|
var VALID_BUILD_PHASES = ["types", "lint", "build", "db", "manifest"];
|
|
5293
|
-
z.object({
|
|
5303
|
+
var BuildInputSchema = z.object({
|
|
5294
5304
|
/** Enable E2E mode (NEXT_PUBLIC_E2E_TEST=true, TURBO_FORCE=true) */
|
|
5295
5305
|
e2e: z.boolean().default(false),
|
|
5296
5306
|
/** Clear build caches (.next, .turbo, .runa/manifests) */
|
|
@@ -6819,6 +6829,14 @@ var e2eMeta = {
|
|
|
6819
6829
|
nextStates: []
|
|
6820
6830
|
}
|
|
6821
6831
|
};
|
|
6832
|
+
function normalizeBuildMachineInput(input3) {
|
|
6833
|
+
const normalizedInput = BuildInputSchema.parse(input3?.input ?? {});
|
|
6834
|
+
const repoRoot = input3?.repoRoot ?? normalizedInput.targetDir ?? process.cwd();
|
|
6835
|
+
return {
|
|
6836
|
+
input: normalizedInput,
|
|
6837
|
+
repoRoot
|
|
6838
|
+
};
|
|
6839
|
+
}
|
|
6822
6840
|
var buildMachine = setup({
|
|
6823
6841
|
types: {},
|
|
6824
6842
|
actors: {
|
|
@@ -6852,9 +6870,10 @@ var buildMachine = setup({
|
|
|
6852
6870
|
id: "build",
|
|
6853
6871
|
initial: "idle",
|
|
6854
6872
|
context: ({ input: input3 }) => {
|
|
6855
|
-
const
|
|
6873
|
+
const normalizedInput = normalizeBuildMachineInput(input3);
|
|
6874
|
+
const repoRoot = normalizedInput.repoRoot;
|
|
6856
6875
|
return {
|
|
6857
|
-
input:
|
|
6876
|
+
input: normalizedInput.input,
|
|
6858
6877
|
repoRoot,
|
|
6859
6878
|
tmpDir: ".runa/tmp/build",
|
|
6860
6879
|
hasDatabase: detectDatabase(repoRoot),
|
|
@@ -7579,17 +7598,17 @@ function printSummary(logger16, output3) {
|
|
|
7579
7598
|
}
|
|
7580
7599
|
}
|
|
7581
7600
|
function findRepoRoot(startDir) {
|
|
7582
|
-
const { existsSync:
|
|
7601
|
+
const { existsSync: existsSync53, readFileSync: readFileSync30 } = __require("fs");
|
|
7583
7602
|
const { join: join23, dirname: dirname5 } = __require("path");
|
|
7584
7603
|
let current = startDir;
|
|
7585
7604
|
while (current !== dirname5(current)) {
|
|
7586
|
-
if (
|
|
7605
|
+
if (existsSync53(join23(current, "turbo.json"))) {
|
|
7587
7606
|
return current;
|
|
7588
7607
|
}
|
|
7589
7608
|
const pkgPath = join23(current, "package.json");
|
|
7590
|
-
if (
|
|
7609
|
+
if (existsSync53(pkgPath)) {
|
|
7591
7610
|
try {
|
|
7592
|
-
const pkg = JSON.parse(
|
|
7611
|
+
const pkg = JSON.parse(readFileSync30(pkgPath, "utf-8"));
|
|
7593
7612
|
if (pkg.workspaces) {
|
|
7594
7613
|
return current;
|
|
7595
7614
|
}
|
|
@@ -7778,7 +7797,7 @@ init_esm_shims();
|
|
|
7778
7797
|
|
|
7779
7798
|
// src/commands/dev/contract.ts
|
|
7780
7799
|
init_esm_shims();
|
|
7781
|
-
z.object({
|
|
7800
|
+
var DevInputSchema = z.object({
|
|
7782
7801
|
/** Port for Next.js dev server (default: 3000) */
|
|
7783
7802
|
port: z.number().int().positive().default(3e3),
|
|
7784
7803
|
/** Skip Supabase start */
|
|
@@ -7790,7 +7809,11 @@ z.object({
|
|
|
7790
7809
|
/** Stream app output to terminal (in addition to log file). Default: true */
|
|
7791
7810
|
stream: z.boolean().default(true),
|
|
7792
7811
|
/** Target directory (defaults to cwd) */
|
|
7793
|
-
targetDir: z.string().optional()
|
|
7812
|
+
targetDir: z.string().optional(),
|
|
7813
|
+
/** Replace existing runa dev process if already running */
|
|
7814
|
+
replace: z.boolean().default(false),
|
|
7815
|
+
/** Bundler to use for Next.js dev server */
|
|
7816
|
+
bundler: z.enum(["turbopack", "webpack"]).optional()
|
|
7794
7817
|
}).strict();
|
|
7795
7818
|
z.enum(["pending", "running", "passed", "failed", "skipped"]);
|
|
7796
7819
|
var DevOutputSchema = z.object({
|
|
@@ -7888,8 +7911,8 @@ function readPortFromScripts(appDir) {
|
|
|
7888
7911
|
const pkgPath = path11__default.join(appDir, "package.json");
|
|
7889
7912
|
if (!existsSync(pkgPath)) return 3e3;
|
|
7890
7913
|
try {
|
|
7891
|
-
const { readFileSync:
|
|
7892
|
-
const raw =
|
|
7914
|
+
const { readFileSync: readFileSync30 } = __require("fs");
|
|
7915
|
+
const raw = readFileSync30(pkgPath, "utf-8");
|
|
7893
7916
|
const parsed = JSON.parse(raw);
|
|
7894
7917
|
const scripts = parsed.scripts;
|
|
7895
7918
|
for (const key of ["start:ci", "start", "dev"]) {
|
|
@@ -8148,7 +8171,7 @@ async function readPackageScripts(pkgPath) {
|
|
|
8148
8171
|
return null;
|
|
8149
8172
|
}
|
|
8150
8173
|
}
|
|
8151
|
-
function determineAppCommand(mode, isMonorepo2, rootScripts, appScripts, repoRoot, appDir, port) {
|
|
8174
|
+
function determineAppCommand(mode, isMonorepo2, rootScripts, appScripts, repoRoot, appDir, port, bundler) {
|
|
8152
8175
|
const ciScriptName = mode === "dev" ? "dev:ci" : "start:ci";
|
|
8153
8176
|
const nextCommand = mode === "dev" ? "dev" : "start";
|
|
8154
8177
|
const rootHasCiScript = Boolean(rootScripts?.[ciScriptName]);
|
|
@@ -8165,53 +8188,12 @@ function determineAppCommand(mode, isMonorepo2, rootScripts, appScripts, repoRoo
|
|
|
8165
8188
|
return { command: ["pnpm", ...dirArgs2, scriptName], useRootScript: false };
|
|
8166
8189
|
}
|
|
8167
8190
|
const dirArgs = isMonorepo2 ? ["-C", path11__default.relative(repoRoot, appDir)] : [];
|
|
8191
|
+
const bundlerArgs = bundler ? [`--${bundler}`] : [];
|
|
8168
8192
|
return {
|
|
8169
|
-
command: ["pnpm", ...dirArgs, "exec", "next", nextCommand, "-p", String(port)],
|
|
8193
|
+
command: ["pnpm", ...dirArgs, "exec", "next", nextCommand, ...bundlerArgs, "-p", String(port)],
|
|
8170
8194
|
useRootScript: false
|
|
8171
8195
|
};
|
|
8172
8196
|
}
|
|
8173
|
-
var NEXT_CRITICAL_FILES = ["routes-manifest.json", "build-manifest.json"];
|
|
8174
|
-
function cleanStaleNextDevState(appDir) {
|
|
8175
|
-
const nextDir = path11__default.join(appDir, ".next");
|
|
8176
|
-
if (!existsSync(nextDir)) {
|
|
8177
|
-
return { cleaned: false };
|
|
8178
|
-
}
|
|
8179
|
-
for (const file of NEXT_CRITICAL_FILES) {
|
|
8180
|
-
if (!existsSync(path11__default.join(nextDir, file))) {
|
|
8181
|
-
cleanNextDir(nextDir, `Missing ${file}`);
|
|
8182
|
-
return { cleaned: true, reason: `Missing ${file}` };
|
|
8183
|
-
}
|
|
8184
|
-
}
|
|
8185
|
-
const serverDir = path11__default.join(nextDir, "server");
|
|
8186
|
-
if (!existsSync(serverDir)) {
|
|
8187
|
-
try {
|
|
8188
|
-
const nextStat = statSync(nextDir);
|
|
8189
|
-
const ageHours = (Date.now() - nextStat.mtimeMs) / (1e3 * 60 * 60);
|
|
8190
|
-
if (ageHours > 1) {
|
|
8191
|
-
cleanNextDir(nextDir, "Stale .next without server directory");
|
|
8192
|
-
return { cleaned: true, reason: "Stale .next without server directory" };
|
|
8193
|
-
}
|
|
8194
|
-
} catch {
|
|
8195
|
-
}
|
|
8196
|
-
}
|
|
8197
|
-
return { cleaned: false };
|
|
8198
|
-
}
|
|
8199
|
-
function cleanNextDir(nextDir, reason) {
|
|
8200
|
-
console.log(`[runa] Stale .next detected: ${reason}`);
|
|
8201
|
-
console.log("[runa] Cleaning up .next directory...");
|
|
8202
|
-
try {
|
|
8203
|
-
rmSync(nextDir, { recursive: true, force: true, maxRetries: 10, retryDelay: 100 });
|
|
8204
|
-
console.log("[runa] Cleanup complete");
|
|
8205
|
-
} catch {
|
|
8206
|
-
const staleDir = `${nextDir}-stale-${Date.now()}`;
|
|
8207
|
-
console.log(`[runa] Could not remove .next, quarantining to ${path11__default.basename(staleDir)}`);
|
|
8208
|
-
try {
|
|
8209
|
-
renameSync(nextDir, staleDir);
|
|
8210
|
-
} catch {
|
|
8211
|
-
console.warn("[runa] Failed to quarantine .next. Run: rm -rf .next");
|
|
8212
|
-
}
|
|
8213
|
-
}
|
|
8214
|
-
}
|
|
8215
8197
|
async function startAppBackground(params) {
|
|
8216
8198
|
const mode = params.mode ?? "start";
|
|
8217
8199
|
const isMonorepo2 = params.appDir !== params.repoRoot;
|
|
@@ -8226,7 +8208,8 @@ async function startAppBackground(params) {
|
|
|
8226
8208
|
appScripts,
|
|
8227
8209
|
params.repoRoot,
|
|
8228
8210
|
params.appDir,
|
|
8229
|
-
params.port
|
|
8211
|
+
params.port,
|
|
8212
|
+
params.bundler
|
|
8230
8213
|
);
|
|
8231
8214
|
const appLog = path11__default.join(params.tmpDir, "app.log");
|
|
8232
8215
|
const out = createWriteStream(appLog, { flags: "a" });
|
|
@@ -8340,7 +8323,16 @@ var e2eMeta2 = {
|
|
|
8340
8323
|
log: "Starting dev"
|
|
8341
8324
|
},
|
|
8342
8325
|
assertions: ["expect(log).toContain('Starting dev')", "expect(state).toBe('idle')"],
|
|
8343
|
-
nextStates: ["
|
|
8326
|
+
nextStates: ["processCheck"]
|
|
8327
|
+
},
|
|
8328
|
+
processCheck: {
|
|
8329
|
+
description: "Check for existing runa dev process and port availability",
|
|
8330
|
+
severity: "blocking",
|
|
8331
|
+
observables: {
|
|
8332
|
+
log: "Checking for existing processes"
|
|
8333
|
+
},
|
|
8334
|
+
assertions: ["expect(log).toContain('Checking for existing processes')"],
|
|
8335
|
+
nextStates: ["setup", "failed"]
|
|
8344
8336
|
},
|
|
8345
8337
|
setup: {
|
|
8346
8338
|
description: "Setup phase: deps, env, Supabase",
|
|
@@ -8400,12 +8392,74 @@ var e2eMeta2 = {
|
|
|
8400
8392
|
nextStates: []
|
|
8401
8393
|
}
|
|
8402
8394
|
};
|
|
8395
|
+
function isProcessAlive(pid) {
|
|
8396
|
+
try {
|
|
8397
|
+
process.kill(pid, 0);
|
|
8398
|
+
return true;
|
|
8399
|
+
} catch {
|
|
8400
|
+
return false;
|
|
8401
|
+
}
|
|
8402
|
+
}
|
|
8403
|
+
function checkPortAvailable(port) {
|
|
8404
|
+
return new Promise((resolve12, reject) => {
|
|
8405
|
+
const server = net.createServer();
|
|
8406
|
+
server.once("error", (err) => {
|
|
8407
|
+
if (err.code === "EADDRINUSE") {
|
|
8408
|
+
reject(
|
|
8409
|
+
new Error(
|
|
8410
|
+
`Port ${port} is already in use. Use --port <number> to specify a different port, or --replace to restart.`
|
|
8411
|
+
)
|
|
8412
|
+
);
|
|
8413
|
+
} else {
|
|
8414
|
+
reject(err);
|
|
8415
|
+
}
|
|
8416
|
+
});
|
|
8417
|
+
server.once("listening", () => {
|
|
8418
|
+
server.close(() => resolve12());
|
|
8419
|
+
});
|
|
8420
|
+
server.listen(port);
|
|
8421
|
+
});
|
|
8422
|
+
}
|
|
8423
|
+
var processCheckActor = fromPromise(async ({ input: input3 }) => {
|
|
8424
|
+
const pidFile = path11__default.join(input3.repoRoot, input3.tmpDir, "app.pid");
|
|
8425
|
+
let pidFileContent = null;
|
|
8426
|
+
try {
|
|
8427
|
+
pidFileContent = readFileSync(pidFile, "utf-8").trim();
|
|
8428
|
+
} catch {
|
|
8429
|
+
}
|
|
8430
|
+
if (pidFileContent) {
|
|
8431
|
+
const pid = parseInt(pidFileContent, 10);
|
|
8432
|
+
if (!isNaN(pid) && isProcessAlive(pid)) {
|
|
8433
|
+
if (input3.replace) {
|
|
8434
|
+
await terminateAppProcessByPid({
|
|
8435
|
+
pid,
|
|
8436
|
+
logger: { info: console.log, warn: console.warn }
|
|
8437
|
+
});
|
|
8438
|
+
} else {
|
|
8439
|
+
throw new Error(
|
|
8440
|
+
`runa dev is already running (PID: ${pid}). Use --replace to restart, or stop the existing process first.`
|
|
8441
|
+
);
|
|
8442
|
+
}
|
|
8443
|
+
}
|
|
8444
|
+
try {
|
|
8445
|
+
unlinkSync(pidFile);
|
|
8446
|
+
} catch {
|
|
8447
|
+
}
|
|
8448
|
+
}
|
|
8449
|
+
if (!input3.skipApp) {
|
|
8450
|
+
await checkPortAvailable(input3.port);
|
|
8451
|
+
}
|
|
8452
|
+
});
|
|
8403
8453
|
var appStartActor = fromPromise(
|
|
8404
8454
|
async ({ input: input3 }) => {
|
|
8405
|
-
const { repoRoot, appDir, port, tmpDir, stream } = input3;
|
|
8455
|
+
const { repoRoot, appDir, port, tmpDir, stream, bundler } = input3;
|
|
8406
8456
|
const fullTmpDir = path11__default.join(repoRoot, tmpDir);
|
|
8407
8457
|
await mkdir(fullTmpDir, { recursive: true });
|
|
8408
|
-
|
|
8458
|
+
const nextDir = path11__default.join(appDir, ".next");
|
|
8459
|
+
if (existsSync(nextDir)) {
|
|
8460
|
+
rmSync(nextDir, { recursive: true, force: true });
|
|
8461
|
+
console.log("[runa dev] Cleaned .next cache for fresh start");
|
|
8462
|
+
}
|
|
8409
8463
|
const result = await startAppBackground({
|
|
8410
8464
|
repoRoot,
|
|
8411
8465
|
appDir,
|
|
@@ -8413,7 +8467,8 @@ var appStartActor = fromPromise(
|
|
|
8413
8467
|
env: process.env,
|
|
8414
8468
|
tmpDir: fullTmpDir,
|
|
8415
8469
|
mode: "dev",
|
|
8416
|
-
stream
|
|
8470
|
+
stream,
|
|
8471
|
+
bundler
|
|
8417
8472
|
});
|
|
8418
8473
|
await waitForAppReady({
|
|
8419
8474
|
port,
|
|
@@ -8435,9 +8490,18 @@ var shutdownActor = fromPromise(async ({ input: input3 }) => {
|
|
|
8435
8490
|
});
|
|
8436
8491
|
}
|
|
8437
8492
|
});
|
|
8493
|
+
function normalizeDevMachineInput(input3) {
|
|
8494
|
+
const normalizedInput = DevInputSchema.parse(input3?.input ?? {});
|
|
8495
|
+
const repoRoot = input3?.repoRoot ?? normalizedInput.targetDir ?? process.cwd();
|
|
8496
|
+
return {
|
|
8497
|
+
input: normalizedInput,
|
|
8498
|
+
repoRoot
|
|
8499
|
+
};
|
|
8500
|
+
}
|
|
8438
8501
|
var devMachine = setup({
|
|
8439
8502
|
types: {},
|
|
8440
8503
|
actors: {
|
|
8504
|
+
processCheck: processCheckActor,
|
|
8441
8505
|
depsInstall: depsInstallActor,
|
|
8442
8506
|
envCheck: envCheckActor,
|
|
8443
8507
|
supabaseStart: supabaseStartActor,
|
|
@@ -8452,9 +8516,10 @@ var devMachine = setup({
|
|
|
8452
8516
|
id: "dev",
|
|
8453
8517
|
initial: "idle",
|
|
8454
8518
|
context: ({ input: input3 }) => {
|
|
8455
|
-
const
|
|
8519
|
+
const normalizedInput = normalizeDevMachineInput(input3);
|
|
8520
|
+
const repoRoot = normalizedInput.repoRoot;
|
|
8456
8521
|
return {
|
|
8457
|
-
input:
|
|
8522
|
+
input: normalizedInput.input,
|
|
8458
8523
|
repoRoot,
|
|
8459
8524
|
tmpDir: ".runa/tmp/dev",
|
|
8460
8525
|
hasDatabase: detectDatabase(repoRoot),
|
|
@@ -8471,7 +8536,30 @@ var devMachine = setup({
|
|
|
8471
8536
|
idle: {
|
|
8472
8537
|
meta: { e2e: e2eMeta2.idle },
|
|
8473
8538
|
on: {
|
|
8474
|
-
START: { target: "
|
|
8539
|
+
START: { target: "processCheck" }
|
|
8540
|
+
}
|
|
8541
|
+
},
|
|
8542
|
+
// ============================================================
|
|
8543
|
+
// Process Check Phase
|
|
8544
|
+
// ============================================================
|
|
8545
|
+
processCheck: {
|
|
8546
|
+
meta: { e2e: e2eMeta2.processCheck },
|
|
8547
|
+
invoke: {
|
|
8548
|
+
src: "processCheck",
|
|
8549
|
+
input: ({ context }) => ({
|
|
8550
|
+
repoRoot: context.repoRoot,
|
|
8551
|
+
tmpDir: context.tmpDir,
|
|
8552
|
+
port: context.input.port,
|
|
8553
|
+
replace: context.input.replace,
|
|
8554
|
+
skipApp: context.input.skipApp
|
|
8555
|
+
}),
|
|
8556
|
+
onDone: { target: "setup" },
|
|
8557
|
+
onError: {
|
|
8558
|
+
target: "failed",
|
|
8559
|
+
actions: assign({
|
|
8560
|
+
error: ({ event }) => event.error instanceof Error ? event.error.message : String(event.error)
|
|
8561
|
+
})
|
|
8562
|
+
}
|
|
8475
8563
|
}
|
|
8476
8564
|
},
|
|
8477
8565
|
// ============================================================
|
|
@@ -8547,7 +8635,8 @@ var devMachine = setup({
|
|
|
8547
8635
|
appDir: detected?.appDir ?? context.repoRoot,
|
|
8548
8636
|
port: context.input.port,
|
|
8549
8637
|
tmpDir: context.tmpDir,
|
|
8550
|
-
stream: context.input.stream
|
|
8638
|
+
stream: context.input.stream,
|
|
8639
|
+
bundler: context.input.bundler
|
|
8551
8640
|
};
|
|
8552
8641
|
},
|
|
8553
8642
|
onDone: {
|
|
@@ -8625,6 +8714,9 @@ var stateLogHandlers2 = {
|
|
|
8625
8714
|
logger16.section("Dev Server");
|
|
8626
8715
|
logger16.info("Starting development environment...");
|
|
8627
8716
|
},
|
|
8717
|
+
processCheck: (logger16) => {
|
|
8718
|
+
logger16.info("Checking for existing processes...");
|
|
8719
|
+
},
|
|
8628
8720
|
setup: (logger16) => {
|
|
8629
8721
|
logger16.section("Phase 0: Setup");
|
|
8630
8722
|
logger16.info("Checking environment prerequisites...");
|
|
@@ -8750,14 +8842,16 @@ async function runDevAction(options, cmd) {
|
|
|
8750
8842
|
);
|
|
8751
8843
|
}
|
|
8752
8844
|
}
|
|
8753
|
-
var devCommand = new Command("dev").description("Start development server (Supabase + Next.js)").option("--port <number>", "Port for Next.js dev server", (val) => Number.parseInt(val, 10), 3e3).option("--skip-db", "Skip Supabase start").option("--skip-app", "Start Supabase only (no app server)").option("--verbose", "Show detailed output").option("--no-stream", "Disable streaming app output to terminal (log to file only)").option("--target-dir <path>", "Target directory (defaults to repo root)").action(async (options, cmd) => {
|
|
8845
|
+
var devCommand = new Command("dev").description("Start development server (Supabase + Next.js)").option("--port <number>", "Port for Next.js dev server", (val) => Number.parseInt(val, 10), 3e3).option("--skip-db", "Skip Supabase start").option("--skip-app", "Start Supabase only (no app server)").option("--verbose", "Show detailed output").option("--no-stream", "Disable streaming app output to terminal (log to file only)").option("--target-dir <path>", "Target directory (defaults to repo root)").option("--replace", "Replace existing runa dev process if already running").option("--bundler <type>", "Bundler for Next.js dev server (turbopack or webpack)").action(async (options, cmd) => {
|
|
8754
8846
|
const input3 = {
|
|
8755
8847
|
port: options.port,
|
|
8756
8848
|
skipDb: options.skipDb ?? false,
|
|
8757
8849
|
skipApp: options.skipApp ?? false,
|
|
8758
8850
|
verbose: options.verbose ?? false,
|
|
8759
8851
|
stream: options.stream ?? true,
|
|
8760
|
-
targetDir: options.targetDir
|
|
8852
|
+
targetDir: options.targetDir,
|
|
8853
|
+
replace: options.replace ?? false,
|
|
8854
|
+
bundler: options.bundler
|
|
8761
8855
|
};
|
|
8762
8856
|
await runDevAction(input3, cmd);
|
|
8763
8857
|
});
|
|
@@ -8861,6 +8955,23 @@ function isPathContained(basePath, targetPath) {
|
|
|
8861
8955
|
const realTarget = safeRealpath(normalizedTarget);
|
|
8862
8956
|
return realTarget === realBase || realTarget.startsWith(realBase + sep);
|
|
8863
8957
|
}
|
|
8958
|
+
/**
 * Validate a user-supplied file path and resolve it to an absolute path.
 * Rejects empty paths, paths containing shell metacharacters / control
 * characters, and paths that escape `baseDir`.
 *
 * @param {string} filePath - Raw path from the user (absolute or relative).
 * @param {string} baseDir - Directory the resolved path must stay inside.
 * @returns {string} The resolved absolute path.
 * @throws {Error} When any validation step fails.
 */
function validateUserFilePath(filePath, baseDir) {
  const isBlank = !filePath || filePath.trim() === "";
  if (isBlank) {
    throw new Error("File path cannot be empty");
  }
  if (!hasNoDangerousChars(filePath)) {
    throw new Error(
      "File path contains dangerous characters. Shell metacharacters and control characters are not allowed."
    );
  }
  // Relative inputs are anchored at baseDir; absolute inputs are normalized.
  let resolvedPath;
  if (isAbsolute(filePath)) {
    resolvedPath = resolve(filePath);
  } else {
    resolvedPath = resolve(baseDir, filePath);
  }
  if (!isPathContained(baseDir, resolvedPath)) {
    throw new Error(
      "File path resolves outside the allowed directory. Path must be within the project root."
    );
  }
  return resolvedPath;
}
|
|
8864
8975
|
|
|
8865
8976
|
// src/config/env-files.ts
|
|
8866
8977
|
init_workspace_detector();
|
|
@@ -10080,8 +10191,8 @@ async function detectRisks(repoRoot, tmpDir) {
|
|
|
10080
10191
|
} catch (error) {
|
|
10081
10192
|
let logContent = "";
|
|
10082
10193
|
try {
|
|
10083
|
-
const { readFileSync:
|
|
10084
|
-
logContent =
|
|
10194
|
+
const { readFileSync: readFileSync30 } = await import('fs');
|
|
10195
|
+
logContent = readFileSync30(logFile, "utf-8");
|
|
10085
10196
|
} catch {
|
|
10086
10197
|
}
|
|
10087
10198
|
const isInitialDeployment = logContent.includes("No common ancestor") || logContent.includes("INITIAL DEPLOYMENT");
|
|
@@ -10209,8 +10320,8 @@ async function applyProductionSchema(repoRoot, tmpDir, productionDbUrlAdmin, pro
|
|
|
10209
10320
|
const totalMs = Date.now() - startTime;
|
|
10210
10321
|
let logContent = "";
|
|
10211
10322
|
try {
|
|
10212
|
-
const { readFileSync:
|
|
10213
|
-
logContent =
|
|
10323
|
+
const { readFileSync: readFileSync30 } = await import('fs');
|
|
10324
|
+
logContent = readFileSync30(logPath, "utf-8");
|
|
10214
10325
|
} catch {
|
|
10215
10326
|
}
|
|
10216
10327
|
const parsed = parseApplyLog(logContent);
|
|
@@ -12888,6 +12999,20 @@ function getSchemaGitDiff(repoRoot) {
|
|
|
12888
12999
|
return null;
|
|
12889
13000
|
}
|
|
12890
13001
|
}
|
|
13002
|
+
/**
 * Collect PostgreSQL role names referenced by a SQL file.
 * Two sources are scanned: `CREATE ROLE <name> WITH ...` statements, and
 * existence checks of the form `rolname = '<name>'`. Names are lowercased;
 * existence-check names are only added when not already collected.
 *
 * @param {string} content - Raw SQL text.
 * @returns {string[]} Lowercased role names in discovery order.
 */
function extractRolesFromSql(content) {
  const found = [];
  // Explicitly created roles.
  for (const m of content.matchAll(/CREATE\s+ROLE\s+(\w+)\s+WITH/gi)) {
    if (m[1]) found.push(m[1].toLowerCase());
  }
  // Roles referenced by existence checks; skip duplicates.
  for (const m of content.matchAll(/rolname\s*=\s*'(\w+)'/gi)) {
    if (!m[1]) continue;
    const name = m[1].toLowerCase();
    if (!found.includes(name)) {
      found.push(name);
    }
  }
  return found;
}
|
|
12891
13016
|
function getIdempotentRoleNames2(repoRoot) {
|
|
12892
13017
|
const idempotentDir = path11__default.join(repoRoot, "supabase", "schemas", "idempotent");
|
|
12893
13018
|
const roles = [];
|
|
@@ -12896,17 +13021,12 @@ function getIdempotentRoleNames2(repoRoot) {
|
|
|
12896
13021
|
if (!fs14.existsSync(idempotentDir)) return [];
|
|
12897
13022
|
const files = fs14.readdirSync(idempotentDir).filter((f) => f.endsWith(".sql"));
|
|
12898
13023
|
for (const file of files) {
|
|
12899
|
-
const
|
|
12900
|
-
|
|
12901
|
-
|
|
12902
|
-
if (match[1]) roles.push(match[1].toLowerCase());
|
|
12903
|
-
}
|
|
12904
|
-
const existsMatches = content.matchAll(/rolname\s*=\s*'(\w+)'/gi);
|
|
12905
|
-
for (const match of existsMatches) {
|
|
12906
|
-
if (match[1] && !roles.includes(match[1].toLowerCase())) {
|
|
12907
|
-
roles.push(match[1].toLowerCase());
|
|
12908
|
-
}
|
|
13024
|
+
const filePath = path11__default.join(idempotentDir, file);
|
|
13025
|
+
if (!isPathContained(idempotentDir, filePath)) {
|
|
13026
|
+
continue;
|
|
12909
13027
|
}
|
|
13028
|
+
const content = fs14.readFileSync(filePath, "utf-8");
|
|
13029
|
+
roles.push(...extractRolesFromSql(content));
|
|
12910
13030
|
}
|
|
12911
13031
|
} catch {
|
|
12912
13032
|
}
|
|
@@ -17478,6 +17598,7 @@ function tryResolveDatabaseUrl(environment) {
|
|
|
17478
17598
|
|
|
17479
17599
|
// src/commands/db/utils/table-registry.ts
|
|
17480
17600
|
init_esm_shims();
|
|
17601
|
+
init_config_loader();
|
|
17481
17602
|
|
|
17482
17603
|
// src/commands/db/utils/semantic-mapper.ts
|
|
17483
17604
|
init_esm_shims();
|
|
@@ -17564,6 +17685,223 @@ function applyMappingToTables(tables, mapping) {
|
|
|
17564
17685
|
}));
|
|
17565
17686
|
}
|
|
17566
17687
|
|
|
17688
|
+
// src/commands/db/utils/schema-sync.ts
|
|
17689
|
+
init_esm_shims();
|
|
17690
|
+
// Unquoted PostgreSQL identifier: leading letter/underscore, then word
// characters, 63 characters maximum.
var VALID_PG_IDENTIFIER_PATTERN = /^[a-zA-Z_][a-zA-Z0-9_]{0,62}$/;

/**
 * Assert that `name` is a safe PostgreSQL identifier; `context` labels the
 * error message (e.g. "schema name"). Returns nothing on success.
 *
 * @throws {Error} When `name` is empty, not a string, or fails the pattern.
 */
function validatePgIdentifier(name, context) {
  const isNonEmptyString = typeof name === "string" && Boolean(name);
  if (!isNonEmptyString) {
    throw new Error(`Invalid ${context}: empty or not a string`);
  }
  if (VALID_PG_IDENTIFIER_PATTERN.test(name)) {
    return;
  }
  throw new Error(
    `Invalid ${context} "${name}": must start with letter/underscore and contain only alphanumeric/underscore characters`
  );
}
|
|
17701
|
+
/**
 * Escape a value for embedding in a single-quoted PostgreSQL string literal:
 * backslashes are doubled, then single quotes are doubled.
 *
 * @param {string} value
 * @returns {string} The escaped text (without surrounding quotes).
 * @throws {Error} When `value` is not a string.
 */
function escapePgStringLiteral(value) {
  if (typeof value !== "string") {
    throw new Error("Value must be a string");
  }
  const backslashesDoubled = value.split("\\").join("\\\\");
  return backslashesDoubled.split("'").join("''");
}
|
|
17707
|
+
/**
 * Build a comma-separated list of quoted schema names suitable for a SQL
 * `IN (...)` clause. Each name is validated as a PG identifier first, so the
 * result is safe to interpolate into a query.
 *
 * @param {string[]} schemas - Schema names to include.
 * @returns {string} e.g. `'public','auth'`
 * @throws {Error} When the list is empty or any name is invalid.
 */
function buildSafeSchemaInClause(schemas) {
  if (schemas.length === 0) {
    throw new Error("No schemas provided for IN clause");
  }
  return schemas
    .map((schema) => {
      validatePgIdentifier(schema, "schema name");
      return `'${escapePgStringLiteral(schema)}'`;
    })
    .join(",");
}
|
|
17718
|
+
// Error messages for schema-path validation. Kept short and generic —
// neither message interpolates the offending path.
var ERROR_MESSAGES2 = {
  PATH_TRAVERSAL: "Schema path validation failed",
  SCHEMA_NOT_FOUND: "Schema file not found"
};
|
|
17722
|
+
/**
 * Heuristic check for path-traversal attempts: true when the normalized path
 * still contains ".." or the raw input contains a NUL byte.
 * Note: inputs like "a/../b" normalize to "b" and therefore pass.
 *
 * @param {string} inputPath
 * @returns {boolean}
 */
function containsPathTraversal2(inputPath) {
  if (inputPath.includes("\0")) {
    return true;
  }
  return path11__default.normalize(inputPath).includes("..");
}
|
|
17726
|
+
/**
 * Report whether `filePath` resolves to `baseDir` itself or to a path inside
 * it. Both paths are resolved and normalized before comparison; the separator
 * suffix prevents "/a/bc" from matching base "/a/b". Any resolution error
 * yields false.
 *
 * @param {string} filePath
 * @param {string} baseDir
 * @returns {boolean}
 */
function isPathWithinBase(filePath, baseDir) {
  try {
    const file = path11__default.normalize(path11__default.resolve(filePath));
    const base = path11__default.normalize(path11__default.resolve(baseDir));
    if (file === base) {
      return true;
    }
    return file.startsWith(base + path11__default.sep);
  } catch {
    return false;
  }
}
|
|
17737
|
+
/**
 * Validate a database-package path and return the absolute path to its
 * Drizzle schema entry point (`<dbPackagePath>/src/schema/index.ts`).
 *
 * Checks, in order: no traversal sequences in the raw input, the resolved
 * entry point stays inside `projectRoot`, and the file actually exists.
 * Error messages are intentionally generic (see ERROR_MESSAGES2).
 *
 * @param {string} dbPackagePath - Package dir, relative to `projectRoot`.
 * @param {string} [projectRoot] - Containment root; defaults to cwd.
 * @returns {string} Absolute path to the schema entry file.
 * @throws {Error} PATH_TRAVERSAL or SCHEMA_NOT_FOUND message on failure.
 */
function validateSchemaPath(dbPackagePath, projectRoot = process.cwd()) {
  if (containsPathTraversal2(dbPackagePath)) {
    throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
  }
  const schemaEntry = path11__default.join(dbPackagePath, "src", "schema", "index.ts");
  const absoluteSchemaPath = path11__default.resolve(projectRoot, schemaEntry);
  // Resolve symlinks in the root so the containment check compares real
  // paths; fall back to plain resolution when realpath fails (e.g. missing dir).
  let resolvedProjectRoot;
  try {
    resolvedProjectRoot = realpathSync(projectRoot);
  } catch {
    resolvedProjectRoot = path11__default.resolve(projectRoot);
  }
  if (!isPathWithinBase(absoluteSchemaPath, resolvedProjectRoot)) {
    throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
  }
  if (!existsSync(absoluteSchemaPath)) {
    throw new Error(ERROR_MESSAGES2.SCHEMA_NOT_FOUND);
  }
  return absoluteSchemaPath;
}
|
|
17757
|
+
/**
 * Return a deduplicated copy of `values`, locale-sorted ascending.
 *
 * @param {string[]} values
 * @returns {string[]} New array; input is not mutated.
 */
function uniqueSorted(values) {
  const deduped = Array.from(new Set(values));
  deduped.sort((left, right) => left.localeCompare(right));
  return deduped;
}
|
|
17760
|
+
/**
 * Load the Drizzle schema entry point via jiti and collect what the source
 * code expects to exist in the database.
 *
 * @param {string} dbPackagePath - Database package path (validated by
 *   validateSchemaPath against traversal / containment).
 * @param {string} [projectRoot] - Resolution base; defaults to cwd.
 * @returns {Promise<{expectedTables: Set<string>, expectedEnums: Map}>}
 *   expectedTables holds "schema.table" names; expectedEnums maps enum name
 *   to { name, values } with values deduped and sorted.
 * @throws {Error} When the path is invalid or the module fails to load.
 */
async function extractSchemaTablesAndEnums(dbPackagePath, projectRoot = process.cwd()) {
  const validatedSchemaPath = validateSchemaPath(dbPackagePath, projectRoot);
  // jiti imports TypeScript sources at runtime without a build step.
  const jiti = createJiti(projectRoot, { interopDefault: true });
  let schemaModule;
  try {
    schemaModule = await jiti.import(validatedSchemaPath);
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    // Special-case a recurring drizzle-orm import mistake with a fix hint.
    const hint = errorMessage.includes("unknown is not defined") ? "\n\nHint: Add 'unknown' to drizzle-orm/pg-core imports:\n import { unknown, ... } from 'drizzle-orm/pg-core'" : "";
    throw new Error(`Failed to load schema from ${validatedSchemaPath}: ${errorMessage}${hint}`);
  }
  const expectedTables = /* @__PURE__ */ new Set();
  const expectedEnums = /* @__PURE__ */ new Map();
  for (const value of Object.values(schemaModule)) {
    if (isTable(value)) {
      const unique = String(getTableUniqueName(value));
      // A unique name of "undefined.<table>" indicates no explicit schema on
      // the table definition (presumably no pgSchema() wrapper — TODO confirm);
      // treat those tables as living in "public".
      if (unique.startsWith("undefined.")) {
        expectedTables.add(`public.${getTableName(value)}`);
      } else {
        expectedTables.add(unique);
      }
      continue;
    }
    if (isPgEnum(value)) {
      expectedEnums.set(value.enumName, {
        name: value.enumName,
        values: uniqueSorted(value.enumValues)
      });
    }
  }
  return { expectedTables, expectedEnums };
}
|
|
17792
|
+
/**
 * Query the live database (via psql) for the tables in the managed schemas
 * and all enums in "public".
 *
 * @param {string} databaseUrl - Connection string handed to psqlQuery.
 * @param {{schemaDir?: string, additionalSystemSchemas?: string[]}} [options]
 * @returns {Promise<{dbTables: Set<string>, dbEnums: Map}>}
 *   dbTables holds "schema.table" names; dbEnums maps enum name to
 *   { name, values } with values deduped and sorted.
 */
async function fetchDbTablesAndEnums(databaseUrl, options) {
  const schemaDir = options?.schemaDir ?? "packages/database/src/schema";
  // Schemas the project manages, minus Supabase system schemas and any
  // caller-provided extra system schemas.
  const managedSchemas = detectSchemaNames(schemaDir, process.cwd());
  const systemSchemas = /* @__PURE__ */ new Set([
    ...SUPABASE_SYSTEM_SCHEMAS,
    ...options?.additionalSystemSchemas ?? []
  ]);
  const filteredManagedSchemas = managedSchemas.filter((s) => !systemSchemas.has(s));
  // Validated + quoted list, safe to splice into the SQL below.
  const schemaList = buildSafeSchemaInClause(filteredManagedSchemas);
  const tablesSql = `
SELECT schemaname || '.' || tablename
FROM pg_tables
WHERE schemaname IN (${schemaList})
ORDER BY schemaname, tablename;`.trim();
  const enumsSql = `
SELECT t.typname AS enum_name, string_agg(e.enumlabel, ',' ORDER BY e.enumsortorder) AS values
FROM pg_type t
JOIN pg_enum e ON t.oid = e.enumtypid
JOIN pg_namespace n ON n.oid = t.typnamespace
WHERE n.nspname = 'public'
GROUP BY t.typname
ORDER BY t.typname;`.trim();
  // psqlQuery in "table" mode returns one row per line; blank lines ignored.
  const tablesOut = await psqlQuery({ databaseUrl, sql: tablesSql, mode: "table" });
  const dbTables = /* @__PURE__ */ new Set();
  for (const line of tablesOut.split("\n")) {
    const v = line.trim();
    if (v.length > 0) dbTables.add(v);
  }
  // Enum rows come back as "name | v1,v2,..."; split on the column separator.
  const enumsOut = await psqlQuery({ databaseUrl, sql: enumsSql, mode: "table" });
  const dbEnums = /* @__PURE__ */ new Map();
  for (const line of enumsOut.split("\n")) {
    const trimmed = line.trim();
    if (trimmed.length === 0) continue;
    const [enumName, valuesCsv] = trimmed.split("|").map((s) => s.trim());
    const values = valuesCsv ? valuesCsv.split(",").map((s) => s.trim()) : [];
    dbEnums.set(enumName, { name: enumName, values: uniqueSorted(values) });
  }
  return { dbTables, dbEnums };
}
|
|
17831
|
+
/**
 * Compare the schema expected by the source files against what the database
 * actually contains.
 *
 * @param {object} params
 * @param {Set<string>} params.expectedTables - "schema.table" names from source.
 * @param {Set<string>} params.dbTables - "schema.table" names from the DB.
 * @param {Map} params.expectedEnums - enum name -> { name, values } from source.
 * @param {Map} params.dbEnums - enum name -> { name, values } from the DB.
 * @param {string[]} [params.excludeFromOrphanDetection] - Exact names or
 *   `*`-glob patterns for DB tables that must not be reported as orphans.
 * @returns Diff report: missing/orphan tables, missing/extra enums, and
 *   per-enum value mismatches (added/removed values).
 */
function diffSchema(params) {
  // Tables declared in source but absent from the DB.
  const missingTables = uniqueSorted(
    [...params.expectedTables].filter((t) => !params.dbTables.has(t))
  );
  const exclusions = new Set(params.excludeFromOrphanDetection ?? []);
  const exclusionPatterns = [...exclusions].filter((e) => e.includes("*"));
  const exactExclusions = [...exclusions].filter((e) => !e.includes("*"));
  // FIX: escape regex metacharacters before expanding `*` wildcards so the
  // "." in patterns like "public.tmp_*" matches literally (consistent with
  // buildTablePatternMatcher elsewhere in this bundle). Also compile each
  // pattern once instead of on every table check.
  const compiledPatterns = exclusionPatterns.map(
    (pattern) => new RegExp(`^${pattern.replace(/[.+?^${}()|[\]\\]/g, "\\$&").replace(/\*/g, ".*")}$`)
  );
  const isExcluded = (table) => {
    if (exactExclusions.includes(table)) return true;
    return compiledPatterns.some((regex) => regex.test(table));
  };
  // Tables present in the DB but not declared in source (minus allowlisted).
  const orphanTables = uniqueSorted(
    [...params.dbTables].filter((t) => !params.expectedTables.has(t) && !isExcluded(t))
  );
  const expectedEnumNames = new Set(params.expectedEnums.keys());
  const dbEnumNames = new Set(params.dbEnums.keys());
  const missingEnums = uniqueSorted([...expectedEnumNames].filter((n) => !dbEnumNames.has(n)));
  const extraEnums = uniqueSorted([...dbEnumNames].filter((n) => !expectedEnumNames.has(n)));
  // For enums that exist on both sides, compare their sorted/deduped values.
  const enumValueMismatches = [];
  for (const name of uniqueSorted([...expectedEnumNames].filter((n) => dbEnumNames.has(n)))) {
    const s = params.expectedEnums.get(name);
    const d = params.dbEnums.get(name);
    if (!s || !d) continue;
    const schemaValues = uniqueSorted(s.values);
    const dbValues = uniqueSorted(d.values);
    const same = schemaValues.length === dbValues.length && schemaValues.every((v, i) => v === dbValues[i]);
    if (same) continue;
    const added = schemaValues.filter((v) => !dbValues.includes(v));
    const removed = dbValues.filter((v) => !schemaValues.includes(v));
    enumValueMismatches.push({ name, dbValues, schemaValues, added, removed });
  }
  return {
    expectedTables: params.expectedTables,
    expectedEnums: params.expectedEnums,
    dbTables: params.dbTables,
    dbEnums: params.dbEnums,
    missingTables,
    orphanTables,
    missingEnums,
    extraEnums,
    enumValueMismatches
  };
}
|
|
17878
|
+
/**
 * Scan the idempotent SQL directory for `CREATE TABLE` statements and return
 * the qualified table names they define. Line (`--`) and block comments are
 * stripped before matching; tables without an explicit schema default to
 * "public". Any read error yields an empty list.
 *
 * @param {string} idempotentDir - Directory path (absolute or relative).
 * @param {string} [projectRoot] - Resolution base; defaults to cwd.
 * @returns {string[]} Sorted, deduplicated "schema.table" names.
 */
function extractTablesFromIdempotentSql(idempotentDir, projectRoot = process.cwd()) {
  const resolvedDir = path11__default.resolve(projectRoot, idempotentDir);
  if (!existsSync(resolvedDir)) {
    return [];
  }
  const createTablePattern = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?\.)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?)/gi;
  const found = [];
  try {
    const sqlFiles = readdirSync(resolvedDir).filter((name) => name.endsWith(".sql"));
    for (const name of sqlFiles) {
      const raw = readFileSync(path11__default.join(resolvedDir, name), "utf-8");
      const stripped = raw.replace(/--.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
      for (const m of stripped.matchAll(createTablePattern)) {
        if (m[2]) {
          found.push(`${m[1] || "public"}.${m[2]}`);
        }
      }
    }
  } catch {
    return [];
  }
  return [...new Set(found)].sort();
}
|
|
17904
|
+
|
|
17567
17905
|
// src/commands/db/utils/sql-table-extractor.ts
|
|
17568
17906
|
init_esm_shims();
|
|
17569
17907
|
var sqlParserUtils = null;
|
|
@@ -18225,9 +18563,235 @@ async function extractTablesFromSqlDir(sqlDir, options = {}) {
|
|
|
18225
18563
|
return tableEntries;
|
|
18226
18564
|
}
|
|
18227
18565
|
|
|
18566
|
+
// src/commands/db/utils/table-source-classifier.ts
|
|
18567
|
+
init_esm_shims();
|
|
18568
|
+
/**
 * Split a "schema.table" string at the first dot.
 * A name with no dot becomes the schema with an empty table; missing parts
 * default to "".
 *
 * @param {string} qualifiedName
 * @returns {{schema: string, table: string}}
 */
function splitQualifiedName(qualifiedName) {
  const parts = qualifiedName.split(".", 2);
  return { schema: parts[0] ?? "", table: parts[1] ?? "" };
}
|
|
18572
|
+
/**
 * Backslash-escape every regex metacharacter in `value` so it can be embedded
 * in a RegExp source as a literal.
 *
 * @param {string} value
 * @returns {string}
 */
function escapeRegexLiteral(value) {
  const specials = /[.*+?^${}()|[\]\\]/g;
  return value.replace(specials, (ch) => `\\${ch}`);
}
|
|
18575
|
+
/**
 * Compile table-name patterns into a matcher predicate.
 * Patterns containing a dot are tested against the full "schema.table" name;
 * bare patterns are tested against the table part only. `*` acts as a
 * wildcard; all other characters match literally. Blank patterns are ignored.
 *
 * @param {string[]} patterns
 * @returns {(qualifiedName: string) => boolean}
 */
function buildTablePatternMatcher(patterns) {
  const compiled = patterns
    .map((pattern) => pattern.trim())
    .filter((pattern) => pattern.length > 0)
    .map((pattern) => ({
      target: pattern.includes(".") ? "qualified" : "table",
      regex: new RegExp(`^${escapeRegexLiteral(pattern).replace(/\\\*/g, ".*")}$`)
    }));
  return (qualifiedName) => {
    const { table } = splitQualifiedName(qualifiedName);
    return compiled.some((entry) =>
      entry.regex.test(entry.target === "qualified" ? qualifiedName : table)
    );
  };
}
|
|
18592
|
+
/**
 * Find the nearest idempotent-managed table for `table`: the table itself, or
 * the first ancestor reached by walking the partition-parent chain. Returns
 * null when no managed ancestor exists; a visited set guards against cycles
 * in the parent map.
 *
 * @param {string} table - Qualified "schema.table" name.
 * @param {Map<string, string>} partitionParentMap - child -> parent names.
 * @param {Set<string>} idempotentManagedTables
 * @returns {string|null}
 */
function findIdempotentAncestor(table, partitionParentMap, idempotentManagedTables) {
  if (idempotentManagedTables.has(table)) {
    return table;
  }
  const seen = new Set([table]);
  for (let node = partitionParentMap.get(table); node; node = partitionParentMap.get(node)) {
    if (idempotentManagedTables.has(node)) {
      return node;
    }
    if (seen.has(node)) {
      break;
    }
    seen.add(node);
  }
  return null;
}
|
|
18611
|
+
/**
 * True when the table lives in a system schema or is an individually known
 * system table.
 *
 * @param {{qualifiedName: string, systemSchemas: Set<string>, knownSystemTables: Set<string>}} params
 * @returns {boolean}
 */
function isSystemManagedTable(params) {
  const { schema } = splitQualifiedName(params.qualifiedName);
  if (params.systemSchemas.has(schema)) {
    return true;
  }
  return params.knownSystemTables.has(params.qualifiedName);
}
|
|
18615
|
+
/**
 * Bucket DB tables that have no declarative SQL source into three classes:
 * - definedInIdempotentDynamicDdl: created by idempotent SQL, directly or as
 *   a partition child of an idempotent-managed parent;
 * - extensionManagedOrSystemTable: owned by a PG extension, a system schema /
 *   known system table, or allowlisted by config patterns;
 * - trulyOrphaned: none of the above — these warrant a warning.
 *
 * @param {object} params - tablesWithoutSource, idempotentManagedTables, and
 *   optional extensionManagedTables, partitionParentMap, systemSchemas,
 *   knownSystemTables, excludeFromOrphanDetection.
 * @returns Classification buckets; the first two carry {qualifiedName, detail}.
 */
function classifyMissingSourceTables(params) {
  const extensionManagedTables = params.extensionManagedTables ?? /* @__PURE__ */ new Map();
  const partitionParentMap = params.partitionParentMap ?? /* @__PURE__ */ new Map();
  const exclusionMatcher = buildTablePatternMatcher(params.excludeFromOrphanDetection ?? []);
  const systemSchemas = new Set(params.systemSchemas ?? []);
  const knownSystemTables = new Set(params.knownSystemTables ?? []);
  const classified = {
    definedInIdempotentDynamicDdl: [],
    extensionManagedOrSystemTable: [],
    trulyOrphaned: []
  };
  // Order matters: idempotent DDL wins over extension/system, which wins over
  // allowlisting; only tables matching nothing are reported as orphans.
  for (const qualifiedName of params.tablesWithoutSource) {
    const idempotentAncestor = findIdempotentAncestor(
      qualifiedName,
      partitionParentMap,
      params.idempotentManagedTables
    );
    if (idempotentAncestor) {
      classified.definedInIdempotentDynamicDdl.push({
        qualifiedName,
        detail: idempotentAncestor === qualifiedName ? "matched CREATE TABLE in idempotent SQL" : `partition child of ${idempotentAncestor}`
      });
      continue;
    }
    const extensionName = extensionManagedTables.get(qualifiedName);
    if (extensionName) {
      classified.extensionManagedOrSystemTable.push({
        qualifiedName,
        detail: `managed by extension "${extensionName}"`
      });
      continue;
    }
    if (isSystemManagedTable({ qualifiedName, systemSchemas, knownSystemTables })) {
      classified.extensionManagedOrSystemTable.push({
        qualifiedName,
        detail: "system-managed schema/table"
      });
      continue;
    }
    if (exclusionMatcher(qualifiedName)) {
      classified.extensionManagedOrSystemTable.push({
        qualifiedName,
        detail: "allowlisted by database.pgSchemaDiff.excludeFromOrphanDetection"
      });
      continue;
    }
    classified.trulyOrphaned.push(qualifiedName);
  }
  return classified;
}
|
|
18665
|
+
|
|
18228
18666
|
// src/commands/db/utils/table-registry.ts
|
|
18229
18667
|
var MANIFEST_VERSION = 2;
|
|
18230
18668
|
var GENERATOR_VERSION = "1.0.0";
|
|
18669
|
+
// Default location (relative to the project root) of hand-written idempotent SQL.
var DEFAULT_IDEMPOTENT_SQL_DIR = "supabase/schemas/idempotent";
// Extension-owned metadata tables (these names match the well-known PostGIS
// tables) that should never be reported as orphans.
var KNOWN_EXTENSION_SYSTEM_TABLES = /* @__PURE__ */ new Set([
  "public.spatial_ref_sys",
  "public.geometry_columns",
  "public.geography_columns"
]);
// Supabase system schemas as a Set for O(1) membership checks.
var SUPABASE_SYSTEM_SCHEMA_SET = new Set(SUPABASE_SYSTEM_SCHEMAS);
|
|
18676
|
+
// Unquoted PostgreSQL identifier: letter/underscore first, word chars after,
// at most 63 characters.
var VALID_PG_IDENTIFIER2 = /^[a-zA-Z_][a-zA-Z0-9_]{0,62}$/;

/**
 * Throw unless `name` is a safe PostgreSQL identifier; `context` labels the
 * failure message (e.g. "schema name").
 */
function validatePgIdentifier2(name, context) {
  if (typeof name !== "string" || !name) {
    throw new Error(`Invalid ${context}: empty or not a string`);
  }
  if (!VALID_PG_IDENTIFIER2.test(name)) {
    throw new Error(
      `Invalid ${context} "${name}": must start with letter/underscore and contain only alphanumeric/underscore characters`
    );
  }
}
|
|
18687
|
+
/**
 * Build a comma-separated, single-quoted schema list for a SQL `IN (...)`
 * clause. Every name is validated as a PG identifier before quoting (single
 * quotes are doubled defensively, though the identifier pattern excludes them).
 *
 * @param {string[]} schemas
 * @returns {string} e.g. `'public','auth'`
 * @throws {Error} When the list is empty or a name is invalid.
 */
function buildSafeSchemaInClause2(schemas) {
  if (schemas.length === 0) {
    throw new Error("No schemas provided for IN clause");
  }
  return schemas
    .map((schema) => {
      validatePgIdentifier2(schema, "schema name");
      return `'${schema.replace(/'/g, "''")}'`;
    })
    .join(",");
}
|
|
18698
|
+
/**
 * Express `sourceFile` relative to `projectRoot`. When the relative path
 * would escape the root (starts with "/" or ".."), fall back to the trailing
 * "supabase/schemas/<dir>/<file>" portion of the path, or the original path
 * if that portion is absent.
 *
 * @param {string} projectRoot
 * @param {string} sourceFile
 * @returns {string}
 */
function toRelativeSourcePath(projectRoot, sourceFile) {
  const rel = relative(projectRoot, sourceFile);
  const escapesRoot = rel.startsWith("/") || rel.startsWith("..");
  if (!escapesRoot) {
    return rel;
  }
  const schemaMatch = sourceFile.match(/supabase\/schemas\/[^/]+\/[^/]+$/);
  return schemaMatch ? schemaMatch[0] : sourceFile;
}
|
|
18706
|
+
/**
 * Resolve where idempotent SQL lives and which orphan-detection exclusions
 * apply, merging explicit options with the project's runa config.
 *
 * Precedence for idempotentSqlDir: options > config > DEFAULT_IDEMPOTENT_SQL_DIR.
 * Exclusion patterns from options and config are unioned. Config load
 * failures are ignored (best-effort) and the defaults/options stand.
 *
 * @param {string} projectRoot
 * @param {{idempotentSqlDir?: string, excludeFromOrphanDetection?: string[]}} options
 * @returns {{idempotentSqlDir: string, excludeFromOrphanDetection: string[]}}
 *   idempotentSqlDir is always absolute; exclusions are locale-sorted.
 */
function resolveSourceConfig(projectRoot, options) {
  let idempotentSqlDir = options.idempotentSqlDir ?? DEFAULT_IDEMPOTENT_SQL_DIR;
  const exclusions = new Set(options.excludeFromOrphanDetection ?? []);
  try {
    const config = loadRunaConfig2(projectRoot);
    const pgSchemaDiff = config.database?.pgSchemaDiff;
    // Config dir is used only when the caller did not pass one explicitly.
    if (!options.idempotentSqlDir && pgSchemaDiff?.idempotentSqlDir) {
      idempotentSqlDir = pgSchemaDiff.idempotentSqlDir;
    }
    if (pgSchemaDiff?.excludeFromOrphanDetection) {
      for (const pattern of pgSchemaDiff.excludeFromOrphanDetection) {
        exclusions.add(pattern);
      }
    }
  } catch {
    // Best-effort: missing/invalid config falls back to options + defaults.
  }
  return {
    idempotentSqlDir: isAbsolute(idempotentSqlDir) ? idempotentSqlDir : join(projectRoot, idempotentSqlDir),
    excludeFromOrphanDetection: [...exclusions].sort((a, b) => a.localeCompare(b))
  };
}
|
|
18727
|
+
/**
 * Query the database for metadata needed to classify tables lacking a SQL
 * source: which tables are owned by an extension, and each partition child's
 * parent.
 *
 * @param {{databaseUrl: string, schemas: string[]}} params - Schemas to
 *   inspect; an empty list short-circuits with empty maps (no connection).
 * @returns {Promise<{extensionManagedTables: Map<string,string>,
 *   partitionParentMap: Map<string,string>}>} Keys are "schema.table";
 *   extension map values are extension names, partition map values are
 *   parent "schema.table" names.
 */
async function fetchMissingSourceMetadata(params) {
  const { databaseUrl, schemas } = params;
  if (schemas.length === 0) {
    return {
      extensionManagedTables: /* @__PURE__ */ new Map(),
      partitionParentMap: /* @__PURE__ */ new Map()
    };
  }
  // Hosted Supabase requires TLS; local URLs connect without it.
  const isRemoteSupabase = databaseUrl.includes(".supabase.co");
  const sql = postgres(databaseUrl, {
    ...isRemoteSupabase && { ssl: "require" }
  });
  try {
    // schemaList is identifier-validated and quoted, so sql.unsafe is safe here.
    const schemaList = buildSafeSchemaInClause2(schemas);
    const [extensionRows, partitionRows] = await Promise.all([
      // Tables that a pg_extension depends on (deptype 'e' = extension member).
      sql`
        SELECT
          n.nspname AS schema_name,
          c.relname AS table_name,
          ext.extname AS extension_name
        FROM pg_class c
        JOIN pg_namespace n ON n.oid = c.relnamespace
        JOIN pg_depend d
          ON d.classid = 'pg_class'::regclass
          AND d.objid = c.oid
          AND d.refclassid = 'pg_extension'::regclass
          AND d.deptype = 'e'
        JOIN pg_extension ext ON ext.oid = d.refobjid
        WHERE c.relkind IN ('r', 'p')
          AND n.nspname IN (${sql.unsafe(schemaList)})
      `,
      // Partition/inheritance children with their parents.
      sql`
        SELECT
          child_ns.nspname AS child_schema,
          child.relname AS child_table,
          parent_ns.nspname AS parent_schema,
          parent.relname AS parent_table
        FROM pg_inherits i
        JOIN pg_class child ON child.oid = i.inhrelid
        JOIN pg_namespace child_ns ON child_ns.oid = child.relnamespace
        JOIN pg_class parent ON parent.oid = i.inhparent
        JOIN pg_namespace parent_ns ON parent_ns.oid = parent.relnamespace
        WHERE child.relkind IN ('r', 'p')
          AND child_ns.nspname IN (${sql.unsafe(schemaList)})
      `
    ]);
    const extensionManagedTables = /* @__PURE__ */ new Map();
    for (const row of extensionRows) {
      extensionManagedTables.set(
        `${String(row.schema_name)}.${String(row.table_name)}`,
        String(row.extension_name)
      );
    }
    const partitionParentMap = /* @__PURE__ */ new Map();
    for (const row of partitionRows) {
      partitionParentMap.set(
        `${String(row.child_schema)}.${String(row.child_table)}`,
        `${String(row.parent_schema)}.${String(row.parent_table)}`
      );
    }
    return { extensionManagedTables, partitionParentMap };
  } finally {
    // Always release the connection, even when a query throws.
    await sql.end();
  }
}
|
|
18792
|
+
/**
 * Render classification items as "name (detail)" (detail omitted when
 * absent), joined with ", ".
 *
 * @param {Array<{qualifiedName: string, detail?: string}>} items
 * @returns {string}
 */
function formatMissingSourceItems(items) {
  const labels = [];
  for (const { qualifiedName, detail } of items) {
    labels.push(detail ? `${qualifiedName} (${detail})` : qualifiedName);
  }
  return labels.join(", ");
}
|
|
18231
18795
|
async function introspectTablesFromDb(databaseUrl, schemas) {
|
|
18232
18796
|
try {
|
|
18233
18797
|
const result = await introspectDatabase(databaseUrl, { schemas });
|
|
@@ -18473,14 +19037,31 @@ async function crossCheckWithDrizzle(sqlTables, drizzleSchemaPath) {
|
|
|
18473
19037
|
return { matched: [], sqlOnly: sqlTables, drizzleOnly: [] };
|
|
18474
19038
|
}
|
|
18475
19039
|
}
|
|
18476
|
-
function
|
|
18477
|
-
const
|
|
18478
|
-
if (
|
|
18479
|
-
console.warn(
|
|
18480
|
-
|
|
18481
|
-
|
|
18482
|
-
|
|
18483
|
-
|
|
19040
|
+
/**
 * Print a human-readable summary of tables that exist in the DB but have no
 * SQL source. Benign buckets (idempotent-DDL, extension/system/allowlisted)
 * go to console.log; truly orphaned tables are warned about with a
 * remediation hint. No-op when all buckets are empty.
 *
 * @param {{definedInIdempotentDynamicDdl: Array, extensionManagedOrSystemTable: Array, trulyOrphaned: string[]}} classification
 */
function logMissingSourceClassification(classification) {
  const total = classification.definedInIdempotentDynamicDdl.length + classification.extensionManagedOrSystemTable.length + classification.trulyOrphaned.length;
  if (total === 0) return;
  console.warn(`[tables-manifest] \u26A0 ${total} table(s) exist in DB but not in SQL files.`);
  if (classification.definedInIdempotentDynamicDdl.length > 0) {
    console.log(
      `[tables-manifest] info: defined_in_idempotent_dynamic_ddl (${classification.definedInIdempotentDynamicDdl.length})`
    );
    console.log(` ${formatMissingSourceItems(classification.definedInIdempotentDynamicDdl)}`);
  }
  if (classification.extensionManagedOrSystemTable.length > 0) {
    console.log(
      `[tables-manifest] info: extension_managed/system_table (${classification.extensionManagedOrSystemTable.length})`
    );
    console.log(` ${formatMissingSourceItems(classification.extensionManagedOrSystemTable)}`);
  }
  if (classification.trulyOrphaned.length > 0) {
    console.warn(`[tables-manifest] warn: truly_orphaned (${classification.trulyOrphaned.length})`);
    console.warn(` ${classification.trulyOrphaned.join(", ")}`);
    console.warn(
      " \u2192 Add declarative/idempotent SQL definitions or allowlist via database.pgSchemaDiff.excludeFromOrphanDetection."
    );
  } else {
    console.log("[tables-manifest] info: no truly_orphaned tables detected.");
  }
}
|
|
18485
19066
|
async function logDrizzleCrossCheck(tables, drizzleSchemaPath) {
|
|
18486
19067
|
const result = await crossCheckWithDrizzle(tables, drizzleSchemaPath);
|
|
@@ -18512,24 +19093,38 @@ async function generateTablesManifest(projectRoot, options = {}) {
|
|
|
18512
19093
|
// Reserved for future metadata filtering feature
|
|
18513
19094
|
includeMetadata: _includeMetadata = true
|
|
18514
19095
|
} = options;
|
|
19096
|
+
const sourceConfig = resolveSourceConfig(projectRoot, options);
|
|
18515
19097
|
let tables = [];
|
|
18516
19098
|
const source = "introspection";
|
|
18517
|
-
const
|
|
19099
|
+
const declarativeTables = await extractTablesFromSqlDir(sqlDir, {
|
|
18518
19100
|
includeColumns: false,
|
|
18519
19101
|
// Don't need columns from SQL (DB introspection is more accurate)
|
|
18520
19102
|
includeForeignKeys: false,
|
|
18521
19103
|
includeIndexes: false,
|
|
18522
19104
|
includeRlsPolicies: false
|
|
18523
19105
|
});
|
|
19106
|
+
const idempotentTablesForSource = await extractTablesFromSqlDir(sourceConfig.idempotentSqlDir, {
|
|
19107
|
+
includeColumns: false,
|
|
19108
|
+
includeForeignKeys: false,
|
|
19109
|
+
includeIndexes: false,
|
|
19110
|
+
includeRlsPolicies: false
|
|
19111
|
+
});
|
|
19112
|
+
const idempotentTablesFromRegex = extractTablesFromIdempotentSql(
|
|
19113
|
+
sourceConfig.idempotentSqlDir,
|
|
19114
|
+
projectRoot
|
|
19115
|
+
);
|
|
19116
|
+
const idempotentManagedTables = /* @__PURE__ */ new Set([
|
|
19117
|
+
...idempotentTablesFromRegex,
|
|
19118
|
+
...idempotentTablesForSource.map((t) => t.qualifiedName)
|
|
19119
|
+
]);
|
|
18524
19120
|
const sourceFileMap = /* @__PURE__ */ new Map();
|
|
18525
|
-
|
|
18526
|
-
|
|
18527
|
-
if (
|
|
18528
|
-
|
|
18529
|
-
relativeSource = schemaMatch ? schemaMatch[0] : t.sourceFile;
|
|
19121
|
+
const sourceTables = [...declarativeTables, ...idempotentTablesForSource];
|
|
19122
|
+
for (const t of sourceTables) {
|
|
19123
|
+
if (sourceFileMap.has(t.qualifiedName)) {
|
|
19124
|
+
continue;
|
|
18530
19125
|
}
|
|
18531
19126
|
sourceFileMap.set(t.qualifiedName, {
|
|
18532
|
-
sourceFile:
|
|
19127
|
+
sourceFile: toRelativeSourcePath(projectRoot, t.sourceFile),
|
|
18533
19128
|
lineNumber: t.lineNumber
|
|
18534
19129
|
});
|
|
18535
19130
|
}
|
|
@@ -18549,7 +19144,34 @@ async function generateTablesManifest(projectRoot, options = {}) {
|
|
|
18549
19144
|
};
|
|
18550
19145
|
});
|
|
18551
19146
|
console.log(`[tables-manifest] \u2713 Introspected ${tables.length} tables from database`);
|
|
18552
|
-
|
|
19147
|
+
const tablesWithoutSource = tables.filter((t) => !t.sourceFile);
|
|
19148
|
+
if (tablesWithoutSource.length > 0) {
|
|
19149
|
+
const missingSourceQualifiedNames = tablesWithoutSource.map((t) => t.qualifiedName);
|
|
19150
|
+
const missingSchemas = [...new Set(tablesWithoutSource.map((t) => t.schema))];
|
|
19151
|
+
let extensionManagedTables = /* @__PURE__ */ new Map();
|
|
19152
|
+
let partitionParentMap = /* @__PURE__ */ new Map();
|
|
19153
|
+
try {
|
|
19154
|
+
const metadata = await fetchMissingSourceMetadata({
|
|
19155
|
+
databaseUrl,
|
|
19156
|
+
schemas: missingSchemas
|
|
19157
|
+
});
|
|
19158
|
+
extensionManagedTables = metadata.extensionManagedTables;
|
|
19159
|
+
partitionParentMap = metadata.partitionParentMap;
|
|
19160
|
+
} catch (error) {
|
|
19161
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
19162
|
+
console.warn(`[tables-manifest] Failed to classify extension/partition metadata: ${message}`);
|
|
19163
|
+
}
|
|
19164
|
+
const classification = classifyMissingSourceTables({
|
|
19165
|
+
tablesWithoutSource: missingSourceQualifiedNames,
|
|
19166
|
+
idempotentManagedTables,
|
|
19167
|
+
extensionManagedTables,
|
|
19168
|
+
partitionParentMap,
|
|
19169
|
+
excludeFromOrphanDetection: sourceConfig.excludeFromOrphanDetection,
|
|
19170
|
+
systemSchemas: SUPABASE_SYSTEM_SCHEMA_SET,
|
|
19171
|
+
knownSystemTables: KNOWN_EXTENSION_SYSTEM_TABLES
|
|
19172
|
+
});
|
|
19173
|
+
logMissingSourceClassification(classification);
|
|
19174
|
+
}
|
|
18553
19175
|
if (crossCheck && existsSync(drizzleSchemaPath)) {
|
|
18554
19176
|
await logDrizzleCrossCheck(tables, drizzleSchemaPath);
|
|
18555
19177
|
}
|
|
@@ -20594,225 +21216,6 @@ var backupCommand = new Command("backup").description("Manage database backups (
|
|
|
20594
21216
|
// src/commands/db/commands/db-cleanup.ts
|
|
20595
21217
|
init_esm_shims();
|
|
20596
21218
|
init_config_loader();
|
|
20597
|
-
|
|
20598
|
-
// src/commands/db/utils/schema-sync.ts
|
|
20599
|
-
init_esm_shims();
|
|
20600
|
-
var VALID_PG_IDENTIFIER_PATTERN = /^[a-zA-Z_][a-zA-Z0-9_]{0,62}$/;
|
|
20601
|
-
function validatePgIdentifier(name, context) {
|
|
20602
|
-
if (!name || typeof name !== "string") {
|
|
20603
|
-
throw new Error(`Invalid ${context}: empty or not a string`);
|
|
20604
|
-
}
|
|
20605
|
-
if (!VALID_PG_IDENTIFIER_PATTERN.test(name)) {
|
|
20606
|
-
throw new Error(
|
|
20607
|
-
`Invalid ${context} "${name}": must start with letter/underscore and contain only alphanumeric/underscore characters`
|
|
20608
|
-
);
|
|
20609
|
-
}
|
|
20610
|
-
}
|
|
20611
|
-
function escapePgStringLiteral(value) {
|
|
20612
|
-
if (typeof value !== "string") {
|
|
20613
|
-
throw new Error("Value must be a string");
|
|
20614
|
-
}
|
|
20615
|
-
return value.replace(/\\/g, "\\\\").replace(/'/g, "''");
|
|
20616
|
-
}
|
|
20617
|
-
function buildSafeSchemaInClause(schemas) {
|
|
20618
|
-
if (schemas.length === 0) {
|
|
20619
|
-
throw new Error("No schemas provided for IN clause");
|
|
20620
|
-
}
|
|
20621
|
-
const safeSchemas = [];
|
|
20622
|
-
for (const schema of schemas) {
|
|
20623
|
-
validatePgIdentifier(schema, "schema name");
|
|
20624
|
-
safeSchemas.push(`'${escapePgStringLiteral(schema)}'`);
|
|
20625
|
-
}
|
|
20626
|
-
return safeSchemas.join(",");
|
|
20627
|
-
}
|
|
20628
|
-
var ERROR_MESSAGES2 = {
|
|
20629
|
-
PATH_TRAVERSAL: "Schema path validation failed",
|
|
20630
|
-
SCHEMA_NOT_FOUND: "Schema file not found"
|
|
20631
|
-
};
|
|
20632
|
-
function containsPathTraversal2(inputPath) {
|
|
20633
|
-
const normalized = path11__default.normalize(inputPath);
|
|
20634
|
-
return normalized.includes("..") || inputPath.includes("\0");
|
|
20635
|
-
}
|
|
20636
|
-
function isPathWithinBase(filePath, baseDir) {
|
|
20637
|
-
try {
|
|
20638
|
-
const resolvedFile = path11__default.resolve(filePath);
|
|
20639
|
-
const resolvedBase = path11__default.resolve(baseDir);
|
|
20640
|
-
const normalizedFile = path11__default.normalize(resolvedFile);
|
|
20641
|
-
const normalizedBase = path11__default.normalize(resolvedBase);
|
|
20642
|
-
return normalizedFile === normalizedBase || normalizedFile.startsWith(normalizedBase + path11__default.sep);
|
|
20643
|
-
} catch {
|
|
20644
|
-
return false;
|
|
20645
|
-
}
|
|
20646
|
-
}
|
|
20647
|
-
function validateSchemaPath(dbPackagePath, projectRoot = process.cwd()) {
|
|
20648
|
-
if (containsPathTraversal2(dbPackagePath)) {
|
|
20649
|
-
throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
|
|
20650
|
-
}
|
|
20651
|
-
const schemaEntry = path11__default.join(dbPackagePath, "src", "schema", "index.ts");
|
|
20652
|
-
const absoluteSchemaPath = path11__default.resolve(projectRoot, schemaEntry);
|
|
20653
|
-
let resolvedProjectRoot;
|
|
20654
|
-
try {
|
|
20655
|
-
resolvedProjectRoot = realpathSync(projectRoot);
|
|
20656
|
-
} catch {
|
|
20657
|
-
resolvedProjectRoot = path11__default.resolve(projectRoot);
|
|
20658
|
-
}
|
|
20659
|
-
if (!isPathWithinBase(absoluteSchemaPath, resolvedProjectRoot)) {
|
|
20660
|
-
throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
|
|
20661
|
-
}
|
|
20662
|
-
if (!existsSync(absoluteSchemaPath)) {
|
|
20663
|
-
throw new Error(ERROR_MESSAGES2.SCHEMA_NOT_FOUND);
|
|
20664
|
-
}
|
|
20665
|
-
return absoluteSchemaPath;
|
|
20666
|
-
}
|
|
20667
|
-
function uniqueSorted(values) {
|
|
20668
|
-
return [...new Set(values)].sort((a, b) => a.localeCompare(b));
|
|
20669
|
-
}
|
|
20670
|
-
async function extractSchemaTablesAndEnums(dbPackagePath, projectRoot = process.cwd()) {
|
|
20671
|
-
const validatedSchemaPath = validateSchemaPath(dbPackagePath, projectRoot);
|
|
20672
|
-
const jiti = createJiti(projectRoot, { interopDefault: true });
|
|
20673
|
-
let schemaModule;
|
|
20674
|
-
try {
|
|
20675
|
-
schemaModule = await jiti.import(validatedSchemaPath);
|
|
20676
|
-
} catch (error) {
|
|
20677
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
20678
|
-
const hint = errorMessage.includes("unknown is not defined") ? "\n\nHint: Add 'unknown' to drizzle-orm/pg-core imports:\n import { unknown, ... } from 'drizzle-orm/pg-core'" : "";
|
|
20679
|
-
throw new Error(`Failed to load schema from ${validatedSchemaPath}: ${errorMessage}${hint}`);
|
|
20680
|
-
}
|
|
20681
|
-
const expectedTables = /* @__PURE__ */ new Set();
|
|
20682
|
-
const expectedEnums = /* @__PURE__ */ new Map();
|
|
20683
|
-
for (const value of Object.values(schemaModule)) {
|
|
20684
|
-
if (isTable(value)) {
|
|
20685
|
-
const unique = String(getTableUniqueName(value));
|
|
20686
|
-
if (unique.startsWith("undefined.")) {
|
|
20687
|
-
expectedTables.add(`public.${getTableName(value)}`);
|
|
20688
|
-
} else {
|
|
20689
|
-
expectedTables.add(unique);
|
|
20690
|
-
}
|
|
20691
|
-
continue;
|
|
20692
|
-
}
|
|
20693
|
-
if (isPgEnum(value)) {
|
|
20694
|
-
expectedEnums.set(value.enumName, {
|
|
20695
|
-
name: value.enumName,
|
|
20696
|
-
values: uniqueSorted(value.enumValues)
|
|
20697
|
-
});
|
|
20698
|
-
}
|
|
20699
|
-
}
|
|
20700
|
-
return { expectedTables, expectedEnums };
|
|
20701
|
-
}
|
|
20702
|
-
async function fetchDbTablesAndEnums(databaseUrl, options) {
|
|
20703
|
-
const schemaDir = options?.schemaDir ?? "packages/database/src/schema";
|
|
20704
|
-
const managedSchemas = detectSchemaNames(schemaDir, process.cwd());
|
|
20705
|
-
const systemSchemas = /* @__PURE__ */ new Set([
|
|
20706
|
-
...SUPABASE_SYSTEM_SCHEMAS,
|
|
20707
|
-
...options?.additionalSystemSchemas ?? []
|
|
20708
|
-
]);
|
|
20709
|
-
const filteredManagedSchemas = managedSchemas.filter((s) => !systemSchemas.has(s));
|
|
20710
|
-
const schemaList = buildSafeSchemaInClause(filteredManagedSchemas);
|
|
20711
|
-
const tablesSql = `
|
|
20712
|
-
SELECT schemaname || '.' || tablename
|
|
20713
|
-
FROM pg_tables
|
|
20714
|
-
WHERE schemaname IN (${schemaList})
|
|
20715
|
-
ORDER BY schemaname, tablename;`.trim();
|
|
20716
|
-
const enumsSql = `
|
|
20717
|
-
SELECT t.typname AS enum_name, string_agg(e.enumlabel, ',' ORDER BY e.enumsortorder) AS values
|
|
20718
|
-
FROM pg_type t
|
|
20719
|
-
JOIN pg_enum e ON t.oid = e.enumtypid
|
|
20720
|
-
JOIN pg_namespace n ON n.oid = t.typnamespace
|
|
20721
|
-
WHERE n.nspname = 'public'
|
|
20722
|
-
GROUP BY t.typname
|
|
20723
|
-
ORDER BY t.typname;`.trim();
|
|
20724
|
-
const tablesOut = await psqlQuery({ databaseUrl, sql: tablesSql, mode: "table" });
|
|
20725
|
-
const dbTables = /* @__PURE__ */ new Set();
|
|
20726
|
-
for (const line of tablesOut.split("\n")) {
|
|
20727
|
-
const v = line.trim();
|
|
20728
|
-
if (v.length > 0) dbTables.add(v);
|
|
20729
|
-
}
|
|
20730
|
-
const enumsOut = await psqlQuery({ databaseUrl, sql: enumsSql, mode: "table" });
|
|
20731
|
-
const dbEnums = /* @__PURE__ */ new Map();
|
|
20732
|
-
for (const line of enumsOut.split("\n")) {
|
|
20733
|
-
const trimmed = line.trim();
|
|
20734
|
-
if (trimmed.length === 0) continue;
|
|
20735
|
-
const [enumName, valuesCsv] = trimmed.split("|").map((s) => s.trim());
|
|
20736
|
-
const values = valuesCsv ? valuesCsv.split(",").map((s) => s.trim()) : [];
|
|
20737
|
-
dbEnums.set(enumName, { name: enumName, values: uniqueSorted(values) });
|
|
20738
|
-
}
|
|
20739
|
-
return { dbTables, dbEnums };
|
|
20740
|
-
}
|
|
20741
|
-
function diffSchema(params) {
|
|
20742
|
-
const missingTables = uniqueSorted(
|
|
20743
|
-
[...params.expectedTables].filter((t) => !params.dbTables.has(t))
|
|
20744
|
-
);
|
|
20745
|
-
const exclusions = new Set(params.excludeFromOrphanDetection ?? []);
|
|
20746
|
-
const exclusionPatterns = [...exclusions].filter((e) => e.includes("*"));
|
|
20747
|
-
const exactExclusions = [...exclusions].filter((e) => !e.includes("*"));
|
|
20748
|
-
const isExcluded = (table) => {
|
|
20749
|
-
if (exactExclusions.includes(table)) return true;
|
|
20750
|
-
for (const pattern of exclusionPatterns) {
|
|
20751
|
-
const regex = new RegExp(`^${pattern.replace(/\*/g, ".*")}$`);
|
|
20752
|
-
if (regex.test(table)) return true;
|
|
20753
|
-
}
|
|
20754
|
-
return false;
|
|
20755
|
-
};
|
|
20756
|
-
const orphanTables = uniqueSorted(
|
|
20757
|
-
[...params.dbTables].filter((t) => !params.expectedTables.has(t) && !isExcluded(t))
|
|
20758
|
-
);
|
|
20759
|
-
const expectedEnumNames = new Set(params.expectedEnums.keys());
|
|
20760
|
-
const dbEnumNames = new Set(params.dbEnums.keys());
|
|
20761
|
-
const missingEnums = uniqueSorted([...expectedEnumNames].filter((n) => !dbEnumNames.has(n)));
|
|
20762
|
-
const extraEnums = uniqueSorted([...dbEnumNames].filter((n) => !expectedEnumNames.has(n)));
|
|
20763
|
-
const enumValueMismatches = [];
|
|
20764
|
-
for (const name of uniqueSorted([...expectedEnumNames].filter((n) => dbEnumNames.has(n)))) {
|
|
20765
|
-
const s = params.expectedEnums.get(name);
|
|
20766
|
-
const d = params.dbEnums.get(name);
|
|
20767
|
-
if (!s || !d) continue;
|
|
20768
|
-
const schemaValues = uniqueSorted(s.values);
|
|
20769
|
-
const dbValues = uniqueSorted(d.values);
|
|
20770
|
-
const same = schemaValues.length === dbValues.length && schemaValues.every((v, i) => v === dbValues[i]);
|
|
20771
|
-
if (same) continue;
|
|
20772
|
-
const added = schemaValues.filter((v) => !dbValues.includes(v));
|
|
20773
|
-
const removed = dbValues.filter((v) => !schemaValues.includes(v));
|
|
20774
|
-
enumValueMismatches.push({ name, dbValues, schemaValues, added, removed });
|
|
20775
|
-
}
|
|
20776
|
-
return {
|
|
20777
|
-
expectedTables: params.expectedTables,
|
|
20778
|
-
expectedEnums: params.expectedEnums,
|
|
20779
|
-
dbTables: params.dbTables,
|
|
20780
|
-
dbEnums: params.dbEnums,
|
|
20781
|
-
missingTables,
|
|
20782
|
-
orphanTables,
|
|
20783
|
-
missingEnums,
|
|
20784
|
-
extraEnums,
|
|
20785
|
-
enumValueMismatches
|
|
20786
|
-
};
|
|
20787
|
-
}
|
|
20788
|
-
function extractTablesFromIdempotentSql(idempotentDir, projectRoot = process.cwd()) {
|
|
20789
|
-
const fullPath = path11__default.resolve(projectRoot, idempotentDir);
|
|
20790
|
-
if (!existsSync(fullPath)) {
|
|
20791
|
-
return [];
|
|
20792
|
-
}
|
|
20793
|
-
const tables = [];
|
|
20794
|
-
const createTablePattern = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?\.)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?)/gi;
|
|
20795
|
-
try {
|
|
20796
|
-
const files = readdirSync(fullPath).filter((f) => f.endsWith(".sql"));
|
|
20797
|
-
for (const file of files) {
|
|
20798
|
-
const filePath = path11__default.join(fullPath, file);
|
|
20799
|
-
const content = readFileSync(filePath, "utf-8");
|
|
20800
|
-
const contentWithoutComments = content.replace(/--.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
|
|
20801
|
-
for (const match of contentWithoutComments.matchAll(createTablePattern)) {
|
|
20802
|
-
const schema = match[1] || "public";
|
|
20803
|
-
const tableName = match[2];
|
|
20804
|
-
if (tableName) {
|
|
20805
|
-
tables.push(`${schema}.${tableName}`);
|
|
20806
|
-
}
|
|
20807
|
-
}
|
|
20808
|
-
}
|
|
20809
|
-
} catch {
|
|
20810
|
-
return [];
|
|
20811
|
-
}
|
|
20812
|
-
return [...new Set(tables)].sort();
|
|
20813
|
-
}
|
|
20814
|
-
|
|
20815
|
-
// src/commands/db/commands/db-cleanup.ts
|
|
20816
21219
|
function quoteIdentifier(identifier) {
|
|
20817
21220
|
return `"${identifier.replaceAll('"', '""')}"`;
|
|
20818
21221
|
}
|
|
@@ -22223,11 +22626,18 @@ var setupContext = fromPromise(
|
|
|
22223
22626
|
const dbPackagePath = await getDatabasePackagePath();
|
|
22224
22627
|
const databaseUrl = resolveDatabaseUrl(env2);
|
|
22225
22628
|
const tmpDir = `${repoRoot}/.runa-tmp`;
|
|
22629
|
+
let configTimeoutMs;
|
|
22630
|
+
try {
|
|
22631
|
+
const config = loadRunaConfig2();
|
|
22632
|
+
configTimeoutMs = config.database?.sync?.timeoutMs;
|
|
22633
|
+
} catch {
|
|
22634
|
+
}
|
|
22226
22635
|
return {
|
|
22227
22636
|
repoRoot,
|
|
22228
22637
|
tmpDir,
|
|
22229
22638
|
databaseUrl,
|
|
22230
|
-
dbPackagePath
|
|
22639
|
+
dbPackagePath,
|
|
22640
|
+
configTimeoutMs
|
|
22231
22641
|
};
|
|
22232
22642
|
}
|
|
22233
22643
|
);
|
|
@@ -22372,7 +22782,9 @@ var syncSchema = fromPromise(
|
|
|
22372
22782
|
skipCodegen: ctx.skipCodegen,
|
|
22373
22783
|
reportJson: ctx.reportJson,
|
|
22374
22784
|
invokedAs: "runa db sync",
|
|
22375
|
-
fromProduction: ctx.fromProduction
|
|
22785
|
+
fromProduction: ctx.fromProduction,
|
|
22786
|
+
timeoutMs: ctx.timeoutMs,
|
|
22787
|
+
configTimeoutMs: ctx.configTimeoutMs
|
|
22376
22788
|
});
|
|
22377
22789
|
return {
|
|
22378
22790
|
applied: result.applied,
|
|
@@ -22522,7 +22934,11 @@ z.object({
|
|
|
22522
22934
|
skipCodegen: z.boolean(),
|
|
22523
22935
|
fromProduction: z.boolean(),
|
|
22524
22936
|
autoSnapshot: z.boolean(),
|
|
22525
|
-
reportJson: z.string().optional()
|
|
22937
|
+
reportJson: z.string().optional(),
|
|
22938
|
+
/** Subprocess timeout in milliseconds (from CLI flag) */
|
|
22939
|
+
timeoutMs: z.number().int().positive().optional(),
|
|
22940
|
+
/** Config-level timeout from runa.config.ts (lower priority than timeoutMs and env var) */
|
|
22941
|
+
configTimeoutMs: z.number().int().positive().optional()
|
|
22526
22942
|
});
|
|
22527
22943
|
z.object({
|
|
22528
22944
|
/** Target environment */
|
|
@@ -22558,8 +22974,10 @@ z.object({
|
|
|
22558
22974
|
autoSnapshot: z.boolean().optional(),
|
|
22559
22975
|
reportJson: z.string().optional(),
|
|
22560
22976
|
/** Pre-clean orphan empty tables + unused enums before sync */
|
|
22561
|
-
reconcile: z.boolean().optional()
|
|
22562
|
-
|
|
22977
|
+
reconcile: z.boolean().optional(),
|
|
22978
|
+
/** Subprocess timeout in milliseconds */
|
|
22979
|
+
timeoutMs: z.number().int().positive().optional()
|
|
22980
|
+
}).strict();
|
|
22563
22981
|
|
|
22564
22982
|
// src/commands/db/sync/machine.ts
|
|
22565
22983
|
init_esm_shims();
|
|
@@ -22731,7 +23149,9 @@ var dbSyncMachine = setup({
|
|
|
22731
23149
|
skipCodegen: context.input.skipCodegen ?? false,
|
|
22732
23150
|
fromProduction: context.input.fromProduction ?? false,
|
|
22733
23151
|
autoSnapshot: context.input.autoSnapshot ?? false,
|
|
22734
|
-
reportJson: context.input.reportJson
|
|
23152
|
+
reportJson: context.input.reportJson,
|
|
23153
|
+
timeoutMs: context.input.timeoutMs,
|
|
23154
|
+
configTimeoutMs: event.output.configTimeoutMs
|
|
22735
23155
|
})
|
|
22736
23156
|
})
|
|
22737
23157
|
},
|
|
@@ -22751,7 +23171,9 @@ var dbSyncMachine = setup({
|
|
|
22751
23171
|
skipCodegen: context.input.skipCodegen ?? false,
|
|
22752
23172
|
fromProduction: context.input.fromProduction ?? false,
|
|
22753
23173
|
autoSnapshot: context.input.autoSnapshot ?? false,
|
|
22754
|
-
reportJson: context.input.reportJson
|
|
23174
|
+
reportJson: context.input.reportJson,
|
|
23175
|
+
timeoutMs: context.input.timeoutMs,
|
|
23176
|
+
configTimeoutMs: event.output.configTimeoutMs
|
|
22755
23177
|
})
|
|
22756
23178
|
})
|
|
22757
23179
|
}
|
|
@@ -22971,7 +23393,8 @@ function optionsToMachineInput2(env2, options) {
|
|
|
22971
23393
|
fromProduction: options.fromProduction === true || typeof options.fromProduction === "string",
|
|
22972
23394
|
reportJson: options.reportJson,
|
|
22973
23395
|
targetDir: process.cwd(),
|
|
22974
|
-
reconcile: options.reconcile === true
|
|
23396
|
+
reconcile: options.reconcile === true,
|
|
23397
|
+
timeoutMs: options.timeout
|
|
22975
23398
|
};
|
|
22976
23399
|
}
|
|
22977
23400
|
var dbSyncHelpers = {
|
|
@@ -23163,6 +23586,14 @@ var syncCommand = new Command("sync").description("Sync SQL schemas to database
|
|
|
23163
23586
|
).option(
|
|
23164
23587
|
"--bootstrap",
|
|
23165
23588
|
"Bootstrap mode: Auto-start Supabase with --ignore-health-check if not running"
|
|
23589
|
+
).option(
|
|
23590
|
+
"--timeout <ms>",
|
|
23591
|
+
"Subprocess timeout in ms (default: 180000 local, 600000 production)",
|
|
23592
|
+
(val) => {
|
|
23593
|
+
const n = Number.parseInt(val, 10);
|
|
23594
|
+
if (Number.isNaN(n) || n <= 0) throw new Error("--timeout must be a positive integer");
|
|
23595
|
+
return n;
|
|
23596
|
+
}
|
|
23166
23597
|
).action(
|
|
23167
23598
|
async (env2, options) => await runSyncCommandAction(env2, options)
|
|
23168
23599
|
);
|
|
@@ -23841,9 +24272,9 @@ async function checkDocker() {
|
|
|
23841
24272
|
severity: "error",
|
|
23842
24273
|
message: "Docker is not running or not installed",
|
|
23843
24274
|
fixInstructions: [
|
|
23844
|
-
"Start
|
|
23845
|
-
"
|
|
23846
|
-
"
|
|
24275
|
+
"Start Colima: colima start --cpu 4 --memory 8 --disk 60 --vm-type vz --mount-type virtiofs",
|
|
24276
|
+
"Install Colima: brew install colima docker",
|
|
24277
|
+
"Linux: sudo systemctl start docker"
|
|
23847
24278
|
]
|
|
23848
24279
|
};
|
|
23849
24280
|
}
|
|
@@ -23875,24 +24306,24 @@ async function checkPort(port) {
|
|
|
23875
24306
|
}
|
|
23876
24307
|
function detectSupabasePortsFromConfig() {
|
|
23877
24308
|
const configPath = path11__default.join(process.cwd(), "supabase", "config.toml");
|
|
23878
|
-
const
|
|
24309
|
+
const BASE_PORTS2 = { api: 54321, db: 54322, studio: 54323, inbucket: 54324 };
|
|
23879
24310
|
if (!existsSync(configPath)) {
|
|
23880
|
-
return Object.values(
|
|
24311
|
+
return Object.values(BASE_PORTS2);
|
|
23881
24312
|
}
|
|
23882
24313
|
try {
|
|
23883
24314
|
const content = readFileSync(configPath, "utf-8");
|
|
23884
24315
|
const ports = [];
|
|
23885
24316
|
const apiMatch = content.match(/\[api\][\s\S]*?port\s*=\s*(\d+)/);
|
|
23886
|
-
ports.push(apiMatch ? Number.parseInt(apiMatch[1], 10) :
|
|
24317
|
+
ports.push(apiMatch ? Number.parseInt(apiMatch[1], 10) : BASE_PORTS2.api);
|
|
23887
24318
|
const dbMatch = content.match(/\[db\][\s\S]*?port\s*=\s*(\d+)/);
|
|
23888
|
-
ports.push(dbMatch ? Number.parseInt(dbMatch[1], 10) :
|
|
24319
|
+
ports.push(dbMatch ? Number.parseInt(dbMatch[1], 10) : BASE_PORTS2.db);
|
|
23889
24320
|
const studioMatch = content.match(/\[studio\][\s\S]*?port\s*=\s*(\d+)/);
|
|
23890
|
-
ports.push(studioMatch ? Number.parseInt(studioMatch[1], 10) :
|
|
24321
|
+
ports.push(studioMatch ? Number.parseInt(studioMatch[1], 10) : BASE_PORTS2.studio);
|
|
23891
24322
|
const inbucketMatch = content.match(/\[inbucket\][\s\S]*?port\s*=\s*(\d+)/);
|
|
23892
|
-
ports.push(inbucketMatch ? Number.parseInt(inbucketMatch[1], 10) :
|
|
24323
|
+
ports.push(inbucketMatch ? Number.parseInt(inbucketMatch[1], 10) : BASE_PORTS2.inbucket);
|
|
23893
24324
|
return ports;
|
|
23894
24325
|
} catch {
|
|
23895
|
-
return Object.values(
|
|
24326
|
+
return Object.values(BASE_PORTS2);
|
|
23896
24327
|
}
|
|
23897
24328
|
}
|
|
23898
24329
|
async function checkSupabasePorts() {
|
|
@@ -23959,68 +24390,56 @@ function diagnoseInitFailure(errorMessage) {
|
|
|
23959
24390
|
|
|
23960
24391
|
// src/utils/port-allocator.ts
|
|
23961
24392
|
init_esm_shims();
|
|
23962
|
-
var BASE_PORT = 54321;
|
|
23963
|
-
var PORTS_PER_SLOT = 10;
|
|
23964
|
-
var TOTAL_SLOTS = 100;
|
|
23965
|
-
function calculatePortOffset(projectPath) {
|
|
23966
|
-
const normalizedPath = path11__default.resolve(projectPath);
|
|
23967
|
-
const hash = createHash("md5").update(normalizedPath).digest("hex");
|
|
23968
|
-
return parseInt(hash.slice(0, 8), 16) % TOTAL_SLOTS;
|
|
23969
|
-
}
|
|
23970
24393
|
function getSupabasePorts(projectPath) {
|
|
23971
24394
|
const offset = calculatePortOffset(projectPath);
|
|
23972
|
-
|
|
23973
|
-
return {
|
|
23974
|
-
api: basePort + 0,
|
|
23975
|
-
db: basePort + 1,
|
|
23976
|
-
studio: basePort + 2,
|
|
23977
|
-
inbucket: basePort + 3,
|
|
23978
|
-
auth: basePort + 4,
|
|
23979
|
-
rest: basePort + 5,
|
|
23980
|
-
realtime: basePort + 6,
|
|
23981
|
-
storage: basePort + 7,
|
|
23982
|
-
shadow: basePort + 8
|
|
23983
|
-
};
|
|
24395
|
+
return getPortsWithOffset(offset);
|
|
23984
24396
|
}
|
|
23985
|
-
function
|
|
23986
|
-
const ports = getSupabasePorts(projectPath);
|
|
24397
|
+
async function updateSupabaseConfigPortsSafe(projectPath) {
|
|
23987
24398
|
const configPath = path11__default.join(projectPath, "supabase", "config.toml");
|
|
24399
|
+
const resolved = await resolveAvailablePorts(projectPath);
|
|
24400
|
+
if (!resolved) {
|
|
24401
|
+
const ports = getSupabasePorts(projectPath);
|
|
24402
|
+
return { updated: false, ports, configPath, retried: false };
|
|
24403
|
+
}
|
|
23988
24404
|
if (!existsSync(configPath)) {
|
|
23989
|
-
return { updated: false, ports, configPath };
|
|
24405
|
+
return { updated: false, ports: resolved.ports, configPath, retried: resolved.retried };
|
|
23990
24406
|
}
|
|
24407
|
+
const updated = writePortsToConfig(configPath, resolved.ports);
|
|
24408
|
+
return { updated, ports: resolved.ports, configPath, retried: resolved.retried };
|
|
24409
|
+
}
|
|
24410
|
+
function writePortsToConfig(configPath, ports) {
|
|
23991
24411
|
let content = readFileSync(configPath, "utf-8");
|
|
23992
|
-
let
|
|
24412
|
+
let changed = false;
|
|
23993
24413
|
const portMappings = [
|
|
23994
24414
|
{ section: "api", key: "port", value: ports.api },
|
|
23995
24415
|
{ section: "db", key: "port", value: ports.db },
|
|
23996
24416
|
{ section: "db", key: "shadow_port", value: ports.shadow },
|
|
23997
24417
|
{ section: "studio", key: "port", value: ports.studio },
|
|
23998
|
-
{ section: "inbucket", key: "port", value: ports.inbucket }
|
|
23999
|
-
{ section: "auth", key: "port", value: ports.auth }
|
|
24000
|
-
// Note: rest, realtime, storage ports are internal and not in config.toml
|
|
24418
|
+
{ section: "inbucket", key: "port", value: ports.inbucket }
|
|
24001
24419
|
];
|
|
24002
24420
|
for (const { section, key, value } of portMappings) {
|
|
24003
24421
|
const sectionRegex = new RegExp(`(\\[${section}\\][^\\[]*?)(${key}\\s*=\\s*)(\\d+)`, "gs");
|
|
24422
|
+
sectionRegex.lastIndex = 0;
|
|
24004
24423
|
const newContent = content.replace(sectionRegex, (match, prefix, keyPart, oldValue) => {
|
|
24005
24424
|
if (parseInt(oldValue, 10) !== value) {
|
|
24006
|
-
|
|
24425
|
+
changed = true;
|
|
24007
24426
|
return `${prefix}${keyPart}${value}`;
|
|
24008
24427
|
}
|
|
24009
24428
|
return match;
|
|
24010
24429
|
});
|
|
24011
24430
|
content = newContent;
|
|
24012
24431
|
}
|
|
24013
|
-
if (
|
|
24432
|
+
if (changed) {
|
|
24014
24433
|
writeFileSync(configPath, content, "utf-8");
|
|
24015
24434
|
}
|
|
24016
|
-
return
|
|
24435
|
+
return changed;
|
|
24017
24436
|
}
|
|
24018
24437
|
function getPortAllocationSummary(projectPath) {
|
|
24019
24438
|
const ports = getSupabasePorts(projectPath);
|
|
24020
24439
|
const offset = calculatePortOffset(projectPath);
|
|
24021
24440
|
return [
|
|
24022
24441
|
`Port allocation for: ${path11__default.basename(projectPath)}`,
|
|
24023
|
-
` Slot: ${offset} (hash-based)`,
|
|
24442
|
+
` Slot: ${offset / 10} (hash-based, offset=${offset})`,
|
|
24024
24443
|
` API: ${ports.api}`,
|
|
24025
24444
|
` DB: ${ports.db}`,
|
|
24026
24445
|
` Studio: ${ports.studio}`,
|
|
@@ -24178,11 +24597,14 @@ var startCommand = new Command("start").description("Start local Supabase with a
|
|
|
24178
24597
|
logger16.info(output3);
|
|
24179
24598
|
return;
|
|
24180
24599
|
}
|
|
24181
|
-
const portResult =
|
|
24600
|
+
const portResult = await updateSupabaseConfigPortsSafe(projectRoot);
|
|
24182
24601
|
if (portResult.updated) {
|
|
24183
24602
|
logger16.info("\u{1F522} Port allocation updated for this project:");
|
|
24184
24603
|
logger16.info(getPortAllocationSummary(projectRoot));
|
|
24185
24604
|
}
|
|
24605
|
+
if (portResult.retried) {
|
|
24606
|
+
logger16.warn("\u26A0\uFE0F Hash-based port slot was occupied; using fallback slot.");
|
|
24607
|
+
}
|
|
24186
24608
|
logStartModeInfo(logger16, options, detectionResult, finalExcluded);
|
|
24187
24609
|
const result = await dbStart({
|
|
24188
24610
|
exclude: finalExcluded,
|
|
@@ -24269,11 +24691,14 @@ var resetCommand = new Command("reset").description("Reset local database with s
|
|
|
24269
24691
|
try {
|
|
24270
24692
|
logger16.section("Database Reset");
|
|
24271
24693
|
const projectRoot = process.cwd();
|
|
24272
|
-
const portResult =
|
|
24694
|
+
const portResult = await updateSupabaseConfigPortsSafe(projectRoot);
|
|
24273
24695
|
if (portResult.updated) {
|
|
24274
24696
|
logger16.info("\u{1F522} Port allocation updated for this project:");
|
|
24275
24697
|
logger16.info(getPortAllocationSummary(projectRoot));
|
|
24276
24698
|
}
|
|
24699
|
+
if (portResult.retried) {
|
|
24700
|
+
logger16.warn("\u26A0\uFE0F Hash-based port slot was occupied; using fallback slot.");
|
|
24701
|
+
}
|
|
24277
24702
|
logger16.step("Stopping Supabase...", 1);
|
|
24278
24703
|
const resetResult = await dbReset({
|
|
24279
24704
|
env: "local",
|
|
@@ -25123,9 +25548,15 @@ async function readSeedMetadataFile(metadataPath) {
|
|
|
25123
25548
|
}
|
|
25124
25549
|
}
|
|
25125
25550
|
var seedMetadataCommand = new Command("metadata").description("Extract seed metadata (primary root anchor) for CI workflows").option("--file <path>", "Path to metadata JSON file (optional; prefer --from-db)").option("--from-db", "Infer metadata from applied database state (preferred)", false).option("--github-output", "Write outputs to $GITHUB_OUTPUT (GitHub Actions)", false).action(async (options) => {
|
|
25126
|
-
const
|
|
25127
|
-
const shouldUseDb = options.fromDb === true || !
|
|
25128
|
-
|
|
25551
|
+
const rawMetadataPath = options.file?.trim();
|
|
25552
|
+
const shouldUseDb = options.fromDb === true || !rawMetadataPath;
|
|
25553
|
+
let out;
|
|
25554
|
+
if (shouldUseDb) {
|
|
25555
|
+
out = await inferPrimaryIdsFromDatabase();
|
|
25556
|
+
} else {
|
|
25557
|
+
const validatedPath = validateUserFilePath(rawMetadataPath, process.cwd());
|
|
25558
|
+
out = await readSeedMetadataFile(validatedPath);
|
|
25559
|
+
}
|
|
25129
25560
|
if (options.githubOutput === true) {
|
|
25130
25561
|
await writeGitHubOutput({
|
|
25131
25562
|
...out.primary?.root?.id ? { root_id: out.primary.root.id } : {},
|
|
@@ -26908,9 +27339,7 @@ async function getVercelRootDirectory() {
|
|
|
26908
27339
|
init_local_supabase();
|
|
26909
27340
|
var ERROR_MESSAGES3 = {
|
|
26910
27341
|
INVALID_PATH: "Invalid working directory path",
|
|
26911
|
-
PATH_TRAVERSAL: "Working directory path validation failed"
|
|
26912
|
-
APP_NOT_FOUND: "App directory not found"
|
|
26913
|
-
};
|
|
27342
|
+
PATH_TRAVERSAL: "Working directory path validation failed"};
|
|
26914
27343
|
function sanitizeErrorMessage(message) {
|
|
26915
27344
|
if (!message || typeof message !== "string") {
|
|
26916
27345
|
return "Unknown error";
|
|
@@ -26970,23 +27399,6 @@ function validateCustomWorkingDir(cwdPath, projectRoot) {
|
|
|
26970
27399
|
}
|
|
26971
27400
|
return absolutePath;
|
|
26972
27401
|
}
|
|
26973
|
-
function validateAppDirectory2(appName, projectRoot) {
|
|
26974
|
-
if (containsPathTraversal3(appName) || appName.includes("/") || appName.includes("\\")) {
|
|
26975
|
-
throw new CLIError(ERROR_MESSAGES3.PATH_TRAVERSAL, "ENV_PULL_PATH_TRAVERSAL");
|
|
26976
|
-
}
|
|
26977
|
-
const appsDir = resolve(projectRoot, "apps");
|
|
26978
|
-
const appDir = resolve(appsDir, appName);
|
|
26979
|
-
if (!isPathWithinBase2(appDir, appsDir)) {
|
|
26980
|
-
throw new CLIError(ERROR_MESSAGES3.PATH_TRAVERSAL, "ENV_PULL_PATH_TRAVERSAL");
|
|
26981
|
-
}
|
|
26982
|
-
if (!existsSync(appDir)) {
|
|
26983
|
-
throw new CLIError(ERROR_MESSAGES3.APP_NOT_FOUND, "ENV_PULL_APP_NOT_FOUND", [
|
|
26984
|
-
`Available apps: ${getAvailableApps().join(", ") || "none"}`,
|
|
26985
|
-
"Specify full path with --cwd instead"
|
|
26986
|
-
]);
|
|
26987
|
-
}
|
|
26988
|
-
return appDir;
|
|
26989
|
-
}
|
|
26990
27402
|
var LOCAL_BOOTSTRAP_REQUIRED_KEYS = [
|
|
26991
27403
|
"LOCAL_SUPABASE_HOST",
|
|
26992
27404
|
"LOCAL_SUPABASE_API_PORT",
|
|
@@ -27083,22 +27495,18 @@ function resolveVercelAuth(workDir, options, logger16) {
|
|
|
27083
27495
|
}
|
|
27084
27496
|
function resolveWorkingDir(options) {
|
|
27085
27497
|
const projectRoot = process.cwd();
|
|
27498
|
+
if (options.app) {
|
|
27499
|
+
console.warn(
|
|
27500
|
+
`\u26A0\uFE0F --app is deprecated. Environment files are managed at the monorepo root only.
|
|
27501
|
+
Turbo auto-propagates root .env.* files to all workspaces.
|
|
27502
|
+
Writing to project root instead of apps/${options.app}/.`
|
|
27503
|
+
);
|
|
27504
|
+
}
|
|
27086
27505
|
if (options.cwd) {
|
|
27087
27506
|
return validateCustomWorkingDir(options.cwd, projectRoot);
|
|
27088
27507
|
}
|
|
27089
|
-
if (options.app) {
|
|
27090
|
-
return validateAppDirectory2(options.app, projectRoot);
|
|
27091
|
-
}
|
|
27092
27508
|
return projectRoot;
|
|
27093
27509
|
}
|
|
27094
|
-
function getAvailableApps() {
|
|
27095
|
-
const appsDir = resolve(process.cwd(), "apps");
|
|
27096
|
-
if (!existsSync(appsDir)) return [];
|
|
27097
|
-
return readdirSync(appsDir).filter((name) => {
|
|
27098
|
-
const fullPath = resolve(appsDir, name);
|
|
27099
|
-
return statSync(fullPath).isDirectory();
|
|
27100
|
-
});
|
|
27101
|
-
}
|
|
27102
27510
|
function getOutputPath(workDir, environment) {
|
|
27103
27511
|
return resolve(workDir, `.env.${environment}`);
|
|
27104
27512
|
}
|
|
@@ -29331,7 +29739,11 @@ async function listArchivedHotfixes(input3 = {}) {
|
|
|
29331
29739
|
const hotfixes = [];
|
|
29332
29740
|
for (const file of files.filter((f) => f.endsWith(".json"))) {
|
|
29333
29741
|
try {
|
|
29334
|
-
const
|
|
29742
|
+
const filePath = path11__default.join(archiveDir, file);
|
|
29743
|
+
if (!isPathContained(archiveDir, filePath)) {
|
|
29744
|
+
continue;
|
|
29745
|
+
}
|
|
29746
|
+
const content = await readFile(filePath, "utf-8");
|
|
29335
29747
|
hotfixes.push(HotfixMetadataSchema.parse(JSON.parse(content)));
|
|
29336
29748
|
} catch {
|
|
29337
29749
|
}
|
|
@@ -30668,10 +31080,10 @@ Fix:
|
|
|
30668
31080
|
Docker is not running or not installed.
|
|
30669
31081
|
|
|
30670
31082
|
Fix:
|
|
30671
|
-
1. Start
|
|
30672
|
-
2.
|
|
30673
|
-
3.
|
|
30674
|
-
4.
|
|
31083
|
+
1. Start Colima: colima start --cpu 4 --memory 8 --vm-type vz --mount-type virtiofs
|
|
31084
|
+
2. Install Colima: brew install colima docker
|
|
31085
|
+
3. Linux: sudo systemctl start docker
|
|
31086
|
+
4. Verify Docker is running: docker ps
|
|
30675
31087
|
`,
|
|
30676
31088
|
relatedCommands: ["runa check"]
|
|
30677
31089
|
},
|
|
@@ -30797,7 +31209,7 @@ init_esm_shims();
|
|
|
30797
31209
|
|
|
30798
31210
|
// src/constants/versions.ts
|
|
30799
31211
|
init_esm_shims();
|
|
30800
|
-
var COMPATIBLE_TEMPLATES_VERSION = "0.5.
|
|
31212
|
+
var COMPATIBLE_TEMPLATES_VERSION = "0.5.56";
|
|
30801
31213
|
var TEMPLATES_PACKAGE_NAME = "@r06-dev/runa-templates";
|
|
30802
31214
|
var GITHUB_PACKAGES_REGISTRY = "https://npm.pkg.github.com";
|
|
30803
31215
|
|
|
@@ -31761,21 +32173,28 @@ function collectRouteInfo(relativePath, code, _verbose) {
|
|
|
31761
32173
|
if (isExcludedScope(relativePath)) {
|
|
31762
32174
|
return;
|
|
31763
32175
|
}
|
|
31764
|
-
|
|
32176
|
+
const isPage = isPageFile(relativePath);
|
|
32177
|
+
const isLayout = isLayoutFile(relativePath);
|
|
32178
|
+
const isApiRoute = isApiRouteFile(relativePath);
|
|
32179
|
+
const isMiddleware = isMiddlewareFile(relativePath);
|
|
32180
|
+
if (isPage) {
|
|
31765
32181
|
collectPageInfo(relativePath, code);
|
|
31766
32182
|
}
|
|
31767
|
-
if (
|
|
32183
|
+
if (isLayout) {
|
|
31768
32184
|
collectLayoutInfo(relativePath, code);
|
|
31769
32185
|
}
|
|
31770
|
-
if (
|
|
32186
|
+
if (isApiRoute) {
|
|
31771
32187
|
collectApiRouteInfo(relativePath, code);
|
|
31772
32188
|
}
|
|
31773
|
-
if (
|
|
32189
|
+
if (isMiddleware) {
|
|
31774
32190
|
collectAuthBoundaries(relativePath, code);
|
|
31775
32191
|
}
|
|
31776
32192
|
if (hasMachineDefinition(code)) {
|
|
31777
32193
|
collectMachineDefinition(relativePath, code);
|
|
31778
32194
|
}
|
|
32195
|
+
if (!isPage && !isLayout && !isApiRoute && !isMiddleware) {
|
|
32196
|
+
collectComponentInfo(relativePath, code);
|
|
32197
|
+
}
|
|
31779
32198
|
}
|
|
31780
32199
|
function emptyResult(filePath) {
|
|
31781
32200
|
return {
|
|
@@ -32216,6 +32635,12 @@ async function preprocessFile(filePath, repoRoot, options) {
|
|
|
32216
32635
|
const attrs = extractInjectedAttributes(code);
|
|
32217
32636
|
const machineIdMatches = code.matchAll(/data-machine-id="([^"]+)"/g);
|
|
32218
32637
|
const machineIds = [...new Set([...machineIdMatches].map((m) => m[1]).filter(Boolean))];
|
|
32638
|
+
const resolutionDetails = machineIds.map((machineId) => ({
|
|
32639
|
+
filePath: relativePath,
|
|
32640
|
+
machineRef: machineId,
|
|
32641
|
+
resolvedId: machineId,
|
|
32642
|
+
source: "explicit"
|
|
32643
|
+
}));
|
|
32219
32644
|
if (machineIds.length === 0 && mightContainMachineHooks(code)) {
|
|
32220
32645
|
if (options.verbose) {
|
|
32221
32646
|
console.log(` [recovery] Marker found but no attributes in ${relativePath}, re-injecting`);
|
|
@@ -32234,7 +32659,7 @@ async function preprocessFile(filePath, repoRoot, options) {
|
|
|
32234
32659
|
machineIds,
|
|
32235
32660
|
changed: false,
|
|
32236
32661
|
...attrs,
|
|
32237
|
-
resolutionDetails
|
|
32662
|
+
resolutionDetails
|
|
32238
32663
|
}
|
|
32239
32664
|
};
|
|
32240
32665
|
}
|
|
@@ -35374,17 +35799,17 @@ function printActionsNeeded(logger16, actions) {
|
|
|
35374
35799
|
);
|
|
35375
35800
|
}
|
|
35376
35801
|
function findRepoRoot3(startDir) {
|
|
35377
|
-
const { existsSync:
|
|
35802
|
+
const { existsSync: existsSync53, readFileSync: readFileSync30 } = __require("fs");
|
|
35378
35803
|
const { join: join23, dirname: dirname5 } = __require("path");
|
|
35379
35804
|
let current = startDir;
|
|
35380
35805
|
while (current !== dirname5(current)) {
|
|
35381
|
-
if (
|
|
35806
|
+
if (existsSync53(join23(current, "turbo.json"))) {
|
|
35382
35807
|
return current;
|
|
35383
35808
|
}
|
|
35384
35809
|
const pkgPath = join23(current, "package.json");
|
|
35385
|
-
if (
|
|
35810
|
+
if (existsSync53(pkgPath)) {
|
|
35386
35811
|
try {
|
|
35387
|
-
const pkg = JSON.parse(
|
|
35812
|
+
const pkg = JSON.parse(readFileSync30(pkgPath, "utf-8"));
|
|
35388
35813
|
if (pkg.workspaces) {
|
|
35389
35814
|
return current;
|
|
35390
35815
|
}
|
|
@@ -35450,10 +35875,10 @@ function generateReportOutput(output3, isJsonMode) {
|
|
|
35450
35875
|
};
|
|
35451
35876
|
}
|
|
35452
35877
|
function validateRunaRepo(repoRoot) {
|
|
35453
|
-
const { existsSync:
|
|
35878
|
+
const { existsSync: existsSync53 } = __require("fs");
|
|
35454
35879
|
const { join: join23 } = __require("path");
|
|
35455
35880
|
const templateDir = join23(repoRoot, "packages/runa-templates/templates");
|
|
35456
|
-
if (!
|
|
35881
|
+
if (!existsSync53(templateDir)) {
|
|
35457
35882
|
throw new CLIError("template-check is a runa-repo only command", "NOT_RUNA_REPO", [
|
|
35458
35883
|
"This command compares runa-repo with pj-repo templates",
|
|
35459
35884
|
"It should only be run in the runa repository",
|