@runa-ai/runa-cli 0.5.58 → 0.5.61
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/ci/commands/ci-supabase-local.d.ts +5 -0
- package/dist/commands/ci/commands/ci-supabase-local.d.ts.map +1 -1
- package/dist/commands/ci/machine/actors/db/sync-schema.d.ts.map +1 -1
- package/dist/commands/db/apply/actors.d.ts +4 -0
- package/dist/commands/db/apply/actors.d.ts.map +1 -1
- package/dist/commands/db/apply/contract.d.ts +1 -0
- package/dist/commands/db/apply/contract.d.ts.map +1 -1
- package/dist/commands/db/apply/helpers/index.d.ts +4 -2
- package/dist/commands/db/apply/helpers/index.d.ts.map +1 -1
- package/dist/commands/db/apply/helpers/plan-validator.d.ts +61 -0
- package/dist/commands/db/apply/helpers/plan-validator.d.ts.map +1 -0
- package/dist/commands/db/apply/helpers/retry-logic.d.ts +25 -0
- package/dist/commands/db/apply/helpers/retry-logic.d.ts.map +1 -1
- package/dist/commands/db/apply/machine.d.ts +5 -0
- package/dist/commands/db/apply/machine.d.ts.map +1 -1
- package/dist/commands/db/commands/db-lifecycle.d.ts.map +1 -1
- package/dist/commands/db/utils/db-target.d.ts +5 -3
- package/dist/commands/db/utils/db-target.d.ts.map +1 -1
- package/dist/commands/dev/commands/dev.d.ts.map +1 -1
- package/dist/constants/versions.d.ts +1 -1
- package/dist/index.js +799 -450
- package/dist/utils/env-local-bridge.d.ts +58 -0
- package/dist/utils/env-local-bridge.d.ts.map +1 -0
- package/package.json +2 -2
package/dist/index.js
CHANGED
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
import { createRequire } from 'module';
|
|
3
|
-
import * as
|
|
4
|
-
import
|
|
3
|
+
import * as path12 from 'path';
|
|
4
|
+
import path12__default, { join, dirname, resolve, isAbsolute, relative, sep, basename, normalize } from 'path';
|
|
5
5
|
import { fileURLToPath } from 'url';
|
|
6
6
|
import * as fs5 from 'fs';
|
|
7
7
|
import fs5__default, { existsSync, readFileSync, unlinkSync, rmSync, readdirSync, mkdtempSync, writeFileSync, mkdirSync, copyFileSync, createWriteStream, statSync, realpathSync, promises, lstatSync, accessSync, constants, chmodSync } from 'fs';
|
|
8
8
|
import { execSync, spawnSync, execFileSync, exec, spawn } from 'child_process';
|
|
9
|
-
import { createCLILogger, cacheClear, CacheClearOutputSchema, CLIError, cachePrune, CachePruneOutputSchema, cacheStats, CacheStatsOutputSchema, cacheList, CacheListOutputSchema, cacheInvalidate, CacheInvalidateOutputSchema, syncFromProduction, SUPABASE_SYSTEM_SCHEMAS, dbGenerateDiagram, DbDiagramGenerateOutputSchema, createDbSnapshot, syncDatabase, emitDbPushFailureCapsule, emitDbAnnotations, writeDbPushStepSummary, exportDbReportJson, DbSyncOutputSchema, databasePaths, detectRequiredServices, formatDetectionResults, dbStart, DbLifecycleStartOutputSchema, dbStop, DbLifecycleStopOutputSchema, dbReset, DbLifecycleResetOutputSchema, dbValidateSchemas, DbSchemaValidateOutputSchema, DbSchemaRisksOutputSchema, dbDetectSchemaRisks, dbApplySchemas, DbSchemaApplyOutputSchema, dbGenerateTypes, DbSchemaGenerateOutputSchema, extractSchemaFilter, dbSeedInit, DbSeedInitOutputSchema, dbSeedValidate, DbSeedValidateOutputSchema, dbSeedGenerate, DbSeedGenerateOutputSchema, dbVerifySeeds, DbSeedVerifyOutputSchema, DbSnapshotCreateOutputSchema, restoreDbSnapshot, DbSnapshotRestoreOutputSchema, listDbSnapshots, DbSnapshotListOutputSchema, dbGeneratePgTapTests, DbTestGenOutputSchema, dbUpdateGoldenRecord, DbTestUpdateGoldenOutputSchema, repairRunaConfig, detectExistingInitConfig, initProject, validateInitResult, linkCliGlobally, LinkCliOutputSchema, unlinkCliGlobally, UnlinkCliOutputSchema, checkRepoStatus, CheckRepoStatusOutputSchema, enableTelemetry, disableTelemetry, getTelemetryStatus, uploadTelemetry, TelemetryUploadOutputSchema, runTest, TestRunOutputSchema, runTestService, TestServiceOutputSchema, runTestIntegration, TestIntegrationOutputSchema, runTestStatic, TestStaticOutputSchema, generateOwaspTop10Tests, TestOwaspGenerateOutputSchema, updateGoldenRecord, generateE2ETests, generateSecurityTests, generateUnitTests, generateApiTests, generateComponentTests, generateE2EScaffold, validateConfig, ValidateConfigOutputSchema, deploySchemaToProduction, WorkflowNotifyOutputSchema, 
devopsSync, workflowSync, validateInfrastructure, emitWorkflowValidateFailureCapsule, emitWorkflowAnnotations, writeWorkflowValidateStepSummary, exportWorkflowReportJson, WorkflowValidateInfrastructureOutputSchema, createSuccessEnvelopeSchema, CLI_CONTRACT_VERSION, runChecks, RunCheckOutputSchema, formatDuration as formatDuration$1, GITHUB_API, loadRunaConfig, getClassificationForProfile,
|
|
9
|
+
import { createCLILogger, cacheClear, CacheClearOutputSchema, CLIError, cachePrune, CachePruneOutputSchema, cacheStats, CacheStatsOutputSchema, cacheList, CacheListOutputSchema, cacheInvalidate, CacheInvalidateOutputSchema, syncFromProduction, SUPABASE_SYSTEM_SCHEMAS, dbGenerateDiagram, DbDiagramGenerateOutputSchema, createDbSnapshot, syncDatabase, emitDbPushFailureCapsule, emitDbAnnotations, writeDbPushStepSummary, exportDbReportJson, DbSyncOutputSchema, databasePaths, detectRequiredServices, formatDetectionResults, dbStart, DbLifecycleStartOutputSchema, dbStop, DbLifecycleStopOutputSchema, dbReset, DbLifecycleResetOutputSchema, dbValidateSchemas, DbSchemaValidateOutputSchema, DbSchemaRisksOutputSchema, dbDetectSchemaRisks, dbApplySchemas, DbSchemaApplyOutputSchema, dbGenerateTypes, DbSchemaGenerateOutputSchema, extractSchemaFilter, dbSeedInit, DbSeedInitOutputSchema, dbSeedValidate, DbSeedValidateOutputSchema, dbSeedGenerate, DbSeedGenerateOutputSchema, dbVerifySeeds, DbSeedVerifyOutputSchema, DbSnapshotCreateOutputSchema, restoreDbSnapshot, DbSnapshotRestoreOutputSchema, listDbSnapshots, DbSnapshotListOutputSchema, dbGeneratePgTapTests, DbTestGenOutputSchema, dbUpdateGoldenRecord, DbTestUpdateGoldenOutputSchema, repairRunaConfig, detectExistingInitConfig, initProject, validateInitResult, linkCliGlobally, LinkCliOutputSchema, unlinkCliGlobally, UnlinkCliOutputSchema, checkRepoStatus, CheckRepoStatusOutputSchema, enableTelemetry, disableTelemetry, getTelemetryStatus, uploadTelemetry, TelemetryUploadOutputSchema, runTest, TestRunOutputSchema, runTestService, TestServiceOutputSchema, runTestIntegration, TestIntegrationOutputSchema, runTestStatic, TestStaticOutputSchema, generateOwaspTop10Tests, TestOwaspGenerateOutputSchema, updateGoldenRecord, generateE2ETests, generateSecurityTests, generateUnitTests, generateApiTests, generateComponentTests, generateE2EScaffold, validateConfig, ValidateConfigOutputSchema, deploySchemaToProduction, WorkflowNotifyOutputSchema, 
devopsSync, workflowSync, validateInfrastructure, emitWorkflowValidateFailureCapsule, emitWorkflowAnnotations, writeWorkflowValidateStepSummary, exportWorkflowReportJson, WorkflowValidateInfrastructureOutputSchema, createSuccessEnvelopeSchema, CLI_CONTRACT_VERSION, BASE_PORTS, runChecks, RunCheckOutputSchema, formatDuration as formatDuration$1, GITHUB_API, loadRunaConfig, getClassificationForProfile, loadRunaConfigOrThrow, recordSchemaAudit, RecordSchemaAuditOutputSchema, createBackup, CreateBackupOutputSchema, listBackups, ListBackupsOutputSchema, getBackupMetadata, restoreBackup, RestoreBackupOutputSchema, deleteBackup, DeleteBackupOutputSchema, detectSchemaNames, resolveAvailablePorts, calculatePortOffset, dbSeedApply, writeDbSeedStepSummary, DbSeedApplyOutputSchema, emitDbSeedFailureCapsule, syncEnvironment, EnvSyncOutputSchema, detectDatabasePackage, findProjectRoot as findProjectRoot$1, TelemetryEnableOutputSchema, TelemetryDisableOutputSchema, TelemetryStatusOutputSchema, workflowNotify, DevOpsSyncOutputSchema, WorkflowSyncOutputSchema, formatCLIError, getStatusIcon as getStatusIcon$1, findWorkspaceRoot as findWorkspaceRoot$1, checkExtensionConfig, getPortsWithOffset, UpgradeTransaction, readRunaVersion, syncTemplates, SyncOutputSchema, DATABASE_PACKAGE_CANDIDATES, ErrorEnvelopeSchema, preCheckSync, findConflictFiles, TestUnitGenOutputSchema, TestE2EGenerateOutputSchema, TestSecurityGenOutputSchema, TestApiGenOutputSchema, TestComponentGenOutputSchema } from '@runa-ai/runa';
|
|
10
10
|
import { z } from 'zod';
|
|
11
11
|
import fs9, { mkdir, writeFile, appendFile, readFile, rm, stat, realpath, cp, readdir, lstat } from 'fs/promises';
|
|
12
12
|
import { promisify } from 'util';
|
|
@@ -73,7 +73,7 @@ var getFilename, getDirname, __dirname$1;
|
|
|
73
73
|
var init_esm_shims = __esm({
|
|
74
74
|
"../../node_modules/.pnpm/tsup@8.5.1_jiti@2.6.1_postcss@8.5.6_tsx@4.21.0_typescript@5.9.3_yaml@2.8.2/node_modules/tsup/assets/esm_shims.js"() {
|
|
75
75
|
getFilename = () => fileURLToPath(import.meta.url);
|
|
76
|
-
getDirname = () =>
|
|
76
|
+
getDirname = () => path12__default.dirname(getFilename());
|
|
77
77
|
__dirname$1 = /* @__PURE__ */ getDirname();
|
|
78
78
|
}
|
|
79
79
|
});
|
|
@@ -135,7 +135,7 @@ function parseTomlPort(content, section, key) {
|
|
|
135
135
|
return isValidPort(parsed) ? parsed : null;
|
|
136
136
|
}
|
|
137
137
|
function parseTomlPorts(configRoot) {
|
|
138
|
-
const configPath =
|
|
138
|
+
const configPath = path12__default.join(configRoot, "supabase", "config.toml");
|
|
139
139
|
if (!existsSync(configPath)) return null;
|
|
140
140
|
try {
|
|
141
141
|
const content = readFileSync(configPath, "utf-8");
|
|
@@ -164,13 +164,13 @@ function detectStatus(projectRoot) {
|
|
|
164
164
|
}
|
|
165
165
|
}
|
|
166
166
|
function resolveSupabaseRoot(projectRoot) {
|
|
167
|
-
const start =
|
|
167
|
+
const start = path12__default.resolve(projectRoot);
|
|
168
168
|
let current = start;
|
|
169
169
|
while (true) {
|
|
170
|
-
if (existsSync(
|
|
170
|
+
if (existsSync(path12__default.join(current, "supabase", "config.toml"))) {
|
|
171
171
|
return current;
|
|
172
172
|
}
|
|
173
|
-
const parent =
|
|
173
|
+
const parent = path12__default.dirname(current);
|
|
174
174
|
if (parent === current) {
|
|
175
175
|
return start;
|
|
176
176
|
}
|
|
@@ -980,11 +980,11 @@ function validateRunaConfig(config) {
|
|
|
980
980
|
};
|
|
981
981
|
}
|
|
982
982
|
function hasValidDatabaseIndicators(dir) {
|
|
983
|
-
const hasPrimaryIndicator = existsSync(
|
|
983
|
+
const hasPrimaryIndicator = existsSync(path12__default.join(dir, "drizzle.config.ts")) || existsSync(path12__default.join(dir, "supabase", "schemas")) || existsSync(path12__default.join(dir, "src", "schema")) || existsSync(path12__default.join(dir, "scripts", "db-sync.ts"));
|
|
984
984
|
if (hasPrimaryIndicator) {
|
|
985
985
|
return true;
|
|
986
986
|
}
|
|
987
|
-
const pkgJsonPath =
|
|
987
|
+
const pkgJsonPath = path12__default.join(dir, "package.json");
|
|
988
988
|
if (existsSync(pkgJsonPath)) {
|
|
989
989
|
try {
|
|
990
990
|
const pkgJson = JSON.parse(readFileSync(pkgJsonPath, "utf-8"));
|
|
@@ -1000,15 +1000,15 @@ function hasValidDatabaseIndicators(dir) {
|
|
|
1000
1000
|
function tryResolveFromConfig(cwd) {
|
|
1001
1001
|
try {
|
|
1002
1002
|
const loaded = loadRunaConfigOrThrow(cwd);
|
|
1003
|
-
const configDir =
|
|
1003
|
+
const configDir = path12__default.dirname(loaded.configPath);
|
|
1004
1004
|
const config = loaded.config;
|
|
1005
1005
|
if (config.project.monorepo === false && hasValidDatabaseIndicators(configDir)) {
|
|
1006
1006
|
return configDir;
|
|
1007
1007
|
}
|
|
1008
1008
|
const dbConfig = getDatabaseConfig(config);
|
|
1009
1009
|
const schemaPath = dbConfig.schemaPath;
|
|
1010
|
-
const dbPath = schemaPath.includes("/src/") ? schemaPath.split("/src/")[0] : schemaPath.startsWith("src/") ? "." :
|
|
1011
|
-
const fullPath = dbPath === "." ? configDir :
|
|
1010
|
+
const dbPath = schemaPath.includes("/src/") ? schemaPath.split("/src/")[0] : schemaPath.startsWith("src/") ? "." : path12__default.dirname(schemaPath);
|
|
1011
|
+
const fullPath = dbPath === "." ? configDir : path12__default.join(configDir, dbPath);
|
|
1012
1012
|
return existsSync(fullPath) ? fullPath : null;
|
|
1013
1013
|
} catch {
|
|
1014
1014
|
return null;
|
|
@@ -1021,9 +1021,9 @@ async function getDatabasePackagePath(cwd = process.cwd()) {
|
|
|
1021
1021
|
if (dbPackage) return dbPackage;
|
|
1022
1022
|
const baseDir = findWorkspaceRoot(cwd) || cwd;
|
|
1023
1023
|
const conventionalPaths = [
|
|
1024
|
-
|
|
1025
|
-
|
|
1026
|
-
|
|
1024
|
+
path12__default.join(baseDir, "packages", "database"),
|
|
1025
|
+
path12__default.join(baseDir, "packages", "db"),
|
|
1026
|
+
path12__default.join(baseDir, "db"),
|
|
1027
1027
|
baseDir
|
|
1028
1028
|
];
|
|
1029
1029
|
for (const candidate of conventionalPaths) {
|
|
@@ -1054,14 +1054,14 @@ Solutions:
|
|
|
1054
1054
|
async function getSDKScriptsPath(cwd = process.cwd()) {
|
|
1055
1055
|
const sdkPackage = findPackage("sdk", cwd);
|
|
1056
1056
|
if (sdkPackage) {
|
|
1057
|
-
const scriptsPath =
|
|
1057
|
+
const scriptsPath = path12__default.join(sdkPackage, "scripts");
|
|
1058
1058
|
if (existsSync(scriptsPath)) {
|
|
1059
1059
|
return scriptsPath;
|
|
1060
1060
|
}
|
|
1061
1061
|
}
|
|
1062
1062
|
const workspaceRoot = findWorkspaceRoot(cwd);
|
|
1063
1063
|
const baseDir = workspaceRoot || cwd;
|
|
1064
|
-
const installedPath =
|
|
1064
|
+
const installedPath = path12__default.join(baseDir, "node_modules", "@runa-ai", "runa", "scripts");
|
|
1065
1065
|
if (existsSync(installedPath)) {
|
|
1066
1066
|
return installedPath;
|
|
1067
1067
|
}
|
|
@@ -1072,14 +1072,14 @@ async function getSDKScriptsPath(cwd = process.cwd()) {
|
|
|
1072
1072
|
async function getSDKTemplatesPath(cwd = process.cwd()) {
|
|
1073
1073
|
const sdkPackage = findPackage("sdk", cwd);
|
|
1074
1074
|
if (sdkPackage) {
|
|
1075
|
-
const templatesPath =
|
|
1075
|
+
const templatesPath = path12__default.join(sdkPackage, "templates");
|
|
1076
1076
|
if (existsSync(templatesPath)) {
|
|
1077
1077
|
return templatesPath;
|
|
1078
1078
|
}
|
|
1079
1079
|
}
|
|
1080
1080
|
const workspaceRoot = findWorkspaceRoot(cwd);
|
|
1081
1081
|
const baseDir = workspaceRoot || cwd;
|
|
1082
|
-
const installedPath =
|
|
1082
|
+
const installedPath = path12__default.join(baseDir, "node_modules", "@runa-ai", "runa", "templates");
|
|
1083
1083
|
if (existsSync(installedPath)) {
|
|
1084
1084
|
return installedPath;
|
|
1085
1085
|
}
|
|
@@ -1161,7 +1161,7 @@ var CLI_VERSION, HAS_ADMIN_COMMAND;
|
|
|
1161
1161
|
var init_version = __esm({
|
|
1162
1162
|
"src/version.ts"() {
|
|
1163
1163
|
init_esm_shims();
|
|
1164
|
-
CLI_VERSION = "0.5.
|
|
1164
|
+
CLI_VERSION = "0.5.61";
|
|
1165
1165
|
HAS_ADMIN_COMMAND = false;
|
|
1166
1166
|
}
|
|
1167
1167
|
});
|
|
@@ -1557,7 +1557,7 @@ var init_dependency_analyzer = __esm({
|
|
|
1557
1557
|
name = "DependencyAnalyzer";
|
|
1558
1558
|
categories = ["dependency"];
|
|
1559
1559
|
async analyze(options) {
|
|
1560
|
-
const packageJsonPath =
|
|
1560
|
+
const packageJsonPath = path12__default.join(options.rootDir, "package.json");
|
|
1561
1561
|
try {
|
|
1562
1562
|
await fs9.access(packageJsonPath);
|
|
1563
1563
|
} catch {
|
|
@@ -3311,17 +3311,17 @@ var init_typescript_analyzer = __esm({
|
|
|
3311
3311
|
}
|
|
3312
3312
|
});
|
|
3313
3313
|
function containsPathTraversal4(inputPath) {
|
|
3314
|
-
const normalized =
|
|
3314
|
+
const normalized = path12__default.normalize(inputPath);
|
|
3315
3315
|
if (normalized.includes("..")) return true;
|
|
3316
3316
|
if (inputPath.includes("\0")) return true;
|
|
3317
3317
|
return false;
|
|
3318
3318
|
}
|
|
3319
3319
|
function isPathWithinBoundary(filePath, boundaryDir) {
|
|
3320
3320
|
try {
|
|
3321
|
-
const resolvedFile =
|
|
3322
|
-
const resolvedBoundary =
|
|
3323
|
-
const relative9 =
|
|
3324
|
-
return !relative9.startsWith("..") && !
|
|
3321
|
+
const resolvedFile = path12__default.resolve(filePath);
|
|
3322
|
+
const resolvedBoundary = path12__default.resolve(boundaryDir);
|
|
3323
|
+
const relative9 = path12__default.relative(resolvedBoundary, resolvedFile);
|
|
3324
|
+
return !relative9.startsWith("..") && !path12__default.isAbsolute(relative9);
|
|
3325
3325
|
} catch {
|
|
3326
3326
|
return false;
|
|
3327
3327
|
}
|
|
@@ -3330,14 +3330,14 @@ function resolvePathWithinBoundary(inputPath, boundaryDir) {
|
|
|
3330
3330
|
if (containsPathTraversal4(inputPath)) {
|
|
3331
3331
|
throw new Error(`Path contains traversal patterns: ${inputPath}`);
|
|
3332
3332
|
}
|
|
3333
|
-
const resolvedPath =
|
|
3333
|
+
const resolvedPath = path12__default.isAbsolute(inputPath) ? inputPath : path12__default.resolve(boundaryDir, inputPath);
|
|
3334
3334
|
if (!isPathWithinBoundary(resolvedPath, boundaryDir)) {
|
|
3335
3335
|
throw new Error(`Path is outside allowed boundary: ${inputPath}`);
|
|
3336
3336
|
}
|
|
3337
3337
|
return resolvedPath;
|
|
3338
3338
|
}
|
|
3339
3339
|
function filterPathsWithinBoundary(files, boundaryDir) {
|
|
3340
|
-
const resolvedBoundary =
|
|
3340
|
+
const resolvedBoundary = path12__default.resolve(boundaryDir);
|
|
3341
3341
|
return files.filter((file) => isPathWithinBoundary(file, resolvedBoundary));
|
|
3342
3342
|
}
|
|
3343
3343
|
function validateGlobPatterns(patterns) {
|
|
@@ -3398,7 +3398,7 @@ var init_loader = __esm({
|
|
|
3398
3398
|
}
|
|
3399
3399
|
});
|
|
3400
3400
|
function normalizePath(filePath) {
|
|
3401
|
-
const normalized =
|
|
3401
|
+
const normalized = path12__default.normalize(filePath);
|
|
3402
3402
|
return normalized.replace(/\\/g, "/");
|
|
3403
3403
|
}
|
|
3404
3404
|
function normalizeRuleId(ruleId) {
|
|
@@ -5652,7 +5652,7 @@ function pipeToSharedLog(params) {
|
|
|
5652
5652
|
}
|
|
5653
5653
|
async function ensureRunaTmpDir(cwd) {
|
|
5654
5654
|
const root = cwd ?? process.cwd();
|
|
5655
|
-
const dir =
|
|
5655
|
+
const dir = path12__default.join(root, ".runa", "tmp");
|
|
5656
5656
|
await mkdir(dir, { recursive: true });
|
|
5657
5657
|
return dir;
|
|
5658
5658
|
}
|
|
@@ -5677,7 +5677,7 @@ var buildActor = fromPromise(
|
|
|
5677
5677
|
const { repoRoot, tmpDir, hasTurbo, e2e, env: env2 = process.env } = input3;
|
|
5678
5678
|
const startTime = Date.now();
|
|
5679
5679
|
try {
|
|
5680
|
-
const fullTmpDir =
|
|
5680
|
+
const fullTmpDir = path12__default.join(repoRoot, tmpDir);
|
|
5681
5681
|
await mkdir(fullTmpDir, { recursive: true });
|
|
5682
5682
|
const buildEnv = { ...env2 };
|
|
5683
5683
|
if (e2e) {
|
|
@@ -5686,7 +5686,7 @@ var buildActor = fromPromise(
|
|
|
5686
5686
|
buildEnv.TURBO_FORCE = "true";
|
|
5687
5687
|
console.log(" E2E mode enabled: NEXT_PUBLIC_E2E_TEST=true, TURBO_FORCE=true");
|
|
5688
5688
|
}
|
|
5689
|
-
const hasApps = existsSync(
|
|
5689
|
+
const hasApps = existsSync(path12__default.join(repoRoot, "apps"));
|
|
5690
5690
|
let args;
|
|
5691
5691
|
if (hasTurbo) {
|
|
5692
5692
|
args = hasApps ? ["turbo", "run", "build", "--filter=./apps/*", "--filter=./packages/*"] : ["turbo", "run", "build"];
|
|
@@ -5699,7 +5699,7 @@ var buildActor = fromPromise(
|
|
|
5699
5699
|
label: "build",
|
|
5700
5700
|
command: "pnpm",
|
|
5701
5701
|
args,
|
|
5702
|
-
logFile:
|
|
5702
|
+
logFile: path12__default.join(fullTmpDir, "build.log")
|
|
5703
5703
|
});
|
|
5704
5704
|
return {
|
|
5705
5705
|
passed: true,
|
|
@@ -5938,7 +5938,7 @@ function deleteIfExists(fullPath, displayPath, verbose) {
|
|
|
5938
5938
|
function deleteRootDirectories(repoRoot, dirs, verbose) {
|
|
5939
5939
|
const deleted = [];
|
|
5940
5940
|
for (const dir of dirs) {
|
|
5941
|
-
const fullPath =
|
|
5941
|
+
const fullPath = path12__default.join(repoRoot, dir);
|
|
5942
5942
|
if (deleteIfExists(fullPath, dir, verbose)) {
|
|
5943
5943
|
deleted.push(dir);
|
|
5944
5944
|
}
|
|
@@ -5949,13 +5949,13 @@ function cleanMonorepoPackages(repoRoot, dirs, verbose) {
|
|
|
5949
5949
|
const deleted = [];
|
|
5950
5950
|
const subdirs = ["apps", "packages"];
|
|
5951
5951
|
for (const subdir of subdirs) {
|
|
5952
|
-
const subdirPath =
|
|
5952
|
+
const subdirPath = path12__default.join(repoRoot, subdir);
|
|
5953
5953
|
if (!existsSync(subdirPath)) continue;
|
|
5954
5954
|
const entries = __require("fs").readdirSync(subdirPath, { withFileTypes: true });
|
|
5955
5955
|
for (const entry of entries) {
|
|
5956
5956
|
if (!entry.isDirectory()) continue;
|
|
5957
5957
|
for (const dir of dirs) {
|
|
5958
|
-
const targetPath =
|
|
5958
|
+
const targetPath = path12__default.join(subdirPath, entry.name, dir);
|
|
5959
5959
|
const displayPath = `${subdir}/${entry.name}/${dir}`;
|
|
5960
5960
|
if (deleteIfExists(targetPath, displayPath, verbose)) {
|
|
5961
5961
|
deleted.push(displayPath);
|
|
@@ -5974,7 +5974,7 @@ var cleanActor = fromPromise(
|
|
|
5974
5974
|
async ({ input: input3 }) => {
|
|
5975
5975
|
const { repoRoot, tmpDir, verbose = false } = input3;
|
|
5976
5976
|
try {
|
|
5977
|
-
await mkdir(
|
|
5977
|
+
await mkdir(path12__default.join(repoRoot, tmpDir), { recursive: true });
|
|
5978
5978
|
if (verbose) {
|
|
5979
5979
|
console.log("Cleaning build caches...");
|
|
5980
5980
|
}
|
|
@@ -5996,7 +5996,7 @@ var freshActor = fromPromise(
|
|
|
5996
5996
|
async ({ input: input3 }) => {
|
|
5997
5997
|
const { repoRoot, tmpDir, verbose = false } = input3;
|
|
5998
5998
|
try {
|
|
5999
|
-
await mkdir(
|
|
5999
|
+
await mkdir(path12__default.join(repoRoot, tmpDir), { recursive: true });
|
|
6000
6000
|
if (verbose) {
|
|
6001
6001
|
console.log("Fresh install: cleaning all caches and node_modules...");
|
|
6002
6002
|
}
|
|
@@ -6068,7 +6068,7 @@ var dbSyncActor = fromPromise(
|
|
|
6068
6068
|
const { repoRoot, tmpDir, env: env2 = process.env } = input3;
|
|
6069
6069
|
const startTime = Date.now();
|
|
6070
6070
|
try {
|
|
6071
|
-
const fullTmpDir =
|
|
6071
|
+
const fullTmpDir = path12__default.join(repoRoot, tmpDir);
|
|
6072
6072
|
await mkdir(fullTmpDir, { recursive: true });
|
|
6073
6073
|
await runLogged({
|
|
6074
6074
|
cwd: repoRoot,
|
|
@@ -6076,7 +6076,7 @@ var dbSyncActor = fromPromise(
|
|
|
6076
6076
|
label: "db-sync",
|
|
6077
6077
|
command: "pnpm",
|
|
6078
6078
|
args: ["exec", "runa", "db", "sync", "--auto-approve"],
|
|
6079
|
-
logFile:
|
|
6079
|
+
logFile: path12__default.join(fullTmpDir, "db-sync.log")
|
|
6080
6080
|
});
|
|
6081
6081
|
return {
|
|
6082
6082
|
passed: true,
|
|
@@ -6112,7 +6112,7 @@ var manifestActor = fromPromise(
|
|
|
6112
6112
|
const { repoRoot, tmpDir, env: env2 = process.env } = input3;
|
|
6113
6113
|
const startTime = Date.now();
|
|
6114
6114
|
try {
|
|
6115
|
-
const fullTmpDir =
|
|
6115
|
+
const fullTmpDir = path12__default.join(repoRoot, tmpDir);
|
|
6116
6116
|
await mkdir(fullTmpDir, { recursive: true });
|
|
6117
6117
|
await runLogged({
|
|
6118
6118
|
cwd: repoRoot,
|
|
@@ -6120,7 +6120,7 @@ var manifestActor = fromPromise(
|
|
|
6120
6120
|
label: "manifest",
|
|
6121
6121
|
command: "pnpm",
|
|
6122
6122
|
args: ["exec", "runa", "manifest"],
|
|
6123
|
-
logFile:
|
|
6123
|
+
logFile: path12__default.join(fullTmpDir, "manifest.log")
|
|
6124
6124
|
});
|
|
6125
6125
|
return {
|
|
6126
6126
|
passed: true,
|
|
@@ -6384,7 +6384,7 @@ function buildOutput(typeCheckPassed, lintPassed, typeCheckDurationMs, lintDurat
|
|
|
6384
6384
|
var staticChecksActor = fromPromise(
|
|
6385
6385
|
async ({ input: input3 }) => {
|
|
6386
6386
|
const { repoRoot, tmpDir, hasTurbo, env: env2 = process.env, skipTypes, skipLint } = input3;
|
|
6387
|
-
const fullTmpDir =
|
|
6387
|
+
const fullTmpDir = path12__default.join(repoRoot, tmpDir);
|
|
6388
6388
|
await mkdir(fullTmpDir, { recursive: true });
|
|
6389
6389
|
const startTime = Date.now();
|
|
6390
6390
|
const checksToRun = [];
|
|
@@ -6398,7 +6398,7 @@ var staticChecksActor = fromPromise(
|
|
|
6398
6398
|
label: "type-check",
|
|
6399
6399
|
command: cmd.command,
|
|
6400
6400
|
args: cmd.args,
|
|
6401
|
-
logFile:
|
|
6401
|
+
logFile: path12__default.join(fullTmpDir, "type-check.log")
|
|
6402
6402
|
})
|
|
6403
6403
|
);
|
|
6404
6404
|
checkOrder.push("type-check");
|
|
@@ -6412,7 +6412,7 @@ var staticChecksActor = fromPromise(
|
|
|
6412
6412
|
label: "lint",
|
|
6413
6413
|
command: cmd.command,
|
|
6414
6414
|
args: cmd.args,
|
|
6415
|
-
logFile:
|
|
6415
|
+
logFile: path12__default.join(fullTmpDir, "lint.log")
|
|
6416
6416
|
})
|
|
6417
6417
|
);
|
|
6418
6418
|
checkOrder.push("lint");
|
|
@@ -6456,14 +6456,14 @@ var validateActor = fromPromise(
|
|
|
6456
6456
|
let manifestsExist = true;
|
|
6457
6457
|
const buildOutputPaths = [
|
|
6458
6458
|
// Single app
|
|
6459
|
-
|
|
6460
|
-
|
|
6459
|
+
path12__default.join(repoRoot, ".next"),
|
|
6460
|
+
path12__default.join(repoRoot, "dist"),
|
|
6461
6461
|
// Monorepo apps
|
|
6462
|
-
|
|
6463
|
-
|
|
6462
|
+
path12__default.join(repoRoot, "apps", "web", ".next"),
|
|
6463
|
+
path12__default.join(repoRoot, "apps", "dashboard", ".next"),
|
|
6464
6464
|
// Packages
|
|
6465
|
-
|
|
6466
|
-
|
|
6465
|
+
path12__default.join(repoRoot, "packages", "cli", "dist"),
|
|
6466
|
+
path12__default.join(repoRoot, "packages", "sdk", "dist")
|
|
6467
6467
|
];
|
|
6468
6468
|
for (const outputPath of buildOutputPaths) {
|
|
6469
6469
|
if (existsSync(outputPath)) {
|
|
@@ -6477,7 +6477,7 @@ var validateActor = fromPromise(
|
|
|
6477
6477
|
);
|
|
6478
6478
|
}
|
|
6479
6479
|
if (!skipManifest) {
|
|
6480
|
-
const manifestPath =
|
|
6480
|
+
const manifestPath = path12__default.join(repoRoot, ".runa", "manifests", "manifest.json");
|
|
6481
6481
|
if (!existsSync(manifestPath)) {
|
|
6482
6482
|
manifestsExist = false;
|
|
6483
6483
|
warnings.push("No manifest.json found. E2E test generation may be affected.");
|
|
@@ -6536,35 +6536,35 @@ function isE2EMode({ context }) {
|
|
|
6536
6536
|
return context.input.e2e;
|
|
6537
6537
|
}
|
|
6538
6538
|
function detectDatabase(repoRoot) {
|
|
6539
|
-
const configPath =
|
|
6539
|
+
const configPath = path12__default.join(repoRoot, "runa.config.ts");
|
|
6540
6540
|
if (!existsSync(configPath)) {
|
|
6541
|
-
const jsConfigPath =
|
|
6541
|
+
const jsConfigPath = path12__default.join(repoRoot, "runa.config.js");
|
|
6542
6542
|
if (!existsSync(jsConfigPath)) {
|
|
6543
6543
|
return false;
|
|
6544
6544
|
}
|
|
6545
6545
|
}
|
|
6546
|
-
const supabaseDir =
|
|
6546
|
+
const supabaseDir = path12__default.join(repoRoot, "supabase");
|
|
6547
6547
|
if (existsSync(supabaseDir)) {
|
|
6548
6548
|
return true;
|
|
6549
6549
|
}
|
|
6550
|
-
const databasePkg =
|
|
6550
|
+
const databasePkg = path12__default.join(repoRoot, "packages", "database");
|
|
6551
6551
|
if (existsSync(databasePkg)) {
|
|
6552
6552
|
return true;
|
|
6553
6553
|
}
|
|
6554
6554
|
return false;
|
|
6555
6555
|
}
|
|
6556
6556
|
function detectManifestTask(repoRoot) {
|
|
6557
|
-
const runaConfigPath =
|
|
6557
|
+
const runaConfigPath = path12__default.join(repoRoot, "runa.config.ts");
|
|
6558
6558
|
if (existsSync(runaConfigPath)) {
|
|
6559
6559
|
return true;
|
|
6560
6560
|
}
|
|
6561
|
-
const rootPkgPath =
|
|
6561
|
+
const rootPkgPath = path12__default.join(repoRoot, "package.json");
|
|
6562
6562
|
if (hasXStateDependency(rootPkgPath)) {
|
|
6563
6563
|
return true;
|
|
6564
6564
|
}
|
|
6565
6565
|
const appDirs = ["apps/web", "apps/app", "apps/dashboard", "app"];
|
|
6566
6566
|
for (const appDir of appDirs) {
|
|
6567
|
-
const appPkgPath =
|
|
6567
|
+
const appPkgPath = path12__default.join(repoRoot, appDir, "package.json");
|
|
6568
6568
|
if (hasXStateDependency(appPkgPath)) {
|
|
6569
6569
|
return true;
|
|
6570
6570
|
}
|
|
@@ -6591,7 +6591,7 @@ function hasXStateDependency(pkgPath) {
|
|
|
6591
6591
|
}
|
|
6592
6592
|
}
|
|
6593
6593
|
function detectTurbo(repoRoot) {
|
|
6594
|
-
const turboJsonPath =
|
|
6594
|
+
const turboJsonPath = path12__default.join(repoRoot, "turbo.json");
|
|
6595
6595
|
return existsSync(turboJsonPath);
|
|
6596
6596
|
}
|
|
6597
6597
|
var guards = {
|
|
@@ -7598,17 +7598,17 @@ function printSummary(logger16, output3) {
|
|
|
7598
7598
|
}
|
|
7599
7599
|
}
|
|
7600
7600
|
function findRepoRoot(startDir) {
|
|
7601
|
-
const { existsSync:
|
|
7602
|
-
const { join:
|
|
7601
|
+
const { existsSync: existsSync54, readFileSync: readFileSync31 } = __require("fs");
|
|
7602
|
+
const { join: join24, dirname: dirname5 } = __require("path");
|
|
7603
7603
|
let current = startDir;
|
|
7604
7604
|
while (current !== dirname5(current)) {
|
|
7605
|
-
if (
|
|
7605
|
+
if (existsSync54(join24(current, "turbo.json"))) {
|
|
7606
7606
|
return current;
|
|
7607
7607
|
}
|
|
7608
|
-
const pkgPath =
|
|
7609
|
-
if (
|
|
7608
|
+
const pkgPath = join24(current, "package.json");
|
|
7609
|
+
if (existsSync54(pkgPath)) {
|
|
7610
7610
|
try {
|
|
7611
|
-
const pkg = JSON.parse(
|
|
7611
|
+
const pkg = JSON.parse(readFileSync31(pkgPath, "utf-8"));
|
|
7612
7612
|
if (pkg.workspaces) {
|
|
7613
7613
|
return current;
|
|
7614
7614
|
}
|
|
@@ -7829,6 +7829,71 @@ var DevOutputSchema = z.object({
|
|
|
7829
7829
|
error: z.string().optional()
|
|
7830
7830
|
}).strict();
|
|
7831
7831
|
|
|
7832
|
+
// src/commands/dev/commands/dev.ts
|
|
7833
|
+
init_local_supabase();
|
|
7834
|
+
|
|
7835
|
+
// src/utils/env-local-bridge.ts
|
|
7836
|
+
init_esm_shims();
|
|
7837
|
+
var DEFAULT_API_PORT2 = 54321;
|
|
7838
|
+
var DEFAULT_DB_PORT2 = 54322;
|
|
7839
|
+
var RUNA_ENV_LOCAL_MARKER = "# RUNA_AUTO_GENERATED \u2014 do not edit (created by `runa db start`)";
|
|
7840
|
+
function envLocalPath(projectRoot) {
|
|
7841
|
+
return path12__default.join(projectRoot, ".env.local");
|
|
7842
|
+
}
|
|
7843
|
+
function isNonDefaultPorts(config) {
|
|
7844
|
+
return config.api !== DEFAULT_API_PORT2 || config.db !== DEFAULT_DB_PORT2;
|
|
7845
|
+
}
|
|
7846
|
+
function buildEnvLocalContent(config) {
|
|
7847
|
+
const dbUrl = `postgresql://postgres:postgres@${config.host}:${config.db}/postgres`;
|
|
7848
|
+
return [
|
|
7849
|
+
RUNA_ENV_LOCAL_MARKER,
|
|
7850
|
+
`# Supabase is running on non-default ports (default: api=${DEFAULT_API_PORT2}, db=${DEFAULT_DB_PORT2}).`,
|
|
7851
|
+
"# This file overrides .env.development so that `pnpm dev` uses correct ports.",
|
|
7852
|
+
"# Removed automatically by `runa db stop`. Safe to delete manually.",
|
|
7853
|
+
`DATABASE_URL=${dbUrl}`,
|
|
7854
|
+
`DATABASE_URL_ADMIN=${dbUrl}`,
|
|
7855
|
+
`DATABASE_URL_SERVICE=${dbUrl}`,
|
|
7856
|
+
`NEXT_PUBLIC_SUPABASE_URL=http://${config.host}:${config.api}`,
|
|
7857
|
+
""
|
|
7858
|
+
].join("\n");
|
|
7859
|
+
}
|
|
7860
|
+
function isRunaManagedEnvLocal(projectRoot) {
|
|
7861
|
+
const filePath = envLocalPath(projectRoot);
|
|
7862
|
+
try {
|
|
7863
|
+
const content = readFileSync(filePath, "utf-8");
|
|
7864
|
+
return content.startsWith(RUNA_ENV_LOCAL_MARKER);
|
|
7865
|
+
} catch {
|
|
7866
|
+
return false;
|
|
7867
|
+
}
|
|
7868
|
+
}
|
|
7869
|
+
function writeEnvLocalBridge(projectRoot, config) {
|
|
7870
|
+
const filePath = envLocalPath(projectRoot);
|
|
7871
|
+
const fileExists = existsSync(filePath);
|
|
7872
|
+
if (isNonDefaultPorts(config)) {
|
|
7873
|
+
if (fileExists && !isRunaManagedEnvLocal(projectRoot)) {
|
|
7874
|
+
return { written: false, reason: "user-managed" };
|
|
7875
|
+
}
|
|
7876
|
+
writeFileSync(filePath, buildEnvLocalContent(config), "utf-8");
|
|
7877
|
+
return { written: true, ports: { api: config.api, db: config.db } };
|
|
7878
|
+
}
|
|
7879
|
+
if (fileExists && isRunaManagedEnvLocal(projectRoot)) {
|
|
7880
|
+
unlinkSync(filePath);
|
|
7881
|
+
return { written: false, reason: "stale-removed" };
|
|
7882
|
+
}
|
|
7883
|
+
return { written: false, reason: "default-ports" };
|
|
7884
|
+
}
|
|
7885
|
+
function removeEnvLocalBridge(projectRoot) {
|
|
7886
|
+
const filePath = envLocalPath(projectRoot);
|
|
7887
|
+
if (!existsSync(filePath)) {
|
|
7888
|
+
return { removed: false, reason: "not-found" };
|
|
7889
|
+
}
|
|
7890
|
+
if (!isRunaManagedEnvLocal(projectRoot)) {
|
|
7891
|
+
return { removed: false, reason: "user-managed" };
|
|
7892
|
+
}
|
|
7893
|
+
unlinkSync(filePath);
|
|
7894
|
+
return { removed: true };
|
|
7895
|
+
}
|
|
7896
|
+
|
|
7832
7897
|
// src/commands/dev/machine.ts
|
|
7833
7898
|
init_esm_shims();
|
|
7834
7899
|
|
|
@@ -7853,42 +7918,42 @@ function resolvePolicy(_config, _repoKind) {
|
|
|
7853
7918
|
};
|
|
7854
7919
|
}
|
|
7855
7920
|
function detectRepoKind(repoRoot) {
|
|
7856
|
-
const hasApps = existsSync(
|
|
7857
|
-
const hasPackages = existsSync(
|
|
7858
|
-
const hasTurbo = existsSync(
|
|
7921
|
+
const hasApps = existsSync(path12__default.join(repoRoot, "apps"));
|
|
7922
|
+
const hasPackages = existsSync(path12__default.join(repoRoot, "packages"));
|
|
7923
|
+
const hasTurbo = existsSync(path12__default.join(repoRoot, "turbo.json"));
|
|
7859
7924
|
if (hasApps || hasPackages || hasTurbo) {
|
|
7860
7925
|
return "monorepo";
|
|
7861
7926
|
}
|
|
7862
|
-
const hasSrc = existsSync(
|
|
7863
|
-
const hasApp = existsSync(
|
|
7927
|
+
const hasSrc = existsSync(path12__default.join(repoRoot, "src"));
|
|
7928
|
+
const hasApp = existsSync(path12__default.join(repoRoot, "app"));
|
|
7864
7929
|
if (hasSrc || hasApp) {
|
|
7865
7930
|
return "pj-repo";
|
|
7866
7931
|
}
|
|
7867
7932
|
return "unknown";
|
|
7868
7933
|
}
|
|
7869
7934
|
function hasKnownFrameworkConfig(dir) {
|
|
7870
|
-
if (existsSync(
|
|
7935
|
+
if (existsSync(path12__default.join(dir, "next.config.js")) || existsSync(path12__default.join(dir, "next.config.mjs")) || existsSync(path12__default.join(dir, "next.config.ts"))) {
|
|
7871
7936
|
return true;
|
|
7872
7937
|
}
|
|
7873
|
-
if (existsSync(
|
|
7938
|
+
if (existsSync(path12__default.join(dir, "vite.config.js")) || existsSync(path12__default.join(dir, "vite.config.ts"))) {
|
|
7874
7939
|
return true;
|
|
7875
7940
|
}
|
|
7876
|
-
if (existsSync(
|
|
7941
|
+
if (existsSync(path12__default.join(dir, "remix.config.js"))) {
|
|
7877
7942
|
return true;
|
|
7878
7943
|
}
|
|
7879
|
-
if (existsSync(
|
|
7944
|
+
if (existsSync(path12__default.join(dir, "astro.config.mjs")) || existsSync(path12__default.join(dir, "astro.config.ts"))) {
|
|
7880
7945
|
return true;
|
|
7881
7946
|
}
|
|
7882
|
-
if (existsSync(
|
|
7947
|
+
if (existsSync(path12__default.join(dir, "nuxt.config.js")) || existsSync(path12__default.join(dir, "nuxt.config.ts"))) {
|
|
7883
7948
|
return true;
|
|
7884
7949
|
}
|
|
7885
|
-
if (existsSync(
|
|
7950
|
+
if (existsSync(path12__default.join(dir, "svelte.config.js"))) {
|
|
7886
7951
|
return true;
|
|
7887
7952
|
}
|
|
7888
7953
|
return false;
|
|
7889
7954
|
}
|
|
7890
7955
|
function hasPackageJson(dir) {
|
|
7891
|
-
return existsSync(
|
|
7956
|
+
return existsSync(path12__default.join(dir, "package.json"));
|
|
7892
7957
|
}
|
|
7893
7958
|
var PORT_PATTERNS = [
|
|
7894
7959
|
/-p\s*([0-9]+)/i,
|
|
@@ -7908,11 +7973,11 @@ function extractPortFromScript(script) {
|
|
|
7908
7973
|
return null;
|
|
7909
7974
|
}
|
|
7910
7975
|
function readPortFromScripts(appDir) {
|
|
7911
|
-
const pkgPath =
|
|
7976
|
+
const pkgPath = path12__default.join(appDir, "package.json");
|
|
7912
7977
|
if (!existsSync(pkgPath)) return 3e3;
|
|
7913
7978
|
try {
|
|
7914
|
-
const { readFileSync:
|
|
7915
|
-
const raw =
|
|
7979
|
+
const { readFileSync: readFileSync31 } = __require("fs");
|
|
7980
|
+
const raw = readFileSync31(pkgPath, "utf-8");
|
|
7916
7981
|
const parsed = JSON.parse(raw);
|
|
7917
7982
|
const scripts = parsed.scripts;
|
|
7918
7983
|
for (const key of ["start:ci", "start", "dev"]) {
|
|
@@ -7927,30 +7992,30 @@ function readPortFromScripts(appDir) {
|
|
|
7927
7992
|
return 3e3;
|
|
7928
7993
|
}
|
|
7929
7994
|
function findWebAppUnderApps(repoRoot) {
|
|
7930
|
-
const appsDir =
|
|
7995
|
+
const appsDir = path12__default.join(repoRoot, "apps");
|
|
7931
7996
|
if (!existsSync(appsDir)) return null;
|
|
7932
7997
|
try {
|
|
7933
7998
|
const { readdirSync: readdirSync12 } = __require("fs");
|
|
7934
7999
|
const entries = readdirSync12(appsDir, { withFileTypes: true });
|
|
7935
8000
|
const priority = ["web", "dashboard", "app", "frontend", "client"];
|
|
7936
8001
|
for (const name of priority) {
|
|
7937
|
-
const candidate =
|
|
8002
|
+
const candidate = path12__default.join(appsDir, name);
|
|
7938
8003
|
if (hasKnownFrameworkConfig(candidate)) return candidate;
|
|
7939
8004
|
}
|
|
7940
8005
|
for (const entry of entries) {
|
|
7941
8006
|
if (!entry.isDirectory()) continue;
|
|
7942
8007
|
if (priority.includes(entry.name)) continue;
|
|
7943
|
-
const candidate =
|
|
8008
|
+
const candidate = path12__default.join(appsDir, entry.name);
|
|
7944
8009
|
if (hasKnownFrameworkConfig(candidate)) return candidate;
|
|
7945
8010
|
}
|
|
7946
8011
|
for (const name of priority) {
|
|
7947
|
-
const candidate =
|
|
8012
|
+
const candidate = path12__default.join(appsDir, name);
|
|
7948
8013
|
if (hasPackageJson(candidate)) return candidate;
|
|
7949
8014
|
}
|
|
7950
8015
|
for (const entry of entries) {
|
|
7951
8016
|
if (!entry.isDirectory()) continue;
|
|
7952
8017
|
if (priority.includes(entry.name)) continue;
|
|
7953
|
-
const candidate =
|
|
8018
|
+
const candidate = path12__default.join(appsDir, entry.name);
|
|
7954
8019
|
if (hasPackageJson(candidate)) return candidate;
|
|
7955
8020
|
}
|
|
7956
8021
|
} catch {
|
|
@@ -7959,7 +8024,7 @@ function findWebAppUnderApps(repoRoot) {
|
|
|
7959
8024
|
}
|
|
7960
8025
|
function detectApp(repoRoot, configOverride) {
|
|
7961
8026
|
if (configOverride?.directory && configOverride.directory !== ".") {
|
|
7962
|
-
const configuredDir =
|
|
8027
|
+
const configuredDir = path12__default.join(repoRoot, configOverride.directory);
|
|
7963
8028
|
if (existsSync(configuredDir) && hasPackageJson(configuredDir)) {
|
|
7964
8029
|
return {
|
|
7965
8030
|
appDir: configuredDir,
|
|
@@ -8094,7 +8159,7 @@ function pipeChildOutputToLog(params) {
|
|
|
8094
8159
|
return { cleanup };
|
|
8095
8160
|
}
|
|
8096
8161
|
async function writeEnvLocal(params) {
|
|
8097
|
-
const filePath =
|
|
8162
|
+
const filePath = path12__default.join(params.appDir, ".env.local");
|
|
8098
8163
|
const content = `${Object.entries(params.values).map(([k, v]) => `${k}=${v}`).join("\n")}
|
|
8099
8164
|
`;
|
|
8100
8165
|
await writeFile(filePath, content, "utf-8");
|
|
@@ -8176,18 +8241,18 @@ function determineAppCommand(mode, isMonorepo2, rootScripts, appScripts, repoRoo
|
|
|
8176
8241
|
const nextCommand = mode === "dev" ? "dev" : "start";
|
|
8177
8242
|
const rootHasCiScript = Boolean(rootScripts?.[ciScriptName]);
|
|
8178
8243
|
const appHasCiScript = Boolean(appScripts?.[ciScriptName]);
|
|
8179
|
-
const rootHasDefaultScript = mode === "start" && Boolean(rootScripts?.
|
|
8180
|
-
const appHasDefaultScript = mode === "start" && Boolean(appScripts?.
|
|
8244
|
+
const rootHasDefaultScript = mode === "start" && Boolean(rootScripts?.start);
|
|
8245
|
+
const appHasDefaultScript = mode === "start" && Boolean(appScripts?.start);
|
|
8181
8246
|
if (isMonorepo2 && (rootHasCiScript || rootHasDefaultScript)) {
|
|
8182
8247
|
const scriptName = rootHasCiScript ? ciScriptName : "start";
|
|
8183
8248
|
return { command: ["pnpm", scriptName], useRootScript: true };
|
|
8184
8249
|
}
|
|
8185
8250
|
if (appHasCiScript || appHasDefaultScript) {
|
|
8186
8251
|
const scriptName = appHasCiScript ? ciScriptName : "start";
|
|
8187
|
-
const dirArgs2 = isMonorepo2 ? ["-C",
|
|
8252
|
+
const dirArgs2 = isMonorepo2 ? ["-C", path12__default.relative(repoRoot, appDir)] : [];
|
|
8188
8253
|
return { command: ["pnpm", ...dirArgs2, scriptName], useRootScript: false };
|
|
8189
8254
|
}
|
|
8190
|
-
const dirArgs = isMonorepo2 ? ["-C",
|
|
8255
|
+
const dirArgs = isMonorepo2 ? ["-C", path12__default.relative(repoRoot, appDir)] : [];
|
|
8191
8256
|
const bundlerArgs = bundler ? [`--${bundler}`] : [];
|
|
8192
8257
|
return {
|
|
8193
8258
|
command: ["pnpm", ...dirArgs, "exec", "next", nextCommand, ...bundlerArgs, "-p", String(port)],
|
|
@@ -8197,8 +8262,8 @@ function determineAppCommand(mode, isMonorepo2, rootScripts, appScripts, repoRoo
|
|
|
8197
8262
|
async function startAppBackground(params) {
|
|
8198
8263
|
const mode = params.mode ?? "start";
|
|
8199
8264
|
const isMonorepo2 = params.appDir !== params.repoRoot;
|
|
8200
|
-
const rootPkgPath =
|
|
8201
|
-
const appPkgPath =
|
|
8265
|
+
const rootPkgPath = path12__default.join(params.repoRoot, "package.json");
|
|
8266
|
+
const appPkgPath = path12__default.join(params.appDir, "package.json");
|
|
8202
8267
|
const rootScripts = await readPackageScripts(rootPkgPath);
|
|
8203
8268
|
const appScripts = isMonorepo2 ? await readPackageScripts(appPkgPath) : rootScripts;
|
|
8204
8269
|
const { command, useRootScript } = determineAppCommand(
|
|
@@ -8211,7 +8276,7 @@ async function startAppBackground(params) {
|
|
|
8211
8276
|
params.port,
|
|
8212
8277
|
params.bundler
|
|
8213
8278
|
);
|
|
8214
|
-
const appLog =
|
|
8279
|
+
const appLog = path12__default.join(params.tmpDir, "app.log");
|
|
8215
8280
|
const out = createWriteStream(appLog, { flags: "a" });
|
|
8216
8281
|
const commandStr = command.join(" ");
|
|
8217
8282
|
const modeLabel = mode === "dev" ? "development (hot reload)" : "production";
|
|
@@ -8220,7 +8285,7 @@ async function startAppBackground(params) {
|
|
|
8220
8285
|
`[runa] Mode: ${modeLabel}`,
|
|
8221
8286
|
`[runa] Command: ${commandStr}`,
|
|
8222
8287
|
`[runa] Working directory: ${params.repoRoot}`,
|
|
8223
|
-
`[runa] App directory: ${
|
|
8288
|
+
`[runa] App directory: ${path12__default.relative(params.repoRoot, params.appDir) || "."}`,
|
|
8224
8289
|
useRootScript ? "[runa] Using root package.json script" : "[runa] Using app package.json script",
|
|
8225
8290
|
"---",
|
|
8226
8291
|
""
|
|
@@ -8241,12 +8306,12 @@ async function startAppBackground(params) {
|
|
|
8241
8306
|
streamToTerminal: params.stream
|
|
8242
8307
|
});
|
|
8243
8308
|
const pid = child.pid ?? -1;
|
|
8244
|
-
await writeFile(
|
|
8309
|
+
await writeFile(path12__default.join(params.tmpDir, "app.pid"), `${pid}
|
|
8245
8310
|
`, "utf-8");
|
|
8246
8311
|
return { pid, cleanupStreams };
|
|
8247
8312
|
}
|
|
8248
8313
|
async function waitForAppReady(params) {
|
|
8249
|
-
const logFilePath = params.tmpDir ?
|
|
8314
|
+
const logFilePath = params.tmpDir ? path12__default.join(params.tmpDir, "app.log") : void 0;
|
|
8250
8315
|
await waitHttpOk({
|
|
8251
8316
|
url: `http://127.0.0.1:${params.port}/`,
|
|
8252
8317
|
timeoutSeconds: params.timeoutSeconds,
|
|
@@ -8421,7 +8486,7 @@ function checkPortAvailable(port) {
|
|
|
8421
8486
|
});
|
|
8422
8487
|
}
|
|
8423
8488
|
var processCheckActor = fromPromise(async ({ input: input3 }) => {
|
|
8424
|
-
const pidFile =
|
|
8489
|
+
const pidFile = path12__default.join(input3.repoRoot, input3.tmpDir, "app.pid");
|
|
8425
8490
|
let pidFileContent = null;
|
|
8426
8491
|
try {
|
|
8427
8492
|
pidFileContent = readFileSync(pidFile, "utf-8").trim();
|
|
@@ -8429,7 +8494,7 @@ var processCheckActor = fromPromise(async ({ input: input3 }) => {
|
|
|
8429
8494
|
}
|
|
8430
8495
|
if (pidFileContent) {
|
|
8431
8496
|
const pid = parseInt(pidFileContent, 10);
|
|
8432
|
-
if (!isNaN(pid) && isProcessAlive(pid)) {
|
|
8497
|
+
if (!Number.isNaN(pid) && isProcessAlive(pid)) {
|
|
8433
8498
|
if (input3.replace) {
|
|
8434
8499
|
await terminateAppProcessByPid({
|
|
8435
8500
|
pid,
|
|
@@ -8453,9 +8518,9 @@ var processCheckActor = fromPromise(async ({ input: input3 }) => {
|
|
|
8453
8518
|
var appStartActor = fromPromise(
|
|
8454
8519
|
async ({ input: input3 }) => {
|
|
8455
8520
|
const { repoRoot, appDir, port, tmpDir, stream, bundler } = input3;
|
|
8456
|
-
const fullTmpDir =
|
|
8521
|
+
const fullTmpDir = path12__default.join(repoRoot, tmpDir);
|
|
8457
8522
|
await mkdir(fullTmpDir, { recursive: true });
|
|
8458
|
-
const nextDir =
|
|
8523
|
+
const nextDir = path12__default.join(appDir, ".next");
|
|
8459
8524
|
if (existsSync(nextDir)) {
|
|
8460
8525
|
rmSync(nextDir, { recursive: true, force: true });
|
|
8461
8526
|
console.log("[runa dev] Cleaned .next cache for fresh start");
|
|
@@ -8763,11 +8828,11 @@ function printSummary2(logger16, output3) {
|
|
|
8763
8828
|
}
|
|
8764
8829
|
function findRepoRoot2(startDir) {
|
|
8765
8830
|
let current = startDir;
|
|
8766
|
-
while (current !==
|
|
8767
|
-
if (existsSync(
|
|
8831
|
+
while (current !== path12__default.dirname(current)) {
|
|
8832
|
+
if (existsSync(path12__default.join(current, "turbo.json"))) {
|
|
8768
8833
|
return current;
|
|
8769
8834
|
}
|
|
8770
|
-
const pkgPath =
|
|
8835
|
+
const pkgPath = path12__default.join(current, "package.json");
|
|
8771
8836
|
if (existsSync(pkgPath)) {
|
|
8772
8837
|
try {
|
|
8773
8838
|
const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
|
|
@@ -8777,7 +8842,7 @@ function findRepoRoot2(startDir) {
|
|
|
8777
8842
|
} catch {
|
|
8778
8843
|
}
|
|
8779
8844
|
}
|
|
8780
|
-
current =
|
|
8845
|
+
current = path12__default.dirname(current);
|
|
8781
8846
|
}
|
|
8782
8847
|
return startDir;
|
|
8783
8848
|
}
|
|
@@ -8792,6 +8857,22 @@ async function runDevMachine(input3, logger16) {
|
|
|
8792
8857
|
const currentState = getStateName2(snapshot2);
|
|
8793
8858
|
if (currentState !== previousState) {
|
|
8794
8859
|
handleStateChange2(snapshot2, previousState, logger16, input3.port);
|
|
8860
|
+
if (previousState.startsWith("setup") && (currentState === "appStart" || currentState === "running") && snapshot2.context.supabaseStarted) {
|
|
8861
|
+
try {
|
|
8862
|
+
const bridgeResult = writeEnvLocalBridge(
|
|
8863
|
+
snapshot2.context.repoRoot,
|
|
8864
|
+
detectLocalSupabasePorts(snapshot2.context.repoRoot)
|
|
8865
|
+
);
|
|
8866
|
+
if (bridgeResult.written) {
|
|
8867
|
+
logger16.info(
|
|
8868
|
+
` \u2192 .env.local written (port override: api=${bridgeResult.ports.api}, db=${bridgeResult.ports.db})`
|
|
8869
|
+
);
|
|
8870
|
+
} else if (bridgeResult.reason === "user-managed") {
|
|
8871
|
+
logger16.warn(" \u2192 .env.local exists but is user-managed; port override skipped");
|
|
8872
|
+
}
|
|
8873
|
+
} catch {
|
|
8874
|
+
}
|
|
8875
|
+
}
|
|
8795
8876
|
previousState = currentState;
|
|
8796
8877
|
}
|
|
8797
8878
|
if (isComplete2(snapshot2)) {
|
|
@@ -8983,19 +9064,19 @@ function isSupportedNodeEnv(value) {
|
|
|
8983
9064
|
function findProjectRoot(from) {
|
|
8984
9065
|
const workspaceRoot = findWorkspaceRoot(from);
|
|
8985
9066
|
if (workspaceRoot) return workspaceRoot;
|
|
8986
|
-
let current =
|
|
8987
|
-
const root =
|
|
9067
|
+
let current = path12__default.resolve(from);
|
|
9068
|
+
const root = path12__default.parse(current).root;
|
|
8988
9069
|
let depth = 0;
|
|
8989
9070
|
while (current !== root && depth < MAX_DIRECTORY_TRAVERSAL_DEPTH) {
|
|
8990
|
-
const hasGit = existsSync(
|
|
8991
|
-
const hasPackageJson2 = existsSync(
|
|
9071
|
+
const hasGit = existsSync(path12__default.join(current, ".git"));
|
|
9072
|
+
const hasPackageJson2 = existsSync(path12__default.join(current, "package.json"));
|
|
8992
9073
|
if (hasGit || hasPackageJson2) return current;
|
|
8993
|
-
const parent =
|
|
9074
|
+
const parent = path12__default.dirname(current);
|
|
8994
9075
|
if (parent === current) break;
|
|
8995
9076
|
current = parent;
|
|
8996
9077
|
depth++;
|
|
8997
9078
|
}
|
|
8998
|
-
return
|
|
9079
|
+
return path12__default.resolve(from);
|
|
8999
9080
|
}
|
|
9000
9081
|
function mapRunaEnvToVercelEnv(runaEnv) {
|
|
9001
9082
|
if (runaEnv === "preview") return "preview";
|
|
@@ -9036,7 +9117,7 @@ function loadEnvFiles(options = {}) {
|
|
|
9036
9117
|
const nodeEnv = resolveNodeEnv(options.nodeEnv);
|
|
9037
9118
|
const projectRoot = findProjectRoot(cwd);
|
|
9038
9119
|
const vercelEnv = options.vercelEnv ?? inferVercelEnv({ nodeEnv, runaEnv: options.runaEnv });
|
|
9039
|
-
const keysFile =
|
|
9120
|
+
const keysFile = path12__default.join(projectRoot, ".env.keys");
|
|
9040
9121
|
const baseFilePaths = buildBaseEnvFilePaths({ nodeEnv, vercelEnv });
|
|
9041
9122
|
const runaFilePaths = options.runaEnv ? buildRunaEnvFilePaths(options.runaEnv) : [];
|
|
9042
9123
|
const allRelPaths = [...baseFilePaths, ...runaFilePaths];
|
|
@@ -9046,7 +9127,7 @@ function loadEnvFiles(options = {}) {
|
|
|
9046
9127
|
const filePaths = [
|
|
9047
9128
|
keysFile,
|
|
9048
9129
|
// Load keys first
|
|
9049
|
-
...allRelPaths.map((rel) =>
|
|
9130
|
+
...allRelPaths.map((rel) => path12__default.join(projectRoot, rel)).filter(existsSync)
|
|
9050
9131
|
];
|
|
9051
9132
|
config({
|
|
9052
9133
|
path: filePaths,
|
|
@@ -9055,7 +9136,7 @@ function loadEnvFiles(options = {}) {
|
|
|
9055
9136
|
quiet: true
|
|
9056
9137
|
// Suppress dotenvx verbose output
|
|
9057
9138
|
});
|
|
9058
|
-
loadedFiles = allRelPaths.filter((rel) => existsSync(
|
|
9139
|
+
loadedFiles = allRelPaths.filter((rel) => existsSync(path12__default.join(projectRoot, rel)));
|
|
9059
9140
|
} else {
|
|
9060
9141
|
const targetEnv = { ...process.env };
|
|
9061
9142
|
loadedFiles = [
|
|
@@ -9130,8 +9211,8 @@ function buildRunaEnvFilePaths(runaEnv) {
|
|
|
9130
9211
|
function loadEnvFilesIntoTarget(params) {
|
|
9131
9212
|
const loadedFiles = [];
|
|
9132
9213
|
for (const relPath of params.relPaths) {
|
|
9133
|
-
const fullPath =
|
|
9134
|
-
const resolvedPath =
|
|
9214
|
+
const fullPath = path12__default.join(params.projectRoot, relPath);
|
|
9215
|
+
const resolvedPath = path12__default.resolve(fullPath);
|
|
9135
9216
|
if (!isPathContained(params.projectRoot, resolvedPath)) {
|
|
9136
9217
|
continue;
|
|
9137
9218
|
}
|
|
@@ -9399,7 +9480,7 @@ async function runTool(params) {
|
|
|
9399
9480
|
exitCode,
|
|
9400
9481
|
issueCount,
|
|
9401
9482
|
durationMs,
|
|
9402
|
-
outputPath:
|
|
9483
|
+
outputPath: path12__default.relative(process.cwd(), params.outputPath)
|
|
9403
9484
|
});
|
|
9404
9485
|
});
|
|
9405
9486
|
proc.on("error", async () => {
|
|
@@ -9418,7 +9499,7 @@ async function runTool(params) {
|
|
|
9418
9499
|
exitCode: -1,
|
|
9419
9500
|
issueCount: 0,
|
|
9420
9501
|
durationMs,
|
|
9421
|
-
outputPath:
|
|
9502
|
+
outputPath: path12__default.relative(process.cwd(), params.outputPath)
|
|
9422
9503
|
});
|
|
9423
9504
|
});
|
|
9424
9505
|
});
|
|
@@ -9450,7 +9531,7 @@ async function executeWorkflowCheck(params) {
|
|
|
9450
9531
|
label: params.label,
|
|
9451
9532
|
command: params.command,
|
|
9452
9533
|
args: params.args,
|
|
9453
|
-
outputPath:
|
|
9534
|
+
outputPath: path12__default.join(params.logDir, `${params.name}-output.txt`),
|
|
9454
9535
|
parseIssueCount: params.parseIssueCount
|
|
9455
9536
|
});
|
|
9456
9537
|
logCheckResult(result, params.isWarningOnly ?? false);
|
|
@@ -9522,7 +9603,7 @@ async function runCiChecks(options) {
|
|
|
9522
9603
|
const mode = options.mode === "github-actions" ? "github-actions" : "local";
|
|
9523
9604
|
const isCI3 = mode === "github-actions";
|
|
9524
9605
|
const workflowsChanged = options.workflowsChanged;
|
|
9525
|
-
const logDir =
|
|
9606
|
+
const logDir = path12__default.join(process.cwd(), ".runa", "tmp", "workflow");
|
|
9526
9607
|
await mkdir(logDir, { recursive: true });
|
|
9527
9608
|
console.log("");
|
|
9528
9609
|
console.log("\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501");
|
|
@@ -9531,7 +9612,7 @@ async function runCiChecks(options) {
|
|
|
9531
9612
|
console.log("");
|
|
9532
9613
|
const results = await executeChecks(workflowsChanged, logDir);
|
|
9533
9614
|
const summary = buildChecksSummary(startedAt, workflowsChanged, results);
|
|
9534
|
-
const summaryPath =
|
|
9615
|
+
const summaryPath = path12__default.join(logDir, "checks-summary.json");
|
|
9535
9616
|
await writeFile(summaryPath, JSON.stringify(summary, null, 2), "utf-8");
|
|
9536
9617
|
if (isCI3) {
|
|
9537
9618
|
await appendGithubStepSummary(buildStepSummaryMarkdown(summary));
|
|
@@ -9598,7 +9679,7 @@ function printFinalOutput(summary, summaryPath) {
|
|
|
9598
9679
|
console.log("\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501");
|
|
9599
9680
|
console.log(`${summary.status === "failure" ? "\u274C" : "\u2705"} Workflow Checks: ${summary.status}`);
|
|
9600
9681
|
console.log(` Duration: ${formatDuration$1(summary.durationMs)}`);
|
|
9601
|
-
console.log(` Summary: ${
|
|
9682
|
+
console.log(` Summary: ${path12__default.relative(process.cwd(), summaryPath)}`);
|
|
9602
9683
|
console.log("\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501");
|
|
9603
9684
|
}
|
|
9604
9685
|
var ciChecksCommand = new Command("checks").description("Run workflow checks (actionlint, zizmor)").option("--mode <mode>", "Execution mode (github-actions | local)", "local").option("--workflows-changed", "Indicate workflows have changed", false).action(runCiChecks);
|
|
@@ -9611,10 +9692,10 @@ init_esm_shims();
|
|
|
9611
9692
|
var CiConfigSchema = z.record(z.string(), z.unknown());
|
|
9612
9693
|
async function loadCiConfig(params) {
|
|
9613
9694
|
const override = typeof params.overridePath === "string" && params.overridePath.trim().length > 0 ? params.overridePath.trim() : null;
|
|
9614
|
-
const defaultPath =
|
|
9695
|
+
const defaultPath = path12__default.join(params.repoRoot, ".runa", "ci.config.json");
|
|
9615
9696
|
const resolvedPath = override ?? (existsSync(defaultPath) ? defaultPath : null);
|
|
9616
9697
|
if (!resolvedPath) return { configPath: null, config: null };
|
|
9617
|
-
const abs =
|
|
9698
|
+
const abs = path12__default.isAbsolute(resolvedPath) ? resolvedPath : path12__default.join(params.repoRoot, resolvedPath);
|
|
9618
9699
|
if (!existsSync(abs)) return { configPath: abs, config: null };
|
|
9619
9700
|
const raw = await readFile(abs, "utf-8");
|
|
9620
9701
|
const parsed = JSON.parse(raw);
|
|
@@ -9716,9 +9797,9 @@ var CiSummarySchema = z.object({
|
|
|
9716
9797
|
async function writeCiSummary(params) {
|
|
9717
9798
|
const cwd = params.cwd ?? process.cwd();
|
|
9718
9799
|
const parsed = CiSummarySchema.parse(params.summary);
|
|
9719
|
-
const dir =
|
|
9800
|
+
const dir = path12__default.join(cwd, ".runa", "tmp");
|
|
9720
9801
|
await mkdir(dir, { recursive: true });
|
|
9721
|
-
const filePath =
|
|
9802
|
+
const filePath = path12__default.join(dir, "ci-summary.json");
|
|
9722
9803
|
await writeFile(filePath, `${JSON.stringify(parsed, null, 2)}
|
|
9723
9804
|
`, "utf-8");
|
|
9724
9805
|
return filePath;
|
|
@@ -10051,12 +10132,12 @@ async function detectStack(repoRoot, tmpDir, productionDbUrlAdmin) {
|
|
|
10051
10132
|
label: "detect-stack",
|
|
10052
10133
|
command: "pnpm",
|
|
10053
10134
|
args: ["exec", "runa", "db", "detect-stack", "--quiet"],
|
|
10054
|
-
logFile:
|
|
10135
|
+
logFile: path12__default.join(tmpDir, "detect-stack.log")
|
|
10055
10136
|
});
|
|
10056
10137
|
return String(res.stdout ?? "").trim();
|
|
10057
10138
|
}
|
|
10058
10139
|
function checkIfInitialDeployment(repoRoot, productionDbUrl) {
|
|
10059
|
-
const schemasDir =
|
|
10140
|
+
const schemasDir = path12__default.join(repoRoot, "supabase", "schemas", "declarative");
|
|
10060
10141
|
let schemas;
|
|
10061
10142
|
try {
|
|
10062
10143
|
schemas = detectAppSchemas(schemasDir, false);
|
|
@@ -10122,9 +10203,9 @@ function checkIfInitialDeployment(repoRoot, productionDbUrl) {
|
|
|
10122
10203
|
}
|
|
10123
10204
|
}
|
|
10124
10205
|
async function showSchemaDiff(repoRoot, tmpDir) {
|
|
10125
|
-
const diffLog =
|
|
10206
|
+
const diffLog = path12__default.join(tmpDir, "schema-diff.log");
|
|
10126
10207
|
const safeEnv = getSafeEnv();
|
|
10127
|
-
const declarativeSqlPath =
|
|
10208
|
+
const declarativeSqlPath = path12__default.join(repoRoot, "supabase/schemas/declarative");
|
|
10128
10209
|
if (existsSync(declarativeSqlPath)) {
|
|
10129
10210
|
await runLogged({
|
|
10130
10211
|
cwd: repoRoot,
|
|
@@ -10138,8 +10219,8 @@ async function showSchemaDiff(repoRoot, tmpDir) {
|
|
|
10138
10219
|
}
|
|
10139
10220
|
const detected = detectDatabasePackage(repoRoot);
|
|
10140
10221
|
if (detected) {
|
|
10141
|
-
const schemaPath =
|
|
10142
|
-
if (existsSync(
|
|
10222
|
+
const schemaPath = path12__default.join(detected, "src", "schema");
|
|
10223
|
+
if (existsSync(path12__default.join(repoRoot, schemaPath))) {
|
|
10143
10224
|
await runLogged({
|
|
10144
10225
|
cwd: repoRoot,
|
|
10145
10226
|
env: safeEnv,
|
|
@@ -10152,8 +10233,8 @@ async function showSchemaDiff(repoRoot, tmpDir) {
|
|
|
10152
10233
|
}
|
|
10153
10234
|
}
|
|
10154
10235
|
for (const candidate of DATABASE_PACKAGE_CANDIDATES) {
|
|
10155
|
-
const candidatePath =
|
|
10156
|
-
if (existsSync(
|
|
10236
|
+
const candidatePath = path12__default.join("packages", candidate, "src", "schema");
|
|
10237
|
+
if (existsSync(path12__default.join(repoRoot, candidatePath))) {
|
|
10157
10238
|
await runLogged({
|
|
10158
10239
|
cwd: repoRoot,
|
|
10159
10240
|
env: safeEnv,
|
|
@@ -10165,7 +10246,7 @@ async function showSchemaDiff(repoRoot, tmpDir) {
|
|
|
10165
10246
|
return;
|
|
10166
10247
|
}
|
|
10167
10248
|
}
|
|
10168
|
-
if (existsSync(
|
|
10249
|
+
if (existsSync(path12__default.join(repoRoot, "src", "schema"))) {
|
|
10169
10250
|
await runLogged({
|
|
10170
10251
|
cwd: repoRoot,
|
|
10171
10252
|
env: safeEnv,
|
|
@@ -10177,7 +10258,7 @@ async function showSchemaDiff(repoRoot, tmpDir) {
|
|
|
10177
10258
|
}
|
|
10178
10259
|
}
|
|
10179
10260
|
async function detectRisks(repoRoot, tmpDir) {
|
|
10180
|
-
const logFile =
|
|
10261
|
+
const logFile = path12__default.join(tmpDir, "db-risks.log");
|
|
10181
10262
|
try {
|
|
10182
10263
|
const env2 = getFilteredEnv();
|
|
10183
10264
|
await runLogged({
|
|
@@ -10191,8 +10272,8 @@ async function detectRisks(repoRoot, tmpDir) {
|
|
|
10191
10272
|
} catch (error) {
|
|
10192
10273
|
let logContent = "";
|
|
10193
10274
|
try {
|
|
10194
|
-
const { readFileSync:
|
|
10195
|
-
logContent =
|
|
10275
|
+
const { readFileSync: readFileSync31 } = await import('fs');
|
|
10276
|
+
logContent = readFileSync31(logFile, "utf-8");
|
|
10196
10277
|
} catch {
|
|
10197
10278
|
}
|
|
10198
10279
|
const isInitialDeployment = logContent.includes("No common ancestor") || logContent.includes("INITIAL DEPLOYMENT");
|
|
@@ -10223,7 +10304,7 @@ async function snapshotCreate(repoRoot, tmpDir, productionDbUrlAdmin, commit) {
|
|
|
10223
10304
|
label: "snapshot create production",
|
|
10224
10305
|
command: "pnpm",
|
|
10225
10306
|
args: ["exec", "runa", "db", "snapshot", "create", "production", "--commit", commit],
|
|
10226
|
-
logFile:
|
|
10307
|
+
logFile: path12__default.join(tmpDir, "snapshot-create.log")
|
|
10227
10308
|
});
|
|
10228
10309
|
}
|
|
10229
10310
|
async function snapshotRestoreLatest(repoRoot, tmpDir, productionDbUrlAdmin) {
|
|
@@ -10236,7 +10317,7 @@ async function snapshotRestoreLatest(repoRoot, tmpDir, productionDbUrlAdmin) {
|
|
|
10236
10317
|
label: "snapshot restore production (latest)",
|
|
10237
10318
|
command: "pnpm",
|
|
10238
10319
|
args: ["exec", "runa", "db", "snapshot", "restore", "production", "--latest", "--auto-approve"],
|
|
10239
|
-
logFile:
|
|
10320
|
+
logFile: path12__default.join(tmpDir, "snapshot-restore.log")
|
|
10240
10321
|
});
|
|
10241
10322
|
}
|
|
10242
10323
|
function parseApplyLog(logContent) {
|
|
@@ -10300,7 +10381,7 @@ async function applyProductionSchema(repoRoot, tmpDir, productionDbUrlAdmin, pro
|
|
|
10300
10381
|
if (options?.maxLockWaitMs !== void 0) {
|
|
10301
10382
|
args.push("--max-lock-wait-ms", String(options.maxLockWaitMs));
|
|
10302
10383
|
}
|
|
10303
|
-
const logPath =
|
|
10384
|
+
const logPath = path12__default.join(tmpDir, "db-apply-production.log");
|
|
10304
10385
|
const startTime = Date.now();
|
|
10305
10386
|
await runLogged({
|
|
10306
10387
|
cwd: repoRoot,
|
|
@@ -10320,8 +10401,8 @@ async function applyProductionSchema(repoRoot, tmpDir, productionDbUrlAdmin, pro
|
|
|
10320
10401
|
const totalMs = Date.now() - startTime;
|
|
10321
10402
|
let logContent = "";
|
|
10322
10403
|
try {
|
|
10323
|
-
const { readFileSync:
|
|
10324
|
-
logContent =
|
|
10404
|
+
const { readFileSync: readFileSync31 } = await import('fs');
|
|
10405
|
+
logContent = readFileSync31(logPath, "utf-8");
|
|
10325
10406
|
} catch {
|
|
10326
10407
|
}
|
|
10327
10408
|
const parsed = parseApplyLog(logContent);
|
|
@@ -10345,11 +10426,11 @@ async function auditRecord(repoRoot, tmpDir, productionDbUrlAdmin, params) {
|
|
|
10345
10426
|
label: "git log (commit message)",
|
|
10346
10427
|
command: "git",
|
|
10347
10428
|
args: ["log", "-1", "--pretty=format:%s"],
|
|
10348
|
-
logFile:
|
|
10429
|
+
logFile: path12__default.join(tmpDir, "git-commit-message.log")
|
|
10349
10430
|
});
|
|
10350
10431
|
const commitMsg = String(msg.stdout ?? "").trim();
|
|
10351
10432
|
const schemaPaths = [];
|
|
10352
|
-
const declarativeSqlPath =
|
|
10433
|
+
const declarativeSqlPath = path12__default.join(repoRoot, "supabase/schemas/declarative");
|
|
10353
10434
|
if (existsSync(declarativeSqlPath)) {
|
|
10354
10435
|
schemaPaths.push("supabase/schemas/declarative/");
|
|
10355
10436
|
} else {
|
|
@@ -10369,7 +10450,7 @@ async function auditRecord(repoRoot, tmpDir, productionDbUrlAdmin, params) {
|
|
|
10369
10450
|
label: "git diff (schema)",
|
|
10370
10451
|
command: "git",
|
|
10371
10452
|
args: ["diff", "origin/main", "HEAD", "--", ...schemaPaths],
|
|
10372
|
-
logFile:
|
|
10453
|
+
logFile: path12__default.join(tmpDir, "git-schema-diff.log")
|
|
10373
10454
|
});
|
|
10374
10455
|
const diffRaw = String(diff.stdout ?? "");
|
|
10375
10456
|
const diffLimited = diffRaw.length > 1e4 ? diffRaw.slice(0, 1e4) : diffRaw;
|
|
@@ -10398,7 +10479,7 @@ async function auditRecord(repoRoot, tmpDir, productionDbUrlAdmin, params) {
|
|
|
10398
10479
|
"--environment",
|
|
10399
10480
|
"production"
|
|
10400
10481
|
],
|
|
10401
|
-
logFile:
|
|
10482
|
+
logFile: path12__default.join(tmpDir, "db-audit-record.log")
|
|
10402
10483
|
});
|
|
10403
10484
|
}
|
|
10404
10485
|
async function notifyDeployment(repoRoot, tmpDir, params) {
|
|
@@ -10426,7 +10507,7 @@ async function notifyDeployment(repoRoot, tmpDir, params) {
|
|
|
10426
10507
|
"--type",
|
|
10427
10508
|
"production-schema"
|
|
10428
10509
|
],
|
|
10429
|
-
logFile:
|
|
10510
|
+
logFile: path12__default.join(tmpDir, "notify-deployment.log")
|
|
10430
10511
|
});
|
|
10431
10512
|
}
|
|
10432
10513
|
|
|
@@ -10788,7 +10869,7 @@ $$;
|
|
|
10788
10869
|
label: `repair timestamps (${params.labelPrefix})`,
|
|
10789
10870
|
command: "psql",
|
|
10790
10871
|
args: [...psql.args, "-v", "ON_ERROR_STOP=1", "-c", sql],
|
|
10791
|
-
logFile:
|
|
10872
|
+
logFile: path12__default.join(params.tmpDir, `timestamp-repair-${params.labelPrefix}.log`)
|
|
10792
10873
|
});
|
|
10793
10874
|
}
|
|
10794
10875
|
|
|
@@ -11220,7 +11301,7 @@ async function runCiProdApply(params) {
|
|
|
11220
11301
|
overridePath: params.options.config
|
|
11221
11302
|
});
|
|
11222
11303
|
if (cfg.configPath)
|
|
11223
|
-
params.summary.detected.configPath =
|
|
11304
|
+
params.summary.detected.configPath = path12__default.relative(params.repoRoot, cfg.configPath);
|
|
11224
11305
|
if (cfg.config) params.summary.detected.config = cfg.config;
|
|
11225
11306
|
requireCiAutoApprove({ mode: params.summary.mode, autoApprove: params.options.autoApprove });
|
|
11226
11307
|
const inputs = resolveProdApplyInputs();
|
|
@@ -11313,7 +11394,7 @@ var ciProdApplyCommand = new Command("prod-apply").description("Apply production
|
|
|
11313
11394
|
await appendGithubStepSummary(
|
|
11314
11395
|
buildCiProdApplyStepSummaryMarkdown({
|
|
11315
11396
|
summary,
|
|
11316
|
-
summaryPath:
|
|
11397
|
+
summaryPath: path12__default.relative(repoRoot, summaryPath)
|
|
11317
11398
|
})
|
|
11318
11399
|
);
|
|
11319
11400
|
if (summary.mode === "github-actions") {
|
|
@@ -11329,7 +11410,7 @@ var ciProdApplyCommand = new Command("prod-apply").description("Apply production
|
|
|
11329
11410
|
const summaryPath = await writeCiSummary({ summary });
|
|
11330
11411
|
logSection("Result");
|
|
11331
11412
|
console.log(`status: ${summary.status}`);
|
|
11332
|
-
console.log(`summary: ${
|
|
11413
|
+
console.log(`summary: ${path12__default.relative(repoRoot, summaryPath)}`);
|
|
11333
11414
|
logNextActions([
|
|
11334
11415
|
"Inspect .runa/tmp logs (db-apply-production.log, snapshot-*.log)",
|
|
11335
11416
|
"If rollback succeeded, database should be restored to pre-deploy snapshot"
|
|
@@ -11337,7 +11418,7 @@ var ciProdApplyCommand = new Command("prod-apply").description("Apply production
|
|
|
11337
11418
|
await appendGithubStepSummary(
|
|
11338
11419
|
buildCiProdApplyStepSummaryMarkdown({
|
|
11339
11420
|
summary,
|
|
11340
|
-
summaryPath:
|
|
11421
|
+
summaryPath: path12__default.relative(repoRoot, summaryPath)
|
|
11341
11422
|
})
|
|
11342
11423
|
);
|
|
11343
11424
|
await maybePostFailureComment({
|
|
@@ -11408,7 +11489,7 @@ function getChecks() {
|
|
|
11408
11489
|
}
|
|
11409
11490
|
async function runCheck(check, logDir) {
|
|
11410
11491
|
const startTime = Date.now();
|
|
11411
|
-
const logPath =
|
|
11492
|
+
const logPath = path12__default.join(logDir, `${check.name}.log`);
|
|
11412
11493
|
return new Promise((resolve12) => {
|
|
11413
11494
|
const proc = spawn(check.command, check.args, {
|
|
11414
11495
|
cwd: process.cwd(),
|
|
@@ -11438,7 +11519,7 @@ async function runCheck(check, logDir) {
|
|
|
11438
11519
|
status: exitCode === 0 ? "passed" : "failed",
|
|
11439
11520
|
exitCode,
|
|
11440
11521
|
durationMs,
|
|
11441
|
-
logPath:
|
|
11522
|
+
logPath: path12__default.relative(process.cwd(), logPath)
|
|
11442
11523
|
});
|
|
11443
11524
|
});
|
|
11444
11525
|
proc.on("error", async () => {
|
|
@@ -11453,7 +11534,7 @@ async function runCheck(check, logDir) {
|
|
|
11453
11534
|
status: "failed",
|
|
11454
11535
|
exitCode: 1,
|
|
11455
11536
|
durationMs,
|
|
11456
|
-
logPath:
|
|
11537
|
+
logPath: path12__default.relative(process.cwd(), logPath)
|
|
11457
11538
|
});
|
|
11458
11539
|
});
|
|
11459
11540
|
});
|
|
@@ -11581,19 +11662,19 @@ function printFinalStaticOutput(summary, summaryPath) {
|
|
|
11581
11662
|
console.log(`${summary.status === "success" ? "\u2705" : "\u274C"} Static Analysis: ${summary.status}`);
|
|
11582
11663
|
console.log(` Duration: ${formatDuration$1(summary.durationMs)}`);
|
|
11583
11664
|
console.log(` Passed: ${summary.passedCount}/${summary.checks.length}`);
|
|
11584
|
-
console.log(` Summary: ${
|
|
11665
|
+
console.log(` Summary: ${path12__default.relative(process.cwd(), summaryPath)}`);
|
|
11585
11666
|
console.log("\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501");
|
|
11586
11667
|
}
|
|
11587
11668
|
async function runCiStatic(options) {
|
|
11588
11669
|
const startedAt = /* @__PURE__ */ new Date();
|
|
11589
11670
|
const isCI3 = options.mode === "github-actions";
|
|
11590
|
-
const logDir =
|
|
11671
|
+
const logDir = path12__default.join(process.cwd(), ".runa", "tmp", "workflow");
|
|
11591
11672
|
await mkdir(logDir, { recursive: true });
|
|
11592
11673
|
const checksToRun = getChecksToRun(options);
|
|
11593
11674
|
printStaticHeader(options);
|
|
11594
11675
|
const results = options.parallel ? await runChecksParallel(checksToRun, logDir) : await runChecksSequential(checksToRun, logDir);
|
|
11595
11676
|
const summary = buildStaticSummary(startedAt, results);
|
|
11596
|
-
const summaryPath =
|
|
11677
|
+
const summaryPath = path12__default.join(logDir, "static-summary.json");
|
|
11597
11678
|
await writeFile(summaryPath, JSON.stringify(summary, null, 2), "utf-8");
|
|
11598
11679
|
if (isCI3) {
|
|
11599
11680
|
await appendGithubStepSummary(buildStepSummaryMarkdown2(summary));
|
|
@@ -11623,9 +11704,9 @@ var appBuildActor = fromPromise(
|
|
|
11623
11704
|
async ({ input: input3 }) => {
|
|
11624
11705
|
const { repoRoot, tmpDir, env: env2 = process.env } = input3;
|
|
11625
11706
|
try {
|
|
11626
|
-
const hasTurbo = existsSync(
|
|
11627
|
-
const hasApps = existsSync(
|
|
11628
|
-
const hasAppsWeb = existsSync(
|
|
11707
|
+
const hasTurbo = existsSync(path12__default.join(repoRoot, "turbo.json"));
|
|
11708
|
+
const hasApps = existsSync(path12__default.join(repoRoot, "apps"));
|
|
11709
|
+
const hasAppsWeb = existsSync(path12__default.join(repoRoot, "apps", "web"));
|
|
11629
11710
|
const args = hasTurbo ? hasApps && !hasAppsWeb ? ["turbo", "run", "build", "--filter=./apps/*"] : ["turbo", "run", "build"] : ["build"];
|
|
11630
11711
|
await runLogged({
|
|
11631
11712
|
cwd: repoRoot,
|
|
@@ -11633,7 +11714,7 @@ var appBuildActor = fromPromise(
|
|
|
11633
11714
|
label: "build",
|
|
11634
11715
|
command: "pnpm",
|
|
11635
11716
|
args,
|
|
11636
|
-
logFile:
|
|
11717
|
+
logFile: path12__default.join(tmpDir, "build.log")
|
|
11637
11718
|
});
|
|
11638
11719
|
return { passed: true };
|
|
11639
11720
|
} catch (error) {
|
|
@@ -11649,14 +11730,14 @@ var appBuildActor = fromPromise(
|
|
|
11649
11730
|
init_esm_shims();
|
|
11650
11731
|
var APP_CANDIDATES = ["web", "dashboard", "app", "frontend", "client"];
|
|
11651
11732
|
function findExistingAppDirs(repoRoot) {
|
|
11652
|
-
const appsDir =
|
|
11733
|
+
const appsDir = path12__default.join(repoRoot, "apps");
|
|
11653
11734
|
if (!existsSync(appsDir)) return [];
|
|
11654
11735
|
try {
|
|
11655
11736
|
const entries = readdirSync(appsDir, { withFileTypes: true });
|
|
11656
11737
|
const found = [];
|
|
11657
11738
|
for (const entry of entries) {
|
|
11658
11739
|
if (!entry.isDirectory()) continue;
|
|
11659
|
-
const pkgJson =
|
|
11740
|
+
const pkgJson = path12__default.join(appsDir, entry.name, "package.json");
|
|
11660
11741
|
if (existsSync(pkgJson)) {
|
|
11661
11742
|
found.push(`apps/${entry.name}`);
|
|
11662
11743
|
}
|
|
@@ -11676,9 +11757,9 @@ function findExistingAppDirs(repoRoot) {
|
|
|
11676
11757
|
}
|
|
11677
11758
|
}
|
|
11678
11759
|
function validateAppDir(repoRoot, appDir) {
|
|
11679
|
-
const pkgJsonPath =
|
|
11760
|
+
const pkgJsonPath = path12__default.join(appDir, "package.json");
|
|
11680
11761
|
if (!existsSync(appDir)) {
|
|
11681
|
-
const relPath =
|
|
11762
|
+
const relPath = path12__default.relative(repoRoot, appDir) || ".";
|
|
11682
11763
|
const suggestions = findExistingAppDirs(repoRoot);
|
|
11683
11764
|
const errorParts = [`App directory not found: ${relPath}`];
|
|
11684
11765
|
if (suggestions.length > 0) {
|
|
@@ -11700,7 +11781,7 @@ function validateAppDir(repoRoot, appDir) {
|
|
|
11700
11781
|
return errorParts.join("\n");
|
|
11701
11782
|
}
|
|
11702
11783
|
if (!existsSync(pkgJsonPath)) {
|
|
11703
|
-
const relPath =
|
|
11784
|
+
const relPath = path12__default.relative(repoRoot, appDir) || ".";
|
|
11704
11785
|
return `No package.json found in app directory: ${relPath}`;
|
|
11705
11786
|
}
|
|
11706
11787
|
return null;
|
|
@@ -11757,9 +11838,9 @@ init_esm_shims();
|
|
|
11757
11838
|
async function runAppBuild(params) {
|
|
11758
11839
|
const { repoRoot, tmpDir, env: env2 } = params;
|
|
11759
11840
|
try {
|
|
11760
|
-
const hasTurbo = existsSync(
|
|
11761
|
-
const hasApps = existsSync(
|
|
11762
|
-
const hasAppsWeb = existsSync(
|
|
11841
|
+
const hasTurbo = existsSync(path12__default.join(repoRoot, "turbo.json"));
|
|
11842
|
+
const hasApps = existsSync(path12__default.join(repoRoot, "apps"));
|
|
11843
|
+
const hasAppsWeb = existsSync(path12__default.join(repoRoot, "apps", "web"));
|
|
11763
11844
|
const args = hasTurbo ? hasApps && !hasAppsWeb ? ["turbo", "run", "build", "--filter=./apps/*"] : ["turbo", "run", "build"] : ["build"];
|
|
11764
11845
|
await runLogged({
|
|
11765
11846
|
cwd: repoRoot,
|
|
@@ -11767,7 +11848,7 @@ async function runAppBuild(params) {
|
|
|
11767
11848
|
label: "build",
|
|
11768
11849
|
command: "pnpm",
|
|
11769
11850
|
args,
|
|
11770
|
-
logFile:
|
|
11851
|
+
logFile: path12__default.join(tmpDir, "build.log")
|
|
11771
11852
|
});
|
|
11772
11853
|
return { passed: true };
|
|
11773
11854
|
} catch (error) {
|
|
@@ -11786,7 +11867,7 @@ async function runManifestGenerate(params) {
|
|
|
11786
11867
|
label: "manifest:generate",
|
|
11787
11868
|
command: "pnpm",
|
|
11788
11869
|
args: ["manifest:generate"],
|
|
11789
|
-
logFile:
|
|
11870
|
+
logFile: path12__default.join(tmpDir, "manifest-generate.log")
|
|
11790
11871
|
});
|
|
11791
11872
|
return { generated: true };
|
|
11792
11873
|
} catch (error) {
|
|
@@ -11806,7 +11887,7 @@ async function runPlaywrightInstall(params) {
|
|
|
11806
11887
|
label: "playwright install",
|
|
11807
11888
|
command: "pnpm",
|
|
11808
11889
|
args,
|
|
11809
|
-
logFile:
|
|
11890
|
+
logFile: path12__default.join(tmpDir, "playwright-install.log")
|
|
11810
11891
|
});
|
|
11811
11892
|
return { installed: true };
|
|
11812
11893
|
} catch (error) {
|
|
@@ -11879,7 +11960,7 @@ var playwrightInstallActor = fromPromise(
|
|
|
11879
11960
|
label: "playwright install",
|
|
11880
11961
|
command: "pnpm",
|
|
11881
11962
|
args,
|
|
11882
|
-
logFile:
|
|
11963
|
+
logFile: path12__default.join(tmpDir, "playwright-install.log")
|
|
11883
11964
|
});
|
|
11884
11965
|
return { installed: true };
|
|
11885
11966
|
} catch (error) {
|
|
@@ -11903,7 +11984,7 @@ var staticChecksActor2 = fromPromise(
|
|
|
11903
11984
|
label: "type-check",
|
|
11904
11985
|
command: "pnpm",
|
|
11905
11986
|
args: ["type-check"],
|
|
11906
|
-
logFile:
|
|
11987
|
+
logFile: path12__default.join(tmpDir, "type-check.log")
|
|
11907
11988
|
}),
|
|
11908
11989
|
runLogged({
|
|
11909
11990
|
cwd: repoRoot,
|
|
@@ -11911,7 +11992,7 @@ var staticChecksActor2 = fromPromise(
|
|
|
11911
11992
|
label: "lint",
|
|
11912
11993
|
command: "pnpm",
|
|
11913
11994
|
args: ["lint"],
|
|
11914
|
-
logFile:
|
|
11995
|
+
logFile: path12__default.join(tmpDir, "lint.log")
|
|
11915
11996
|
})
|
|
11916
11997
|
]);
|
|
11917
11998
|
const typeCheckPassed = typeCheckResult.status === "fulfilled";
|
|
@@ -12062,7 +12143,7 @@ var applySeedsActor = fromPromise(
|
|
|
12062
12143
|
label: `db seed (${envArg})`,
|
|
12063
12144
|
command: "pnpm",
|
|
12064
12145
|
args: ["exec", "runa", "db", "seed", envArg, "--auto-approve"],
|
|
12065
|
-
logFile:
|
|
12146
|
+
logFile: path12__default.join(tmpDir, `ci-db-seed-${envArg}.log`)
|
|
12066
12147
|
});
|
|
12067
12148
|
return { applied: true };
|
|
12068
12149
|
} catch (error) {
|
|
@@ -12440,7 +12521,7 @@ SELECT (SELECT available FROM a)::text || ',' || (SELECT installed FROM i)::text
|
|
|
12440
12521
|
label: `probe pgtap (best-effort, ${params.label})`,
|
|
12441
12522
|
command: "psql",
|
|
12442
12523
|
args: [ddlUrl, "-X", "-v", "ON_ERROR_STOP=1", "-q", "-t", "-A", "-c", probeSql],
|
|
12443
|
-
logFile:
|
|
12524
|
+
logFile: path12__default.join(params.tmpDir, `pgtap-probe-${params.label}.log`)
|
|
12444
12525
|
}).catch(() => null);
|
|
12445
12526
|
const raw = String(probe?.stdout ?? "").trim();
|
|
12446
12527
|
const [availableRaw, installedRaw] = raw.split(",");
|
|
@@ -12455,11 +12536,11 @@ SELECT (SELECT available FROM a)::text || ',' || (SELECT installed FROM i)::text
|
|
|
12455
12536
|
label: `ensure pgtap (best-effort, ${params.label})`,
|
|
12456
12537
|
command: "psql",
|
|
12457
12538
|
args: [ddlUrl, "-X", "-v", "ON_ERROR_STOP=1", "-q", "-c", installSql],
|
|
12458
|
-
logFile:
|
|
12539
|
+
logFile: path12__default.join(params.tmpDir, `pgtap-ensure-${params.label}.log`)
|
|
12459
12540
|
}).catch(async (error) => {
|
|
12460
12541
|
try {
|
|
12461
12542
|
await writeFile(
|
|
12462
|
-
|
|
12543
|
+
path12__default.join(params.tmpDir, `pgtap-ensure-${params.label}.failed`),
|
|
12463
12544
|
`${error instanceof Error ? error.message : String(error)}
|
|
12464
12545
|
`,
|
|
12465
12546
|
"utf-8"
|
|
@@ -12473,7 +12554,7 @@ SELECT (SELECT available FROM a)::text || ',' || (SELECT installed FROM i)::text
|
|
|
12473
12554
|
label: `probe pgtap (best-effort, ${params.label}, after)`,
|
|
12474
12555
|
command: "psql",
|
|
12475
12556
|
args: [ddlUrl, "-X", "-v", "ON_ERROR_STOP=1", "-q", "-t", "-A", "-c", probeSql],
|
|
12476
|
-
logFile:
|
|
12557
|
+
logFile: path12__default.join(params.tmpDir, `pgtap-probe-${params.label}-after.log`)
|
|
12477
12558
|
}).catch(() => null);
|
|
12478
12559
|
const afterRaw = String(probeAfter?.stdout ?? "").trim();
|
|
12479
12560
|
const [availableAfterRaw, installedAfterRaw] = afterRaw.split(",");
|
|
@@ -12594,14 +12675,14 @@ function extractSqlFromSchemaChanges(fullOutput) {
|
|
|
12594
12675
|
return null;
|
|
12595
12676
|
}
|
|
12596
12677
|
function getIdempotentRoleNames(repoRoot) {
|
|
12597
|
-
const idempotentDir =
|
|
12678
|
+
const idempotentDir = path12__default.join(repoRoot, "supabase", "schemas", "idempotent");
|
|
12598
12679
|
const roles = [];
|
|
12599
12680
|
try {
|
|
12600
12681
|
const fs14 = __require("fs");
|
|
12601
12682
|
if (!fs14.existsSync(idempotentDir)) return [];
|
|
12602
12683
|
const files = fs14.readdirSync(idempotentDir).filter((f) => f.endsWith(".sql"));
|
|
12603
12684
|
for (const file of files) {
|
|
12604
|
-
const content = fs14.readFileSync(
|
|
12685
|
+
const content = fs14.readFileSync(path12__default.join(idempotentDir, file), "utf-8");
|
|
12605
12686
|
const roleMatches = content.matchAll(/CREATE\s+ROLE\s+(\w+)\s+WITH/gi);
|
|
12606
12687
|
for (const match of roleMatches) {
|
|
12607
12688
|
if (match[1]) roles.push(match[1].toLowerCase());
|
|
@@ -12742,7 +12823,7 @@ var productionPreviewActor = fromPromise(
|
|
|
12742
12823
|
);
|
|
12743
12824
|
return buildSkipResult(!!productionUrl);
|
|
12744
12825
|
}
|
|
12745
|
-
const logFile =
|
|
12826
|
+
const logFile = path12__default.join(tmpDir, "ci-production-preview.log");
|
|
12746
12827
|
console.log("\u25B6 production preview (dry-run): runa db apply production --check");
|
|
12747
12828
|
try {
|
|
12748
12829
|
const result = await execa(
|
|
@@ -12832,7 +12913,7 @@ var resetDbActor = fromPromise(
|
|
|
12832
12913
|
label: "db reset (local)",
|
|
12833
12914
|
command: "npx",
|
|
12834
12915
|
args: ["supabase", "db", "reset", "--local"],
|
|
12835
|
-
logFile:
|
|
12916
|
+
logFile: path12__default.join(tmpDir, "ci-db-reset.log")
|
|
12836
12917
|
});
|
|
12837
12918
|
return { reset: true };
|
|
12838
12919
|
} catch (error) {
|
|
@@ -12848,8 +12929,8 @@ var resetDbActor = fromPromise(
|
|
|
12848
12929
|
init_esm_shims();
|
|
12849
12930
|
async function hasSetupRolesScript(repoRoot) {
|
|
12850
12931
|
const candidates = [
|
|
12851
|
-
|
|
12852
|
-
|
|
12932
|
+
path12__default.join(repoRoot, "packages", "database", "package.json"),
|
|
12933
|
+
path12__default.join(repoRoot, "package.json")
|
|
12853
12934
|
];
|
|
12854
12935
|
for (const pkgPath of candidates) {
|
|
12855
12936
|
if (!existsSync(pkgPath)) continue;
|
|
@@ -12887,7 +12968,7 @@ var setupRolesActor = fromPromise(
|
|
|
12887
12968
|
label: "db roles setup",
|
|
12888
12969
|
command: "pnpm",
|
|
12889
12970
|
args: ["db:setup-roles"],
|
|
12890
|
-
logFile:
|
|
12971
|
+
logFile: path12__default.join(tmpDir, "ci-db-roles-setup.log")
|
|
12891
12972
|
});
|
|
12892
12973
|
const stdout = String(result.stdout ?? "");
|
|
12893
12974
|
const appUrlMatch = stdout.match(/APP_DATABASE_URL=["']?([^"'\n]+)["']?/);
|
|
@@ -13014,14 +13095,14 @@ function extractRolesFromSql(content) {
|
|
|
13014
13095
|
return roles;
|
|
13015
13096
|
}
|
|
13016
13097
|
function getIdempotentRoleNames2(repoRoot) {
|
|
13017
|
-
const idempotentDir =
|
|
13098
|
+
const idempotentDir = path12__default.join(repoRoot, "supabase", "schemas", "idempotent");
|
|
13018
13099
|
const roles = [];
|
|
13019
13100
|
try {
|
|
13020
13101
|
const fs14 = __require("fs");
|
|
13021
13102
|
if (!fs14.existsSync(idempotentDir)) return [];
|
|
13022
13103
|
const files = fs14.readdirSync(idempotentDir).filter((f) => f.endsWith(".sql"));
|
|
13023
13104
|
for (const file of files) {
|
|
13024
|
-
const filePath =
|
|
13105
|
+
const filePath = path12__default.join(idempotentDir, file);
|
|
13025
13106
|
if (!isPathContained(idempotentDir, filePath)) {
|
|
13026
13107
|
continue;
|
|
13027
13108
|
}
|
|
@@ -13099,7 +13180,7 @@ var syncSchemaActor = fromPromise(
|
|
|
13099
13180
|
"--auto-approve",
|
|
13100
13181
|
// Allow DELETES_DATA hazards in preview
|
|
13101
13182
|
"--no-seed",
|
|
13102
|
-
// Seeds applied separately by applySeedsActor
|
|
13183
|
+
// Seeds applied separately by applySeedsActor
|
|
13103
13184
|
"--verbose"
|
|
13104
13185
|
// Always verbose for full traceability
|
|
13105
13186
|
] : [
|
|
@@ -13109,11 +13190,13 @@ var syncSchemaActor = fromPromise(
|
|
|
13109
13190
|
"sync",
|
|
13110
13191
|
envArg,
|
|
13111
13192
|
"--auto-approve",
|
|
13193
|
+
"--no-seed",
|
|
13194
|
+
// Seeds applied separately by applySeedsActor
|
|
13112
13195
|
"--verbose",
|
|
13113
13196
|
// Always verbose for full traceability
|
|
13114
13197
|
...skipCodegen ? ["--skip-codegen"] : []
|
|
13115
13198
|
];
|
|
13116
|
-
const logFile =
|
|
13199
|
+
const logFile = path12__default.join(tmpDir, `ci-db-${useDbApply ? "apply" : "sync"}-${envArg}.log`);
|
|
13117
13200
|
await runLogged({
|
|
13118
13201
|
cwd: repoRoot,
|
|
13119
13202
|
env: baseEnv,
|
|
@@ -13245,7 +13328,67 @@ init_esm_shims();
|
|
|
13245
13328
|
// src/commands/ci/commands/ci-supabase-local.ts
|
|
13246
13329
|
init_esm_shims();
|
|
13247
13330
|
init_constants();
|
|
13331
|
+
function isPortAvailable(port) {
|
|
13332
|
+
return new Promise((resolve12) => {
|
|
13333
|
+
const server = net.createServer();
|
|
13334
|
+
server.once("error", () => resolve12(false));
|
|
13335
|
+
server.once("listening", () => {
|
|
13336
|
+
server.close(() => resolve12(true));
|
|
13337
|
+
});
|
|
13338
|
+
server.listen(port);
|
|
13339
|
+
});
|
|
13340
|
+
}
|
|
13341
|
+
function detectSupabaseContainers() {
|
|
13342
|
+
try {
|
|
13343
|
+
const result = spawnSync(
|
|
13344
|
+
"docker",
|
|
13345
|
+
["ps", "--format", "{{.Names}} {{.Ports}} {{.Status}}", "--filter", "name=supabase"],
|
|
13346
|
+
{ encoding: "utf-8", timeout: 5e3, stdio: ["pipe", "pipe", "pipe"] }
|
|
13347
|
+
);
|
|
13348
|
+
if (result.status !== 0 || !result.stdout?.trim()) return [];
|
|
13349
|
+
return result.stdout.trim().split("\n").filter(Boolean);
|
|
13350
|
+
} catch {
|
|
13351
|
+
return [];
|
|
13352
|
+
}
|
|
13353
|
+
}
|
|
13354
|
+
async function checkSupabasePortConflicts(repoRoot) {
|
|
13355
|
+
let dbPort = 54322;
|
|
13356
|
+
try {
|
|
13357
|
+
const { readFileSync: readFileSync31 } = await import('fs');
|
|
13358
|
+
const configPath = path12__default.join(repoRoot, "supabase", "config.toml");
|
|
13359
|
+
const content = readFileSync31(configPath, "utf-8");
|
|
13360
|
+
const match = /\[db\][^[]*?port\s*=\s*(\d+)/s.exec(content);
|
|
13361
|
+
if (match?.[1]) dbPort = Number.parseInt(match[1], 10);
|
|
13362
|
+
} catch {
|
|
13363
|
+
}
|
|
13364
|
+
const available = await isPortAvailable(dbPort);
|
|
13365
|
+
if (available) return;
|
|
13366
|
+
const containers = detectSupabaseContainers();
|
|
13367
|
+
const dbContainers = containers.filter((c) => c.includes(String(dbPort)) || c.includes("db-"));
|
|
13368
|
+
console.warn("");
|
|
13369
|
+
console.warn("\u26A0\uFE0F Port conflict detected: PostgreSQL port %d is already in use", dbPort);
|
|
13370
|
+
console.warn("");
|
|
13371
|
+
if (dbContainers.length > 0) {
|
|
13372
|
+
console.warn(" Running Supabase containers:");
|
|
13373
|
+
for (const c of dbContainers) {
|
|
13374
|
+
console.warn(` ${c}`);
|
|
13375
|
+
}
|
|
13376
|
+
} else if (containers.length > 0) {
|
|
13377
|
+
console.warn(" Running Supabase containers (no DB match):");
|
|
13378
|
+
for (const c of containers.slice(0, 5)) {
|
|
13379
|
+
console.warn(` ${c}`);
|
|
13380
|
+
}
|
|
13381
|
+
} else {
|
|
13382
|
+
console.warn(" No Supabase containers detected (port may be used by another process).");
|
|
13383
|
+
}
|
|
13384
|
+
console.warn("");
|
|
13385
|
+
console.warn(" To fix:");
|
|
13386
|
+
console.warn(" 1. Stop the other instance: supabase stop (in the other project)");
|
|
13387
|
+
console.warn(" 2. Or use a different port in supabase/config.toml: [db] port = 54323");
|
|
13388
|
+
console.warn("");
|
|
13389
|
+
}
|
|
13248
13390
|
async function startSupabaseLocal(params) {
|
|
13391
|
+
await checkSupabasePortConflicts(params.repoRoot);
|
|
13249
13392
|
const exclude = process.env.RUNA_CI_SUPABASE_EXCLUDE ?? "studio,edge-runtime,storage-api,realtime,imgproxy,mailpit,logflare,vector,supavisor";
|
|
13250
13393
|
await runLogged({
|
|
13251
13394
|
cwd: params.repoRoot,
|
|
@@ -13253,7 +13396,7 @@ async function startSupabaseLocal(params) {
|
|
|
13253
13396
|
label: "supabase start (local)",
|
|
13254
13397
|
command: "supabase",
|
|
13255
13398
|
args: ["start", "--exclude", exclude],
|
|
13256
|
-
logFile:
|
|
13399
|
+
logFile: path12__default.join(params.tmpDir, "supabase-start-local.log")
|
|
13257
13400
|
});
|
|
13258
13401
|
}
|
|
13259
13402
|
function getDefaultLocalSupabaseUrl(repoRoot) {
|
|
@@ -13281,7 +13424,7 @@ async function resolveLocalSupabaseEnv(params) {
|
|
|
13281
13424
|
label: "supabase status (json)",
|
|
13282
13425
|
command: "supabase",
|
|
13283
13426
|
args: ["status", "--output", "json"],
|
|
13284
|
-
logFile:
|
|
13427
|
+
logFile: path12__default.join(params.tmpDir, `supabase-status-${i + 1}.log`)
|
|
13285
13428
|
});
|
|
13286
13429
|
const parsed = JSON.parse(String(res.stdout ?? "{}"));
|
|
13287
13430
|
const out = z.object({
|
|
@@ -13401,7 +13544,7 @@ async function findFilesRecursive(params) {
|
|
|
13401
13544
|
continue;
|
|
13402
13545
|
}
|
|
13403
13546
|
for (const e of entries) {
|
|
13404
|
-
const p =
|
|
13547
|
+
const p = path12__default.join(dir, e.name);
|
|
13405
13548
|
if (e.isDirectory()) stack.push(p);
|
|
13406
13549
|
else if (e.isFile() && params.suffixes.some((s) => p.endsWith(s))) out.push(p);
|
|
13407
13550
|
}
|
|
@@ -13415,7 +13558,7 @@ async function queryScalar(params) {
|
|
|
13415
13558
|
label: `psql scalar (${params.logName})`,
|
|
13416
13559
|
command: "psql",
|
|
13417
13560
|
args: [params.databaseUrl, "-X", "-v", "ON_ERROR_STOP=1", "-q", "-t", "-A", "-c", params.sql],
|
|
13418
|
-
logFile:
|
|
13561
|
+
logFile: path12__default.join(params.tmpDir, `psql-${params.logName}.log`)
|
|
13419
13562
|
});
|
|
13420
13563
|
return String(res.stdout ?? "").trim();
|
|
13421
13564
|
}
|
|
@@ -13465,7 +13608,7 @@ AND table_schema NOT IN ('information_schema');
|
|
|
13465
13608
|
SELECT COUNT(*)::int
|
|
13466
13609
|
FROM pg_class c
|
|
13467
13610
|
JOIN pg_namespace n ON n.oid=c.relnamespace
|
|
13468
|
-
WHERE c.relkind
|
|
13611
|
+
WHERE c.relkind IN ('r','p')
|
|
13469
13612
|
AND n.nspname NOT LIKE 'pg_%'
|
|
13470
13613
|
AND n.nspname NOT IN ('information_schema')
|
|
13471
13614
|
AND c.relrowsecurity;
|
|
@@ -13543,8 +13686,8 @@ function isPlaywrightIgnored(filePath) {
|
|
|
13543
13686
|
async function detectGeneratedE2eCapabilities(params) {
|
|
13544
13687
|
const caps = /* @__PURE__ */ new Set();
|
|
13545
13688
|
const diagnostics = {};
|
|
13546
|
-
const layer4Dir =
|
|
13547
|
-
const tmpDir =
|
|
13689
|
+
const layer4Dir = path12__default.join(params.repoRoot, ".runa", "tests", "layer4");
|
|
13690
|
+
const tmpDir = path12__default.join(params.repoRoot, ".runa", "tmp");
|
|
13548
13691
|
const hasLayer4Dir = await pathExists(layer4Dir);
|
|
13549
13692
|
const hasTmpDir = await pathExists(tmpDir);
|
|
13550
13693
|
if (!hasLayer4Dir && !hasTmpDir) {
|
|
@@ -14054,7 +14197,7 @@ init_esm_shims();
|
|
|
14054
14197
|
// src/commands/ci/utils/test-parallel.ts
|
|
14055
14198
|
init_esm_shims();
|
|
14056
14199
|
async function readLayerResult(tmpDir, layer) {
|
|
14057
|
-
const resultFile =
|
|
14200
|
+
const resultFile = path12__default.join(tmpDir, `vitest-layer${layer}-result.json`);
|
|
14058
14201
|
try {
|
|
14059
14202
|
const content = await readFile(resultFile, "utf-8");
|
|
14060
14203
|
return JSON.parse(content);
|
|
@@ -14172,8 +14315,8 @@ async function waitUntilAllFinished(params) {
|
|
|
14172
14315
|
async function runLayersInParallel(params) {
|
|
14173
14316
|
const spawned = [];
|
|
14174
14317
|
for (const layer of params.layers) {
|
|
14175
|
-
const logFile =
|
|
14176
|
-
const exitFile =
|
|
14318
|
+
const logFile = path12__default.join(params.tmpDir, `layer${layer}.log`);
|
|
14319
|
+
const exitFile = path12__default.join(params.tmpDir, `layer${layer}.exit`);
|
|
14177
14320
|
const proc = runLogged({
|
|
14178
14321
|
cwd: params.repoRoot,
|
|
14179
14322
|
env: {
|
|
@@ -14216,7 +14359,7 @@ async function runLayersInParallel(params) {
|
|
|
14216
14359
|
for (const p of spawned) {
|
|
14217
14360
|
const codeRaw = p.exitCode ?? await p.procPromise;
|
|
14218
14361
|
const code = Number.isNaN(codeRaw) ? 1 : codeRaw;
|
|
14219
|
-
const resultFile =
|
|
14362
|
+
const resultFile = path12__default.join(params.tmpDir, `vitest-layer${p.layer}-result.json`);
|
|
14220
14363
|
const wasKilled = killedLayerSet.has(p.layer);
|
|
14221
14364
|
const actualResult = waitResult.successMap.get(p.layer) ?? {
|
|
14222
14365
|
success: code === 0,
|
|
@@ -14231,9 +14374,9 @@ async function runLayersInParallel(params) {
|
|
|
14231
14374
|
skippedReason: actualResult.skippedReason,
|
|
14232
14375
|
killed: wasKilled,
|
|
14233
14376
|
killedBy: wasKilled ? failFastSource : void 0,
|
|
14234
|
-
logPath:
|
|
14235
|
-
exitPath:
|
|
14236
|
-
resultPath:
|
|
14377
|
+
logPath: path12__default.relative(params.repoRoot, p.logFile),
|
|
14378
|
+
exitPath: path12__default.relative(params.repoRoot, p.exitFile),
|
|
14379
|
+
resultPath: path12__default.relative(params.repoRoot, resultFile),
|
|
14237
14380
|
// Test count information
|
|
14238
14381
|
totalTests: actualResult.totalTests,
|
|
14239
14382
|
passedTests: actualResult.passedTests,
|
|
@@ -17576,11 +17719,11 @@ function resolveDatabaseUrl(environment) {
|
|
|
17576
17719
|
}
|
|
17577
17720
|
case "main":
|
|
17578
17721
|
case "production": {
|
|
17579
|
-
const url = process.env.GH_DATABASE_URL_ADMIN || process.env.GH_DATABASE_URL || env.DATABASE_URL || process.env.DATABASE_URL;
|
|
17722
|
+
const url = process.env.GH_DATABASE_URL_ADMIN || process.env.DATABASE_URL_ADMIN || process.env.GH_DATABASE_URL || env.DATABASE_URL || process.env.DATABASE_URL;
|
|
17580
17723
|
if (!url) {
|
|
17581
17724
|
throw new CLIError("Production database URL not found", "PRODUCTION_DB_URL_NOT_FOUND", [
|
|
17582
|
-
"Set
|
|
17583
|
-
"Or set
|
|
17725
|
+
"Set DATABASE_URL_ADMIN environment variable (for DDL/migrations)",
|
|
17726
|
+
"Or set GH_DATABASE_URL_ADMIN (CI) / DATABASE_URL as fallback"
|
|
17584
17727
|
]);
|
|
17585
17728
|
}
|
|
17586
17729
|
return url;
|
|
@@ -17720,16 +17863,16 @@ var ERROR_MESSAGES2 = {
|
|
|
17720
17863
|
SCHEMA_NOT_FOUND: "Schema file not found"
|
|
17721
17864
|
};
|
|
17722
17865
|
function containsPathTraversal2(inputPath) {
|
|
17723
|
-
const normalized =
|
|
17866
|
+
const normalized = path12__default.normalize(inputPath);
|
|
17724
17867
|
return normalized.includes("..") || inputPath.includes("\0");
|
|
17725
17868
|
}
|
|
17726
17869
|
function isPathWithinBase(filePath, baseDir) {
|
|
17727
17870
|
try {
|
|
17728
|
-
const resolvedFile =
|
|
17729
|
-
const resolvedBase =
|
|
17730
|
-
const normalizedFile =
|
|
17731
|
-
const normalizedBase =
|
|
17732
|
-
return normalizedFile === normalizedBase || normalizedFile.startsWith(normalizedBase +
|
|
17871
|
+
const resolvedFile = path12__default.resolve(filePath);
|
|
17872
|
+
const resolvedBase = path12__default.resolve(baseDir);
|
|
17873
|
+
const normalizedFile = path12__default.normalize(resolvedFile);
|
|
17874
|
+
const normalizedBase = path12__default.normalize(resolvedBase);
|
|
17875
|
+
return normalizedFile === normalizedBase || normalizedFile.startsWith(normalizedBase + path12__default.sep);
|
|
17733
17876
|
} catch {
|
|
17734
17877
|
return false;
|
|
17735
17878
|
}
|
|
@@ -17738,13 +17881,13 @@ function validateSchemaPath(dbPackagePath, projectRoot = process.cwd()) {
|
|
|
17738
17881
|
if (containsPathTraversal2(dbPackagePath)) {
|
|
17739
17882
|
throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
|
|
17740
17883
|
}
|
|
17741
|
-
const schemaEntry =
|
|
17742
|
-
const absoluteSchemaPath =
|
|
17884
|
+
const schemaEntry = path12__default.join(dbPackagePath, "src", "schema", "index.ts");
|
|
17885
|
+
const absoluteSchemaPath = path12__default.resolve(projectRoot, schemaEntry);
|
|
17743
17886
|
let resolvedProjectRoot;
|
|
17744
17887
|
try {
|
|
17745
17888
|
resolvedProjectRoot = realpathSync(projectRoot);
|
|
17746
17889
|
} catch {
|
|
17747
|
-
resolvedProjectRoot =
|
|
17890
|
+
resolvedProjectRoot = path12__default.resolve(projectRoot);
|
|
17748
17891
|
}
|
|
17749
17892
|
if (!isPathWithinBase(absoluteSchemaPath, resolvedProjectRoot)) {
|
|
17750
17893
|
throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
|
|
@@ -17876,7 +18019,7 @@ function diffSchema(params) {
|
|
|
17876
18019
|
};
|
|
17877
18020
|
}
|
|
17878
18021
|
function extractTablesFromIdempotentSql(idempotentDir, projectRoot = process.cwd()) {
|
|
17879
|
-
const fullPath =
|
|
18022
|
+
const fullPath = path12__default.resolve(projectRoot, idempotentDir);
|
|
17880
18023
|
if (!existsSync(fullPath)) {
|
|
17881
18024
|
return [];
|
|
17882
18025
|
}
|
|
@@ -17885,7 +18028,7 @@ function extractTablesFromIdempotentSql(idempotentDir, projectRoot = process.cwd
|
|
|
17885
18028
|
try {
|
|
17886
18029
|
const files = readdirSync(fullPath).filter((f) => f.endsWith(".sql"));
|
|
17887
18030
|
for (const file of files) {
|
|
17888
|
-
const filePath =
|
|
18031
|
+
const filePath = path12__default.join(fullPath, file);
|
|
17889
18032
|
const content = readFileSync(filePath, "utf-8");
|
|
17890
18033
|
const contentWithoutComments = content.replace(/--.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
|
|
17891
18034
|
for (const match of contentWithoutComments.matchAll(createTablePattern)) {
|
|
@@ -19404,6 +19547,93 @@ function releaseAdvisoryLock(dbUrl, verbose) {
|
|
|
19404
19547
|
}
|
|
19405
19548
|
}
|
|
19406
19549
|
|
|
19550
|
+
// src/commands/db/apply/helpers/plan-validator.ts
|
|
19551
|
+
init_esm_shims();
|
|
19552
|
+
var PlanHazardSchema = z.object({
|
|
19553
|
+
type: z.string().min(1),
|
|
19554
|
+
message: z.string()
|
|
19555
|
+
});
|
|
19556
|
+
var PlanStatementSchema = z.object({
|
|
19557
|
+
index: z.number().int().nonnegative(),
|
|
19558
|
+
sql: z.string().min(1),
|
|
19559
|
+
hazards: z.array(PlanHazardSchema)
|
|
19560
|
+
});
|
|
19561
|
+
var ValidatedPlanSchema = z.object({
|
|
19562
|
+
statements: z.array(PlanStatementSchema),
|
|
19563
|
+
totalStatements: z.number().int().nonnegative(),
|
|
19564
|
+
rawSql: z.string().min(1)
|
|
19565
|
+
});
|
|
19566
|
+
// Commit the statement currently being accumulated into `results`.
// No-op when there is no statement in progress or its SQL is empty/whitespace.
function flushStatement(current, results) {
  if (current == null) return;
  const joinedSql = current.sqlLines.join("\n").trim();
  if (joinedSql.length === 0) return;
  results.push({
    index: current.index,
    sql: joinedSql,
    hazards: current.hazards
  });
}
|
|
19572
|
+
// Parse plan output that contains explicit "-- Statement Idx. N" markers.
// Each marker starts a new statement; "-- Hazard TYPE: msg" comments attach
// hazards to the statement in progress; other comment lines are dropped.
// Returns an array of { index, sql, hazards } entries (empty statements omitted).
function parseWithStatementMarkers(lines) {
  const statements = [];
  // Local equivalent of flushStatement: commit the in-progress statement, if any.
  const commit = (stmt) => {
    if (stmt == null) return;
    const sql = stmt.sqlLines.join("\n").trim();
    if (!sql) return;
    statements.push({ index: stmt.index, sql, hazards: stmt.hazards });
  };
  let pending = null;
  for (const rawLine of lines) {
    const trimmed = rawLine.trim();
    const indexMarker = trimmed.match(/^-- Statement Idx\.\s*(\d+)/);
    if (indexMarker) {
      commit(pending);
      pending = { index: Number.parseInt(indexMarker[1], 10), sqlLines: [], hazards: [] };
    } else if (pending) {
      const hazardMarker = trimmed.match(/^-- Hazard (\w+): (.+)/);
      if (hazardMarker) {
        pending.hazards.push({ type: hazardMarker[1], message: hazardMarker[2] });
      } else if (trimmed && !trimmed.startsWith("--")) {
        // Keep the raw (untrimmed) line so SQL indentation is preserved.
        pending.sqlLines.push(rawLine);
      }
    }
  }
  commit(pending);
  return statements;
}
|
|
19595
|
+
// Fallback parser for plan output without "-- Statement Idx." markers.
// Collects all SQL lines into one statement at index 0, extracting any
// "-- Hazard TYPE: msg" comments as hazards and dropping other comments.
// Returns [] when no non-comment SQL remains.
function parseAsSingleStatement(lines) {
  const hazards = [];
  const sqlLines = [];
  for (const rawLine of lines) {
    const trimmed = rawLine.trim();
    const hazardMarker = trimmed.match(/^-- Hazard (\w+): (.+)/);
    if (hazardMarker) {
      hazards.push({ type: hazardMarker[1], message: hazardMarker[2] });
    } else if (trimmed && !trimmed.startsWith("--")) {
      // Preserve original indentation by pushing the untrimmed line.
      sqlLines.push(rawLine);
    }
  }
  const sql = sqlLines.join("\n").trim();
  return sql ? [{ index: 0, sql, hazards }] : [];
}
|
|
19613
|
+
// Parse raw pg-schema-diff plan output into a validated plan object.
// Chooses the marker-aware parser when "-- Statement Idx." markers are present,
// otherwise treats the whole output as a single statement. The result is
// validated against ValidatedPlanSchema before being returned.
function parsePlanOutput(planOutput) {
  const lines = planOutput.split("\n");
  const markerPattern = /^-- Statement Idx\.\s*\d+/;
  const usesMarkers = lines.some((line) => markerPattern.test(line.trim()));
  const statements = usesMarkers
    ? parseWithStatementMarkers(lines)
    : parseAsSingleStatement(lines);
  return ValidatedPlanSchema.parse({
    statements,
    totalStatements: statements.length,
    rawSql: planOutput
  });
}
|
|
19624
|
+
// Safety gate before executing a plan: every hazard attached to any statement
// must appear in `allowedHazardTypes`, otherwise throw. An empty plan always passes.
function validatePlanForExecution(plan, allowedHazardTypes) {
  if (plan.totalStatements === 0) return;
  const approved = new Set(allowedHazardTypes);
  const unapproved = [];
  for (const statement of plan.statements) {
    for (const hazard of statement.hazards) {
      if (!approved.has(hazard.type)) unapproved.push(hazard);
    }
  }
  if (unapproved.length === 0) return;
  const offendingTypes = [...new Set(unapproved.map((hazard) => hazard.type))].join(", ");
  throw new Error(
    `Plan contains unapproved hazards: ${offendingTypes}. Approved: [${allowedHazardTypes.join(", ")}]. This is a safety check \u2014 hazards must be approved before execution.`
  );
}
|
|
19636
|
+
|
|
19407
19637
|
// src/commands/db/apply/helpers/pg-schema-diff-helpers.ts
|
|
19408
19638
|
init_esm_shims();
|
|
19409
19639
|
var logger4 = createCLILogger("db:apply");
|
|
@@ -19749,49 +19979,70 @@ function calculateBackoffDelay(attempt, maxDelayMs = DEFAULT_MAX_DELAY_MS) {
|
|
|
19749
19979
|
function isLockTimeoutError(errorOutput) {
|
|
19750
19980
|
return errorOutput.includes("lock_timeout") || errorOutput.includes("canceling statement due to lock timeout") || errorOutput.includes("could not obtain lock");
|
|
19751
19981
|
}
|
|
19752
|
-
async function
|
|
19982
|
+
async function executePlanSqlWithRetry(dbUrl, initialPlanSql, verbose, config) {
|
|
19753
19983
|
const maxRetries = config?.maxRetries ?? MAX_RETRIES;
|
|
19754
19984
|
const maxDelayMs = config?.maxDelayMs ?? DEFAULT_MAX_DELAY_MS;
|
|
19755
19985
|
let lastError = null;
|
|
19756
19986
|
let totalWaitMs = 0;
|
|
19987
|
+
let currentPlanSql = initialPlanSql;
|
|
19757
19988
|
for (let attempt = 0; attempt < maxRetries; attempt++) {
|
|
19758
19989
|
if (attempt > 0) {
|
|
19759
19990
|
const delay = calculateBackoffDelay(attempt - 1, maxDelayMs);
|
|
19760
19991
|
totalWaitMs += delay;
|
|
19761
19992
|
logger5.info(`Retry ${attempt}/${maxRetries - 1} after ${Math.round(delay)}ms...`);
|
|
19762
19993
|
await sleep(delay);
|
|
19994
|
+
if (config?.rePlanFn) {
|
|
19995
|
+
const freshPlan = config.rePlanFn();
|
|
19996
|
+
if (freshPlan === null) {
|
|
19997
|
+
return { success: true, attempts: attempt, totalWaitMs };
|
|
19998
|
+
}
|
|
19999
|
+
currentPlanSql = freshPlan;
|
|
20000
|
+
}
|
|
19763
20001
|
}
|
|
19764
|
-
const
|
|
19765
|
-
|
|
19766
|
-
stdio: ["pipe", "pipe", "pipe"]
|
|
19767
|
-
});
|
|
19768
|
-
const stdout = applyResult.stdout || "";
|
|
19769
|
-
const stderr = applyResult.stderr || "";
|
|
19770
|
-
if (verbose) {
|
|
19771
|
-
if (stdout) process.stdout.write(stdout);
|
|
19772
|
-
if (stderr) process.stderr.write(stderr);
|
|
19773
|
-
}
|
|
19774
|
-
if (applyResult.status === 0) {
|
|
20002
|
+
const plan = parsePlanOutput(currentPlanSql);
|
|
20003
|
+
if (plan.totalStatements === 0) {
|
|
19775
20004
|
return { success: true, attempts: attempt, totalWaitMs };
|
|
19776
20005
|
}
|
|
19777
|
-
|
|
19778
|
-
|
|
19779
|
-
logger5.warn(`Lock timeout on attempt ${attempt + 1}/${maxRetries}`);
|
|
19780
|
-
lastError = new Error(`lock_timeout: ${errorOutput}`);
|
|
19781
|
-
continue;
|
|
20006
|
+
if (config?.allowedHazardTypes) {
|
|
20007
|
+
validatePlanForExecution(plan, config.allowedHazardTypes);
|
|
19782
20008
|
}
|
|
19783
|
-
|
|
19784
|
-
|
|
19785
|
-
|
|
19786
|
-
|
|
19787
|
-
|
|
20009
|
+
if (verbose) {
|
|
20010
|
+
logger5.debug(`Plan validated: ${plan.totalStatements} statement(s)`);
|
|
20011
|
+
}
|
|
20012
|
+
const planFile = join(tmpdir(), `runa-plan-${Date.now()}-${attempt}.sql`);
|
|
20013
|
+
const wrappedSql = `SET lock_timeout = '50ms';
|
|
20014
|
+
|
|
20015
|
+
${currentPlanSql}`;
|
|
20016
|
+
writeFileSync(planFile, wrappedSql, "utf-8");
|
|
20017
|
+
try {
|
|
20018
|
+
const result = psqlSyncFile({ databaseUrl: dbUrl, filePath: planFile, onErrorStop: true });
|
|
20019
|
+
if (verbose) {
|
|
20020
|
+
if (result.stdout) process.stdout.write(result.stdout);
|
|
20021
|
+
if (result.stderr) process.stderr.write(result.stderr);
|
|
20022
|
+
}
|
|
20023
|
+
if (result.status === 0) {
|
|
20024
|
+
return { success: true, attempts: attempt, totalWaitMs };
|
|
20025
|
+
}
|
|
20026
|
+
const errorOutput = result.stderr || result.stdout || "Unknown error";
|
|
20027
|
+
if (isLockTimeoutError(errorOutput)) {
|
|
20028
|
+
logger5.warn(`Lock timeout on attempt ${attempt + 1}/${maxRetries}`);
|
|
20029
|
+
lastError = new Error(`lock_timeout: ${errorOutput}`);
|
|
20030
|
+
continue;
|
|
20031
|
+
}
|
|
20032
|
+
logger5.error("Plan execution failed:");
|
|
20033
|
+
logger5.error(errorOutput);
|
|
20034
|
+
return {
|
|
20035
|
+
success: false,
|
|
20036
|
+
error: new Error(`Plan execution failed: ${errorOutput}`),
|
|
20037
|
+
attempts: attempt,
|
|
20038
|
+
totalWaitMs
|
|
20039
|
+
};
|
|
20040
|
+
} finally {
|
|
20041
|
+
try {
|
|
20042
|
+
unlinkSync(planFile);
|
|
20043
|
+
} catch {
|
|
20044
|
+
}
|
|
19788
20045
|
}
|
|
19789
|
-
return {
|
|
19790
|
-
success: false,
|
|
19791
|
-
error: new Error(`pg-schema-diff apply failed: ${errorOutput}`),
|
|
19792
|
-
attempts: attempt,
|
|
19793
|
-
totalWaitMs
|
|
19794
|
-
};
|
|
19795
20046
|
}
|
|
19796
20047
|
logger5.error(`Migration failed after ${maxRetries} attempts (total wait: ${totalWaitMs}ms)`);
|
|
19797
20048
|
return {
|
|
@@ -20087,48 +20338,58 @@ function handleFreshDbCase(input3, dbUrl, targetDir) {
|
|
|
20087
20338
|
return { sql: "", hazards: [], applied: true };
|
|
20088
20339
|
}
|
|
20089
20340
|
}
|
|
20090
|
-
async function applyWithLockAndRetry(dbUrl, schemasDir, includeSchemas, input3, planOutput, hazards) {
|
|
20091
|
-
logger6.step("Applying schema changes...");
|
|
20341
|
+
// Apply schema changes under an advisory lock, retrying via executePlanSqlWithRetry.
// A fresh plan is computed on each retry (rePlanFn); a null re-plan means nothing
// is left to apply. The advisory lock is always released in the finally block,
// with release failures logged rather than rethrown.
async function applyWithLockAndRetry(dbUrl, schemasDir, includeSchemas, input3, planOutput, hazards, tempDbDsn) {
  logger6.step("Applying schema changes (plan+psql)...");
  if (!acquireAdvisoryLock(dbUrl, input3.verbose)) {
    throw new Error(
      "Could not acquire migration lock. Another migration may be running. Wait for it to complete or manually release the lock."
    );
  }
  try {
    // Recompute the plan before each retry; null signals nothing left to apply.
    const rePlanFn = () => {
      const { planOutput: freshPlan } = executePgSchemaDiffPlan(
        dbUrl,
        schemasDir,
        includeSchemas,
        input3.verbose,
        { tempDbDsn }
      );
      if (!freshPlan.trim() || freshPlan.includes("No changes")) {
        return null;
      }
      return freshPlan;
    };
    const result = await executePlanSqlWithRetry(dbUrl, planOutput, input3.verbose, {
      maxDelayMs: input3.maxLockWaitMs,
      allowedHazardTypes: buildAllowedHazards(input3),
      rePlanFn
    });
    if (!result.success) {
      throw result.error || new Error("Migration failed");
    }
    if (input3.verbose && result.attempts > 0) {
      logger6.debug(
        `Retry metrics: ${result.attempts} attempts, ${result.totalWaitMs}ms total wait`
      );
    }
    logger6.success("Schema changes applied");
    return {
      sql: planOutput,
      hazards,
      applied: true,
      retryAttempts: result.attempts,
      retryWaitMs: result.totalWaitMs
    };
  } finally {
    try {
      releaseAdvisoryLock(dbUrl, input3.verbose);
    } catch (lockError) {
      logger6.warn(
        `Failed to release advisory lock: ${lockError instanceof Error ? lockError.message : "Unknown error"}`
      );
    }
  }
}
|
|
20133
20394
|
var ROLE_PASSWORD_CONFIGS = [
|
|
20134
20395
|
{
|
|
@@ -20239,45 +20500,66 @@ function shouldSkipInProduction(file, env2, verbose) {
|
|
|
20239
20500
|
}
|
|
20240
20501
|
return true;
|
|
20241
20502
|
}
|
|
20242
|
-
function applySingleIdempotentFile(dbUrl, schemasDir, file, verbose) {
|
|
20503
|
+
// Run one idempotent SQL file via psql (fail-fast with ON_ERROR_STOP).
// Returns true on success. On failure during the "pre" pass the file is
// deferred (returns false) so it can be retried after declarative tables
// exist; on any other pass a failure throws with masked credentials.
function applySingleIdempotentFile(dbUrl, schemasDir, file, verbose, pass) {
  const filePath = join(schemasDir, file);
  if (verbose) logger6.debug(`Applying ${file}...`);
  const result = psqlSyncFile({
    databaseUrl: dbUrl,
    filePath,
    onErrorStop: true
  });
  if (verbose) {
    if (result.stdout) process.stdout.write(result.stdout);
    if (result.stderr) process.stderr.write(result.stderr);
  }
  if (result.status === 0) return true;
  if (pass === "pre") {
    // Graceful skip: the 2nd pass retries after pg-schema-diff has run.
    if (verbose) logger6.warn(`Skipped ${file} (will retry in 2nd pass)`);
    return false;
  }
  const errorMsg = result.stderr ? maskDbCredentials(result.stderr) : "";
  throw new Error(`Failed to apply idempotent schema: ${file}\n${errorMsg}`);
}
|
|
20261
|
-
var applyIdempotentSchemas = fromPromise(async ({ input: { input: input3, targetDir } }) => {
|
|
20528
|
+
// XState actor: apply all idempotent schema files in sorted order for the given
// pass ("pre" or "post"). Files failing in the "pre" pass are counted as skipped
// and deferred; afterwards RBAC role passwords are set. Returns counts for the
// machine's metrics ({ filesApplied, filesSkipped, rolePasswordsSet }).
var applyIdempotentSchemas = fromPromise(async ({ input: { input: input3, targetDir, pass } }) => {
  checkPasswordSecurity();
  const schemasDir = join(targetDir, "supabase/schemas/idempotent");
  const dbUrl = getDbUrl(input3);
  let filesApplied = 0;
  let filesSkipped = 0;
  if (!existsSync(schemasDir)) {
    logger6.info("No idempotent schemas found");
  } else {
    const sqlFiles = readdirSync(schemasDir)
      .filter((name) => name.endsWith(".sql"))
      .sort();
    if (sqlFiles.length > 0) {
      for (const sqlFile of sqlFiles) {
        if (shouldSkipInProduction(sqlFile, input3.env, input3.verbose)) continue;
        if (applySingleIdempotentFile(dbUrl, schemasDir, sqlFile, input3.verbose, pass)) {
          filesApplied += 1;
        } else {
          filesSkipped += 1;
        }
      }
      if (filesSkipped === 0) {
        logger6.success(`Applied ${filesApplied} idempotent schema(s)`);
      } else {
        logger6.success(
          `Applied ${filesApplied}/${filesApplied + filesSkipped} idempotent schema(s) (${filesSkipped} deferred to 2nd pass)`
        );
        if (filesSkipped === sqlFiles.length) {
          // Everything failing in one pass usually means a syntax error, not ordering.
          logger6.warn("ALL idempotent files skipped in 1st pass.");
          logger6.warn("This may indicate a syntax error. Check logs with --verbose.");
        }
      }
    }
  }
  const rolePasswordsSet = setRolePasswords(dbUrl, input3.verbose);
  return { filesApplied, filesSkipped, rolePasswordsSet };
});
|
|
20282
20564
|
var applyPgSchemaDiff = fromPromise(async ({ input: { input: input3, targetDir } }) => {
|
|
20283
20565
|
const schemasDir = join(targetDir, "supabase/schemas/declarative");
|
|
@@ -20346,7 +20628,15 @@ ${content}`;
|
|
|
20346
20628
|
displayCheckModeResults(planOutput);
|
|
20347
20629
|
return { sql: planOutput, hazards, applied: false };
|
|
20348
20630
|
}
|
|
20349
|
-
return applyWithLockAndRetry(
|
|
20631
|
+
return applyWithLockAndRetry(
|
|
20632
|
+
dbUrl,
|
|
20633
|
+
schemasDir,
|
|
20634
|
+
includeSchemas,
|
|
20635
|
+
input3,
|
|
20636
|
+
planOutput,
|
|
20637
|
+
hazards,
|
|
20638
|
+
shadowDb?.dsn
|
|
20639
|
+
);
|
|
20350
20640
|
} finally {
|
|
20351
20641
|
if (shadowDb) {
|
|
20352
20642
|
try {
|
|
@@ -20458,6 +20748,11 @@ var DbApplyMetricsSchema = z.object({
|
|
|
20458
20748
|
z.object({
|
|
20459
20749
|
success: z.boolean(),
|
|
20460
20750
|
idempotentSchemasApplied: z.number(),
|
|
20751
|
+
/**
|
|
20752
|
+
* Number of idempotent schemas deferred during 1st pass (graceful skip).
|
|
20753
|
+
* These are retried in 2nd pass after declarative tables exist.
|
|
20754
|
+
*/
|
|
20755
|
+
idempotentSchemasSkipped: z.number().optional(),
|
|
20461
20756
|
/**
|
|
20462
20757
|
* Number of RBAC role passwords set (drizzle_app, drizzle_service).
|
|
20463
20758
|
* Set when DRIZZLE_APP_PASSWORD or DRIZZLE_SERVICE_PASSWORD env vars are present.
|
|
@@ -20578,7 +20873,9 @@ var dbApplyMachine = setup({
|
|
|
20578
20873
|
targetDir: input3.targetDir,
|
|
20579
20874
|
// 2-pass idempotent
|
|
20580
20875
|
idempotentPreApplied: 0,
|
|
20876
|
+
idempotentPreSkipped: 0,
|
|
20581
20877
|
idempotentPostApplied: 0,
|
|
20878
|
+
idempotentPostSkipped: 0,
|
|
20582
20879
|
rolePasswordsSet: 0,
|
|
20583
20880
|
schemaChangesApplied: false,
|
|
20584
20881
|
hazards: [],
|
|
@@ -20609,11 +20906,16 @@ var dbApplyMachine = setup({
|
|
|
20609
20906
|
entry: assign({ idempotentPreStartTime: () => Date.now() }),
|
|
20610
20907
|
invoke: {
|
|
20611
20908
|
src: "applyIdempotentSchemas",
|
|
20612
|
-
input: ({ context }) => ({
|
|
20909
|
+
input: ({ context }) => ({
|
|
20910
|
+
input: context.input,
|
|
20911
|
+
targetDir: context.targetDir,
|
|
20912
|
+
pass: "pre"
|
|
20913
|
+
}),
|
|
20613
20914
|
onDone: {
|
|
20614
20915
|
target: "applyingPgSchemaDiff",
|
|
20615
20916
|
actions: assign({
|
|
20616
20917
|
idempotentPreApplied: ({ event }) => event.output.filesApplied,
|
|
20918
|
+
idempotentPreSkipped: ({ event }) => event.output.filesSkipped,
|
|
20617
20919
|
rolePasswordsSet: ({ event }) => event.output.rolePasswordsSet,
|
|
20618
20920
|
idempotentPreEndTime: () => Date.now()
|
|
20619
20921
|
})
|
|
@@ -20672,11 +20974,16 @@ var dbApplyMachine = setup({
|
|
|
20672
20974
|
entry: assign({ idempotentPostStartTime: () => Date.now() }),
|
|
20673
20975
|
invoke: {
|
|
20674
20976
|
src: "applyIdempotentSchemas",
|
|
20675
|
-
input: ({ context }) => ({
|
|
20977
|
+
input: ({ context }) => ({
|
|
20978
|
+
input: context.input,
|
|
20979
|
+
targetDir: context.targetDir,
|
|
20980
|
+
pass: "post"
|
|
20981
|
+
}),
|
|
20676
20982
|
onDone: {
|
|
20677
20983
|
target: "applyingSeeds",
|
|
20678
20984
|
actions: assign({
|
|
20679
20985
|
idempotentPostApplied: ({ event }) => event.output.filesApplied,
|
|
20986
|
+
idempotentPostSkipped: ({ event }) => event.output.filesSkipped,
|
|
20680
20987
|
idempotentPostEndTime: () => Date.now()
|
|
20681
20988
|
})
|
|
20682
20989
|
},
|
|
@@ -20734,9 +21041,11 @@ var dbApplyMachine = setup({
|
|
|
20734
21041
|
retryAttempts: context.retryAttempts > 0 ? context.retryAttempts : void 0
|
|
20735
21042
|
};
|
|
20736
21043
|
const totalIdempotentApplied = context.idempotentPreApplied + context.idempotentPostApplied;
|
|
21044
|
+
const totalIdempotentSkipped = context.idempotentPreSkipped + context.idempotentPostSkipped;
|
|
20737
21045
|
return {
|
|
20738
21046
|
success: !context.error,
|
|
20739
21047
|
idempotentSchemasApplied: totalIdempotentApplied,
|
|
21048
|
+
idempotentSchemasSkipped: totalIdempotentSkipped > 0 ? totalIdempotentSkipped : void 0,
|
|
20740
21049
|
rolePasswordsSet: context.rolePasswordsSet > 0 ? context.rolePasswordsSet : void 0,
|
|
20741
21050
|
schemaChangesApplied: context.schemaChangesApplied,
|
|
20742
21051
|
hazards: context.hazards,
|
|
@@ -21421,7 +21730,7 @@ async function validateGitHubOutputPath(filePath) {
|
|
|
21421
21730
|
if (!filePath || filePath.trim().length === 0) {
|
|
21422
21731
|
return { valid: false, resolvedPath: null, error: "Empty file path" };
|
|
21423
21732
|
}
|
|
21424
|
-
const normalizedPath =
|
|
21733
|
+
const normalizedPath = path12__default.normalize(filePath);
|
|
21425
21734
|
if (normalizedPath.includes("..")) {
|
|
21426
21735
|
return {
|
|
21427
21736
|
valid: false,
|
|
@@ -21429,7 +21738,7 @@ async function validateGitHubOutputPath(filePath) {
|
|
|
21429
21738
|
error: "Path traversal detected (..) in file path"
|
|
21430
21739
|
};
|
|
21431
21740
|
}
|
|
21432
|
-
const absolutePath =
|
|
21741
|
+
const absolutePath = path12__default.resolve(normalizedPath);
|
|
21433
21742
|
for (const forbidden of FORBIDDEN_PATHS) {
|
|
21434
21743
|
if (absolutePath.startsWith(forbidden) || absolutePath.includes(forbidden)) {
|
|
21435
21744
|
return {
|
|
@@ -21850,7 +22159,7 @@ async function analyzeSchemaChanges() {
|
|
|
21850
22159
|
try {
|
|
21851
22160
|
const { getDatabasePackagePath: getDatabasePackagePath2 } = await Promise.resolve().then(() => (init_config_loader(), config_loader_exports));
|
|
21852
22161
|
const dbPath = await getDatabasePackagePath2();
|
|
21853
|
-
const schemaPath =
|
|
22162
|
+
const schemaPath = path12__default.join(dbPath, "src/schema/");
|
|
21854
22163
|
const { stdout } = await execa("git", ["diff", "--cached", "--", schemaPath]);
|
|
21855
22164
|
const lines = stdout.split("\n");
|
|
21856
22165
|
return parseDiffLines(lines);
|
|
@@ -22189,7 +22498,7 @@ function runDatabasePackageChecks(result, logger16, step) {
|
|
|
22189
22498
|
logger16.error("Database package path is missing");
|
|
22190
22499
|
return null;
|
|
22191
22500
|
}
|
|
22192
|
-
logger16.success(`Found database package: ${
|
|
22501
|
+
logger16.success(`Found database package: ${path12__default.basename(dbPackagePath)}`);
|
|
22193
22502
|
logger16.step("Checking schema files", step.next());
|
|
22194
22503
|
const schemaCheck = checkSchemaFiles(dbPackagePath);
|
|
22195
22504
|
if (!schemaCheck.exists) {
|
|
@@ -22337,12 +22646,12 @@ async function testDatabaseConnection(projectRoot) {
|
|
|
22337
22646
|
function checkDatabasePackage() {
|
|
22338
22647
|
const cwd = process.cwd();
|
|
22339
22648
|
const locations = [
|
|
22340
|
-
|
|
22341
|
-
|
|
22342
|
-
|
|
22649
|
+
path12__default.join(cwd, "packages", "database"),
|
|
22650
|
+
path12__default.join(cwd, "packages", "db"),
|
|
22651
|
+
path12__default.join(cwd, "db")
|
|
22343
22652
|
];
|
|
22344
22653
|
for (const location of locations) {
|
|
22345
|
-
const configPath =
|
|
22654
|
+
const configPath = path12__default.join(location, "drizzle.config.ts");
|
|
22346
22655
|
if (existsSync(configPath)) {
|
|
22347
22656
|
return { exists: true, path: location };
|
|
22348
22657
|
}
|
|
@@ -22387,7 +22696,7 @@ function countTsFilesRecursive(dir) {
|
|
|
22387
22696
|
try {
|
|
22388
22697
|
const entries = readdirSync(dir, { withFileTypes: true });
|
|
22389
22698
|
for (const entry of entries) {
|
|
22390
|
-
const fullPath =
|
|
22699
|
+
const fullPath = path12__default.join(dir, entry.name);
|
|
22391
22700
|
if (entry.isDirectory()) {
|
|
22392
22701
|
count += countTsFilesRecursive(fullPath);
|
|
22393
22702
|
} else if (entry.isFile() && entry.name.endsWith(".ts")) {
|
|
@@ -22399,7 +22708,7 @@ function countTsFilesRecursive(dir) {
|
|
|
22399
22708
|
return count;
|
|
22400
22709
|
}
|
|
22401
22710
|
function checkSchemaFiles(dbPackagePath) {
|
|
22402
|
-
const schemaDir =
|
|
22711
|
+
const schemaDir = path12__default.join(dbPackagePath, "src", "schema");
|
|
22403
22712
|
if (!existsSync(schemaDir)) {
|
|
22404
22713
|
return { exists: false, count: 0 };
|
|
22405
22714
|
}
|
|
@@ -22438,7 +22747,7 @@ async function runOrphanCheck(env2, dbPackagePath, result, logger16, step) {
|
|
|
22438
22747
|
const { expectedTables, expectedEnums } = await extractSchemaTablesAndEnums(dbPackagePath);
|
|
22439
22748
|
const databaseUrl = tryResolveDatabaseUrl("local") || buildLocalDatabaseUrl(process.cwd());
|
|
22440
22749
|
const { dbTables, dbEnums } = await fetchDbTablesAndEnums(databaseUrl, {
|
|
22441
|
-
schemaDir:
|
|
22750
|
+
schemaDir: path12__default.join(dbPackagePath, "src", "schema")
|
|
22442
22751
|
});
|
|
22443
22752
|
let excludeFromOrphanDetection = [];
|
|
22444
22753
|
let idempotentSqlDir = "supabase/schemas/idempotent";
|
|
@@ -22512,7 +22821,7 @@ function runExtensionConfigCheck(_result, logger16, step) {
|
|
|
22512
22821
|
async function runSqlSchemaRiskCheck(result, logger16, step) {
|
|
22513
22822
|
logger16.step("Checking SQL schema for risky patterns", step.next());
|
|
22514
22823
|
const cwd = process.cwd();
|
|
22515
|
-
const sqlDir =
|
|
22824
|
+
const sqlDir = path12__default.join(cwd, "supabase", "schemas", "declarative");
|
|
22516
22825
|
if (!existsSync(sqlDir)) {
|
|
22517
22826
|
logger16.success("No SQL schema directory found (supabase/schemas/declarative/)");
|
|
22518
22827
|
return;
|
|
@@ -22525,7 +22834,7 @@ async function runSqlSchemaRiskCheck(result, logger16, step) {
|
|
|
22525
22834
|
}
|
|
22526
22835
|
const allRisks = [];
|
|
22527
22836
|
for (const sqlFile of sqlFiles) {
|
|
22528
|
-
const filePath =
|
|
22837
|
+
const filePath = path12__default.join(sqlDir, sqlFile);
|
|
22529
22838
|
const risks = await detectSchemaRisks(filePath);
|
|
22530
22839
|
for (const risk of risks) {
|
|
22531
22840
|
allRisks.push({ ...risk, file: sqlFile });
|
|
@@ -24305,7 +24614,7 @@ async function checkPort(port) {
|
|
|
24305
24614
|
};
|
|
24306
24615
|
}
|
|
24307
24616
|
function detectSupabasePortsFromConfig() {
|
|
24308
|
-
const configPath =
|
|
24617
|
+
const configPath = path12__default.join(process.cwd(), "supabase", "config.toml");
|
|
24309
24618
|
const BASE_PORTS2 = { api: 54321, db: 54322, studio: 54323, inbucket: 54324 };
|
|
24310
24619
|
if (!existsSync(configPath)) {
|
|
24311
24620
|
return Object.values(BASE_PORTS2);
|
|
@@ -24388,6 +24697,9 @@ function diagnoseInitFailure(errorMessage) {
|
|
|
24388
24697
|
return results;
|
|
24389
24698
|
}
|
|
24390
24699
|
|
|
24700
|
+
// src/commands/db/commands/db-lifecycle.ts
|
|
24701
|
+
init_local_supabase();
|
|
24702
|
+
|
|
24391
24703
|
// src/utils/port-allocator.ts
|
|
24392
24704
|
init_esm_shims();
|
|
24393
24705
|
function getSupabasePorts(projectPath) {
|
|
@@ -24395,7 +24707,7 @@ function getSupabasePorts(projectPath) {
|
|
|
24395
24707
|
return getPortsWithOffset(offset);
|
|
24396
24708
|
}
|
|
24397
24709
|
async function updateSupabaseConfigPortsSafe(projectPath) {
|
|
24398
|
-
const configPath =
|
|
24710
|
+
const configPath = path12__default.join(projectPath, "supabase", "config.toml");
|
|
24399
24711
|
const resolved = await resolveAvailablePorts(projectPath);
|
|
24400
24712
|
if (!resolved) {
|
|
24401
24713
|
const ports = getSupabasePorts(projectPath);
|
|
@@ -24438,7 +24750,7 @@ function getPortAllocationSummary(projectPath) {
|
|
|
24438
24750
|
const ports = getSupabasePorts(projectPath);
|
|
24439
24751
|
const offset = calculatePortOffset(projectPath);
|
|
24440
24752
|
return [
|
|
24441
|
-
`Port allocation for: ${
|
|
24753
|
+
`Port allocation for: ${path12__default.basename(projectPath)}`,
|
|
24442
24754
|
` Slot: ${offset / 10} (hash-based, offset=${offset})`,
|
|
24443
24755
|
` API: ${ports.api}`,
|
|
24444
24756
|
` DB: ${ports.db}`,
|
|
@@ -24472,7 +24784,7 @@ function parseSeedPaths(configPath) {
|
|
|
24472
24784
|
}
|
|
24473
24785
|
}
|
|
24474
24786
|
async function applySeedFile(seedPath, dbUrl) {
|
|
24475
|
-
const supabaseDir =
|
|
24787
|
+
const supabaseDir = path12__default.join(process.cwd(), "supabase");
|
|
24476
24788
|
const absolutePath = resolveSafePath(supabaseDir, seedPath);
|
|
24477
24789
|
if (!existsSync(absolutePath)) {
|
|
24478
24790
|
return;
|
|
@@ -24483,12 +24795,12 @@ async function applySeedFile(seedPath, dbUrl) {
|
|
|
24483
24795
|
});
|
|
24484
24796
|
}
|
|
24485
24797
|
async function applySeeds2(configPath) {
|
|
24486
|
-
const configFile =
|
|
24798
|
+
const configFile = path12__default.join(process.cwd(), "supabase", "config.toml");
|
|
24487
24799
|
const seedPaths = parseSeedPaths(configFile);
|
|
24488
24800
|
if (seedPaths.length === 0) {
|
|
24489
24801
|
return;
|
|
24490
24802
|
}
|
|
24491
|
-
const supabaseDir =
|
|
24803
|
+
const supabaseDir = path12__default.join(process.cwd(), "supabase");
|
|
24492
24804
|
const safePaths = filterSafePaths(seedPaths, supabaseDir);
|
|
24493
24805
|
if (safePaths.length === 0) {
|
|
24494
24806
|
return;
|
|
@@ -24615,6 +24927,20 @@ var startCommand = new Command("start").description("Start local Supabase with a
|
|
|
24615
24927
|
if (options.bootstrap) {
|
|
24616
24928
|
logger16.info(" \u2192 Bare schemas created. Run `runa db sync` to apply full schema.");
|
|
24617
24929
|
}
|
|
24930
|
+
try {
|
|
24931
|
+
const bridgeResult = writeEnvLocalBridge(
|
|
24932
|
+
projectRoot,
|
|
24933
|
+
detectLocalSupabasePorts(projectRoot)
|
|
24934
|
+
);
|
|
24935
|
+
if (bridgeResult.written) {
|
|
24936
|
+
logger16.info(
|
|
24937
|
+
` \u2192 .env.local written (port override: api=${bridgeResult.ports.api}, db=${bridgeResult.ports.db})`
|
|
24938
|
+
);
|
|
24939
|
+
} else if (bridgeResult.reason === "user-managed") {
|
|
24940
|
+
logger16.warn(" \u2192 .env.local exists but is user-managed; port override skipped");
|
|
24941
|
+
}
|
|
24942
|
+
} catch {
|
|
24943
|
+
}
|
|
24618
24944
|
emitJsonSuccess(startCommand, DbLifecycleStartOutputSchema, result);
|
|
24619
24945
|
} catch (error) {
|
|
24620
24946
|
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
@@ -24635,6 +24961,15 @@ var stopCommand = new Command("stop").description("Stop local Supabase").action(
|
|
|
24635
24961
|
logger16.info("Stopping local Supabase...");
|
|
24636
24962
|
const result = await dbStop({});
|
|
24637
24963
|
logger16.success("Supabase stopped successfully");
|
|
24964
|
+
try {
|
|
24965
|
+
const bridgeResult = removeEnvLocalBridge(process.cwd());
|
|
24966
|
+
if (bridgeResult.removed) {
|
|
24967
|
+
logger16.info(" \u2192 .env.local removed (runa-managed port override)");
|
|
24968
|
+
} else if (bridgeResult.reason === "user-managed") {
|
|
24969
|
+
logger16.warn(" \u2192 .env.local exists but is user-managed; skipping cleanup");
|
|
24970
|
+
}
|
|
24971
|
+
} catch {
|
|
24972
|
+
}
|
|
24638
24973
|
emitJsonSuccess(stopCommand, DbLifecycleStopOutputSchema, result);
|
|
24639
24974
|
} catch (error) {
|
|
24640
24975
|
throw new CLIError(
|
|
@@ -24730,6 +25065,20 @@ var resetCommand = new Command("reset").description("Reset local database with s
|
|
|
24730
25065
|
if (!options.skipSeeds) {
|
|
24731
25066
|
logger16.info(" \u2713 Seeds applied");
|
|
24732
25067
|
}
|
|
25068
|
+
try {
|
|
25069
|
+
const bridgeResult = writeEnvLocalBridge(
|
|
25070
|
+
projectRoot,
|
|
25071
|
+
detectLocalSupabasePorts(projectRoot)
|
|
25072
|
+
);
|
|
25073
|
+
if (bridgeResult.written) {
|
|
25074
|
+
logger16.info(
|
|
25075
|
+
` \u2192 .env.local written (port override: api=${bridgeResult.ports.api}, db=${bridgeResult.ports.db})`
|
|
25076
|
+
);
|
|
25077
|
+
} else if (bridgeResult.reason === "user-managed") {
|
|
25078
|
+
logger16.warn(" \u2192 .env.local exists but is user-managed; port override skipped");
|
|
25079
|
+
}
|
|
25080
|
+
} catch {
|
|
25081
|
+
}
|
|
24733
25082
|
emitJsonSuccess(resetCommand, DbLifecycleResetOutputSchema, {
|
|
24734
25083
|
...resetResult,
|
|
24735
25084
|
stepsCompleted: allSteps
|
|
@@ -24912,7 +25261,7 @@ var validateCommand = new Command("validate").description("Validate schema files
|
|
|
24912
25261
|
const logger16 = createCLILogger("db:validate");
|
|
24913
25262
|
try {
|
|
24914
25263
|
logger16.section("Schema Validation");
|
|
24915
|
-
const schemasPath =
|
|
25264
|
+
const schemasPath = path12__default.join(process.cwd(), "packages", "database", "src", "schema");
|
|
24916
25265
|
if (!existsSync(schemasPath)) {
|
|
24917
25266
|
throw new CLIError("Schema directory not found", "SCHEMA_DIR_NOT_FOUND", [
|
|
24918
25267
|
`Expected location: ${schemasPath}`,
|
|
@@ -25077,8 +25426,8 @@ var generateCommand = new Command("generate").description("Generate TypeScript t
|
|
|
25077
25426
|
var listCommand = new Command("list").description("List managed schemas from drizzle.config.ts").option("--sql", "Output as SQL-compatible string for IN clauses").option("--json", "Output as JSON array").action(async (options) => {
|
|
25078
25427
|
const logger16 = createCLILogger("db:schema:list");
|
|
25079
25428
|
try {
|
|
25080
|
-
const dbPackagePath =
|
|
25081
|
-
if (!existsSync(
|
|
25429
|
+
const dbPackagePath = path12__default.join(process.cwd(), "packages", "database");
|
|
25430
|
+
if (!existsSync(path12__default.join(dbPackagePath, "drizzle.config.ts"))) {
|
|
25082
25431
|
throw new CLIError("drizzle.config.ts not found", "CONFIG_NOT_FOUND", [
|
|
25083
25432
|
`Expected location: ${dbPackagePath}/drizzle.config.ts`,
|
|
25084
25433
|
"Ensure you are in the project root",
|
|
@@ -26323,7 +26672,7 @@ var testGenCommand = new Command("test:gen").description("Generate pgTAP behavio
|
|
|
26323
26672
|
const databaseUrl = options.db || process.env.DATABASE_URL || getLocalDbUrl();
|
|
26324
26673
|
const schemas = options.schemas ? options.schemas.split(",") : void 0;
|
|
26325
26674
|
const dbPackage = databasePaths.package();
|
|
26326
|
-
const defaultOutputPath =
|
|
26675
|
+
const defaultOutputPath = path12__default.join(dbPackage, "tests/00_behavior.generated.test.sql");
|
|
26327
26676
|
const outputPath = options.output || defaultOutputPath;
|
|
26328
26677
|
spinner.text = "Generating pgTAP behavior tests...";
|
|
26329
26678
|
const result = await dbGeneratePgTapTests({
|
|
@@ -26393,7 +26742,7 @@ function getTablesWithTimestamps(dbUrl, schemas) {
|
|
|
26393
26742
|
JOIN pg_attribute a1 ON c.oid = a1.attrelid AND a1.attname = 'updated_at'
|
|
26394
26743
|
JOIN pg_attribute a2 ON c.oid = a2.attrelid AND a2.attname = 'created_at'
|
|
26395
26744
|
WHERE n.nspname IN (${schemasFilter})
|
|
26396
|
-
AND c.relkind
|
|
26745
|
+
AND c.relkind IN ('r', 'p')
|
|
26397
26746
|
ORDER BY n.nspname, c.relname;
|
|
26398
26747
|
`;
|
|
26399
26748
|
const conn = parsePostgresUrl(dbUrl);
|
|
@@ -27366,16 +27715,16 @@ function sanitizeErrorMessage(message) {
|
|
|
27366
27715
|
return sanitized;
|
|
27367
27716
|
}
|
|
27368
27717
|
function containsPathTraversal3(inputPath) {
|
|
27369
|
-
const normalized =
|
|
27718
|
+
const normalized = path12__default.normalize(inputPath);
|
|
27370
27719
|
return normalized.includes("..") || inputPath.includes("\0");
|
|
27371
27720
|
}
|
|
27372
27721
|
function isPathWithinBase2(filePath, baseDir) {
|
|
27373
27722
|
try {
|
|
27374
|
-
const resolvedFile =
|
|
27375
|
-
const resolvedBase =
|
|
27376
|
-
const normalizedFile =
|
|
27377
|
-
const normalizedBase =
|
|
27378
|
-
return normalizedFile === normalizedBase || normalizedFile.startsWith(normalizedBase +
|
|
27723
|
+
const resolvedFile = path12__default.resolve(filePath);
|
|
27724
|
+
const resolvedBase = path12__default.resolve(baseDir);
|
|
27725
|
+
const normalizedFile = path12__default.normalize(resolvedFile);
|
|
27726
|
+
const normalizedBase = path12__default.normalize(resolvedBase);
|
|
27727
|
+
return normalizedFile === normalizedBase || normalizedFile.startsWith(normalizedBase + path12__default.sep);
|
|
27379
27728
|
} catch {
|
|
27380
27729
|
return false;
|
|
27381
27730
|
}
|
|
@@ -27384,12 +27733,12 @@ function validateCustomWorkingDir(cwdPath, projectRoot) {
|
|
|
27384
27733
|
if (containsPathTraversal3(cwdPath)) {
|
|
27385
27734
|
throw new CLIError(ERROR_MESSAGES3.PATH_TRAVERSAL, "ENV_PULL_PATH_TRAVERSAL");
|
|
27386
27735
|
}
|
|
27387
|
-
const absolutePath =
|
|
27736
|
+
const absolutePath = path12__default.isAbsolute(cwdPath) ? cwdPath : path12__default.resolve(projectRoot, cwdPath);
|
|
27388
27737
|
let resolvedProjectRoot;
|
|
27389
27738
|
try {
|
|
27390
27739
|
resolvedProjectRoot = realpathSync(projectRoot);
|
|
27391
27740
|
} catch {
|
|
27392
|
-
resolvedProjectRoot =
|
|
27741
|
+
resolvedProjectRoot = path12__default.resolve(projectRoot);
|
|
27393
27742
|
}
|
|
27394
27743
|
if (!isPathWithinBase2(absolutePath, resolvedProjectRoot)) {
|
|
27395
27744
|
throw new CLIError(ERROR_MESSAGES3.PATH_TRAVERSAL, "ENV_PULL_PATH_TRAVERSAL");
|
|
@@ -29638,13 +29987,13 @@ z.object({
|
|
|
29638
29987
|
|
|
29639
29988
|
// src/commands/hotfix/metadata.ts
|
|
29640
29989
|
function getHotfixDir(targetDir) {
|
|
29641
|
-
return
|
|
29990
|
+
return path12__default.join(targetDir, ".runa", "hotfix");
|
|
29642
29991
|
}
|
|
29643
29992
|
function getCurrentFile(targetDir) {
|
|
29644
|
-
return
|
|
29993
|
+
return path12__default.join(getHotfixDir(targetDir), "current.json");
|
|
29645
29994
|
}
|
|
29646
29995
|
function getArchiveDir(targetDir) {
|
|
29647
|
-
return
|
|
29996
|
+
return path12__default.join(getHotfixDir(targetDir), "archive");
|
|
29648
29997
|
}
|
|
29649
29998
|
async function ensureHotfixDir(targetDir) {
|
|
29650
29999
|
await mkdir(getHotfixDir(targetDir), { recursive: true });
|
|
@@ -29654,7 +30003,7 @@ async function ensureArchiveDir(targetDir) {
|
|
|
29654
30003
|
}
|
|
29655
30004
|
async function getHotfixMetadata(input3 = {}) {
|
|
29656
30005
|
const parsed = GetHotfixInputSchema.parse(input3);
|
|
29657
|
-
const targetDir = parsed.targetDir ?
|
|
30006
|
+
const targetDir = parsed.targetDir ? path12__default.resolve(parsed.targetDir) : process.cwd();
|
|
29658
30007
|
const currentFile = getCurrentFile(targetDir);
|
|
29659
30008
|
if (!existsSync(currentFile)) {
|
|
29660
30009
|
return null;
|
|
@@ -29664,7 +30013,7 @@ async function getHotfixMetadata(input3 = {}) {
|
|
|
29664
30013
|
}
|
|
29665
30014
|
async function createHotfixMetadata(input3) {
|
|
29666
30015
|
const parsed = CreateHotfixMetadataInputSchema.parse(input3);
|
|
29667
|
-
const targetDir = parsed.targetDir ?
|
|
30016
|
+
const targetDir = parsed.targetDir ? path12__default.resolve(parsed.targetDir) : process.cwd();
|
|
29668
30017
|
await ensureHotfixDir(targetDir);
|
|
29669
30018
|
const metadata = {
|
|
29670
30019
|
branch: parsed.branch,
|
|
@@ -29681,7 +30030,7 @@ async function createHotfixMetadata(input3) {
|
|
|
29681
30030
|
}
|
|
29682
30031
|
async function updateHotfixStatus(input3) {
|
|
29683
30032
|
const parsed = UpdateHotfixInputSchema.parse(input3);
|
|
29684
|
-
const targetDir = parsed.targetDir ?
|
|
30033
|
+
const targetDir = parsed.targetDir ? path12__default.resolve(parsed.targetDir) : process.cwd();
|
|
29685
30034
|
const current = await getHotfixMetadata({ targetDir });
|
|
29686
30035
|
if (!current) {
|
|
29687
30036
|
throw createError("HOTFIX_NOT_FOUND");
|
|
@@ -29698,13 +30047,13 @@ async function updateHotfixStatus(input3) {
|
|
|
29698
30047
|
return updated;
|
|
29699
30048
|
}
|
|
29700
30049
|
async function archiveHotfix(targetDir) {
|
|
29701
|
-
const dir = targetDir ?
|
|
30050
|
+
const dir = targetDir ? path12__default.resolve(targetDir) : process.cwd();
|
|
29702
30051
|
const current = await getHotfixMetadata({ targetDir: dir });
|
|
29703
30052
|
if (!current) {
|
|
29704
30053
|
throw createError("HOTFIX_NOT_FOUND");
|
|
29705
30054
|
}
|
|
29706
30055
|
await ensureArchiveDir(dir);
|
|
29707
|
-
const archiveFile =
|
|
30056
|
+
const archiveFile = path12__default.join(
|
|
29708
30057
|
getArchiveDir(dir),
|
|
29709
30058
|
`${current.branch.replace(/\//g, "-")}-${Date.now()}.json`
|
|
29710
30059
|
);
|
|
@@ -29720,7 +30069,7 @@ async function archiveHotfix(targetDir) {
|
|
|
29720
30069
|
return archived;
|
|
29721
30070
|
}
|
|
29722
30071
|
async function cancelHotfix(targetDir) {
|
|
29723
|
-
const dir = targetDir ?
|
|
30072
|
+
const dir = targetDir ? path12__default.resolve(targetDir) : process.cwd();
|
|
29724
30073
|
await updateHotfixStatus({
|
|
29725
30074
|
status: "cancelled",
|
|
29726
30075
|
completedAt: (/* @__PURE__ */ new Date()).toISOString(),
|
|
@@ -29730,7 +30079,7 @@ async function cancelHotfix(targetDir) {
|
|
|
29730
30079
|
}
|
|
29731
30080
|
async function listArchivedHotfixes(input3 = {}) {
|
|
29732
30081
|
const parsed = ListArchivedHotfixesInputSchema.parse(input3);
|
|
29733
|
-
const targetDir = parsed.targetDir ?
|
|
30082
|
+
const targetDir = parsed.targetDir ? path12__default.resolve(parsed.targetDir) : process.cwd();
|
|
29734
30083
|
const archiveDir = getArchiveDir(targetDir);
|
|
29735
30084
|
if (!existsSync(archiveDir)) {
|
|
29736
30085
|
return { hotfixes: [] };
|
|
@@ -29739,7 +30088,7 @@ async function listArchivedHotfixes(input3 = {}) {
|
|
|
29739
30088
|
const hotfixes = [];
|
|
29740
30089
|
for (const file of files.filter((f) => f.endsWith(".json"))) {
|
|
29741
30090
|
try {
|
|
29742
|
-
const filePath =
|
|
30091
|
+
const filePath = path12__default.join(archiveDir, file);
|
|
29743
30092
|
if (!isPathContained(archiveDir, filePath)) {
|
|
29744
30093
|
continue;
|
|
29745
30094
|
}
|
|
@@ -31209,7 +31558,7 @@ init_esm_shims();
|
|
|
31209
31558
|
|
|
31210
31559
|
// src/constants/versions.ts
|
|
31211
31560
|
init_esm_shims();
|
|
31212
|
-
var COMPATIBLE_TEMPLATES_VERSION = "0.5.
|
|
31561
|
+
var COMPATIBLE_TEMPLATES_VERSION = "0.5.61";
|
|
31213
31562
|
var TEMPLATES_PACKAGE_NAME = "@r06-dev/runa-templates";
|
|
31214
31563
|
var GITHUB_PACKAGES_REGISTRY = "https://npm.pkg.github.com";
|
|
31215
31564
|
|
|
@@ -31243,13 +31592,13 @@ function validateVersion(version) {
|
|
|
31243
31592
|
}
|
|
31244
31593
|
}
|
|
31245
31594
|
function getCacheBase() {
|
|
31246
|
-
return
|
|
31595
|
+
return path12__default.join(os.homedir(), ".cache", "runa", "templates");
|
|
31247
31596
|
}
|
|
31248
31597
|
function getCacheDir(version) {
|
|
31249
31598
|
validateVersion(version);
|
|
31250
31599
|
const cacheBase = getCacheBase();
|
|
31251
|
-
const cacheDir =
|
|
31252
|
-
const resolvedCacheDir =
|
|
31600
|
+
const cacheDir = path12__default.join(cacheBase, version);
|
|
31601
|
+
const resolvedCacheDir = path12__default.resolve(cacheDir);
|
|
31253
31602
|
if (!isPathContained(cacheBase, resolvedCacheDir)) {
|
|
31254
31603
|
throw new CLIError(
|
|
31255
31604
|
"Security: cache directory would escape allowed location.",
|
|
@@ -31261,7 +31610,7 @@ function getCacheDir(version) {
|
|
|
31261
31610
|
}
|
|
31262
31611
|
function isCached(version) {
|
|
31263
31612
|
const cacheDir = getCacheDir(version);
|
|
31264
|
-
const templatesDir =
|
|
31613
|
+
const templatesDir = path12__default.join(cacheDir, "templates");
|
|
31265
31614
|
return fs5__default.existsSync(templatesDir);
|
|
31266
31615
|
}
|
|
31267
31616
|
var scopedAuthToken = null;
|
|
@@ -31312,13 +31661,13 @@ async function createTempPackageJson(tempDir, version) {
|
|
|
31312
31661
|
[TEMPLATES_PACKAGE_NAME]: version
|
|
31313
31662
|
}
|
|
31314
31663
|
};
|
|
31315
|
-
await writeFile(
|
|
31664
|
+
await writeFile(path12__default.join(tempDir, "package.json"), JSON.stringify(packageJson, null, 2));
|
|
31316
31665
|
}
|
|
31317
31666
|
async function createTempNpmrc(tempDir) {
|
|
31318
31667
|
const npmrc = `@r06-dev:registry=${GITHUB_PACKAGES_REGISTRY}
|
|
31319
31668
|
//npm.pkg.github.com/:_authToken=\${NODE_AUTH_TOKEN}
|
|
31320
31669
|
`;
|
|
31321
|
-
await writeFile(
|
|
31670
|
+
await writeFile(path12__default.join(tempDir, ".npmrc"), npmrc);
|
|
31322
31671
|
}
|
|
31323
31672
|
async function installTemplates(tempDir, authToken, verbose) {
|
|
31324
31673
|
try {
|
|
@@ -31376,10 +31725,10 @@ async function installTemplates(tempDir, authToken, verbose) {
|
|
|
31376
31725
|
async function verifyNoSymlinks(dir, baseDir) {
|
|
31377
31726
|
const entries = await readdir(dir, { withFileTypes: true });
|
|
31378
31727
|
for (const entry of entries) {
|
|
31379
|
-
const fullPath =
|
|
31728
|
+
const fullPath = path12__default.join(dir, entry.name);
|
|
31380
31729
|
const stats = await lstat(fullPath);
|
|
31381
31730
|
if (stats.isSymbolicLink()) {
|
|
31382
|
-
const relativePath =
|
|
31731
|
+
const relativePath = path12__default.relative(baseDir, fullPath);
|
|
31383
31732
|
throw new CLIError(
|
|
31384
31733
|
"Security: Symlink detected in template package.",
|
|
31385
31734
|
"SYMLINK_ATTACK_DETECTED",
|
|
@@ -31397,7 +31746,7 @@ async function verifyNoSymlinks(dir, baseDir) {
|
|
|
31397
31746
|
}
|
|
31398
31747
|
}
|
|
31399
31748
|
async function copyToCache(tempDir, cacheDir) {
|
|
31400
|
-
const sourceTemplates =
|
|
31749
|
+
const sourceTemplates = path12__default.join(tempDir, "node_modules", TEMPLATES_PACKAGE_NAME, "templates");
|
|
31401
31750
|
if (!fs5__default.existsSync(sourceTemplates)) {
|
|
31402
31751
|
throw new CLIError("Templates directory not found in package.", "TEMPLATES_DIR_NOT_FOUND", [
|
|
31403
31752
|
`Expected: ${sourceTemplates}`,
|
|
@@ -31407,7 +31756,7 @@ async function copyToCache(tempDir, cacheDir) {
|
|
|
31407
31756
|
}
|
|
31408
31757
|
await verifyNoSymlinks(sourceTemplates, sourceTemplates);
|
|
31409
31758
|
await mkdir(cacheDir, { recursive: true });
|
|
31410
|
-
const targetTemplates =
|
|
31759
|
+
const targetTemplates = path12__default.join(cacheDir, "templates");
|
|
31411
31760
|
await cp(sourceTemplates, targetTemplates, {
|
|
31412
31761
|
recursive: true,
|
|
31413
31762
|
dereference: false
|
|
@@ -31429,7 +31778,7 @@ async function fetchTemplates(options = {}) {
|
|
|
31429
31778
|
};
|
|
31430
31779
|
}
|
|
31431
31780
|
const cacheDir = getCacheDir(version);
|
|
31432
|
-
const templatesDir =
|
|
31781
|
+
const templatesDir = path12__default.join(cacheDir, "templates");
|
|
31433
31782
|
if (!fresh && isCached(version)) {
|
|
31434
31783
|
if (verbose) {
|
|
31435
31784
|
console.log(`Using cached templates v${version}: ${templatesDir}`);
|
|
@@ -31445,7 +31794,7 @@ async function fetchTemplates(options = {}) {
|
|
|
31445
31794
|
if (fresh && fs5__default.existsSync(cacheDir)) {
|
|
31446
31795
|
await rm(cacheDir, { recursive: true, force: true });
|
|
31447
31796
|
}
|
|
31448
|
-
const tempDir =
|
|
31797
|
+
const tempDir = path12__default.join(os.tmpdir(), `runa-templates-${Date.now()}`);
|
|
31449
31798
|
await mkdir(tempDir, { recursive: true });
|
|
31450
31799
|
try {
|
|
31451
31800
|
if (verbose) {
|
|
@@ -31473,11 +31822,11 @@ async function fetchTemplates(options = {}) {
|
|
|
31473
31822
|
}
|
|
31474
31823
|
var MAX_WORKSPACE_TRAVERSAL_DEPTH = 10;
|
|
31475
31824
|
function isLegitimateWorkspaceRoot(workspaceRoot) {
|
|
31476
|
-
const pnpmWorkspaceFile =
|
|
31825
|
+
const pnpmWorkspaceFile = path12__default.join(workspaceRoot, "pnpm-workspace.yaml");
|
|
31477
31826
|
if (!fs5__default.existsSync(pnpmWorkspaceFile)) {
|
|
31478
31827
|
return false;
|
|
31479
31828
|
}
|
|
31480
|
-
const rootPackageFile =
|
|
31829
|
+
const rootPackageFile = path12__default.join(workspaceRoot, "package.json");
|
|
31481
31830
|
if (!fs5__default.existsSync(rootPackageFile)) {
|
|
31482
31831
|
return false;
|
|
31483
31832
|
}
|
|
@@ -31491,24 +31840,24 @@ function isLegitimateWorkspaceRoot(workspaceRoot) {
|
|
|
31491
31840
|
}
|
|
31492
31841
|
}
|
|
31493
31842
|
function resolveWorkspaceTemplates() {
|
|
31494
|
-
let current =
|
|
31495
|
-
const root =
|
|
31843
|
+
let current = path12__default.resolve(process.cwd());
|
|
31844
|
+
const root = path12__default.parse(current).root;
|
|
31496
31845
|
let depth = 0;
|
|
31497
31846
|
while (current !== root && depth < MAX_WORKSPACE_TRAVERSAL_DEPTH) {
|
|
31498
31847
|
if (!isLegitimateWorkspaceRoot(current)) {
|
|
31499
|
-
current =
|
|
31848
|
+
current = path12__default.dirname(current);
|
|
31500
31849
|
depth++;
|
|
31501
31850
|
continue;
|
|
31502
31851
|
}
|
|
31503
|
-
const packagesTemplates =
|
|
31504
|
-
const normalizedTemplatesPath =
|
|
31505
|
-
if (!normalizedTemplatesPath.startsWith(current +
|
|
31506
|
-
current =
|
|
31852
|
+
const packagesTemplates = path12__default.join(current, "packages", "runa-templates", "templates");
|
|
31853
|
+
const normalizedTemplatesPath = path12__default.resolve(packagesTemplates);
|
|
31854
|
+
if (!normalizedTemplatesPath.startsWith(current + path12__default.sep)) {
|
|
31855
|
+
current = path12__default.dirname(current);
|
|
31507
31856
|
depth++;
|
|
31508
31857
|
continue;
|
|
31509
31858
|
}
|
|
31510
31859
|
if (fs5__default.existsSync(normalizedTemplatesPath)) {
|
|
31511
|
-
const markerFile =
|
|
31860
|
+
const markerFile = path12__default.join(current, "packages", "runa-templates", "package.json");
|
|
31512
31861
|
if (fs5__default.existsSync(markerFile)) {
|
|
31513
31862
|
try {
|
|
31514
31863
|
const pkg = JSON.parse(fs5__default.readFileSync(markerFile, "utf-8"));
|
|
@@ -31519,7 +31868,7 @@ function resolveWorkspaceTemplates() {
|
|
|
31519
31868
|
}
|
|
31520
31869
|
}
|
|
31521
31870
|
}
|
|
31522
|
-
current =
|
|
31871
|
+
current = path12__default.dirname(current);
|
|
31523
31872
|
depth++;
|
|
31524
31873
|
}
|
|
31525
31874
|
return void 0;
|
|
@@ -31528,9 +31877,9 @@ function resolveWorkspaceTemplates() {
|
|
|
31528
31877
|
// src/commands/init.ts
|
|
31529
31878
|
function checkZodVersion(_logger) {
|
|
31530
31879
|
const zodPaths = [
|
|
31531
|
-
|
|
31880
|
+
path12__default.join(process.cwd(), "node_modules", "zod", "package.json"),
|
|
31532
31881
|
// Check home directory node_modules (common source of conflicts)
|
|
31533
|
-
|
|
31882
|
+
path12__default.join(process.env.HOME ?? "", "node_modules", "zod", "package.json")
|
|
31534
31883
|
];
|
|
31535
31884
|
for (const zodPath of zodPaths) {
|
|
31536
31885
|
if (!fs5__default.existsSync(zodPath)) continue;
|
|
@@ -31544,7 +31893,7 @@ function checkZodVersion(_logger) {
|
|
|
31544
31893
|
"ZOD_VERSION_INCOMPATIBLE",
|
|
31545
31894
|
[
|
|
31546
31895
|
"runa SDK requires zod v4.x, but found zod v3.x",
|
|
31547
|
-
`Conflicting zod found at: ${
|
|
31896
|
+
`Conflicting zod found at: ${path12__default.dirname(zodPath)}`,
|
|
31548
31897
|
"",
|
|
31549
31898
|
"Solutions:",
|
|
31550
31899
|
"1. Run from a clean project directory without existing node_modules",
|
|
@@ -31796,7 +32145,7 @@ var initCommand = new Command("init").description("Initialize pj-repo with runa
|
|
|
31796
32145
|
try {
|
|
31797
32146
|
if (options.repairConfig) {
|
|
31798
32147
|
logger16.section("Repairing runa.config.ts");
|
|
31799
|
-
const projectName2 =
|
|
32148
|
+
const projectName2 = path12__default.basename(process.cwd());
|
|
31800
32149
|
const repairResult = repairRunaConfig(process.cwd(), projectName2);
|
|
31801
32150
|
if (repairResult.success) {
|
|
31802
32151
|
logger16.success("\u2705 Config file repaired successfully");
|
|
@@ -31847,7 +32196,7 @@ var initCommand = new Command("init").description("Initialize pj-repo with runa
|
|
|
31847
32196
|
existingConfig,
|
|
31848
32197
|
force: options.force === true
|
|
31849
32198
|
});
|
|
31850
|
-
const projectName =
|
|
32199
|
+
const projectName = path12__default.basename(process.cwd());
|
|
31851
32200
|
logger16.info(`Project: ${projectName}`);
|
|
31852
32201
|
logger16.step("Applying templates (SDK)", 1);
|
|
31853
32202
|
const result = await initProject({
|
|
@@ -32105,7 +32454,7 @@ function evaluateStrictDetect(diagnostics) {
|
|
|
32105
32454
|
init_esm_shims();
|
|
32106
32455
|
async function formatFilesWithBiome(files, cwd) {
|
|
32107
32456
|
if (files.length === 0) return;
|
|
32108
|
-
const relativePaths = files.map((f) =>
|
|
32457
|
+
const relativePaths = files.map((f) => path12.relative(cwd, f));
|
|
32109
32458
|
try {
|
|
32110
32459
|
await execa("pnpm", ["exec", "biome", "format", "--write", ...relativePaths], {
|
|
32111
32460
|
cwd,
|
|
@@ -32215,7 +32564,7 @@ function ensureDirectoryExists(dirPath) {
|
|
|
32215
32564
|
}
|
|
32216
32565
|
function getGeneratorVersion() {
|
|
32217
32566
|
try {
|
|
32218
|
-
const pkgPath =
|
|
32567
|
+
const pkgPath = path12.resolve(__dirname$1, "../../../package.json");
|
|
32219
32568
|
if (fs5.existsSync(pkgPath)) {
|
|
32220
32569
|
const pkg = JSON.parse(fs5.readFileSync(pkgPath, "utf-8"));
|
|
32221
32570
|
return pkg.version ?? "unknown";
|
|
@@ -32230,15 +32579,15 @@ function detectPlatform(repoRoot) {
|
|
|
32230
32579
|
const nextConfigPatterns = ["next.config.ts", "next.config.js", "next.config.mjs"];
|
|
32231
32580
|
for (const pattern of nextConfigPatterns) {
|
|
32232
32581
|
const paths = [
|
|
32233
|
-
|
|
32234
|
-
|
|
32235
|
-
|
|
32582
|
+
path12.join(repoRoot, pattern),
|
|
32583
|
+
path12.join(repoRoot, "apps", "web", pattern),
|
|
32584
|
+
path12.join(repoRoot, "apps", "dashboard", pattern)
|
|
32236
32585
|
];
|
|
32237
32586
|
if (paths.some((p) => fs5.existsSync(p))) {
|
|
32238
32587
|
return "nextjs";
|
|
32239
32588
|
}
|
|
32240
32589
|
}
|
|
32241
|
-
const appJsonPath =
|
|
32590
|
+
const appJsonPath = path12.join(repoRoot, "app.json");
|
|
32242
32591
|
if (fs5.existsSync(appJsonPath)) {
|
|
32243
32592
|
try {
|
|
32244
32593
|
const appJson = JSON.parse(fs5.readFileSync(appJsonPath, "utf-8"));
|
|
@@ -32248,7 +32597,7 @@ function detectPlatform(repoRoot) {
|
|
|
32248
32597
|
} catch {
|
|
32249
32598
|
}
|
|
32250
32599
|
}
|
|
32251
|
-
if (fs5.existsSync(
|
|
32600
|
+
if (fs5.existsSync(path12.join(repoRoot, "expo.json"))) {
|
|
32252
32601
|
return "expo";
|
|
32253
32602
|
}
|
|
32254
32603
|
const electronPatterns = [
|
|
@@ -32257,7 +32606,7 @@ function detectPlatform(repoRoot) {
|
|
|
32257
32606
|
"electron.vite.config.ts",
|
|
32258
32607
|
"electron.vite.config.js"
|
|
32259
32608
|
];
|
|
32260
|
-
if (electronPatterns.some((p) => fs5.existsSync(
|
|
32609
|
+
if (electronPatterns.some((p) => fs5.existsSync(path12.join(repoRoot, p)))) {
|
|
32261
32610
|
return "electron";
|
|
32262
32611
|
}
|
|
32263
32612
|
return "unknown";
|
|
@@ -32273,13 +32622,13 @@ function hasHonoRouteFiles(dir) {
|
|
|
32273
32622
|
}
|
|
32274
32623
|
}
|
|
32275
32624
|
function collectRouteDirsInScope(repoRoot, scope) {
|
|
32276
|
-
const scopeDir =
|
|
32625
|
+
const scopeDir = path12.join(repoRoot, scope);
|
|
32277
32626
|
if (!fs5.existsSync(scopeDir)) {
|
|
32278
32627
|
return [];
|
|
32279
32628
|
}
|
|
32280
32629
|
try {
|
|
32281
32630
|
const entries = fs5.readdirSync(scopeDir, { withFileTypes: true });
|
|
32282
|
-
return entries.filter((entry) => entry.isDirectory()).map((entry) => entry.name).filter((entryName) => hasHonoRouteFiles(
|
|
32631
|
+
return entries.filter((entry) => entry.isDirectory()).map((entry) => entry.name).filter((entryName) => hasHonoRouteFiles(path12.join(scopeDir, entryName, "routes"))).map((entryName) => `${scope}/${entryName}/routes`);
|
|
32283
32632
|
} catch {
|
|
32284
32633
|
return [];
|
|
32285
32634
|
}
|
|
@@ -32368,7 +32717,7 @@ function buildDefinitionMap(definitions) {
|
|
|
32368
32717
|
function buildEnhancedMachines(e2eManifest, definitionMap, machineDefinitions) {
|
|
32369
32718
|
const enhanced = {};
|
|
32370
32719
|
const normalizedIdToCanonical = /* @__PURE__ */ new Map();
|
|
32371
|
-
const isMachineFile = (
|
|
32720
|
+
const isMachineFile = (path65) => path65.includes("machine.ts") || path65.includes("machines/") || path65.includes(".machine.ts") || path65.includes("/machine/");
|
|
32372
32721
|
const addMachine = (id, sourceFile, entryBuilder) => {
|
|
32373
32722
|
const normalizedId = normalizeToCanonicalId(id).toLowerCase();
|
|
32374
32723
|
const existingCanonical = normalizedIdToCanonical.get(normalizedId);
|
|
@@ -32438,7 +32787,7 @@ async function analyzeHonoRoutes(repoRoot, verbose, resolveSchemas = false) {
|
|
|
32438
32787
|
return { apiContracts: [], schemasResolved: false };
|
|
32439
32788
|
}
|
|
32440
32789
|
if (verbose) {
|
|
32441
|
-
console.log(` Analyzing Hono routes in: ${
|
|
32790
|
+
console.log(` Analyzing Hono routes in: ${path12.relative(repoRoot, routesPath)}`);
|
|
32442
32791
|
if (resolveSchemas) {
|
|
32443
32792
|
console.log(" Schema resolution enabled (--resolve-schemas)");
|
|
32444
32793
|
}
|
|
@@ -32498,18 +32847,18 @@ async function generateManifestFiles(manifestDir, repoRoot, verbose, resolveSche
|
|
|
32498
32847
|
}
|
|
32499
32848
|
const definitionMap = buildDefinitionMap(machineDefinitions);
|
|
32500
32849
|
const enhancedMachines = buildEnhancedMachines(e2eManifest, definitionMap, machineDefinitions);
|
|
32501
|
-
const absoluteManifestDir =
|
|
32850
|
+
const absoluteManifestDir = path12.isAbsolute(manifestDir) ? manifestDir : path12.join(repoRoot, manifestDir);
|
|
32502
32851
|
if (!isPathContained(repoRoot, absoluteManifestDir)) {
|
|
32503
32852
|
throw new Error(
|
|
32504
32853
|
`Security error: Manifest directory '${manifestDir}' would escape the project root. The --manifest-dir must be a relative path within the project directory.`
|
|
32505
32854
|
);
|
|
32506
32855
|
}
|
|
32507
32856
|
ensureDirectoryExists(absoluteManifestDir);
|
|
32508
|
-
const generatedDir =
|
|
32857
|
+
const generatedDir = path12.join(absoluteManifestDir, "generated");
|
|
32509
32858
|
ensureDirectoryExists(generatedDir);
|
|
32510
|
-
const manifestsDir =
|
|
32859
|
+
const manifestsDir = path12.join(absoluteManifestDir, "manifests");
|
|
32511
32860
|
ensureDirectoryExists(manifestsDir);
|
|
32512
|
-
const tsPath =
|
|
32861
|
+
const tsPath = path12.join(generatedDir, "selectors.ts");
|
|
32513
32862
|
await fs5.promises.writeFile(tsPath, generateSelectorTypeScript(e2eManifest), "utf-8");
|
|
32514
32863
|
const unifiedRegistry = getUnifiedRegistry();
|
|
32515
32864
|
const machineLinks = buildMachineLinks();
|
|
@@ -32562,12 +32911,12 @@ async function generateManifestFiles(manifestDir, repoRoot, verbose, resolveSche
|
|
|
32562
32911
|
apiContracts,
|
|
32563
32912
|
detectionDiagnostics
|
|
32564
32913
|
};
|
|
32565
|
-
const manifestPath =
|
|
32914
|
+
const manifestPath = path12.join(manifestsDir, "manifest.json");
|
|
32566
32915
|
await fs5.promises.writeFile(manifestPath, JSON.stringify(unifiedManifest, null, 2), "utf-8");
|
|
32567
32916
|
if (verbose) {
|
|
32568
|
-
console.log(`Generated selectors: ${
|
|
32917
|
+
console.log(`Generated selectors: ${path12.relative(repoRoot, tsPath)}`);
|
|
32569
32918
|
console.log(
|
|
32570
|
-
`Generated manifest: ${
|
|
32919
|
+
`Generated manifest: ${path12.relative(repoRoot, manifestPath)} (v${manifestVersion})`
|
|
32571
32920
|
);
|
|
32572
32921
|
console.log(` Platform: ${platform}`);
|
|
32573
32922
|
console.log(` Machine definitions: ${machineDefinitions.length}`);
|
|
@@ -32586,7 +32935,7 @@ async function generateManifestFiles(manifestDir, repoRoot, verbose, resolveSche
|
|
|
32586
32935
|
);
|
|
32587
32936
|
}
|
|
32588
32937
|
return {
|
|
32589
|
-
manifestPath:
|
|
32938
|
+
manifestPath: path12.relative(repoRoot, tsPath),
|
|
32590
32939
|
machineCount: Object.keys(enhancedMachines).length,
|
|
32591
32940
|
machinesWithoutE2EMeta,
|
|
32592
32941
|
detectionDiagnostics
|
|
@@ -32621,7 +32970,7 @@ function registerExistingInjections(machineIds, attrs, sourceFile) {
|
|
|
32621
32970
|
}
|
|
32622
32971
|
async function preprocessFile(filePath, repoRoot, options) {
|
|
32623
32972
|
let code = await fs5.promises.readFile(filePath, "utf-8");
|
|
32624
|
-
const relativePath =
|
|
32973
|
+
const relativePath = path12.relative(repoRoot, filePath);
|
|
32625
32974
|
collectRouteInfo(relativePath, code, options.verbose);
|
|
32626
32975
|
const hasMarker = code.includes(CLI_INJECTION_MARKER);
|
|
32627
32976
|
if (hasMarker && options.force) {
|
|
@@ -32666,7 +33015,7 @@ async function preprocessFile(filePath, repoRoot, options) {
|
|
|
32666
33015
|
return { code, hasMarker: false, skipped: false };
|
|
32667
33016
|
}
|
|
32668
33017
|
function transformAndRegister(filePath, code, repoRoot, options) {
|
|
32669
|
-
const relativePath =
|
|
33018
|
+
const relativePath = path12.relative(repoRoot, filePath);
|
|
32670
33019
|
const readAndParseFile = createReadAndParseFile(options.verbose);
|
|
32671
33020
|
const resolveImportPath = createResolveImportPath();
|
|
32672
33021
|
try {
|
|
@@ -32721,7 +33070,7 @@ async function processFileForChildInjection(filePath, repoRoot, options) {
|
|
|
32721
33070
|
if (prep.skipped && prep.skipResult) return prep.skipResult;
|
|
32722
33071
|
const result = transformAndRegister(filePath, prep.code, repoRoot, options);
|
|
32723
33072
|
if (options.verbose && result.changed && result.injectedCount > 0) {
|
|
32724
|
-
const relativePath =
|
|
33073
|
+
const relativePath = path12.relative(repoRoot, filePath);
|
|
32725
33074
|
console.log(
|
|
32726
33075
|
` [child] Injected ${result.injectedCount} attrs in ${relativePath} (actions: ${result.actions.join(", ")})`
|
|
32727
33076
|
);
|
|
@@ -32741,7 +33090,7 @@ async function handleChangedFile(result, check, repoRoot, verbose, state2) {
|
|
|
32741
33090
|
await fs5.promises.writeFile(result.filePath, result.transformedCode, "utf-8");
|
|
32742
33091
|
state2.filesToFormat.push(result.filePath);
|
|
32743
33092
|
if (verbose) {
|
|
32744
|
-
const relativePath =
|
|
33093
|
+
const relativePath = path12.relative(repoRoot, result.filePath);
|
|
32745
33094
|
console.log(` Injected ${result.injectedCount} attrs in ${relativePath}`);
|
|
32746
33095
|
}
|
|
32747
33096
|
}
|
|
@@ -32866,7 +33215,7 @@ async function processFiles(options) {
|
|
|
32866
33215
|
for (const filePath of withMachineDefinitions) {
|
|
32867
33216
|
try {
|
|
32868
33217
|
const code = await fs5.promises.readFile(filePath, "utf-8");
|
|
32869
|
-
const relativePath =
|
|
33218
|
+
const relativePath = path12.relative(repoRoot, filePath);
|
|
32870
33219
|
collectRouteInfo(relativePath, code, verbose);
|
|
32871
33220
|
} catch {
|
|
32872
33221
|
}
|
|
@@ -33184,7 +33533,7 @@ function assertNodeAuthToken(options) {
|
|
|
33184
33533
|
}
|
|
33185
33534
|
}
|
|
33186
33535
|
function getPackageVersion(packagePath) {
|
|
33187
|
-
const packageJsonPath =
|
|
33536
|
+
const packageJsonPath = path12__default.join(packagePath, "package.json");
|
|
33188
33537
|
if (!existsSync(packageJsonPath)) {
|
|
33189
33538
|
throw new CLIError(`package.json not found at ${packagePath}`, "PACKAGE_NOT_FOUND");
|
|
33190
33539
|
}
|
|
@@ -33203,7 +33552,7 @@ function resolveWorkspaceDependencies(workspaceRoot, packages, logger16) {
|
|
|
33203
33552
|
const sdkPackage = packages.find((p) => p.name === "SDK");
|
|
33204
33553
|
const xstatePluginPackage = packages.find((p) => p.name === "xstate-test-plugin");
|
|
33205
33554
|
if (!sdkPackage) return;
|
|
33206
|
-
const cliPackagePath =
|
|
33555
|
+
const cliPackagePath = path12__default.join(workspaceRoot, "packages", "cli", "package.json");
|
|
33207
33556
|
if (!existsSync(cliPackagePath)) return;
|
|
33208
33557
|
let content = readFileSync(cliPackagePath, "utf-8");
|
|
33209
33558
|
let updated = false;
|
|
@@ -33232,7 +33581,7 @@ function resolveWorkspaceDependencies(workspaceRoot, packages, logger16) {
|
|
|
33232
33581
|
}
|
|
33233
33582
|
}
|
|
33234
33583
|
function restoreWorkspaceDependencies(workspaceRoot) {
|
|
33235
|
-
const cliPackagePath =
|
|
33584
|
+
const cliPackagePath = path12__default.join(workspaceRoot, "packages", "cli", "package.json");
|
|
33236
33585
|
if (!existsSync(cliPackagePath)) return;
|
|
33237
33586
|
let content = readFileSync(cliPackagePath, "utf-8");
|
|
33238
33587
|
content = content.replace(/"@runa-ai\/runa": "\^[\d.]+"/, '"@runa-ai/runa": "workspace:*"');
|
|
@@ -33344,7 +33693,7 @@ function collectPackageInfo(workspaceRoot, logger16, stepNum) {
|
|
|
33344
33693
|
logger16.step("Collecting package info", stepNum);
|
|
33345
33694
|
const packages = [];
|
|
33346
33695
|
for (const pkgConfig of PUBLISHABLE_PACKAGES) {
|
|
33347
|
-
const pkgPath =
|
|
33696
|
+
const pkgPath = path12__default.join(workspaceRoot, "packages", pkgConfig.dir);
|
|
33348
33697
|
const version = getPackageVersion(pkgPath);
|
|
33349
33698
|
const exists = checkVersionExists(pkgConfig.publishName, version);
|
|
33350
33699
|
packages.push({
|
|
@@ -34733,8 +35082,8 @@ var compareActor = fromPromise(async ({ input: input3 }) => {
|
|
|
34733
35082
|
const result = compareBothFiles(
|
|
34734
35083
|
runaRelPath,
|
|
34735
35084
|
templateRelPath,
|
|
34736
|
-
|
|
34737
|
-
|
|
35085
|
+
path12.join(repoRoot, runaRelPath),
|
|
35086
|
+
path12.join(templateDir, templateRelPath),
|
|
34738
35087
|
runaFile.category,
|
|
34739
35088
|
options.diff ?? false
|
|
34740
35089
|
);
|
|
@@ -35132,9 +35481,9 @@ var PATH_MAPPING_RULES = [
|
|
|
35132
35481
|
category: "prompts"
|
|
35133
35482
|
}
|
|
35134
35483
|
];
|
|
35135
|
-
function getCategoryForPath(
|
|
35484
|
+
function getCategoryForPath(path65) {
|
|
35136
35485
|
for (const rule of PATH_MAPPING_RULES) {
|
|
35137
|
-
if (matchGlobPattern(
|
|
35486
|
+
if (matchGlobPattern(path65, rule.runa) || matchGlobPattern(path65, rule.template)) {
|
|
35138
35487
|
return rule.category;
|
|
35139
35488
|
}
|
|
35140
35489
|
}
|
|
@@ -35154,17 +35503,17 @@ function applyReverseRename(normalized, rule, isTemplate) {
|
|
|
35154
35503
|
}
|
|
35155
35504
|
return normalized;
|
|
35156
35505
|
}
|
|
35157
|
-
function generateComparisonKey(
|
|
35158
|
-
let normalized = isTemplate ? normalizeTemplatePath(
|
|
35506
|
+
function generateComparisonKey(path65, isTemplate) {
|
|
35507
|
+
let normalized = isTemplate ? normalizeTemplatePath(path65) : path65;
|
|
35159
35508
|
for (const rule of PATH_MAPPING_RULES) {
|
|
35160
35509
|
normalized = applyReverseRename(normalized, rule, isTemplate);
|
|
35161
35510
|
}
|
|
35162
35511
|
return normalized;
|
|
35163
35512
|
}
|
|
35164
|
-
function matchGlobPattern(
|
|
35513
|
+
function matchGlobPattern(path65, pattern) {
|
|
35165
35514
|
const regexPattern = pattern.replace(/[.+^${}()|[\]\\]/g, "\\$&").replace(/\*\*/g, "<<DOUBLE_STAR>>").replace(/\*/g, "([^/]*)").replace(/<<DOUBLE_STAR>>/g, "(.*)");
|
|
35166
35515
|
const regex = new RegExp(`^${regexPattern}$`);
|
|
35167
|
-
const match =
|
|
35516
|
+
const match = path65.match(regex);
|
|
35168
35517
|
if (match) {
|
|
35169
35518
|
const subPath = match[1] ?? "";
|
|
35170
35519
|
return { matched: true, subPath };
|
|
@@ -35185,7 +35534,7 @@ var discoverActor = fromPromise(async ({ input: input3 }) => {
|
|
|
35185
35534
|
const runaPattern = rule.runa;
|
|
35186
35535
|
const runaFiles = await globFiles(repoRoot, runaPattern);
|
|
35187
35536
|
for (const file of runaFiles) {
|
|
35188
|
-
const relativePath =
|
|
35537
|
+
const relativePath = path12.relative(repoRoot, file.absolutePath);
|
|
35189
35538
|
const key = generateComparisonKey(relativePath, false);
|
|
35190
35539
|
inventory.runaFiles.set(relativePath, {
|
|
35191
35540
|
...file,
|
|
@@ -35199,7 +35548,7 @@ var discoverActor = fromPromise(async ({ input: input3 }) => {
|
|
|
35199
35548
|
const templatePattern = rule.template;
|
|
35200
35549
|
const templateFiles = await globFiles(templateDir, templatePattern);
|
|
35201
35550
|
for (const file of templateFiles) {
|
|
35202
|
-
const relativePath =
|
|
35551
|
+
const relativePath = path12.relative(templateDir, file.absolutePath);
|
|
35203
35552
|
const key = generateComparisonKey(relativePath, true);
|
|
35204
35553
|
inventory.templateFiles.set(relativePath, {
|
|
35205
35554
|
...file,
|
|
@@ -35248,7 +35597,7 @@ async function matchDoubleWildcard(ctx, entries) {
|
|
|
35248
35597
|
await walkAndMatch({ ...ctx, partIndex: ctx.partIndex + 1 });
|
|
35249
35598
|
for (const entry of entries) {
|
|
35250
35599
|
if (entry.isDirectory()) {
|
|
35251
|
-
const subDir =
|
|
35600
|
+
const subDir = path12.join(ctx.currentDir, entry.name);
|
|
35252
35601
|
await walkAndMatch({ ...ctx, currentDir: subDir });
|
|
35253
35602
|
}
|
|
35254
35603
|
}
|
|
@@ -35257,7 +35606,7 @@ async function matchSingleWildcard(ctx, entries, pattern, isLastPart) {
|
|
|
35257
35606
|
const regex = patternToRegex(pattern);
|
|
35258
35607
|
for (const entry of entries) {
|
|
35259
35608
|
if (regex.test(entry.name)) {
|
|
35260
|
-
const entryPath =
|
|
35609
|
+
const entryPath = path12.join(ctx.currentDir, entry.name);
|
|
35261
35610
|
await processEntry(ctx, entryPath, entry.isFile(), entry.isDirectory(), isLastPart);
|
|
35262
35611
|
}
|
|
35263
35612
|
}
|
|
@@ -35267,14 +35616,14 @@ async function matchBraceExpansion(ctx, entries, pattern, isLastPart) {
|
|
|
35267
35616
|
for (const option of options) {
|
|
35268
35617
|
for (const entry of entries) {
|
|
35269
35618
|
if (entry.name === option) {
|
|
35270
|
-
const entryPath =
|
|
35619
|
+
const entryPath = path12.join(ctx.currentDir, entry.name);
|
|
35271
35620
|
await processEntry(ctx, entryPath, entry.isFile(), entry.isDirectory(), isLastPart);
|
|
35272
35621
|
}
|
|
35273
35622
|
}
|
|
35274
35623
|
}
|
|
35275
35624
|
}
|
|
35276
35625
|
async function matchLiteral(ctx, pattern, isLastPart) {
|
|
35277
|
-
const entryPath =
|
|
35626
|
+
const entryPath = path12.join(ctx.currentDir, pattern);
|
|
35278
35627
|
if (!fs5.existsSync(entryPath)) return;
|
|
35279
35628
|
const stats = fs5.statSync(entryPath);
|
|
35280
35629
|
await processEntry(ctx, entryPath, stats.isFile(), stats.isDirectory(), isLastPart);
|
|
@@ -35799,17 +36148,17 @@ function printActionsNeeded(logger16, actions) {
|
|
|
35799
36148
|
);
|
|
35800
36149
|
}
|
|
35801
36150
|
function findRepoRoot3(startDir) {
|
|
35802
|
-
const { existsSync:
|
|
35803
|
-
const { join:
|
|
36151
|
+
const { existsSync: existsSync54, readFileSync: readFileSync31 } = __require("fs");
|
|
36152
|
+
const { join: join24, dirname: dirname5 } = __require("path");
|
|
35804
36153
|
let current = startDir;
|
|
35805
36154
|
while (current !== dirname5(current)) {
|
|
35806
|
-
if (
|
|
36155
|
+
if (existsSync54(join24(current, "turbo.json"))) {
|
|
35807
36156
|
return current;
|
|
35808
36157
|
}
|
|
35809
|
-
const pkgPath =
|
|
35810
|
-
if (
|
|
36158
|
+
const pkgPath = join24(current, "package.json");
|
|
36159
|
+
if (existsSync54(pkgPath)) {
|
|
35811
36160
|
try {
|
|
35812
|
-
const pkg = JSON.parse(
|
|
36161
|
+
const pkg = JSON.parse(readFileSync31(pkgPath, "utf-8"));
|
|
35813
36162
|
if (pkg.workspaces) {
|
|
35814
36163
|
return current;
|
|
35815
36164
|
}
|
|
@@ -35875,10 +36224,10 @@ function generateReportOutput(output3, isJsonMode) {
|
|
|
35875
36224
|
};
|
|
35876
36225
|
}
|
|
35877
36226
|
function validateRunaRepo(repoRoot) {
|
|
35878
|
-
const { existsSync:
|
|
35879
|
-
const { join:
|
|
35880
|
-
const templateDir =
|
|
35881
|
-
if (!
|
|
36227
|
+
const { existsSync: existsSync54 } = __require("fs");
|
|
36228
|
+
const { join: join24 } = __require("path");
|
|
36229
|
+
const templateDir = join24(repoRoot, "packages/runa-templates/templates");
|
|
36230
|
+
if (!existsSync54(templateDir)) {
|
|
35882
36231
|
throw new CLIError("template-check is a runa-repo only command", "NOT_RUNA_REPO", [
|
|
35883
36232
|
"This command compares runa-repo with pj-repo templates",
|
|
35884
36233
|
"It should only be run in the runa repository",
|
|
@@ -37218,7 +37567,7 @@ async function runVerification(logger16) {
|
|
|
37218
37567
|
}
|
|
37219
37568
|
async function findWorkspacesWithSdkPackages(rootDir, packageNames) {
|
|
37220
37569
|
const fs14 = await import('fs/promises');
|
|
37221
|
-
const
|
|
37570
|
+
const path65 = await import('path');
|
|
37222
37571
|
const { glob: glob8 } = await import('glob');
|
|
37223
37572
|
const result = [];
|
|
37224
37573
|
const packageJsonPaths = await glob8("**/package.json", {
|
|
@@ -37227,7 +37576,7 @@ async function findWorkspacesWithSdkPackages(rootDir, packageNames) {
|
|
|
37227
37576
|
// Exclude root
|
|
37228
37577
|
});
|
|
37229
37578
|
for (const relPath of packageJsonPaths) {
|
|
37230
|
-
const fullPath =
|
|
37579
|
+
const fullPath = path65.join(rootDir, relPath);
|
|
37231
37580
|
try {
|
|
37232
37581
|
const content = await fs14.readFile(fullPath, "utf-8");
|
|
37233
37582
|
const pkg = JSON.parse(content);
|
|
@@ -37240,7 +37589,7 @@ async function findWorkspacesWithSdkPackages(rootDir, packageNames) {
|
|
|
37240
37589
|
}
|
|
37241
37590
|
if (hasAnySdkPackage) {
|
|
37242
37591
|
result.push({
|
|
37243
|
-
dir:
|
|
37592
|
+
dir: path65.dirname(fullPath),
|
|
37244
37593
|
packages: packageNames
|
|
37245
37594
|
// All packages, not just found ones
|
|
37246
37595
|
});
|
|
@@ -37883,7 +38232,7 @@ var SchemaWatcher = class {
|
|
|
37883
38232
|
persistent: true,
|
|
37884
38233
|
ignoreInitial: true
|
|
37885
38234
|
});
|
|
37886
|
-
this.watcher.on("add", (
|
|
38235
|
+
this.watcher.on("add", (path65) => this.handleFileEvent("add", path65)).on("change", (path65) => this.handleFileEvent("change", path65)).on("unlink", (path65) => this.handleFileEvent("unlink", path65)).on("error", (error) => this.handleError(error));
|
|
37887
38236
|
this.logger.success("\u2705 Schema watcher started");
|
|
37888
38237
|
this.logger.info(chalk.dim(" Press Ctrl+C to stop\n"));
|
|
37889
38238
|
}
|
|
@@ -37904,23 +38253,23 @@ var SchemaWatcher = class {
|
|
|
37904
38253
|
/**
|
|
37905
38254
|
* Handle file system events with debouncing
|
|
37906
38255
|
*/
|
|
37907
|
-
handleFileEvent(type,
|
|
37908
|
-
const existingTimer = this.debounceTimers.get(
|
|
38256
|
+
handleFileEvent(type, path65) {
|
|
38257
|
+
const existingTimer = this.debounceTimers.get(path65);
|
|
37909
38258
|
if (existingTimer) {
|
|
37910
38259
|
clearTimeout(existingTimer);
|
|
37911
38260
|
}
|
|
37912
38261
|
const timer = setTimeout(() => {
|
|
37913
|
-
this.processFileEvent({ type, path:
|
|
37914
|
-
this.debounceTimers.delete(
|
|
38262
|
+
this.processFileEvent({ type, path: path65, timestamp: /* @__PURE__ */ new Date() });
|
|
38263
|
+
this.debounceTimers.delete(path65);
|
|
37915
38264
|
}, this.options.debounceMs);
|
|
37916
|
-
this.debounceTimers.set(
|
|
38265
|
+
this.debounceTimers.set(path65, timer);
|
|
37917
38266
|
}
|
|
37918
38267
|
/**
|
|
37919
38268
|
* Process file system event
|
|
37920
38269
|
*/
|
|
37921
38270
|
async processFileEvent(event) {
|
|
37922
|
-
const { type, path:
|
|
37923
|
-
const fileName =
|
|
38271
|
+
const { type, path: path65 } = event;
|
|
38272
|
+
const fileName = path65.split("/").pop() || path65;
|
|
37924
38273
|
switch (type) {
|
|
37925
38274
|
case "add":
|
|
37926
38275
|
this.logger.info(chalk.green(`\u2795 Added: ${fileName}`));
|
|
@@ -37933,19 +38282,19 @@ var SchemaWatcher = class {
|
|
|
37933
38282
|
return;
|
|
37934
38283
|
}
|
|
37935
38284
|
if (this.options.autoValidate) {
|
|
37936
|
-
await this.validateFile(
|
|
38285
|
+
await this.validateFile(path65);
|
|
37937
38286
|
}
|
|
37938
38287
|
}
|
|
37939
38288
|
/**
|
|
37940
38289
|
* Validate schema file
|
|
37941
38290
|
*/
|
|
37942
|
-
async validateFile(
|
|
38291
|
+
async validateFile(path65) {
|
|
37943
38292
|
try {
|
|
37944
38293
|
this.logger.info(chalk.dim(" Validating..."));
|
|
37945
|
-
const validationResult = await validateSchemaFile(
|
|
38294
|
+
const validationResult = await validateSchemaFile(path65);
|
|
37946
38295
|
if (validationResult.isValid) {
|
|
37947
38296
|
this.logger.success(chalk.green(" \u2713 Validation passed"));
|
|
37948
|
-
const risks = await detectSchemaRisks(
|
|
38297
|
+
const risks = await detectSchemaRisks(path65);
|
|
37949
38298
|
if (risks.length > 0) {
|
|
37950
38299
|
this.logger.warn(` \u26A0\uFE0F ${risks.length} risk(s) detected`);
|
|
37951
38300
|
for (const risk of risks) {
|
|
@@ -37960,7 +38309,7 @@ var SchemaWatcher = class {
|
|
|
37960
38309
|
if (this.options.notifyOnError) {
|
|
37961
38310
|
await notifyDesktop({
|
|
37962
38311
|
title: "Schema Validation Failed",
|
|
37963
|
-
message: `${validationResult.errors.length} error(s) in ${
|
|
38312
|
+
message: `${validationResult.errors.length} error(s) in ${path65.split("/").pop()}`,
|
|
37964
38313
|
type: "error"
|
|
37965
38314
|
});
|
|
37966
38315
|
}
|
|
@@ -38179,7 +38528,7 @@ var TestWatcher = class {
|
|
|
38179
38528
|
persistent: true,
|
|
38180
38529
|
ignoreInitial: true
|
|
38181
38530
|
});
|
|
38182
|
-
this.watcher.on("add", (
|
|
38531
|
+
this.watcher.on("add", (path65) => this.handleFileEvent("add", path65)).on("change", (path65) => this.handleFileEvent("change", path65)).on("unlink", (path65) => this.handleFileEvent("unlink", path65)).on("error", (error) => this.handleError(error));
|
|
38183
38532
|
this.logger.success("\u2705 Test watcher started");
|
|
38184
38533
|
this.logger.info(chalk.dim(" Press Ctrl+C to stop\n"));
|
|
38185
38534
|
}
|
|
@@ -38200,26 +38549,26 @@ var TestWatcher = class {
|
|
|
38200
38549
|
/**
|
|
38201
38550
|
* Handle file system events with debouncing
|
|
38202
38551
|
*/
|
|
38203
|
-
handleFileEvent(type,
|
|
38204
|
-
const existingTimer = this.debounceTimers.get(
|
|
38552
|
+
handleFileEvent(type, path65) {
|
|
38553
|
+
const existingTimer = this.debounceTimers.get(path65);
|
|
38205
38554
|
if (existingTimer) {
|
|
38206
38555
|
clearTimeout(existingTimer);
|
|
38207
38556
|
}
|
|
38208
38557
|
const timer = setTimeout(() => {
|
|
38209
|
-
this.processFileEvent(type,
|
|
38210
|
-
this.debounceTimers.delete(
|
|
38558
|
+
this.processFileEvent(type, path65);
|
|
38559
|
+
this.debounceTimers.delete(path65);
|
|
38211
38560
|
}, this.options.debounceMs);
|
|
38212
|
-
this.debounceTimers.set(
|
|
38561
|
+
this.debounceTimers.set(path65, timer);
|
|
38213
38562
|
}
|
|
38214
38563
|
/**
|
|
38215
38564
|
* Process file system event
|
|
38216
38565
|
*/
|
|
38217
|
-
async processFileEvent(type,
|
|
38566
|
+
async processFileEvent(type, path65) {
|
|
38218
38567
|
if (this.isRunning) {
|
|
38219
38568
|
this.logger.warn(chalk.yellow("\u23F3 Tests already running, skipping..."));
|
|
38220
38569
|
return;
|
|
38221
38570
|
}
|
|
38222
|
-
const fileName =
|
|
38571
|
+
const fileName = path65.split("/").pop() || path65;
|
|
38223
38572
|
switch (type) {
|
|
38224
38573
|
case "add":
|
|
38225
38574
|
this.logger.info(chalk.green(`\u2795 Test added: ${fileName}`));
|
|
@@ -38232,7 +38581,7 @@ var TestWatcher = class {
|
|
|
38232
38581
|
return;
|
|
38233
38582
|
}
|
|
38234
38583
|
if (this.options.autoRun) {
|
|
38235
|
-
await this.runTests(
|
|
38584
|
+
await this.runTests(path65);
|
|
38236
38585
|
}
|
|
38237
38586
|
}
|
|
38238
38587
|
/**
|
|
@@ -38305,8 +38654,8 @@ var logger13 = createCLILogger("watch");
|
|
|
38305
38654
|
function addFallbackSchemaPatterns(patterns) {
|
|
38306
38655
|
const detected = detectDatabasePackage(process.cwd());
|
|
38307
38656
|
if (detected) {
|
|
38308
|
-
patterns.push(
|
|
38309
|
-
patterns.push(
|
|
38657
|
+
patterns.push(path12__default.join(detected, "src/schema/**/*.ts"));
|
|
38658
|
+
patterns.push(path12__default.join(detected, "sql/**/*.sql"));
|
|
38310
38659
|
return;
|
|
38311
38660
|
}
|
|
38312
38661
|
for (const candidate of DATABASE_PACKAGE_CANDIDATES) {
|
|
@@ -38320,8 +38669,8 @@ async function buildSchemaPatterns() {
|
|
|
38320
38669
|
const patterns = ["supabase/schemas/**/*.sql"];
|
|
38321
38670
|
try {
|
|
38322
38671
|
const dbPackagePath = await getDatabasePackagePath();
|
|
38323
|
-
patterns.push(
|
|
38324
|
-
patterns.push(
|
|
38672
|
+
patterns.push(path12__default.join(dbPackagePath, "src/schema/**/*.ts"));
|
|
38673
|
+
patterns.push(path12__default.join(dbPackagePath, "sql/**/*.sql"));
|
|
38325
38674
|
} catch {
|
|
38326
38675
|
addFallbackSchemaPatterns(patterns);
|
|
38327
38676
|
}
|