@runa-ai/runa-cli 0.5.52 → 0.5.57

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52):
  1. package/dist/commands/build/machine.d.ts +6 -4
  2. package/dist/commands/build/machine.d.ts.map +1 -1
  3. package/dist/commands/ci/machine/actors/db/sync-schema.d.ts.map +1 -1
  4. package/dist/commands/ci/machine/commands/ci-local.d.ts.map +1 -1
  5. package/dist/commands/ci/machine/commands/ci-pr.d.ts.map +1 -1
  6. package/dist/commands/ci/machine/commands/runtime-env.d.ts +8 -0
  7. package/dist/commands/ci/machine/commands/runtime-env.d.ts.map +1 -0
  8. package/dist/commands/ci/machine/contract.d.ts +1 -0
  9. package/dist/commands/ci/machine/contract.d.ts.map +1 -1
  10. package/dist/commands/ci/machine/helpers.d.ts +13 -0
  11. package/dist/commands/ci/machine/helpers.d.ts.map +1 -1
  12. package/dist/commands/ci/machine/machine.d.ts +1 -0
  13. package/dist/commands/ci/machine/machine.d.ts.map +1 -1
  14. package/dist/commands/db/commands/db-drizzle.d.ts.map +1 -1
  15. package/dist/commands/db/commands/db-lifecycle.d.ts.map +1 -1
  16. package/dist/commands/db/commands/db-seed-metadata.d.ts.map +1 -1
  17. package/dist/commands/db/sync/actors.d.ts +1 -0
  18. package/dist/commands/db/sync/actors.d.ts.map +1 -1
  19. package/dist/commands/db/sync/contract.d.ts +4 -1
  20. package/dist/commands/db/sync/contract.d.ts.map +1 -1
  21. package/dist/commands/db/sync/machine.d.ts +1 -0
  22. package/dist/commands/db/sync/machine.d.ts.map +1 -1
  23. package/dist/commands/db/types.d.ts +2 -0
  24. package/dist/commands/db/types.d.ts.map +1 -1
  25. package/dist/commands/db/utils/schema-detector.d.ts.map +1 -1
  26. package/dist/commands/db/utils/sql-table-extractor.d.ts.map +1 -1
  27. package/dist/commands/db/utils/table-registry.d.ts +4 -0
  28. package/dist/commands/db/utils/table-registry.d.ts.map +1 -1
  29. package/dist/commands/db/utils/table-source-classifier.d.ts +28 -0
  30. package/dist/commands/db/utils/table-source-classifier.d.ts.map +1 -0
  31. package/dist/commands/dev/machine.d.ts +5 -4
  32. package/dist/commands/dev/machine.d.ts.map +1 -1
  33. package/dist/commands/env/commands/env-pull.d.ts.map +1 -1
  34. package/dist/commands/env/constants/local-supabase.d.ts +4 -2
  35. package/dist/commands/env/constants/local-supabase.d.ts.map +1 -1
  36. package/dist/commands/hotfix/metadata.d.ts.map +1 -1
  37. package/dist/commands/inject-test-attrs/processor-utils.d.ts.map +1 -1
  38. package/dist/commands/inject-test-attrs/processor.d.ts.map +1 -1
  39. package/dist/constants/versions.d.ts +1 -1
  40. package/dist/errors/catalog.d.ts +8 -0
  41. package/dist/errors/catalog.d.ts.map +1 -1
  42. package/dist/index.js +1289 -578
  43. package/dist/internal/machines/index.d.ts +1 -0
  44. package/dist/internal/machines/index.d.ts.map +1 -1
  45. package/dist/internal/machines/snapshot-helpers.d.ts +30 -0
  46. package/dist/internal/machines/snapshot-helpers.d.ts.map +1 -0
  47. package/dist/utils/path-security.d.ts +18 -0
  48. package/dist/utils/path-security.d.ts.map +1 -1
  49. package/dist/utils/port-allocator.d.ts +34 -37
  50. package/dist/utils/port-allocator.d.ts.map +1 -1
  51. package/dist/validators/risk-detector.d.ts.map +1 -1
  52. package/package.json +8 -8
package/dist/index.js CHANGED
@@ -1,12 +1,12 @@
1
1
  #!/usr/bin/env node
2
2
  import { createRequire } from 'module';
3
3
  import * as path11 from 'path';
4
- import path11__default, { join, dirname, resolve, relative, basename, sep, isAbsolute, normalize } from 'path';
4
+ import path11__default, { join, dirname, resolve, isAbsolute, relative, sep, basename, normalize } from 'path';
5
5
  import { fileURLToPath } from 'url';
6
6
  import * as fs5 from 'fs';
7
- import fs5__default, { existsSync, readFileSync, readdirSync, mkdtempSync, writeFileSync, mkdirSync, copyFileSync, createWriteStream, statSync, rmSync, realpathSync, renameSync, promises, lstatSync, accessSync, constants, chmodSync, unlinkSync } from 'fs';
7
+ import fs5__default, { existsSync, rmSync, readFileSync, readdirSync, mkdtempSync, writeFileSync, mkdirSync, copyFileSync, createWriteStream, statSync, realpathSync, promises, lstatSync, accessSync, constants, chmodSync, unlinkSync } from 'fs';
8
8
  import { execSync, spawnSync, execFileSync, exec, spawn } from 'child_process';
9
- import { createCLILogger, cacheClear, CacheClearOutputSchema, CLIError, cachePrune, CachePruneOutputSchema, cacheStats, CacheStatsOutputSchema, cacheList, CacheListOutputSchema, cacheInvalidate, CacheInvalidateOutputSchema, syncFromProduction, dbGenerateDiagram, DbDiagramGenerateOutputSchema, createDbSnapshot, syncDatabase, emitDbPushFailureCapsule, emitDbAnnotations, writeDbPushStepSummary, exportDbReportJson, DbSyncOutputSchema, databasePaths, detectRequiredServices, formatDetectionResults, dbStart, DbLifecycleStartOutputSchema, dbStop, DbLifecycleStopOutputSchema, dbReset, DbLifecycleResetOutputSchema, dbValidateSchemas, DbSchemaValidateOutputSchema, DbSchemaRisksOutputSchema, dbDetectSchemaRisks, dbApplySchemas, DbSchemaApplyOutputSchema, dbGenerateTypes, DbSchemaGenerateOutputSchema, extractSchemaFilter, dbSeedInit, DbSeedInitOutputSchema, dbSeedValidate, DbSeedValidateOutputSchema, dbSeedGenerate, DbSeedGenerateOutputSchema, dbVerifySeeds, DbSeedVerifyOutputSchema, DbSnapshotCreateOutputSchema, restoreDbSnapshot, DbSnapshotRestoreOutputSchema, listDbSnapshots, DbSnapshotListOutputSchema, dbGeneratePgTapTests, DbTestGenOutputSchema, dbUpdateGoldenRecord, DbTestUpdateGoldenOutputSchema, repairRunaConfig, detectExistingInitConfig, initProject, validateInitResult, linkCliGlobally, LinkCliOutputSchema, unlinkCliGlobally, UnlinkCliOutputSchema, checkRepoStatus, CheckRepoStatusOutputSchema, enableTelemetry, disableTelemetry, getTelemetryStatus, uploadTelemetry, TelemetryUploadOutputSchema, runTest, TestRunOutputSchema, runTestService, TestServiceOutputSchema, runTestIntegration, TestIntegrationOutputSchema, runTestStatic, TestStaticOutputSchema, generateOwaspTop10Tests, TestOwaspGenerateOutputSchema, updateGoldenRecord, generateE2ETests, generateSecurityTests, generateUnitTests, generateApiTests, generateComponentTests, generateE2EScaffold, validateConfig, ValidateConfigOutputSchema, deploySchemaToProduction, WorkflowNotifyOutputSchema, devopsSync, workflowSync, 
validateInfrastructure, emitWorkflowValidateFailureCapsule, emitWorkflowAnnotations, writeWorkflowValidateStepSummary, exportWorkflowReportJson, WorkflowValidateInfrastructureOutputSchema, createSuccessEnvelopeSchema, CLI_CONTRACT_VERSION, runChecks, RunCheckOutputSchema, formatDuration as formatDuration$1, GITHUB_API, loadRunaConfig, getClassificationForProfile, loadRunaConfigOrThrow, recordSchemaAudit, RecordSchemaAuditOutputSchema, createBackup, CreateBackupOutputSchema, listBackups, ListBackupsOutputSchema, getBackupMetadata, restoreBackup, RestoreBackupOutputSchema, deleteBackup, DeleteBackupOutputSchema, detectSchemaNames, SUPABASE_SYSTEM_SCHEMAS, dbSeedApply, writeDbSeedStepSummary, DbSeedApplyOutputSchema, emitDbSeedFailureCapsule, syncEnvironment, EnvSyncOutputSchema, detectDatabasePackage, findProjectRoot as findProjectRoot$1, TelemetryEnableOutputSchema, TelemetryDisableOutputSchema, TelemetryStatusOutputSchema, workflowNotify, DevOpsSyncOutputSchema, WorkflowSyncOutputSchema, formatCLIError, getStatusIcon as getStatusIcon$1, findWorkspaceRoot as findWorkspaceRoot$1, checkExtensionConfig, UpgradeTransaction, readRunaVersion, syncTemplates, SyncOutputSchema, DATABASE_PACKAGE_CANDIDATES, ErrorEnvelopeSchema, preCheckSync, findConflictFiles, TestUnitGenOutputSchema, TestE2EGenerateOutputSchema, TestSecurityGenOutputSchema, TestApiGenOutputSchema, TestComponentGenOutputSchema } from '@runa-ai/runa';
9
+ import { createCLILogger, cacheClear, CacheClearOutputSchema, CLIError, cachePrune, CachePruneOutputSchema, cacheStats, CacheStatsOutputSchema, cacheList, CacheListOutputSchema, cacheInvalidate, CacheInvalidateOutputSchema, syncFromProduction, SUPABASE_SYSTEM_SCHEMAS, dbGenerateDiagram, DbDiagramGenerateOutputSchema, createDbSnapshot, syncDatabase, emitDbPushFailureCapsule, emitDbAnnotations, writeDbPushStepSummary, exportDbReportJson, DbSyncOutputSchema, databasePaths, detectRequiredServices, formatDetectionResults, dbStart, DbLifecycleStartOutputSchema, dbStop, DbLifecycleStopOutputSchema, dbReset, DbLifecycleResetOutputSchema, dbValidateSchemas, DbSchemaValidateOutputSchema, DbSchemaRisksOutputSchema, dbDetectSchemaRisks, dbApplySchemas, DbSchemaApplyOutputSchema, dbGenerateTypes, DbSchemaGenerateOutputSchema, extractSchemaFilter, dbSeedInit, DbSeedInitOutputSchema, dbSeedValidate, DbSeedValidateOutputSchema, dbSeedGenerate, DbSeedGenerateOutputSchema, dbVerifySeeds, DbSeedVerifyOutputSchema, DbSnapshotCreateOutputSchema, restoreDbSnapshot, DbSnapshotRestoreOutputSchema, listDbSnapshots, DbSnapshotListOutputSchema, dbGeneratePgTapTests, DbTestGenOutputSchema, dbUpdateGoldenRecord, DbTestUpdateGoldenOutputSchema, repairRunaConfig, detectExistingInitConfig, initProject, validateInitResult, linkCliGlobally, LinkCliOutputSchema, unlinkCliGlobally, UnlinkCliOutputSchema, checkRepoStatus, CheckRepoStatusOutputSchema, enableTelemetry, disableTelemetry, getTelemetryStatus, uploadTelemetry, TelemetryUploadOutputSchema, runTest, TestRunOutputSchema, runTestService, TestServiceOutputSchema, runTestIntegration, TestIntegrationOutputSchema, runTestStatic, TestStaticOutputSchema, generateOwaspTop10Tests, TestOwaspGenerateOutputSchema, updateGoldenRecord, generateE2ETests, generateSecurityTests, generateUnitTests, generateApiTests, generateComponentTests, generateE2EScaffold, validateConfig, ValidateConfigOutputSchema, deploySchemaToProduction, WorkflowNotifyOutputSchema, 
devopsSync, workflowSync, validateInfrastructure, emitWorkflowValidateFailureCapsule, emitWorkflowAnnotations, writeWorkflowValidateStepSummary, exportWorkflowReportJson, WorkflowValidateInfrastructureOutputSchema, createSuccessEnvelopeSchema, CLI_CONTRACT_VERSION, runChecks, RunCheckOutputSchema, formatDuration as formatDuration$1, GITHUB_API, loadRunaConfig, getClassificationForProfile, BASE_PORTS, loadRunaConfigOrThrow, recordSchemaAudit, RecordSchemaAuditOutputSchema, createBackup, CreateBackupOutputSchema, listBackups, ListBackupsOutputSchema, getBackupMetadata, restoreBackup, RestoreBackupOutputSchema, deleteBackup, DeleteBackupOutputSchema, detectSchemaNames, resolveAvailablePorts, calculatePortOffset, dbSeedApply, writeDbSeedStepSummary, DbSeedApplyOutputSchema, emitDbSeedFailureCapsule, syncEnvironment, EnvSyncOutputSchema, detectDatabasePackage, findProjectRoot as findProjectRoot$1, TelemetryEnableOutputSchema, TelemetryDisableOutputSchema, TelemetryStatusOutputSchema, workflowNotify, DevOpsSyncOutputSchema, WorkflowSyncOutputSchema, formatCLIError, getStatusIcon as getStatusIcon$1, findWorkspaceRoot as findWorkspaceRoot$1, checkExtensionConfig, getPortsWithOffset, UpgradeTransaction, readRunaVersion, syncTemplates, SyncOutputSchema, DATABASE_PACKAGE_CANDIDATES, ErrorEnvelopeSchema, preCheckSync, findConflictFiles, TestUnitGenOutputSchema, TestE2EGenerateOutputSchema, TestSecurityGenOutputSchema, TestApiGenOutputSchema, TestComponentGenOutputSchema } from '@runa-ai/runa';
10
10
  import { z } from 'zod';
11
11
  import fs9, { mkdir, writeFile, appendFile, readFile, rm, stat, realpath, cp, readdir, lstat } from 'fs/promises';
12
12
  import { promisify } from 'util';
@@ -24,7 +24,7 @@ import { expand } from 'dotenv-expand';
24
24
  import { resolve4 } from 'dns/promises';
25
25
  import { isIP } from 'net';
26
26
  import postgres from 'postgres';
27
- import crypto, { randomBytes, createHash } from 'crypto';
27
+ import crypto, { randomBytes } from 'crypto';
28
28
  import os, { tmpdir } from 'os';
29
29
  import { introspectDatabase, HonoRouteAnalyzer } from '@runa-ai/runa/test-generators';
30
30
  import { isTable, getTableUniqueName, getTableName } from 'drizzle-orm';
@@ -33,7 +33,7 @@ import { createJiti } from 'jiti';
33
33
  import ora from 'ora';
34
34
  import { stdout, stdin } from 'process';
35
35
  import * as readline from 'readline/promises';
36
- import { clearInjectionRegistry, clearUnifiedRegistry, postProcessRegistries, isPageFile, collectPageInfo, isLayoutFile, collectLayoutInfo, isApiRouteFile, collectApiRouteInfo, isMiddlewareFile, collectAuthBoundaries, hasMachineDefinition, collectMachineDefinition, createReadAndParseFile, createResolveImportPath, transformSync, getInjectionRegistry, buildManifest, getAllMachineDefinitions, generateSelectorTypeScript, getUnifiedRegistry, buildMachineLinks, registerInjection } from '@runa-ai/runa-xstate-test-plugin/standalone';
36
+ import { clearInjectionRegistry, clearUnifiedRegistry, postProcessRegistries, isPageFile, isLayoutFile, isApiRouteFile, isMiddlewareFile, collectPageInfo, collectLayoutInfo, collectApiRouteInfo, collectAuthBoundaries, hasMachineDefinition, collectMachineDefinition, collectComponentInfo, createReadAndParseFile, createResolveImportPath, transformSync, getInjectionRegistry, buildManifest, getAllMachineDefinitions, generateSelectorTypeScript, getUnifiedRegistry, buildMachineLinks, registerInjection } from '@runa-ai/runa-xstate-test-plugin/standalone';
37
37
  import { listSessions, formatDuration as formatDuration$2, cleanupStaleSessions, removeSession, isSessionCheckDisabled, getCurrentSessionId, checkActiveSessions, createSession, addActivity, checkConflicts, formatConflictDetails } from '@runa-ai/runa/session';
38
38
  import { render, Box, Text } from 'ink';
39
39
  import Spinner from 'ink-spinner';
@@ -124,7 +124,7 @@ function extractPort(rawUrl) {
124
124
  }
125
125
  }
126
126
  function parseTomlPort(content, section, key) {
127
- const sectionRegex = new RegExp(`\\[${section}\\]([\\s\\S]*?)(?=\\n\\[|$)`, "m");
127
+ const sectionRegex = new RegExp(`\\[${section}\\]([\\s\\S]*?)(?=\\n\\[|$)`);
128
128
  const sectionMatch = sectionRegex.exec(content);
129
129
  if (!sectionMatch) return null;
130
130
  const sectionContent = sectionMatch[1];
@@ -287,9 +287,9 @@ var init_local_supabase = __esm({
287
287
  "src/commands/env/constants/local-supabase.ts"() {
288
288
  init_esm_shims();
289
289
  DEFAULT_HOST = "127.0.0.1";
290
- DEFAULT_API_PORT = 54321;
291
- DEFAULT_DB_PORT = 54322;
292
- DEFAULT_STUDIO_PORT = 54323;
290
+ DEFAULT_API_PORT = BASE_PORTS.api;
291
+ DEFAULT_DB_PORT = BASE_PORTS.db;
292
+ DEFAULT_STUDIO_PORT = BASE_PORTS.studio;
293
293
  LOCAL_SUPABASE_ANON_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0";
294
294
  LOCAL_SUPABASE_SERVICE_ROLE_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImV4cCI6MTk4MzgxMjk5Nn0.EGIM96RAZx35lJzdJsyH-qQwv8Hdp7fsn3W0YpN81IU";
295
295
  LOCAL_SUPABASE_ENV_VALUES = getLocalSupabaseEnvValues();
@@ -1161,7 +1161,7 @@ var CLI_VERSION, HAS_ADMIN_COMMAND;
1161
1161
  var init_version = __esm({
1162
1162
  "src/version.ts"() {
1163
1163
  init_esm_shims();
1164
- CLI_VERSION = "0.5.52";
1164
+ CLI_VERSION = "0.5.57";
1165
1165
  HAS_ADMIN_COMMAND = false;
1166
1166
  }
1167
1167
  });
@@ -4425,9 +4425,19 @@ var ERROR_CATALOG = {
4425
4425
  title: "Docker is not running",
4426
4426
  template: "Docker daemon is not running or not accessible",
4427
4427
  suggestions: [
4428
- "Start Docker Desktop",
4429
- "Run: open -a Docker (macOS)",
4430
- "Run: systemctl start docker (Linux)"
4428
+ "Start Colima: colima start --cpu 4 --memory 8 --disk 60 --vm-type vz --mount-type virtiofs",
4429
+ "Install Colima: brew install colima docker"
4430
+ ],
4431
+ docUrl: "https://runa.dev/docs/errors/docker"
4432
+ },
4433
+ DOCKER_DESKTOP_FORBIDDEN: {
4434
+ code: "ERR_RUNA_DOCKER_DESKTOP_FORBIDDEN",
4435
+ exitCode: EXIT_CODES.EXTERNAL_TOOL_ERROR,
4436
+ title: "Docker Desktop is not supported",
4437
+ template: "Docker Desktop is not supported. runa requires Colima.",
4438
+ suggestions: [
4439
+ "Install Colima: brew install colima docker",
4440
+ "Start Colima: colima start --cpu 4 --memory 8 --disk 60 --vm-type vz --mount-type virtiofs"
4431
4441
  ],
4432
4442
  docUrl: "https://runa.dev/docs/errors/docker"
4433
4443
  },
@@ -4441,7 +4451,7 @@ var ERROR_CATALOG = {
4441
4451
  supabase: "brew install supabase/tap/supabase",
4442
4452
  vercel: "pnpm add -g vercel",
4443
4453
  gh: "brew install gh",
4444
- docker: "Install Docker Desktop from https://docker.com",
4454
+ docker: "Install Colima: brew install colima docker",
4445
4455
  dotenvx: "pnpm add -g @dotenvx/dotenvx",
4446
4456
  "pg-schema-diff": "brew install pg-schema-diff"
4447
4457
  };
@@ -5290,7 +5300,7 @@ function emitJsonSuccess(cmd, dataSchema, data) {
5290
5300
  init_esm_shims();
5291
5301
  var BuildPhaseSchema = z.enum(["types", "lint", "build", "db", "manifest"]);
5292
5302
  var VALID_BUILD_PHASES = ["types", "lint", "build", "db", "manifest"];
5293
- z.object({
5303
+ var BuildInputSchema = z.object({
5294
5304
  /** Enable E2E mode (NEXT_PUBLIC_E2E_TEST=true, TURBO_FORCE=true) */
5295
5305
  e2e: z.boolean().default(false),
5296
5306
  /** Clear build caches (.next, .turbo, .runa/manifests) */
@@ -5351,6 +5361,87 @@ var BuildOutputSchema = z.object({
5351
5361
  // src/commands/build/machine.ts
5352
5362
  init_esm_shims();
5353
5363
 
5364
+ // src/internal/machines/index.ts
5365
+ init_esm_shims();
5366
+
5367
+ // src/internal/machines/machine-runner.ts
5368
+ init_esm_shims();
5369
+ function getOutputOrThrow(snapshot2) {
5370
+ const s = snapshot2;
5371
+ if (s.output === void 0) {
5372
+ throw new CLIError("Machine completed without output", "MACHINE_NO_OUTPUT", [
5373
+ "Ensure the machine defines an `output:` function."
5374
+ ]);
5375
+ }
5376
+ return s.output;
5377
+ }
5378
+ async function runMachine(params) {
5379
+ const timeoutMs = params.timeoutMs ?? 10 * 60 * 1e3;
5380
+ return new Promise((resolve12, reject) => {
5381
+ const actor = createActor(params.machine, { input: params.input });
5382
+ const timer = setTimeout(() => {
5383
+ try {
5384
+ actor.stop?.();
5385
+ } catch {
5386
+ }
5387
+ reject(
5388
+ new CLIError("Machine execution timed out", "MACHINE_TIMEOUT", [
5389
+ `Timeout: ${timeoutMs}ms`,
5390
+ "Consider increasing timeoutMs for long-running operations."
5391
+ ])
5392
+ );
5393
+ }, timeoutMs);
5394
+ const sub = actor.subscribe((snapshot2) => {
5395
+ try {
5396
+ params.onSnapshot?.(snapshot2);
5397
+ const stateName = params.helpers.getStateName(snapshot2);
5398
+ params.onTransition?.(stateName);
5399
+ if (params.helpers.isComplete(snapshot2)) {
5400
+ clearTimeout(timer);
5401
+ sub.unsubscribe();
5402
+ const out = getOutputOrThrow(snapshot2);
5403
+ resolve12(out);
5404
+ }
5405
+ } catch (err) {
5406
+ clearTimeout(timer);
5407
+ sub.unsubscribe();
5408
+ reject(err);
5409
+ }
5410
+ });
5411
+ try {
5412
+ actor.start();
5413
+ actor.send({ type: "START" });
5414
+ } catch (err) {
5415
+ clearTimeout(timer);
5416
+ sub.unsubscribe();
5417
+ reject(err);
5418
+ }
5419
+ });
5420
+ }
5421
+
5422
+ // src/internal/machines/snapshot-helpers.ts
5423
+ init_esm_shims();
5424
+ function getSnapshotStateName(snapshot2) {
5425
+ if (typeof snapshot2.value === "string") {
5426
+ return snapshot2.value;
5427
+ }
5428
+ if (!snapshot2.value || typeof snapshot2.value !== "object") {
5429
+ return "unknown";
5430
+ }
5431
+ const entries = Object.entries(snapshot2.value);
5432
+ if (entries.length === 0) {
5433
+ return "unknown";
5434
+ }
5435
+ const [parent, child] = entries[0];
5436
+ if (typeof child === "string") {
5437
+ return `${parent}.${child}`;
5438
+ }
5439
+ return parent;
5440
+ }
5441
+ function isSnapshotComplete(snapshot2) {
5442
+ return snapshot2.status === "done";
5443
+ }
5444
+
5354
5445
  // src/commands/build/actors/index.ts
5355
5446
  init_esm_shims();
5356
5447
 
@@ -6738,6 +6829,14 @@ var e2eMeta = {
6738
6829
  nextStates: []
6739
6830
  }
6740
6831
  };
6832
+ function normalizeBuildMachineInput(input3) {
6833
+ const normalizedInput = BuildInputSchema.parse(input3?.input ?? {});
6834
+ const repoRoot = input3?.repoRoot ?? normalizedInput.targetDir ?? process.cwd();
6835
+ return {
6836
+ input: normalizedInput,
6837
+ repoRoot
6838
+ };
6839
+ }
6741
6840
  var buildMachine = setup({
6742
6841
  types: {},
6743
6842
  actors: {
@@ -6771,9 +6870,10 @@ var buildMachine = setup({
6771
6870
  id: "build",
6772
6871
  initial: "idle",
6773
6872
  context: ({ input: input3 }) => {
6774
- const repoRoot = input3.repoRoot;
6873
+ const normalizedInput = normalizeBuildMachineInput(input3);
6874
+ const repoRoot = normalizedInput.repoRoot;
6775
6875
  return {
6776
- input: input3.input,
6876
+ input: normalizedInput.input,
6777
6877
  repoRoot,
6778
6878
  tmpDir: ".runa/tmp/build",
6779
6879
  hasDatabase: detectDatabase(repoRoot),
@@ -7350,20 +7450,10 @@ var buildMachine = setup({
7350
7450
  output: ({ context }) => createOutput(context)
7351
7451
  });
7352
7452
  function getStateName(snapshot2) {
7353
- if (typeof snapshot2.value === "string") {
7354
- return snapshot2.value;
7355
- }
7356
- const topLevel = Object.keys(snapshot2.value)[0];
7357
- if (topLevel && typeof snapshot2.value === "object") {
7358
- const nested = snapshot2.value[topLevel];
7359
- if (nested && typeof nested === "string") {
7360
- return `${topLevel}.${nested}`;
7361
- }
7362
- }
7363
- return topLevel ?? "unknown";
7453
+ return getSnapshotStateName(snapshot2);
7364
7454
  }
7365
7455
  function isComplete(snapshot2) {
7366
- return snapshot2.status === "done";
7456
+ return isSnapshotComplete(snapshot2);
7367
7457
  }
7368
7458
 
7369
7459
  // src/commands/build/commands/build.ts
@@ -7508,15 +7598,15 @@ function printSummary(logger16, output3) {
7508
7598
  }
7509
7599
  }
7510
7600
  function findRepoRoot(startDir) {
7511
- const { existsSync: existsSync52, readFileSync: readFileSync29 } = __require("fs");
7601
+ const { existsSync: existsSync53, readFileSync: readFileSync29 } = __require("fs");
7512
7602
  const { join: join23, dirname: dirname5 } = __require("path");
7513
7603
  let current = startDir;
7514
7604
  while (current !== dirname5(current)) {
7515
- if (existsSync52(join23(current, "turbo.json"))) {
7605
+ if (existsSync53(join23(current, "turbo.json"))) {
7516
7606
  return current;
7517
7607
  }
7518
7608
  const pkgPath = join23(current, "package.json");
7519
- if (existsSync52(pkgPath)) {
7609
+ if (existsSync53(pkgPath)) {
7520
7610
  try {
7521
7611
  const pkg = JSON.parse(readFileSync29(pkgPath, "utf-8"));
7522
7612
  if (pkg.workspaces) {
@@ -7707,7 +7797,7 @@ init_esm_shims();
7707
7797
 
7708
7798
  // src/commands/dev/contract.ts
7709
7799
  init_esm_shims();
7710
- z.object({
7800
+ var DevInputSchema = z.object({
7711
7801
  /** Port for Next.js dev server (default: 3000) */
7712
7802
  port: z.number().int().positive().default(3e3),
7713
7803
  /** Skip Supabase start */
@@ -8099,48 +8189,6 @@ function determineAppCommand(mode, isMonorepo2, rootScripts, appScripts, repoRoo
8099
8189
  useRootScript: false
8100
8190
  };
8101
8191
  }
8102
- var NEXT_CRITICAL_FILES = ["routes-manifest.json", "build-manifest.json"];
8103
- function cleanStaleNextDevState(appDir) {
8104
- const nextDir = path11__default.join(appDir, ".next");
8105
- if (!existsSync(nextDir)) {
8106
- return { cleaned: false };
8107
- }
8108
- for (const file of NEXT_CRITICAL_FILES) {
8109
- if (!existsSync(path11__default.join(nextDir, file))) {
8110
- cleanNextDir(nextDir, `Missing ${file}`);
8111
- return { cleaned: true, reason: `Missing ${file}` };
8112
- }
8113
- }
8114
- const serverDir = path11__default.join(nextDir, "server");
8115
- if (!existsSync(serverDir)) {
8116
- try {
8117
- const nextStat = statSync(nextDir);
8118
- const ageHours = (Date.now() - nextStat.mtimeMs) / (1e3 * 60 * 60);
8119
- if (ageHours > 1) {
8120
- cleanNextDir(nextDir, "Stale .next without server directory");
8121
- return { cleaned: true, reason: "Stale .next without server directory" };
8122
- }
8123
- } catch {
8124
- }
8125
- }
8126
- return { cleaned: false };
8127
- }
8128
- function cleanNextDir(nextDir, reason) {
8129
- console.log(`[runa] Stale .next detected: ${reason}`);
8130
- console.log("[runa] Cleaning up .next directory...");
8131
- try {
8132
- rmSync(nextDir, { recursive: true, force: true, maxRetries: 10, retryDelay: 100 });
8133
- console.log("[runa] Cleanup complete");
8134
- } catch {
8135
- const staleDir = `${nextDir}-stale-${Date.now()}`;
8136
- console.log(`[runa] Could not remove .next, quarantining to ${path11__default.basename(staleDir)}`);
8137
- try {
8138
- renameSync(nextDir, staleDir);
8139
- } catch {
8140
- console.warn("[runa] Failed to quarantine .next. Run: rm -rf .next");
8141
- }
8142
- }
8143
- }
8144
8192
  async function startAppBackground(params) {
8145
8193
  const mode = params.mode ?? "start";
8146
8194
  const isMonorepo2 = params.appDir !== params.repoRoot;
@@ -8334,7 +8382,11 @@ var appStartActor = fromPromise(
8334
8382
  const { repoRoot, appDir, port, tmpDir, stream } = input3;
8335
8383
  const fullTmpDir = path11__default.join(repoRoot, tmpDir);
8336
8384
  await mkdir(fullTmpDir, { recursive: true });
8337
- cleanStaleNextDevState(appDir);
8385
+ const nextDir = path11__default.join(appDir, ".next");
8386
+ if (existsSync(nextDir)) {
8387
+ rmSync(nextDir, { recursive: true, force: true });
8388
+ console.log("[runa dev] Cleaned .next cache for fresh start");
8389
+ }
8338
8390
  const result = await startAppBackground({
8339
8391
  repoRoot,
8340
8392
  appDir,
@@ -8364,6 +8416,14 @@ var shutdownActor = fromPromise(async ({ input: input3 }) => {
8364
8416
  });
8365
8417
  }
8366
8418
  });
8419
+ function normalizeDevMachineInput(input3) {
8420
+ const normalizedInput = DevInputSchema.parse(input3?.input ?? {});
8421
+ const repoRoot = input3?.repoRoot ?? normalizedInput.targetDir ?? process.cwd();
8422
+ return {
8423
+ input: normalizedInput,
8424
+ repoRoot
8425
+ };
8426
+ }
8367
8427
  var devMachine = setup({
8368
8428
  types: {},
8369
8429
  actors: {
@@ -8381,9 +8441,10 @@ var devMachine = setup({
8381
8441
  id: "dev",
8382
8442
  initial: "idle",
8383
8443
  context: ({ input: input3 }) => {
8384
- const repoRoot = input3.repoRoot ?? process.cwd();
8444
+ const normalizedInput = normalizeDevMachineInput(input3);
8445
+ const repoRoot = normalizedInput.repoRoot;
8385
8446
  return {
8386
- input: input3.input,
8447
+ input: normalizedInput.input,
8387
8448
  repoRoot,
8388
8449
  tmpDir: ".runa/tmp/dev",
8389
8450
  hasDatabase: detectDatabase(repoRoot),
@@ -8790,6 +8851,23 @@ function isPathContained(basePath, targetPath) {
8790
8851
  const realTarget = safeRealpath(normalizedTarget);
8791
8852
  return realTarget === realBase || realTarget.startsWith(realBase + sep);
8792
8853
  }
8854
+ function validateUserFilePath(filePath, baseDir) {
8855
+ if (!filePath || filePath.trim() === "") {
8856
+ throw new Error("File path cannot be empty");
8857
+ }
8858
+ if (!hasNoDangerousChars(filePath)) {
8859
+ throw new Error(
8860
+ "File path contains dangerous characters. Shell metacharacters and control characters are not allowed."
8861
+ );
8862
+ }
8863
+ const resolvedPath = isAbsolute(filePath) ? resolve(filePath) : resolve(baseDir, filePath);
8864
+ if (!isPathContained(baseDir, resolvedPath)) {
8865
+ throw new Error(
8866
+ "File path resolves outside the allowed directory. Path must be within the project root."
8867
+ );
8868
+ }
8869
+ return resolvedPath;
8870
+ }
8793
8871
 
8794
8872
  // src/config/env-files.ts
8795
8873
  init_workspace_detector();
@@ -9741,6 +9819,89 @@ var EXCLUDED_SCHEMAS = /* @__PURE__ */ new Set([
9741
9819
  "pgbouncer",
9742
9820
  "cron"
9743
9821
  ]);
9822
+ function stripSqlCommentsPreserveStrings(content) {
9823
+ let result = "";
9824
+ let i = 0;
9825
+ let inSingleQuote = false;
9826
+ let inDoubleQuote = false;
9827
+ let inDollarQuote = false;
9828
+ let dollarTag = "";
9829
+ while (i < content.length) {
9830
+ const char = content[i] ?? "";
9831
+ const next = content[i + 1] ?? "";
9832
+ if (!inSingleQuote && !inDoubleQuote && !inDollarQuote && char === "-" && next === "-") {
9833
+ while (i < content.length && content[i] !== "\n") {
9834
+ result += " ";
9835
+ i++;
9836
+ }
9837
+ continue;
9838
+ }
9839
+ if (!inSingleQuote && !inDoubleQuote && !inDollarQuote && char === "/" && next === "*") {
9840
+ result += " ";
9841
+ result += " ";
9842
+ i += 2;
9843
+ while (i < content.length) {
9844
+ const blockChar = content[i] ?? "";
9845
+ const blockNext = content[i + 1] ?? "";
9846
+ if (blockChar === "*" && blockNext === "/") {
9847
+ result += " ";
9848
+ result += " ";
9849
+ i += 2;
9850
+ break;
9851
+ }
9852
+ result += blockChar === "\n" ? "\n" : " ";
9853
+ i++;
9854
+ }
9855
+ continue;
9856
+ }
9857
+ if (!inSingleQuote && !inDoubleQuote && char === "$") {
9858
+ if (inDollarQuote) {
9859
+ const closeTag = `$${dollarTag}$`;
9860
+ if (content.slice(i).startsWith(closeTag)) {
9861
+ result += closeTag;
9862
+ i += closeTag.length;
9863
+ inDollarQuote = false;
9864
+ dollarTag = "";
9865
+ continue;
9866
+ }
9867
+ } else {
9868
+ const tagMatch = content.slice(i).match(/^\$([a-zA-Z_][a-zA-Z0-9_]*)?\$/);
9869
+ if (tagMatch) {
9870
+ inDollarQuote = true;
9871
+ dollarTag = tagMatch[1] ?? "";
9872
+ result += tagMatch[0];
9873
+ i += tagMatch[0].length;
9874
+ continue;
9875
+ }
9876
+ }
9877
+ }
9878
+ if (!inDoubleQuote && !inDollarQuote && char === "'") {
9879
+ if (inSingleQuote && next === "'") {
9880
+ result += "''";
9881
+ i += 2;
9882
+ continue;
9883
+ }
9884
+ inSingleQuote = !inSingleQuote;
9885
+ result += char;
9886
+ i++;
9887
+ continue;
9888
+ }
9889
+ if (!inSingleQuote && !inDollarQuote && char === '"') {
9890
+ if (inDoubleQuote && next === '"') {
9891
+ result += '""';
9892
+ i += 2;
9893
+ continue;
9894
+ }
9895
+ inDoubleQuote = !inDoubleQuote;
9896
+ result += char;
9897
+ i++;
9898
+ continue;
9899
+ }
9900
+ result += char;
9901
+ i++;
9902
+ }
9903
+ return result;
9904
+ }
9744
9905
  function detectAppSchemas(schemasDir, verbose) {
9745
9906
  const schemas = /* @__PURE__ */ new Set(["public"]);
9746
9907
  if (!existsSync(schemasDir)) {
@@ -9749,12 +9910,13 @@ function detectAppSchemas(schemasDir, verbose) {
9749
9910
  const files = readdirSync(schemasDir).filter((f) => f.endsWith(".sql"));
9750
9911
  for (const file of files) {
9751
9912
  const content = readFileSync(join(schemasDir, file), "utf-8");
9752
- const contentWithoutComments = content.replace(/--[^\n]*/g, "").replace(/\/\*[\s\S]*?\*\//g, "");
9913
+ const contentWithoutComments = stripSqlCommentsPreserveStrings(content);
9753
9914
  const matches = contentWithoutComments.matchAll(
9754
- /^\s*CREATE\s+SCHEMA\s+(?:IF\s+NOT\s+EXISTS\s+)?(\w+)/gim
9915
+ /^\s*CREATE\s+SCHEMA\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:"((?:[^"]|"")*)"|([a-zA-Z_][a-zA-Z0-9_]*))/gim
9755
9916
  );
9756
9917
  for (const match of Array.from(matches)) {
9757
- const schemaName = match[1].toLowerCase();
9918
+ const schemaNameRaw = (match[1] ?? match[2] ?? "").replace(/""/g, '"');
9919
+ const schemaName = schemaNameRaw.toLowerCase();
9758
9920
  if (!EXCLUDED_SCHEMAS.has(schemaName)) {
9759
9921
  schemas.add(schemaName);
9760
9922
  }
@@ -12733,6 +12895,20 @@ function getSchemaGitDiff(repoRoot) {
12733
12895
  return null;
12734
12896
  }
12735
12897
  }
12898
/**
 * Extracts PostgreSQL role names referenced in a SQL file.
 *
 * Two sources are scanned:
 *  - `CREATE ROLE <name> WITH ...` statements;
 *  - existence checks of the form `rolname = '<name>'`.
 *
 * @param {string} content - Raw SQL file contents.
 * @returns {string[]} Lowercased role names in first-seen order, de-duplicated.
 *
 * FIX: the original de-duplicated only the second scan, so repeated
 * `CREATE ROLE` statements produced duplicate entries; a Set now guards both
 * scans (and replaces the O(n^2) `Array.includes` membership test).
 */
function extractRolesFromSql(content) {
  const seen = new Set();
  const roles = [];
  const addRole = (name) => {
    const normalized = name.toLowerCase();
    if (!seen.has(normalized)) {
      seen.add(normalized);
      roles.push(normalized);
    }
  };
  // CREATE ROLE <name> WITH ... statements.
  for (const match of content.matchAll(/CREATE\s+ROLE\s+(\w+)\s+WITH/gi)) {
    if (match[1]) addRole(match[1]);
  }
  // Roles referenced by existence checks, e.g. `WHERE rolname = 'name'`.
  for (const match of content.matchAll(/rolname\s*=\s*'(\w+)'/gi)) {
    if (match[1]) addRole(match[1]);
  }
  return roles;
}
12736
12912
  function getIdempotentRoleNames2(repoRoot) {
12737
12913
  const idempotentDir = path11__default.join(repoRoot, "supabase", "schemas", "idempotent");
12738
12914
  const roles = [];
@@ -12741,17 +12917,12 @@ function getIdempotentRoleNames2(repoRoot) {
12741
12917
  if (!fs14.existsSync(idempotentDir)) return [];
12742
12918
  const files = fs14.readdirSync(idempotentDir).filter((f) => f.endsWith(".sql"));
12743
12919
  for (const file of files) {
12744
- const content = fs14.readFileSync(path11__default.join(idempotentDir, file), "utf-8");
12745
- const roleMatches = content.matchAll(/CREATE\s+ROLE\s+(\w+)\s+WITH/gi);
12746
- for (const match of roleMatches) {
12747
- if (match[1]) roles.push(match[1].toLowerCase());
12748
- }
12749
- const existsMatches = content.matchAll(/rolname\s*=\s*'(\w+)'/gi);
12750
- for (const match of existsMatches) {
12751
- if (match[1] && !roles.includes(match[1].toLowerCase())) {
12752
- roles.push(match[1].toLowerCase());
12753
- }
12920
+ const filePath = path11__default.join(idempotentDir, file);
12921
+ if (!isPathContained(idempotentDir, filePath)) {
12922
+ continue;
12754
12923
  }
12924
+ const content = fs14.readFileSync(filePath, "utf-8");
12925
+ roles.push(...extractRolesFromSql(content));
12755
12926
  }
12756
12927
  } catch {
12757
12928
  }
@@ -15067,6 +15238,24 @@ function getSupabaseUrlWithFallback(context) {
15067
15238
  function getSupabaseAnonKeyWithFallback(context) {
15068
15239
  return context.supabase?.anonKey || DEFAULT_LOCAL_ANON_KEY;
15069
15240
  }
15241
/**
 * Assembles the environment map handed to runtime actors (build, app start,
 * test layers). Starts from the sanitized env snapshot captured at the CLI
 * entry point (`context.input.runtimeEnv`) and layers the database/Supabase
 * connection values on top.
 *
 * @param {object} context - CI machine context.
 * @param {object} [options]
 * @param {boolean} [options.enablePublicE2EFlag] - Sets NEXT_PUBLIC_E2E_TEST="true".
 * @param {boolean} [options.enableServerE2EFlag] - Sets E2E_TEST="true".
 * @param {string}  [options.baseUrl] - When truthy, exported as BASE_URL.
 * @returns {Record<string, string>} Environment map for the child process.
 */
function buildRuntimeEnv(context, options = {}) {
  const { enablePublicE2EFlag, enableServerE2EFlag, baseUrl } = options;
  const runtimeEnv = {
    ...context.input.runtimeEnv ?? {},
    DATABASE_URL: getDatabaseUrlForRuntime(context),
    NEXT_PUBLIC_SUPABASE_URL: getSupabaseUrlWithFallback(context),
    NEXT_PUBLIC_SUPABASE_ANON_KEY: getSupabaseAnonKeyWithFallback(context)
  };
  if (enablePublicE2EFlag) {
    runtimeEnv.NEXT_PUBLIC_E2E_TEST = "true";
  }
  if (enableServerE2EFlag) {
    runtimeEnv.E2E_TEST = "true";
  }
  if (baseUrl) {
    runtimeEnv.BASE_URL = baseUrl;
  }
  return runtimeEnv;
}
15070
15259
  function computeExitCodeFromLayerResults(layerResults) {
15071
15260
  const classification = getClassificationForProfile("runa-strict");
15072
15261
  const classificationMap = new Map(classification.map((c) => [c.layer, c.level]));
@@ -15555,7 +15744,7 @@ ${generateProgressCommentBody(progressInput)}`;
15555
15744
  tmpDir: assertTmpDir(context),
15556
15745
  // Execute if GH_DATABASE_URL_ADMIN is set (dry-run will determine if changes exist)
15557
15746
  // PRD: GH_DATABASE_URL_ADMIN = postgres role (DDL capable)
15558
- shouldExecute: !!(process.env.GH_DATABASE_URL_ADMIN || process.env.GH_DATABASE_URL)
15747
+ shouldExecute: Boolean(context.input.productionDatabaseUrl?.trim())
15559
15748
  }),
15560
15749
  onDone: {
15561
15750
  target: "collectSchemaStats",
@@ -15747,14 +15936,10 @@ ${generateProgressCommentBody(progressInput)}`;
15747
15936
  input: ({ context }) => ({
15748
15937
  repoRoot: assertRepoRoot(context),
15749
15938
  tmpDir: assertTmpDir(context),
15750
- env: {
15751
- ...process.env,
15752
- DATABASE_URL: getDatabaseUrlForRuntime(context),
15753
- NEXT_PUBLIC_SUPABASE_URL: getSupabaseUrlWithFallback(context),
15754
- NEXT_PUBLIC_SUPABASE_ANON_KEY: getSupabaseAnonKeyWithFallback(context),
15755
- // CRITICAL: Required for XState Test Plugin to inject data-state attributes
15756
- NEXT_PUBLIC_E2E_TEST: "true"
15757
- },
15939
+ env: buildRuntimeEnv(context, {
15940
+ // Required for XState Test Plugin to inject data-state attributes.
15941
+ enablePublicE2EFlag: true
15942
+ }),
15758
15943
  isCI: context.input.isCI ?? false,
15759
15944
  skipPlaywright: shouldSkipPlaywrightInstall(context)
15760
15945
  }),
@@ -15805,16 +15990,11 @@ ${generateProgressCommentBody(progressInput)}`;
15805
15990
  tmpDir: assertTmpDir(context),
15806
15991
  appDir: context.app?.appDir ?? assertRepoRoot(context),
15807
15992
  port: context.app?.port ?? 3e3,
15808
- env: {
15809
- ...process.env,
15810
- DATABASE_URL: getDatabaseUrlForRuntime(context),
15811
- NEXT_PUBLIC_SUPABASE_URL: getSupabaseUrlWithFallback(context),
15812
- NEXT_PUBLIC_SUPABASE_ANON_KEY: getSupabaseAnonKeyWithFallback(context),
15813
- // CRITICAL: Required for middleware to bypass auth in E2E tests
15814
- // E2E_TEST is server-only (proxy.ts), NEXT_PUBLIC_E2E_TEST is for build-time (xstate-test-plugin)
15815
- E2E_TEST: "true",
15816
- NEXT_PUBLIC_E2E_TEST: "true"
15817
- }
15993
+ env: buildRuntimeEnv(context, {
15994
+ // E2E_TEST is server-only (proxy.ts), NEXT_PUBLIC_E2E_TEST is for build-time.
15995
+ enableServerE2EFlag: true,
15996
+ enablePublicE2EFlag: true
15997
+ })
15818
15998
  }),
15819
15999
  onDone: [
15820
16000
  {
@@ -15919,12 +16099,7 @@ ${generateProgressCommentBody(progressInput)}`;
15919
16099
  // ci-local: all selected layers, ci-pr: only core layers (1,2,3)
15920
16100
  layers: getLayersForCorePhase(context.selectedLayers, context.mode),
15921
16101
  baseUrl: context.baseUrl ?? `http://localhost:${context.app?.port ?? 3e3}`,
15922
- env: {
15923
- ...process.env,
15924
- DATABASE_URL: getDatabaseUrlForRuntime(context),
15925
- NEXT_PUBLIC_SUPABASE_URL: getSupabaseUrlWithFallback(context),
15926
- NEXT_PUBLIC_SUPABASE_ANON_KEY: getSupabaseAnonKeyWithFallback(context)
15927
- },
16102
+ env: buildRuntimeEnv(context),
15928
16103
  failFast: true
15929
16104
  }),
15930
16105
  onDone: [
@@ -16078,15 +16253,10 @@ ${generateCommentBody(commentInput)}`;
16078
16253
  tmpDir: assertTmpDir(context),
16079
16254
  layers: [E2E_LAYER],
16080
16255
  baseUrl: e2eBaseUrl,
16081
- env: {
16082
- ...process.env,
16083
- DATABASE_URL: getDatabaseUrlForRuntime(context),
16084
- NEXT_PUBLIC_SUPABASE_URL: getSupabaseUrlWithFallback(context),
16085
- NEXT_PUBLIC_SUPABASE_ANON_KEY: getSupabaseAnonKeyWithFallback(context),
16086
- // CRITICAL: Set BASE_URL for Playwright config and test fixtures
16087
- // This ensures cookies are set on the same domain Playwright uses
16088
- BASE_URL: e2eBaseUrl
16089
- },
16256
+ env: buildRuntimeEnv(context, {
16257
+ // Ensures cookies are set on the same domain Playwright uses.
16258
+ baseUrl: e2eBaseUrl
16259
+ }),
16090
16260
  failFast: false
16091
16261
  // Don't fail-fast for E2E (warning-only)
16092
16262
  };
@@ -16280,19 +16450,10 @@ ${generateCommentBody(commentInput)}`;
16280
16450
  output: ({ context }) => createOutput3(context)
16281
16451
  });
16282
16452
// Human-readable name for the machine snapshot's current state (e.g.
// "parent.child"). Delegates to the shared getSnapshotStateName helper.
function getStateName3(snapshot2) {
  return getSnapshotStateName(snapshot2);
}
16294
16455
// True when the machine snapshot has finished running.
// Delegates to the shared isSnapshotComplete helper.
function isComplete3(snapshot2) {
  return isSnapshotComplete(snapshot2);
}
16297
16458
 
16298
16459
  // src/commands/ci/machine/commands/machine-runner.ts
@@ -16541,6 +16702,18 @@ async function flushAndExit(exitCode) {
16541
16702
  process.exit(exitCode);
16542
16703
  }
16543
16704
 
16705
+ // src/commands/ci/machine/commands/runtime-env.ts
16706
+ init_esm_shims();
16707
/**
 * Captures a plain-object snapshot of an environment map (default:
 * process.env), dropping entries whose value is `undefined` so the result
 * validates as Record<string, string>.
 *
 * FIX: uses Object.fromEntries (which defines own data properties) instead
 * of `captured[key] = value` assignment, so an entry literally named
 * "__proto__" is preserved as an own property instead of being silently
 * swallowed by the prototype setter.
 *
 * @param {Record<string, string | undefined>} [source=process.env]
 * @returns {Record<string, string>}
 */
function captureRuntimeEnv(source = process.env) {
  return Object.fromEntries(
    Object.entries(source).filter(([, value]) => value !== void 0)
  );
}
16716
+
16544
16717
  // src/commands/ci/machine/commands/ci-local.ts
16545
16718
  var isGitHubActionsMode = false;
16546
16719
  var currentGroup = null;
@@ -16662,7 +16835,8 @@ function buildMachineInput(options) {
16662
16835
  dbMode: void 0,
16663
16836
  // PRD: GH_DATABASE_URL_ADMIN = postgres role (DDL capable, for pg_dump)
16664
16837
  productionDatabaseUrl: process.env.GH_DATABASE_URL_ADMIN || process.env.GH_DATABASE_URL,
16665
- databaseUrl: process.env.DATABASE_URL
16838
+ databaseUrl: process.env.DATABASE_URL,
16839
+ runtimeEnv: captureRuntimeEnv()
16666
16840
  };
16667
16841
  }
16668
16842
  function handleCiError(error, logger16) {
@@ -16728,6 +16902,8 @@ z.object({
16728
16902
  productionDatabaseUrl: z.string().optional(),
16729
16903
  /** Local database URL */
16730
16904
  databaseUrl: z.string().optional(),
16905
+ /** Sanitized process.env captured at entry point */
16906
+ runtimeEnv: z.record(z.string(), z.string()).optional(),
16731
16907
  // === GitHub Actions Environment ===
16732
16908
  /** GitHub ref (e.g., refs/pull/123/merge) */
16733
16909
  githubRef: z.string().optional(),
@@ -17179,6 +17355,7 @@ function optionsToMachineInput(options) {
17179
17355
  databaseUrl: process.env.DATABASE_URL,
17180
17356
  // PRD: GH_DATABASE_URL_ADMIN = postgres role (DDL capable, for pg-schema-diff dry-run)
17181
17357
  productionDatabaseUrl: process.env.GH_DATABASE_URL_ADMIN || process.env.GH_DATABASE_URL,
17358
+ runtimeEnv: captureRuntimeEnv(),
17182
17359
  githubRef: process.env.GITHUB_REF,
17183
17360
  // FIX: Read action from GITHUB_EVENT_PATH JSON, not non-existent GITHUB_EVENT_ACTION env var
17184
17361
  githubEventAction: getGitHubEventAction(),
@@ -17317,6 +17494,7 @@ function tryResolveDatabaseUrl(environment) {
17317
17494
 
17318
17495
  // src/commands/db/utils/table-registry.ts
17319
17496
  init_esm_shims();
17497
+ init_config_loader();
17320
17498
 
17321
17499
  // src/commands/db/utils/semantic-mapper.ts
17322
17500
  init_esm_shims();
@@ -17403,6 +17581,223 @@ function applyMappingToTables(tables, mapping) {
17403
17581
  }));
17404
17582
  }
17405
17583
 
17584
+ // src/commands/db/utils/schema-sync.ts
17585
+ init_esm_shims();
17586
// A valid unquoted PostgreSQL identifier: a leading letter or underscore
// followed by up to 62 more word characters (63-char limit).
var VALID_PG_IDENTIFIER_PATTERN = /^[a-zA-Z_][a-zA-Z0-9_]{0,62}$/;
/**
 * Asserts that `name` is a safe PostgreSQL identifier; throws otherwise.
 *
 * @param {string} name - Candidate identifier.
 * @param {string} context - Label used in error messages (e.g. "schema name").
 * @throws {Error} When the value is empty, not a string, or fails the pattern.
 */
function validatePgIdentifier(name, context) {
  const isNonEmptyString = typeof name === "string" && name.length > 0;
  if (!isNonEmptyString) {
    throw new Error(`Invalid ${context}: empty or not a string`);
  }
  if (VALID_PG_IDENTIFIER_PATTERN.test(name)) {
    return;
  }
  throw new Error(
    `Invalid ${context} "${name}": must start with letter/underscore and contain only alphanumeric/underscore characters`
  );
}
17597
/**
 * Escapes a string for embedding inside a single-quoted SQL literal:
 * backslashes are doubled and single quotes become '' (the SQL escape form).
 *
 * @param {string} value - Raw string value.
 * @returns {string} Escaped text (without surrounding quotes).
 * @throws {Error} When value is not a string.
 */
function escapePgStringLiteral(value) {
  if (typeof value !== "string") {
    throw new Error("Value must be a string");
  }
  // Single pass is safe: the two escape rules never interact.
  return value.replace(/[\\']/g, (ch) => (ch === "\\" ? "\\\\" : "''"));
}
17603
/**
 * Builds the comma-separated body of a SQL `IN (...)` clause from schema
 * names, validating each as a PostgreSQL identifier and escaping it as a
 * quoted literal.
 *
 * @param {string[]} schemas - Schema names to include.
 * @returns {string} e.g. `'public','app'`
 * @throws {Error} When the list is empty or a name fails validation.
 */
function buildSafeSchemaInClause(schemas) {
  if (schemas.length === 0) {
    throw new Error("No schemas provided for IN clause");
  }
  return schemas
    .map((schemaName) => {
      validatePgIdentifier(schemaName, "schema name");
      return `'${escapePgStringLiteral(schemaName)}'`;
    })
    .join(",");
}
17614
// Error messages for schema-path validation failures. They are generic
// constants with no interpolation, so the offending (possibly
// attacker-controlled) path fragment is not echoed back to the caller.
var ERROR_MESSAGES2 = {
  PATH_TRAVERSAL: "Schema path validation failed",
  SCHEMA_NOT_FOUND: "Schema file not found"
};
17618
/**
 * Heuristic guard against path-traversal input: rejects NUL bytes and any
 * path that still contains ".." after normalization.
 *
 * @param {string} inputPath - Raw, possibly user-supplied path.
 * @returns {boolean} true when the path looks unsafe.
 */
function containsPathTraversal2(inputPath) {
  if (inputPath.includes("\0")) {
    return true;
  }
  const normalizedPath = path11__default.normalize(inputPath);
  return normalizedPath.includes("..");
}
17622
/**
 * Reports whether `filePath` resolves to `baseDir` itself or to a location
 * strictly inside it. Any resolution failure is treated as "outside".
 *
 * @param {string} filePath - Path to test.
 * @param {string} baseDir - Containing directory.
 * @returns {boolean}
 */
function isPathWithinBase(filePath, baseDir) {
  try {
    const target = path11__default.normalize(path11__default.resolve(filePath));
    const root = path11__default.normalize(path11__default.resolve(baseDir));
    if (target === root) {
      return true;
    }
    // Require a separator after the base so "/a/bc" is not inside "/a/b".
    return target.startsWith(root + path11__default.sep);
  } catch {
    return false;
  }
}
17633
// Resolves and validates the Drizzle schema entry point
// (<dbPackagePath>/src/schema/index.ts) against projectRoot, rejecting
// path-traversal attempts before touching the filesystem. Returns the
// absolute path to the schema entry file; throws with the generic
// ERROR_MESSAGES2 constants on failure.
function validateSchemaPath(dbPackagePath, projectRoot = process.cwd()) {
  // Cheap syntactic rejection ("..", NUL bytes) before any path resolution.
  if (containsPathTraversal2(dbPackagePath)) {
    throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
  }
  const schemaEntry = path11__default.join(dbPackagePath, "src", "schema", "index.ts");
  const absoluteSchemaPath = path11__default.resolve(projectRoot, schemaEntry);
  let resolvedProjectRoot;
  try {
    // Resolve symlinks in the root so the containment check compares real
    // locations; fall back to plain resolution when realpath fails.
    resolvedProjectRoot = realpathSync(projectRoot);
  } catch {
    resolvedProjectRoot = path11__default.resolve(projectRoot);
  }
  // Containment check: the entry file must live under the project root.
  if (!isPathWithinBase(absoluteSchemaPath, resolvedProjectRoot)) {
    throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
  }
  if (!existsSync(absoluteSchemaPath)) {
    throw new Error(ERROR_MESSAGES2.SCHEMA_NOT_FOUND);
  }
  return absoluteSchemaPath;
}
17653
/**
 * De-duplicates a list of strings and sorts it with locale-aware comparison.
 *
 * @param {string[]} values
 * @returns {string[]} New array; input is not mutated.
 */
function uniqueSorted(values) {
  const deduped = Array.from(new Set(values));
  deduped.sort((left, right) => left.localeCompare(right));
  return deduped;
}
17656
// Loads the Drizzle schema module via jiti (TypeScript-capable runtime
// import) and collects:
//  - expectedTables: Set of "schema.table" unique names; tables whose unique
//    name begins with "undefined." (no explicit schema) are reported under
//    "public" instead.
//  - expectedEnums: Map enumName -> { name, values }, values de-duplicated
//    and sorted via uniqueSorted.
// Throws when the schema path is invalid or the module fails to load; the
// known "unknown is not defined" drizzle-orm failure gets an actionable hint.
async function extractSchemaTablesAndEnums(dbPackagePath, projectRoot = process.cwd()) {
  const validatedSchemaPath = validateSchemaPath(dbPackagePath, projectRoot);
  const jiti = createJiti(projectRoot, { interopDefault: true });
  let schemaModule;
  try {
    schemaModule = await jiti.import(validatedSchemaPath);
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    const hint = errorMessage.includes("unknown is not defined") ? "\n\nHint: Add 'unknown' to drizzle-orm/pg-core imports:\n import { unknown, ... } from 'drizzle-orm/pg-core'" : "";
    throw new Error(`Failed to load schema from ${validatedSchemaPath}: ${errorMessage}${hint}`);
  }
  const expectedTables = /* @__PURE__ */ new Set();
  const expectedEnums = /* @__PURE__ */ new Map();
  // Walk every export of the schema module and classify tables vs enums.
  for (const value of Object.values(schemaModule)) {
    if (isTable(value)) {
      const unique = String(getTableUniqueName(value));
      // Schema-less tables surface as "undefined.<name>" here; map to public.
      if (unique.startsWith("undefined.")) {
        expectedTables.add(`public.${getTableName(value)}`);
      } else {
        expectedTables.add(unique);
      }
      continue;
    }
    if (isPgEnum(value)) {
      expectedEnums.set(value.enumName, {
        name: value.enumName,
        values: uniqueSorted(value.enumValues)
      });
    }
  }
  return { expectedTables, expectedEnums };
}
17688
// Queries the live database (via psql) for the tables and enums it actually
// contains, limited to the schemas managed by this project.
// Returns { dbTables: Set<"schema.table">, dbEnums: Map<name, {name, values}> }.
// NOTE: the enum query only inspects the `public` schema (WHERE n.nspname =
// 'public'); enums in other schemas are not reported.
async function fetchDbTablesAndEnums(databaseUrl, options) {
  const schemaDir = options?.schemaDir ?? "packages/database/src/schema";
  const managedSchemas = detectSchemaNames(schemaDir, process.cwd());
  // System schemas (Supabase built-ins plus any caller-supplied extras) are
  // excluded from the comparison.
  const systemSchemas = /* @__PURE__ */ new Set([
    ...SUPABASE_SYSTEM_SCHEMAS,
    ...options?.additionalSystemSchemas ?? []
  ]);
  const filteredManagedSchemas = managedSchemas.filter((s) => !systemSchemas.has(s));
  // buildSafeSchemaInClause validates each name, so the interpolation below
  // cannot inject SQL.
  const schemaList = buildSafeSchemaInClause(filteredManagedSchemas);
  const tablesSql = `
    SELECT schemaname || '.' || tablename
    FROM pg_tables
    WHERE schemaname IN (${schemaList})
    ORDER BY schemaname, tablename;`.trim();
  const enumsSql = `
    SELECT t.typname AS enum_name, string_agg(e.enumlabel, ',' ORDER BY e.enumsortorder) AS values
    FROM pg_type t
    JOIN pg_enum e ON t.oid = e.enumtypid
    JOIN pg_namespace n ON n.oid = t.typnamespace
    WHERE n.nspname = 'public'
    GROUP BY t.typname
    ORDER BY t.typname;`.trim();
  // Parse psql "table" mode output: one "schema.table" per non-empty line.
  const tablesOut = await psqlQuery({ databaseUrl, sql: tablesSql, mode: "table" });
  const dbTables = /* @__PURE__ */ new Set();
  for (const line of tablesOut.split("\n")) {
    const v = line.trim();
    if (v.length > 0) dbTables.add(v);
  }
  // Enum rows come back as "name | v1,v2,..." — split on the column separator.
  const enumsOut = await psqlQuery({ databaseUrl, sql: enumsSql, mode: "table" });
  const dbEnums = /* @__PURE__ */ new Map();
  for (const line of enumsOut.split("\n")) {
    const trimmed = line.trim();
    if (trimmed.length === 0) continue;
    const [enumName, valuesCsv] = trimmed.split("|").map((s) => s.trim());
    const values = valuesCsv ? valuesCsv.split(",").map((s) => s.trim()) : [];
    dbEnums.set(enumName, { name: enumName, values: uniqueSorted(values) });
  }
  return { dbTables, dbEnums };
}
17727
/**
 * Compares the Drizzle-declared tables/enums with what actually exists in
 * the database and reports the differences.
 *
 * @param {object} params
 * @param {Set<string>} params.expectedTables - "schema.table" names from the schema.
 * @param {Map<string, {name: string, values: string[]}>} params.expectedEnums
 * @param {Set<string>} params.dbTables - "schema.table" names found in the DB.
 * @param {Map<string, {name: string, values: string[]}>} params.dbEnums
 * @param {string[]} [params.excludeFromOrphanDetection] - Exact names or
 *   `*`-wildcard patterns of DB tables that should not be reported as orphans.
 * @returns {object} Inputs echoed back plus sorted missingTables,
 *   orphanTables, missingEnums, extraEnums, and enumValueMismatches
 *   ({ name, dbValues, schemaValues, added, removed }).
 *
 * FIX: wildcard exclusion patterns are now regex-escaped before `*` is
 * expanded to `.*`, so the literal `.` in a pattern like "audit.log_*" can no
 * longer match arbitrary characters; patterns are also compiled once instead
 * of per-table.
 */
function diffSchema(params) {
  const missingTables = uniqueSorted(
    [...params.expectedTables].filter((t) => !params.dbTables.has(t))
  );
  const exclusions = new Set(params.excludeFromOrphanDetection ?? []);
  const exactExclusions = new Set([...exclusions].filter((e) => !e.includes("*")));
  // Precompile each wildcard pattern: escape regex metacharacters first,
  // then turn the (now-escaped) `\*` back into `.*`.
  const exclusionRegexes = [...exclusions]
    .filter((e) => e.includes("*"))
    .map((pattern) => {
      const escaped = pattern
        .replace(/[.*+?^${}()|[\]\\]/g, "\\$&")
        .replace(/\\\*/g, ".*");
      return new RegExp(`^${escaped}$`);
    });
  const isExcluded = (table) =>
    exactExclusions.has(table) || exclusionRegexes.some((regex) => regex.test(table));
  const orphanTables = uniqueSorted(
    [...params.dbTables].filter((t) => !params.expectedTables.has(t) && !isExcluded(t))
  );
  const expectedEnumNames = new Set(params.expectedEnums.keys());
  const dbEnumNames = new Set(params.dbEnums.keys());
  const missingEnums = uniqueSorted([...expectedEnumNames].filter((n) => !dbEnumNames.has(n)));
  const extraEnums = uniqueSorted([...dbEnumNames].filter((n) => !expectedEnumNames.has(n)));
  // For enums present on both sides, compare the (deduped, sorted) value sets.
  const enumValueMismatches = [];
  for (const name of uniqueSorted([...expectedEnumNames].filter((n) => dbEnumNames.has(n)))) {
    const s = params.expectedEnums.get(name);
    const d = params.dbEnums.get(name);
    if (!s || !d) continue;
    const schemaValues = uniqueSorted(s.values);
    const dbValues = uniqueSorted(d.values);
    const same = schemaValues.length === dbValues.length && schemaValues.every((v, i) => v === dbValues[i]);
    if (same) continue;
    const added = schemaValues.filter((v) => !dbValues.includes(v));
    const removed = dbValues.filter((v) => !schemaValues.includes(v));
    enumValueMismatches.push({ name, dbValues, schemaValues, added, removed });
  }
  return {
    expectedTables: params.expectedTables,
    expectedEnums: params.expectedEnums,
    dbTables: params.dbTables,
    dbEnums: params.dbEnums,
    missingTables,
    orphanTables,
    missingEnums,
    extraEnums,
    enumValueMismatches
  };
}
17774
/**
 * Scans every .sql file directly inside `idempotentDir` (resolved against
 * `projectRoot`) and returns the sorted, de-duplicated "schema.table" names
 * found in CREATE TABLE statements. Unqualified tables are reported under
 * "public". Returns [] when the directory is missing or unreadable.
 *
 * @param {string} idempotentDir - Directory of idempotent SQL files.
 * @param {string} [projectRoot=process.cwd()] - Base for relative paths.
 * @returns {string[]}
 */
function extractTablesFromIdempotentSql(idempotentDir, projectRoot = process.cwd()) {
  const resolvedDir = path11__default.resolve(projectRoot, idempotentDir);
  if (!existsSync(resolvedDir)) {
    return [];
  }
  const createTablePattern = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?\.)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?)/gi;
  const found = new Set();
  try {
    const sqlFiles = readdirSync(resolvedDir).filter((fileName) => fileName.endsWith(".sql"));
    for (const fileName of sqlFiles) {
      const rawSql = readFileSync(path11__default.join(resolvedDir, fileName), "utf-8");
      // Drop -- line comments and /* */ block comments before matching.
      const sqlBody = rawSql.replace(/--.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
      for (const tableMatch of sqlBody.matchAll(createTablePattern)) {
        const schemaName = tableMatch[1] || "public";
        const tableName = tableMatch[2];
        if (tableName) {
          found.add(`${schemaName}.${tableName}`);
        }
      }
    }
  } catch {
    return [];
  }
  return [...found].sort();
}
17800
+
17406
17801
  // src/commands/db/utils/sql-table-extractor.ts
17407
17802
  init_esm_shims();
17408
17803
  var sqlParserUtils = null;
@@ -17425,21 +17820,35 @@ async function getSqlParserUtils() {
17425
17820
  await isAstParserAvailable();
17426
17821
  return sqlParserUtils;
17427
17822
  }
17823
// Regex fragment matching one SQL identifier: either a double-quoted
// identifier (with "" as the escaped-quote form) or a plain unquoted one
// (letter/underscore followed by word characters).
var SQL_IDENTIFIER = String.raw`(?:"(?:[^"]|"")*"|[a-zA-Z_][a-zA-Z0-9_]*)`;
// Precompiled patterns for the regex-based (non-AST) SQL DDL scanner.
// Captured identifiers may still be quoted; call sites run them through
// unquoteIdentifier before comparing.
var SQL_PATTERNS = {
  // CREATE TABLE [IF NOT EXISTS] schema.table_name (
  createTable: new RegExp(
    `CREATE\\s+TABLE\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(${SQL_IDENTIFIER})\\.(${SQL_IDENTIFIER})\\s*\\(`,
    "gi"
  ),
  // PRIMARY KEY (columns)
  primaryKey: /PRIMARY\s+KEY\s*\(([^)]+)\)/gi,
  // FOREIGN KEY (column) REFERENCES schema.table(column) [ON DELETE|UPDATE ...]
  foreignKey: new RegExp(
    `FOREIGN\\s+KEY\\s*\\((${SQL_IDENTIFIER})\\)\\s*REFERENCES\\s+(${SQL_IDENTIFIER})\\.(${SQL_IDENTIFIER})\\s*\\((${SQL_IDENTIFIER})\\)(?:\\s+ON\\s+DELETE\\s+(\\w+(?:\\s+\\w+)?))?(?:\\s+ON\\s+UPDATE\\s+(\\w+(?:\\s+\\w+)?))?`,
    "gi"
  ),
  // REFERENCES schema.table(column) - inline column constraint
  inlineReference: new RegExp(
    `(${SQL_IDENTIFIER})\\s+\\w+[\\w\\s()]*REFERENCES\\s+(${SQL_IDENTIFIER})\\.(${SQL_IDENTIFIER})\\s*\\((${SQL_IDENTIFIER})\\)`,
    "gi"
  ),
  // CREATE [UNIQUE] INDEX name ON schema.table (columns)
  createIndex: new RegExp(
    `CREATE\\s+(UNIQUE\\s+)?INDEX\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(${SQL_IDENTIFIER})\\s+ON\\s+(${SQL_IDENTIFIER})\\.(${SQL_IDENTIFIER})\\s*\\(([^)]+)\\)`,
    "gi"
  ),
  // ALTER TABLE ... ENABLE ROW LEVEL SECURITY
  enableRls: new RegExp(
    `ALTER\\s+TABLE\\s+(${SQL_IDENTIFIER})\\.(${SQL_IDENTIFIER})\\s+ENABLE\\s+ROW\\s+LEVEL\\s+SECURITY`,
    "gi"
  )
};
17445
17854
  return str.replace(/_([a-z])/g, (_, c) => c.toUpperCase());
@@ -17474,6 +17883,96 @@ function extractTableBody(content, startPos) {
17474
17883
/**
 * Canonicalizes a SQL type string: trims, collapses internal whitespace,
 * lowercases, and maps verbose PostgreSQL spellings to common aliases.
 *
 * @param {string} type - Raw SQL type text.
 * @returns {string} Normalized type, e.g. "varchar(255)" or "timestamptz".
 */
function normalizeType(type) {
  const collapsed = type.trim().replace(/\s+/g, " ").toLowerCase();
  return collapsed
    .replace("character varying", "varchar")
    .replace("timestamp with time zone", "timestamptz")
    .replace("timestamp without time zone", "timestamp");
}
17886
/**
 * Strips surrounding double quotes from a SQL identifier and un-escapes any
 * doubled quotes ("" -> "). Unquoted identifiers are merely trimmed.
 *
 * @param {string} identifier - Possibly-quoted identifier.
 * @returns {string} Bare identifier text.
 */
function unquoteIdentifier(identifier) {
  const candidate = identifier.trim();
  const isQuoted = candidate.startsWith('"') && candidate.endsWith('"');
  if (!isQuoted) {
    return candidate;
  }
  return candidate.slice(1, -1).replace(/""/g, '"');
}
17893
// Replaces SQL comments with whitespace while leaving string/identifier
// literals untouched. A character-by-character scanner tracks whether the
// cursor is inside '…', "…", or $tag$…$tag$ quoting so comment markers that
// appear inside literals are NOT stripped. Stripped characters become spaces
// (newlines inside block comments are kept) so byte offsets and line numbers
// of the surviving SQL stay stable for downstream regex matching.
function stripSqlCommentsPreserveStrings2(content) {
  let result = "";
  let i = 0;
  let inSingleQuote = false;
  let inDoubleQuote = false;
  let inDollarQuote = false;
  let dollarTag = ""; // tag between the $…$ delimiters; "" for plain $$
  while (i < content.length) {
    const char = content[i] ?? "";
    const next = content[i + 1] ?? "";
    // `--` line comment (only outside all quoting): blank out to end of line.
    if (!inSingleQuote && !inDoubleQuote && !inDollarQuote && char === "-" && next === "-") {
      while (i < content.length && content[i] !== "\n") {
        result += " ";
        i++;
      }
      continue;
    }
    // `/* … */` block comment: blank out, preserving embedded newlines.
    if (!inSingleQuote && !inDoubleQuote && !inDollarQuote && char === "/" && next === "*") {
      result += " ";
      result += " ";
      i += 2;
      while (i < content.length) {
        const blockChar = content[i] ?? "";
        const blockNext = content[i + 1] ?? "";
        if (blockChar === "*" && blockNext === "/") {
          result += " ";
          result += " ";
          i += 2;
          break;
        }
        result += blockChar === "\n" ? "\n" : " ";
        i++;
      }
      continue;
    }
    // Dollar-quoting: open on $tag$, close only on the matching $tag$.
    if (!inSingleQuote && !inDoubleQuote && char === "$") {
      if (inDollarQuote) {
        const closeTag = `$${dollarTag}$`;
        if (content.slice(i).startsWith(closeTag)) {
          result += closeTag;
          i += closeTag.length;
          inDollarQuote = false;
          dollarTag = "";
          continue;
        }
      } else {
        const tagMatch = content.slice(i).match(/^\$([a-zA-Z_][a-zA-Z0-9_]*)?\$/);
        if (tagMatch) {
          inDollarQuote = true;
          dollarTag = tagMatch[1] ?? "";
          result += tagMatch[0];
          i += tagMatch[0].length;
          continue;
        }
      }
    }
    // Single-quoted literal; '' inside one is an escaped quote, not a close.
    if (!inDoubleQuote && !inDollarQuote && char === "'") {
      if (inSingleQuote && next === "'") {
        result += "''";
        i += 2;
        continue;
      }
      inSingleQuote = !inSingleQuote;
      result += char;
      i++;
      continue;
    }
    // Double-quoted identifier; "" inside one is an escaped quote.
    if (!inSingleQuote && !inDollarQuote && char === '"') {
      if (inDoubleQuote && next === '"') {
        result += '""';
        i += 2;
        continue;
      }
      inDoubleQuote = !inDoubleQuote;
      result += char;
      i++;
      continue;
    }
    // Ordinary character: copy through unchanged.
    result += char;
    i++;
  }
  return result;
}
17477
17976
  function parseIndexColumns(rawColumns) {
17478
17977
  return rawColumns.split(",").map((col) => {
17479
17978
  const trimmed = col.trim();
@@ -17510,8 +18009,8 @@ function findTablesRegex(ctx) {
17510
18009
  const tables = [];
17511
18010
  const regex = new RegExp(SQL_PATTERNS.createTable.source, "gi");
17512
18011
  for (const match of ctx.content.matchAll(regex)) {
17513
- const schema = match[1] ?? "";
17514
- const name = match[2] ?? "";
18012
+ const schema = unquoteIdentifier(match[1] ?? "");
18013
+ const name = unquoteIdentifier(match[2] ?? "");
17515
18014
  if (!schema || !name) continue;
17516
18015
  const lineNumber = getLineNumber(ctx.content, match.index ?? 0);
17517
18016
  const tableBody = extractTableBody(ctx.content, match.index ?? 0);
@@ -17533,10 +18032,10 @@ function parseColumnsRegex(tableBody) {
17533
18032
  const trimmed = line.trim();
17534
18033
  if (shouldSkipColumnLine(trimmed)) continue;
17535
18034
  const columnMatch = trimmed.match(
17536
- /^(\w+)\s+([\w\s()]+?)(?:\s+(NOT\s+NULL|NULL))?(?:\s+DEFAULT\s+[^,]+)?(?:\s+REFERENCES)?(?:,|$)/i
18035
+ /^((?:"(?:[^"]|"")*"|[a-zA-Z_][a-zA-Z0-9_]*))\s+([\w\s()]+?)(?:\s+(NOT\s+NULL|NULL))?(?:\s+DEFAULT\s+[^,]+)?(?:\s+REFERENCES)?(?:,|$)/i
17537
18036
  );
17538
18037
  if (columnMatch) {
17539
- const name = columnMatch[1] ?? "";
18038
+ const name = unquoteIdentifier(columnMatch[1] ?? "");
17540
18039
  if (!name || seen.has(name) || isReservedKeyword(name)) continue;
17541
18040
  seen.add(name);
17542
18041
  const rawType = columnMatch[2] ?? "";
@@ -17558,7 +18057,7 @@ function parsePrimaryKeyRegex(tableBody) {
17558
18057
  const regex = new RegExp(SQL_PATTERNS.primaryKey.source, "gi");
17559
18058
  const match = regex.exec(tableBody);
17560
18059
  if (match) {
17561
- return match[1]?.split(",").map((col) => col.trim());
18060
+ return match[1]?.split(",").map((col) => unquoteIdentifier(col.trim())).filter(Boolean) ?? [];
17562
18061
  }
17563
18062
  return [];
17564
18063
  }
@@ -17566,12 +18065,12 @@ function parseExplicitForeignKeys(tableBody) {
17566
18065
  const fks = [];
17567
18066
  const fkRegex = new RegExp(SQL_PATTERNS.foreignKey.source, "gi");
17568
18067
  for (const match of tableBody.matchAll(fkRegex)) {
17569
- const column = match[1] ?? "";
17570
- const refTable = match[4] ?? "";
18068
+ const column = unquoteIdentifier(match[1] ?? "");
18069
+ const refTable = unquoteIdentifier(match[4] ?? "");
17571
18070
  if (!column || !refTable) continue;
17572
18071
  fks.push({
17573
18072
  column,
17574
- referencesTable: `${match[2] ?? ""}.${match[3] ?? ""}`,
18073
+ referencesTable: `${unquoteIdentifier(match[2] ?? "")}.${unquoteIdentifier(match[3] ?? "")}`,
17575
18074
  referencesColumn: refTable,
17576
18075
  onDelete: normalizeOnAction(match[5]),
17577
18076
  onUpdate: normalizeOnAction(match[6])
@@ -17583,13 +18082,13 @@ function parseInlineForeignKeys(tableBody, existingColumns) {
17583
18082
  const fks = [];
17584
18083
  const inlineRegex = new RegExp(SQL_PATTERNS.inlineReference.source, "gi");
17585
18084
  for (const match of tableBody.matchAll(inlineRegex)) {
17586
- const inlineCol = match[1] ?? "";
18085
+ const inlineCol = unquoteIdentifier(match[1] ?? "");
17587
18086
  if (existingColumns.has(inlineCol)) continue;
17588
- const inlineRefCol = match[4] ?? "";
18087
+ const inlineRefCol = unquoteIdentifier(match[4] ?? "");
17589
18088
  if (!inlineCol || !inlineRefCol) continue;
17590
18089
  fks.push({
17591
18090
  column: inlineCol,
17592
- referencesTable: `${match[2] ?? ""}.${match[3] ?? ""}`,
18091
+ referencesTable: `${unquoteIdentifier(match[2] ?? "")}.${unquoteIdentifier(match[3] ?? "")}`,
17593
18092
  referencesColumn: inlineRefCol
17594
18093
  });
17595
18094
  }
@@ -17605,8 +18104,10 @@ function parseIndexesRegex(content, schema, tableName) {
17605
18104
  const indexes = [];
17606
18105
  const regex = new RegExp(SQL_PATTERNS.createIndex.source, "gi");
17607
18106
  for (const match of content.matchAll(regex)) {
17608
- if (match[3] === schema && match[4] === tableName) {
17609
- const indexName = match[2] ?? "";
18107
+ const indexSchema = unquoteIdentifier(match[3] ?? "");
18108
+ const indexTable = unquoteIdentifier(match[4] ?? "");
18109
+ if (indexSchema === schema && indexTable === tableName) {
18110
+ const indexName = unquoteIdentifier(match[2] ?? "");
17610
18111
  if (!indexName) continue;
17611
18112
  const rawColumns = match[5] ?? "";
17612
18113
  indexes.push({
@@ -17621,26 +18122,200 @@ function parseIndexesRegex(content, schema, tableName) {
17621
18122
  function hasRlsEnabledRegex(content, schema, tableName) {
17622
18123
  const regex = new RegExp(SQL_PATTERNS.enableRls.source, "gi");
17623
18124
  for (const match of content.matchAll(regex)) {
17624
- if (match[1] === schema && match[2] === tableName) {
18125
+ const matchSchema = unquoteIdentifier(match[1] ?? "");
18126
+ const matchTable = unquoteIdentifier(match[2] ?? "");
18127
+ if (matchSchema === schema && matchTable === tableName) {
17625
18128
  return true;
17626
18129
  }
17627
18130
  }
17628
18131
  return false;
17629
18132
  }
18133
+ function extractCreatePolicyStatements(content) {
18134
+ const statements = [];
18135
+ const startRegex = /\bCREATE\s+POLICY\b/gi;
18136
+ let match;
18137
+ while ((match = startRegex.exec(content)) !== null) {
18138
+ const startIndex = match.index ?? 0;
18139
+ const endIndex = findSqlStatementEndForPolicy(content, startIndex);
18140
+ statements.push(content.slice(startIndex, endIndex).trim());
18141
+ }
18142
+ return statements;
18143
+ }
18144
+ function findSqlStatementEndForPolicy(content, startIndex) {
18145
+ let inSingleQuote = false;
18146
+ let inDoubleQuote = false;
18147
+ let inDollarQuote = false;
18148
+ let dollarTag = "";
18149
+ for (let i = startIndex; i < content.length; i++) {
18150
+ const char = content[i] ?? "";
18151
+ const next = content[i + 1] ?? "";
18152
+ if (!inSingleQuote && !inDoubleQuote && !inDollarQuote) {
18153
+ if (char === "-" && next === "-") {
18154
+ const newlineIndex = content.indexOf("\n", i);
18155
+ if (newlineIndex === -1) return content.length;
18156
+ i = newlineIndex;
18157
+ continue;
18158
+ }
18159
+ if (char === "/" && next === "*") {
18160
+ const closeIndex = content.indexOf("*/", i + 2);
18161
+ if (closeIndex === -1) return content.length;
18162
+ i = closeIndex + 1;
18163
+ continue;
18164
+ }
18165
+ }
18166
+ if (!inSingleQuote && !inDoubleQuote && char === "$") {
18167
+ if (inDollarQuote) {
18168
+ const closeTag = `$${dollarTag}$`;
18169
+ if (content.slice(i).startsWith(closeTag)) {
18170
+ i += closeTag.length - 1;
18171
+ inDollarQuote = false;
18172
+ dollarTag = "";
18173
+ continue;
18174
+ }
18175
+ } else {
18176
+ const tagMatch = content.slice(i).match(/^\$([a-zA-Z_][a-zA-Z0-9_]*)?\$/);
18177
+ if (tagMatch) {
18178
+ inDollarQuote = true;
18179
+ dollarTag = tagMatch[1] ?? "";
18180
+ i += tagMatch[0].length - 1;
18181
+ continue;
18182
+ }
18183
+ }
18184
+ }
18185
+ if (!inDoubleQuote && !inDollarQuote && char === "'") {
18186
+ if (inSingleQuote) {
18187
+ if (next === "'") {
18188
+ i++;
18189
+ continue;
18190
+ }
18191
+ inSingleQuote = false;
18192
+ } else {
18193
+ inSingleQuote = true;
18194
+ }
18195
+ continue;
18196
+ }
18197
+ if (!inSingleQuote && !inDollarQuote && char === '"') {
18198
+ if (inDoubleQuote) {
18199
+ if (next === '"') {
18200
+ i++;
18201
+ continue;
18202
+ }
18203
+ inDoubleQuote = false;
18204
+ } else {
18205
+ inDoubleQuote = true;
18206
+ }
18207
+ continue;
18208
+ }
18209
+ if (!inSingleQuote && !inDoubleQuote && !inDollarQuote && char === ";") {
18210
+ return i + 1;
18211
+ }
18212
+ }
18213
+ return content.length;
18214
+ }
18215
+ function extractBalancedClause(statement, startIndex) {
18216
+ const openParenIndex = statement.indexOf("(", startIndex);
18217
+ if (openParenIndex === -1) return void 0;
18218
+ let depth = 0;
18219
+ let clauseStart = -1;
18220
+ let inSingleQuote = false;
18221
+ let inDoubleQuote = false;
18222
+ let inDollarQuote = false;
18223
+ let dollarTag = "";
18224
+ for (let i = openParenIndex; i < statement.length; i++) {
18225
+ const char = statement[i] ?? "";
18226
+ const next = statement[i + 1] ?? "";
18227
+ if (!inSingleQuote && !inDoubleQuote && !inDollarQuote) {
18228
+ if (char === "-" && next === "-") {
18229
+ const newlineIndex = statement.indexOf("\n", i);
18230
+ if (newlineIndex === -1) break;
18231
+ i = newlineIndex;
18232
+ continue;
18233
+ }
18234
+ if (char === "/" && next === "*") {
18235
+ const closeIndex = statement.indexOf("*/", i + 2);
18236
+ if (closeIndex === -1) break;
18237
+ i = closeIndex + 1;
18238
+ continue;
18239
+ }
18240
+ }
18241
+ if (!inSingleQuote && !inDoubleQuote && char === "$") {
18242
+ if (inDollarQuote) {
18243
+ const closeTag = `$${dollarTag}$`;
18244
+ if (statement.slice(i).startsWith(closeTag)) {
18245
+ i += closeTag.length - 1;
18246
+ inDollarQuote = false;
18247
+ dollarTag = "";
18248
+ continue;
18249
+ }
18250
+ } else {
18251
+ const tagMatch = statement.slice(i).match(/^\$([a-zA-Z_][a-zA-Z0-9_]*)?\$/);
18252
+ if (tagMatch) {
18253
+ inDollarQuote = true;
18254
+ dollarTag = tagMatch[1] ?? "";
18255
+ i += tagMatch[0].length - 1;
18256
+ continue;
18257
+ }
18258
+ }
18259
+ }
18260
+ if (!inDoubleQuote && !inDollarQuote && char === "'") {
18261
+ if (inSingleQuote) {
18262
+ if (next === "'") {
18263
+ i++;
18264
+ continue;
18265
+ }
18266
+ inSingleQuote = false;
18267
+ } else {
18268
+ inSingleQuote = true;
18269
+ }
18270
+ continue;
18271
+ }
18272
+ if (!inSingleQuote && !inDollarQuote && char === '"') {
18273
+ if (inDoubleQuote) {
18274
+ if (next === '"') {
18275
+ i++;
18276
+ continue;
18277
+ }
18278
+ inDoubleQuote = false;
18279
+ } else {
18280
+ inDoubleQuote = true;
18281
+ }
18282
+ continue;
18283
+ }
18284
+ if (inSingleQuote || inDoubleQuote || inDollarQuote) continue;
18285
+ if (char === "(") {
18286
+ if (depth === 0) clauseStart = i + 1;
18287
+ depth++;
18288
+ } else if (char === ")") {
18289
+ depth--;
18290
+ if (depth === 0 && clauseStart !== -1) {
18291
+ return statement.slice(clauseStart, i).trim();
18292
+ }
18293
+ }
18294
+ }
18295
+ return void 0;
18296
+ }
17630
18297
  function parsePoliciesRegex(content, schema, tableName) {
17631
18298
  const policies = [];
17632
- const regex = new RegExp(SQL_PATTERNS.createPolicy.source, "gi");
17633
- for (const match of content.matchAll(regex)) {
17634
- const policyName = match[1] ?? "";
17635
- if (!policyName) continue;
17636
- if (match[2] === schema && match[3] === tableName) {
17637
- policies.push({
17638
- name: policyName,
17639
- command: match[4]?.toUpperCase(),
17640
- using: match[5] || void 0,
17641
- withCheck: match[6] || void 0
17642
- });
17643
- }
18299
+ const statements = extractCreatePolicyStatements(content);
18300
+ const headerRegex = new RegExp(
18301
+ `^\\s*CREATE\\s+POLICY\\s+(?:"((?:[^"]|"")*)"|([a-zA-Z_][a-zA-Z0-9_]*))\\s+ON\\s+(${SQL_IDENTIFIER})\\.(${SQL_IDENTIFIER})(?:\\s+AS\\s+\\w+)?(?:\\s+FOR\\s+(\\w+))?`,
18302
+ "i"
18303
+ );
18304
+ for (const statement of statements) {
18305
+ const match = statement.match(headerRegex);
18306
+ if (!match) continue;
18307
+ const policyName = unquoteIdentifier(match[1] ?? match[2] ?? "");
18308
+ const policySchema = unquoteIdentifier(match[3] ?? "");
18309
+ const policyTable = unquoteIdentifier(match[4] ?? "");
18310
+ if (!policyName || policySchema !== schema || policyTable !== tableName) continue;
18311
+ const usingIndex = statement.search(/\bUSING\s*\(/i);
18312
+ const withCheckIndex = statement.search(/\bWITH\s+CHECK\s*\(/i);
18313
+ policies.push({
18314
+ name: policyName,
18315
+ command: (match[5] || "ALL").toUpperCase(),
18316
+ using: usingIndex !== -1 ? extractBalancedClause(statement, usingIndex) : void 0,
18317
+ withCheck: withCheckIndex !== -1 ? extractBalancedClause(statement, withCheckIndex) : void 0
18318
+ });
17644
18319
  }
17645
18320
  return policies;
17646
18321
  }
@@ -17741,7 +18416,8 @@ function buildTableEntryRegex(table, content, filePath, opts) {
17741
18416
  };
17742
18417
  }
17743
18418
  function processTablesFromFileRegex(filePath, opts, seen) {
17744
- const content = readFileSync(filePath, "utf-8");
18419
+ const rawContent = readFileSync(filePath, "utf-8");
18420
+ const content = stripSqlCommentsPreserveStrings2(rawContent);
17745
18421
  const ctx = { content, lines: content.split("\n") };
17746
18422
  const tables = findTablesRegex(ctx);
17747
18423
  const entries = [];
@@ -17783,9 +18459,235 @@ async function extractTablesFromSqlDir(sqlDir, options = {}) {
17783
18459
  return tableEntries;
17784
18460
  }
17785
18461
 
18462
+ // src/commands/db/utils/table-source-classifier.ts
18463
+ init_esm_shims();
18464
+ function splitQualifiedName(qualifiedName) {
18465
+ const [schema = "", table = ""] = qualifiedName.split(".", 2);
18466
+ return { schema, table };
18467
+ }
18468
+ function escapeRegexLiteral(value) {
18469
+ return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
18470
+ }
18471
+ function buildTablePatternMatcher(patterns) {
18472
+ const compiled = patterns.map((p) => p.trim()).filter((p) => p.length > 0).map((pattern) => {
18473
+ const target = pattern.includes(".") ? "qualified" : "table";
18474
+ const regex = new RegExp(`^${escapeRegexLiteral(pattern).replace(/\\\*/g, ".*")}$`);
18475
+ return { target, regex };
18476
+ });
18477
+ return (qualifiedName) => {
18478
+ const { table } = splitQualifiedName(qualifiedName);
18479
+ for (const entry of compiled) {
18480
+ const candidate = entry.target === "qualified" ? qualifiedName : table;
18481
+ if (entry.regex.test(candidate)) {
18482
+ return true;
18483
+ }
18484
+ }
18485
+ return false;
18486
+ };
18487
+ }
18488
+ function findIdempotentAncestor(table, partitionParentMap, idempotentManagedTables) {
18489
+ if (idempotentManagedTables.has(table)) {
18490
+ return table;
18491
+ }
18492
+ let current = table;
18493
+ const visited = /* @__PURE__ */ new Set();
18494
+ while (!visited.has(current)) {
18495
+ visited.add(current);
18496
+ const parent = partitionParentMap.get(current);
18497
+ if (!parent) {
18498
+ return null;
18499
+ }
18500
+ if (idempotentManagedTables.has(parent)) {
18501
+ return parent;
18502
+ }
18503
+ current = parent;
18504
+ }
18505
+ return null;
18506
+ }
18507
+ function isSystemManagedTable(params) {
18508
+ const { schema } = splitQualifiedName(params.qualifiedName);
18509
+ return params.systemSchemas.has(schema) || params.knownSystemTables.has(params.qualifiedName);
18510
+ }
18511
+ function classifyMissingSourceTables(params) {
18512
+ const extensionManagedTables = params.extensionManagedTables ?? /* @__PURE__ */ new Map();
18513
+ const partitionParentMap = params.partitionParentMap ?? /* @__PURE__ */ new Map();
18514
+ const exclusionMatcher = buildTablePatternMatcher(params.excludeFromOrphanDetection ?? []);
18515
+ const systemSchemas = new Set(params.systemSchemas ?? []);
18516
+ const knownSystemTables = new Set(params.knownSystemTables ?? []);
18517
+ const classified = {
18518
+ definedInIdempotentDynamicDdl: [],
18519
+ extensionManagedOrSystemTable: [],
18520
+ trulyOrphaned: []
18521
+ };
18522
+ for (const qualifiedName of params.tablesWithoutSource) {
18523
+ const idempotentAncestor = findIdempotentAncestor(
18524
+ qualifiedName,
18525
+ partitionParentMap,
18526
+ params.idempotentManagedTables
18527
+ );
18528
+ if (idempotentAncestor) {
18529
+ classified.definedInIdempotentDynamicDdl.push({
18530
+ qualifiedName,
18531
+ detail: idempotentAncestor === qualifiedName ? "matched CREATE TABLE in idempotent SQL" : `partition child of ${idempotentAncestor}`
18532
+ });
18533
+ continue;
18534
+ }
18535
+ const extensionName = extensionManagedTables.get(qualifiedName);
18536
+ if (extensionName) {
18537
+ classified.extensionManagedOrSystemTable.push({
18538
+ qualifiedName,
18539
+ detail: `managed by extension "${extensionName}"`
18540
+ });
18541
+ continue;
18542
+ }
18543
+ if (isSystemManagedTable({ qualifiedName, systemSchemas, knownSystemTables })) {
18544
+ classified.extensionManagedOrSystemTable.push({
18545
+ qualifiedName,
18546
+ detail: "system-managed schema/table"
18547
+ });
18548
+ continue;
18549
+ }
18550
+ if (exclusionMatcher(qualifiedName)) {
18551
+ classified.extensionManagedOrSystemTable.push({
18552
+ qualifiedName,
18553
+ detail: "allowlisted by database.pgSchemaDiff.excludeFromOrphanDetection"
18554
+ });
18555
+ continue;
18556
+ }
18557
+ classified.trulyOrphaned.push(qualifiedName);
18558
+ }
18559
+ return classified;
18560
+ }
18561
+
17786
18562
  // src/commands/db/utils/table-registry.ts
17787
18563
  var MANIFEST_VERSION = 2;
17788
18564
  var GENERATOR_VERSION = "1.0.0";
18565
+ var DEFAULT_IDEMPOTENT_SQL_DIR = "supabase/schemas/idempotent";
18566
+ var KNOWN_EXTENSION_SYSTEM_TABLES = /* @__PURE__ */ new Set([
18567
+ "public.spatial_ref_sys",
18568
+ "public.geometry_columns",
18569
+ "public.geography_columns"
18570
+ ]);
18571
+ var SUPABASE_SYSTEM_SCHEMA_SET = new Set(SUPABASE_SYSTEM_SCHEMAS);
18572
+ var VALID_PG_IDENTIFIER2 = /^[a-zA-Z_][a-zA-Z0-9_]{0,62}$/;
18573
+ function validatePgIdentifier2(name, context) {
18574
+ if (!name || typeof name !== "string") {
18575
+ throw new Error(`Invalid ${context}: empty or not a string`);
18576
+ }
18577
+ if (!VALID_PG_IDENTIFIER2.test(name)) {
18578
+ throw new Error(
18579
+ `Invalid ${context} "${name}": must start with letter/underscore and contain only alphanumeric/underscore characters`
18580
+ );
18581
+ }
18582
+ }
18583
+ function buildSafeSchemaInClause2(schemas) {
18584
+ if (schemas.length === 0) {
18585
+ throw new Error("No schemas provided for IN clause");
18586
+ }
18587
+ const safeSchemas = [];
18588
+ for (const schema of schemas) {
18589
+ validatePgIdentifier2(schema, "schema name");
18590
+ safeSchemas.push(`'${schema.replace(/'/g, "''")}'`);
18591
+ }
18592
+ return safeSchemas.join(",");
18593
+ }
18594
+ function toRelativeSourcePath(projectRoot, sourceFile) {
18595
+ let relativeSource = relative(projectRoot, sourceFile);
18596
+ if (relativeSource.startsWith("/") || relativeSource.startsWith("..")) {
18597
+ const schemaMatch = sourceFile.match(/supabase\/schemas\/[^/]+\/[^/]+$/);
18598
+ relativeSource = schemaMatch ? schemaMatch[0] : sourceFile;
18599
+ }
18600
+ return relativeSource;
18601
+ }
18602
+ function resolveSourceConfig(projectRoot, options) {
18603
+ let idempotentSqlDir = options.idempotentSqlDir ?? DEFAULT_IDEMPOTENT_SQL_DIR;
18604
+ const exclusions = new Set(options.excludeFromOrphanDetection ?? []);
18605
+ try {
18606
+ const config = loadRunaConfig2(projectRoot);
18607
+ const pgSchemaDiff = config.database?.pgSchemaDiff;
18608
+ if (!options.idempotentSqlDir && pgSchemaDiff?.idempotentSqlDir) {
18609
+ idempotentSqlDir = pgSchemaDiff.idempotentSqlDir;
18610
+ }
18611
+ if (pgSchemaDiff?.excludeFromOrphanDetection) {
18612
+ for (const pattern of pgSchemaDiff.excludeFromOrphanDetection) {
18613
+ exclusions.add(pattern);
18614
+ }
18615
+ }
18616
+ } catch {
18617
+ }
18618
+ return {
18619
+ idempotentSqlDir: isAbsolute(idempotentSqlDir) ? idempotentSqlDir : join(projectRoot, idempotentSqlDir),
18620
+ excludeFromOrphanDetection: [...exclusions].sort((a, b) => a.localeCompare(b))
18621
+ };
18622
+ }
18623
+ async function fetchMissingSourceMetadata(params) {
18624
+ const { databaseUrl, schemas } = params;
18625
+ if (schemas.length === 0) {
18626
+ return {
18627
+ extensionManagedTables: /* @__PURE__ */ new Map(),
18628
+ partitionParentMap: /* @__PURE__ */ new Map()
18629
+ };
18630
+ }
18631
+ const isRemoteSupabase = databaseUrl.includes(".supabase.co");
18632
+ const sql = postgres(databaseUrl, {
18633
+ ...isRemoteSupabase && { ssl: "require" }
18634
+ });
18635
+ try {
18636
+ const schemaList = buildSafeSchemaInClause2(schemas);
18637
+ const [extensionRows, partitionRows] = await Promise.all([
18638
+ sql`
18639
+ SELECT
18640
+ n.nspname AS schema_name,
18641
+ c.relname AS table_name,
18642
+ ext.extname AS extension_name
18643
+ FROM pg_class c
18644
+ JOIN pg_namespace n ON n.oid = c.relnamespace
18645
+ JOIN pg_depend d
18646
+ ON d.classid = 'pg_class'::regclass
18647
+ AND d.objid = c.oid
18648
+ AND d.refclassid = 'pg_extension'::regclass
18649
+ AND d.deptype = 'e'
18650
+ JOIN pg_extension ext ON ext.oid = d.refobjid
18651
+ WHERE c.relkind IN ('r', 'p')
18652
+ AND n.nspname IN (${sql.unsafe(schemaList)})
18653
+ `,
18654
+ sql`
18655
+ SELECT
18656
+ child_ns.nspname AS child_schema,
18657
+ child.relname AS child_table,
18658
+ parent_ns.nspname AS parent_schema,
18659
+ parent.relname AS parent_table
18660
+ FROM pg_inherits i
18661
+ JOIN pg_class child ON child.oid = i.inhrelid
18662
+ JOIN pg_namespace child_ns ON child_ns.oid = child.relnamespace
18663
+ JOIN pg_class parent ON parent.oid = i.inhparent
18664
+ JOIN pg_namespace parent_ns ON parent_ns.oid = parent.relnamespace
18665
+ WHERE child.relkind IN ('r', 'p')
18666
+ AND child_ns.nspname IN (${sql.unsafe(schemaList)})
18667
+ `
18668
+ ]);
18669
+ const extensionManagedTables = /* @__PURE__ */ new Map();
18670
+ for (const row of extensionRows) {
18671
+ extensionManagedTables.set(
18672
+ `${String(row.schema_name)}.${String(row.table_name)}`,
18673
+ String(row.extension_name)
18674
+ );
18675
+ }
18676
+ const partitionParentMap = /* @__PURE__ */ new Map();
18677
+ for (const row of partitionRows) {
18678
+ partitionParentMap.set(
18679
+ `${String(row.child_schema)}.${String(row.child_table)}`,
18680
+ `${String(row.parent_schema)}.${String(row.parent_table)}`
18681
+ );
18682
+ }
18683
+ return { extensionManagedTables, partitionParentMap };
18684
+ } finally {
18685
+ await sql.end();
18686
+ }
18687
+ }
18688
+ function formatMissingSourceItems(items) {
18689
+ return items.map((item) => item.detail ? `${item.qualifiedName} (${item.detail})` : item.qualifiedName).join(", ");
18690
+ }
17789
18691
  async function introspectTablesFromDb(databaseUrl, schemas) {
17790
18692
  try {
17791
18693
  const result = await introspectDatabase(databaseUrl, { schemas });
@@ -18031,14 +18933,31 @@ async function crossCheckWithDrizzle(sqlTables, drizzleSchemaPath) {
18031
18933
  return { matched: [], sqlOnly: sqlTables, drizzleOnly: [] };
18032
18934
  }
18033
18935
  }
18034
- function warnTablesWithoutSource(tables) {
18035
- const tablesWithoutSource = tables.filter((t) => !t.sourceFile);
18036
- if (tablesWithoutSource.length === 0) return;
18037
- console.warn(
18038
- `[tables-manifest] \u26A0 ${tablesWithoutSource.length} table(s) exist in DB but not in SQL files:`
18039
- );
18040
- console.warn(` ${tablesWithoutSource.map((t) => t.qualifiedName).join(", ")}`);
18041
- console.warn(" \u2192 These may be manually created or Supabase auto-generated tables.");
18936
+ function logMissingSourceClassification(classification) {
18937
+ const total = classification.definedInIdempotentDynamicDdl.length + classification.extensionManagedOrSystemTable.length + classification.trulyOrphaned.length;
18938
+ if (total === 0) return;
18939
+ console.warn(`[tables-manifest] \u26A0 ${total} table(s) exist in DB but not in SQL files.`);
18940
+ if (classification.definedInIdempotentDynamicDdl.length > 0) {
18941
+ console.log(
18942
+ `[tables-manifest] info: defined_in_idempotent_dynamic_ddl (${classification.definedInIdempotentDynamicDdl.length})`
18943
+ );
18944
+ console.log(` ${formatMissingSourceItems(classification.definedInIdempotentDynamicDdl)}`);
18945
+ }
18946
+ if (classification.extensionManagedOrSystemTable.length > 0) {
18947
+ console.log(
18948
+ `[tables-manifest] info: extension_managed/system_table (${classification.extensionManagedOrSystemTable.length})`
18949
+ );
18950
+ console.log(` ${formatMissingSourceItems(classification.extensionManagedOrSystemTable)}`);
18951
+ }
18952
+ if (classification.trulyOrphaned.length > 0) {
18953
+ console.warn(`[tables-manifest] warn: truly_orphaned (${classification.trulyOrphaned.length})`);
18954
+ console.warn(` ${classification.trulyOrphaned.join(", ")}`);
18955
+ console.warn(
18956
+ " \u2192 Add declarative/idempotent SQL definitions or allowlist via database.pgSchemaDiff.excludeFromOrphanDetection."
18957
+ );
18958
+ } else {
18959
+ console.log("[tables-manifest] info: no truly_orphaned tables detected.");
18960
+ }
18042
18961
  }
18043
18962
  async function logDrizzleCrossCheck(tables, drizzleSchemaPath) {
18044
18963
  const result = await crossCheckWithDrizzle(tables, drizzleSchemaPath);
@@ -18070,24 +18989,38 @@ async function generateTablesManifest(projectRoot, options = {}) {
18070
18989
  // Reserved for future metadata filtering feature
18071
18990
  includeMetadata: _includeMetadata = true
18072
18991
  } = options;
18992
+ const sourceConfig = resolveSourceConfig(projectRoot, options);
18073
18993
  let tables = [];
18074
18994
  const source = "introspection";
18075
- const sqlTables = await extractTablesFromSqlDir(sqlDir, {
18995
+ const declarativeTables = await extractTablesFromSqlDir(sqlDir, {
18076
18996
  includeColumns: false,
18077
18997
  // Don't need columns from SQL (DB introspection is more accurate)
18078
18998
  includeForeignKeys: false,
18079
18999
  includeIndexes: false,
18080
19000
  includeRlsPolicies: false
18081
19001
  });
19002
+ const idempotentTablesForSource = await extractTablesFromSqlDir(sourceConfig.idempotentSqlDir, {
19003
+ includeColumns: false,
19004
+ includeForeignKeys: false,
19005
+ includeIndexes: false,
19006
+ includeRlsPolicies: false
19007
+ });
19008
+ const idempotentTablesFromRegex = extractTablesFromIdempotentSql(
19009
+ sourceConfig.idempotentSqlDir,
19010
+ projectRoot
19011
+ );
19012
+ const idempotentManagedTables = /* @__PURE__ */ new Set([
19013
+ ...idempotentTablesFromRegex,
19014
+ ...idempotentTablesForSource.map((t) => t.qualifiedName)
19015
+ ]);
18082
19016
  const sourceFileMap = /* @__PURE__ */ new Map();
18083
- for (const t of sqlTables) {
18084
- let relativeSource = relative(projectRoot, t.sourceFile);
18085
- if (relativeSource.startsWith("/") || relativeSource.startsWith("..")) {
18086
- const schemaMatch = t.sourceFile.match(/supabase\/schemas\/[^/]+\/[^/]+$/);
18087
- relativeSource = schemaMatch ? schemaMatch[0] : t.sourceFile;
19017
+ const sourceTables = [...declarativeTables, ...idempotentTablesForSource];
19018
+ for (const t of sourceTables) {
19019
+ if (sourceFileMap.has(t.qualifiedName)) {
19020
+ continue;
18088
19021
  }
18089
19022
  sourceFileMap.set(t.qualifiedName, {
18090
- sourceFile: relativeSource,
19023
+ sourceFile: toRelativeSourcePath(projectRoot, t.sourceFile),
18091
19024
  lineNumber: t.lineNumber
18092
19025
  });
18093
19026
  }
@@ -18107,7 +19040,34 @@ async function generateTablesManifest(projectRoot, options = {}) {
18107
19040
  };
18108
19041
  });
18109
19042
  console.log(`[tables-manifest] \u2713 Introspected ${tables.length} tables from database`);
18110
- warnTablesWithoutSource(tables);
19043
+ const tablesWithoutSource = tables.filter((t) => !t.sourceFile);
19044
+ if (tablesWithoutSource.length > 0) {
19045
+ const missingSourceQualifiedNames = tablesWithoutSource.map((t) => t.qualifiedName);
19046
+ const missingSchemas = [...new Set(tablesWithoutSource.map((t) => t.schema))];
19047
+ let extensionManagedTables = /* @__PURE__ */ new Map();
19048
+ let partitionParentMap = /* @__PURE__ */ new Map();
19049
+ try {
19050
+ const metadata = await fetchMissingSourceMetadata({
19051
+ databaseUrl,
19052
+ schemas: missingSchemas
19053
+ });
19054
+ extensionManagedTables = metadata.extensionManagedTables;
19055
+ partitionParentMap = metadata.partitionParentMap;
19056
+ } catch (error) {
19057
+ const message = error instanceof Error ? error.message : String(error);
19058
+ console.warn(`[tables-manifest] Failed to classify extension/partition metadata: ${message}`);
19059
+ }
19060
+ const classification = classifyMissingSourceTables({
19061
+ tablesWithoutSource: missingSourceQualifiedNames,
19062
+ idempotentManagedTables,
19063
+ extensionManagedTables,
19064
+ partitionParentMap,
19065
+ excludeFromOrphanDetection: sourceConfig.excludeFromOrphanDetection,
19066
+ systemSchemas: SUPABASE_SYSTEM_SCHEMA_SET,
19067
+ knownSystemTables: KNOWN_EXTENSION_SYSTEM_TABLES
19068
+ });
19069
+ logMissingSourceClassification(classification);
19070
+ }
18111
19071
  if (crossCheck && existsSync(drizzleSchemaPath)) {
18112
19072
  await logDrizzleCrossCheck(tables, drizzleSchemaPath);
18113
19073
  }
@@ -20152,225 +21112,6 @@ var backupCommand = new Command("backup").description("Manage database backups (
20152
21112
  // src/commands/db/commands/db-cleanup.ts
20153
21113
  init_esm_shims();
20154
21114
  init_config_loader();
20155
-
20156
- // src/commands/db/utils/schema-sync.ts
20157
- init_esm_shims();
20158
- var VALID_PG_IDENTIFIER_PATTERN = /^[a-zA-Z_][a-zA-Z0-9_]{0,62}$/;
20159
- function validatePgIdentifier(name, context) {
20160
- if (!name || typeof name !== "string") {
20161
- throw new Error(`Invalid ${context}: empty or not a string`);
20162
- }
20163
- if (!VALID_PG_IDENTIFIER_PATTERN.test(name)) {
20164
- throw new Error(
20165
- `Invalid ${context} "${name}": must start with letter/underscore and contain only alphanumeric/underscore characters`
20166
- );
20167
- }
20168
- }
20169
- function escapePgStringLiteral(value) {
20170
- if (typeof value !== "string") {
20171
- throw new Error("Value must be a string");
20172
- }
20173
- return value.replace(/\\/g, "\\\\").replace(/'/g, "''");
20174
- }
20175
- function buildSafeSchemaInClause(schemas) {
20176
- if (schemas.length === 0) {
20177
- throw new Error("No schemas provided for IN clause");
20178
- }
20179
- const safeSchemas = [];
20180
- for (const schema of schemas) {
20181
- validatePgIdentifier(schema, "schema name");
20182
- safeSchemas.push(`'${escapePgStringLiteral(schema)}'`);
20183
- }
20184
- return safeSchemas.join(",");
20185
- }
20186
- var ERROR_MESSAGES2 = {
20187
- PATH_TRAVERSAL: "Schema path validation failed",
20188
- SCHEMA_NOT_FOUND: "Schema file not found"
20189
- };
20190
- function containsPathTraversal2(inputPath) {
20191
- const normalized = path11__default.normalize(inputPath);
20192
- return normalized.includes("..") || inputPath.includes("\0");
20193
- }
20194
- function isPathWithinBase(filePath, baseDir) {
20195
- try {
20196
- const resolvedFile = path11__default.resolve(filePath);
20197
- const resolvedBase = path11__default.resolve(baseDir);
20198
- const normalizedFile = path11__default.normalize(resolvedFile);
20199
- const normalizedBase = path11__default.normalize(resolvedBase);
20200
- return normalizedFile === normalizedBase || normalizedFile.startsWith(normalizedBase + path11__default.sep);
20201
- } catch {
20202
- return false;
20203
- }
20204
- }
20205
- function validateSchemaPath(dbPackagePath, projectRoot = process.cwd()) {
20206
- if (containsPathTraversal2(dbPackagePath)) {
20207
- throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
20208
- }
20209
- const schemaEntry = path11__default.join(dbPackagePath, "src", "schema", "index.ts");
20210
- const absoluteSchemaPath = path11__default.resolve(projectRoot, schemaEntry);
20211
- let resolvedProjectRoot;
20212
- try {
20213
- resolvedProjectRoot = realpathSync(projectRoot);
20214
- } catch {
20215
- resolvedProjectRoot = path11__default.resolve(projectRoot);
20216
- }
20217
- if (!isPathWithinBase(absoluteSchemaPath, resolvedProjectRoot)) {
20218
- throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
20219
- }
20220
- if (!existsSync(absoluteSchemaPath)) {
20221
- throw new Error(ERROR_MESSAGES2.SCHEMA_NOT_FOUND);
20222
- }
20223
- return absoluteSchemaPath;
20224
- }
20225
- function uniqueSorted(values) {
20226
- return [...new Set(values)].sort((a, b) => a.localeCompare(b));
20227
- }
20228
- async function extractSchemaTablesAndEnums(dbPackagePath, projectRoot = process.cwd()) {
20229
- const validatedSchemaPath = validateSchemaPath(dbPackagePath, projectRoot);
20230
- const jiti = createJiti(projectRoot, { interopDefault: true });
20231
- let schemaModule;
20232
- try {
20233
- schemaModule = await jiti.import(validatedSchemaPath);
20234
- } catch (error) {
20235
- const errorMessage = error instanceof Error ? error.message : String(error);
20236
- const hint = errorMessage.includes("unknown is not defined") ? "\n\nHint: Add 'unknown' to drizzle-orm/pg-core imports:\n import { unknown, ... } from 'drizzle-orm/pg-core'" : "";
20237
- throw new Error(`Failed to load schema from ${validatedSchemaPath}: ${errorMessage}${hint}`);
20238
- }
20239
- const expectedTables = /* @__PURE__ */ new Set();
20240
- const expectedEnums = /* @__PURE__ */ new Map();
20241
- for (const value of Object.values(schemaModule)) {
20242
- if (isTable(value)) {
20243
- const unique = String(getTableUniqueName(value));
20244
- if (unique.startsWith("undefined.")) {
20245
- expectedTables.add(`public.${getTableName(value)}`);
20246
- } else {
20247
- expectedTables.add(unique);
20248
- }
20249
- continue;
20250
- }
20251
- if (isPgEnum(value)) {
20252
- expectedEnums.set(value.enumName, {
20253
- name: value.enumName,
20254
- values: uniqueSorted(value.enumValues)
20255
- });
20256
- }
20257
- }
20258
- return { expectedTables, expectedEnums };
20259
- }
20260
- async function fetchDbTablesAndEnums(databaseUrl, options) {
20261
- const schemaDir = options?.schemaDir ?? "packages/database/src/schema";
20262
- const managedSchemas = detectSchemaNames(schemaDir, process.cwd());
20263
- const systemSchemas = /* @__PURE__ */ new Set([
20264
- ...SUPABASE_SYSTEM_SCHEMAS,
20265
- ...options?.additionalSystemSchemas ?? []
20266
- ]);
20267
- const filteredManagedSchemas = managedSchemas.filter((s) => !systemSchemas.has(s));
20268
- const schemaList = buildSafeSchemaInClause(filteredManagedSchemas);
20269
- const tablesSql = `
20270
- SELECT schemaname || '.' || tablename
20271
- FROM pg_tables
20272
- WHERE schemaname IN (${schemaList})
20273
- ORDER BY schemaname, tablename;`.trim();
20274
- const enumsSql = `
20275
- SELECT t.typname AS enum_name, string_agg(e.enumlabel, ',' ORDER BY e.enumsortorder) AS values
20276
- FROM pg_type t
20277
- JOIN pg_enum e ON t.oid = e.enumtypid
20278
- JOIN pg_namespace n ON n.oid = t.typnamespace
20279
- WHERE n.nspname = 'public'
20280
- GROUP BY t.typname
20281
- ORDER BY t.typname;`.trim();
20282
- const tablesOut = await psqlQuery({ databaseUrl, sql: tablesSql, mode: "table" });
20283
- const dbTables = /* @__PURE__ */ new Set();
20284
- for (const line of tablesOut.split("\n")) {
20285
- const v = line.trim();
20286
- if (v.length > 0) dbTables.add(v);
20287
- }
20288
- const enumsOut = await psqlQuery({ databaseUrl, sql: enumsSql, mode: "table" });
20289
- const dbEnums = /* @__PURE__ */ new Map();
20290
- for (const line of enumsOut.split("\n")) {
20291
- const trimmed = line.trim();
20292
- if (trimmed.length === 0) continue;
20293
- const [enumName, valuesCsv] = trimmed.split("|").map((s) => s.trim());
20294
- const values = valuesCsv ? valuesCsv.split(",").map((s) => s.trim()) : [];
20295
- dbEnums.set(enumName, { name: enumName, values: uniqueSorted(values) });
20296
- }
20297
- return { dbTables, dbEnums };
20298
- }
20299
- function diffSchema(params) {
20300
- const missingTables = uniqueSorted(
20301
- [...params.expectedTables].filter((t) => !params.dbTables.has(t))
20302
- );
20303
- const exclusions = new Set(params.excludeFromOrphanDetection ?? []);
20304
- const exclusionPatterns = [...exclusions].filter((e) => e.includes("*"));
20305
- const exactExclusions = [...exclusions].filter((e) => !e.includes("*"));
20306
- const isExcluded = (table) => {
20307
- if (exactExclusions.includes(table)) return true;
20308
- for (const pattern of exclusionPatterns) {
20309
- const regex = new RegExp(`^${pattern.replace(/\*/g, ".*")}$`);
20310
- if (regex.test(table)) return true;
20311
- }
20312
- return false;
20313
- };
20314
- const orphanTables = uniqueSorted(
20315
- [...params.dbTables].filter((t) => !params.expectedTables.has(t) && !isExcluded(t))
20316
- );
20317
- const expectedEnumNames = new Set(params.expectedEnums.keys());
20318
- const dbEnumNames = new Set(params.dbEnums.keys());
20319
- const missingEnums = uniqueSorted([...expectedEnumNames].filter((n) => !dbEnumNames.has(n)));
20320
- const extraEnums = uniqueSorted([...dbEnumNames].filter((n) => !expectedEnumNames.has(n)));
20321
- const enumValueMismatches = [];
20322
- for (const name of uniqueSorted([...expectedEnumNames].filter((n) => dbEnumNames.has(n)))) {
20323
- const s = params.expectedEnums.get(name);
20324
- const d = params.dbEnums.get(name);
20325
- if (!s || !d) continue;
20326
- const schemaValues = uniqueSorted(s.values);
20327
- const dbValues = uniqueSorted(d.values);
20328
- const same = schemaValues.length === dbValues.length && schemaValues.every((v, i) => v === dbValues[i]);
20329
- if (same) continue;
20330
- const added = schemaValues.filter((v) => !dbValues.includes(v));
20331
- const removed = dbValues.filter((v) => !schemaValues.includes(v));
20332
- enumValueMismatches.push({ name, dbValues, schemaValues, added, removed });
20333
- }
20334
- return {
20335
- expectedTables: params.expectedTables,
20336
- expectedEnums: params.expectedEnums,
20337
- dbTables: params.dbTables,
20338
- dbEnums: params.dbEnums,
20339
- missingTables,
20340
- orphanTables,
20341
- missingEnums,
20342
- extraEnums,
20343
- enumValueMismatches
20344
- };
20345
- }
20346
- function extractTablesFromIdempotentSql(idempotentDir, projectRoot = process.cwd()) {
20347
- const fullPath = path11__default.resolve(projectRoot, idempotentDir);
20348
- if (!existsSync(fullPath)) {
20349
- return [];
20350
- }
20351
- const tables = [];
20352
- const createTablePattern = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?\.)?(?:"?([a-zA-Z_][a-zA-Z0-9_]*)"?)/gi;
20353
- try {
20354
- const files = readdirSync(fullPath).filter((f) => f.endsWith(".sql"));
20355
- for (const file of files) {
20356
- const filePath = path11__default.join(fullPath, file);
20357
- const content = readFileSync(filePath, "utf-8");
20358
- const contentWithoutComments = content.replace(/--.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
20359
- for (const match of contentWithoutComments.matchAll(createTablePattern)) {
20360
- const schema = match[1] || "public";
20361
- const tableName = match[2];
20362
- if (tableName) {
20363
- tables.push(`${schema}.${tableName}`);
20364
- }
20365
- }
20366
- }
20367
- } catch {
20368
- return [];
20369
- }
20370
- return [...new Set(tables)].sort();
20371
- }
20372
-
20373
- // src/commands/db/commands/db-cleanup.ts
20374
21115
  function quoteIdentifier(identifier) {
20375
21116
  return `"${identifier.replaceAll('"', '""')}"`;
20376
21117
  }
@@ -20938,64 +21679,6 @@ var diffVisualCommand = new Command("diff-visual").description("Visualize schema
20938
21679
  // src/commands/db/commands/db-drizzle.ts
20939
21680
  init_esm_shims();
20940
21681
 
20941
- // src/internal/machines/index.ts
20942
- init_esm_shims();
20943
-
20944
- // src/internal/machines/machine-runner.ts
20945
- init_esm_shims();
20946
- function getOutputOrThrow(snapshot2) {
20947
- const s = snapshot2;
20948
- if (s.output === void 0) {
20949
- throw new CLIError("Machine completed without output", "MACHINE_NO_OUTPUT", [
20950
- "Ensure the machine defines an `output:` function."
20951
- ]);
20952
- }
20953
- return s.output;
20954
- }
20955
- async function runMachine(params) {
20956
- const timeoutMs = params.timeoutMs ?? 10 * 60 * 1e3;
20957
- return new Promise((resolve12, reject) => {
20958
- const actor = createActor(params.machine, { input: params.input });
20959
- const timer = setTimeout(() => {
20960
- try {
20961
- actor.stop?.();
20962
- } catch {
20963
- }
20964
- reject(
20965
- new CLIError("Machine execution timed out", "MACHINE_TIMEOUT", [
20966
- `Timeout: ${timeoutMs}ms`,
20967
- "Consider increasing timeoutMs for long-running operations."
20968
- ])
20969
- );
20970
- }, timeoutMs);
20971
- const sub = actor.subscribe((snapshot2) => {
20972
- try {
20973
- params.onSnapshot?.(snapshot2);
20974
- const stateName = params.helpers.getStateName(snapshot2);
20975
- params.onTransition?.(stateName);
20976
- if (params.helpers.isComplete(snapshot2)) {
20977
- clearTimeout(timer);
20978
- sub.unsubscribe();
20979
- const out = getOutputOrThrow(snapshot2);
20980
- resolve12(out);
20981
- }
20982
- } catch (err) {
20983
- clearTimeout(timer);
20984
- sub.unsubscribe();
20985
- reject(err);
20986
- }
20987
- });
20988
- try {
20989
- actor.start();
20990
- actor.send({ type: "START" });
20991
- } catch (err) {
20992
- clearTimeout(timer);
20993
- sub.unsubscribe();
20994
- reject(err);
20995
- }
20996
- });
20997
- }
20998
-
20999
21682
  // src/utils/execution-plan.ts
21000
21683
  init_esm_shims();
21001
21684
  function getChangeIcon(type) {
@@ -21266,6 +21949,10 @@ var RISK_PATTERNS = [
21266
21949
  function normalizeContent(content) {
21267
21950
  return content.replace(/--[^\n]*/g, " ").replace(/\/\*[\s\S]*?\*\//g, " ").replace(/\s+/g, " ").trim();
21268
21951
  }
21952
+ function stripSqlCommentsPreserveLines(content) {
21953
+ const lineMasked = content.replace(/--[^\n]*/g, (match) => " ".repeat(match.length));
21954
+ return lineMasked.replace(/\/\*[\s\S]*?\*\//g, (match) => match.replace(/[^\n]/g, " "));
21955
+ }
21269
21956
  function maskWrapperFunctions(content) {
21270
21957
  const wrapperPattern = /CREATE\s+(?:OR\s+REPLACE\s+)?FUNCTION\s+(?:public\.)?runa_auth_\w+\s*\([^)]*\)[^$]*(\$[a-zA-Z_]*\$)[\s\S]*?\1;?/gi;
21271
21958
  return content.replace(wrapperPattern, "/* WRAPPER_FUNCTION_MASKED */");
@@ -21280,6 +21967,9 @@ function findLineNumber(originalContent, matchText) {
21280
21967
  const beforeMatch = originalContent.substring(0, match.index);
21281
21968
  return beforeMatch.split("\n").length;
21282
21969
  }
21970
+ function lineNumberFromIndex(content, index) {
21971
+ return content.substring(0, Math.max(0, index)).split("\n").length;
21972
+ }
21283
21973
  function detectRisksFromContent(normalizedContent, originalContent) {
21284
21974
  const risks = [];
21285
21975
  const seenDescriptions = /* @__PURE__ */ new Set();
@@ -21302,18 +21992,18 @@ function detectRisksFromContent(normalizedContent, originalContent) {
21302
21992
  }
21303
21993
  return risks;
21304
21994
  }
21305
- function detectForeignKeyRisks(lines) {
21995
+ function detectForeignKeyRisks(originalContent) {
21306
21996
  const risks = [];
21307
- const fkPattern = /REFERENCES\s+[\w."]+\s*\(/i;
21308
- for (let i = 0; i < lines.length; i++) {
21309
- if (fkPattern.test(lines[i])) {
21310
- risks.push({
21311
- level: "low",
21312
- description: "Foreign key columns should have indexes",
21313
- mitigation: "Add index on foreign key column for better performance",
21314
- line: i + 1
21315
- });
21316
- }
21997
+ const fkPattern = /REFERENCES\s+[\w."]+\s*\(/gi;
21998
+ const contentWithoutComments = stripSqlCommentsPreserveLines(originalContent);
21999
+ let match;
22000
+ while ((match = fkPattern.exec(contentWithoutComments)) !== null) {
22001
+ risks.push({
22002
+ level: "low",
22003
+ description: "Foreign key columns should have indexes",
22004
+ mitigation: "Add index on foreign key column for better performance",
22005
+ line: lineNumberFromIndex(originalContent, match.index ?? 0)
22006
+ });
21317
22007
  }
21318
22008
  return risks;
21319
22009
  }
@@ -21325,9 +22015,8 @@ async function detectSchemaRisks(filePath) {
21325
22015
  const content = await readFile(filePath, "utf-8");
21326
22016
  const maskedContent = maskWrapperFunctions(content);
21327
22017
  const normalizedContent = normalizeContent(maskedContent);
21328
- const lines = content.split("\n");
21329
22018
  const contentRisks = detectRisksFromContent(normalizedContent, content);
21330
- const fkRisks = detectForeignKeyRisks(lines);
22019
+ const fkRisks = detectForeignKeyRisks(content);
21331
22020
  return [...contentRisks, ...fkRisks];
21332
22021
  } catch (_error) {
21333
22022
  return [];
@@ -21833,11 +22522,18 @@ var setupContext = fromPromise(
21833
22522
  const dbPackagePath = await getDatabasePackagePath();
21834
22523
  const databaseUrl = resolveDatabaseUrl(env2);
21835
22524
  const tmpDir = `${repoRoot}/.runa-tmp`;
22525
+ let configTimeoutMs;
22526
+ try {
22527
+ const config = loadRunaConfig2();
22528
+ configTimeoutMs = config.database?.sync?.timeoutMs;
22529
+ } catch {
22530
+ }
21836
22531
  return {
21837
22532
  repoRoot,
21838
22533
  tmpDir,
21839
22534
  databaseUrl,
21840
- dbPackagePath
22535
+ dbPackagePath,
22536
+ configTimeoutMs
21841
22537
  };
21842
22538
  }
21843
22539
  );
@@ -21982,7 +22678,9 @@ var syncSchema = fromPromise(
21982
22678
  skipCodegen: ctx.skipCodegen,
21983
22679
  reportJson: ctx.reportJson,
21984
22680
  invokedAs: "runa db sync",
21985
- fromProduction: ctx.fromProduction
22681
+ fromProduction: ctx.fromProduction,
22682
+ timeoutMs: ctx.timeoutMs,
22683
+ configTimeoutMs: ctx.configTimeoutMs
21986
22684
  });
21987
22685
  return {
21988
22686
  applied: result.applied,
@@ -22132,7 +22830,11 @@ z.object({
22132
22830
  skipCodegen: z.boolean(),
22133
22831
  fromProduction: z.boolean(),
22134
22832
  autoSnapshot: z.boolean(),
22135
- reportJson: z.string().optional()
22833
+ reportJson: z.string().optional(),
22834
+ /** Subprocess timeout in milliseconds (from CLI flag) */
22835
+ timeoutMs: z.number().int().positive().optional(),
22836
+ /** Config-level timeout from runa.config.ts (lower priority than timeoutMs and env var) */
22837
+ configTimeoutMs: z.number().int().positive().optional()
22136
22838
  });
22137
22839
  z.object({
22138
22840
  /** Target environment */
@@ -22168,8 +22870,10 @@ z.object({
22168
22870
  autoSnapshot: z.boolean().optional(),
22169
22871
  reportJson: z.string().optional(),
22170
22872
  /** Pre-clean orphan empty tables + unused enums before sync */
22171
- reconcile: z.boolean().optional()
22172
- });
22873
+ reconcile: z.boolean().optional(),
22874
+ /** Subprocess timeout in milliseconds */
22875
+ timeoutMs: z.number().int().positive().optional()
22876
+ }).strict();
22173
22877
 
22174
22878
  // src/commands/db/sync/machine.ts
22175
22879
  init_esm_shims();
@@ -22341,7 +23045,9 @@ var dbSyncMachine = setup({
22341
23045
  skipCodegen: context.input.skipCodegen ?? false,
22342
23046
  fromProduction: context.input.fromProduction ?? false,
22343
23047
  autoSnapshot: context.input.autoSnapshot ?? false,
22344
- reportJson: context.input.reportJson
23048
+ reportJson: context.input.reportJson,
23049
+ timeoutMs: context.input.timeoutMs,
23050
+ configTimeoutMs: event.output.configTimeoutMs
22345
23051
  })
22346
23052
  })
22347
23053
  },
@@ -22361,7 +23067,9 @@ var dbSyncMachine = setup({
22361
23067
  skipCodegen: context.input.skipCodegen ?? false,
22362
23068
  fromProduction: context.input.fromProduction ?? false,
22363
23069
  autoSnapshot: context.input.autoSnapshot ?? false,
22364
- reportJson: context.input.reportJson
23070
+ reportJson: context.input.reportJson,
23071
+ timeoutMs: context.input.timeoutMs,
23072
+ configTimeoutMs: event.output.configTimeoutMs
22365
23073
  })
22366
23074
  })
22367
23075
  }
@@ -22581,7 +23289,8 @@ function optionsToMachineInput2(env2, options) {
22581
23289
  fromProduction: options.fromProduction === true || typeof options.fromProduction === "string",
22582
23290
  reportJson: options.reportJson,
22583
23291
  targetDir: process.cwd(),
22584
- reconcile: options.reconcile === true
23292
+ reconcile: options.reconcile === true,
23293
+ timeoutMs: options.timeout
22585
23294
  };
22586
23295
  }
22587
23296
  var dbSyncHelpers = {
@@ -22773,6 +23482,14 @@ var syncCommand = new Command("sync").description("Sync SQL schemas to database
22773
23482
  ).option(
22774
23483
  "--bootstrap",
22775
23484
  "Bootstrap mode: Auto-start Supabase with --ignore-health-check if not running"
23485
+ ).option(
23486
+ "--timeout <ms>",
23487
+ "Subprocess timeout in ms (default: 180000 local, 600000 production)",
23488
+ (val) => {
23489
+ const n = Number.parseInt(val, 10);
23490
+ if (Number.isNaN(n) || n <= 0) throw new Error("--timeout must be a positive integer");
23491
+ return n;
23492
+ }
22776
23493
  ).action(
22777
23494
  async (env2, options) => await runSyncCommandAction(env2, options)
22778
23495
  );
@@ -23451,9 +24168,9 @@ async function checkDocker() {
23451
24168
  severity: "error",
23452
24169
  message: "Docker is not running or not installed",
23453
24170
  fixInstructions: [
23454
- "Start Docker Desktop (macOS/Windows)",
23455
- "Or start Docker daemon: sudo systemctl start docker (Linux)",
23456
- "Install Docker: https://docs.docker.com/get-docker/"
24171
+ "Start Colima: colima start --cpu 4 --memory 8 --disk 60 --vm-type vz --mount-type virtiofs",
24172
+ "Install Colima: brew install colima docker",
24173
+ "Linux: sudo systemctl start docker"
23457
24174
  ]
23458
24175
  };
23459
24176
  }
@@ -23485,24 +24202,24 @@ async function checkPort(port) {
23485
24202
  }
23486
24203
  function detectSupabasePortsFromConfig() {
23487
24204
  const configPath = path11__default.join(process.cwd(), "supabase", "config.toml");
23488
- const BASE_PORTS = { api: 54321, db: 54322, studio: 54323, inbucket: 54324 };
24205
+ const BASE_PORTS2 = { api: 54321, db: 54322, studio: 54323, inbucket: 54324 };
23489
24206
  if (!existsSync(configPath)) {
23490
- return Object.values(BASE_PORTS);
24207
+ return Object.values(BASE_PORTS2);
23491
24208
  }
23492
24209
  try {
23493
24210
  const content = readFileSync(configPath, "utf-8");
23494
24211
  const ports = [];
23495
24212
  const apiMatch = content.match(/\[api\][\s\S]*?port\s*=\s*(\d+)/);
23496
- ports.push(apiMatch ? Number.parseInt(apiMatch[1], 10) : BASE_PORTS.api);
24213
+ ports.push(apiMatch ? Number.parseInt(apiMatch[1], 10) : BASE_PORTS2.api);
23497
24214
  const dbMatch = content.match(/\[db\][\s\S]*?port\s*=\s*(\d+)/);
23498
- ports.push(dbMatch ? Number.parseInt(dbMatch[1], 10) : BASE_PORTS.db);
24215
+ ports.push(dbMatch ? Number.parseInt(dbMatch[1], 10) : BASE_PORTS2.db);
23499
24216
  const studioMatch = content.match(/\[studio\][\s\S]*?port\s*=\s*(\d+)/);
23500
- ports.push(studioMatch ? Number.parseInt(studioMatch[1], 10) : BASE_PORTS.studio);
24217
+ ports.push(studioMatch ? Number.parseInt(studioMatch[1], 10) : BASE_PORTS2.studio);
23501
24218
  const inbucketMatch = content.match(/\[inbucket\][\s\S]*?port\s*=\s*(\d+)/);
23502
- ports.push(inbucketMatch ? Number.parseInt(inbucketMatch[1], 10) : BASE_PORTS.inbucket);
24219
+ ports.push(inbucketMatch ? Number.parseInt(inbucketMatch[1], 10) : BASE_PORTS2.inbucket);
23503
24220
  return ports;
23504
24221
  } catch {
23505
- return Object.values(BASE_PORTS);
24222
+ return Object.values(BASE_PORTS2);
23506
24223
  }
23507
24224
  }
23508
24225
  async function checkSupabasePorts() {
@@ -23569,68 +24286,56 @@ function diagnoseInitFailure(errorMessage) {
23569
24286
 
23570
24287
  // src/utils/port-allocator.ts
23571
24288
  init_esm_shims();
23572
- var BASE_PORT = 54321;
23573
- var PORTS_PER_SLOT = 10;
23574
- var TOTAL_SLOTS = 100;
23575
- function calculatePortOffset(projectPath) {
23576
- const normalizedPath = path11__default.resolve(projectPath);
23577
- const hash = createHash("md5").update(normalizedPath).digest("hex");
23578
- return parseInt(hash.slice(0, 8), 16) % TOTAL_SLOTS;
23579
- }
23580
24289
  function getSupabasePorts(projectPath) {
23581
24290
  const offset = calculatePortOffset(projectPath);
23582
- const basePort = BASE_PORT + offset * PORTS_PER_SLOT;
23583
- return {
23584
- api: basePort + 0,
23585
- db: basePort + 1,
23586
- studio: basePort + 2,
23587
- inbucket: basePort + 3,
23588
- auth: basePort + 4,
23589
- rest: basePort + 5,
23590
- realtime: basePort + 6,
23591
- storage: basePort + 7,
23592
- shadow: basePort + 8
23593
- };
24291
+ return getPortsWithOffset(offset);
23594
24292
  }
23595
- function updateSupabaseConfigPorts(projectPath) {
23596
- const ports = getSupabasePorts(projectPath);
24293
+ async function updateSupabaseConfigPortsSafe(projectPath) {
23597
24294
  const configPath = path11__default.join(projectPath, "supabase", "config.toml");
24295
+ const resolved = await resolveAvailablePorts(projectPath);
24296
+ if (!resolved) {
24297
+ const ports = getSupabasePorts(projectPath);
24298
+ return { updated: false, ports, configPath, retried: false };
24299
+ }
23598
24300
  if (!existsSync(configPath)) {
23599
- return { updated: false, ports, configPath };
24301
+ return { updated: false, ports: resolved.ports, configPath, retried: resolved.retried };
23600
24302
  }
24303
+ const updated = writePortsToConfig(configPath, resolved.ports);
24304
+ return { updated, ports: resolved.ports, configPath, retried: resolved.retried };
24305
+ }
24306
+ function writePortsToConfig(configPath, ports) {
23601
24307
  let content = readFileSync(configPath, "utf-8");
23602
- let updated = false;
24308
+ let changed = false;
23603
24309
  const portMappings = [
23604
24310
  { section: "api", key: "port", value: ports.api },
23605
24311
  { section: "db", key: "port", value: ports.db },
23606
24312
  { section: "db", key: "shadow_port", value: ports.shadow },
23607
24313
  { section: "studio", key: "port", value: ports.studio },
23608
- { section: "inbucket", key: "port", value: ports.inbucket },
23609
- { section: "auth", key: "port", value: ports.auth }
23610
- // Note: rest, realtime, storage ports are internal and not in config.toml
24314
+ { section: "inbucket", key: "port", value: ports.inbucket }
23611
24315
  ];
23612
24316
  for (const { section, key, value } of portMappings) {
23613
24317
  const sectionRegex = new RegExp(`(\\[${section}\\][^\\[]*?)(${key}\\s*=\\s*)(\\d+)`, "gs");
24318
+ sectionRegex.lastIndex = 0;
23614
24319
  const newContent = content.replace(sectionRegex, (match, prefix, keyPart, oldValue) => {
23615
24320
  if (parseInt(oldValue, 10) !== value) {
23616
- updated = true;
24321
+ changed = true;
23617
24322
  return `${prefix}${keyPart}${value}`;
23618
24323
  }
23619
24324
  return match;
23620
24325
  });
23621
24326
  content = newContent;
23622
24327
  }
23623
- if (updated) {
24328
+ if (changed) {
23624
24329
  writeFileSync(configPath, content, "utf-8");
23625
24330
  }
23626
- return { updated, ports, configPath };
24331
+ return changed;
23627
24332
  }
23628
24333
  function getPortAllocationSummary(projectPath) {
23629
24334
  const ports = getSupabasePorts(projectPath);
23630
24335
  const offset = calculatePortOffset(projectPath);
23631
24336
  return [
23632
24337
  `Port allocation for: ${path11__default.basename(projectPath)}`,
23633
- ` Slot: ${offset} (hash-based)`,
24338
+ ` Slot: ${offset / 10} (hash-based, offset=${offset})`,
23634
24339
  ` API: ${ports.api}`,
23635
24340
  ` DB: ${ports.db}`,
23636
24341
  ` Studio: ${ports.studio}`,
@@ -23788,11 +24493,14 @@ var startCommand = new Command("start").description("Start local Supabase with a
23788
24493
  logger16.info(output3);
23789
24494
  return;
23790
24495
  }
23791
- const portResult = updateSupabaseConfigPorts(projectRoot);
24496
+ const portResult = await updateSupabaseConfigPortsSafe(projectRoot);
23792
24497
  if (portResult.updated) {
23793
24498
  logger16.info("\u{1F522} Port allocation updated for this project:");
23794
24499
  logger16.info(getPortAllocationSummary(projectRoot));
23795
24500
  }
24501
+ if (portResult.retried) {
24502
+ logger16.warn("\u26A0\uFE0F Hash-based port slot was occupied; using fallback slot.");
24503
+ }
23796
24504
  logStartModeInfo(logger16, options, detectionResult, finalExcluded);
23797
24505
  const result = await dbStart({
23798
24506
  exclude: finalExcluded,
@@ -23879,11 +24587,14 @@ var resetCommand = new Command("reset").description("Reset local database with s
23879
24587
  try {
23880
24588
  logger16.section("Database Reset");
23881
24589
  const projectRoot = process.cwd();
23882
- const portResult = updateSupabaseConfigPorts(projectRoot);
24590
+ const portResult = await updateSupabaseConfigPortsSafe(projectRoot);
23883
24591
  if (portResult.updated) {
23884
24592
  logger16.info("\u{1F522} Port allocation updated for this project:");
23885
24593
  logger16.info(getPortAllocationSummary(projectRoot));
23886
24594
  }
24595
+ if (portResult.retried) {
24596
+ logger16.warn("\u26A0\uFE0F Hash-based port slot was occupied; using fallback slot.");
24597
+ }
23887
24598
  logger16.step("Stopping Supabase...", 1);
23888
24599
  const resetResult = await dbReset({
23889
24600
  env: "local",
@@ -24733,9 +25444,15 @@ async function readSeedMetadataFile(metadataPath) {
24733
25444
  }
24734
25445
  }
24735
25446
  var seedMetadataCommand = new Command("metadata").description("Extract seed metadata (primary root anchor) for CI workflows").option("--file <path>", "Path to metadata JSON file (optional; prefer --from-db)").option("--from-db", "Infer metadata from applied database state (preferred)", false).option("--github-output", "Write outputs to $GITHUB_OUTPUT (GitHub Actions)", false).action(async (options) => {
24736
- const metadataPath = options.file?.trim();
24737
- const shouldUseDb = options.fromDb === true || !metadataPath;
24738
- const out = shouldUseDb ? await inferPrimaryIdsFromDatabase() : await readSeedMetadataFile(metadataPath);
25447
+ const rawMetadataPath = options.file?.trim();
25448
+ const shouldUseDb = options.fromDb === true || !rawMetadataPath;
25449
+ let out;
25450
+ if (shouldUseDb) {
25451
+ out = await inferPrimaryIdsFromDatabase();
25452
+ } else {
25453
+ const validatedPath = validateUserFilePath(rawMetadataPath, process.cwd());
25454
+ out = await readSeedMetadataFile(validatedPath);
25455
+ }
24739
25456
  if (options.githubOutput === true) {
24740
25457
  await writeGitHubOutput({
24741
25458
  ...out.primary?.root?.id ? { root_id: out.primary.root.id } : {},
@@ -26518,9 +27235,7 @@ async function getVercelRootDirectory() {
26518
27235
  init_local_supabase();
26519
27236
  var ERROR_MESSAGES3 = {
26520
27237
  INVALID_PATH: "Invalid working directory path",
26521
- PATH_TRAVERSAL: "Working directory path validation failed",
26522
- APP_NOT_FOUND: "App directory not found"
26523
- };
27238
+ PATH_TRAVERSAL: "Working directory path validation failed"};
26524
27239
  function sanitizeErrorMessage(message) {
26525
27240
  if (!message || typeof message !== "string") {
26526
27241
  return "Unknown error";
@@ -26580,23 +27295,6 @@ function validateCustomWorkingDir(cwdPath, projectRoot) {
26580
27295
  }
26581
27296
  return absolutePath;
26582
27297
  }
26583
- function validateAppDirectory2(appName, projectRoot) {
26584
- if (containsPathTraversal3(appName) || appName.includes("/") || appName.includes("\\")) {
26585
- throw new CLIError(ERROR_MESSAGES3.PATH_TRAVERSAL, "ENV_PULL_PATH_TRAVERSAL");
26586
- }
26587
- const appsDir = resolve(projectRoot, "apps");
26588
- const appDir = resolve(appsDir, appName);
26589
- if (!isPathWithinBase2(appDir, appsDir)) {
26590
- throw new CLIError(ERROR_MESSAGES3.PATH_TRAVERSAL, "ENV_PULL_PATH_TRAVERSAL");
26591
- }
26592
- if (!existsSync(appDir)) {
26593
- throw new CLIError(ERROR_MESSAGES3.APP_NOT_FOUND, "ENV_PULL_APP_NOT_FOUND", [
26594
- `Available apps: ${getAvailableApps().join(", ") || "none"}`,
26595
- "Specify full path with --cwd instead"
26596
- ]);
26597
- }
26598
- return appDir;
26599
- }
26600
27298
  var LOCAL_BOOTSTRAP_REQUIRED_KEYS = [
26601
27299
  "LOCAL_SUPABASE_HOST",
26602
27300
  "LOCAL_SUPABASE_API_PORT",
@@ -26693,22 +27391,18 @@ function resolveVercelAuth(workDir, options, logger16) {
26693
27391
  }
26694
27392
  function resolveWorkingDir(options) {
26695
27393
  const projectRoot = process.cwd();
27394
+ if (options.app) {
27395
+ console.warn(
27396
+ `\u26A0\uFE0F --app is deprecated. Environment files are managed at the monorepo root only.
27397
+ Turbo auto-propagates root .env.* files to all workspaces.
27398
+ Writing to project root instead of apps/${options.app}/.`
27399
+ );
27400
+ }
26696
27401
  if (options.cwd) {
26697
27402
  return validateCustomWorkingDir(options.cwd, projectRoot);
26698
27403
  }
26699
- if (options.app) {
26700
- return validateAppDirectory2(options.app, projectRoot);
26701
- }
26702
27404
  return projectRoot;
26703
27405
  }
26704
- function getAvailableApps() {
26705
- const appsDir = resolve(process.cwd(), "apps");
26706
- if (!existsSync(appsDir)) return [];
26707
- return readdirSync(appsDir).filter((name) => {
26708
- const fullPath = resolve(appsDir, name);
26709
- return statSync(fullPath).isDirectory();
26710
- });
26711
- }
26712
27406
  function getOutputPath(workDir, environment) {
26713
27407
  return resolve(workDir, `.env.${environment}`);
26714
27408
  }
@@ -28941,7 +29635,11 @@ async function listArchivedHotfixes(input3 = {}) {
28941
29635
  const hotfixes = [];
28942
29636
  for (const file of files.filter((f) => f.endsWith(".json"))) {
28943
29637
  try {
28944
- const content = await readFile(path11__default.join(archiveDir, file), "utf-8");
29638
+ const filePath = path11__default.join(archiveDir, file);
29639
+ if (!isPathContained(archiveDir, filePath)) {
29640
+ continue;
29641
+ }
29642
+ const content = await readFile(filePath, "utf-8");
28945
29643
  hotfixes.push(HotfixMetadataSchema.parse(JSON.parse(content)));
28946
29644
  } catch {
28947
29645
  }
@@ -30278,10 +30976,10 @@ Fix:
30278
30976
  Docker is not running or not installed.
30279
30977
 
30280
30978
  Fix:
30281
- 1. Start Docker Desktop (macOS/Windows)
30282
- 2. Or start Docker daemon (Linux): sudo systemctl start docker
30283
- 3. Verify Docker is running: docker ps
30284
- 4. Install Docker if needed: https://docs.docker.com/get-docker/
30979
+ 1. Start Colima: colima start --cpu 4 --memory 8 --vm-type vz --mount-type virtiofs
30980
+ 2. Install Colima: brew install colima docker
30981
+ 3. Linux: sudo systemctl start docker
30982
+ 4. Verify Docker is running: docker ps
30285
30983
  `,
30286
30984
  relatedCommands: ["runa check"]
30287
30985
  },
@@ -30407,7 +31105,7 @@ init_esm_shims();
30407
31105
 
30408
31106
  // src/constants/versions.ts
30409
31107
  init_esm_shims();
30410
- var COMPATIBLE_TEMPLATES_VERSION = "0.5.44";
31108
+ var COMPATIBLE_TEMPLATES_VERSION = "0.5.56";
30411
31109
  var TEMPLATES_PACKAGE_NAME = "@r06-dev/runa-templates";
30412
31110
  var GITHUB_PACKAGES_REGISTRY = "https://npm.pkg.github.com";
30413
31111
 
@@ -31371,21 +32069,28 @@ function collectRouteInfo(relativePath, code, _verbose) {
31371
32069
  if (isExcludedScope(relativePath)) {
31372
32070
  return;
31373
32071
  }
31374
- if (isPageFile(relativePath)) {
32072
+ const isPage = isPageFile(relativePath);
32073
+ const isLayout = isLayoutFile(relativePath);
32074
+ const isApiRoute = isApiRouteFile(relativePath);
32075
+ const isMiddleware = isMiddlewareFile(relativePath);
32076
+ if (isPage) {
31375
32077
  collectPageInfo(relativePath, code);
31376
32078
  }
31377
- if (isLayoutFile(relativePath)) {
32079
+ if (isLayout) {
31378
32080
  collectLayoutInfo(relativePath, code);
31379
32081
  }
31380
- if (isApiRouteFile(relativePath)) {
32082
+ if (isApiRoute) {
31381
32083
  collectApiRouteInfo(relativePath, code);
31382
32084
  }
31383
- if (isMiddlewareFile(relativePath)) {
32085
+ if (isMiddleware) {
31384
32086
  collectAuthBoundaries(relativePath, code);
31385
32087
  }
31386
32088
  if (hasMachineDefinition(code)) {
31387
32089
  collectMachineDefinition(relativePath, code);
31388
32090
  }
32091
+ if (!isPage && !isLayout && !isApiRoute && !isMiddleware) {
32092
+ collectComponentInfo(relativePath, code);
32093
+ }
31389
32094
  }
31390
32095
  function emptyResult(filePath) {
31391
32096
  return {
@@ -31826,6 +32531,12 @@ async function preprocessFile(filePath, repoRoot, options) {
31826
32531
  const attrs = extractInjectedAttributes(code);
31827
32532
  const machineIdMatches = code.matchAll(/data-machine-id="([^"]+)"/g);
31828
32533
  const machineIds = [...new Set([...machineIdMatches].map((m) => m[1]).filter(Boolean))];
32534
+ const resolutionDetails = machineIds.map((machineId) => ({
32535
+ filePath: relativePath,
32536
+ machineRef: machineId,
32537
+ resolvedId: machineId,
32538
+ source: "explicit"
32539
+ }));
31829
32540
  if (machineIds.length === 0 && mightContainMachineHooks(code)) {
31830
32541
  if (options.verbose) {
31831
32542
  console.log(` [recovery] Marker found but no attributes in ${relativePath}, re-injecting`);
@@ -31844,7 +32555,7 @@ async function preprocessFile(filePath, repoRoot, options) {
31844
32555
  machineIds,
31845
32556
  changed: false,
31846
32557
  ...attrs,
31847
- resolutionDetails: []
32558
+ resolutionDetails
31848
32559
  }
31849
32560
  };
31850
32561
  }
@@ -34984,15 +35695,15 @@ function printActionsNeeded(logger16, actions) {
34984
35695
  );
34985
35696
  }
34986
35697
  function findRepoRoot3(startDir) {
34987
- const { existsSync: existsSync52, readFileSync: readFileSync29 } = __require("fs");
35698
+ const { existsSync: existsSync53, readFileSync: readFileSync29 } = __require("fs");
34988
35699
  const { join: join23, dirname: dirname5 } = __require("path");
34989
35700
  let current = startDir;
34990
35701
  while (current !== dirname5(current)) {
34991
- if (existsSync52(join23(current, "turbo.json"))) {
35702
+ if (existsSync53(join23(current, "turbo.json"))) {
34992
35703
  return current;
34993
35704
  }
34994
35705
  const pkgPath = join23(current, "package.json");
34995
- if (existsSync52(pkgPath)) {
35706
+ if (existsSync53(pkgPath)) {
34996
35707
  try {
34997
35708
  const pkg = JSON.parse(readFileSync29(pkgPath, "utf-8"));
34998
35709
  if (pkg.workspaces) {
@@ -35060,10 +35771,10 @@ function generateReportOutput(output3, isJsonMode) {
35060
35771
  };
35061
35772
  }
35062
35773
  function validateRunaRepo(repoRoot) {
35063
- const { existsSync: existsSync52 } = __require("fs");
35774
+ const { existsSync: existsSync53 } = __require("fs");
35064
35775
  const { join: join23 } = __require("path");
35065
35776
  const templateDir = join23(repoRoot, "packages/runa-templates/templates");
35066
- if (!existsSync52(templateDir)) {
35777
+ if (!existsSync53(templateDir)) {
35067
35778
  throw new CLIError("template-check is a runa-repo only command", "NOT_RUNA_REPO", [
35068
35779
  "This command compares runa-repo with pj-repo templates",
35069
35780
  "It should only be run in the runa repository",