@runa-ai/runa-cli 0.5.30 → 0.5.31

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/dist/commands/ci/commands/ci-prod-apply.d.ts.map +1 -1
  2. package/dist/commands/ci/commands/ci-prod-db-operations.d.ts +4 -0
  3. package/dist/commands/ci/commands/ci-prod-db-operations.d.ts.map +1 -1
  4. package/dist/commands/ci/utils/env-security.d.ts +58 -0
  5. package/dist/commands/ci/utils/env-security.d.ts.map +1 -0
  6. package/dist/commands/ci/utils/execa-helpers.d.ts +4 -0
  7. package/dist/commands/ci/utils/execa-helpers.d.ts.map +1 -1
  8. package/dist/commands/ci/utils/github.d.ts +6 -0
  9. package/dist/commands/ci/utils/github.d.ts.map +1 -1
  10. package/dist/commands/ci/utils/pgtap-installer.d.ts +3 -0
  11. package/dist/commands/ci/utils/pgtap-installer.d.ts.map +1 -1
  12. package/dist/commands/ci/utils/rls-verification.d.ts +3 -0
  13. package/dist/commands/ci/utils/rls-verification.d.ts.map +1 -1
  14. package/dist/commands/ci/utils/workflow-idempotency.d.ts +90 -0
  15. package/dist/commands/ci/utils/workflow-idempotency.d.ts.map +1 -0
  16. package/dist/commands/db/apply/actors.d.ts.map +1 -1
  17. package/dist/commands/db/apply/helpers/advisory-lock.d.ts.map +1 -1
  18. package/dist/commands/db/apply/helpers/pg-schema-diff-helpers.d.ts +4 -0
  19. package/dist/commands/db/apply/helpers/pg-schema-diff-helpers.d.ts.map +1 -1
  20. package/dist/commands/db/commands/db-derive-role-passwords.d.ts.map +1 -1
  21. package/dist/commands/db/commands/db-derive-urls.d.ts.map +1 -1
  22. package/dist/commands/db/utils/psql.d.ts +57 -0
  23. package/dist/commands/db/utils/psql.d.ts.map +1 -1
  24. package/dist/commands/db/utils/seed-manager.d.ts +4 -0
  25. package/dist/commands/db/utils/seed-manager.d.ts.map +1 -1
  26. package/dist/commands/env/commands/setup/file-export.d.ts +12 -1
  27. package/dist/commands/env/commands/setup/file-export.d.ts.map +1 -1
  28. package/dist/config/env-files.d.ts +5 -0
  29. package/dist/config/env-files.d.ts.map +1 -1
  30. package/dist/config/env.d.ts +4 -0
  31. package/dist/config/env.d.ts.map +1 -1
  32. package/dist/index.js +1284 -475
  33. package/dist/internal/vuln-checker/analyzers/secret-analyzer.d.ts +5 -0
  34. package/dist/internal/vuln-checker/analyzers/secret-analyzer.d.ts.map +1 -1
  35. package/dist/utils/config-updater.d.ts +5 -0
  36. package/dist/utils/config-updater.d.ts.map +1 -1
  37. package/dist/utils/github-output-security.d.ts +36 -0
  38. package/dist/utils/github-output-security.d.ts.map +1 -0
  39. package/dist/utils/path-security.d.ts +98 -0
  40. package/dist/utils/path-security.d.ts.map +1 -0
  41. package/dist/utils/secure-exec.d.ts +22 -12
  42. package/dist/utils/secure-exec.d.ts.map +1 -1
  43. package/dist/utils/template-fetcher.d.ts +11 -0
  44. package/dist/utils/template-fetcher.d.ts.map +1 -1
  45. package/package.json +2 -2
package/dist/index.js CHANGED
@@ -1,14 +1,14 @@
1
1
  #!/usr/bin/env node
2
2
  import { createRequire } from 'module';
3
3
  import * as path10 from 'path';
4
- import path10__default, { join, dirname, resolve, relative, basename, isAbsolute, normalize } from 'path';
4
+ import path10__default, { join, dirname, resolve, relative, basename, sep, isAbsolute, normalize } from 'path';
5
5
  import { fileURLToPath } from 'url';
6
6
  import { execSync, spawnSync, execFileSync, exec, spawn } from 'child_process';
7
7
  import * as fs6 from 'fs';
8
- import fs6__default, { existsSync, readFileSync, readdirSync, mkdtempSync, writeFileSync, mkdirSync, copyFileSync, createWriteStream, statSync, rmSync, realpathSync, promises, lstatSync, chmodSync, accessSync, constants } from 'fs';
8
+ import fs6__default, { existsSync, readFileSync, readdirSync, mkdtempSync, writeFileSync, mkdirSync, copyFileSync, createWriteStream, statSync, rmSync, realpathSync, promises, lstatSync, chmodSync, accessSync, constants, unlinkSync } from 'fs';
9
9
  import { createCLILogger, cacheClear, CacheClearOutputSchema, CLIError, cachePrune, CachePruneOutputSchema, cacheStats, CacheStatsOutputSchema, cacheList, CacheListOutputSchema, cacheInvalidate, CacheInvalidateOutputSchema, syncFromProduction, dbGenerateDiagram, DbDiagramGenerateOutputSchema, createDbSnapshot, syncDatabase, emitDbPushFailureCapsule, emitDbAnnotations, writeDbPushStepSummary, exportDbReportJson, DbSyncOutputSchema, databasePaths, detectRequiredServices, formatDetectionResults, dbStart, DbLifecycleStartOutputSchema, dbStop, DbLifecycleStopOutputSchema, dbReset, DbLifecycleResetOutputSchema, dbValidateSchemas, DbSchemaValidateOutputSchema, DbSchemaRisksOutputSchema, dbDetectSchemaRisks, dbApplySchemas, DbSchemaApplyOutputSchema, dbGenerateTypes, DbSchemaGenerateOutputSchema, extractSchemaFilter, dbSeedInit, DbSeedInitOutputSchema, dbSeedValidate, DbSeedValidateOutputSchema, dbSeedGenerate, DbSeedGenerateOutputSchema, dbVerifySeeds, DbSeedVerifyOutputSchema, DbSnapshotCreateOutputSchema, restoreDbSnapshot, DbSnapshotRestoreOutputSchema, listDbSnapshots, DbSnapshotListOutputSchema, dbGeneratePgTapTests, DbTestGenOutputSchema, dbUpdateGoldenRecord, DbTestUpdateGoldenOutputSchema, repairRunaConfig, detectExistingInitConfig, initProject, validateInitResult, linkCliGlobally, LinkCliOutputSchema, unlinkCliGlobally, UnlinkCliOutputSchema, checkRepoStatus, CheckRepoStatusOutputSchema, enableTelemetry, disableTelemetry, getTelemetryStatus, uploadTelemetry, TelemetryUploadOutputSchema, runTest, TestRunOutputSchema, runTestService, TestServiceOutputSchema, runTestIntegration, TestIntegrationOutputSchema, runTestStatic, TestStaticOutputSchema, generateOwaspTop10Tests, TestOwaspGenerateOutputSchema, updateGoldenRecord, generateE2ETests, generateSecurityTests, generateUnitTests, generateApiTests, generateComponentTests, generateE2EScaffold, validateConfig, ValidateConfigOutputSchema, deploySchemaToProduction, WorkflowNotifyOutputSchema, devopsSync, workflowSync, validateInfrastructure, emitWorkflowValidateFailureCapsule, emitWorkflowAnnotations, writeWorkflowValidateStepSummary, exportWorkflowReportJson, WorkflowValidateInfrastructureOutputSchema, createSuccessEnvelopeSchema, CLI_CONTRACT_VERSION, runChecks, RunCheckOutputSchema, formatDuration as formatDuration$1, GITHUB_API, loadRunaConfig, getClassificationForProfile, recordSchemaAudit, RecordSchemaAuditOutputSchema, createBackup, CreateBackupOutputSchema, listBackups, ListBackupsOutputSchema, getBackupMetadata, restoreBackup, RestoreBackupOutputSchema, deleteBackup, DeleteBackupOutputSchema, detectSchemaNames, SUPABASE_SYSTEM_SCHEMAS, loadRunaConfigOrThrow, dbSeedApply, writeDbSeedStepSummary, DbSeedApplyOutputSchema, emitDbSeedFailureCapsule, syncEnvironment, EnvSyncOutputSchema, detectDatabasePackage, findProjectRoot as findProjectRoot$1, TelemetryEnableOutputSchema, TelemetryDisableOutputSchema, TelemetryStatusOutputSchema, workflowNotify, DevOpsSyncOutputSchema, WorkflowSyncOutputSchema, formatCLIError, DATABASE_PACKAGE_CANDIDATES, getStatusIcon as getStatusIcon$1, findWorkspaceRoot as findWorkspaceRoot$1, checkExtensionConfig, UpgradeTransaction, readRunaVersion, syncTemplates, SyncOutputSchema, ErrorEnvelopeSchema, preCheckSync, findConflictFiles, TestUnitGenOutputSchema, TestE2EGenerateOutputSchema, TestSecurityGenOutputSchema, TestApiGenOutputSchema, TestComponentGenOutputSchema } from '@runa-ai/runa';
10
10
  import { z } from 'zod';
11
- import fs10, { mkdir, writeFile, appendFile, readFile, rm, cp, readdir } from 'fs/promises';
11
+ import fs10, { mkdir, writeFile, appendFile, readFile, rm, stat, realpath, cp, readdir } from 'fs/promises';
12
12
  import { promisify } from 'util';
13
13
  import { glob } from 'glob';
14
14
  import { Project, Node, SyntaxKind } from 'ts-morph';
@@ -515,25 +515,26 @@ function isCI() {
515
515
  function isDebug() {
516
516
  return env.DEBUG === true || env.LOG_LEVEL === "debug";
517
517
  }
518
+ function looksLikeSecret(value) {
519
+ return SENSITIVE_VALUE_PATTERNS.some((pattern) => pattern.test(value));
520
+ }
518
521
  function sanitizeEnv(envVars) {
519
- const sensitiveKeys = [
520
- "POSTGRES_PASSWORD",
521
- "SUPABASE_SERVICE_ROLE_KEY",
522
- "SUPABASE_ACCESS_TOKEN",
523
- "GITHUB_TOKEN",
524
- "GITHUB_APP_PRIVATE_KEY",
525
- "VERCEL_TOKEN"
526
- ];
527
522
  const sanitized = { ...envVars };
528
- for (const key of sensitiveKeys) {
529
- const value = sanitized[key];
530
- if (value !== void 0 && value !== "") {
523
+ for (const [key, value] of Object.entries(sanitized)) {
524
+ if (typeof value !== "string" || value === "") {
525
+ continue;
526
+ }
527
+ if (SENSITIVE_KEYS.includes(key)) {
528
+ sanitized[key] = "***";
529
+ continue;
530
+ }
531
+ if (looksLikeSecret(value)) {
531
532
  sanitized[key] = "***";
532
533
  }
533
534
  }
534
535
  return sanitized;
535
536
  }
536
- var envSchema, env;
537
+ var envSchema, env, SENSITIVE_KEYS, SENSITIVE_VALUE_PATTERNS;
537
538
  var init_env = __esm({
538
539
  "src/config/env.ts"() {
539
540
  init_esm_shims();
@@ -574,6 +575,31 @@ var init_env = __esm({
574
575
  RUNA_LOG_JSON: z.string().optional().transform((val) => val === "true")
575
576
  });
576
577
  env = loadEnv();
578
+ SENSITIVE_KEYS = [
579
+ "DATABASE_URL",
580
+ // Contains password in connection string
581
+ "POSTGRES_PASSWORD",
582
+ "SUPABASE_SERVICE_ROLE_KEY",
583
+ "SUPABASE_ACCESS_TOKEN",
584
+ "SUPABASE_ANON_KEY",
585
+ "GITHUB_TOKEN",
586
+ "GITHUB_APP_PRIVATE_KEY",
587
+ "VERCEL_TOKEN"
588
+ ];
589
+ SENSITIVE_VALUE_PATTERNS = [
590
+ // Database URLs with passwords
591
+ /postgresql:\/\/[^:]+:[^@]+@/i,
592
+ /postgres:\/\/[^:]+:[^@]+@/i,
593
+ /mysql:\/\/[^:]+:[^@]+@/i,
594
+ // JWT tokens
595
+ /^eyJ[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+$/,
596
+ // GitHub tokens
597
+ /^gh[pousr]_[A-Za-z0-9_]+$/,
598
+ // API keys with common prefixes
599
+ /^sk[_-](?:live|test|proj)[_-]/i,
600
+ // AWS keys
601
+ /^AKIA[A-Z0-9]{16}$/
602
+ ];
577
603
  }
578
604
  });
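The hunks above replace the fixed six-key mask in `sanitizeEnv` with two checks: exact matches against `SENSITIVE_KEYS` and shape matches against `SENSITIVE_VALUE_PATTERNS` (connection strings, JWTs, GitHub tokens, API-key prefixes, AWS access keys). A minimal standalone sketch of the new behavior, with the constants re-declared for illustration (they are not exported by the package):

```ts
// Standalone sketch of the new sanitizeEnv behavior (abbreviated constant lists;
// the real lists above also cover Supabase, Vercel and AWS keys).
const SENSITIVE_KEYS = ["DATABASE_URL", "POSTGRES_PASSWORD", "GITHUB_TOKEN", "VERCEL_TOKEN"];
const SENSITIVE_VALUE_PATTERNS = [
  /postgres(?:ql)?:\/\/[^:]+:[^@]+@/i,                    // connection strings with embedded passwords
  /^eyJ[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+$/,  // JWTs
  /^gh[pousr]_[A-Za-z0-9_]+$/,                            // GitHub tokens
];

function looksLikeSecret(value: string): boolean {
  return SENSITIVE_VALUE_PATTERNS.some((pattern) => pattern.test(value));
}

function sanitizeEnv(envVars: Record<string, unknown>): Record<string, unknown> {
  const sanitized = { ...envVars };
  for (const [key, value] of Object.entries(sanitized)) {
    if (typeof value !== "string" || value === "") continue;
    if (SENSITIVE_KEYS.includes(key) || looksLikeSecret(value)) sanitized[key] = "***";
  }
  return sanitized;
}

// A secret-shaped value is now masked even when its key is not on the list:
console.log(sanitizeEnv({ MY_REPLICA_URL: "postgresql://app:hunter2@db:5432/app", NODE_ENV: "test" }));
// => { MY_REPLICA_URL: '***', NODE_ENV: 'test' }
```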
579
605
 
@@ -899,7 +925,7 @@ var CLI_VERSION, HAS_ADMIN_COMMAND;
899
925
  var init_version = __esm({
900
926
  "src/version.ts"() {
901
927
  init_esm_shims();
902
- CLI_VERSION = "0.5.30";
928
+ CLI_VERSION = "0.5.31";
903
929
  HAS_ADMIN_COMMAND = false;
904
930
  }
905
931
  });
@@ -1764,6 +1790,9 @@ function calculateEntropy(str) {
1764
1790
  return entropy;
1765
1791
  }
1766
1792
  function looksLikePlaceholder(match) {
1793
+ if (match.length > 500) {
1794
+ return false;
1795
+ }
1767
1796
  const matchLower = match.toLowerCase();
1768
1797
  const placeholders = [
1769
1798
  "your_",
@@ -1797,8 +1826,12 @@ function looksLikePlaceholder(match) {
1797
1826
  if (placeholders.some((p) => matchLower.includes(p))) {
1798
1827
  return true;
1799
1828
  }
1800
- if (/^(.{1,4})\1{3,}$/.test(match)) {
1801
- return true;
1829
+ if (match.length >= 8) {
1830
+ const uniqueChars = new Set(match).size;
1831
+ const uniqueRatio = uniqueChars / match.length;
1832
+ if (uniqueRatio < 0.15) {
1833
+ return true;
1834
+ }
1802
1835
  }
1803
1836
  if (/^[a-z]+$/.test(match) || /^[A-Z]+$/.test(match) || /^[0-9]+$/.test(match)) {
1804
1837
  return true;
@@ -1929,6 +1962,9 @@ function findPatternMatches(pattern, line) {
1929
1962
  }
1930
1963
  function scanLineForSecrets(line, lineNumber, filePath) {
1931
1964
  const findings = [];
1965
+ if (line.length > MAX_LINE_LENGTH) {
1966
+ return findings;
1967
+ }
1932
1968
  for (const pattern of SECRET_PATTERNS) {
1933
1969
  const matches = findPatternMatches(pattern, line);
1934
1970
  for (const match of matches) {
@@ -1936,7 +1972,7 @@ function scanLineForSecrets(line, lineNumber, filePath) {
1936
1972
  findings.push(createSecretFinding(pattern, filePath, lineNumber, line, match));
1937
1973
  }
1938
1974
  }
1939
- const base64Pattern = /['"`]([A-Za-z0-9+/]{40,}=*)['"`]/g;
1975
+ const base64Pattern = /['"`]([A-Za-z0-9+/]{40,500}=*)['"`]/g;
1940
1976
  for (const base64Match of line.matchAll(base64Pattern)) {
1941
1977
  const base64String = base64Match[1];
1942
1978
  const result = checkBase64ForSecrets(base64String);
@@ -1970,6 +2006,10 @@ function scanLineForSecrets(line, lineNumber, filePath) {
1970
2006
  return findings;
1971
2007
  }
1972
2008
  async function scanFileForSecrets(filePath) {
2009
+ const stats = await fs10.stat(filePath);
2010
+ if (stats.size > MAX_FILE_SIZE) {
2011
+ return [];
2012
+ }
1973
2013
  const content = await fs10.readFile(filePath, "utf-8");
1974
2014
  const lines = content.split("\n");
1975
2015
  const findings = [];
@@ -2045,11 +2085,13 @@ function isMultiLineFalsePositive(blockContent, lines, startLine) {
2045
2085
  }
2046
2086
  return false;
2047
2087
  }
2048
- var SECRET_PATTERNS, BASE64_SECRET_INDICATORS, EXCLUDED_PATTERNS, MULTILINE_PATTERNS, SecretAnalyzer;
2088
+ var MAX_LINE_LENGTH, MAX_FILE_SIZE, SECRET_PATTERNS, BASE64_SECRET_INDICATORS, EXCLUDED_PATTERNS, MULTILINE_PATTERNS, SecretAnalyzer;
2049
2089
  var init_secret_analyzer = __esm({
2050
2090
  "src/internal/vuln-checker/analyzers/secret-analyzer.ts"() {
2051
2091
  init_esm_shims();
2052
2092
  init_constants2();
2093
+ MAX_LINE_LENGTH = 2e3;
2094
+ MAX_FILE_SIZE = 1024 * 1024;
2053
2095
  SECRET_PATTERNS = [
2054
2096
  // AWS
2055
2097
  {
@@ -2066,7 +2108,9 @@ var init_secret_analyzer = __esm({
2066
2108
  name: "AWS Secret Key",
2067
2109
  // AWS Secret Keys are 40 chars, but we require context clues to reduce false positives
2068
2110
  // Look for assignment context: aws_secret, secret_key, AWS_SECRET, etc.
2069
- pattern: /(?:aws[_-]?secret|secret[_-]?(?:access[_-]?)?key|AWS_SECRET)[_\s]*[:=]\s*['"]?([A-Za-z0-9/+=]{40})['"]?/gi,
2111
+ // SECURITY (Issue #463): Simplified pattern to avoid nested quantifiers
2112
+ // Changed from nested optional groups to explicit alternation
2113
+ pattern: /(?:aws_secret|aws-secret|awssecret|secret_key|secret-key|secretkey|secret_access_key|AWS_SECRET)\s{0,5}[:=]\s{0,5}['"]?([A-Za-z0-9/+=]{40})['"]?/gi,
2070
2114
  severity: "critical",
2071
2115
  description: "AWS Secret Access Key detected",
2072
2116
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2075,7 +2119,8 @@ var init_secret_analyzer = __esm({
2075
2119
  {
2076
2120
  id: "github-token",
2077
2121
  name: "GitHub Token",
2078
- pattern: /gh[pousr]_[A-Za-z0-9_]{36,}/g,
2122
+ // SECURITY (Issue #463): Bounded quantifier to prevent ReDoS
2123
+ pattern: /gh[pousr]_[A-Za-z0-9_]{36,100}/g,
2079
2124
  severity: "critical",
2080
2125
  description: "GitHub Personal Access Token detected",
2081
2126
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2109,7 +2154,8 @@ var init_secret_analyzer = __esm({
2109
2154
  {
2110
2155
  id: "stripe-secret",
2111
2156
  name: "Stripe Secret Key",
2112
- pattern: /sk_live_[0-9a-zA-Z]{24,}/g,
2157
+ // SECURITY (Issue #463): Bounded quantifier to prevent ReDoS
2158
+ pattern: /sk_live_[0-9a-zA-Z]{24,100}/g,
2113
2159
  severity: "critical",
2114
2160
  description: "Stripe Live Secret Key detected",
2115
2161
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2117,7 +2163,8 @@ var init_secret_analyzer = __esm({
2117
2163
  {
2118
2164
  id: "stripe-test",
2119
2165
  name: "Stripe Test Key",
2120
- pattern: /sk_test_[0-9a-zA-Z]{24,}/g,
2166
+ // SECURITY (Issue #463): Bounded quantifier to prevent ReDoS
2167
+ pattern: /sk_test_[0-9a-zA-Z]{24,100}/g,
2121
2168
  severity: "medium",
2122
2169
  description: "Stripe Test Secret Key detected",
2123
2170
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2158,10 +2205,12 @@ var init_secret_analyzer = __esm({
2158
2205
  cweId: CWE.HARDCODED_CRYPTO_KEY
2159
2206
  },
2160
2207
  // Generic patterns
2208
+ // SECURITY (Issue #463): All generic patterns use bounded quantifiers to prevent ReDoS
2161
2209
  {
2162
2210
  id: "password-assignment",
2163
2211
  name: "Password Assignment",
2164
- pattern: /(?:password|passwd|pwd)\s*[:=]\s*['"][^'"]{8,}['"]/gi,
2212
+ // Limited whitespace to max 5 chars, limited value length to max 200 chars
2213
+ pattern: /(?:password|passwd|pwd)\s{0,5}[:=]\s{0,5}['"][^'"]{8,200}['"]/gi,
2165
2214
  severity: "high",
2166
2215
  description: "Hardcoded password detected",
2167
2216
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2169,7 +2218,8 @@ var init_secret_analyzer = __esm({
2169
2218
  {
2170
2219
  id: "api-key-assignment",
2171
2220
  name: "API Key Assignment",
2172
- pattern: /(?:api[_-]?key|apikey)\s*[:=]\s*['"][^'"]{16,}['"]/gi,
2221
+ // Explicit alternation instead of nested optional groups
2222
+ pattern: /(?:api_key|api-key|apikey)\s{0,5}[:=]\s{0,5}['"][^'"]{16,200}['"]/gi,
2173
2223
  severity: "high",
2174
2224
  description: "Hardcoded API key detected",
2175
2225
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2177,7 +2227,7 @@ var init_secret_analyzer = __esm({
2177
2227
  {
2178
2228
  id: "secret-assignment",
2179
2229
  name: "Secret Assignment",
2180
- pattern: /(?:secret|token)\s*[:=]\s*['"][^'"]{16,}['"]/gi,
2230
+ pattern: /(?:secret|token)\s{0,5}[:=]\s{0,5}['"][^'"]{16,200}['"]/gi,
2181
2231
  severity: "high",
2182
2232
  description: "Hardcoded secret/token detected",
2183
2233
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2188,7 +2238,9 @@ var init_secret_analyzer = __esm({
2188
2238
  name: "Database Connection String",
2189
2239
  // Require actual credentials (not user:password placeholder patterns)
2190
2240
  // Exclude common placeholder usernames: user, username, admin, root with simple passwords
2191
- pattern: /(?:postgresql|mysql|mongodb|redis):\/\/(?!(?:user|username|admin|root):(?:password|pass|secret)@)[^\s'"]+:[^\s'"]+@[^\s'"]+/gi,
2241
+ // SECURITY (Issue #463): Bounded character classes to prevent ReDoS
2242
+ // Limited each segment to max 200 chars
2243
+ pattern: /(?:postgresql|mysql|mongodb|redis):\/\/(?!(?:user|username|admin|root):(?:password|pass|secret)@)[^\s'"]{1,200}:[^\s'"]{1,200}@[^\s'"]{1,200}/gi,
2192
2244
  severity: "critical",
2193
2245
  description: "Database connection string with credentials detected",
2194
2246
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2208,7 +2260,8 @@ var init_secret_analyzer = __esm({
2208
2260
  name: "JWT Token",
2209
2261
  // Improved: Require minimum signature length of 20 chars to reduce false positives
2210
2262
  // from partial JWT-like strings in code
2211
- pattern: /eyJ[A-Za-z0-9_-]{10,}\.eyJ[A-Za-z0-9_-]{10,}\.[A-Za-z0-9_-]{20,}/g,
2263
+ // SECURITY (Issue #463): Bounded quantifiers to prevent ReDoS
2264
+ pattern: /eyJ[A-Za-z0-9_-]{10,500}\.eyJ[A-Za-z0-9_-]{10,1000}\.[A-Za-z0-9_-]{20,600}/g,
2212
2265
  severity: "low",
2213
2266
  // Changed from medium - often false positive
2214
2267
  description: "JWT token detected (verify if production token)",
@@ -2218,7 +2271,8 @@ var init_secret_analyzer = __esm({
2218
2271
  {
2219
2272
  id: "slack-token",
2220
2273
  name: "Slack Token",
2221
- pattern: /xox[baprs]-[0-9A-Za-z-]+/g,
2274
+ // SECURITY (Issue #463): Bounded quantifier to prevent ReDoS
2275
+ pattern: /xox[baprs]-[0-9A-Za-z-]{10,100}/g,
2222
2276
  severity: "high",
2223
2277
  description: "Slack token detected",
2224
2278
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2227,7 +2281,8 @@ var init_secret_analyzer = __esm({
2227
2281
  {
2228
2282
  id: "discord-token",
2229
2283
  name: "Discord Token",
2230
- pattern: /[MN][A-Za-z\d]{23,}\.[\w-]{6}\.[\w-]{27}/g,
2284
+ // SECURITY (Issue #463): Bounded quantifier to prevent ReDoS
2285
+ pattern: /[MN][A-Za-z\d]{23,100}\.[\w-]{6}\.[\w-]{27}/g,
2231
2286
  severity: "high",
2232
2287
  description: "Discord bot token detected",
2233
2288
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2286,7 +2341,8 @@ var init_secret_analyzer = __esm({
2286
2341
  id: "anthropic-api-key",
2287
2342
  name: "Anthropic API Key",
2288
2343
  // Format: sk-ant-api03-{base64-like string}
2289
- pattern: /sk-ant-api\d{2}-[A-Za-z0-9_-]{40,}/g,
2344
+ // SECURITY (Issue #463): Bounded quantifier to prevent ReDoS
2345
+ pattern: /sk-ant-api\d{2}-[A-Za-z0-9_-]{40,200}/g,
2290
2346
  severity: "critical",
2291
2347
  description: "Anthropic (Claude) API key detected",
2292
2348
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2295,7 +2351,8 @@ var init_secret_analyzer = __esm({
2295
2351
  id: "cohere-api-key",
2296
2352
  name: "Cohere API Key",
2297
2353
  // Format: varies, but often starts with identifiable prefix
2298
- pattern: /(?:cohere[_-]?(?:api[_-]?)?key|COHERE[_-]?(?:API[_-]?)?KEY)\s*[:=]\s*['"][A-Za-z0-9]{40,}['"]/g,
2354
+ // SECURITY (Issue #463): Explicit alternation instead of nested optional groups
2355
+ pattern: /(?:cohere_api_key|cohere-api-key|cohere_key|cohere-key|coherekey|COHERE_API_KEY|COHERE_KEY)\s{0,5}[:=]\s{0,5}['"][A-Za-z0-9]{40,200}['"]/g,
2299
2356
  severity: "high",
2300
2357
  description: "Cohere API key detected",
2301
2358
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2304,7 +2361,8 @@ var init_secret_analyzer = __esm({
2304
2361
  id: "mistral-api-key",
2305
2362
  name: "Mistral API Key",
2306
2363
  // Mistral keys have specific format
2307
- pattern: /(?:mistral[_-]?(?:api[_-]?)?key|MISTRAL[_-]?(?:API[_-]?)?KEY)\s*[:=]\s*['"][A-Za-z0-9]{32,}['"]/g,
2364
+ // SECURITY (Issue #463): Explicit alternation instead of nested optional groups
2365
+ pattern: /(?:mistral_api_key|mistral-api-key|mistral_key|mistral-key|mistralkey|MISTRAL_API_KEY|MISTRAL_KEY)\s{0,5}[:=]\s{0,5}['"][A-Za-z0-9]{32,200}['"]/g,
2308
2366
  severity: "high",
2309
2367
  description: "Mistral AI API key detected",
2310
2368
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2331,7 +2389,8 @@ var init_secret_analyzer = __esm({
2331
2389
  id: "huggingface-token",
2332
2390
  name: "HuggingFace Token",
2333
2391
  // Format: hf_...
2334
- pattern: /hf_[A-Za-z0-9]{34,}/g,
2392
+ // SECURITY (Issue #463): Bounded quantifier to prevent ReDoS
2393
+ pattern: /hf_[A-Za-z0-9]{34,100}/g,
2335
2394
  severity: "high",
2336
2395
  description: "HuggingFace API token detected",
2337
2396
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2340,7 +2399,8 @@ var init_secret_analyzer = __esm({
2340
2399
  id: "groq-api-key",
2341
2400
  name: "Groq API Key",
2342
2401
  // Format: gsk_...
2343
- pattern: /gsk_[A-Za-z0-9]{52,}/g,
2402
+ // SECURITY (Issue #463): Bounded quantifier to prevent ReDoS
2403
+ pattern: /gsk_[A-Za-z0-9]{52,100}/g,
2344
2404
  severity: "critical",
2345
2405
  description: "Groq API key detected",
2346
2406
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2349,7 +2409,8 @@ var init_secret_analyzer = __esm({
2349
2409
  id: "together-api-key",
2350
2410
  name: "Together AI API Key",
2351
2411
  // Context-based detection
2352
- pattern: /(?:together[_-]?(?:api[_-]?)?key|TOGETHER[_-]?(?:API[_-]?)?KEY)\s*[:=]\s*['"][A-Za-z0-9]{64,}['"]/g,
2412
+ // SECURITY (Issue #463): Explicit alternation instead of nested optional groups
2413
+ pattern: /(?:together_api_key|together-api-key|together_key|together-key|togetherkey|TOGETHER_API_KEY|TOGETHER_KEY)\s{0,5}[:=]\s{0,5}['"][A-Za-z0-9]{64,200}['"]/g,
2353
2414
  severity: "high",
2354
2415
  description: "Together AI API key detected",
2355
2416
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -2358,7 +2419,8 @@ var init_secret_analyzer = __esm({
2358
2419
  id: "fireworks-api-key",
2359
2420
  name: "Fireworks AI API Key",
2360
2421
  // Format: fw_...
2361
- pattern: /fw_[A-Za-z0-9]{40,}/g,
2422
+ // SECURITY (Issue #463): Bounded quantifier to prevent ReDoS
2423
+ pattern: /fw_[A-Za-z0-9]{40,100}/g,
2362
2424
  severity: "high",
2363
2425
  description: "Fireworks AI API key detected",
2364
2426
  cweId: CWE.HARDCODED_CREDENTIALS
@@ -4957,18 +5019,182 @@ init_esm_shims();
4957
5019
 
4958
5020
  // src/commands/ci/utils/execa-helpers.ts
4959
5021
  init_esm_shims();
4960
- function redactDatabaseUrl(arg) {
4961
- try {
4962
- if (!arg.startsWith("postgresql://") && !arg.startsWith("postgres://")) return arg;
4963
- const url = new URL(arg);
4964
- if (url.password) url.password = "***";
4965
- return url.toString();
4966
- } catch {
4967
- return arg.replace(/\/\/([^/:?#]+):([^@]+)@/g, "//${1}:***@");
5022
+
5023
+ // src/commands/ci/utils/env-security.ts
5024
+ init_esm_shims();
5025
+ var SENSITIVE_ENV_PATTERNS = [
5026
+ // Database credentials
5027
+ /^DATABASE_URL$/i,
5028
+ /^DRIZZLE_.*URL$/i,
5029
+ /^.*_DATABASE_URL$/i,
5030
+ /^PGPASSWORD$/i,
5031
+ // Supabase
5032
+ /^SUPABASE_.*KEY$/i,
5033
+ /^SUPABASE_.*TOKEN$/i,
5034
+ /^SUPABASE_.*SECRET$/i,
5035
+ /^SUPABASE_ACCESS_TOKEN$/i,
5036
+ // GitHub
5037
+ /^GITHUB_TOKEN$/i,
5038
+ /^GH_TOKEN$/i,
5039
+ /^GITHUB_PAT$/i,
5040
+ // Generic secrets
5041
+ /^.*_SECRET$/i,
5042
+ /^.*_PASSWORD$/i,
5043
+ /^.*_TOKEN$/i,
5044
+ /^.*_API_KEY$/i,
5045
+ /^.*_PRIVATE_KEY$/i,
5046
+ /^.*_CREDENTIALS$/i,
5047
+ // Encryption keys
5048
+ /^DOTENV_PRIVATE_KEY.*$/i,
5049
+ /^ENCRYPTION_KEY$/i,
5050
+ // Cloud providers
5051
+ /^AWS_SECRET_ACCESS_KEY$/i,
5052
+ /^AZURE_.*_KEY$/i,
5053
+ /^GCP_.*_KEY$/i,
5054
+ // CI-specific secrets
5055
+ /^RUNA_APP_API_TOKEN$/i,
5056
+ /^VERCEL_TOKEN$/i
5057
+ ];
5058
+ var SAFE_ENV_PASSTHROUGH = [
5059
+ // Standard shell environment
5060
+ "PATH",
5061
+ "HOME",
5062
+ "USER",
5063
+ "SHELL",
5064
+ "LANG",
5065
+ "LC_ALL",
5066
+ "LC_CTYPE",
5067
+ "TERM",
5068
+ "TMPDIR",
5069
+ "TZ",
5070
+ // Node.js
5071
+ "NODE_ENV",
5072
+ "NODE_PATH",
5073
+ "NODE_OPTIONS",
5074
+ "npm_config_registry",
5075
+ // CI environment (non-sensitive)
5076
+ "CI",
5077
+ "GITHUB_ACTIONS",
5078
+ "GITHUB_WORKFLOW",
5079
+ "GITHUB_RUN_ID",
5080
+ "GITHUB_RUN_NUMBER",
5081
+ "GITHUB_JOB",
5082
+ "GITHUB_ACTION",
5083
+ "GITHUB_EVENT_NAME",
5084
+ "GITHUB_EVENT_PATH",
5085
+ "GITHUB_WORKSPACE",
5086
+ "GITHUB_SHA",
5087
+ "GITHUB_REF",
5088
+ "GITHUB_REF_NAME",
5089
+ "GITHUB_HEAD_REF",
5090
+ "GITHUB_BASE_REF",
5091
+ "GITHUB_ACTOR",
5092
+ "GITHUB_REPOSITORY",
5093
+ "GITHUB_REPOSITORY_OWNER",
5094
+ "GITHUB_OUTPUT",
5095
+ "GITHUB_STEP_SUMMARY",
5096
+ "GITHUB_ENV",
5097
+ "RUNNER_OS",
5098
+ "RUNNER_ARCH",
5099
+ "RUNNER_NAME",
5100
+ "RUNNER_TOOL_CACHE",
5101
+ "RUNNER_TEMP",
5102
+ // Vercel (non-sensitive)
5103
+ "VERCEL",
5104
+ "VERCEL_ENV",
5105
+ "VERCEL_URL",
5106
+ "VERCEL_GIT_COMMIT_SHA",
5107
+ "VERCEL_GIT_COMMIT_REF",
5108
+ // Supabase (non-sensitive)
5109
+ "NEXT_PUBLIC_SUPABASE_URL",
5110
+ // runa debug/config (non-sensitive)
5111
+ "RUNA_DEBUG",
5112
+ "RUNA_SKIP_SCHEMA_RISK",
5113
+ // dotenvx (non-sensitive)
5114
+ "DOTENVX_QUIET"
5115
+ ];
5116
+ function isSensitiveEnvVar(name) {
5117
+ return SENSITIVE_ENV_PATTERNS.some((pattern) => pattern.test(name));
5118
+ }
5119
+ function getSafeEnv(additionalSafe = [], requiredSecrets = []) {
5120
+ const result = {};
5121
+ const allowedNames = /* @__PURE__ */ new Set([...SAFE_ENV_PASSTHROUGH, ...additionalSafe]);
5122
+ for (const name of allowedNames) {
5123
+ if (process.env[name] !== void 0) {
5124
+ result[name] = process.env[name];
5125
+ }
5126
+ }
5127
+ for (const name of requiredSecrets) {
5128
+ if (process.env[name] !== void 0) {
5129
+ result[name] = process.env[name];
5130
+ }
5131
+ }
5132
+ return result;
5133
+ }
5134
+ function getFilteredEnv() {
5135
+ const result = {};
5136
+ for (const [name, value] of Object.entries(process.env)) {
5137
+ if (!isSensitiveEnvVar(name)) {
5138
+ result[name] = value;
5139
+ }
4968
5140
  }
5141
+ return result;
4969
5142
  }
5143
+ var SECRET_VALUE_PATTERNS = [
5144
+ // PostgreSQL URLs with passwords
5145
+ {
5146
+ pattern: /postgresql:\/\/([^/:@]+):([^@]+)@/gi,
5147
+ replacement: "postgresql://$1:***@"
5148
+ },
5149
+ {
5150
+ pattern: /postgres:\/\/([^/:@]+):([^@]+)@/gi,
5151
+ replacement: "postgres://$1:***@"
5152
+ },
5153
+ // Generic URL with credentials
5154
+ {
5155
+ pattern: /https?:\/\/([^/:@]+):([^@]+)@/gi,
5156
+ replacement: "https://$1:***@"
5157
+ },
5158
+ // Supabase keys (long base64-like strings)
5159
+ {
5160
+ pattern: /eyJ[A-Za-z0-9_-]{100,}/g,
5161
+ replacement: "[SUPABASE_KEY_REDACTED]"
5162
+ },
5163
+ // GitHub tokens
5164
+ {
5165
+ pattern: /gh[ps]_[A-Za-z0-9_]{36,}/g,
5166
+ replacement: "[GITHUB_TOKEN_REDACTED]"
5167
+ },
5168
+ {
5169
+ pattern: /github_pat_[A-Za-z0-9_]{36,}/g,
5170
+ replacement: "[GITHUB_PAT_REDACTED]"
5171
+ },
5172
+ // Generic API keys (32+ char alphanumeric)
5173
+ {
5174
+ pattern: /[A-Za-z0-9_-]{40,}/g,
5175
+ replacement: (match) => {
5176
+ if (/[a-z]/.test(match) && /[A-Z0-9_]/.test(match)) {
5177
+ return `[KEY_REDACTED:${match.length}chars]`;
5178
+ }
5179
+ return match;
5180
+ }
5181
+ }
5182
+ ];
5183
+ function redactSecrets(input3) {
5184
+ let result = input3;
5185
+ for (const { pattern, replacement } of SECRET_VALUE_PATTERNS) {
5186
+ if (typeof replacement === "function") {
5187
+ result = result.replace(pattern, replacement);
5188
+ } else {
5189
+ result = result.replace(pattern, replacement);
5190
+ }
5191
+ }
5192
+ return result;
5193
+ }
5194
+
5195
+ // src/commands/ci/utils/execa-helpers.ts
4970
5196
  function redactArgsForLog(args) {
4971
- return args.map((a) => redactDatabaseUrl(a));
5197
+ return args.map((a) => redactSecrets(a));
4972
5198
  }
4973
5199
  function pipeToSharedLog(params) {
4974
5200
  let pending = 0;
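The new `env-security` module gives subprocess call sites two strategies: `getSafeEnv` builds the child environment from an allowlist (`SAFE_ENV_PASSTHROUGH` plus explicitly required secrets), while `getFilteredEnv` keeps everything except names matching `SENSITIVE_ENV_PATTERNS`; `redactSecrets` then replaces the old URL-only `redactDatabaseUrl` for argument logging. A simplified, self-contained sketch of the two strategies (the real `getSafeEnv` also accepts an `additionalSafe` list):

```ts
// Simplified restatement of the allowlist vs denylist strategies (not exported API).
const SAFE_ENV_PASSTHROUGH = ["PATH", "HOME", "CI", "NODE_ENV", "GITHUB_ACTIONS"];
const SENSITIVE_ENV_PATTERNS = [/^DATABASE_URL$/i, /_TOKEN$/i, /_SECRET$/i, /_PASSWORD$/i, /_API_KEY$/i];

// Allowlist: start from nothing, copy known-safe names plus the secrets a step truly needs.
function getSafeEnv(requiredSecrets: string[] = []): NodeJS.ProcessEnv {
  const out: NodeJS.ProcessEnv = {};
  for (const name of [...SAFE_ENV_PASSTHROUGH, ...requiredSecrets]) {
    if (process.env[name] !== undefined) out[name] = process.env[name];
  }
  return out;
}

// Denylist: start from the full environment, drop anything that looks sensitive.
function getFilteredEnv(): NodeJS.ProcessEnv {
  return Object.fromEntries(
    Object.entries(process.env).filter(([name]) => !SENSITIVE_ENV_PATTERNS.some((p) => p.test(name)))
  );
}

console.log(Object.keys(getSafeEnv(["PGPASSWORD"]))); // small, explicit set
console.log("GITHUB_TOKEN" in getFilteredEnv());      // false: matches /_TOKEN$/i and is dropped
```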
@@ -5004,7 +5230,8 @@ function runLogged(params) {
5004
5230
  const proc = execa(params.command, params.args, {
5005
5231
  cwd: params.cwd,
5006
5232
  env: { DOTENVX_QUIET: "1", ...params.env },
5007
- stdio: ["ignore", "pipe", "pipe"]
5233
+ stdio: ["ignore", "pipe", "pipe"],
5234
+ shell: false
5008
5235
  });
5009
5236
  const stream = createWriteStream(params.logFile, { flags: "a" });
5010
5237
  if (proc.stdout) proc.stdout.pipe(process.stdout, { end: false });
@@ -5078,8 +5305,35 @@ var TRUSTED_BINARIES = [
5078
5305
  "lsof",
5079
5306
  "runa"
5080
5307
  ];
5308
+ var TRUSTED_DIRECTORIES_UNIX = [
5309
+ "/usr/bin",
5310
+ "/usr/local/bin",
5311
+ "/bin",
5312
+ "/usr/sbin",
5313
+ "/sbin",
5314
+ // Homebrew on macOS
5315
+ "/opt/homebrew/bin",
5316
+ "/opt/homebrew/sbin",
5317
+ // Node.js version managers
5318
+ "/usr/local/lib/node_modules/.bin",
5319
+ // Common package manager locations
5320
+ "/usr/local/share/npm/bin"
5321
+ ];
5322
+ var TRUSTED_DIRECTORY_PATTERNS_WINDOWS = [
5323
+ /^[A-Z]:\\Windows\\System32\\?$/i,
5324
+ /^[A-Z]:\\Windows\\?$/i,
5325
+ /^[A-Z]:\\Program Files\\.*\\bin\\?$/i,
5326
+ /^[A-Z]:\\Program Files \(x86\)\\.*\\bin\\?$/i,
5327
+ // Node.js and npm
5328
+ /^[A-Z]:\\Program Files\\nodejs\\?$/i,
5329
+ /^[A-Z]:\\Users\\[^\\]+\\AppData\\Roaming\\npm\\?$/i
5330
+ ];
5081
5331
  var CACHE_TTL_MS = 5 * 60 * 1e3;
5082
5332
  var binaryPathCache = /* @__PURE__ */ new Map();
5333
+ function getPathFingerprint() {
5334
+ const pathEnv = process.env.PATH || "";
5335
+ return `${pathEnv.length}:${pathEnv.slice(0, 100)}:${pathEnv.slice(-100)}`;
5336
+ }
5083
5337
  function isExecutable(filePath) {
5084
5338
  try {
5085
5339
  accessSync(filePath, constants.X_OK);
@@ -5089,12 +5343,37 @@ function isExecutable(filePath) {
5089
5343
  return false;
5090
5344
  }
5091
5345
  }
5346
+ function isTrustedDirectory(dir) {
5347
+ const normalizedDir = normalize(dir).replace(/[/\\]$/, "");
5348
+ if (process.platform === "win32") {
5349
+ return TRUSTED_DIRECTORY_PATTERNS_WINDOWS.some((pattern) => pattern.test(normalizedDir));
5350
+ }
5351
+ for (const trusted of TRUSTED_DIRECTORIES_UNIX) {
5352
+ const normalizedTrusted = normalize(trusted).replace(/[/\\]$/, "");
5353
+ if (normalizedDir === normalizedTrusted || normalizedDir.startsWith(normalizedTrusted + "/")) {
5354
+ return true;
5355
+ }
5356
+ }
5357
+ if (normalizedDir.match(/^\/opt\/homebrew\/Cellar\/[^/]+\/[^/]+\/bin$/)) {
5358
+ return true;
5359
+ }
5360
+ if (normalizedDir.match(/^\/Users\/[^/]+\/\.nvm\/versions\/node\/[^/]+\/bin$/) || normalizedDir.match(/^\/Users\/[^/]+\/\.fnm\/node-versions\/[^/]+\/installation\/bin$/) || normalizedDir.match(/^\/home\/[^/]+\/\.nvm\/versions\/node\/[^/]+\/bin$/) || normalizedDir.match(/^\/home\/[^/]+\/\.fnm\/node-versions\/[^/]+\/installation\/bin$/)) {
5361
+ return true;
5362
+ }
5363
+ if (normalizedDir.match(/^\/Users\/[^/]+\/Library\/pnpm$/) || normalizedDir.match(/^\/home\/[^/]+\/\.local\/share\/pnpm$/)) {
5364
+ return true;
5365
+ }
5366
+ return false;
5367
+ }
5092
5368
  function searchPathForBinary(binaryName) {
5093
5369
  const pathEnv = process.env.PATH || "";
5094
5370
  const pathDirs = pathEnv.split(process.platform === "win32" ? ";" : ":");
5095
5371
  const extensions = process.platform === "win32" ? ["", ".exe", ".cmd", ".bat"] : [""];
5096
5372
  for (const dir of pathDirs) {
5097
5373
  if (!dir || !isAbsolute(dir)) continue;
5374
+ if (!isTrustedDirectory(dir)) {
5375
+ continue;
5376
+ }
5098
5377
  for (const ext of extensions) {
5099
5378
  const fullPath = join(dir, binaryName + ext);
5100
5379
  if (isExecutable(fullPath)) {
@@ -5110,15 +5389,25 @@ function resolveBinaryPath(binaryName) {
5110
5389
  `Security: Binary '${binaryName}' is not in the trusted binaries list. Allowed: ${TRUSTED_BINARIES.join(", ")}`
5111
5390
  );
5112
5391
  }
5113
- const cached = binaryPathCache.get(binaryName);
5114
5392
  const now = Date.now();
5115
- if (cached && now - cached.resolvedAt < CACHE_TTL_MS) {
5116
- return cached.path;
5393
+ const currentPathFingerprint = getPathFingerprint();
5394
+ const cached = binaryPathCache.get(binaryName);
5395
+ if (cached) {
5396
+ const isExpired = now - cached.resolvedAt >= CACHE_TTL_MS;
5397
+ const pathChanged = cached.pathFingerprint !== currentPathFingerprint;
5398
+ if (!isExpired && !pathChanged) {
5399
+ if (isExecutable(cached.path)) {
5400
+ return cached.path;
5401
+ }
5402
+ binaryPathCache.delete(binaryName);
5403
+ } else {
5404
+ binaryPathCache.delete(binaryName);
5405
+ }
5117
5406
  }
5118
5407
  const resolvedPath = searchPathForBinary(binaryName);
5119
5408
  if (!resolvedPath) {
5120
5409
  throw new Error(
5121
- `Binary '${binaryName}' not found in PATH. Ensure ${binaryName} is installed and available in your PATH.`
5410
+ `Binary '${binaryName}' not found in trusted PATH directories. Ensure ${binaryName} is installed in a system directory.`
5122
5411
  );
5123
5412
  }
5124
5413
  if (!isExecutable(resolvedPath)) {
@@ -5126,13 +5415,14 @@ function resolveBinaryPath(binaryName) {
5126
5415
  }
5127
5416
  binaryPathCache.set(binaryName, {
5128
5417
  path: resolvedPath,
5129
- resolvedAt: now
5418
+ resolvedAt: now,
5419
+ pathFingerprint: currentPathFingerprint
5130
5420
  });
5131
5421
  return resolvedPath;
5132
5422
  }
5133
5423
  function secureExeca(binaryName, args = [], options) {
5134
5424
  const resolvedPath = resolveBinaryPath(binaryName);
5135
- return execa(resolvedPath, args, options);
5425
+ return execa(resolvedPath, args, { ...options, shell: false });
5136
5426
  }
5137
5427
  function securePnpm(args = [], options) {
5138
5428
  return secureExeca("pnpm", args, options);
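`resolveBinaryPath` keeps its five-minute cache but now re-validates entries against a cheap fingerprint of the current PATH and an executability check, so mutating PATH or deleting the cached binary forces a fresh lookup; `secureExeca` additionally pins `shell: false` so the resolved absolute path and its arguments are never re-parsed by a shell. A sketch of the invalidation rule:

```ts
// Sketch of the cache rule added to resolveBinaryPath (bundle-internal, not exported).
interface CachedBinary { path: string; resolvedAt: number; pathFingerprint: string }

const CACHE_TTL_MS = 5 * 60 * 1000;

function getPathFingerprint(): string {
  const p = process.env.PATH ?? "";
  // Cheap change detector: total length plus the first and last 100 characters.
  return `${p.length}:${p.slice(0, 100)}:${p.slice(-100)}`;
}

function cacheEntryIsUsable(entry: CachedBinary, isExecutable: (p: string) => boolean): boolean {
  const fresh = Date.now() - entry.resolvedAt < CACHE_TTL_MS;
  const samePath = entry.pathFingerprint === getPathFingerprint();
  // TTL expiry, a PATH mutation, or the binary disappearing all force a re-resolve.
  return fresh && samePath && isExecutable(entry.path);
}
```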
@@ -6824,15 +7114,15 @@ function printSummary(logger15, output3) {
6824
7114
  }
6825
7115
  }
6826
7116
  function findRepoRoot(startDir) {
6827
- const { existsSync: existsSync48, readFileSync: readFileSync27 } = __require("fs");
7117
+ const { existsSync: existsSync49, readFileSync: readFileSync27 } = __require("fs");
6828
7118
  const { join: join22, dirname: dirname4 } = __require("path");
6829
7119
  let current = startDir;
6830
7120
  while (current !== dirname4(current)) {
6831
- if (existsSync48(join22(current, "turbo.json"))) {
7121
+ if (existsSync49(join22(current, "turbo.json"))) {
6832
7122
  return current;
6833
7123
  }
6834
7124
  const pkgPath = join22(current, "package.json");
6835
- if (existsSync48(pkgPath)) {
7125
+ if (existsSync49(pkgPath)) {
6836
7126
  try {
6837
7127
  const pkg = JSON.parse(readFileSync27(pkgPath, "utf-8"));
6838
7128
  if (pkg.workspaces) {
@@ -6847,7 +7137,7 @@ function findRepoRoot(startDir) {
6847
7137
  }
6848
7138
  async function runBuildMachine(input3, logger15) {
6849
7139
  const repoRoot = input3.targetDir ?? findRepoRoot(process.cwd());
6850
- return new Promise((resolve11, reject) => {
7140
+ return new Promise((resolve12, reject) => {
6851
7141
  const actor = createActor(buildMachine, {
6852
7142
  input: { input: input3, repoRoot }
6853
7143
  });
@@ -6861,7 +7151,7 @@ async function runBuildMachine(input3, logger15) {
6861
7151
  if (isComplete(snapshot2)) {
6862
7152
  const output3 = snapshot2.output;
6863
7153
  if (output3) {
6864
- resolve11(output3);
7154
+ resolve12(output3);
6865
7155
  }
6866
7156
  }
6867
7157
  });
@@ -7877,7 +8167,7 @@ function findRepoRoot2(startDir) {
7877
8167
  }
7878
8168
  async function runDevMachine(input3, logger15) {
7879
8169
  const repoRoot = input3.targetDir ?? findRepoRoot2(process.cwd());
7880
- return new Promise((resolve11, reject) => {
8170
+ return new Promise((resolve12, reject) => {
7881
8171
  const actor = createActor(devMachine, {
7882
8172
  input: { input: input3, repoRoot }
7883
8173
  });
@@ -7891,7 +8181,7 @@ async function runDevMachine(input3, logger15) {
7891
8181
  if (isComplete2(snapshot2)) {
7892
8182
  const output3 = snapshot2.output;
7893
8183
  if (output3) {
7894
- resolve11(output3);
8184
+ resolve12(output3);
7895
8185
  }
7896
8186
  }
7897
8187
  });
@@ -7952,6 +8242,88 @@ init_esm_shims();
7952
8242
 
7953
8243
  // src/config/env-files.ts
7954
8244
  init_esm_shims();
8245
+
8246
+ // src/utils/path-security.ts
8247
+ init_esm_shims();
8248
+ var SHELL_METACHARACTERS = /[|;`$&<>]/;
8249
+ function isControlChar(charCode) {
8250
+ return charCode <= 31 || charCode === 127;
8251
+ }
8252
+ function hasNoDangerousChars(userPath) {
8253
+ if (SHELL_METACHARACTERS.test(userPath)) {
8254
+ return false;
8255
+ }
8256
+ for (let i = 0; i < userPath.length; i++) {
8257
+ if (isControlChar(userPath.charCodeAt(i))) {
8258
+ return false;
8259
+ }
8260
+ }
8261
+ return true;
8262
+ }
8263
+ function containsPathTraversal(userPath) {
8264
+ const normalized = normalize(userPath);
8265
+ if (normalized.includes("..")) {
8266
+ return true;
8267
+ }
8268
+ if (normalized.startsWith("/") || normalized.startsWith("\\")) {
8269
+ return true;
8270
+ }
8271
+ return false;
8272
+ }
8273
+ function validateSafePath(userPath, baseDir) {
8274
+ if (!userPath || userPath.trim() === "") {
8275
+ return false;
8276
+ }
8277
+ if (!hasNoDangerousChars(userPath)) {
8278
+ return false;
8279
+ }
8280
+ if (containsPathTraversal(userPath)) {
8281
+ return false;
8282
+ }
8283
+ const absoluteBase = resolve(baseDir);
8284
+ const absolutePath = resolve(baseDir, userPath);
8285
+ const relativePath = relative(absoluteBase, absolutePath);
8286
+ if (relativePath.startsWith("..") || isAbsolute(relativePath)) {
8287
+ return false;
8288
+ }
8289
+ return true;
8290
+ }
8291
+ function resolveSafePath(baseDir, userPath) {
8292
+ if (!validateSafePath(userPath, baseDir)) {
8293
+ throw new Error(
8294
+ `Invalid path: '${userPath}' is not allowed. Path must be relative and within the base directory.`
8295
+ );
8296
+ }
8297
+ return resolve(baseDir, userPath);
8298
+ }
8299
+ function filterSafePaths(paths, baseDir, logger15) {
8300
+ return paths.filter((p) => {
8301
+ const safe = validateSafePath(p, baseDir);
8302
+ return safe;
8303
+ });
8304
+ }
8305
+ var MAX_DIRECTORY_TRAVERSAL_DEPTH = 10;
8306
+ var SAFE_ENV_SUFFIX_PATTERN = /^[a-zA-Z0-9._-]+$/;
8307
+ function validateEnvSuffix(suffix) {
8308
+ if (!suffix || suffix.trim() === "") {
8309
+ throw new Error("Environment suffix cannot be empty");
8310
+ }
8311
+ if (suffix.includes("..") || suffix.includes("/") || suffix.includes("\\")) {
8312
+ throw new Error(`Environment suffix '${suffix}' contains path traversal sequences`);
8313
+ }
8314
+ if (!SAFE_ENV_SUFFIX_PATTERN.test(suffix)) {
8315
+ throw new Error(
8316
+ `Environment suffix '${suffix}' contains invalid characters. Only alphanumeric, dots, hyphens, and underscores are allowed.`
8317
+ );
8318
+ }
8319
+ }
8320
+ function isPathContained(basePath, targetPath) {
8321
+ const normalizedBase = resolve(basePath);
8322
+ const normalizedTarget = resolve(targetPath);
8323
+ return normalizedTarget === normalizedBase || normalizedTarget.startsWith(normalizedBase + sep);
8324
+ }
8325
+
8326
+ // src/config/env-files.ts
7955
8327
  init_workspace_detector();
7956
8328
  process.env.DOTENVX_QUIET = "true";
7957
8329
  function isSupportedNodeEnv(value) {
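The new `path-security` helpers added above reject shell metacharacters, control characters, `..` sequences and absolute paths, then confirm that the resolved path still falls under the base directory before any file access. The containment rule at the core of `validateSafePath`/`resolveSafePath`, restated as a runnable check:

```ts
import { isAbsolute, relative, resolve } from "path";

// Containment check equivalent to the final step of validateSafePath above.
function staysInside(baseDir: string, userPath: string): boolean {
  const rel = relative(resolve(baseDir), resolve(baseDir, userPath));
  return !rel.startsWith("..") && !isAbsolute(rel);
}

console.log(staysInside("/repo", "supabase/schemas/app.sql")); // true
console.log(staysInside("/repo", "../../etc/passwd"));         // false: escapes the base directory
console.log(staysInside("/repo", "/etc/passwd"));              // false: absolute paths are rejected
```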
@@ -7963,13 +8335,15 @@ function findProjectRoot(from) {
7963
8335
  if (workspaceRoot) return workspaceRoot;
7964
8336
  let current = path10__default.resolve(from);
7965
8337
  const root = path10__default.parse(current).root;
7966
- while (current !== root) {
8338
+ let depth = 0;
8339
+ while (current !== root && depth < MAX_DIRECTORY_TRAVERSAL_DEPTH) {
7967
8340
  const hasGit = existsSync(path10__default.join(current, ".git"));
7968
8341
  const hasPackageJson2 = existsSync(path10__default.join(current, "package.json"));
7969
8342
  if (hasGit || hasPackageJson2) return current;
7970
8343
  const parent = path10__default.dirname(current);
7971
8344
  if (parent === current) break;
7972
8345
  current = parent;
8346
+ depth++;
7973
8347
  }
7974
8348
  return path10__default.resolve(from);
7975
8349
  }
@@ -8080,6 +8454,8 @@ function resolveNodeEnv(requested) {
8080
8454
  return "development";
8081
8455
  }
8082
8456
  function buildBaseEnvFilePaths(params) {
8457
+ validateEnvSuffix(params.nodeEnv);
8458
+ validateEnvSuffix(params.vercelEnv);
8083
8459
  const baseFiles = [
8084
8460
  ".env",
8085
8461
  // Vercel-aligned: .env.{environment} (3 files = 3 keys)
@@ -8093,6 +8469,7 @@ function buildBaseEnvFilePaths(params) {
8093
8469
  return baseFiles;
8094
8470
  }
8095
8471
  function buildRunaEnvFilePaths(runaEnv) {
8472
+ validateEnvSuffix(runaEnv);
8096
8473
  const runaFiles = [`.env.${runaEnv}`, `.env.${runaEnv}.local`];
8097
8474
  if (runaEnv === "main" || runaEnv === "production") {
8098
8475
  runaFiles.push(".env.production", ".env.production.local");
@@ -8103,8 +8480,12 @@ function loadEnvFilesIntoTarget(params) {
8103
8480
  const loadedFiles = [];
8104
8481
  for (const relPath of params.relPaths) {
8105
8482
  const fullPath = path10__default.join(params.projectRoot, relPath);
8483
+ const resolvedPath = path10__default.resolve(fullPath);
8484
+ if (!isPathContained(params.projectRoot, resolvedPath)) {
8485
+ continue;
8486
+ }
8106
8487
  const loaded = loadEnvFileIntoTarget({
8107
- filePath: fullPath,
8488
+ filePath: resolvedPath,
8108
8489
  targetEnv: params.targetEnv,
8109
8490
  protectedKeys: params.protectedKeys
8110
8491
  });
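Environment-file resolution is hardened in two places: `buildBaseEnvFilePaths`/`buildRunaEnvFilePaths` validate the NODE_ENV, VERCEL_ENV and runa environment suffixes against `^[a-zA-Z0-9._-]+$` before constructing `.env.<suffix>` names, and `loadEnvFilesIntoTarget` skips any resolved file that `isPathContained` places outside the project root. Expected accept/reject behavior of the suffix rule (re-implemented as a boolean here; the real `validateEnvSuffix` throws):

```ts
// Illustrative boolean version of validateEnvSuffix.
const SAFE_ENV_SUFFIX_PATTERN = /^[a-zA-Z0-9._-]+$/;

function isValidEnvSuffix(suffix: string): boolean {
  if (!suffix || suffix.trim() === "") return false;
  if (suffix.includes("..") || suffix.includes("/") || suffix.includes("\\")) return false;
  return SAFE_ENV_SUFFIX_PATTERN.test(suffix);
}

console.log(isValidEnvSuffix("production"));       // true  -> .env.production
console.log(isValidEnvSuffix("preview"));          // true  -> .env.preview
console.log(isValidEnvSuffix("../../etc/passwd")); // false -> rejected before any path is built
```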
@@ -8337,7 +8718,7 @@ z.object({
8337
8718
  // src/commands/ci/commands/ci-checks.ts
8338
8719
  async function runTool(params) {
8339
8720
  const startTime = Date.now();
8340
- return new Promise((resolve11) => {
8721
+ return new Promise((resolve12) => {
8341
8722
  const proc = spawn(params.command, params.args, {
8342
8723
  cwd: process.cwd(),
8343
8724
  stdio: ["ignore", "pipe", "pipe"],
@@ -8360,7 +8741,7 @@ async function runTool(params) {
8360
8741
  const issueCount = params.parseIssueCount(output3);
8361
8742
  await writeFile(params.outputPath, output3, "utf-8").catch(() => {
8362
8743
  });
8363
- resolve11({
8744
+ resolve12({
8364
8745
  name: params.name,
8365
8746
  label: params.label,
8366
8747
  status: exitCode === 0 ? "passed" : "failed",
@@ -8379,7 +8760,7 @@ async function runTool(params) {
8379
8760
  "utf-8"
8380
8761
  ).catch(() => {
8381
8762
  });
8382
- resolve11({
8763
+ resolve12({
8383
8764
  name: params.name,
8384
8765
  label: params.label,
8385
8766
  status: "skipped",
@@ -8392,10 +8773,10 @@ async function runTool(params) {
8392
8773
  });
8393
8774
  }
8394
8775
  async function checkToolExists(command) {
8395
- return new Promise((resolve11) => {
8776
+ return new Promise((resolve12) => {
8396
8777
  const proc = spawn("which", [command], { shell: true });
8397
- proc.on("close", (code) => resolve11(code === 0));
8398
- proc.on("error", () => resolve11(false));
8778
+ proc.on("close", (code) => resolve12(code === 0));
8779
+ proc.on("error", () => resolve12(false));
8399
8780
  });
8400
8781
  }
8401
8782
  async function executeWorkflowCheck(params) {
@@ -8905,7 +9286,7 @@ async function detectStack(repoRoot, tmpDir, productionDbUrlAdmin) {
8905
9286
  const res = await runLogged({
8906
9287
  cwd: repoRoot,
8907
9288
  env: {
8908
- ...process.env,
9289
+ ...getFilteredEnv(),
8909
9290
  GH_DATABASE_URL_ADMIN: productionDbUrlAdmin
8910
9291
  },
8911
9292
  label: "detect-stack",
@@ -8936,15 +9317,20 @@ function checkIfInitialDeployment(repoRoot, productionDbUrl) {
8936
9317
  const result = spawnSync("psql", [productionDbUrl, "-t", "-A", "-c", query], {
8937
9318
  encoding: "utf-8",
8938
9319
  stdio: ["pipe", "pipe", "pipe"],
8939
- timeout: 3e4
9320
+ timeout: 3e4,
8940
9321
  // 30 second timeout
9322
+ env: getSafeEnv(),
9323
+ // Only pass safe env vars
9324
+ shell: false
9325
+ // Prevent shell injection
8941
9326
  });
8942
9327
  if (result.status !== 0) {
9328
+ const safeError = redactSecrets(result.stderr?.slice(0, 200) || "unknown error");
8943
9329
  return {
8944
9330
  isInitialDeployment: false,
8945
9331
  tableCount: -1,
8946
9332
  schemasChecked: schemasToCheck,
8947
- reason: `DB connection failed: ${result.stderr?.slice(0, 200) || "unknown error"}`,
9333
+ reason: `DB connection failed: ${safeError}`,
8948
9334
  checkSuccessful: false
8949
9335
  };
8950
9336
  }
@@ -8978,11 +9364,12 @@ function checkIfInitialDeployment(repoRoot, productionDbUrl) {
8978
9364
  }
8979
9365
  async function showSchemaDiff(repoRoot, tmpDir) {
8980
9366
  const diffLog = path10__default.join(tmpDir, "schema-diff.log");
9367
+ const safeEnv = getSafeEnv();
8981
9368
  const declarativeSqlPath = path10__default.join(repoRoot, "supabase/schemas/declarative");
8982
9369
  if (existsSync(declarativeSqlPath)) {
8983
9370
  await runLogged({
8984
9371
  cwd: repoRoot,
8985
- env: process.env,
9372
+ env: safeEnv,
8986
9373
  label: "git diff schema (supabase/schemas/declarative)",
8987
9374
  command: "git",
8988
9375
  args: ["diff", "origin/main", "HEAD", "--", "supabase/schemas/declarative/"],
@@ -8996,7 +9383,7 @@ async function showSchemaDiff(repoRoot, tmpDir) {
8996
9383
  if (existsSync(path10__default.join(repoRoot, schemaPath))) {
8997
9384
  await runLogged({
8998
9385
  cwd: repoRoot,
8999
- env: process.env,
9386
+ env: safeEnv,
9000
9387
  label: `git diff schema (${detected})`,
9001
9388
  command: "git",
9002
9389
  args: ["diff", "origin/main", "HEAD", "--", `${schemaPath}/`],
@@ -9010,7 +9397,7 @@ async function showSchemaDiff(repoRoot, tmpDir) {
9010
9397
  if (existsSync(path10__default.join(repoRoot, candidatePath))) {
9011
9398
  await runLogged({
9012
9399
  cwd: repoRoot,
9013
- env: process.env,
9400
+ env: safeEnv,
9014
9401
  label: `git diff schema (packages/${candidate})`,
9015
9402
  command: "git",
9016
9403
  args: ["diff", "origin/main", "HEAD", "--", `${candidatePath}/`],
@@ -9022,7 +9409,7 @@ async function showSchemaDiff(repoRoot, tmpDir) {
9022
9409
  if (existsSync(path10__default.join(repoRoot, "src", "schema"))) {
9023
9410
  await runLogged({
9024
9411
  cwd: repoRoot,
9025
- env: process.env,
9412
+ env: safeEnv,
9026
9413
  label: "git diff schema (src/schema)",
9027
9414
  command: "git",
9028
9415
  args: ["diff", "origin/main", "HEAD", "--", "src/schema/"],
@@ -9033,10 +9420,10 @@ async function showSchemaDiff(repoRoot, tmpDir) {
9033
9420
  async function detectRisks(repoRoot, tmpDir) {
9034
9421
  const logFile = path10__default.join(tmpDir, "db-risks.log");
9035
9422
  try {
9423
+ const env2 = getFilteredEnv();
9036
9424
  await runLogged({
9037
9425
  cwd: repoRoot,
9038
- env: process.env,
9039
- // RUNA_SKIP_SCHEMA_RISK passed through if set
9426
+ env: env2,
9040
9427
  label: "db:risks",
9041
9428
  command: "pnpm",
9042
9429
  args: ["exec", "runa", "db", "risks"],
@@ -9071,7 +9458,7 @@ async function snapshotCreate(repoRoot, tmpDir, productionDbUrlAdmin, commit) {
9071
9458
  await runLogged({
9072
9459
  cwd: repoRoot,
9073
9460
  env: {
9074
- ...process.env,
9461
+ ...getFilteredEnv(),
9075
9462
  GH_DATABASE_URL_ADMIN: productionDbUrlAdmin
9076
9463
  },
9077
9464
  label: "snapshot create production",
@@ -9084,7 +9471,7 @@ async function snapshotRestoreLatest(repoRoot, tmpDir, productionDbUrlAdmin) {
9084
9471
  await runLogged({
9085
9472
  cwd: repoRoot,
9086
9473
  env: {
9087
- ...process.env,
9474
+ ...getFilteredEnv(),
9088
9475
  GH_DATABASE_URL_ADMIN: productionDbUrlAdmin
9089
9476
  },
9090
9477
  label: "snapshot restore production (latest)",
@@ -9159,7 +9546,7 @@ async function applyProductionSchema(repoRoot, tmpDir, productionDbUrlAdmin, pro
9159
9546
  await runLogged({
9160
9547
  cwd: repoRoot,
9161
9548
  env: {
9162
- ...process.env,
9549
+ ...getFilteredEnv(),
9163
9550
  // PRD Naming Convention:
9164
9551
  // - GH_DATABASE_URL_ADMIN = postgres role (DDL capable, for pg-schema-diff)
9165
9552
  // - GH_DATABASE_URL = drizzle_app role (app runtime)
@@ -9192,9 +9579,10 @@ async function applyProductionSchema(repoRoot, tmpDir, productionDbUrlAdmin, pro
9192
9579
  };
9193
9580
  }
9194
9581
  async function auditRecord(repoRoot, tmpDir, productionDbUrlAdmin, params) {
9582
+ const safeEnv = getSafeEnv();
9195
9583
  const msg = await runLogged({
9196
9584
  cwd: repoRoot,
9197
- env: process.env,
9585
+ env: safeEnv,
9198
9586
  label: "git log (commit message)",
9199
9587
  command: "git",
9200
9588
  args: ["log", "-1", "--pretty=format:%s"],
@@ -9218,7 +9606,7 @@ async function auditRecord(repoRoot, tmpDir, productionDbUrlAdmin, params) {
9218
9606
  }
9219
9607
  const diff = await runLogged({
9220
9608
  cwd: repoRoot,
9221
- env: process.env,
9609
+ env: safeEnv,
9222
9610
  label: "git diff (schema)",
9223
9611
  command: "git",
9224
9612
  args: ["diff", "origin/main", "HEAD", "--", ...schemaPaths],
@@ -9229,7 +9617,7 @@ async function auditRecord(repoRoot, tmpDir, productionDbUrlAdmin, params) {
9229
9617
  await runLogged({
9230
9618
  cwd: repoRoot,
9231
9619
  env: {
9232
- ...process.env,
9620
+ ...getFilteredEnv(),
9233
9621
  GH_DATABASE_URL_ADMIN: productionDbUrlAdmin
9234
9622
  },
9235
9623
  label: "db audit record",
@@ -9257,7 +9645,7 @@ async function auditRecord(repoRoot, tmpDir, productionDbUrlAdmin, params) {
9257
9645
  async function notifyDeployment(repoRoot, tmpDir, params) {
9258
9646
  await runLogged({
9259
9647
  cwd: repoRoot,
9260
- env: process.env,
9648
+ env: getFilteredEnv(),
9261
9649
  label: "workflow notify deployment",
9262
9650
  command: "pnpm",
9263
9651
  args: [
@@ -9676,6 +10064,207 @@ function parseIntOr(value, fallback) {
9676
10064
  return Number.isNaN(n) ? fallback : n;
9677
10065
  }
9678
10066
 
10067
+ // src/commands/ci/utils/workflow-idempotency.ts
10068
+ init_esm_shims();
10069
+
10070
+ // src/commands/db/utils/psql.ts
10071
+ init_esm_shims();
10072
+ function parsePostgresUrl(url) {
10073
+ const u = new URL(url);
10074
+ const host = u.hostname;
10075
+ const port = u.port || "5432";
10076
+ const user = u.username || "postgres";
10077
+ const password = u.password || void 0;
10078
+ const database = u.pathname.replace(/^\//, "") || "postgres";
10079
+ return { host, port, user, database, password };
10080
+ }
10081
+ function buildPsqlArgs(conn, options) {
10082
+ const args = ["-X", "-h", conn.host, "-p", conn.port, "-U", conn.user, "-d", conn.database];
10083
+ if (options?.onErrorStop !== false) {
10084
+ args.splice(1, 0, "-v", "ON_ERROR_STOP=1");
10085
+ }
10086
+ return args;
10087
+ }
10088
+ function buildPsqlEnv(conn) {
10089
+ if (!conn.password) return { ...process.env };
10090
+ return { ...process.env, PGPASSWORD: conn.password };
10091
+ }
10092
+ function psqlSyncQuery(params) {
10093
+ const conn = parsePostgresUrl(params.databaseUrl);
10094
+ const args = buildPsqlArgs(conn, { onErrorStop: false });
10095
+ const result = spawnSync("psql", [...args, "-t", "-c", params.sql], {
10096
+ encoding: "utf-8",
10097
+ stdio: ["pipe", "pipe", "pipe"],
10098
+ env: buildPsqlEnv(conn),
10099
+ timeout: params.timeout ?? 1e4
10100
+ });
10101
+ return {
10102
+ status: result.status,
10103
+ stdout: result.stdout || "",
10104
+ stderr: result.stderr || ""
10105
+ };
10106
+ }
10107
+ function psqlSyncFile(params) {
10108
+ const conn = parsePostgresUrl(params.databaseUrl);
10109
+ const args = buildPsqlArgs(conn, { onErrorStop: params.onErrorStop ?? true });
10110
+ const result = spawnSync("psql", [...args, "-f", params.filePath], {
10111
+ encoding: "utf-8",
10112
+ stdio: ["pipe", "pipe", "pipe"],
10113
+ env: buildPsqlEnv(conn)
10114
+ });
10115
+ return {
10116
+ status: result.status,
10117
+ stdout: result.stdout || "",
10118
+ stderr: result.stderr || ""
10119
+ };
10120
+ }
10121
+ async function psqlQuery(params) {
10122
+ const conn = parsePostgresUrl(params.databaseUrl);
10123
+ const args = buildPsqlArgs(conn);
10124
+ if (params.mode === "scalar") {
10125
+ const { stdout: stdout2 } = await execa("psql", [...args, "-A", "-t", "-c", params.sql], {
10126
+ env: buildPsqlEnv(conn),
10127
+ stdio: ["ignore", "pipe", "pipe"]
10128
+ });
10129
+ return stdout2.trim();
10130
+ }
10131
+ const { stdout } = await execa("psql", [...args, "-A", "-t", "-c", params.sql], {
10132
+ env: buildPsqlEnv(conn),
10133
+ stdio: ["ignore", "pipe", "pipe"]
10134
+ });
10135
+ return stdout.trim();
10136
+ }
10137
+ async function psqlExec(params) {
10138
+ const conn = parsePostgresUrl(params.databaseUrl);
10139
+ const args = buildPsqlArgs(conn);
10140
+ await execa("psql", [...args, "-c", params.sql], {
10141
+ env: buildPsqlEnv(conn),
10142
+ stdio: "inherit"
10143
+ });
10144
+ }
10145
+
10146
+ // src/commands/ci/utils/workflow-idempotency.ts
10147
+ var PROD_DEPLOY_LOCK_ID = 88889;
10148
+ function acquireWorkflowLock(dbUrl, lockId) {
10149
+ try {
10150
+ const conn = parsePostgresUrl(dbUrl);
10151
+ const args = buildPsqlArgs(conn, { onErrorStop: false });
10152
+ const result = spawnSync(
10153
+ "psql",
10154
+ [...args, "-t", "-c", `SELECT pg_try_advisory_lock(${lockId})`],
10155
+ {
10156
+ encoding: "utf-8",
10157
+ stdio: ["pipe", "pipe", "pipe"],
10158
+ env: buildPsqlEnv(conn)
10159
+ }
10160
+ );
10161
+ if (result.error) {
10162
+ return { acquired: false, error: result.error.message };
10163
+ }
10164
+ const acquired = result.stdout?.trim() === "t";
10165
+ return { acquired };
10166
+ } catch (error) {
10167
+ return {
10168
+ acquired: false,
10169
+ error: error instanceof Error ? error.message : "Unknown error"
10170
+ };
10171
+ }
10172
+ }
10173
+ function releaseWorkflowLock(dbUrl, lockId) {
10174
+ try {
10175
+ const conn = parsePostgresUrl(dbUrl);
10176
+ const args = buildPsqlArgs(conn, { onErrorStop: false });
10177
+ spawnSync("psql", [...args, "-t", "-c", `SELECT pg_advisory_unlock(${lockId})`], {
10178
+ encoding: "utf-8",
10179
+ stdio: ["pipe", "pipe", "pipe"],
10180
+ env: buildPsqlEnv(conn)
10181
+ });
10182
+ } catch {
10183
+ }
10184
+ }
10185
+ function checkDeploymentIdempotency(dbUrl, commit) {
10186
+ const auditTable = process.env.RUNA_DB_AUDIT_TABLE;
10187
+ if (!auditTable) {
10188
+ return { alreadyDeployed: false, checkSuccessful: true };
10189
+ }
10190
+ try {
10191
+ const conn = parsePostgresUrl(dbUrl);
10192
+ const args = buildPsqlArgs(conn, { onErrorStop: false });
10193
+ if (!/^[a-f0-9]{7,40}$/i.test(commit)) {
10194
+ return {
10195
+ alreadyDeployed: false,
10196
+ checkSuccessful: false,
10197
+ error: "Invalid commit SHA format"
10198
+ };
10199
+ }
10200
+ const query = `SELECT deployed_at FROM ${auditTable} WHERE commit = '${commit}' AND environment = 'production' ORDER BY deployed_at DESC LIMIT 1`;
10201
+ const result = spawnSync("psql", [...args, "-t", "-c", query], {
10202
+ encoding: "utf-8",
10203
+ stdio: ["pipe", "pipe", "pipe"],
10204
+ env: buildPsqlEnv(conn)
10205
+ });
10206
+ if (result.error) {
10207
+ return {
10208
+ alreadyDeployed: false,
10209
+ checkSuccessful: false,
10210
+ error: result.error.message
10211
+ };
10212
+ }
10213
+ const deployedAt = result.stdout?.trim();
10214
+ if (deployedAt && deployedAt.length > 0) {
10215
+ return {
10216
+ alreadyDeployed: true,
10217
+ deployedAt,
10218
+ checkSuccessful: true
10219
+ };
10220
+ }
10221
+ return { alreadyDeployed: false, checkSuccessful: true };
10222
+ } catch (error) {
10223
+ return {
10224
+ alreadyDeployed: false,
10225
+ checkSuccessful: false,
10226
+ error: error instanceof Error ? error.message : "Unknown error"
10227
+ };
10228
+ }
10229
+ }
10230
+ function guardProductionDeployment(dbUrl, commit) {
10231
+ const lockResult = acquireWorkflowLock(dbUrl, PROD_DEPLOY_LOCK_ID);
10232
+ if (!lockResult.acquired) {
10233
+ return {
10234
+ canProceed: false,
10235
+ lockAcquired: false,
10236
+ alreadyDeployed: false,
10237
+ message: lockResult.error ? `Failed to acquire deployment lock: ${lockResult.error}` : "Another production deployment is in progress. Please wait and retry."
10238
+ };
10239
+ }
10240
+ const idempotencyResult = checkDeploymentIdempotency(dbUrl, commit);
10241
+ if (idempotencyResult.alreadyDeployed) {
10242
+ releaseWorkflowLock(dbUrl, PROD_DEPLOY_LOCK_ID);
10243
+ return {
10244
+ canProceed: false,
10245
+ lockAcquired: false,
10246
+ alreadyDeployed: true,
10247
+ message: `Commit ${commit.slice(0, 7)} was already deployed at ${idempotencyResult.deployedAt}. Skipping duplicate deployment.`
10248
+ };
10249
+ }
10250
+ if (!idempotencyResult.checkSuccessful && idempotencyResult.error) {
10251
+ console.warn(
10252
+ `[WARN] Idempotency check failed: ${idempotencyResult.error}. Proceeding with deployment.`
10253
+ );
10254
+ }
10255
+ return {
10256
+ canProceed: true,
10257
+ lockAcquired: true,
10258
+ alreadyDeployed: false,
10259
+ message: "Deployment guard passed. Proceeding with production deployment."
10260
+ };
10261
+ }
10262
+ function releaseProductionDeploymentLock(dbUrl, lockAcquired) {
10263
+ if (lockAcquired) {
10264
+ releaseWorkflowLock(dbUrl, PROD_DEPLOY_LOCK_ID);
10265
+ }
10266
+ }
10267
+
9679
10268
  // src/commands/ci/commands/ci-prod-apply.ts
9680
10269
  var CiProdApplyOutputSchema = z.object({
9681
10270
  summaryPath: z.string().min(1)
@@ -9683,6 +10272,7 @@ var CiProdApplyOutputSchema = z.object({
9683
10272
  async function runCiProdApply(params) {
9684
10273
  console.log("[DEBUG] runCiProdApply: starting");
9685
10274
  const steps = [
10275
+ { id: "guard", description: "Acquire deployment lock and check idempotency" },
9686
10276
  { id: "stack", description: "Validate stack and inputs" },
9687
10277
  { id: "risks", description: "Detect risky changes" },
9688
10278
  { id: "snapshot", description: "Create pre-deploy snapshot" },
@@ -9702,152 +10292,183 @@ async function runCiProdApply(params) {
9702
10292
  if (cfg.config) params.summary.detected.config = cfg.config;
9703
10293
  requireCiAutoApprove({ mode: params.summary.mode, autoApprove: params.options.autoApprove });
9704
10294
  const inputs = resolveProdApplyInputs();
9705
- logKeyValueTable({
9706
- title: "Inputs",
9707
- rows: [
9708
- ["mode", params.summary.mode],
9709
- ["repoKind", params.summary.repoKind],
9710
- ["GH_DATABASE_URL_ADMIN", "<set, masked>"],
9711
- ["GH_DATABASE_URL", "<set, masked>"],
9712
- ["GITHUB_SHA", inputs.githubSha]
9713
- ]
9714
- });
9715
- logSection("Execute");
9716
- console.log("[DEBUG] Step 1/7: detectStack...");
9717
- const stack = await detectStack(
9718
- params.repoRoot,
9719
- params.tmpDir,
9720
- inputs.productionDatabaseUrlAdmin
9721
- );
9722
- console.log(`[DEBUG] Step 1/7: detectStack done (stack=${stack})`);
9723
- params.summary.detected.stack = stack;
9724
- const supportedStacks = ["drizzle", "supabase"];
9725
- if (!stack || !supportedStacks.includes(stack)) {
9726
- throw new CLIError(
9727
- `Production apply requires a supported database stack (detected: ${stack || "<empty>"}).`,
9728
- "CI_PROD_STACK_UNSUPPORTED",
9729
- [
9730
- "Supported stacks: drizzle, supabase",
9731
- "Run: runa init (to set up database structure)",
9732
- "See: packages/database or supabase/schemas/"
9733
- ],
9734
- void 0,
9735
- 11
9736
- );
9737
- }
9738
- console.log("[DEBUG] Step 2/7: showSchemaDiff...");
9739
- await showSchemaDiff(params.repoRoot, params.tmpDir);
9740
- console.log("[DEBUG] Step 2/7: showSchemaDiff done");
9741
- console.log("[DEBUG] Step 2.5/7: checkIfInitialDeployment...");
9742
- const initialDeploymentCheck = checkIfInitialDeployment(
9743
- params.repoRoot,
9744
- inputs.productionDatabaseUrlAdmin
9745
- );
9746
- console.log(
9747
- `[DEBUG] Step 2.5/7: checkIfInitialDeployment done (isInitial=${initialDeploymentCheck.isInitialDeployment}, tables=${initialDeploymentCheck.tableCount})`
9748
- );
9749
- if (initialDeploymentCheck.checkSuccessful) {
9750
- if (initialDeploymentCheck.isInitialDeployment) {
9751
- params.logger.info(`Initial deployment detected: ${initialDeploymentCheck.reason}`);
9752
- } else {
9753
- params.logger.info(
9754
- `Existing deployment: ${initialDeploymentCheck.tableCount} table(s) found`
9755
- );
9756
- }
9757
- } else {
9758
- params.logger.warn(
9759
- `Initial deployment check failed: ${initialDeploymentCheck.reason}. Proceeding with risk analysis.`
9760
- );
9761
- }
9762
- const skipViaEnv = process.env.RUNA_SKIP_SCHEMA_RISK === "1" || process.env.RUNA_SKIP_SCHEMA_RISK === "true";
9763
- const skipViaAutoDetect = initialDeploymentCheck.checkSuccessful && initialDeploymentCheck.isInitialDeployment;
9764
- const shouldSkipRisks = params.options.skipRisks === true || params.options.initialDeployment === true || skipViaEnv || skipViaAutoDetect;
9765
- if (shouldSkipRisks) {
9766
- const skipReason = params.options.skipRisks ? "--skip-risks flag" : params.options.initialDeployment ? "--initial-deployment flag" : skipViaEnv ? "RUNA_SKIP_SCHEMA_RISK env" : `auto-detected initial deployment (${initialDeploymentCheck.reason})`;
9767
- console.log(`[DEBUG] Step 3/7: detectRisks SKIPPED (${skipReason})`);
9768
- params.logger.info(`Risk detection skipped: ${skipReason}`);
9769
- } else {
9770
- console.log("[DEBUG] Step 3/7: detectRisks...");
9771
- await detectRisks(params.repoRoot, params.tmpDir);
9772
- console.log("[DEBUG] Step 3/7: detectRisks done");
9773
- }
9774
- console.log("[DEBUG] Step 4/7: snapshotCreate...");
9775
- await snapshotCreate(
9776
- params.repoRoot,
9777
- params.tmpDir,
10295
+ console.log("[DEBUG] Step 0/8: guardProductionDeployment...");
10296
+ const guardResult = guardProductionDeployment(
9778
10297
  inputs.productionDatabaseUrlAdmin,
9779
10298
  inputs.githubSha
9780
10299
  );
9781
- console.log("[DEBUG] Step 4/7: snapshotCreate done");
9782
- console.log("[DEBUG] Step 5/7: repairTimestampInvariants...");
9783
- await repairTimestampInvariants({
9784
- repoRoot: params.repoRoot,
9785
- tmpDir: params.tmpDir,
9786
- databaseUrl: inputs.productionDatabaseUrlAdmin,
9787
- labelPrefix: "production"
9788
- });
9789
- console.log("[DEBUG] Step 5/7: repairTimestampInvariants done");
9790
- console.log("[DEBUG] Step 6/7: applyProductionSchema...");
9791
- let schemaApplyResult;
10300
+ if (!guardResult.canProceed) {
10301
+ if (guardResult.alreadyDeployed) {
10302
+ params.logger.info(guardResult.message);
10303
+ params.summary.status = "success";
10304
+ params.summary.endedAt = (/* @__PURE__ */ new Date()).toISOString();
10305
+ params.summary.durationMs = new Date(params.summary.endedAt).getTime() - new Date(params.summary.startedAt).getTime();
10306
+ const summaryPath = await writeCiSummary({ summary: params.summary });
10307
+ return { summaryPath };
10308
+ }
10309
+ throw new CLIError(guardResult.message, "CI_PROD_DEPLOYMENT_LOCKED", [
10310
+ "Another deployment is in progress",
10311
+ "Wait for the current deployment to complete",
10312
+ "If stuck, check for orphaned advisory locks in the database"
10313
+ ]);
10314
+ }
10315
+ console.log("[DEBUG] Step 0/8: guardProductionDeployment done (lock acquired)");
9792
10316
  try {
9793
- schemaApplyResult = await applyProductionSchema(
10317
+ logKeyValueTable({
10318
+ title: "Inputs",
10319
+ rows: [
10320
+ ["mode", params.summary.mode],
10321
+ ["repoKind", params.summary.repoKind],
10322
+ ["GH_DATABASE_URL_ADMIN", "<set, masked>"],
10323
+ ["GH_DATABASE_URL", "<set, masked>"],
10324
+ ["GITHUB_SHA", inputs.githubSha]
10325
+ ]
10326
+ });
10327
+ logSection("Execute");
10328
+ console.log("[DEBUG] Step 1/7: detectStack...");
10329
+ const stack = await detectStack(
9794
10330
  params.repoRoot,
9795
10331
  params.tmpDir,
9796
- inputs.productionDatabaseUrlAdmin,
9797
- // DDL operations (pg-schema-diff)
9798
- inputs.productionDatabaseUrl,
9799
- // App verification (drizzle_app)
9800
- {
9801
- allowDataLoss: params.options.allowDataLoss === true,
9802
- confirmAuthzUpdate: params.options.confirmAuthzUpdate === true,
9803
- maxLockWaitMs: typeof params.options.maxLockWaitMs === "number" ? params.options.maxLockWaitMs : void 0
9804
- }
10332
+ inputs.productionDatabaseUrlAdmin
10333
+ );
10334
+ console.log(`[DEBUG] Step 1/7: detectStack done (stack=${stack})`);
10335
+ params.summary.detected.stack = stack;
10336
+ const supportedStacks = ["drizzle", "supabase"];
10337
+ if (!stack || !supportedStacks.includes(stack)) {
10338
+ throw new CLIError(
10339
+ `Production apply requires a supported database stack (detected: ${stack || "<empty>"}).`,
10340
+ "CI_PROD_STACK_UNSUPPORTED",
10341
+ [
10342
+ "Supported stacks: drizzle, supabase",
10343
+ "Run: runa init (to set up database structure)",
10344
+ "See: packages/database or supabase/schemas/"
10345
+ ],
10346
+ void 0,
10347
+ 11
10348
+ );
10349
+ }
10350
+ console.log("[DEBUG] Step 2/7: showSchemaDiff...");
10351
+ await showSchemaDiff(params.repoRoot, params.tmpDir);
10352
+ console.log("[DEBUG] Step 2/7: showSchemaDiff done");
10353
+ console.log("[DEBUG] Step 2.5/7: checkIfInitialDeployment...");
10354
+ const initialDeploymentCheck = checkIfInitialDeployment(
10355
+ params.repoRoot,
10356
+ inputs.productionDatabaseUrlAdmin
9805
10357
  );
9806
10358
  console.log(
9807
- `[DEBUG] Step 6/7: applyProductionSchema done (${schemaApplyResult.changeSummary})`
10359
+ `[DEBUG] Step 2.5/7: checkIfInitialDeployment done (isInitial=${initialDeploymentCheck.isInitialDeployment}, tables=${initialDeploymentCheck.tableCount})`
9808
10360
  );
9809
- } catch (applyError) {
9810
- params.logger.error("Schema apply failed; attempting rollback from snapshot");
9811
- await snapshotRestoreLatest(params.repoRoot, params.tmpDir, inputs.productionDatabaseUrlAdmin);
9812
- throw applyError;
9813
- }
9814
- console.log("[DEBUG] Step 7a/7: auditRecord...");
9815
- try {
9816
- await auditRecord(params.repoRoot, params.tmpDir, inputs.productionDatabaseUrlAdmin, {
9817
- commit: inputs.githubSha,
9818
- author: inputs.githubActor
9819
- });
9820
- console.log("[DEBUG] Step 7a/7: auditRecord done");
9821
- } catch (auditError) {
9822
- console.log("[DEBUG] Step 7a/7: auditRecord failed (non-blocking)");
9823
- params.logger.warn(
9824
- auditError instanceof Error ? auditError.message : "Audit record failed (best-effort)"
10361
+ if (initialDeploymentCheck.checkSuccessful) {
10362
+ if (initialDeploymentCheck.isInitialDeployment) {
10363
+ params.logger.info(`Initial deployment detected: ${initialDeploymentCheck.reason}`);
10364
+ } else {
10365
+ params.logger.info(
10366
+ `Existing deployment: ${initialDeploymentCheck.tableCount} table(s) found`
10367
+ );
10368
+ }
10369
+ } else {
10370
+ params.logger.warn(
10371
+ `Initial deployment check failed: ${initialDeploymentCheck.reason}. Proceeding with risk analysis.`
10372
+ );
10373
+ }
10374
+ const skipViaEnv = process.env.RUNA_SKIP_SCHEMA_RISK === "1" || process.env.RUNA_SKIP_SCHEMA_RISK === "true";
10375
+ const skipViaAutoDetect = initialDeploymentCheck.checkSuccessful && initialDeploymentCheck.isInitialDeployment;
10376
+ const shouldSkipRisks = params.options.skipRisks === true || params.options.initialDeployment === true || skipViaEnv || skipViaAutoDetect;
10377
+ if (shouldSkipRisks) {
10378
+ const skipReason = params.options.skipRisks ? "--skip-risks flag" : params.options.initialDeployment ? "--initial-deployment flag" : skipViaEnv ? "RUNA_SKIP_SCHEMA_RISK env" : `auto-detected initial deployment (${initialDeploymentCheck.reason})`;
10379
+ console.log(`[DEBUG] Step 3/7: detectRisks SKIPPED (${skipReason})`);
10380
+ params.logger.info(`Risk detection skipped: ${skipReason}`);
10381
+ } else {
10382
+ console.log("[DEBUG] Step 3/7: detectRisks...");
10383
+ await detectRisks(params.repoRoot, params.tmpDir);
10384
+ console.log("[DEBUG] Step 3/7: detectRisks done");
10385
+ }
10386
+ console.log("[DEBUG] Step 4/7: snapshotCreate...");
10387
+ await snapshotCreate(
10388
+ params.repoRoot,
10389
+ params.tmpDir,
10390
+ inputs.productionDatabaseUrlAdmin,
10391
+ inputs.githubSha
9825
10392
  );
10393
+ console.log("[DEBUG] Step 4/7: snapshotCreate done");
10394
+ console.log("[DEBUG] Step 5/7: repairTimestampInvariants...");
10395
+ await repairTimestampInvariants({
10396
+ repoRoot: params.repoRoot,
10397
+ tmpDir: params.tmpDir,
10398
+ databaseUrl: inputs.productionDatabaseUrlAdmin,
10399
+ labelPrefix: "production"
10400
+ });
10401
+ console.log("[DEBUG] Step 5/7: repairTimestampInvariants done");
10402
+ console.log("[DEBUG] Step 6/7: applyProductionSchema...");
10403
+ let schemaApplyResult;
10404
+ try {
10405
+ schemaApplyResult = await applyProductionSchema(
10406
+ params.repoRoot,
10407
+ params.tmpDir,
10408
+ inputs.productionDatabaseUrlAdmin,
10409
+ // DDL operations (pg-schema-diff)
10410
+ inputs.productionDatabaseUrl,
10411
+ // App verification (drizzle_app)
10412
+ {
10413
+ allowDataLoss: params.options.allowDataLoss === true,
10414
+ confirmAuthzUpdate: params.options.confirmAuthzUpdate === true,
10415
+ maxLockWaitMs: typeof params.options.maxLockWaitMs === "number" ? params.options.maxLockWaitMs : void 0
10416
+ }
10417
+ );
10418
+ console.log(
10419
+ `[DEBUG] Step 6/7: applyProductionSchema done (${schemaApplyResult.changeSummary})`
10420
+ );
10421
+ } catch (applyError) {
10422
+ params.logger.error("Schema apply failed; attempting rollback from snapshot");
10423
+ await snapshotRestoreLatest(
10424
+ params.repoRoot,
10425
+ params.tmpDir,
10426
+ inputs.productionDatabaseUrlAdmin
10427
+ );
10428
+ throw applyError;
10429
+ }
10430
+ console.log("[DEBUG] Step 7a/7: auditRecord...");
10431
+ try {
10432
+ await auditRecord(params.repoRoot, params.tmpDir, inputs.productionDatabaseUrlAdmin, {
10433
+ commit: inputs.githubSha,
10434
+ author: inputs.githubActor
10435
+ });
10436
+ console.log("[DEBUG] Step 7a/7: auditRecord done");
10437
+ } catch (auditError) {
10438
+ console.log("[DEBUG] Step 7a/7: auditRecord failed (non-blocking)");
10439
+ params.logger.warn(
10440
+ auditError instanceof Error ? auditError.message : "Audit record failed (best-effort)"
10441
+ );
10442
+ }
10443
+ console.log("[DEBUG] Step 7b/7: maybeNotifyExternal...");
10444
+ await maybeNotifyExternal({
10445
+ logger: params.logger,
10446
+ repoRoot: params.repoRoot,
10447
+ tmpDir: params.tmpDir,
10448
+ githubRepository: inputs.githubRepository,
10449
+ githubSha: inputs.githubSha,
10450
+ skipNotify: params.options.skipNotify === true
10451
+ });
10452
+ console.log("[DEBUG] Step 7b/7: maybeNotifyExternal done");
10453
+ console.log("[DEBUG] Step 7c/7: maybeAddGithubLabelAndComment...");
10454
+ await maybeAddGithubLabelAndComment({
10455
+ mode: params.summary.mode,
10456
+ skipGithubLabel: params.options.skipGithubLabel === true,
10457
+ githubSha: inputs.githubSha,
10458
+ schemaApplyResult
10459
+ });
10460
+ console.log("[DEBUG] Step 7c/7: maybeAddGithubLabelAndComment done");
10461
+ console.log("[DEBUG] All steps complete, writing summary...");
10462
+ params.summary.status = "success";
10463
+ params.summary.endedAt = (/* @__PURE__ */ new Date()).toISOString();
10464
+ params.summary.durationMs = new Date(params.summary.endedAt).getTime() - new Date(params.summary.startedAt).getTime();
10465
+ const summaryPath = await writeCiSummary({ summary: params.summary });
10466
+ return { summaryPath };
10467
+ } finally {
10468
+ console.log("[DEBUG] Releasing production deployment lock...");
10469
+ releaseProductionDeploymentLock(inputs.productionDatabaseUrlAdmin, guardResult.lockAcquired);
10470
+ console.log("[DEBUG] Production deployment lock released");
9826
10471
  }
9827
- console.log("[DEBUG] Step 7b/7: maybeNotifyExternal...");
9828
- await maybeNotifyExternal({
9829
- logger: params.logger,
9830
- repoRoot: params.repoRoot,
9831
- tmpDir: params.tmpDir,
9832
- githubRepository: inputs.githubRepository,
9833
- githubSha: inputs.githubSha,
9834
- skipNotify: params.options.skipNotify === true
9835
- });
9836
- console.log("[DEBUG] Step 7b/7: maybeNotifyExternal done");
9837
- console.log("[DEBUG] Step 7c/7: maybeAddGithubLabelAndComment...");
9838
- await maybeAddGithubLabelAndComment({
9839
- mode: params.summary.mode,
9840
- skipGithubLabel: params.options.skipGithubLabel === true,
9841
- githubSha: inputs.githubSha,
9842
- schemaApplyResult
9843
- });
9844
- console.log("[DEBUG] Step 7c/7: maybeAddGithubLabelAndComment done");
9845
- console.log("[DEBUG] All steps complete, writing summary...");
9846
- params.summary.status = "success";
9847
- params.summary.endedAt = (/* @__PURE__ */ new Date()).toISOString();
9848
- params.summary.durationMs = new Date(params.summary.endedAt).getTime() - new Date(params.summary.startedAt).getTime();
9849
- const summaryPath = await writeCiSummary({ summary: params.summary });
9850
- return { summaryPath };
9851
10472
  }
9852
10473
  var ciProdApplyCommand = new Command("prod-apply").description("Apply production schema (pg-schema-diff) with safety steps").option("--mode <mode>", "Mode: github-actions | local").option("--output <output>", "Output: human | json").option("--config <path>", "Config path (defaults to .runa/ci.config.json if present)").option("--auto-approve", "Required in CI (non-interactive) mode", false).option("--allow-data-loss", "Allow DELETES_DATA hazard (for large schema migrations)", false).option(
9853
10474
  "--confirm-authz-update",
@@ -9981,7 +10602,7 @@ function getChecks() {
9981
10602
  async function runCheck(check, logDir) {
9982
10603
  const startTime = Date.now();
9983
10604
  const logPath = path10__default.join(logDir, `${check.name}.log`);
9984
- return new Promise((resolve11) => {
10605
+ return new Promise((resolve12) => {
9985
10606
  const proc = spawn(check.command, check.args, {
9986
10607
  cwd: process.cwd(),
9987
10608
  stdio: ["ignore", "pipe", "pipe"],
@@ -10003,7 +10624,7 @@ async function runCheck(check, logDir) {
10003
10624
  const durationMs = Date.now() - startTime;
10004
10625
  await writeFile(logPath, output3, "utf-8").catch(() => {
10005
10626
  });
10006
- resolve11({
10627
+ resolve12({
10007
10628
  name: check.name,
10008
10629
  label: check.label,
10009
10630
  description: check.description,
@@ -10018,7 +10639,7 @@ async function runCheck(check, logDir) {
10018
10639
  await writeFile(logPath, `${output3}
10019
10640
  [spawn error]`, "utf-8").catch(() => {
10020
10641
  });
10021
- resolve11({
10642
+ resolve12({
10022
10643
  name: check.name,
10023
10644
  label: check.label,
10024
10645
  description: check.description,
@@ -10993,7 +11614,7 @@ SELECT (SELECT available FROM a)::text || ',' || (SELECT installed FROM i)::text
10993
11614
  `.trim();
10994
11615
  const probe = await runLogged({
10995
11616
  cwd: params.repoRoot,
10996
- env: process.env,
11617
+ env: getSafeEnv(),
10997
11618
  label: `probe pgtap (best-effort, ${params.label})`,
10998
11619
  command: "psql",
10999
11620
  args: [ddlUrl, "-X", "-v", "ON_ERROR_STOP=1", "-q", "-t", "-A", "-c", probeSql],
@@ -11008,7 +11629,7 @@ SELECT (SELECT available FROM a)::text || ',' || (SELECT installed FROM i)::text
11008
11629
  }
11009
11630
  await runLogged({
11010
11631
  cwd: params.repoRoot,
11011
- env: process.env,
11632
+ env: getSafeEnv(),
11012
11633
  label: `ensure pgtap (best-effort, ${params.label})`,
11013
11634
  command: "psql",
11014
11635
  args: [ddlUrl, "-X", "-v", "ON_ERROR_STOP=1", "-q", "-c", installSql],
@@ -11026,7 +11647,7 @@ SELECT (SELECT available FROM a)::text || ',' || (SELECT installed FROM i)::text
11026
11647
  });
11027
11648
  const probeAfter = await runLogged({
11028
11649
  cwd: params.repoRoot,
11029
- env: process.env,
11650
+ env: getSafeEnv(),
11030
11651
  label: `probe pgtap (best-effort, ${params.label}, after)`,
11031
11652
  command: "psql",
11032
11653
  args: [ddlUrl, "-X", "-v", "ON_ERROR_STOP=1", "-q", "-t", "-A", "-c", probeSql],
@@ -15317,7 +15938,7 @@ function handleProgressCommentUpdate(snapshot2, prevState) {
15317
15938
  updateProgressComment(context, effectiveStep, failedStep);
15318
15939
  }
15319
15940
  async function runCiMachine(input3, logger15, onStateChange) {
15320
- return new Promise((resolve11, reject) => {
15941
+ return new Promise((resolve12, reject) => {
15321
15942
  const actor = createActor(ciMachine, { input: input3 });
15322
15943
  let previousState = "";
15323
15944
  actor.subscribe((snapshot2) => {
@@ -15337,7 +15958,7 @@ async function runCiMachine(input3, logger15, onStateChange) {
15337
15958
  `[DEBUG] Machine completed: state=${currentState}, status=${status}, exitCode=${output3?.exitCode}`
15338
15959
  );
15339
15960
  if (output3) {
15340
- resolve11(output3);
15961
+ resolve12(output3);
15341
15962
  }
15342
15963
  }
15343
15964
  });
@@ -15362,14 +15983,14 @@ function formatLayerResults2(layers) {
15362
15983
  return { passed, failed, lines };
15363
15984
  }
15364
15985
  async function flushAndExit(exitCode) {
15365
- await new Promise((resolve11) => {
15986
+ await new Promise((resolve12) => {
15366
15987
  process.stdout.write("", () => {
15367
15988
  process.stderr.write("", () => {
15368
- resolve11();
15989
+ resolve12();
15369
15990
  });
15370
15991
  });
15371
15992
  });
15372
- await new Promise((resolve11) => setTimeout(resolve11, FLUSH_DELAY_MS));
15993
+ await new Promise((resolve12) => setTimeout(resolve12, FLUSH_DELAY_MS));
15373
15994
  process.exit(exitCode);
15374
15995
  }
15375
15996
 
@@ -17021,10 +17642,16 @@ init_esm_shims();
17021
17642
  var logger2 = createCLILogger("db:apply");
17022
17643
  var MIGRATION_LOCK_ID = 88888;
17023
17644
  function acquireAdvisoryLock(dbUrl, verbose) {
17645
+ const conn = parsePostgresUrl(dbUrl);
17646
+ const args = buildPsqlArgs(conn, { onErrorStop: false });
17024
17647
  const result = spawnSync(
17025
17648
  "psql",
17026
- [dbUrl, "-X", "-t", "-c", `SELECT pg_try_advisory_lock(${MIGRATION_LOCK_ID})`],
17027
- { encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] }
17649
+ [...args, "-t", "-c", `SELECT pg_try_advisory_lock(${MIGRATION_LOCK_ID})`],
17650
+ {
17651
+ encoding: "utf-8",
17652
+ stdio: ["pipe", "pipe", "pipe"],
17653
+ env: buildPsqlEnv(conn)
17654
+ }
17028
17655
  );
17029
17656
  const acquired = result.stdout?.trim() === "t";
17030
17657
  if (verbose) {
@@ -17037,9 +17664,12 @@ function acquireAdvisoryLock(dbUrl, verbose) {
17037
17664
  return acquired;
17038
17665
  }
17039
17666
  function releaseAdvisoryLock(dbUrl, verbose) {
17040
- spawnSync("psql", [dbUrl, "-X", "-t", "-c", `SELECT pg_advisory_unlock(${MIGRATION_LOCK_ID})`], {
17667
+ const conn = parsePostgresUrl(dbUrl);
17668
+ const args = buildPsqlArgs(conn, { onErrorStop: false });
17669
+ spawnSync("psql", [...args, "-t", "-c", `SELECT pg_advisory_unlock(${MIGRATION_LOCK_ID})`], {
17041
17670
  encoding: "utf-8",
17042
- stdio: ["pipe", "pipe", "pipe"]
17671
+ stdio: ["pipe", "pipe", "pipe"],
17672
+ env: buildPsqlEnv(conn)
17043
17673
  });
17044
17674
  if (verbose) {
17045
17675
  logger2.step("Advisory lock released");
@@ -17131,9 +17761,9 @@ function verifyPgSchemaDiffBinary() {
17131
17761
  logger3.step(`pg-schema-diff found: ${(whichResult.stdout || "").trim()}`);
17132
17762
  }
17133
17763
  function verifyDatabaseConnection(dbUrl) {
17134
- const connTestResult = spawnSync("psql", [dbUrl, "-X", "-c", "SELECT 1"], {
17135
- encoding: "utf-8",
17136
- stdio: ["pipe", "pipe", "pipe"],
17764
+ const connTestResult = psqlSyncQuery({
17765
+ databaseUrl: dbUrl,
17766
+ sql: "SELECT 1",
17137
17767
  timeout: 3e4
17138
17768
  });
17139
17769
  if (connTestResult.status !== 0) {
@@ -17320,7 +17950,7 @@ var MAX_RETRIES = 5;
17320
17950
  var BASE_DELAY_MS = 1e3;
17321
17951
  var DEFAULT_MAX_DELAY_MS = 3e4;
17322
17952
  function sleep(ms) {
17323
- return new Promise((resolve11) => setTimeout(resolve11, ms));
17953
+ return new Promise((resolve12) => setTimeout(resolve12, ms));
17324
17954
  }
17325
17955
  function calculateBackoffDelay(attempt, maxDelayMs = DEFAULT_MAX_DELAY_MS) {
17326
17956
  const exponentialDelay = BASE_DELAY_MS * 2 ** attempt;
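With BASE_DELAY_MS = 1e3 the delay doubles on every attempt; DEFAULT_MAX_DELAY_MS = 3e4 is the ceiling, although the line that applies it falls outside this hunk, so treat the cap as an assumption here:

// attempt:       0      1      2      3      4      (MAX_RETRIES = 5)
// 2 ** attempt:  1      2      4      8      16
// delay (ms):    1000   2000   4000   8000   16000  — all below the assumed 30000 ms cap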
@@ -17514,13 +18144,13 @@ function checkFreshDb(dbUrl, schemasDir, customSql, verbose) {
17514
18144
  return result;
17515
18145
  }
17516
18146
  function executeFreshDbCheck(dbUrl, sql) {
17517
- const result = spawnSync("psql", [dbUrl, "-X", "-t", "-c", sql.trim()], {
17518
- encoding: "utf-8",
17519
- stdio: ["pipe", "pipe", "pipe"],
18147
+ const result = psqlSyncQuery({
18148
+ databaseUrl: dbUrl,
18149
+ sql: sql.trim(),
17520
18150
  timeout: 1e4
17521
18151
  });
17522
- if (result.status !== 0 || result.error) {
17523
- const rawError = result.stderr?.trim() || result.error?.message || "Unknown error";
18152
+ if (result.status !== 0) {
18153
+ const rawError = result.stderr?.trim() || "Unknown error";
17524
18154
  return {
17525
18155
  success: false,
17526
18156
  hasData: true,
@@ -17545,9 +18175,11 @@ function applySingleSchemaFile(dbUrl, filePath, fileName, verbose) {
17545
18175
  if (verbose) {
17546
18176
  logger5.debug(`Applying ${fileName}...`);
17547
18177
  }
17548
- const result = spawnSync("psql", [dbUrl, "-X", "-v", "ON_ERROR_STOP=0", "-f", filePath], {
17549
- encoding: "utf-8",
17550
- stdio: ["pipe", "pipe", "pipe"]
18178
+ const result = psqlSyncFile({
18179
+ databaseUrl: dbUrl,
18180
+ filePath,
18181
+ onErrorStop: false
18182
+ // Continue on errors like "already exists"
17551
18183
  });
17552
18184
  const stdout = result.stdout || "";
17553
18185
  const stderr = result.stderr || "";
@@ -17718,21 +18350,31 @@ var ROLE_PASSWORD_CONFIGS = [
17718
18350
  }
17719
18351
  ];
17720
18352
  function roleExists(dbUrl, roleName) {
18353
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(roleName)) {
18354
+ return false;
18355
+ }
17721
18356
  const sql = `SELECT 1 FROM pg_roles WHERE rolname = '${roleName}'`;
17722
- const result = spawnSync("psql", [dbUrl, "-X", "-t", "-c", sql], {
17723
- encoding: "utf-8",
17724
- stdio: ["pipe", "pipe", "pipe"],
18357
+ const result = psqlSyncQuery({
18358
+ databaseUrl: dbUrl,
18359
+ sql,
17725
18360
  timeout: 1e4
17726
18361
  });
17727
18362
  return result.status === 0 && result.stdout?.trim() === "1";
17728
18363
  }
17729
18364
  function setRolePassword(dbUrl, roleName, password, verbose) {
18365
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(roleName)) {
18366
+ logger5.warn(`Invalid role name: ${roleName}`);
18367
+ return false;
18368
+ }
17730
18369
  const escapedPassword = password.replace(/'/g, "''");
17731
18370
  const sql = `ALTER ROLE ${roleName} PASSWORD '${escapedPassword}'`;
17732
- const result = spawnSync("psql", [dbUrl, "-X", "-c", sql], {
18371
+ const conn = parsePostgresUrl(dbUrl);
18372
+ const args = buildPsqlArgs(conn, { onErrorStop: false });
18373
+ const result = spawnSync("psql", [...args, "-c", sql], {
17733
18374
  encoding: "utf-8",
17734
18375
  stdio: ["pipe", "pipe", "pipe"],
17735
- timeout: 1e4
18376
+ timeout: 1e4,
18377
+ env: buildPsqlEnv(conn)
17736
18378
  });
17737
18379
  if (result.status === 0) {
17738
18380
  if (verbose) {
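setRolePassword above escapes the password by doubling single quotes, the standard escape inside a single-quoted SQL literal, while the role name is constrained to identifier characters instead of being quoted. With an assumed password the statement comes out as:

const password = "pa'ss'wd";                    // illustrative value
const escaped = password.replace(/'/g, "''");   // -> "pa''ss''wd"
const sql = `ALTER ROLE drizzle_app PASSWORD '${escaped}'`;
// -> ALTER ROLE drizzle_app PASSWORD 'pa''ss''wd'  (PostgreSQL reads the value back as pa'ss'wd)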
@@ -17807,9 +18449,11 @@ function shouldSkipInProduction(file, env2, verbose) {
17807
18449
  function applySingleIdempotentFile(dbUrl, schemasDir, file, verbose) {
17808
18450
  const filePath = join(schemasDir, file);
17809
18451
  if (verbose) logger5.debug(`Applying ${file}...`);
17810
- const result = spawnSync("psql", [dbUrl, "-X", "-v", "ON_ERROR_STOP=1", "-f", filePath], {
17811
- encoding: "utf-8",
17812
- stdio: ["pipe", "pipe", "pipe"]
18452
+ const result = psqlSyncFile({
18453
+ databaseUrl: dbUrl,
18454
+ filePath,
18455
+ onErrorStop: true
18456
+ // Fail fast on idempotent schemas
17813
18457
  });
17814
18458
  if (verbose) {
17815
18459
  if (result.stdout) process.stdout.write(result.stdout);
@@ -17891,9 +18535,10 @@ var applySeeds = fromPromise(async ({ input: { input: input3, targetDir } }) =>
17891
18535
  const seedFile = join(targetDir, "supabase/seeds/ci.sql");
17892
18536
  if (existsSync(seedFile)) {
17893
18537
  logger5.step("Applying seeds...");
17894
- const result = spawnSync("psql", [dbUrl, "-X", "-v", "ON_ERROR_STOP=1", "-f", seedFile], {
17895
- encoding: "utf-8",
17896
- stdio: ["pipe", "pipe", "pipe"]
18538
+ const result = psqlSyncFile({
18539
+ databaseUrl: dbUrl,
18540
+ filePath: seedFile,
18541
+ onErrorStop: true
17897
18542
  });
17898
18543
  const stdout = result.stdout || "";
17899
18544
  const stderr = result.stderr || "";
@@ -18267,7 +18912,7 @@ async function runDbApply(env2, options) {
18267
18912
  maxLockWaitMs: options.maxLockWaitMs ?? 3e4,
18268
18913
  freshDbCheckSql: options.freshDbCheckSql
18269
18914
  };
18270
- return new Promise((resolve11, reject) => {
18915
+ return new Promise((resolve12, reject) => {
18271
18916
  const actor = createActor(dbApplyMachine, {
18272
18917
  input: { input: input3, targetDir: process.cwd() }
18273
18918
  });
@@ -18281,7 +18926,7 @@ async function runDbApply(env2, options) {
18281
18926
  if (isDbApplyComplete(snapshot2)) {
18282
18927
  const output3 = snapshot2.output;
18283
18928
  if (output3) {
18284
- resolve11(output3);
18929
+ resolve12(output3);
18285
18930
  }
18286
18931
  actor.stop();
18287
18932
  }
@@ -18697,68 +19342,13 @@ var backupCommand = new Command("backup").description("Manage database backups (
18697
19342
  init_esm_shims();
18698
19343
  init_config_loader();
18699
19344
 
18700
- // src/commands/db/utils/psql.ts
18701
- init_esm_shims();
18702
- function parsePostgresUrl(url) {
18703
- const u = new URL(url);
18704
- const host = u.hostname;
18705
- const port = u.port || "5432";
18706
- const user = u.username || "postgres";
18707
- const password = u.password || void 0;
18708
- const database = u.pathname.replace(/^\//, "") || "postgres";
18709
- return { host, port, user, database, password };
18710
- }
18711
- function buildPsqlArgs(conn) {
18712
- return [
18713
- "-X",
18714
- "-v",
18715
- "ON_ERROR_STOP=1",
18716
- "-h",
18717
- conn.host,
18718
- "-p",
18719
- conn.port,
18720
- "-U",
18721
- conn.user,
18722
- "-d",
18723
- conn.database
18724
- ];
18725
- }
18726
- function buildPsqlEnv(conn) {
18727
- if (!conn.password) return { ...process.env };
18728
- return { ...process.env, PGPASSWORD: conn.password };
18729
- }
18730
- async function psqlQuery(params) {
18731
- const conn = parsePostgresUrl(params.databaseUrl);
18732
- const args = buildPsqlArgs(conn);
18733
- if (params.mode === "scalar") {
18734
- const { stdout: stdout2 } = await execa("psql", [...args, "-A", "-t", "-c", params.sql], {
18735
- env: buildPsqlEnv(conn),
18736
- stdio: ["ignore", "pipe", "pipe"]
18737
- });
18738
- return stdout2.trim();
18739
- }
18740
- const { stdout } = await execa("psql", [...args, "-A", "-t", "-c", params.sql], {
18741
- env: buildPsqlEnv(conn),
18742
- stdio: ["ignore", "pipe", "pipe"]
18743
- });
18744
- return stdout.trim();
18745
- }
18746
- async function psqlExec(params) {
18747
- const conn = parsePostgresUrl(params.databaseUrl);
18748
- const args = buildPsqlArgs(conn);
18749
- await execa("psql", [...args, "-c", params.sql], {
18750
- env: buildPsqlEnv(conn),
18751
- stdio: "inherit"
18752
- });
18753
- }
18754
-
18755
19345
  // src/commands/db/utils/schema-sync.ts
18756
19346
  init_esm_shims();
18757
19347
  var ERROR_MESSAGES2 = {
18758
19348
  PATH_TRAVERSAL: "Schema path validation failed",
18759
19349
  SCHEMA_NOT_FOUND: "Schema file not found"
18760
19350
  };
18761
- function containsPathTraversal(inputPath) {
19351
+ function containsPathTraversal2(inputPath) {
18762
19352
  const normalized = path10__default.normalize(inputPath);
18763
19353
  return normalized.includes("..") || inputPath.includes("\0");
18764
19354
  }
@@ -18774,7 +19364,7 @@ function isPathWithinBase(filePath, baseDir) {
18774
19364
  }
18775
19365
  }
18776
19366
  function validateSchemaPath(dbPackagePath, projectRoot = process.cwd()) {
18777
- if (containsPathTraversal(dbPackagePath)) {
19367
+ if (containsPathTraversal2(dbPackagePath)) {
18778
19368
  throw new Error(ERROR_MESSAGES2.PATH_TRAVERSAL);
18779
19369
  }
18780
19370
  const schemaEntry = path10__default.join(dbPackagePath, "src", "schema", "index.ts");
@@ -19046,12 +19636,91 @@ var cleanupCommand = new Command("cleanup").description("Safely cleanup orphan D
19046
19636
 
19047
19637
  // src/commands/db/commands/db-derive-role-passwords.ts
19048
19638
  init_esm_shims();
19049
- var DbDeriveRolePasswordsOutputSchema = z.object({
19050
- drizzleAppPassword: z.string().min(1),
19051
- drizzleReadonlyPassword: z.string().min(1),
19052
- branchName: z.string().min(1)
19053
- }).strict();
19054
- async function writeGitHubOutput(values) {
19639
+
19640
+ // src/utils/github-output-security.ts
19641
+ init_esm_shims();
19642
+ var FORBIDDEN_PATHS = [
19643
+ "/etc/",
19644
+ "/var/log/",
19645
+ "/root/",
19646
+ "/home/",
19647
+ "/.ssh/",
19648
+ "/.gnupg/",
19649
+ "/.config/",
19650
+ "/proc/",
19651
+ "/sys/",
19652
+ "/dev/"
19653
+ ];
19654
+ var ALLOWED_DIRECTORY_PATTERNS = [
19655
+ // GitHub Actions runner temp directories
19656
+ /^\/home\/runner\/work\/_temp\//,
19657
+ /^\/tmp\//,
19658
+ /^\/var\/folders\//,
19659
+ // macOS temp directories
19660
+ /^\/private\/tmp\//,
19661
+ /^\/private\/var\/folders\//,
19662
+ // Windows-style paths (via Git Bash or WSL)
19663
+ /^[A-Za-z]:[\\/].*[\\/]temp[\\/]/i,
19664
+ /^[A-Za-z]:[\\/].*[\\/]tmp[\\/]/i,
19665
+ // Local development (allow current working directory for testing)
19666
+ /^\.?\//
19667
+ ];
19668
+ async function validateGitHubOutputPath(filePath) {
19669
+ if (!filePath || filePath.trim().length === 0) {
19670
+ return { valid: false, resolvedPath: null, error: "Empty file path" };
19671
+ }
19672
+ const normalizedPath = path10__default.normalize(filePath);
19673
+ if (normalizedPath.includes("..")) {
19674
+ return {
19675
+ valid: false,
19676
+ resolvedPath: null,
19677
+ error: "Path traversal detected (..) in file path"
19678
+ };
19679
+ }
19680
+ const absolutePath = path10__default.resolve(normalizedPath);
19681
+ for (const forbidden of FORBIDDEN_PATHS) {
19682
+ if (absolutePath.startsWith(forbidden) || absolutePath.includes(forbidden)) {
19683
+ return {
19684
+ valid: false,
19685
+ resolvedPath: null,
19686
+ error: `Forbidden path: ${forbidden}`
19687
+ };
19688
+ }
19689
+ }
19690
+ let resolvedPath;
19691
+ try {
19692
+ const stats = await stat(absolutePath).catch(() => null);
19693
+ if (stats) {
19694
+ resolvedPath = await realpath(absolutePath);
19695
+ for (const forbidden of FORBIDDEN_PATHS) {
19696
+ if (resolvedPath.startsWith(forbidden) || resolvedPath.includes(forbidden)) {
19697
+ return {
19698
+ valid: false,
19699
+ resolvedPath: null,
19700
+ error: `Symlink resolves to forbidden path: ${forbidden}`
19701
+ };
19702
+ }
19703
+ }
19704
+ } else {
19705
+ resolvedPath = absolutePath;
19706
+ }
19707
+ } catch {
19708
+ resolvedPath = absolutePath;
19709
+ }
19710
+ const isCI3 = process.env.CI === "true" || process.env.GITHUB_ACTIONS === "true";
19711
+ if (isCI3) {
19712
+ const inAllowedDir = ALLOWED_DIRECTORY_PATTERNS.some((pattern) => pattern.test(resolvedPath));
19713
+ if (!inAllowedDir) {
19714
+ return {
19715
+ valid: false,
19716
+ resolvedPath: null,
19717
+ error: `GITHUB_OUTPUT path is not in an allowed directory: ${resolvedPath}`
19718
+ };
19719
+ }
19720
+ }
19721
+ return { valid: true, resolvedPath };
19722
+ }
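validateGitHubOutputPath rejects traversal and a fixed list of forbidden prefixes first, and only when CI or GITHUB_ACTIONS is set does it additionally require the resolved path to match one of the allow-listed temp-directory patterns. Some illustrative outcomes under those rules (paths are examples, not taken from the package):

// With CI=true:
//   /tmp/gh_output_1234       -> valid    (matches /^\/tmp\// and hits no forbidden prefix)
//   /var/folders/ab/T/out     -> valid    (macOS temp pattern)
//   /etc/passwd               -> invalid  ("Forbidden path: /etc/")
//   ./out/../../secrets       -> invalid  (".." detected before resolution)
// Outside CI the allow-list step is skipped, so any non-forbidden, non-traversing path passes.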
19723
+ async function writeGitHubOutputSecure(values) {
19055
19724
  const outFile = process.env.GITHUB_OUTPUT;
19056
19725
  if (!outFile) {
19057
19726
  throw new CLIError(
@@ -19062,10 +19731,30 @@ async function writeGitHubOutput(values) {
19062
19731
  2
19063
19732
  );
19064
19733
  }
19734
+ const validation = await validateGitHubOutputPath(outFile);
19735
+ if (!validation.valid || !validation.resolvedPath) {
19736
+ throw new CLIError(
19737
+ `Invalid GITHUB_OUTPUT path: ${validation.error}`,
19738
+ "GITHUB_OUTPUT_INVALID_PATH",
19739
+ [
19740
+ "GITHUB_OUTPUT must point to a safe file path",
19741
+ "Ensure you are running in a trusted GitHub Actions environment"
19742
+ ],
19743
+ void 0,
19744
+ 2
19745
+ );
19746
+ }
19065
19747
  const lines = Object.entries(values).map(([k, v]) => `${k}=${v}`).join("\n");
19066
- await appendFile(outFile, `${lines}
19748
+ await appendFile(validation.resolvedPath, `${lines}
19067
19749
  `, "utf-8");
19068
19750
  }
19751
+
19752
+ // src/commands/db/commands/db-derive-role-passwords.ts
19753
+ var DbDeriveRolePasswordsOutputSchema = z.object({
19754
+ drizzleAppPassword: z.string().min(1),
19755
+ drizzleReadonlyPassword: z.string().min(1),
19756
+ branchName: z.string().min(1)
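writeGitHubOutputSecure appends GitHub Actions' key=value output format, one pair per line, so later workflow steps can read the values as steps.<step-id>.outputs.<key>. For the derive-role-passwords command below the appended lines look like this (values are placeholders):

drizzle_app_password=<derived password>
drizzle_readonly_password=<derived password>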
19757
+ }).strict();
19069
19758
  function readRequiredEnv(name) {
19070
19759
  const value = (process.env[name] || "").trim();
19071
19760
  if (value.length > 0) return value;
@@ -19091,7 +19780,7 @@ var deriveRolePasswordsCommand = new Command("derive-role-passwords").descriptio
19091
19780
  branchName: branch
19092
19781
  };
19093
19782
  if (options.githubOutput === true) {
19094
- await writeGitHubOutput({
19783
+ await writeGitHubOutputSecure({
19095
19784
  drizzle_app_password: out.drizzleAppPassword,
19096
19785
  drizzle_readonly_password: out.drizzleReadonlyPassword
19097
19786
  });
@@ -19107,21 +19796,6 @@ var DbDeriveUrlsOutputSchema = z.object({
19107
19796
  appDatabaseUrl: z.string().min(1),
19108
19797
  drizzleDatabaseUrl: z.string().min(1)
19109
19798
  }).strict();
19110
- async function writeGitHubOutput2(values) {
19111
- const outFile = process.env.GITHUB_OUTPUT;
19112
- if (!outFile) {
19113
- throw new CLIError(
19114
- "GITHUB_OUTPUT is not set",
19115
- "GITHUB_OUTPUT_MISSING",
19116
- ["Run in GitHub Actions or set GITHUB_OUTPUT to a file path"],
19117
- void 0,
19118
- 2
19119
- );
19120
- }
19121
- const lines = Object.entries(values).map(([k, v]) => `${k}=${v}`).join("\n");
19122
- await appendFile(outFile, `${lines}
19123
- `, "utf-8");
19124
- }
19125
19799
  function readRequired(paramName, cliValue) {
19126
19800
  const val = cliValue?.trim() || "";
19127
19801
  if (val.length > 0) return val;
@@ -19157,7 +19831,7 @@ function deriveUrlsFromSources(params) {
19157
19831
  }
19158
19832
  async function maybeWriteGithubOutputs(params) {
19159
19833
  if (!params.enabled) return;
19160
- await writeGitHubOutput2({
19834
+ await writeGitHubOutputSecure({
19161
19835
  app_database_url: params.out.appDatabaseUrl,
19162
19836
  drizzle_database_url: params.out.drizzleDatabaseUrl
19163
19837
  });
@@ -19373,7 +20047,7 @@ function getOutputOrThrow(snapshot2) {
19373
20047
  }
19374
20048
  async function runMachine(params) {
19375
20049
  const timeoutMs = params.timeoutMs ?? 10 * 60 * 1e3;
19376
- return new Promise((resolve11, reject) => {
20050
+ return new Promise((resolve12, reject) => {
19377
20051
  const actor = createActor(params.machine, { input: params.input });
19378
20052
  const timer = setTimeout(() => {
19379
20053
  try {
@@ -19396,7 +20070,7 @@ async function runMachine(params) {
19396
20070
  clearTimeout(timer);
19397
20071
  sub.unsubscribe();
19398
20072
  const out = getOutputOrThrow(snapshot2);
19399
- resolve11(out);
20073
+ resolve12(out);
19400
20074
  }
19401
20075
  } catch (err) {
19402
20076
  clearTimeout(timer);
@@ -22047,7 +22721,8 @@ function parseSeedPaths(configPath) {
22047
22721
  }
22048
22722
  }
22049
22723
  async function applySeedFile(seedPath, dbUrl) {
22050
- const absolutePath = path10__default.join(process.cwd(), "supabase", seedPath);
22724
+ const supabaseDir = path10__default.join(process.cwd(), "supabase");
22725
+ const absolutePath = resolveSafePath(supabaseDir, seedPath);
22051
22726
  if (!existsSync(absolutePath)) {
22052
22727
  return;
22053
22728
  }
@@ -22062,8 +22737,13 @@ async function applySeeds2(configPath) {
22062
22737
  if (seedPaths.length === 0) {
22063
22738
  return;
22064
22739
  }
22740
+ const supabaseDir = path10__default.join(process.cwd(), "supabase");
22741
+ const safePaths = filterSafePaths(seedPaths, supabaseDir);
22742
+ if (safePaths.length === 0) {
22743
+ return;
22744
+ }
22065
22745
  const dbUrl = getLocalDbUrl();
22066
- for (const seedPath of seedPaths) {
22746
+ for (const seedPath of safePaths) {
22067
22747
  await applySeedFile(seedPath, dbUrl);
22068
22748
  }
22069
22749
  }
@@ -22908,7 +23588,7 @@ var DbSeedMetadataOutputSchema = z.object({
22908
23588
  }).passthrough()
22909
23589
  }).passthrough().optional()
22910
23590
  }).strict();
22911
- async function writeGitHubOutput3(values) {
23591
+ async function writeGitHubOutput(values) {
22912
23592
  const outFile = process.env.GITHUB_OUTPUT;
22913
23593
  if (!outFile) {
22914
23594
  throw new CLIError(
@@ -23115,7 +23795,7 @@ var seedMetadataCommand = new Command("metadata").description("Extract seed meta
23115
23795
  const shouldUseDb = options.fromDb === true || !metadataPath;
23116
23796
  const out = shouldUseDb ? await inferPrimaryIdsFromDatabase() : await readSeedMetadataFile(metadataPath);
23117
23797
  if (options.githubOutput === true) {
23118
- await writeGitHubOutput3({
23798
+ await writeGitHubOutput({
23119
23799
  ...out.primary?.root?.id ? { root_id: out.primary.root.id } : {},
23120
23800
  ...out.primary?.root?.table ? { root_table: out.primary.root.table } : {},
23121
23801
  ...out.primary?.root?.id ? { primary_root_id: out.primary.root.id } : {}
@@ -24514,7 +25194,41 @@ init_esm_shims();
24514
25194
 
24515
25195
  // src/utils/config-updater.ts
24516
25196
  init_esm_shims();
25197
+ var VALID_DIRECTORY_PATTERN = /^[a-zA-Z0-9.][a-zA-Z0-9/_.-]*$/;
25198
+ var VALID_VERSION_PATTERN = /^[0-9]+\.[0-9]+\.[0-9]+(?:-[a-zA-Z0-9.-]+)?$/;
25199
+ function isControlChar2(charCode) {
25200
+ return charCode <= 31 || charCode === 127;
25201
+ }
25202
+ function isValidDirectoryPath(dir) {
25203
+ if (!dir || typeof dir !== "string") {
25204
+ return false;
25205
+ }
25206
+ if (dir.trim() === "") {
25207
+ return false;
25208
+ }
25209
+ if (/['"\\]/.test(dir)) {
25210
+ return false;
25211
+ }
25212
+ for (let i = 0; i < dir.length; i++) {
25213
+ if (isControlChar2(dir.charCodeAt(i))) {
25214
+ return false;
25215
+ }
25216
+ }
25217
+ return VALID_DIRECTORY_PATTERN.test(dir);
25218
+ }
25219
+ function isValidVersion(version) {
25220
+ if (!version || typeof version !== "string") {
25221
+ return false;
25222
+ }
25223
+ return VALID_VERSION_PATTERN.test(version);
25224
+ }
25225
+ function escapeReplacementString(str) {
25226
+ return str.replace(/\$/g, "$$$$");
25227
+ }
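escapeReplacementString exists because $ is a metacharacter in String.prototype.replace replacement strings ($1, $&, $$, …); the validators above already reject $ in directory paths and versions, so this is defence in depth for the replace call itself. A worked example with an illustrative value that would not normally reach this point:

"1.0.0-rc$1".replace(/\$/g, "$$$$");   // -> "1.0.0-rc$$1"  (the "$$$$" replacement encodes two literal "$")
"sdkVersion: 'x'".replace(/(sdkVersion:\s*)'[^']*'/, "$1'1.0.0-rc$$1'");
// -> "sdkVersion: '1.0.0-rc$1'"  — without the escaping, the trailing "$1" would have re-inserted the captured group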
24517
25228
  function updateRunaConfigAppDirectory(appDirectory, targetDir = process.cwd()) {
25229
+ if (!isValidDirectoryPath(appDirectory)) {
25230
+ return { updated: false, reason: "Invalid directory path format" };
25231
+ }
24518
25232
  const configPath = resolve(targetDir, "runa.config.ts");
24519
25233
  if (!existsSync(configPath)) {
24520
25234
  return { updated: false, reason: "runa.config.ts not found" };
@@ -24532,7 +25246,8 @@ function updateRunaConfigAppDirectory(appDirectory, targetDir = process.cwd()) {
24532
25246
  if (!directoryPattern.test(content)) {
24533
25247
  return { updated: false, reason: "Could not find directory field in config" };
24534
25248
  }
24535
- const newContent = content.replace(directoryPattern, `$1'${appDirectory}'`);
25249
+ const safeDirectory = escapeReplacementString(appDirectory);
25250
+ const newContent = content.replace(directoryPattern, `$1'${safeDirectory}'`);
24536
25251
  if (newContent === content) {
24537
25252
  return { updated: false, reason: "No changes needed" };
24538
25253
  }
@@ -24551,6 +25266,9 @@ function syncRunaConfigWithVercel(targetDir, vercelRootDirectory) {
24551
25266
  };
24552
25267
  }
24553
25268
  function updateRunaConfigSdkVersion(version, targetDir = process.cwd()) {
25269
+ if (!isValidVersion(version)) {
25270
+ return { updated: false, reason: "Invalid version format" };
25271
+ }
24554
25272
  const configPath = resolve(targetDir, "runa.config.ts");
24555
25273
  if (!existsSync(configPath)) {
24556
25274
  return { updated: false, reason: "runa.config.ts not found" };
@@ -24562,8 +25280,9 @@ function updateRunaConfigSdkVersion(version, targetDir = process.cwd()) {
24562
25280
  return { updated: false, reason: "Already at this version" };
24563
25281
  }
24564
25282
  const sdkVersionPattern = /(sdkVersion:\s*)(['"])([^'"]*)\2/;
25283
+ const safeVersion = escapeReplacementString(version);
24565
25284
  if (sdkVersionPattern.test(content)) {
24566
- const newContent = content.replace(sdkVersionPattern, `$1'${version}'`);
25285
+ const newContent = content.replace(sdkVersionPattern, `$1'${safeVersion}'`);
24567
25286
  if (newContent === content) {
24568
25287
  return { updated: false, reason: "No changes needed" };
24569
25288
  }
@@ -24575,7 +25294,10 @@ function updateRunaConfigSdkVersion(version, targetDir = process.cwd()) {
24575
25294
  }
24576
25295
  const configVersionPattern = /(configVersion:\s*\d+,?)(\s*\n)/;
24577
25296
  if (configVersionPattern.test(content)) {
24578
- const newContent = content.replace(configVersionPattern, `$1$2 sdkVersion: '${version}',$2`);
25297
+ const newContent = content.replace(
25298
+ configVersionPattern,
25299
+ `$1$2 sdkVersion: '${safeVersion}',$2`
25300
+ );
24579
25301
  writeFileSync(configPath, newContent, "utf-8");
24580
25302
  return { updated: true, reason: `Added sdkVersion: '${version}'` };
24581
25303
  }
@@ -24701,7 +25423,7 @@ var ERROR_MESSAGES3 = {
24701
25423
  PATH_TRAVERSAL: "Working directory path validation failed",
24702
25424
  APP_NOT_FOUND: "App directory not found"
24703
25425
  };
24704
- function containsPathTraversal2(inputPath) {
25426
+ function containsPathTraversal3(inputPath) {
24705
25427
  const normalized = path10__default.normalize(inputPath);
24706
25428
  return normalized.includes("..") || inputPath.includes("\0");
24707
25429
  }
@@ -24717,7 +25439,7 @@ function isPathWithinBase2(filePath, baseDir) {
24717
25439
  }
24718
25440
  }
24719
25441
  function validateCustomWorkingDir(cwdPath, projectRoot) {
24720
- if (containsPathTraversal2(cwdPath)) {
25442
+ if (containsPathTraversal3(cwdPath)) {
24721
25443
  throw new CLIError(ERROR_MESSAGES3.PATH_TRAVERSAL, "ENV_PULL_PATH_TRAVERSAL");
24722
25444
  }
24723
25445
  const absolutePath = path10__default.isAbsolute(cwdPath) ? cwdPath : path10__default.resolve(projectRoot, cwdPath);
@@ -24736,7 +25458,7 @@ function validateCustomWorkingDir(cwdPath, projectRoot) {
24736
25458
  return absolutePath;
24737
25459
  }
24738
25460
  function validateAppDirectory2(appName, projectRoot) {
24739
- if (containsPathTraversal2(appName) || appName.includes("/") || appName.includes("\\")) {
25461
+ if (containsPathTraversal3(appName) || appName.includes("/") || appName.includes("\\")) {
24740
25462
  throw new CLIError(ERROR_MESSAGES3.PATH_TRAVERSAL, "ENV_PULL_PATH_TRAVERSAL");
24741
25463
  }
24742
25464
  const appsDir = resolve(projectRoot, "apps");
@@ -25350,7 +26072,55 @@ async function checkAuth(logger15) {
25350
26072
 
25351
26073
  // src/commands/env/commands/setup/file-export.ts
25352
26074
  init_esm_shims();
26075
+
26076
+ // src/cli/signal-handler.ts
26077
+ init_esm_shims();
26078
+ var logger7 = createCLILogger("signal");
26079
+ var state = {
26080
+ cleanupFns: [],
26081
+ sigintCount: 0,
26082
+ isShuttingDown: false
26083
+ };
26084
+ function registerCleanup(fn) {
26085
+ state.cleanupFns.push(fn);
26086
+ }
26087
+ async function runCleanup() {
26088
+ for (const fn of state.cleanupFns) {
26089
+ try {
26090
+ await fn();
26091
+ } catch (error) {
26092
+ logger7.debug(`Cleanup error: ${error instanceof Error ? error.message : String(error)}`);
26093
+ }
26094
+ }
26095
+ state.cleanupFns = [];
26096
+ }
26097
+ async function handleShutdown(signal, exitCode) {
26098
+ if (state.isShuttingDown) {
26099
+ logger7.warn("Force exit");
26100
+ process.exit(exitCode);
26101
+ }
26102
+ state.isShuttingDown = true;
26103
+ logger7.info(`Received ${signal}, shutting down...`);
26104
+ await runCleanup();
26105
+ process.exit(exitCode);
26106
+ }
26107
+ function setupSignalHandlers() {
26108
+ process.on("SIGINT", async () => {
26109
+ state.sigintCount++;
26110
+ if (state.sigintCount >= 2) {
26111
+ logger7.warn("Force exit (double Ctrl+C)");
26112
+ process.exit(130);
26113
+ }
26114
+ await handleShutdown("SIGINT", 130);
26115
+ });
26116
+ process.on("SIGTERM", async () => {
26117
+ await handleShutdown("SIGTERM", 143);
26118
+ });
26119
+ }
26120
+
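The exit codes follow the usual 128 + signal-number convention (SIGINT = 2 gives 130, SIGTERM = 15 gives 143), and a second Ctrl+C forces an immediate exit without running cleanup. The file-export module below registers its temp-file removal through registerCleanup; a minimal sketch of that pattern (the file name is illustrative):

import { rm } from "node:fs/promises";

setupSignalHandlers();                            // install the SIGINT/SIGTERM handlers once at startup
registerCleanup(async () => {
  await rm(".env.setup-tmp", { force: true });    // best-effort; runCleanup logs failures at debug level
});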
26121
+ // src/commands/env/commands/setup/file-export.ts
25353
26122
  var ENV_SETUP_TMP_FILE = ".env.setup-tmp";
26123
+ var cleanupRegistered = false;
25354
26124
  var SECURE_FILE_MODE = 384;
25355
26125
  function writeEnvSetupFile(envVars, drizzleAppPassword, drizzleServicePassword, logger15) {
25356
26126
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
@@ -25413,8 +26183,21 @@ function writeEnvSetupFile(envVars, drizzleAppPassword, drizzleServicePassword,
25413
26183
  chmodSync(filePath, SECURE_FILE_MODE);
25414
26184
  } catch {
25415
26185
  }
26186
+ if (!cleanupRegistered) {
26187
+ registerCleanup(cleanupEnvSetupFileInternal);
26188
+ cleanupRegistered = true;
26189
+ }
25416
26190
  logger15.success(`\u2713 Written to ${ENV_SETUP_TMP_FILE} (permissions: owner-only)`);
25417
26191
  }
26192
+ function cleanupEnvSetupFileInternal() {
26193
+ try {
26194
+ const filePath = join(process.cwd(), ENV_SETUP_TMP_FILE);
26195
+ if (existsSync(filePath)) {
26196
+ unlinkSync(filePath);
26197
+ }
26198
+ } catch {
26199
+ }
26200
+ }
25418
26201
 
25419
26202
  // src/commands/env/commands/setup/helpers.ts
25420
26203
  init_esm_shims();
@@ -25566,7 +26349,7 @@ async function promptForPassword(prompt) {
25566
26349
  rl.close();
25567
26350
  return answer.trim();
25568
26351
  }
25569
- return new Promise((resolve11) => {
26352
+ return new Promise((resolve12) => {
25570
26353
  process.stdout.write(`${prompt}: `);
25571
26354
  let password = "";
25572
26355
  process.stdin.setRawMode(true);
@@ -25584,7 +26367,7 @@ async function promptForPassword(prompt) {
25584
26367
  if (result.action === "enter") {
25585
26368
  cleanup();
25586
26369
  process.stdout.write("\n");
25587
- resolve11(password);
26370
+ resolve12(password);
25588
26371
  return;
25589
26372
  }
25590
26373
  if (result.action === "exit") {
@@ -25956,7 +26739,7 @@ async function setVercelEnvVar(linkedDir, key, value, overwrite, logger15) {
25956
26739
  const environments = ["development", "preview", "production"];
25957
26740
  for (const env2 of environments) {
25958
26741
  const addArgs = ["env", "add", key, env2, "--cwd", linkedDir];
25959
- await new Promise((resolve11, reject) => {
26742
+ await new Promise((resolve12, reject) => {
25960
26743
  const proc = spawn("vercel", addArgs, {
25961
26744
  stdio: ["pipe", "pipe", "pipe"]
25962
26745
  });
@@ -25968,7 +26751,7 @@ async function setVercelEnvVar(linkedDir, key, value, overwrite, logger15) {
25968
26751
  proc.stdin?.end();
25969
26752
  proc.on("close", (code) => {
25970
26753
  if (code === 0) {
25971
- resolve11();
26754
+ resolve12();
25972
26755
  } else {
25973
26756
  reject(
25974
26757
  new Error(`vercel env add (${env2}) failed with exit code ${code}: ${stderr.trim()}`)
@@ -27408,7 +28191,7 @@ async function createHotfixActor(targetDir = process.cwd(), options = {}) {
27408
28191
  const actor = createActor(hotfixMachine, {
27409
28192
  input: { targetDir }
27410
28193
  });
27411
- return new Promise((resolve11, reject) => {
28194
+ return new Promise((resolve12, reject) => {
27412
28195
  const timeoutId = setTimeout(() => {
27413
28196
  actor.stop();
27414
28197
  reject(new Error("Hotfix machine initialization timed out"));
@@ -27417,7 +28200,7 @@ async function createHotfixActor(targetDir = process.cwd(), options = {}) {
27417
28200
  const state2 = snapshot2.value;
27418
28201
  if (state2 !== "loading") {
27419
28202
  clearTimeout(timeoutId);
27420
- resolve11(actor);
28203
+ resolve12(actor);
27421
28204
  }
27422
28205
  });
27423
28206
  actor.start();
@@ -27426,7 +28209,7 @@ async function createHotfixActor(targetDir = process.cwd(), options = {}) {
27426
28209
  async function sendAndWait(actor, event, targetStates, options = {}) {
27427
28210
  const { timeout = 6e4, transitionStates = [] } = options;
27428
28211
  actor.send(event);
27429
- return new Promise((resolve11, reject) => {
28212
+ return new Promise((resolve12, reject) => {
27430
28213
  const timeoutId = setTimeout(() => {
27431
28214
  reject(new Error(`State transition timed out after sending ${event.type}`));
27432
28215
  }, timeout);
@@ -27436,7 +28219,7 @@ async function sendAndWait(actor, event, targetStates, options = {}) {
27436
28219
  if (targetStates.includes(state2)) {
27437
28220
  clearTimeout(timeoutId);
27438
28221
  subscription.unsubscribe();
27439
- resolve11(snapshot2);
28222
+ resolve12(snapshot2);
27440
28223
  return;
27441
28224
  }
27442
28225
  if (error && !transitionStates.includes(state2) && !targetStates.includes(state2)) {
@@ -27448,7 +28231,7 @@ async function sendAndWait(actor, event, targetStates, options = {}) {
27448
28231
  if (snapshot2.status === "done") {
27449
28232
  clearTimeout(timeoutId);
27450
28233
  subscription.unsubscribe();
27451
- resolve11(snapshot2);
28234
+ resolve12(snapshot2);
27452
28235
  }
27453
28236
  });
27454
28237
  });
@@ -28199,7 +28982,7 @@ init_esm_shims();
28199
28982
 
28200
28983
  // src/utils/help-system.ts
28201
28984
  init_esm_shims();
28202
- var logger7 = createCLILogger("help");
28985
+ var logger8 = createCLILogger("help");
28203
28986
  var HELP_DATABASE = [
28204
28987
  // Init command errors
28205
28988
  {
@@ -28409,15 +29192,15 @@ Try:
28409
29192
  return genericHelp[command];
28410
29193
  }
28411
29194
  function displayHelp(help) {
28412
- logger7.info(help.helpText);
29195
+ logger8.info(help.helpText);
28413
29196
  if (help.relatedCommands && help.relatedCommands.length > 0) {
28414
- logger7.info("\n\u{1F517} Related commands:");
29197
+ logger8.info("\n\u{1F517} Related commands:");
28415
29198
  for (const cmd of help.relatedCommands) {
28416
- logger7.info(` ${cmd}`);
29199
+ logger8.info(` ${cmd}`);
28417
29200
  }
28418
29201
  }
28419
29202
  if (help.learnMoreUrl) {
28420
- logger7.info(`
29203
+ logger8.info(`
28421
29204
  \u{1F4D6} Learn more: ${help.learnMoreUrl}`);
28422
29205
  }
28423
29206
  }
@@ -28432,9 +29215,50 @@ var TEMPLATES_PACKAGE_NAME = "@r06-dev/runa-templates";
28432
29215
  var GITHUB_PACKAGES_REGISTRY = "https://npm.pkg.github.com";
28433
29216
 
28434
29217
  // src/utils/template-fetcher.ts
29218
+ var SAFE_VERSION_PATTERN = /^[a-zA-Z0-9._-]+$/;
29219
+ function validateVersion(version) {
29220
+ if (!version || version.trim() === "") {
29221
+ throw new CLIError("Invalid version: version cannot be empty.", "INVALID_VERSION", [
29222
+ "Provide a valid semantic version (e.g., 0.1.0)"
29223
+ ]);
29224
+ }
29225
+ if (version.includes("..") || version.includes("/") || version.includes("\\")) {
29226
+ throw new CLIError(
29227
+ "Invalid version: path traversal sequences detected.",
29228
+ "INVALID_VERSION_PATH_TRAVERSAL",
29229
+ [
29230
+ 'Version cannot contain "..", "/", or "\\"',
29231
+ "Provide a valid semantic version (e.g., 0.1.0)"
29232
+ ]
29233
+ );
29234
+ }
29235
+ if (!SAFE_VERSION_PATTERN.test(version)) {
29236
+ throw new CLIError(
29237
+ "Invalid version: contains prohibited characters.",
29238
+ "INVALID_VERSION_CHARS",
29239
+ [
29240
+ 'Version can only contain: a-z, A-Z, 0-9, ".", "-", "_"',
29241
+ "Provide a valid semantic version (e.g., 0.1.0)"
29242
+ ]
29243
+ );
29244
+ }
29245
+ }
29246
+ function getCacheBase() {
29247
+ return path10__default.join(os.homedir(), ".cache", "runa", "templates");
29248
+ }
28435
29249
  function getCacheDir(version) {
28436
- const cacheBase = path10__default.join(os.homedir(), ".cache", "runa", "templates");
28437
- return path10__default.join(cacheBase, version);
29250
+ validateVersion(version);
29251
+ const cacheBase = getCacheBase();
29252
+ const cacheDir = path10__default.join(cacheBase, version);
29253
+ const resolvedCacheDir = path10__default.resolve(cacheDir);
29254
+ if (!isPathContained(cacheBase, resolvedCacheDir)) {
29255
+ throw new CLIError(
29256
+ "Security: cache directory would escape allowed location.",
29257
+ "CACHE_PATH_ESCAPE",
29258
+ ["This is likely a bug. Please report this issue."]
29259
+ );
29260
+ }
29261
+ return resolvedCacheDir;
28438
29262
  }
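Taken together, validateVersion plus the isPathContained check mean getCacheDir can only ever hand back a direct child of ~/.cache/runa/templates. Roughly, with illustrative inputs:

getCacheDir("0.5.31");         // -> <home>/.cache/runa/templates/0.5.31
getCacheDir("1.0.0-beta.2");   // -> <home>/.cache/runa/templates/1.0.0-beta.2
getCacheDir("../../etc");      // throws INVALID_VERSION_PATH_TRAVERSAL before any path is built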
28439
29263
  function isCached(version) {
28440
29264
  const cacheDir = getCacheDir(version);
@@ -28608,24 +29432,56 @@ async function fetchTemplates(options = {}) {
   cleanupAuthToken();
   }
   }
+ var MAX_WORKSPACE_TRAVERSAL_DEPTH = 10;
+ function isLegitimateWorkspaceRoot(workspaceRoot) {
+ const pnpmWorkspaceFile = path10__default.join(workspaceRoot, "pnpm-workspace.yaml");
+ if (!fs6__default.existsSync(pnpmWorkspaceFile)) {
+ return false;
+ }
+ const rootPackageFile = path10__default.join(workspaceRoot, "package.json");
+ if (!fs6__default.existsSync(rootPackageFile)) {
+ return false;
+ }
+ try {
+ const rootPkg = JSON.parse(fs6__default.readFileSync(rootPackageFile, "utf-8"));
+ const hasWorkspaces = Array.isArray(rootPkg.workspaces) && rootPkg.workspaces.length > 0;
+ const hasExpectedName = rootPkg.name === "runa" || rootPkg.name === "@r06-dev/runa";
+ return hasWorkspaces || hasExpectedName;
+ } catch {
+ return false;
+ }
+ }
   function resolveWorkspaceTemplates() {
- let current = process.cwd();
+ let current = path10__default.resolve(process.cwd());
   const root = path10__default.parse(current).root;
- while (current !== root) {
+ let depth = 0;
+ while (current !== root && depth < MAX_WORKSPACE_TRAVERSAL_DEPTH) {
+ if (!isLegitimateWorkspaceRoot(current)) {
+ current = path10__default.dirname(current);
+ depth++;
+ continue;
+ }
   const packagesTemplates = path10__default.join(current, "packages", "runa-templates", "templates");
- if (fs6__default.existsSync(packagesTemplates)) {
+ const normalizedTemplatesPath = path10__default.resolve(packagesTemplates);
+ if (!normalizedTemplatesPath.startsWith(current + path10__default.sep)) {
+ current = path10__default.dirname(current);
+ depth++;
+ continue;
+ }
+ if (fs6__default.existsSync(normalizedTemplatesPath)) {
   const markerFile = path10__default.join(current, "packages", "runa-templates", "package.json");
   if (fs6__default.existsSync(markerFile)) {
   try {
   const pkg = JSON.parse(fs6__default.readFileSync(markerFile, "utf-8"));
   if (pkg.name === "@r06-dev/runa-templates") {
- return packagesTemplates;
+ return normalizedTemplatesPath;
   }
   } catch {
   }
   }
   }
   current = path10__default.dirname(current);
+ depth++;
   }
   return void 0;
   }
@@ -29352,7 +30208,7 @@ function buildDefinitionMap(definitions) {
   function buildEnhancedMachines(e2eManifest, definitionMap, machineDefinitions) {
   const enhanced = {};
   const normalizedIdToCanonical = /* @__PURE__ */ new Map();
- const isMachineFile = (path63) => path63.includes("machine.ts") || path63.includes("machines/") || path63.includes(".machine.ts") || path63.includes("/machine/");
+ const isMachineFile = (path64) => path64.includes("machine.ts") || path64.includes("machines/") || path64.includes(".machine.ts") || path64.includes("/machine/");
   const addMachine = (id, sourceFile, entryBuilder) => {
   const normalizedId = normalizeToCanonicalId(id).toLowerCase();
   const existingCanonical = normalizedIdToCanonical.get(normalizedId);
@@ -30376,7 +31232,7 @@ async function startService(service, options) {
   return true;
   }
   }
- await new Promise((resolve11) => setTimeout(resolve11, 1e3));
+ await new Promise((resolve12) => setTimeout(resolve12, 1e3));
   }
   console.warn(`\u26A0\uFE0F ${service} started but health check timed out`);
   return true;
@@ -30440,15 +31296,15 @@ var servicesCommand = new Command("services").description("Manage Supabase servi
 
   // src/commands/session/index.ts
   init_esm_shims();
- var logger8 = createCLILogger("session");
+ var logger9 = createCLILogger("session");
   async function listSessionsAction() {
   const sessions = await listSessions();
   if (sessions.length === 0) {
- logger8.info("No active sessions");
+ logger9.info("No active sessions");
   return;
   }
   const now = Date.now();
- logger8.info(`Active sessions: ${sessions.length}`);
+ logger9.info(`Active sessions: ${sessions.length}`);
   console.log("");
   for (const session of sessions) {
   const lastHeartbeat = new Date(session.lastHeartbeat).getTime();
@@ -30471,9 +31327,9 @@ async function listSessionsAction() {
   async function cleanupAction() {
   const cleaned = await cleanupStaleSessions();
   if (cleaned === 0) {
- logger8.info("No stale sessions to clean up");
+ logger9.info("No stale sessions to clean up");
   } else {
- logger8.success(`Cleaned up ${cleaned} stale session(s)`);
+ logger9.success(`Cleaned up ${cleaned} stale session(s)`);
   }
   }
   async function clearAction() {
@@ -30481,7 +31337,7 @@ async function clearAction() {
   for (const session of sessions) {
   await removeSession(session.sessionId);
   }
- logger8.success(`Cleared ${sessions.length} session(s)`);
+ logger9.success(`Cleared ${sessions.length} session(s)`);
   }
   async function checkAction() {
   if (isSessionCheckDisabled()) {
@@ -30539,7 +31395,7 @@ async function registerAction(activityType, options) {
   }
   async function startAction() {
   const session = await createSession();
- logger8.success(`Session started: ${session.sessionId.slice(0, 8)}`);
+ logger9.success(`Session started: ${session.sessionId.slice(0, 8)}`);
   console.log(` PID: ${session.pid}`);
   console.log(` Use 'runa session:stop' to end session`);
   }
@@ -30548,9 +31404,9 @@ async function stopAction() {
   const current = sessions.find((s) => s.pid === process.pid);
   if (current) {
   await removeSession(current.sessionId);
- logger8.success("Session stopped");
+ logger9.success("Session stopped");
   } else {
- logger8.info("No active session for this process");
+ logger9.info("No active session for this process");
   }
   }
   var sessionListCommand = new Command("session:list").description("List active sessions").action(listSessionsAction);
@@ -31907,9 +32763,9 @@ var PATH_MAPPING_RULES = [
   category: "prompts"
   }
   ];
- function getCategoryForPath(path63) {
+ function getCategoryForPath(path64) {
   for (const rule of PATH_MAPPING_RULES) {
- if (matchGlobPattern(path63, rule.runa) || matchGlobPattern(path63, rule.template)) {
+ if (matchGlobPattern(path64, rule.runa) || matchGlobPattern(path64, rule.template)) {
   return rule.category;
   }
   }
@@ -31929,17 +32785,17 @@ function applyReverseRename(normalized, rule, isTemplate) {
   }
   return normalized;
   }
- function generateComparisonKey(path63, isTemplate) {
- let normalized = isTemplate ? normalizeTemplatePath(path63) : path63;
+ function generateComparisonKey(path64, isTemplate) {
+ let normalized = isTemplate ? normalizeTemplatePath(path64) : path64;
   for (const rule of PATH_MAPPING_RULES) {
   normalized = applyReverseRename(normalized, rule, isTemplate);
   }
   return normalized;
   }
- function matchGlobPattern(path63, pattern) {
+ function matchGlobPattern(path64, pattern) {
   const regexPattern = pattern.replace(/[.+^${}()|[\]\\]/g, "\\$&").replace(/\*\*/g, "<<DOUBLE_STAR>>").replace(/\*/g, "([^/]*)").replace(/<<DOUBLE_STAR>>/g, "(.*)");
   const regex = new RegExp(`^${regexPattern}$`);
- const match = path63.match(regex);
+ const match = path64.match(regex);
   if (match) {
   const subPath = match[1] ?? "";
   return { matched: true, subPath };
@@ -32574,15 +33430,15 @@ function printActionsNeeded(logger15, actions) {
   );
   }
   function findRepoRoot3(startDir) {
- const { existsSync: existsSync48, readFileSync: readFileSync27 } = __require("fs");
+ const { existsSync: existsSync49, readFileSync: readFileSync27 } = __require("fs");
   const { join: join22, dirname: dirname4 } = __require("path");
   let current = startDir;
   while (current !== dirname4(current)) {
- if (existsSync48(join22(current, "turbo.json"))) {
+ if (existsSync49(join22(current, "turbo.json"))) {
   return current;
   }
   const pkgPath = join22(current, "package.json");
- if (existsSync48(pkgPath)) {
+ if (existsSync49(pkgPath)) {
   try {
   const pkg = JSON.parse(readFileSync27(pkgPath, "utf-8"));
   if (pkg.workspaces) {
@@ -32650,10 +33506,10 @@ function generateReportOutput(output3, isJsonMode) {
   };
   }
   function validateRunaRepo(repoRoot) {
- const { existsSync: existsSync48 } = __require("fs");
+ const { existsSync: existsSync49 } = __require("fs");
   const { join: join22 } = __require("path");
   const templateDir = join22(repoRoot, "packages/runa-templates/templates");
- if (!existsSync48(templateDir)) {
+ if (!existsSync49(templateDir)) {
   throw new CLIError("template-check is a runa-repo only command", "NOT_RUNA_REPO", [
   "This command compares runa-repo with pj-repo templates",
   "It should only be run in the runa repository",
@@ -32664,7 +33520,7 @@ function validateRunaRepo(repoRoot) {
   async function runTemplateCheckMachine(input3, logger15) {
   const repoRoot = input3.targetDir ?? findRepoRoot3(process.cwd());
   validateRunaRepo(repoRoot);
- return new Promise((resolve11, reject) => {
+ return new Promise((resolve12, reject) => {
   const actor = createActor(syncCheckMachine, {
   input: { input: input3, repoRoot }
   });
@@ -32681,7 +33537,7 @@ async function runTemplateCheckMachine(input3, logger15) {
   const output3 = snapshot2.output;
   if (output3) {
   const reportOutput = generateReportOutput(output3, isJsonMode);
- resolve11({ output: output3, reportOutput });
+ resolve12({ output: output3, reportOutput });
   }
   }
   });
@@ -33602,7 +34458,7 @@ function renderDashboard(props = {}) {
   }
 
   // src/commands/ui.ts
- var logger9 = createCLILogger("ui");
+ var logger10 = createCLILogger("ui");
   var uiCommand = new Command("ui").description("Launch interactive TUI dashboard").option("--mode <mode>", "Dashboard mode: test, db, or all", "all").option("--no-refresh", "Disable auto-refresh").option("--interval <ms>", "Refresh interval in milliseconds", "1000").action(async (options) => {
   try {
   if (isNonInteractiveEnabled()) {
@@ -33613,7 +34469,7 @@ var uiCommand = new Command("ui").description("Launch interactive TUI dashboard"
   );
   }
   assertTtyRequired("TUI dashboard");
- logger9.info("Launching TUI dashboard...\n");
+ logger10.info("Launching TUI dashboard...\n");
   const validModes = ["test", "db", "all"];
   if (!validModes.includes(options.mode)) {
   throw new CLIError(`Invalid mode: ${options.mode}`, "INVALID_MODE", [
@@ -33883,7 +34739,7 @@ async function runVerification(logger15) {
   }
   async function findWorkspacesWithSdkPackages(rootDir, packageNames) {
   const fs15 = await import('fs/promises');
- const path63 = await import('path');
+ const path64 = await import('path');
   const { glob: glob8 } = await import('glob');
   const result = [];
   const packageJsonPaths = await glob8("**/package.json", {
@@ -33892,7 +34748,7 @@ async function findWorkspacesWithSdkPackages(rootDir, packageNames) {
   // Exclude root
   });
   for (const relPath of packageJsonPaths) {
- const fullPath = path63.join(rootDir, relPath);
+ const fullPath = path64.join(rootDir, relPath);
   try {
   const content = await fs15.readFile(fullPath, "utf-8");
   const pkg = JSON.parse(content);
@@ -33905,7 +34761,7 @@ async function findWorkspacesWithSdkPackages(rootDir, packageNames) {
   }
   if (hasAnySdkPackage) {
   result.push({
- dir: path63.dirname(fullPath),
+ dir: path64.dirname(fullPath),
   packages: packageNames
   // All packages, not just found ones
   });
@@ -34396,53 +35252,6 @@ var vulnCheckCommand = new Command("vuln-check").description("Run comprehensive
 
   // src/commands/watch.ts
   init_esm_shims();
-
- // src/cli/signal-handler.ts
- init_esm_shims();
- var logger10 = createCLILogger("signal");
- var state = {
- cleanupFns: [],
- sigintCount: 0,
- isShuttingDown: false
- };
- function registerCleanup(fn) {
- state.cleanupFns.push(fn);
- }
- async function runCleanup() {
- for (const fn of state.cleanupFns) {
- try {
- await fn();
- } catch (error) {
- logger10.debug(`Cleanup error: ${error instanceof Error ? error.message : String(error)}`);
- }
- }
- state.cleanupFns = [];
- }
- async function handleShutdown(signal, exitCode) {
- if (state.isShuttingDown) {
- logger10.warn("Force exit");
- process.exit(exitCode);
- }
- state.isShuttingDown = true;
- logger10.info(`Received ${signal}, shutting down...`);
- await runCleanup();
- process.exit(exitCode);
- }
- function setupSignalHandlers() {
- process.on("SIGINT", async () => {
- state.sigintCount++;
- if (state.sigintCount >= 2) {
- logger10.warn("Force exit (double Ctrl+C)");
- process.exit(130);
- }
- await handleShutdown("SIGINT", 130);
- });
- process.on("SIGTERM", async () => {
- await handleShutdown("SIGTERM", 143);
- });
- }
-
- // src/commands/watch.ts
   init_config_loader();
 
   // src/watchers/schema-watcher.ts
@@ -34595,7 +35404,7 @@ var SchemaWatcher = class {
   persistent: true,
   ignoreInitial: true
   });
- this.watcher.on("add", (path63) => this.handleFileEvent("add", path63)).on("change", (path63) => this.handleFileEvent("change", path63)).on("unlink", (path63) => this.handleFileEvent("unlink", path63)).on("error", (error) => this.handleError(error));
+ this.watcher.on("add", (path64) => this.handleFileEvent("add", path64)).on("change", (path64) => this.handleFileEvent("change", path64)).on("unlink", (path64) => this.handleFileEvent("unlink", path64)).on("error", (error) => this.handleError(error));
   this.logger.success("\u2705 Schema watcher started");
   this.logger.info(chalk.dim(" Press Ctrl+C to stop\n"));
   }
@@ -34616,23 +35425,23 @@ var SchemaWatcher = class {
   /**
   * Handle file system events with debouncing
   */
- handleFileEvent(type, path63) {
- const existingTimer = this.debounceTimers.get(path63);
+ handleFileEvent(type, path64) {
+ const existingTimer = this.debounceTimers.get(path64);
   if (existingTimer) {
   clearTimeout(existingTimer);
   }
   const timer = setTimeout(() => {
- this.processFileEvent({ type, path: path63, timestamp: /* @__PURE__ */ new Date() });
- this.debounceTimers.delete(path63);
+ this.processFileEvent({ type, path: path64, timestamp: /* @__PURE__ */ new Date() });
+ this.debounceTimers.delete(path64);
   }, this.options.debounceMs);
- this.debounceTimers.set(path63, timer);
+ this.debounceTimers.set(path64, timer);
   }
   /**
   * Process file system event
   */
   async processFileEvent(event) {
- const { type, path: path63 } = event;
- const fileName = path63.split("/").pop() || path63;
+ const { type, path: path64 } = event;
+ const fileName = path64.split("/").pop() || path64;
   switch (type) {
   case "add":
   this.logger.info(chalk.green(`\u2795 Added: ${fileName}`));
@@ -34645,19 +35454,19 @@ var SchemaWatcher = class {
   return;
   }
   if (this.options.autoValidate) {
- await this.validateFile(path63);
+ await this.validateFile(path64);
   }
   }
   /**
   * Validate schema file
   */
- async validateFile(path63) {
+ async validateFile(path64) {
   try {
   this.logger.info(chalk.dim(" Validating..."));
- const validationResult = await validateSchemaFile(path63);
+ const validationResult = await validateSchemaFile(path64);
   if (validationResult.isValid) {
   this.logger.success(chalk.green(" \u2713 Validation passed"));
- const risks = await detectSchemaRisks(path63);
+ const risks = await detectSchemaRisks(path64);
   if (risks.length > 0) {
   this.logger.warn(` \u26A0\uFE0F ${risks.length} risk(s) detected`);
   for (const risk of risks) {
@@ -34672,7 +35481,7 @@ var SchemaWatcher = class {
   if (this.options.notifyOnError) {
   await notifyDesktop({
   title: "Schema Validation Failed",
- message: `${validationResult.errors.length} error(s) in ${path63.split("/").pop()}`,
+ message: `${validationResult.errors.length} error(s) in ${path64.split("/").pop()}`,
   type: "error"
   });
   }
@@ -34891,7 +35700,7 @@ var TestWatcher = class {
   persistent: true,
   ignoreInitial: true
   });
- this.watcher.on("add", (path63) => this.handleFileEvent("add", path63)).on("change", (path63) => this.handleFileEvent("change", path63)).on("unlink", (path63) => this.handleFileEvent("unlink", path63)).on("error", (error) => this.handleError(error));
+ this.watcher.on("add", (path64) => this.handleFileEvent("add", path64)).on("change", (path64) => this.handleFileEvent("change", path64)).on("unlink", (path64) => this.handleFileEvent("unlink", path64)).on("error", (error) => this.handleError(error));
   this.logger.success("\u2705 Test watcher started");
   this.logger.info(chalk.dim(" Press Ctrl+C to stop\n"));
   }
@@ -34912,26 +35721,26 @@ var TestWatcher = class {
   /**
   * Handle file system events with debouncing
   */
- handleFileEvent(type, path63) {
- const existingTimer = this.debounceTimers.get(path63);
+ handleFileEvent(type, path64) {
+ const existingTimer = this.debounceTimers.get(path64);
   if (existingTimer) {
   clearTimeout(existingTimer);
   }
   const timer = setTimeout(() => {
- this.processFileEvent(type, path63);
- this.debounceTimers.delete(path63);
+ this.processFileEvent(type, path64);
+ this.debounceTimers.delete(path64);
   }, this.options.debounceMs);
- this.debounceTimers.set(path63, timer);
+ this.debounceTimers.set(path64, timer);
   }
   /**
   * Process file system event
   */
- async processFileEvent(type, path63) {
+ async processFileEvent(type, path64) {
   if (this.isRunning) {
   this.logger.warn(chalk.yellow("\u23F3 Tests already running, skipping..."));
   return;
   }
- const fileName = path63.split("/").pop() || path63;
+ const fileName = path64.split("/").pop() || path64;
   switch (type) {
   case "add":
   this.logger.info(chalk.green(`\u2795 Test added: ${fileName}`));
@@ -34944,7 +35753,7 @@ var TestWatcher = class {
   return;
   }
   if (this.options.autoRun) {
- await this.runTests(path63);
+ await this.runTests(path64);
   }
   }
   /**
@@ -35233,7 +36042,7 @@ var WorkflowFinalStatusOutputSchema = z.object({
   errorMessage: z.string(),
   cleanupResult: JobResultSchema.optional()
   }).strict();
- async function writeGitHubOutput4(values) {
+ async function writeGitHubOutput2(values) {
   const outFile = process.env.GITHUB_OUTPUT;
   if (!outFile) {
   throw new CLIError(
@@ -35313,7 +36122,7 @@ var workflowFinalStatusCommand = new Command("final-status").description("Aggreg
   serverUrl
   });
   if (options.githubOutput === true) {
- await writeGitHubOutput4({ status: out.status, error_message: out.errorMessage });
+ await writeGitHubOutput2({ status: out.status, error_message: out.errorMessage });
   }
   if (getOutputFormatFromEnv() === "json") {
   emitJsonSuccess(workflowFinalStatusCommand, WorkflowFinalStatusOutputSchema, out);
@@ -35659,7 +36468,7 @@ var WorkflowPathsOutputSchema = z.object({
   dbStack: z.enum(["drizzle", "supabase"]),
   dbRoot: z.string().min(1)
   }).strict();
- async function writeGitHubOutput5(values) {
+ async function writeGitHubOutput3(values) {
   const outFile = process.env.GITHUB_OUTPUT;
   if (!outFile) {
   throw new CLIError(
@@ -35686,7 +36495,7 @@ var workflowPathsCommand = new Command("paths").description("Detect project path
   dbRoot: paths.rootDir
   };
   if (options.githubOutput === true) {
- await writeGitHubOutput5({
+ await writeGitHubOutput3({
   app_dir: out.appDir,
   db_stack: out.dbStack,
   db_root: out.dbRoot