@probelabs/visor 0.1.152 → 0.1.153-ee

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. package/dist/config.d.ts.map +1 -1
  2. package/dist/docs/ai-configuration.md +4 -4
  3. package/dist/docs/sandbox-engines.md +53 -5
  4. package/dist/examples/ai-with-bash.yaml +2 -2
  5. package/dist/examples/sandbox-bind-paths.yaml +31 -0
  6. package/dist/examples/sandbox-host-workdir.yaml +27 -0
  7. package/dist/examples/workflow-sandbox.yaml +43 -0
  8. package/dist/generated/config-schema.d.ts +38 -8
  9. package/dist/generated/config-schema.d.ts.map +1 -1
  10. package/dist/generated/config-schema.json +40 -8
  11. package/dist/index.js +1987 -58
  12. package/dist/sandbox/bubblewrap-sandbox.d.ts.map +1 -1
  13. package/dist/sandbox/check-runner.d.ts.map +1 -1
  14. package/dist/sandbox/docker-image-sandbox.d.ts.map +1 -1
  15. package/dist/sandbox/seatbelt-sandbox.d.ts.map +1 -1
  16. package/dist/sandbox/types.d.ts +13 -0
  17. package/dist/sandbox/types.d.ts.map +1 -1
  18. package/dist/scheduler/schedule-tool.d.ts +2 -0
  19. package/dist/scheduler/schedule-tool.d.ts.map +1 -1
  20. package/dist/sdk/{check-provider-registry-QBURXJ6B.mjs → check-provider-registry-XZE5PRSE.mjs} +3 -3
  21. package/dist/sdk/{check-provider-registry-DCZR46KQ.mjs → check-provider-registry-ZOGNKTC3.mjs} +3 -3
  22. package/dist/sdk/{chunk-FYK2DJK6.mjs → chunk-CPYQDJ27.mjs} +86 -26
  23. package/dist/sdk/chunk-CPYQDJ27.mjs.map +1 -0
  24. package/dist/sdk/{chunk-XJZKNTKZ.mjs → chunk-DXNWUIAN.mjs} +132 -93
  25. package/dist/sdk/chunk-DXNWUIAN.mjs.map +1 -0
  26. package/dist/sdk/{chunk-QRR6OJQN.mjs → chunk-EYQWEVZF.mjs} +130 -91
  27. package/dist/sdk/chunk-EYQWEVZF.mjs.map +1 -0
  28. package/dist/sdk/{config-MTEIGCOQ.mjs → config-SW3VO4DQ.mjs} +2 -2
  29. package/dist/sdk/{host-WNCX3MPT.mjs → host-X7ID5EW7.mjs} +2 -2
  30. package/dist/sdk/knex-store-CRORFJE6.mjs +527 -0
  31. package/dist/sdk/knex-store-CRORFJE6.mjs.map +1 -0
  32. package/dist/sdk/loader-NJCF7DUS.mjs +89 -0
  33. package/dist/sdk/loader-NJCF7DUS.mjs.map +1 -0
  34. package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs +655 -0
  35. package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs.map +1 -0
  36. package/dist/sdk/{schedule-tool-IJC2TSKU.mjs → schedule-tool-74VMD77T.mjs} +3 -3
  37. package/dist/sdk/{schedule-tool-62XTFB6K.mjs → schedule-tool-WSZKJUNQ.mjs} +3 -3
  38. package/dist/sdk/{schedule-tool-handler-5K275UT6.mjs → schedule-tool-handler-EOQBRZSD.mjs} +3 -3
  39. package/dist/sdk/{schedule-tool-handler-T4L2ECBA.mjs → schedule-tool-handler-Q7E5NBHS.mjs} +3 -3
  40. package/dist/sdk/sdk.d.mts +15 -2
  41. package/dist/sdk/sdk.d.ts +15 -2
  42. package/dist/sdk/sdk.js +1765 -292
  43. package/dist/sdk/sdk.js.map +1 -1
  44. package/dist/sdk/sdk.mjs +5 -5
  45. package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
  46. package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
  47. package/dist/sdk/{workflow-check-provider-6TEZHBZJ.mjs → workflow-check-provider-GIW4WECT.mjs} +3 -3
  48. package/dist/sdk/{workflow-check-provider-FONJYRMR.mjs → workflow-check-provider-JHHACJHF.mjs} +3 -3
  49. package/dist/state-machine/workflow-projection.d.ts.map +1 -1
  50. package/dist/types/config.d.ts +2 -2
  51. package/dist/types/config.d.ts.map +1 -1
  52. package/dist/types/workflow.d.ts +8 -0
  53. package/dist/types/workflow.d.ts.map +1 -1
  54. package/package.json +1 -1
  55. package/dist/output/traces/run-2026-03-04T06-46-24-105Z.ndjson +0 -138
  56. package/dist/output/traces/run-2026-03-04T06-47-08-884Z.ndjson +0 -2197
  57. package/dist/sdk/check-provider-registry-VY5ZZAEU.mjs +0 -29
  58. package/dist/sdk/chunk-6VQ73GYD.mjs +0 -443
  59. package/dist/sdk/chunk-6VQ73GYD.mjs.map +0 -1
  60. package/dist/sdk/chunk-FP3RZSLW.mjs +0 -739
  61. package/dist/sdk/chunk-FP3RZSLW.mjs.map +0 -1
  62. package/dist/sdk/chunk-FYK2DJK6.mjs.map +0 -1
  63. package/dist/sdk/chunk-LLVVHYIP.mjs +0 -1502
  64. package/dist/sdk/chunk-LLVVHYIP.mjs.map +0 -1
  65. package/dist/sdk/chunk-PCI4FXAO.mjs +0 -43715
  66. package/dist/sdk/chunk-PCI4FXAO.mjs.map +0 -1
  67. package/dist/sdk/chunk-QRR6OJQN.mjs.map +0 -1
  68. package/dist/sdk/chunk-XJZKNTKZ.mjs.map +0 -1
  69. package/dist/sdk/failure-condition-evaluator-TV227HAG.mjs +0 -17
  70. package/dist/sdk/github-frontend-YLSS5NQ7.mjs +0 -1368
  71. package/dist/sdk/github-frontend-YLSS5NQ7.mjs.map +0 -1
  72. package/dist/sdk/routing-FKWK5BHS.mjs +0 -25
  73. package/dist/sdk/schedule-tool-VOZ536P4.mjs +0 -35
  74. package/dist/sdk/schedule-tool-handler-5K275UT6.mjs.map +0 -1
  75. package/dist/sdk/schedule-tool-handler-T4L2ECBA.mjs.map +0 -1
  76. package/dist/sdk/schedule-tool-handler-WBIZSBGJ.mjs +0 -39
  77. package/dist/sdk/schedule-tool-handler-WBIZSBGJ.mjs.map +0 -1
  78. package/dist/sdk/trace-helpers-W33WMBL7.mjs +0 -25
  79. package/dist/sdk/trace-helpers-W33WMBL7.mjs.map +0 -1
  80. package/dist/sdk/workflow-check-provider-6TEZHBZJ.mjs.map +0 -1
  81. package/dist/sdk/workflow-check-provider-FONJYRMR.mjs.map +0 -1
  82. package/dist/sdk/workflow-check-provider-LREOGGTH.mjs +0 -29
  83. package/dist/sdk/workflow-check-provider-LREOGGTH.mjs.map +0 -1
  84. package/dist/traces/run-2026-03-04T06-46-24-105Z.ndjson +0 -138
  85. package/dist/traces/run-2026-03-04T06-47-08-884Z.ndjson +0 -2197
  86. /package/dist/sdk/{check-provider-registry-DCZR46KQ.mjs.map → check-provider-registry-XZE5PRSE.mjs.map} +0 -0
  87. /package/dist/sdk/{check-provider-registry-QBURXJ6B.mjs.map → check-provider-registry-ZOGNKTC3.mjs.map} +0 -0
  88. /package/dist/sdk/{check-provider-registry-VY5ZZAEU.mjs.map → config-SW3VO4DQ.mjs.map} +0 -0
  89. /package/dist/sdk/{host-WNCX3MPT.mjs.map → host-X7ID5EW7.mjs.map} +0 -0
  90. /package/dist/sdk/{config-MTEIGCOQ.mjs.map → schedule-tool-74VMD77T.mjs.map} +0 -0
  91. /package/dist/sdk/{failure-condition-evaluator-TV227HAG.mjs.map → schedule-tool-WSZKJUNQ.mjs.map} +0 -0
  92. /package/dist/sdk/{routing-FKWK5BHS.mjs.map → schedule-tool-handler-EOQBRZSD.mjs.map} +0 -0
  93. /package/dist/sdk/{schedule-tool-62XTFB6K.mjs.map → schedule-tool-handler-Q7E5NBHS.mjs.map} +0 -0
  94. /package/dist/sdk/{schedule-tool-IJC2TSKU.mjs.map → workflow-check-provider-GIW4WECT.mjs.map} +0 -0
  95. /package/dist/sdk/{schedule-tool-VOZ536P4.mjs.map → workflow-check-provider-JHHACJHF.mjs.map} +0 -0
package/dist/sdk/sdk.js CHANGED
@@ -646,7 +646,7 @@ var require_package = __commonJS({
646
646
  "package.json"(exports2, module2) {
647
647
  module2.exports = {
648
648
  name: "@probelabs/visor",
649
- version: "0.1.152",
649
+ version: "0.1.42",
650
650
  main: "dist/index.js",
651
651
  bin: {
652
652
  visor: "./dist/index.js"
@@ -864,11 +864,11 @@ function getTracer() {
864
864
  }
865
865
  async function withActiveSpan(name, attrs, fn) {
866
866
  const tracer = getTracer();
867
- return await new Promise((resolve14, reject) => {
867
+ return await new Promise((resolve19, reject) => {
868
868
  const callback = async (span) => {
869
869
  try {
870
870
  const res = await fn(span);
871
- resolve14(res);
871
+ resolve19(res);
872
872
  } catch (err) {
873
873
  try {
874
874
  if (err instanceof Error) span.recordException(err);
@@ -945,19 +945,19 @@ function __getOrCreateNdjsonPath() {
945
945
  try {
946
946
  if (process.env.VISOR_TELEMETRY_SINK && process.env.VISOR_TELEMETRY_SINK !== "file")
947
947
  return null;
948
- const path27 = require("path");
949
- const fs23 = require("fs");
948
+ const path31 = require("path");
949
+ const fs27 = require("fs");
950
950
  if (process.env.VISOR_FALLBACK_TRACE_FILE) {
951
951
  __ndjsonPath = process.env.VISOR_FALLBACK_TRACE_FILE;
952
- const dir = path27.dirname(__ndjsonPath);
953
- if (!fs23.existsSync(dir)) fs23.mkdirSync(dir, { recursive: true });
952
+ const dir = path31.dirname(__ndjsonPath);
953
+ if (!fs27.existsSync(dir)) fs27.mkdirSync(dir, { recursive: true });
954
954
  return __ndjsonPath;
955
955
  }
956
- const outDir = process.env.VISOR_TRACE_DIR || path27.join(process.cwd(), "output", "traces");
957
- if (!fs23.existsSync(outDir)) fs23.mkdirSync(outDir, { recursive: true });
956
+ const outDir = process.env.VISOR_TRACE_DIR || path31.join(process.cwd(), "output", "traces");
957
+ if (!fs27.existsSync(outDir)) fs27.mkdirSync(outDir, { recursive: true });
958
958
  if (!__ndjsonPath) {
959
959
  const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
960
- __ndjsonPath = path27.join(outDir, `${ts}.ndjson`);
960
+ __ndjsonPath = path31.join(outDir, `${ts}.ndjson`);
961
961
  }
962
962
  return __ndjsonPath;
963
963
  } catch {
@@ -966,11 +966,11 @@ function __getOrCreateNdjsonPath() {
966
966
  }
967
967
  function _appendRunMarker() {
968
968
  try {
969
- const fs23 = require("fs");
969
+ const fs27 = require("fs");
970
970
  const p = __getOrCreateNdjsonPath();
971
971
  if (!p) return;
972
972
  const line = { name: "visor.run", attributes: { started: true } };
973
- fs23.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
973
+ fs27.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
974
974
  } catch {
975
975
  }
976
976
  }
@@ -3193,7 +3193,7 @@ var init_failure_condition_evaluator = __esm({
3193
3193
  */
3194
3194
  evaluateExpression(condition, context2) {
3195
3195
  try {
3196
- const normalize4 = (expr) => {
3196
+ const normalize8 = (expr) => {
3197
3197
  const trimmed = expr.trim();
3198
3198
  if (!/[\n;]/.test(trimmed)) return trimmed;
3199
3199
  const parts = trimmed.split(/[\n;]+/).map((s) => s.trim()).filter((s) => s.length > 0 && !s.startsWith("//"));
@@ -3351,7 +3351,7 @@ var init_failure_condition_evaluator = __esm({
3351
3351
  try {
3352
3352
  exec2 = this.sandbox.compile(`return (${raw});`);
3353
3353
  } catch {
3354
- const normalizedExpr = normalize4(condition);
3354
+ const normalizedExpr = normalize8(condition);
3355
3355
  exec2 = this.sandbox.compile(`return (${normalizedExpr});`);
3356
3356
  }
3357
3357
  const result = exec2(scope).run();
@@ -3734,9 +3734,9 @@ function configureLiquidWithExtensions(liquid) {
3734
3734
  });
3735
3735
  liquid.registerFilter("get", (obj, pathExpr) => {
3736
3736
  if (obj == null) return void 0;
3737
- const path27 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
3738
- if (!path27) return obj;
3739
- const parts = path27.split(".");
3737
+ const path31 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
3738
+ if (!path31) return obj;
3739
+ const parts = path31.split(".");
3740
3740
  let cur = obj;
3741
3741
  for (const p of parts) {
3742
3742
  if (cur == null) return void 0;
@@ -3855,9 +3855,9 @@ function configureLiquidWithExtensions(liquid) {
3855
3855
  }
3856
3856
  }
3857
3857
  const defaultRole = typeof rolesCfg.default === "string" && rolesCfg.default.trim() ? rolesCfg.default.trim() : void 0;
3858
- const getNested = (obj, path27) => {
3859
- if (!obj || !path27) return void 0;
3860
- const parts = path27.split(".");
3858
+ const getNested = (obj, path31) => {
3859
+ if (!obj || !path31) return void 0;
3860
+ const parts = path31.split(".");
3861
3861
  let cur = obj;
3862
3862
  for (const p of parts) {
3863
3863
  if (cur == null) return void 0;
@@ -6039,7 +6039,7 @@ var init_check_runner = __esm({
6039
6039
  sandboxConfig.env_passthrough,
6040
6040
  workspaceDefaults?.env_passthrough
6041
6041
  );
6042
- const workdir = sandboxConfig.workdir || "/workspace";
6042
+ const workdir = sandboxConfig.workdir === "host" ? sandboxManager.getRepoPath() : sandboxConfig.workdir || "/workspace";
6043
6043
  let hostTracePath;
6044
6044
  if (!sandboxConfig.read_only) {
6045
6045
  const traceFileName = `.visor-trace-${(0, import_crypto.randomUUID)().slice(0, 8)}.ndjson`;
@@ -6409,8 +6409,8 @@ var init_dependency_gating = __esm({
6409
6409
  async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
6410
6410
  try {
6411
6411
  const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
6412
- const fs23 = await import("fs/promises");
6413
- const path27 = await import("path");
6412
+ const fs27 = await import("fs/promises");
6413
+ const path31 = await import("path");
6414
6414
  const schemaRaw = checkConfig.schema || "plain";
6415
6415
  const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
6416
6416
  let templateContent;
@@ -6418,24 +6418,24 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
6418
6418
  templateContent = String(checkConfig.template.content);
6419
6419
  } else if (checkConfig.template && checkConfig.template.file) {
6420
6420
  const file = String(checkConfig.template.file);
6421
- const resolved = path27.resolve(process.cwd(), file);
6422
- templateContent = await fs23.readFile(resolved, "utf-8");
6421
+ const resolved = path31.resolve(process.cwd(), file);
6422
+ templateContent = await fs27.readFile(resolved, "utf-8");
6423
6423
  } else if (schema && schema !== "plain") {
6424
6424
  const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
6425
6425
  if (sanitized) {
6426
6426
  const candidatePaths = [
6427
- path27.join(__dirname, "output", sanitized, "template.liquid"),
6427
+ path31.join(__dirname, "output", sanitized, "template.liquid"),
6428
6428
  // bundled: dist/output/
6429
- path27.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
6429
+ path31.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
6430
6430
  // source: output/
6431
- path27.join(process.cwd(), "output", sanitized, "template.liquid"),
6431
+ path31.join(process.cwd(), "output", sanitized, "template.liquid"),
6432
6432
  // fallback: cwd/output/
6433
- path27.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
6433
+ path31.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
6434
6434
  // fallback: cwd/dist/output/
6435
6435
  ];
6436
6436
  for (const p of candidatePaths) {
6437
6437
  try {
6438
- templateContent = await fs23.readFile(p, "utf-8");
6438
+ templateContent = await fs27.readFile(p, "utf-8");
6439
6439
  if (templateContent) break;
6440
6440
  } catch {
6441
6441
  }
@@ -6840,7 +6840,7 @@ async function processDiffWithOutline(diffContent) {
6840
6840
  }
6841
6841
  try {
6842
6842
  const originalProbePath = process.env.PROBE_PATH;
6843
- const fs23 = require("fs");
6843
+ const fs27 = require("fs");
6844
6844
  const possiblePaths = [
6845
6845
  // Relative to current working directory (most common in production)
6846
6846
  path6.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
@@ -6851,7 +6851,7 @@ async function processDiffWithOutline(diffContent) {
6851
6851
  ];
6852
6852
  let probeBinaryPath;
6853
6853
  for (const candidatePath of possiblePaths) {
6854
- if (fs23.existsSync(candidatePath)) {
6854
+ if (fs27.existsSync(candidatePath)) {
6855
6855
  probeBinaryPath = candidatePath;
6856
6856
  break;
6857
6857
  }
@@ -6972,7 +6972,7 @@ async function renderMermaidToPng(mermaidCode) {
6972
6972
  if (chromiumPath) {
6973
6973
  env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;
6974
6974
  }
6975
- const result = await new Promise((resolve14) => {
6975
+ const result = await new Promise((resolve19) => {
6976
6976
  const proc = (0, import_child_process.spawn)(
6977
6977
  "npx",
6978
6978
  [
@@ -7002,13 +7002,13 @@ async function renderMermaidToPng(mermaidCode) {
7002
7002
  });
7003
7003
  proc.on("close", (code) => {
7004
7004
  if (code === 0) {
7005
- resolve14({ success: true });
7005
+ resolve19({ success: true });
7006
7006
  } else {
7007
- resolve14({ success: false, error: stderr || `Exit code ${code}` });
7007
+ resolve19({ success: false, error: stderr || `Exit code ${code}` });
7008
7008
  }
7009
7009
  });
7010
7010
  proc.on("error", (err) => {
7011
- resolve14({ success: false, error: err.message });
7011
+ resolve19({ success: false, error: err.message });
7012
7012
  });
7013
7013
  });
7014
7014
  if (!result.success) {
@@ -8170,8 +8170,8 @@ ${schemaString}`);
8170
8170
  }
8171
8171
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8172
8172
  try {
8173
- const fs23 = require("fs");
8174
- const path27 = require("path");
8173
+ const fs27 = require("fs");
8174
+ const path31 = require("path");
8175
8175
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8176
8176
  const provider = this.config.provider || "auto";
8177
8177
  const model = this.config.model || "default";
@@ -8285,20 +8285,20 @@ ${"=".repeat(60)}
8285
8285
  `;
8286
8286
  readableVersion += `${"=".repeat(60)}
8287
8287
  `;
8288
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8289
- if (!fs23.existsSync(debugArtifactsDir)) {
8290
- fs23.mkdirSync(debugArtifactsDir, { recursive: true });
8288
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8289
+ if (!fs27.existsSync(debugArtifactsDir)) {
8290
+ fs27.mkdirSync(debugArtifactsDir, { recursive: true });
8291
8291
  }
8292
- const debugFile = path27.join(
8292
+ const debugFile = path31.join(
8293
8293
  debugArtifactsDir,
8294
8294
  `prompt-${_checkName || "unknown"}-${timestamp}.json`
8295
8295
  );
8296
- fs23.writeFileSync(debugFile, debugJson, "utf-8");
8297
- const readableFile = path27.join(
8296
+ fs27.writeFileSync(debugFile, debugJson, "utf-8");
8297
+ const readableFile = path31.join(
8298
8298
  debugArtifactsDir,
8299
8299
  `prompt-${_checkName || "unknown"}-${timestamp}.txt`
8300
8300
  );
8301
- fs23.writeFileSync(readableFile, readableVersion, "utf-8");
8301
+ fs27.writeFileSync(readableFile, readableVersion, "utf-8");
8302
8302
  log(`
8303
8303
  \u{1F4BE} Full debug info saved to:`);
8304
8304
  log(` JSON: ${debugFile}`);
@@ -8331,8 +8331,8 @@ ${"=".repeat(60)}
8331
8331
  log(`\u{1F4E4} Response length: ${response.length} characters`);
8332
8332
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8333
8333
  try {
8334
- const fs23 = require("fs");
8335
- const path27 = require("path");
8334
+ const fs27 = require("fs");
8335
+ const path31 = require("path");
8336
8336
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8337
8337
  const agentAny2 = agent;
8338
8338
  let fullHistory = [];
@@ -8343,8 +8343,8 @@ ${"=".repeat(60)}
8343
8343
  } else if (agentAny2._messages) {
8344
8344
  fullHistory = agentAny2._messages;
8345
8345
  }
8346
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8347
- const sessionBase = path27.join(
8346
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8347
+ const sessionBase = path31.join(
8348
8348
  debugArtifactsDir,
8349
8349
  `session-${_checkName || "unknown"}-${timestamp}`
8350
8350
  );
@@ -8356,7 +8356,7 @@ ${"=".repeat(60)}
8356
8356
  schema: effectiveSchema,
8357
8357
  totalMessages: fullHistory.length
8358
8358
  };
8359
- fs23.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8359
+ fs27.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8360
8360
  let readable = `=============================================================
8361
8361
  `;
8362
8362
  readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
@@ -8383,7 +8383,7 @@ ${"=".repeat(60)}
8383
8383
  `;
8384
8384
  readable += content + "\n";
8385
8385
  });
8386
- fs23.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8386
+ fs27.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8387
8387
  log(`\u{1F4BE} Complete session history saved:`);
8388
8388
  log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
8389
8389
  } catch (error) {
@@ -8392,11 +8392,11 @@ ${"=".repeat(60)}
8392
8392
  }
8393
8393
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8394
8394
  try {
8395
- const fs23 = require("fs");
8396
- const path27 = require("path");
8395
+ const fs27 = require("fs");
8396
+ const path31 = require("path");
8397
8397
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8398
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8399
- const responseFile = path27.join(
8398
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8399
+ const responseFile = path31.join(
8400
8400
  debugArtifactsDir,
8401
8401
  `response-${_checkName || "unknown"}-${timestamp}.txt`
8402
8402
  );
@@ -8429,7 +8429,7 @@ ${"=".repeat(60)}
8429
8429
  `;
8430
8430
  responseContent += `${"=".repeat(60)}
8431
8431
  `;
8432
- fs23.writeFileSync(responseFile, responseContent, "utf-8");
8432
+ fs27.writeFileSync(responseFile, responseContent, "utf-8");
8433
8433
  log(`\u{1F4BE} Response saved to: ${responseFile}`);
8434
8434
  } catch (error) {
8435
8435
  log(`\u26A0\uFE0F Could not save response file: ${error}`);
@@ -8445,9 +8445,9 @@ ${"=".repeat(60)}
8445
8445
  await agentAny._telemetryConfig.shutdown();
8446
8446
  log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
8447
8447
  if (process.env.GITHUB_ACTIONS) {
8448
- const fs23 = require("fs");
8449
- if (fs23.existsSync(agentAny._traceFilePath)) {
8450
- const stats = fs23.statSync(agentAny._traceFilePath);
8448
+ const fs27 = require("fs");
8449
+ if (fs27.existsSync(agentAny._traceFilePath)) {
8450
+ const stats = fs27.statSync(agentAny._traceFilePath);
8451
8451
  console.log(
8452
8452
  `::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
8453
8453
  );
@@ -8654,9 +8654,9 @@ ${schemaString}`);
8654
8654
  const model = this.config.model || "default";
8655
8655
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8656
8656
  try {
8657
- const fs23 = require("fs");
8658
- const path27 = require("path");
8659
- const os2 = require("os");
8657
+ const fs27 = require("fs");
8658
+ const path31 = require("path");
8659
+ const os3 = require("os");
8660
8660
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8661
8661
  const debugData = {
8662
8662
  timestamp,
@@ -8728,19 +8728,19 @@ ${"=".repeat(60)}
8728
8728
  `;
8729
8729
  readableVersion += `${"=".repeat(60)}
8730
8730
  `;
8731
- const tempDir = os2.tmpdir();
8732
- const promptFile = path27.join(tempDir, `visor-prompt-${timestamp}.txt`);
8733
- fs23.writeFileSync(promptFile, prompt, "utf-8");
8731
+ const tempDir = os3.tmpdir();
8732
+ const promptFile = path31.join(tempDir, `visor-prompt-${timestamp}.txt`);
8733
+ fs27.writeFileSync(promptFile, prompt, "utf-8");
8734
8734
  log(`
8735
8735
  \u{1F4BE} Prompt saved to: ${promptFile}`);
8736
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8736
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8737
8737
  try {
8738
- const base = path27.join(
8738
+ const base = path31.join(
8739
8739
  debugArtifactsDir,
8740
8740
  `prompt-${_checkName || "unknown"}-${timestamp}`
8741
8741
  );
8742
- fs23.writeFileSync(base + ".json", debugJson, "utf-8");
8743
- fs23.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
8742
+ fs27.writeFileSync(base + ".json", debugJson, "utf-8");
8743
+ fs27.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
8744
8744
  log(`
8745
8745
  \u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
8746
8746
  } catch {
@@ -8785,8 +8785,8 @@ $ ${cliCommand}
8785
8785
  log(`\u{1F4E4} Response length: ${response.length} characters`);
8786
8786
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8787
8787
  try {
8788
- const fs23 = require("fs");
8789
- const path27 = require("path");
8788
+ const fs27 = require("fs");
8789
+ const path31 = require("path");
8790
8790
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8791
8791
  const agentAny = agent;
8792
8792
  let fullHistory = [];
@@ -8797,8 +8797,8 @@ $ ${cliCommand}
8797
8797
  } else if (agentAny._messages) {
8798
8798
  fullHistory = agentAny._messages;
8799
8799
  }
8800
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8801
- const sessionBase = path27.join(
8800
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8801
+ const sessionBase = path31.join(
8802
8802
  debugArtifactsDir,
8803
8803
  `session-${_checkName || "unknown"}-${timestamp}`
8804
8804
  );
@@ -8810,7 +8810,7 @@ $ ${cliCommand}
8810
8810
  schema: effectiveSchema,
8811
8811
  totalMessages: fullHistory.length
8812
8812
  };
8813
- fs23.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8813
+ fs27.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8814
8814
  let readable = `=============================================================
8815
8815
  `;
8816
8816
  readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
@@ -8837,7 +8837,7 @@ ${"=".repeat(60)}
8837
8837
  `;
8838
8838
  readable += content + "\n";
8839
8839
  });
8840
- fs23.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8840
+ fs27.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8841
8841
  log(`\u{1F4BE} Complete session history saved:`);
8842
8842
  log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
8843
8843
  } catch (error) {
@@ -8846,11 +8846,11 @@ ${"=".repeat(60)}
8846
8846
  }
8847
8847
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8848
8848
  try {
8849
- const fs23 = require("fs");
8850
- const path27 = require("path");
8849
+ const fs27 = require("fs");
8850
+ const path31 = require("path");
8851
8851
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8852
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8853
- const responseFile = path27.join(
8852
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8853
+ const responseFile = path31.join(
8854
8854
  debugArtifactsDir,
8855
8855
  `response-${_checkName || "unknown"}-${timestamp}.txt`
8856
8856
  );
@@ -8883,7 +8883,7 @@ ${"=".repeat(60)}
8883
8883
  `;
8884
8884
  responseContent += `${"=".repeat(60)}
8885
8885
  `;
8886
- fs23.writeFileSync(responseFile, responseContent, "utf-8");
8886
+ fs27.writeFileSync(responseFile, responseContent, "utf-8");
8887
8887
  log(`\u{1F4BE} Response saved to: ${responseFile}`);
8888
8888
  } catch (error) {
8889
8889
  log(`\u26A0\uFE0F Could not save response file: ${error}`);
@@ -8901,9 +8901,9 @@ ${"=".repeat(60)}
8901
8901
  await telemetry.shutdown();
8902
8902
  log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
8903
8903
  if (process.env.GITHUB_ACTIONS) {
8904
- const fs23 = require("fs");
8905
- if (fs23.existsSync(traceFilePath)) {
8906
- const stats = fs23.statSync(traceFilePath);
8904
+ const fs27 = require("fs");
8905
+ if (fs27.existsSync(traceFilePath)) {
8906
+ const stats = fs27.statSync(traceFilePath);
8907
8907
  console.log(
8908
8908
  `::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
8909
8909
  );
@@ -8941,8 +8941,8 @@ ${"=".repeat(60)}
8941
8941
  * Load schema content from schema files or inline definitions
8942
8942
  */
8943
8943
  async loadSchemaContent(schema) {
8944
- const fs23 = require("fs").promises;
8945
- const path27 = require("path");
8944
+ const fs27 = require("fs").promises;
8945
+ const path31 = require("path");
8946
8946
  if (typeof schema === "object" && schema !== null) {
8947
8947
  log("\u{1F4CB} Using inline schema object from configuration");
8948
8948
  return JSON.stringify(schema);
@@ -8955,14 +8955,14 @@ ${"=".repeat(60)}
8955
8955
  }
8956
8956
  } catch {
8957
8957
  }
8958
- if ((schema.startsWith("./") || schema.includes(".json")) && !path27.isAbsolute(schema)) {
8958
+ if ((schema.startsWith("./") || schema.includes(".json")) && !path31.isAbsolute(schema)) {
8959
8959
  if (schema.includes("..") || schema.includes("\0")) {
8960
8960
  throw new Error("Invalid schema path: path traversal not allowed");
8961
8961
  }
8962
8962
  try {
8963
- const schemaPath = path27.resolve(process.cwd(), schema);
8963
+ const schemaPath = path31.resolve(process.cwd(), schema);
8964
8964
  log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
8965
- const schemaContent = await fs23.readFile(schemaPath, "utf-8");
8965
+ const schemaContent = await fs27.readFile(schemaPath, "utf-8");
8966
8966
  return schemaContent.trim();
8967
8967
  } catch (error) {
8968
8968
  throw new Error(
@@ -8976,22 +8976,22 @@ ${"=".repeat(60)}
8976
8976
  }
8977
8977
  const candidatePaths = [
8978
8978
  // GitHub Action bundle location
8979
- path27.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
8979
+ path31.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
8980
8980
  // Historical fallback when src/output was inadvertently bundled as output1/
8981
- path27.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
8981
+ path31.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
8982
8982
  // Local dev (repo root)
8983
- path27.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
8983
+ path31.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
8984
8984
  ];
8985
8985
  for (const schemaPath of candidatePaths) {
8986
8986
  try {
8987
- const schemaContent = await fs23.readFile(schemaPath, "utf-8");
8987
+ const schemaContent = await fs27.readFile(schemaPath, "utf-8");
8988
8988
  return schemaContent.trim();
8989
8989
  } catch {
8990
8990
  }
8991
8991
  }
8992
- const distPath = path27.join(__dirname, "output", sanitizedSchemaName, "schema.json");
8993
- const distAltPath = path27.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
8994
- const cwdPath = path27.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
8992
+ const distPath = path31.join(__dirname, "output", sanitizedSchemaName, "schema.json");
8993
+ const distAltPath = path31.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
8994
+ const cwdPath = path31.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
8995
8995
  throw new Error(
8996
8996
  `Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
8997
8997
  );
@@ -9236,7 +9236,7 @@ ${"=".repeat(60)}
9236
9236
  * Generate mock response for testing
9237
9237
  */
9238
9238
  async generateMockResponse(_prompt, _checkName, _schema) {
9239
- await new Promise((resolve14) => setTimeout(resolve14, 500));
9239
+ await new Promise((resolve19) => setTimeout(resolve19, 500));
9240
9240
  const name = (_checkName || "").toLowerCase();
9241
9241
  if (name.includes("extract-facts")) {
9242
9242
  const arr = Array.from({ length: 6 }, (_, i) => ({
@@ -9597,7 +9597,7 @@ var init_command_executor = __esm({
9597
9597
  * Execute command with stdin input
9598
9598
  */
9599
9599
  executeWithStdin(command, options) {
9600
- return new Promise((resolve14, reject) => {
9600
+ return new Promise((resolve19, reject) => {
9601
9601
  const childProcess = (0, import_child_process2.exec)(
9602
9602
  command,
9603
9603
  {
@@ -9609,7 +9609,7 @@ var init_command_executor = __esm({
9609
9609
  if (error && error.killed && (error.code === "ETIMEDOUT" || error.signal === "SIGTERM")) {
9610
9610
  reject(new Error(`Command timed out after ${options.timeout || 3e4}ms`));
9611
9611
  } else {
9612
- resolve14({
9612
+ resolve19({
9613
9613
  stdout: stdout || "",
9614
9614
  stderr: stderr || "",
9615
9615
  exitCode: error ? error.code || 1 : 0
@@ -11514,7 +11514,10 @@ function projectWorkflowToGraph(workflow, workflowInputs, _parentCheckId) {
11514
11514
  group_by: "check",
11515
11515
  collapse: false
11516
11516
  }
11517
- }
11517
+ },
11518
+ ...workflow.sandboxes && { sandboxes: workflow.sandboxes },
11519
+ ...workflow.sandbox && { sandbox: workflow.sandbox },
11520
+ ...workflow.sandbox_defaults && { sandbox_defaults: workflow.sandbox_defaults }
11518
11521
  };
11519
11522
  if (logger.isDebugEnabled?.()) {
11520
11523
  logger.debug(
@@ -13163,7 +13166,7 @@ var init_config_schema = __esm({
13163
13166
  description: "Arguments/inputs for the workflow"
13164
13167
  },
13165
13168
  overrides: {
13166
- $ref: "#/definitions/Record%3Cstring%2CPartial%3Cinterface-src_types_config.ts-13509-28103-src_types_config.ts-0-55255%3E%3E",
13169
+ $ref: "#/definitions/Record%3Cstring%2CPartial%3Cinterface-src_types_config.ts-13519-28113-src_types_config.ts-0-55265%3E%3E",
13167
13170
  description: "Override specific step configurations in the workflow"
13168
13171
  },
13169
13172
  output_mapping: {
@@ -13179,7 +13182,7 @@ var init_config_schema = __esm({
13179
13182
  description: "Config file path - alternative to workflow ID (loads a Visor config file as workflow)"
13180
13183
  },
13181
13184
  workflow_overrides: {
13182
- $ref: "#/definitions/Record%3Cstring%2CPartial%3Cinterface-src_types_config.ts-13509-28103-src_types_config.ts-0-55255%3E%3E",
13185
+ $ref: "#/definitions/Record%3Cstring%2CPartial%3Cinterface-src_types_config.ts-13519-28113-src_types_config.ts-0-55265%3E%3E",
13183
13186
  description: "Alias for overrides - workflow step overrides (backward compatibility)"
13184
13187
  },
13185
13188
  ref: {
@@ -13600,11 +13603,11 @@ var init_config_schema = __esm({
13600
13603
  },
13601
13604
  description: "Array of blocked command patterns (e.g., ['rm -rf', 'sudo'])"
13602
13605
  },
13603
- noDefaultAllow: {
13606
+ disableDefaultAllow: {
13604
13607
  type: "boolean",
13605
13608
  description: "Disable default safe command list (use with caution)"
13606
13609
  },
13607
- noDefaultDeny: {
13610
+ disableDefaultDeny: {
13608
13611
  type: "boolean",
13609
13612
  description: "Disable default dangerous command blocklist (use with extreme caution)"
13610
13613
  },
@@ -13868,7 +13871,7 @@ var init_config_schema = __esm({
13868
13871
  description: "Custom output name (defaults to workflow name)"
13869
13872
  },
13870
13873
  overrides: {
13871
- $ref: "#/definitions/Record%3Cstring%2CPartial%3Cinterface-src_types_config.ts-13509-28103-src_types_config.ts-0-55255%3E%3E",
13874
+ $ref: "#/definitions/Record%3Cstring%2CPartial%3Cinterface-src_types_config.ts-13519-28113-src_types_config.ts-0-55265%3E%3E",
13872
13875
  description: "Step overrides"
13873
13876
  },
13874
13877
  output_mapping: {
@@ -13883,13 +13886,13 @@ var init_config_schema = __esm({
13883
13886
  "^x-": {}
13884
13887
  }
13885
13888
  },
13886
- "Record<string,Partial<interface-src_types_config.ts-13509-28103-src_types_config.ts-0-55255>>": {
13889
+ "Record<string,Partial<interface-src_types_config.ts-13519-28113-src_types_config.ts-0-55265>>": {
13887
13890
  type: "object",
13888
13891
  additionalProperties: {
13889
- $ref: "#/definitions/Partial%3Cinterface-src_types_config.ts-13509-28103-src_types_config.ts-0-55255%3E"
13892
+ $ref: "#/definitions/Partial%3Cinterface-src_types_config.ts-13519-28113-src_types_config.ts-0-55265%3E"
13890
13893
  }
13891
13894
  },
13892
- "Partial<interface-src_types_config.ts-13509-28103-src_types_config.ts-0-55255>": {
13895
+ "Partial<interface-src_types_config.ts-13519-28113-src_types_config.ts-0-55265>": {
13893
13896
  type: "object",
13894
13897
  additionalProperties: false
13895
13898
  },
@@ -14595,6 +14598,13 @@ var init_config_schema = __esm({
14595
14598
  cache: {
14596
14599
  $ref: "#/definitions/SandboxCacheConfig",
14597
14600
  description: "Cache volume configuration"
14601
+ },
14602
+ bind_paths: {
14603
+ type: "array",
14604
+ items: {
14605
+ $ref: "#/definitions/SandboxBindPath"
14606
+ },
14607
+ description: "Additional host paths to bind-mount into the sandbox"
14598
14608
  }
14599
14609
  },
14600
14610
  additionalProperties: false,
@@ -14655,6 +14665,29 @@ var init_config_schema = __esm({
14655
14665
  "^x-": {}
14656
14666
  }
14657
14667
  },
14668
+ SandboxBindPath: {
14669
+ type: "object",
14670
+ properties: {
14671
+ host: {
14672
+ type: "string",
14673
+ description: "Host path (supports ~ prefix for home directory)"
14674
+ },
14675
+ container: {
14676
+ type: "string",
14677
+ description: "Container path (defaults to resolved host path)"
14678
+ },
14679
+ read_only: {
14680
+ type: "boolean",
14681
+ description: "Mount as read-only (default: true)"
14682
+ }
14683
+ },
14684
+ required: ["host"],
14685
+ additionalProperties: false,
14686
+ description: "Additional host path to bind-mount into the sandbox",
14687
+ patternProperties: {
14688
+ "^x-": {}
14689
+ }
14690
+ },
14658
14691
  SandboxDefaults: {
14659
14692
  type: "object",
14660
14693
  properties: {
@@ -16028,7 +16061,6 @@ ${errors}`);
16028
16061
  ["compose", config.compose],
16029
16062
  ["service", config.service],
16030
16063
  ["cache", config.cache],
16031
- ["visor_path", config.visor_path],
16032
16064
  ["resources", config.resources]
16033
16065
  ];
16034
16066
  for (const [field, value] of dockerOnlyFields) {
@@ -16074,20 +16106,6 @@ ${errors}`);
16074
16106
  message: `Compose file path '${config.compose}' in sandbox '${name}' must not contain '..' path traversal`
16075
16107
  });
16076
16108
  }
16077
- if (config.visor_path) {
16078
- if (!config.visor_path.startsWith("/")) {
16079
- errors.push({
16080
- field: `sandboxes.${name}.visor_path`,
16081
- message: `visor_path '${config.visor_path}' in sandbox '${name}' must be an absolute path (start with /)`
16082
- });
16083
- }
16084
- if (/\.\./.test(config.visor_path)) {
16085
- errors.push({
16086
- field: `sandboxes.${name}.visor_path`,
16087
- message: `visor_path '${config.visor_path}' in sandbox '${name}' must not contain '..' path traversal`
16088
- });
16089
- }
16090
- }
16091
16109
  if (config.cache?.paths) {
16092
16110
  for (const p of config.cache.paths) {
16093
16111
  if (!p.startsWith("/")) {
@@ -16116,11 +16134,25 @@ ${errors}`);
16116
16134
  }
16117
16135
  }
16118
16136
  }
16119
- if (config.workdir) {
16137
+ if (config.visor_path) {
16138
+ if (!config.visor_path.startsWith("/")) {
16139
+ errors.push({
16140
+ field: `sandboxes.${name}.visor_path`,
16141
+ message: `visor_path '${config.visor_path}' in sandbox '${name}' must be an absolute path (start with /)`
16142
+ });
16143
+ }
16144
+ if (/\.\./.test(config.visor_path)) {
16145
+ errors.push({
16146
+ field: `sandboxes.${name}.visor_path`,
16147
+ message: `visor_path '${config.visor_path}' in sandbox '${name}' must not contain '..' path traversal`
16148
+ });
16149
+ }
16150
+ }
16151
+ if (config.workdir && config.workdir !== "host") {
16120
16152
  if (!config.workdir.startsWith("/")) {
16121
16153
  errors.push({
16122
16154
  field: `sandboxes.${name}.workdir`,
16123
- message: `Workdir '${config.workdir}' in sandbox '${name}' must be an absolute path (start with /)`
16155
+ message: `Workdir '${config.workdir}' in sandbox '${name}' must be an absolute path (start with /) or the literal "host"`
16124
16156
  });
16125
16157
  }
16126
16158
  if (/\.\./.test(config.workdir)) {
@@ -16130,6 +16162,37 @@ ${errors}`);
16130
16162
  });
16131
16163
  }
16132
16164
  }
16165
+ if (config.bind_paths) {
16166
+ for (let i = 0; i < config.bind_paths.length; i++) {
16167
+ const bp = config.bind_paths[i];
16168
+ if (!bp.host) {
16169
+ errors.push({
16170
+ field: `sandboxes.${name}.bind_paths[${i}].host`,
16171
+ message: `bind_paths[${i}] in sandbox '${name}' is missing required 'host' field`
16172
+ });
16173
+ }
16174
+ if (bp.host && /\.\./.test(bp.host)) {
16175
+ errors.push({
16176
+ field: `sandboxes.${name}.bind_paths[${i}].host`,
16177
+ message: `bind_paths[${i}].host '${bp.host}' in sandbox '${name}' must not contain '..' path traversal`
16178
+ });
16179
+ }
16180
+ if (bp.container) {
16181
+ if (!bp.container.startsWith("/")) {
16182
+ errors.push({
16183
+ field: `sandboxes.${name}.bind_paths[${i}].container`,
16184
+ message: `bind_paths[${i}].container '${bp.container}' in sandbox '${name}' must be an absolute path (start with /)`
16185
+ });
16186
+ }
16187
+ if (/\.\./.test(bp.container)) {
16188
+ errors.push({
16189
+ field: `sandboxes.${name}.bind_paths[${i}].container`,
16190
+ message: `bind_paths[${i}].container '${bp.container}' in sandbox '${name}' must not contain '..' path traversal`
16191
+ });
16192
+ }
16193
+ }
16194
+ }
16195
+ }
16133
16196
  }
16134
16197
  /**
16135
16198
  * Validate individual check configuration
@@ -17546,17 +17609,17 @@ var init_workflow_check_provider = __esm({
17546
17609
  * so it can be executed by the state machine as a nested workflow.
17547
17610
  */
17548
17611
  async loadWorkflowFromConfigPath(sourcePath, baseDir) {
17549
- const path27 = require("path");
17550
- const fs23 = require("fs");
17612
+ const path31 = require("path");
17613
+ const fs27 = require("fs");
17551
17614
  const yaml5 = require("js-yaml");
17552
- const resolved = path27.isAbsolute(sourcePath) ? sourcePath : path27.resolve(baseDir, sourcePath);
17553
- if (!fs23.existsSync(resolved)) {
17615
+ const resolved = path31.isAbsolute(sourcePath) ? sourcePath : path31.resolve(baseDir, sourcePath);
17616
+ if (!fs27.existsSync(resolved)) {
17554
17617
  throw new Error(`Workflow config not found at: ${resolved}`);
17555
17618
  }
17556
- const rawContent = fs23.readFileSync(resolved, "utf8");
17619
+ const rawContent = fs27.readFileSync(resolved, "utf8");
17557
17620
  const rawData = yaml5.load(rawContent);
17558
17621
  if (rawData.imports && Array.isArray(rawData.imports)) {
17559
- const configDir = path27.dirname(resolved);
17622
+ const configDir = path31.dirname(resolved);
17560
17623
  for (const source of rawData.imports) {
17561
17624
  const results = await this.registry.import(source, {
17562
17625
  basePath: configDir,
@@ -17586,8 +17649,8 @@ ${errors}`);
17586
17649
  if (!steps || Object.keys(steps).length === 0) {
17587
17650
  throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
17588
17651
  }
17589
- const id = path27.basename(resolved).replace(/\.(ya?ml)$/i, "");
17590
- const name = loaded.name || `Workflow from ${path27.basename(resolved)}`;
17652
+ const id = path31.basename(resolved).replace(/\.(ya?ml)$/i, "");
17653
+ const name = loaded.name || `Workflow from ${path31.basename(resolved)}`;
17591
17654
  const workflowDef = {
17592
17655
  id,
17593
17656
  name,
@@ -18393,8 +18456,8 @@ async function createStoreBackend(storageConfig, haConfig) {
18393
18456
  case "mssql": {
18394
18457
  try {
18395
18458
  const loaderPath = "../../enterprise/loader";
18396
- const { loadEnterpriseStoreBackend } = await import(loaderPath);
18397
- return await loadEnterpriseStoreBackend(driver, storageConfig, haConfig);
18459
+ const { loadEnterpriseStoreBackend: loadEnterpriseStoreBackend2 } = await import(loaderPath);
18460
+ return await loadEnterpriseStoreBackend2(driver, storageConfig, haConfig);
18398
18461
  } catch (err) {
18399
18462
  const msg = err instanceof Error ? err.message : String(err);
18400
18463
  logger.error(`[StoreFactory] Failed to load enterprise ${driver} backend: ${msg}`);
@@ -20236,11 +20299,11 @@ async function handleCreateTrigger(args, context2, store) {
20236
20299
  error: `Available workflows: ${context2.availableWorkflows.slice(0, 5).join(", ")}${context2.availableWorkflows.length > 5 ? "..." : ""}`
20237
20300
  };
20238
20301
  }
20239
- if ((!args.trigger_channels || args.trigger_channels.length === 0) && (!args.trigger_contains || args.trigger_contains.length === 0) && !args.trigger_match) {
20302
+ if ((!args.trigger_channels || args.trigger_channels.length === 0) && (!args.trigger_from || args.trigger_from.length === 0) && (!args.trigger_contains || args.trigger_contains.length === 0) && !args.trigger_match) {
20240
20303
  return {
20241
20304
  success: false,
20242
20305
  message: "Missing trigger filters",
20243
- error: "Please specify at least one filter: trigger_channels, trigger_contains, or trigger_match."
20306
+ error: "Please specify at least one filter: trigger_channels, trigger_from, trigger_contains, or trigger_match."
20244
20307
  };
20245
20308
  }
20246
20309
  const permissionCheck = checkSchedulePermissions(context2, args.workflow);
@@ -20258,6 +20321,7 @@ async function handleCreateTrigger(args, context2, store) {
20258
20321
  creatorName: context2.userName,
20259
20322
  description: args.trigger_description,
20260
20323
  channels: args.trigger_channels,
20324
+ fromUsers: args.trigger_from,
20261
20325
  fromBots: args.trigger_from_bots ?? false,
20262
20326
  contains: args.trigger_contains,
20263
20327
  matchPattern: args.trigger_match,
@@ -20276,7 +20340,8 @@ async function handleCreateTrigger(args, context2, store) {
20276
20340
 
20277
20341
  **Workflow**: ${trigger.workflow}
20278
20342
  **Channels**: ${trigger.channels?.join(", ") || "all"}
20279
- ${trigger.contains?.length ? `**Contains**: ${trigger.contains.join(", ")}
20343
+ ${trigger.fromUsers?.length ? `**From users**: ${trigger.fromUsers.join(", ")}
20344
+ ` : ""}${trigger.contains?.length ? `**Contains**: ${trigger.contains.join(", ")}
20280
20345
  ` : ""}${trigger.matchPattern ? `**Pattern**: /${trigger.matchPattern}/
20281
20346
  ` : ""}${trigger.description ? `**Description**: ${trigger.description}
20282
20347
  ` : ""}
@@ -20402,7 +20467,7 @@ Slack messages in specific channels. Use the create_trigger, list_triggers, dele
20402
20467
  actions for this. Message triggers fire workflows based on message content, channel, sender, and thread scope.
20403
20468
 
20404
20469
  TRIGGER ACTIONS:
20405
- - create_trigger: Create a new message trigger (requires workflow + at least one filter)
20470
+ - create_trigger: Create a new message trigger (requires workflow + at least one filter). Supports filtering by user IDs (trigger_from), channels, keywords, regex, and thread scope.
20406
20471
  - list_triggers: Show user's message triggers
20407
20472
  - delete_trigger: Remove a trigger by ID
20408
20473
  - update_trigger: Enable/disable a trigger by ID
@@ -20499,6 +20564,9 @@ User: "cancel schedule abc123"
20499
20564
  User: "watch #cicd for messages containing 'failed' and run %handle-cicd"
20500
20565
  \u2192 { "action": "create_trigger", "trigger_channels": ["C0CICD"], "trigger_contains": ["failed"], "workflow": "handle-cicd" }
20501
20566
 
20567
+ User: "trigger on each of my messages in this channel and run %auto-reply" (user ID is U3P2L4XNE)
20568
+ \u2192 { "action": "create_trigger", "trigger_channels": ["C09V810NY6R"], "trigger_from": ["U3P2L4XNE"], "workflow": "auto-reply" }
20569
+
20502
20570
  User: "list my message triggers"
20503
20571
  \u2192 { "action": "list_triggers" }
20504
20572
 
@@ -20580,6 +20648,11 @@ User: "disable trigger abc123"
20580
20648
  items: { type: "string" },
20581
20649
  description: 'For create_trigger: Slack channel IDs to monitor (e.g., ["C0CICD"]). Supports wildcard suffix (e.g., "CENG*").'
20582
20650
  },
20651
+ trigger_from: {
20652
+ type: "array",
20653
+ items: { type: "string" },
20654
+ description: 'For create_trigger: Slack user IDs to filter by. Only messages from these users will trigger the workflow. E.g., ["U3P2L4XNE"]. If omitted, messages from any user will trigger.'
20655
+ },
20583
20656
  trigger_from_bots: {
20584
20657
  type: "boolean",
20585
20658
  description: "For create_trigger: allow bot messages to trigger (default: false)"
@@ -20938,7 +21011,7 @@ var init_mcp_custom_sse_server = __esm({
20938
21011
  * Returns the actual bound port number
20939
21012
  */
20940
21013
  async start() {
20941
- return new Promise((resolve14, reject) => {
21014
+ return new Promise((resolve19, reject) => {
20942
21015
  try {
20943
21016
  this.server = import_http.default.createServer((req, res) => {
20944
21017
  this.handleRequest(req, res).catch((error) => {
@@ -20972,7 +21045,7 @@ var init_mcp_custom_sse_server = __esm({
20972
21045
  );
20973
21046
  }
20974
21047
  this.startKeepalive();
20975
- resolve14(this.port);
21048
+ resolve19(this.port);
20976
21049
  });
20977
21050
  } catch (error) {
20978
21051
  reject(error);
@@ -21035,7 +21108,7 @@ var init_mcp_custom_sse_server = __esm({
21035
21108
  logger.debug(
21036
21109
  `[CustomToolsSSEServer:${this.sessionId}] Grace period before stop: ${waitMs}ms (activeToolCalls=${this.activeToolCalls})`
21037
21110
  );
21038
- await new Promise((resolve14) => setTimeout(resolve14, waitMs));
21111
+ await new Promise((resolve19) => setTimeout(resolve19, waitMs));
21039
21112
  }
21040
21113
  }
21041
21114
  if (this.activeToolCalls > 0) {
@@ -21044,7 +21117,7 @@ var init_mcp_custom_sse_server = __esm({
21044
21117
  `[CustomToolsSSEServer:${this.sessionId}] Waiting for ${this.activeToolCalls} active tool call(s) before stop`
21045
21118
  );
21046
21119
  while (this.activeToolCalls > 0 && Date.now() - startedAt < effectiveDrainTimeoutMs) {
21047
- await new Promise((resolve14) => setTimeout(resolve14, 250));
21120
+ await new Promise((resolve19) => setTimeout(resolve19, 250));
21048
21121
  }
21049
21122
  if (this.activeToolCalls > 0) {
21050
21123
  logger.warn(
@@ -21069,21 +21142,21 @@ var init_mcp_custom_sse_server = __esm({
21069
21142
  }
21070
21143
  this.connections.clear();
21071
21144
  if (this.server) {
21072
- await new Promise((resolve14, reject) => {
21145
+ await new Promise((resolve19, reject) => {
21073
21146
  const timeout = setTimeout(() => {
21074
21147
  if (this.debug) {
21075
21148
  logger.debug(
21076
21149
  `[CustomToolsSSEServer:${this.sessionId}] Force closing server after timeout`
21077
21150
  );
21078
21151
  }
21079
- this.server?.close(() => resolve14());
21152
+ this.server?.close(() => resolve19());
21080
21153
  }, 5e3);
21081
21154
  this.server.close((error) => {
21082
21155
  clearTimeout(timeout);
21083
21156
  if (error) {
21084
21157
  reject(error);
21085
21158
  } else {
21086
- resolve14();
21159
+ resolve19();
21087
21160
  }
21088
21161
  });
21089
21162
  });
@@ -21509,7 +21582,7 @@ var init_mcp_custom_sse_server = __esm({
21509
21582
  logger.warn(
21510
21583
  `[CustomToolsSSEServer:${this.sessionId}] Tool ${toolName} failed (attempt ${attempt + 1}/${retryCount + 1}): ${errorMsg}. Retrying in ${delay}ms`
21511
21584
  );
21512
- await new Promise((resolve14) => setTimeout(resolve14, delay));
21585
+ await new Promise((resolve19) => setTimeout(resolve19, delay));
21513
21586
  attempt++;
21514
21587
  }
21515
21588
  }
@@ -21822,9 +21895,9 @@ var init_ai_check_provider = __esm({
21822
21895
  } else {
21823
21896
  resolvedPath = import_path7.default.resolve(process.cwd(), str);
21824
21897
  }
21825
- const fs23 = require("fs").promises;
21898
+ const fs27 = require("fs").promises;
21826
21899
  try {
21827
- const stat2 = await fs23.stat(resolvedPath);
21900
+ const stat2 = await fs27.stat(resolvedPath);
21828
21901
  return stat2.isFile();
21829
21902
  } catch {
21830
21903
  return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
@@ -27752,14 +27825,14 @@ var require_util = __commonJS({
27752
27825
  }
27753
27826
  const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
27754
27827
  let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
27755
- let path27 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
27828
+ let path31 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
27756
27829
  if (origin.endsWith("/")) {
27757
27830
  origin = origin.substring(0, origin.length - 1);
27758
27831
  }
27759
- if (path27 && !path27.startsWith("/")) {
27760
- path27 = `/${path27}`;
27832
+ if (path31 && !path31.startsWith("/")) {
27833
+ path31 = `/${path31}`;
27761
27834
  }
27762
- url = new URL(origin + path27);
27835
+ url = new URL(origin + path31);
27763
27836
  }
27764
27837
  return url;
27765
27838
  }
@@ -29373,20 +29446,20 @@ var require_parseParams = __commonJS({
29373
29446
  var require_basename = __commonJS({
29374
29447
  "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
29375
29448
  "use strict";
29376
- module2.exports = function basename4(path27) {
29377
- if (typeof path27 !== "string") {
29449
+ module2.exports = function basename4(path31) {
29450
+ if (typeof path31 !== "string") {
29378
29451
  return "";
29379
29452
  }
29380
- for (var i = path27.length - 1; i >= 0; --i) {
29381
- switch (path27.charCodeAt(i)) {
29453
+ for (var i = path31.length - 1; i >= 0; --i) {
29454
+ switch (path31.charCodeAt(i)) {
29382
29455
  case 47:
29383
29456
  // '/'
29384
29457
  case 92:
29385
- path27 = path27.slice(i + 1);
29386
- return path27 === ".." || path27 === "." ? "" : path27;
29458
+ path31 = path31.slice(i + 1);
29459
+ return path31 === ".." || path31 === "." ? "" : path31;
29387
29460
  }
29388
29461
  }
29389
- return path27 === ".." || path27 === "." ? "" : path27;
29462
+ return path31 === ".." || path31 === "." ? "" : path31;
29390
29463
  };
29391
29464
  }
29392
29465
  });
@@ -30390,11 +30463,11 @@ var require_util2 = __commonJS({
30390
30463
  var assert = require("assert");
30391
30464
  var { isUint8Array } = require("util/types");
30392
30465
  var supportedHashes = [];
30393
- var crypto2;
30466
+ var crypto4;
30394
30467
  try {
30395
- crypto2 = require("crypto");
30468
+ crypto4 = require("crypto");
30396
30469
  const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
30397
- supportedHashes = crypto2.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
30470
+ supportedHashes = crypto4.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
30398
30471
  } catch {
30399
30472
  }
30400
30473
  function responseURL(response) {
@@ -30671,7 +30744,7 @@ var require_util2 = __commonJS({
30671
30744
  }
30672
30745
  }
30673
30746
  function bytesMatch(bytes, metadataList) {
30674
- if (crypto2 === void 0) {
30747
+ if (crypto4 === void 0) {
30675
30748
  return true;
30676
30749
  }
30677
30750
  const parsedMetadata = parseMetadata(metadataList);
@@ -30686,7 +30759,7 @@ var require_util2 = __commonJS({
30686
30759
  for (const item of metadata) {
30687
30760
  const algorithm = item.algo;
30688
30761
  const expectedValue = item.hash;
30689
- let actualValue = crypto2.createHash(algorithm).update(bytes).digest("base64");
30762
+ let actualValue = crypto4.createHash(algorithm).update(bytes).digest("base64");
30690
30763
  if (actualValue[actualValue.length - 1] === "=") {
30691
30764
  if (actualValue[actualValue.length - 2] === "=") {
30692
30765
  actualValue = actualValue.slice(0, -2);
@@ -30779,8 +30852,8 @@ var require_util2 = __commonJS({
30779
30852
  function createDeferredPromise() {
30780
30853
  let res;
30781
30854
  let rej;
30782
- const promise = new Promise((resolve14, reject) => {
30783
- res = resolve14;
30855
+ const promise = new Promise((resolve19, reject) => {
30856
+ res = resolve19;
30784
30857
  rej = reject;
30785
30858
  });
30786
30859
  return { promise, resolve: res, reject: rej };
@@ -32033,8 +32106,8 @@ var require_body = __commonJS({
32033
32106
  var { parseMIMEType, serializeAMimeType } = require_dataURL();
32034
32107
  var random;
32035
32108
  try {
32036
- const crypto2 = require("crypto");
32037
- random = (max) => crypto2.randomInt(0, max);
32109
+ const crypto4 = require("crypto");
32110
+ random = (max) => crypto4.randomInt(0, max);
32038
32111
  } catch {
32039
32112
  random = (max) => Math.floor(Math.random(max));
32040
32113
  }
@@ -32285,8 +32358,8 @@ Content-Type: ${value.type || "application/octet-stream"}\r
32285
32358
  });
32286
32359
  }
32287
32360
  });
32288
- const busboyResolve = new Promise((resolve14, reject) => {
32289
- busboy.on("finish", resolve14);
32361
+ const busboyResolve = new Promise((resolve19, reject) => {
32362
+ busboy.on("finish", resolve19);
32290
32363
  busboy.on("error", (err) => reject(new TypeError(err)));
32291
32364
  });
32292
32365
  if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk);
@@ -32417,7 +32490,7 @@ var require_request = __commonJS({
32417
32490
  }
32418
32491
  var Request = class _Request {
32419
32492
  constructor(origin, {
32420
- path: path27,
32493
+ path: path31,
32421
32494
  method,
32422
32495
  body,
32423
32496
  headers,
@@ -32431,11 +32504,11 @@ var require_request = __commonJS({
32431
32504
  throwOnError,
32432
32505
  expectContinue
32433
32506
  }, handler) {
32434
- if (typeof path27 !== "string") {
32507
+ if (typeof path31 !== "string") {
32435
32508
  throw new InvalidArgumentError("path must be a string");
32436
- } else if (path27[0] !== "/" && !(path27.startsWith("http://") || path27.startsWith("https://")) && method !== "CONNECT") {
32509
+ } else if (path31[0] !== "/" && !(path31.startsWith("http://") || path31.startsWith("https://")) && method !== "CONNECT") {
32437
32510
  throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
32438
- } else if (invalidPathRegex.exec(path27) !== null) {
32511
+ } else if (invalidPathRegex.exec(path31) !== null) {
32439
32512
  throw new InvalidArgumentError("invalid request path");
32440
32513
  }
32441
32514
  if (typeof method !== "string") {
@@ -32498,7 +32571,7 @@ var require_request = __commonJS({
32498
32571
  this.completed = false;
32499
32572
  this.aborted = false;
32500
32573
  this.upgrade = upgrade || null;
32501
- this.path = query ? util.buildURL(path27, query) : path27;
32574
+ this.path = query ? util.buildURL(path31, query) : path31;
32502
32575
  this.origin = origin;
32503
32576
  this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
32504
32577
  this.blocking = blocking == null ? false : blocking;
@@ -32820,9 +32893,9 @@ var require_dispatcher_base = __commonJS({
32820
32893
  }
32821
32894
  close(callback) {
32822
32895
  if (callback === void 0) {
32823
- return new Promise((resolve14, reject) => {
32896
+ return new Promise((resolve19, reject) => {
32824
32897
  this.close((err, data) => {
32825
- return err ? reject(err) : resolve14(data);
32898
+ return err ? reject(err) : resolve19(data);
32826
32899
  });
32827
32900
  });
32828
32901
  }
@@ -32860,12 +32933,12 @@ var require_dispatcher_base = __commonJS({
32860
32933
  err = null;
32861
32934
  }
32862
32935
  if (callback === void 0) {
32863
- return new Promise((resolve14, reject) => {
32936
+ return new Promise((resolve19, reject) => {
32864
32937
  this.destroy(err, (err2, data) => {
32865
32938
  return err2 ? (
32866
32939
  /* istanbul ignore next: should never error */
32867
32940
  reject(err2)
32868
- ) : resolve14(data);
32941
+ ) : resolve19(data);
32869
32942
  });
32870
32943
  });
32871
32944
  }
@@ -33506,9 +33579,9 @@ var require_RedirectHandler = __commonJS({
33506
33579
  return this.handler.onHeaders(statusCode, headers, resume, statusText);
33507
33580
  }
33508
33581
  const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
33509
- const path27 = search ? `${pathname}${search}` : pathname;
33582
+ const path31 = search ? `${pathname}${search}` : pathname;
33510
33583
  this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
33511
- this.opts.path = path27;
33584
+ this.opts.path = path31;
33512
33585
  this.opts.origin = origin;
33513
33586
  this.opts.maxRedirections = 0;
33514
33587
  this.opts.query = null;
@@ -33927,16 +34000,16 @@ var require_client = __commonJS({
33927
34000
  return this[kNeedDrain] < 2;
33928
34001
  }
33929
34002
  async [kClose]() {
33930
- return new Promise((resolve14) => {
34003
+ return new Promise((resolve19) => {
33931
34004
  if (!this[kSize]) {
33932
- resolve14(null);
34005
+ resolve19(null);
33933
34006
  } else {
33934
- this[kClosedResolve] = resolve14;
34007
+ this[kClosedResolve] = resolve19;
33935
34008
  }
33936
34009
  });
33937
34010
  }
33938
34011
  async [kDestroy](err) {
33939
- return new Promise((resolve14) => {
34012
+ return new Promise((resolve19) => {
33940
34013
  const requests = this[kQueue].splice(this[kPendingIdx]);
33941
34014
  for (let i = 0; i < requests.length; i++) {
33942
34015
  const request = requests[i];
@@ -33947,7 +34020,7 @@ var require_client = __commonJS({
33947
34020
  this[kClosedResolve]();
33948
34021
  this[kClosedResolve] = null;
33949
34022
  }
33950
- resolve14();
34023
+ resolve19();
33951
34024
  };
33952
34025
  if (this[kHTTP2Session] != null) {
33953
34026
  util.destroy(this[kHTTP2Session], err);
@@ -34527,7 +34600,7 @@ var require_client = __commonJS({
34527
34600
  });
34528
34601
  }
34529
34602
  try {
34530
- const socket = await new Promise((resolve14, reject) => {
34603
+ const socket = await new Promise((resolve19, reject) => {
34531
34604
  client[kConnector]({
34532
34605
  host,
34533
34606
  hostname,
@@ -34539,7 +34612,7 @@ var require_client = __commonJS({
34539
34612
  if (err) {
34540
34613
  reject(err);
34541
34614
  } else {
34542
- resolve14(socket2);
34615
+ resolve19(socket2);
34543
34616
  }
34544
34617
  });
34545
34618
  });
@@ -34750,7 +34823,7 @@ var require_client = __commonJS({
34750
34823
  writeH2(client, client[kHTTP2Session], request);
34751
34824
  return;
34752
34825
  }
34753
- const { body, method, path: path27, host, upgrade, headers, blocking, reset } = request;
34826
+ const { body, method, path: path31, host, upgrade, headers, blocking, reset } = request;
34754
34827
  const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
34755
34828
  if (body && typeof body.read === "function") {
34756
34829
  body.read(0);
@@ -34800,7 +34873,7 @@ var require_client = __commonJS({
34800
34873
  if (blocking) {
34801
34874
  socket[kBlocking] = true;
34802
34875
  }
34803
- let header = `${method} ${path27} HTTP/1.1\r
34876
+ let header = `${method} ${path31} HTTP/1.1\r
34804
34877
  `;
34805
34878
  if (typeof host === "string") {
34806
34879
  header += `host: ${host}\r
@@ -34863,7 +34936,7 @@ upgrade: ${upgrade}\r
34863
34936
  return true;
34864
34937
  }
34865
34938
  function writeH2(client, session, request) {
34866
- const { body, method, path: path27, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
34939
+ const { body, method, path: path31, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
34867
34940
  let headers;
34868
34941
  if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim());
34869
34942
  else headers = reqHeaders;
@@ -34906,7 +34979,7 @@ upgrade: ${upgrade}\r
34906
34979
  });
34907
34980
  return true;
34908
34981
  }
34909
- headers[HTTP2_HEADER_PATH] = path27;
34982
+ headers[HTTP2_HEADER_PATH] = path31;
34910
34983
  headers[HTTP2_HEADER_SCHEME] = "https";
34911
34984
  const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
34912
34985
  if (body && typeof body.read === "function") {
@@ -35163,12 +35236,12 @@ upgrade: ${upgrade}\r
35163
35236
  cb();
35164
35237
  }
35165
35238
  }
35166
- const waitForDrain = () => new Promise((resolve14, reject) => {
35239
+ const waitForDrain = () => new Promise((resolve19, reject) => {
35167
35240
  assert(callback === null);
35168
35241
  if (socket[kError]) {
35169
35242
  reject(socket[kError]);
35170
35243
  } else {
35171
- callback = resolve14;
35244
+ callback = resolve19;
35172
35245
  }
35173
35246
  });
35174
35247
  if (client[kHTTPConnVersion] === "h2") {
@@ -35514,8 +35587,8 @@ var require_pool_base = __commonJS({
35514
35587
  if (this[kQueue].isEmpty()) {
35515
35588
  return Promise.all(this[kClients].map((c) => c.close()));
35516
35589
  } else {
35517
- return new Promise((resolve14) => {
35518
- this[kClosedResolve] = resolve14;
35590
+ return new Promise((resolve19) => {
35591
+ this[kClosedResolve] = resolve19;
35519
35592
  });
35520
35593
  }
35521
35594
  }
@@ -36093,7 +36166,7 @@ var require_readable = __commonJS({
36093
36166
  if (this.closed) {
36094
36167
  return Promise.resolve(null);
36095
36168
  }
36096
- return new Promise((resolve14, reject) => {
36169
+ return new Promise((resolve19, reject) => {
36097
36170
  const signalListenerCleanup = signal ? util.addAbortListener(signal, () => {
36098
36171
  this.destroy();
36099
36172
  }) : noop;
@@ -36102,7 +36175,7 @@ var require_readable = __commonJS({
36102
36175
  if (signal && signal.aborted) {
36103
36176
  reject(signal.reason || Object.assign(new Error("The operation was aborted"), { name: "AbortError" }));
36104
36177
  } else {
36105
- resolve14(null);
36178
+ resolve19(null);
36106
36179
  }
36107
36180
  }).on("error", noop).on("data", function(chunk) {
36108
36181
  limit -= chunk.length;
@@ -36124,11 +36197,11 @@ var require_readable = __commonJS({
36124
36197
  throw new TypeError("unusable");
36125
36198
  }
36126
36199
  assert(!stream[kConsume]);
36127
- return new Promise((resolve14, reject) => {
36200
+ return new Promise((resolve19, reject) => {
36128
36201
  stream[kConsume] = {
36129
36202
  type,
36130
36203
  stream,
36131
- resolve: resolve14,
36204
+ resolve: resolve19,
36132
36205
  reject,
36133
36206
  length: 0,
36134
36207
  body: []
@@ -36163,12 +36236,12 @@ var require_readable = __commonJS({
36163
36236
  }
36164
36237
  }
36165
36238
  function consumeEnd(consume2) {
36166
- const { type, body, resolve: resolve14, stream, length } = consume2;
36239
+ const { type, body, resolve: resolve19, stream, length } = consume2;
36167
36240
  try {
36168
36241
  if (type === "text") {
36169
- resolve14(toUSVString(Buffer.concat(body)));
36242
+ resolve19(toUSVString(Buffer.concat(body)));
36170
36243
  } else if (type === "json") {
36171
- resolve14(JSON.parse(Buffer.concat(body)));
36244
+ resolve19(JSON.parse(Buffer.concat(body)));
36172
36245
  } else if (type === "arrayBuffer") {
36173
36246
  const dst = new Uint8Array(length);
36174
36247
  let pos = 0;
@@ -36176,12 +36249,12 @@ var require_readable = __commonJS({
36176
36249
  dst.set(buf, pos);
36177
36250
  pos += buf.byteLength;
36178
36251
  }
36179
- resolve14(dst.buffer);
36252
+ resolve19(dst.buffer);
36180
36253
  } else if (type === "blob") {
36181
36254
  if (!Blob2) {
36182
36255
  Blob2 = require("buffer").Blob;
36183
36256
  }
36184
- resolve14(new Blob2(body, { type: stream[kContentType] }));
36257
+ resolve19(new Blob2(body, { type: stream[kContentType] }));
36185
36258
  }
36186
36259
  consumeFinish(consume2);
36187
36260
  } catch (err) {
@@ -36438,9 +36511,9 @@ var require_api_request = __commonJS({
36438
36511
  };
36439
36512
  function request(opts, callback) {
36440
36513
  if (callback === void 0) {
36441
- return new Promise((resolve14, reject) => {
36514
+ return new Promise((resolve19, reject) => {
36442
36515
  request.call(this, opts, (err, data) => {
36443
- return err ? reject(err) : resolve14(data);
36516
+ return err ? reject(err) : resolve19(data);
36444
36517
  });
36445
36518
  });
36446
36519
  }
@@ -36613,9 +36686,9 @@ var require_api_stream = __commonJS({
36613
36686
  };
36614
36687
  function stream(opts, factory, callback) {
36615
36688
  if (callback === void 0) {
36616
- return new Promise((resolve14, reject) => {
36689
+ return new Promise((resolve19, reject) => {
36617
36690
  stream.call(this, opts, factory, (err, data) => {
36618
- return err ? reject(err) : resolve14(data);
36691
+ return err ? reject(err) : resolve19(data);
36619
36692
  });
36620
36693
  });
36621
36694
  }
@@ -36896,9 +36969,9 @@ var require_api_upgrade = __commonJS({
36896
36969
  };
36897
36970
  function upgrade(opts, callback) {
36898
36971
  if (callback === void 0) {
36899
- return new Promise((resolve14, reject) => {
36972
+ return new Promise((resolve19, reject) => {
36900
36973
  upgrade.call(this, opts, (err, data) => {
36901
- return err ? reject(err) : resolve14(data);
36974
+ return err ? reject(err) : resolve19(data);
36902
36975
  });
36903
36976
  });
36904
36977
  }
@@ -36987,9 +37060,9 @@ var require_api_connect = __commonJS({
36987
37060
  };
36988
37061
  function connect(opts, callback) {
36989
37062
  if (callback === void 0) {
36990
- return new Promise((resolve14, reject) => {
37063
+ return new Promise((resolve19, reject) => {
36991
37064
  connect.call(this, opts, (err, data) => {
36992
- return err ? reject(err) : resolve14(data);
37065
+ return err ? reject(err) : resolve19(data);
36993
37066
  });
36994
37067
  });
36995
37068
  }
@@ -37149,20 +37222,20 @@ var require_mock_utils = __commonJS({
37149
37222
  }
37150
37223
  return true;
37151
37224
  }
37152
- function safeUrl(path27) {
37153
- if (typeof path27 !== "string") {
37154
- return path27;
37225
+ function safeUrl(path31) {
37226
+ if (typeof path31 !== "string") {
37227
+ return path31;
37155
37228
  }
37156
- const pathSegments = path27.split("?");
37229
+ const pathSegments = path31.split("?");
37157
37230
  if (pathSegments.length !== 2) {
37158
- return path27;
37231
+ return path31;
37159
37232
  }
37160
37233
  const qp = new URLSearchParams(pathSegments.pop());
37161
37234
  qp.sort();
37162
37235
  return [...pathSegments, qp.toString()].join("?");
37163
37236
  }
37164
- function matchKey(mockDispatch2, { path: path27, method, body, headers }) {
37165
- const pathMatch = matchValue(mockDispatch2.path, path27);
37237
+ function matchKey(mockDispatch2, { path: path31, method, body, headers }) {
37238
+ const pathMatch = matchValue(mockDispatch2.path, path31);
37166
37239
  const methodMatch = matchValue(mockDispatch2.method, method);
37167
37240
  const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
37168
37241
  const headersMatch = matchHeaders(mockDispatch2, headers);
@@ -37180,7 +37253,7 @@ var require_mock_utils = __commonJS({
37180
37253
  function getMockDispatch(mockDispatches, key) {
37181
37254
  const basePath = key.query ? buildURL(key.path, key.query) : key.path;
37182
37255
  const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
37183
- let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path27 }) => matchValue(safeUrl(path27), resolvedPath));
37256
+ let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path31 }) => matchValue(safeUrl(path31), resolvedPath));
37184
37257
  if (matchedMockDispatches.length === 0) {
37185
37258
  throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
37186
37259
  }
@@ -37217,9 +37290,9 @@ var require_mock_utils = __commonJS({
37217
37290
  }
37218
37291
  }
37219
37292
  function buildKey(opts) {
37220
- const { path: path27, method, body, headers, query } = opts;
37293
+ const { path: path31, method, body, headers, query } = opts;
37221
37294
  return {
37222
- path: path27,
37295
+ path: path31,
37223
37296
  method,
37224
37297
  body,
37225
37298
  headers,
@@ -37668,10 +37741,10 @@ var require_pending_interceptors_formatter = __commonJS({
37668
37741
  }
37669
37742
  format(pendingInterceptors) {
37670
37743
  const withPrettyHeaders = pendingInterceptors.map(
37671
- ({ method, path: path27, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
37744
+ ({ method, path: path31, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
37672
37745
  Method: method,
37673
37746
  Origin: origin,
37674
- Path: path27,
37747
+ Path: path31,
37675
37748
  "Status code": statusCode,
37676
37749
  Persistent: persist ? "\u2705" : "\u274C",
37677
37750
  Invocations: timesInvoked,
@@ -40612,7 +40685,7 @@ var require_fetch = __commonJS({
40612
40685
  async function dispatch({ body }) {
40613
40686
  const url = requestCurrentURL(request);
40614
40687
  const agent = fetchParams.controller.dispatcher;
40615
- return new Promise((resolve14, reject) => agent.dispatch(
40688
+ return new Promise((resolve19, reject) => agent.dispatch(
40616
40689
  {
40617
40690
  path: url.pathname + url.search,
40618
40691
  origin: url.origin,
@@ -40688,7 +40761,7 @@ var require_fetch = __commonJS({
40688
40761
  }
40689
40762
  }
40690
40763
  }
40691
- resolve14({
40764
+ resolve19({
40692
40765
  status,
40693
40766
  statusText,
40694
40767
  headersList: headers[kHeadersList],
@@ -40731,7 +40804,7 @@ var require_fetch = __commonJS({
40731
40804
  const val = headersList[n + 1].toString("latin1");
40732
40805
  headers[kHeadersList].append(key, val);
40733
40806
  }
40734
- resolve14({
40807
+ resolve19({
40735
40808
  status,
40736
40809
  statusText: STATUS_CODES[status],
40737
40810
  headersList: headers[kHeadersList],
@@ -42292,8 +42365,8 @@ var require_util6 = __commonJS({
42292
42365
  }
42293
42366
  }
42294
42367
  }
42295
- function validateCookiePath(path27) {
42296
- for (const char of path27) {
42368
+ function validateCookiePath(path31) {
42369
+ for (const char of path31) {
42297
42370
  const code = char.charCodeAt(0);
42298
42371
  if (code < 33 || char === ";") {
42299
42372
  throw new Error("Invalid cookie path");
@@ -43090,9 +43163,9 @@ var require_connection = __commonJS({
43090
43163
  channels.open = diagnosticsChannel.channel("undici:websocket:open");
43091
43164
  channels.close = diagnosticsChannel.channel("undici:websocket:close");
43092
43165
  channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error");
43093
- var crypto2;
43166
+ var crypto4;
43094
43167
  try {
43095
- crypto2 = require("crypto");
43168
+ crypto4 = require("crypto");
43096
43169
  } catch {
43097
43170
  }
43098
43171
  function establishWebSocketConnection(url, protocols, ws, onEstablish, options) {
@@ -43111,7 +43184,7 @@ var require_connection = __commonJS({
43111
43184
  const headersList = new Headers(options.headers)[kHeadersList];
43112
43185
  request.headersList = headersList;
43113
43186
  }
43114
- const keyValue = crypto2.randomBytes(16).toString("base64");
43187
+ const keyValue = crypto4.randomBytes(16).toString("base64");
43115
43188
  request.headersList.append("sec-websocket-key", keyValue);
43116
43189
  request.headersList.append("sec-websocket-version", "13");
43117
43190
  for (const protocol of protocols) {
@@ -43140,7 +43213,7 @@ var require_connection = __commonJS({
43140
43213
  return;
43141
43214
  }
43142
43215
  const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
43143
- const digest = crypto2.createHash("sha1").update(keyValue + uid).digest("base64");
43216
+ const digest = crypto4.createHash("sha1").update(keyValue + uid).digest("base64");
43144
43217
  if (secWSAccept !== digest) {
43145
43218
  failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
43146
43219
  return;
@@ -43220,9 +43293,9 @@ var require_frame = __commonJS({
43220
43293
  "node_modules/undici/lib/websocket/frame.js"(exports2, module2) {
43221
43294
  "use strict";
43222
43295
  var { maxUnsigned16Bit } = require_constants5();
43223
- var crypto2;
43296
+ var crypto4;
43224
43297
  try {
43225
- crypto2 = require("crypto");
43298
+ crypto4 = require("crypto");
43226
43299
  } catch {
43227
43300
  }
43228
43301
  var WebsocketFrameSend = class {
@@ -43231,7 +43304,7 @@ var require_frame = __commonJS({
43231
43304
  */
43232
43305
  constructor(data) {
43233
43306
  this.frameData = data;
43234
- this.maskKey = crypto2.randomBytes(4);
43307
+ this.maskKey = crypto4.randomBytes(4);
43235
43308
  }
43236
43309
  createFrame(opcode) {
43237
43310
  const bodyLength = this.frameData?.byteLength ?? 0;
@@ -43973,11 +44046,11 @@ var require_undici = __commonJS({
43973
44046
  if (typeof opts.path !== "string") {
43974
44047
  throw new InvalidArgumentError("invalid opts.path");
43975
44048
  }
43976
- let path27 = opts.path;
44049
+ let path31 = opts.path;
43977
44050
  if (!opts.path.startsWith("/")) {
43978
- path27 = `/${path27}`;
44051
+ path31 = `/${path31}`;
43979
44052
  }
43980
- url = new URL(util.parseOrigin(url).origin + path27);
44053
+ url = new URL(util.parseOrigin(url).origin + path31);
43981
44054
  } else {
43982
44055
  if (!opts) {
43983
44056
  opts = typeof url === "object" ? url : {};
@@ -44526,7 +44599,7 @@ var init_mcp_check_provider = __esm({
44526
44599
  logger.warn(
44527
44600
  `MCP ${transportName} failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${error instanceof Error ? error.message : String(error)}`
44528
44601
  );
44529
- await new Promise((resolve14) => setTimeout(resolve14, delay));
44602
+ await new Promise((resolve19) => setTimeout(resolve19, delay));
44530
44603
  attempt += 1;
44531
44604
  } finally {
44532
44605
  try {
@@ -44808,7 +44881,7 @@ async function acquirePromptLock() {
44808
44881
  activePrompt = true;
44809
44882
  return;
44810
44883
  }
44811
- await new Promise((resolve14) => waiters.push(resolve14));
44884
+ await new Promise((resolve19) => waiters.push(resolve19));
44812
44885
  activePrompt = true;
44813
44886
  }
44814
44887
  function releasePromptLock() {
@@ -44818,7 +44891,7 @@ function releasePromptLock() {
44818
44891
  }
44819
44892
  async function interactivePrompt(options) {
44820
44893
  await acquirePromptLock();
44821
- return new Promise((resolve14, reject) => {
44894
+ return new Promise((resolve19, reject) => {
44822
44895
  const dbg = process.env.VISOR_DEBUG === "true";
44823
44896
  try {
44824
44897
  if (dbg) {
@@ -44905,12 +44978,12 @@ async function interactivePrompt(options) {
44905
44978
  };
44906
44979
  const finish = (value) => {
44907
44980
  cleanup();
44908
- resolve14(value);
44981
+ resolve19(value);
44909
44982
  };
44910
44983
  if (options.timeout && options.timeout > 0) {
44911
44984
  timeoutId = setTimeout(() => {
44912
44985
  cleanup();
44913
- if (defaultValue !== void 0) return resolve14(defaultValue);
44986
+ if (defaultValue !== void 0) return resolve19(defaultValue);
44914
44987
  return reject(new Error("Input timeout"));
44915
44988
  }, options.timeout);
44916
44989
  }
@@ -45042,7 +45115,7 @@ async function interactivePrompt(options) {
45042
45115
  });
45043
45116
  }
45044
45117
  async function simplePrompt(prompt) {
45045
- return new Promise((resolve14) => {
45118
+ return new Promise((resolve19) => {
45046
45119
  const rl = readline.createInterface({
45047
45120
  input: process.stdin,
45048
45121
  output: process.stdout
@@ -45058,7 +45131,7 @@ async function simplePrompt(prompt) {
45058
45131
  rl.question(`${prompt}
45059
45132
  > `, (answer) => {
45060
45133
  rl.close();
45061
- resolve14(answer.trim());
45134
+ resolve19(answer.trim());
45062
45135
  });
45063
45136
  });
45064
45137
  }
@@ -45226,7 +45299,7 @@ function isStdinAvailable() {
45226
45299
  return !process.stdin.isTTY;
45227
45300
  }
45228
45301
  async function readStdin(timeout, maxSize = 1024 * 1024) {
45229
- return new Promise((resolve14, reject) => {
45302
+ return new Promise((resolve19, reject) => {
45230
45303
  let data = "";
45231
45304
  let timeoutId;
45232
45305
  if (timeout) {
@@ -45253,7 +45326,7 @@ async function readStdin(timeout, maxSize = 1024 * 1024) {
45253
45326
  };
45254
45327
  const onEnd = () => {
45255
45328
  cleanup();
45256
- resolve14(data.trim());
45329
+ resolve19(data.trim());
45257
45330
  };
45258
45331
  const onError = (err) => {
45259
45332
  cleanup();
@@ -49371,23 +49444,23 @@ __export(renderer_schema_exports, {
49371
49444
  });
49372
49445
  async function loadRendererSchema(name) {
49373
49446
  try {
49374
- const fs23 = await import("fs/promises");
49375
- const path27 = await import("path");
49447
+ const fs27 = await import("fs/promises");
49448
+ const path31 = await import("path");
49376
49449
  const sanitized = String(name).replace(/[^a-zA-Z0-9-]/g, "");
49377
49450
  if (!sanitized) return void 0;
49378
49451
  const candidates = [
49379
49452
  // When bundled with ncc, __dirname is dist/ and output/ is at dist/output/
49380
- path27.join(__dirname, "output", sanitized, "schema.json"),
49453
+ path31.join(__dirname, "output", sanitized, "schema.json"),
49381
49454
  // When running from source, __dirname is src/state-machine/dispatch/ and output/ is at output/
49382
- path27.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
49455
+ path31.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
49383
49456
  // When running from a checkout with output/ folder copied to CWD
49384
- path27.join(process.cwd(), "output", sanitized, "schema.json"),
49457
+ path31.join(process.cwd(), "output", sanitized, "schema.json"),
49385
49458
  // Fallback: cwd/dist/output/
49386
- path27.join(process.cwd(), "dist", "output", sanitized, "schema.json")
49459
+ path31.join(process.cwd(), "dist", "output", sanitized, "schema.json")
49387
49460
  ];
49388
49461
  for (const p of candidates) {
49389
49462
  try {
49390
- const raw = await fs23.readFile(p, "utf-8");
49463
+ const raw = await fs27.readFile(p, "utf-8");
49391
49464
  return JSON.parse(raw);
49392
49465
  } catch {
49393
49466
  }
@@ -51806,8 +51879,8 @@ function updateStats2(results, state, isForEachIteration = false) {
51806
51879
  async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
51807
51880
  try {
51808
51881
  const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
51809
- const fs23 = await import("fs/promises");
51810
- const path27 = await import("path");
51882
+ const fs27 = await import("fs/promises");
51883
+ const path31 = await import("path");
51811
51884
  const schemaRaw = checkConfig.schema || "plain";
51812
51885
  const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
51813
51886
  let templateContent;
@@ -51816,27 +51889,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
51816
51889
  logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
51817
51890
  } else if (checkConfig.template && checkConfig.template.file) {
51818
51891
  const file = String(checkConfig.template.file);
51819
- const resolved = path27.resolve(process.cwd(), file);
51820
- templateContent = await fs23.readFile(resolved, "utf-8");
51892
+ const resolved = path31.resolve(process.cwd(), file);
51893
+ templateContent = await fs27.readFile(resolved, "utf-8");
51821
51894
  logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
51822
51895
  } else if (schema && schema !== "plain") {
51823
51896
  const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
51824
51897
  if (sanitized) {
51825
51898
  const candidatePaths = [
51826
- path27.join(__dirname, "output", sanitized, "template.liquid"),
51899
+ path31.join(__dirname, "output", sanitized, "template.liquid"),
51827
51900
  // bundled: dist/output/
51828
- path27.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
51901
+ path31.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
51829
51902
  // source (from state-machine/states)
51830
- path27.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
51903
+ path31.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
51831
51904
  // source (alternate)
51832
- path27.join(process.cwd(), "output", sanitized, "template.liquid"),
51905
+ path31.join(process.cwd(), "output", sanitized, "template.liquid"),
51833
51906
  // fallback: cwd/output/
51834
- path27.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
51907
+ path31.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
51835
51908
  // fallback: cwd/dist/output/
51836
51909
  ];
51837
51910
  for (const p of candidatePaths) {
51838
51911
  try {
51839
- templateContent = await fs23.readFile(p, "utf-8");
51912
+ templateContent = await fs27.readFile(p, "utf-8");
51840
51913
  if (templateContent) {
51841
51914
  logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
51842
51915
  break;
@@ -52534,7 +52607,7 @@ var init_docker_image_sandbox = __esm({
52534
52607
  */
52535
52608
  async start() {
52536
52609
  const image = await this.buildImageIfNeeded();
52537
- const workdir = this.config.workdir || "/workspace";
52610
+ const workdir = this.config.workdir === "host" ? this.repoPath : this.config.workdir || "/workspace";
52538
52611
  const visorPath = this.config.visor_path || "/opt/visor";
52539
52612
  const readOnlySuffix = this.config.read_only ? ":ro" : "";
52540
52613
  const args = [
@@ -52562,6 +52635,14 @@ var init_docker_image_sandbox = __esm({
52562
52635
  for (const mount of this.cacheVolumeMounts) {
52563
52636
  args.push("-v", mount);
52564
52637
  }
52638
+ if (this.config.bind_paths) {
52639
+ for (const bp of this.config.bind_paths) {
52640
+ const hostPath = bp.host.startsWith("~") ? (0, import_path9.resolve)((process.env.HOME || "/root") + bp.host.slice(1)) : (0, import_path9.resolve)(bp.host);
52641
+ const containerPath = bp.container || hostPath;
52642
+ const readOnly = bp.read_only !== false;
52643
+ args.push("-v", `${hostPath}:${containerPath}${readOnly ? ":ro" : ""}`);
52644
+ }
52645
+ }
52565
52646
  args.push(image, "sleep", "infinity");
52566
52647
  logger.info(`Starting sandbox container '${this.containerName}'`);
52567
52648
  const { stdout } = await execFileAsync(args[0], args.slice(1), {
@@ -53014,7 +53095,7 @@ var init_bubblewrap_sandbox = __esm({
53014
53095
  * Build the bwrap command-line arguments.
53015
53096
  */
53016
53097
  buildArgs(options) {
53017
- const workdir = this.config.workdir || "/workspace";
53098
+ const workdir = this.config.workdir === "host" ? this.repoPath : this.config.workdir || "/workspace";
53018
53099
  const args = [];
53019
53100
  args.push("--ro-bind", "/usr", "/usr");
53020
53101
  args.push("--ro-bind", "/bin", "/bin");
@@ -53041,6 +53122,14 @@ var init_bubblewrap_sandbox = __esm({
53041
53122
  }
53042
53123
  const visorPath = this.config.visor_path || "/opt/visor";
53043
53124
  args.push("--ro-bind", this.visorDistPath, visorPath);
53125
+ if (this.config.bind_paths) {
53126
+ for (const bp of this.config.bind_paths) {
53127
+ const hostPath = bp.host.startsWith("~") ? (0, import_path10.resolve)((process.env.HOME || "/root") + bp.host.slice(1)) : (0, import_path10.resolve)(bp.host);
53128
+ const containerPath = bp.container || hostPath;
53129
+ const readOnly = bp.read_only !== false;
53130
+ args.push(readOnly ? "--ro-bind" : "--bind", hostPath, containerPath);
53131
+ }
53132
+ }
53044
53133
  args.push("--chdir", workdir);
53045
53134
  args.push("--unshare-pid");
53046
53135
  args.push("--new-session");
@@ -53190,6 +53279,16 @@ var init_seatbelt_sandbox = __esm({
53190
53279
  }
53191
53280
  const visorDistPath = this.escapePath(this.visorDistPath);
53192
53281
  lines.push(`(allow file-read* (subpath "${visorDistPath}"))`);
53282
+ if (this.config.bind_paths) {
53283
+ for (const bp of this.config.bind_paths) {
53284
+ const hostPath = bp.host.startsWith("~") ? (0, import_path11.resolve)((process.env.HOME || "/root") + bp.host.slice(1)) : (0, import_path11.resolve)(bp.host);
53285
+ const escapedPath = this.escapePath(hostPath);
53286
+ lines.push(`(allow file-read* (subpath "${escapedPath}"))`);
53287
+ if (bp.read_only === false) {
53288
+ lines.push(`(allow file-write* (subpath "${escapedPath}"))`);
53289
+ }
53290
+ }
53291
+ }
53193
53292
  if (this.config.network !== false) {
53194
53293
  lines.push("(allow network*)");
53195
53294
  }
@@ -53950,8 +54049,8 @@ var init_workspace_manager = __esm({
53950
54049
  );
53951
54050
  if (this.cleanupRequested && this.activeOperations === 0) {
53952
54051
  logger.debug(`[Workspace] All references released, proceeding with deferred cleanup`);
53953
- for (const resolve14 of this.cleanupResolvers) {
53954
- resolve14();
54052
+ for (const resolve19 of this.cleanupResolvers) {
54053
+ resolve19();
53955
54054
  }
53956
54055
  this.cleanupResolvers = [];
53957
54056
  }
@@ -54108,19 +54207,19 @@ var init_workspace_manager = __esm({
54108
54207
  );
54109
54208
  this.cleanupRequested = true;
54110
54209
  await Promise.race([
54111
- new Promise((resolve14) => {
54210
+ new Promise((resolve19) => {
54112
54211
  if (this.activeOperations === 0) {
54113
- resolve14();
54212
+ resolve19();
54114
54213
  } else {
54115
- this.cleanupResolvers.push(resolve14);
54214
+ this.cleanupResolvers.push(resolve19);
54116
54215
  }
54117
54216
  }),
54118
- new Promise((resolve14) => {
54217
+ new Promise((resolve19) => {
54119
54218
  setTimeout(() => {
54120
54219
  logger.warn(
54121
54220
  `[Workspace] Cleanup timeout after ${timeout}ms, proceeding anyway (${this.activeOperations} operations still active)`
54122
54221
  );
54123
- resolve14();
54222
+ resolve19();
54124
54223
  }, timeout);
54125
54224
  })
54126
54225
  ]);
@@ -54598,6 +54697,1380 @@ var init_build_engine_context = __esm({
54598
54697
  }
54599
54698
  });
54600
54699
 
54700
// src/policy/default-engine.ts
var DefaultPolicyEngine;
var init_default_engine = __esm({
  "src/policy/default-engine.ts"() {
    "use strict";
    /**
     * Permissive no-op policy engine used when no enterprise policy is
     * configured. Every evaluation hook resolves to `{ allowed: true }`
     * and lifecycle hooks do nothing.
     */
    DefaultPolicyEngine = class {
      /** No configuration is required for the default engine. */
      async initialize(_config) {
      }
      /** Always permits check execution. */
      async evaluateCheckExecution(_checkId, _checkConfig) {
        return { allowed: true };
      }
      /** Always permits tool invocation. */
      async evaluateToolInvocation(_serverName, _methodName, _transport) {
        return { allowed: true };
      }
      /** Always permits capability resolution. */
      async evaluateCapabilities(_checkId, _capabilities) {
        return { allowed: true };
      }
      /** Nothing to release. */
      async shutdown() {
      }
    };
  }
});
54722
+
54723
// src/enterprise/license/validator.ts
var validator_exports = {};
__export(validator_exports, {
  LicenseValidator: () => LicenseValidator
});
var crypto2, fs21, path25, LicenseValidator;
var init_validator = __esm({
  "src/enterprise/license/validator.ts"() {
    "use strict";
    crypto2 = __toESM(require("crypto"));
    fs21 = __toESM(require("fs"));
    path25 = __toESM(require("path"));
    /**
     * Validates Visor enterprise licenses: Ed25519-signed JWTs resolved from
     * the environment or well-known file locations, with a short in-memory
     * cache and a post-expiry grace period.
     */
    LicenseValidator = class _LicenseValidator {
      /** Ed25519 public key for license verification (PEM format). */
      static PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n-----END PUBLIC KEY-----\n";
      // Last successful validation result plus its timestamp, or null.
      cache = null;
      static CACHE_TTL = 5 * 60 * 1e3;
      // 5 minutes
      static GRACE_PERIOD = 72 * 3600 * 1e3;
      // 72 hours after expiry
      /**
       * Load and validate license from environment or file.
       *
       * Resolution order:
       * 1. VISOR_LICENSE env var (JWT string)
       * 2. VISOR_LICENSE_FILE env var (path to file)
       * 3. .visor-license in project root (cwd)
       * 4. .visor-license in ~/.config/visor/
       *
       * Returns the decoded payload, or null when no valid license is found.
       */
      async loadAndValidate() {
        const cached = this.cache;
        if (cached && Date.now() - cached.validatedAt < _LicenseValidator.CACHE_TTL) {
          return cached.payload;
        }
        const token = this.resolveToken();
        if (!token) return null;
        const payload = this.verifyAndDecode(token);
        if (!payload) return null;
        this.cache = { payload, validatedAt: Date.now() };
        return payload;
      }
      /** Check if a specific feature is licensed (requires a prior validation). */
      hasFeature(feature) {
        return this.cache ? this.cache.payload.features.includes(feature) : false;
      }
      /** Check if license is valid (expiry extended by the grace period). */
      isValid() {
        if (!this.cache) return false;
        const expiryMs = this.cache.payload.exp * 1e3;
        return Date.now() < expiryMs + _LicenseValidator.GRACE_PERIOD;
      }
      /** Check if the license is within its grace period (expired but still valid). */
      isInGracePeriod() {
        if (!this.cache) return false;
        const now = Date.now();
        const expiryMs = this.cache.payload.exp * 1e3;
        return now >= expiryMs && now < expiryMs + _LicenseValidator.GRACE_PERIOD;
      }
      // Resolve the raw JWT string per the documented resolution order, or null.
      resolveToken() {
        if (process.env.VISOR_LICENSE) {
          return process.env.VISOR_LICENSE.trim();
        }
        if (process.env.VISOR_LICENSE_FILE) {
          const resolved = path25.resolve(process.env.VISOR_LICENSE_FILE);
          const homeDir = process.env.HOME || process.env.USERPROFILE || "";
          // Only files under cwd or ~/.config/visor may be referenced, and the
          // check is performed on the realpath so symlinks cannot escape.
          const allowedPrefixes = [path25.normalize(process.cwd())];
          if (homeDir) allowedPrefixes.push(path25.normalize(path25.join(homeDir, ".config", "visor")));
          let realPath;
          try {
            realPath = fs21.realpathSync(resolved);
          } catch {
            return null;
          }
          const isSafe = allowedPrefixes.some(
            (prefix) => realPath === prefix || realPath.startsWith(prefix + path25.sep)
          );
          if (!isSafe) return null;
          return this.readFile(realPath);
        }
        const cwdToken = this.readFile(path25.join(process.cwd(), ".visor-license"));
        if (cwdToken) return cwdToken;
        const home = process.env.HOME || process.env.USERPROFILE || "";
        if (home) {
          const configToken = this.readFile(path25.join(home, ".config", "visor", ".visor-license"));
          if (configToken) return configToken;
        }
        return null;
      }
      // Best-effort read of a token file; null on any I/O error.
      readFile(filePath) {
        try {
          return fs21.readFileSync(filePath, "utf-8").trim();
        } catch {
          return null;
        }
      }
      // Verify the JWT signature (EdDSA/Ed25519 only) and decode its payload.
      // Returns null on any structural, signature, schema, or expiry failure.
      verifyAndDecode(token) {
        try {
          const parts = token.split(".");
          if (parts.length !== 3) return null;
          const [headerB64, payloadB64, signatureB64] = parts;
          const header = JSON.parse(Buffer.from(headerB64, "base64url").toString());
          if (header.alg !== "EdDSA") return null;
          const publicKey = crypto2.createPublicKey(_LicenseValidator.PUBLIC_KEY);
          // Reject keys of any other type even if the PEM parsed successfully.
          if (publicKey.asymmetricKeyType !== "ed25519") {
            return null;
          }
          const data = `${headerB64}.${payloadB64}`;
          const signature = Buffer.from(signatureB64, "base64url");
          const isValid = crypto2.verify(null, Buffer.from(data), publicKey, signature);
          if (!isValid) return null;
          const payload = JSON.parse(Buffer.from(payloadB64, "base64url").toString());
          // Minimal schema check on required claims.
          if (!payload.org || !Array.isArray(payload.features) || typeof payload.exp !== "number" || typeof payload.iat !== "number" || !payload.sub) {
            return null;
          }
          const expiryMs = payload.exp * 1e3;
          // Hard-expired licenses (past the grace window) are treated as absent.
          if (Date.now() >= expiryMs + _LicenseValidator.GRACE_PERIOD) {
            return null;
          }
          return payload;
        } catch {
          return null;
        }
      }
    };
  }
});
54853
+
54854
// src/enterprise/policy/opa-compiler.ts
var fs22, path26, os2, crypto3, import_child_process8, OpaCompiler;
var init_opa_compiler = __esm({
  "src/enterprise/policy/opa-compiler.ts"() {
    "use strict";
    fs22 = __toESM(require("fs"));
    path26 = __toESM(require("path"));
    os2 = __toESM(require("os"));
    crypto3 = __toESM(require("crypto"));
    import_child_process8 = require("child_process");
    /**
     * Resolves OPA policy inputs (.wasm bundles or .rego sources) into raw
     * WASM bytes, auto-compiling .rego files via the `opa` CLI with a
     * content-hash cache under the OS temp directory.
     */
    OpaCompiler = class _OpaCompiler {
      static CACHE_DIR = path26.join(os2.tmpdir(), "visor-opa-cache");
      /**
       * Resolve the input paths to WASM bytes.
       *
       * Strategy:
       * 1. If any path is a .wasm file, read it directly
       * 2. If a directory contains policy.wasm, read it
       * 3. Otherwise, collect all .rego files and auto-compile via `opa build`
       */
      async resolveWasmBytes(paths) {
        const regoFiles = [];
        for (const p of paths) {
          const resolved = path26.resolve(p);
          if (path26.normalize(resolved).includes("..")) {
            throw new Error(`Policy path contains traversal sequences: ${p}`);
          }
          // A concrete .wasm file wins immediately.
          if (resolved.endsWith(".wasm") && fs22.existsSync(resolved)) {
            return fs22.readFileSync(resolved);
          }
          if (!fs22.existsSync(resolved)) continue;
          const entryStat = fs22.statSync(resolved);
          if (entryStat.isDirectory()) {
            // Prefer a pre-built policy.wasm inside the directory.
            const wasmCandidate = path26.join(resolved, "policy.wasm");
            if (fs22.existsSync(wasmCandidate)) {
              return fs22.readFileSync(wasmCandidate);
            }
            for (const entry of fs22.readdirSync(resolved)) {
              if (entry.endsWith(".rego")) {
                regoFiles.push(path26.join(resolved, entry));
              }
            }
          } else if (resolved.endsWith(".rego")) {
            regoFiles.push(resolved);
          }
        }
        if (regoFiles.length === 0) {
          throw new Error(
            `OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(", ")}`
          );
        }
        return this.compileRego(regoFiles);
      }
      /**
       * Auto-compile .rego files to a WASM bundle using the `opa` CLI.
       *
       * Caches the compiled bundle based on a content hash of all input .rego files
       * so subsequent runs skip compilation if policies haven't changed.
       */
      compileRego(regoFiles) {
        // Fail fast with installation guidance if the CLI is unavailable.
        try {
          (0, import_child_process8.execFileSync)("opa", ["version"], { stdio: "pipe" });
        } catch {
          throw new Error(
            "OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\nOr pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz " + regoFiles.join(" ")
          );
        }
        // Cache key: sha256 over each file's content and its path, in sorted order.
        const hash = crypto3.createHash("sha256");
        for (const regoFile of regoFiles.sort()) {
          hash.update(fs22.readFileSync(regoFile));
          hash.update(regoFile);
        }
        const cacheKey = hash.digest("hex").slice(0, 16);
        const cacheDir = _OpaCompiler.CACHE_DIR;
        const cachedWasm = path26.join(cacheDir, `${cacheKey}.wasm`);
        if (fs22.existsSync(cachedWasm)) {
          return fs22.readFileSync(cachedWasm);
        }
        fs22.mkdirSync(cacheDir, { recursive: true });
        const bundleTar = path26.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
        try {
          const args = [
            "build",
            "-t",
            "wasm",
            "-e",
            "visor",
            // entrypoint: the visor package tree
            "-o",
            bundleTar,
            ...regoFiles
          ];
          (0, import_child_process8.execFileSync)("opa", args, {
            stdio: "pipe",
            timeout: 3e4
          });
        } catch (err) {
          const stderr = err?.stderr?.toString() || "";
          throw new Error(
            `Failed to compile .rego files to WASM:
${stderr}
Ensure your .rego files are valid and the \`opa\` CLI is installed.`
          );
        }
        // OPA bundles store the member as "/policy.wasm"; some tar
        // implementations want the leading slash, others reject it,
        // so try both spellings before giving up.
        try {
          (0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "/policy.wasm"], {
            stdio: "pipe"
          });
          const extractedWasm = path26.join(cacheDir, "policy.wasm");
          if (fs22.existsSync(extractedWasm)) {
            fs22.renameSync(extractedWasm, cachedWasm);
          }
        } catch {
          try {
            (0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "policy.wasm"], {
              stdio: "pipe"
            });
            const extractedWasm = path26.join(cacheDir, "policy.wasm");
            if (fs22.existsSync(extractedWasm)) {
              fs22.renameSync(extractedWasm, cachedWasm);
            }
          } catch (err2) {
            throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
          }
        }
        // The tarball is only an intermediate artifact; ignore cleanup errors.
        try {
          fs22.unlinkSync(bundleTar);
        } catch {
        }
        if (!fs22.existsSync(cachedWasm)) {
          throw new Error("OPA build succeeded but policy.wasm was not found in the bundle");
        }
        return fs22.readFileSync(cachedWasm);
      }
    };
  }
});
54992
+
54993
// src/enterprise/policy/opa-wasm-evaluator.ts
var fs23, path27, OpaWasmEvaluator;
var init_opa_wasm_evaluator = __esm({
  "src/enterprise/policy/opa-wasm-evaluator.ts"() {
    "use strict";
    fs23 = __toESM(require("fs"));
    path27 = __toESM(require("path"));
    init_opa_compiler();
    /**
     * Evaluates OPA policies in-process via a compiled WASM bundle, using the
     * optional @open-policy-agent/opa-wasm runtime loaded at initialization.
     */
    OpaWasmEvaluator = class {
      // Loaded opa-wasm policy instance; null until initialize() succeeds.
      policy = null;
      // External data document fed to the policy on each evaluation.
      dataDocument = {};
      compiler = new OpaCompiler();
      /**
       * Compile/resolve the policy sources and load the resulting WASM bytes.
       * Throws an actionable error when the opa-wasm package is not installed.
       */
      async initialize(rulesPath) {
        const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
        const wasmBytes = await this.compiler.resolveWasmBytes(paths);
        try {
          // Resolve opa-wasm lazily so it stays an optional dependency.
          const { createRequire } = require("module");
          const runtimeRequire = createRequire(__filename);
          const opaWasm = runtimeRequire("@open-policy-agent/opa-wasm");
          const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
          if (!loadPolicy) {
            throw new Error("loadPolicy not found in @open-policy-agent/opa-wasm");
          }
          this.policy = await loadPolicy(wasmBytes);
        } catch (err) {
          if (err?.code === "MODULE_NOT_FOUND" || err?.code === "ERR_MODULE_NOT_FOUND") {
            throw new Error(
              "OPA WASM evaluator requires @open-policy-agent/opa-wasm. Install it with: npm install @open-policy-agent/opa-wasm"
            );
          }
          throw err;
        }
      }
      /**
       * Load external data from a JSON file to use as the OPA data document.
       * The loaded data will be passed to `policy.setData()` during evaluation,
       * making it available in Rego via `data.<key>`.
       */
      loadData(dataPath) {
        const resolved = path27.resolve(dataPath);
        if (path27.normalize(resolved).includes("..")) {
          throw new Error(`Data path contains traversal sequences: ${dataPath}`);
        }
        if (!fs23.existsSync(resolved)) {
          throw new Error(`OPA data file not found: ${resolved}`);
        }
        const fileStat = fs23.statSync(resolved);
        // Guard against pathological inputs; 10MB is far beyond normal policy data.
        if (fileStat.size > 10 * 1024 * 1024) {
          throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${fileStat.size} bytes)`);
        }
        const raw = fs23.readFileSync(resolved, "utf-8");
        try {
          const parsed = JSON.parse(raw);
          if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
            throw new Error("OPA data file must contain a JSON object (not an array or primitive)");
          }
          this.dataDocument = parsed;
        } catch (err) {
          // Re-throw our own shape error untouched; wrap JSON parse failures.
          if (err.message.startsWith("OPA data file must")) {
            throw err;
          }
          throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
        }
      }
      /**
       * Evaluate the loaded policy against `input`, returning the first
       * result-set entry's result (or undefined when the set is empty).
       */
      async evaluate(input) {
        if (!this.policy) {
          throw new Error("OPA WASM evaluator not initialized");
        }
        this.policy.setData(this.dataDocument);
        const resultSet = this.policy.evaluate(input);
        if (Array.isArray(resultSet) && resultSet.length > 0) {
          return resultSet[0].result;
        }
        return void 0;
      }
      /** Release the WASM policy, tolerating either close() or free() APIs. */
      async shutdown() {
        if (this.policy) {
          if (typeof this.policy.close === "function") {
            try {
              this.policy.close();
            } catch {
            }
          } else if (typeof this.policy.free === "function") {
            try {
              this.policy.free();
            } catch {
            }
          }
        }
        this.policy = null;
      }
    };
  }
});
55087
+
55088
// src/enterprise/policy/opa-http-evaluator.ts
var OpaHttpEvaluator;
var init_opa_http_evaluator = __esm({
  "src/enterprise/policy/opa-http-evaluator.ts"() {
    "use strict";
    /**
     * Evaluates policies against a remote OPA server over its REST API,
     * with SSRF protection on the configured base URL and per-request timeouts.
     */
    OpaHttpEvaluator = class {
      baseUrl;
      timeout;
      /**
       * @param baseUrl - OPA server base URL (http/https, public address only)
       * @param timeout - per-request timeout in milliseconds (default 5000)
       */
      constructor(baseUrl, timeout = 5e3) {
        let parsed;
        try {
          parsed = new URL(baseUrl);
        } catch {
          throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
        }
        if (!["http:", "https:"].includes(parsed.protocol)) {
          throw new Error(
            `OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`
          );
        }
        if (this.isBlockedHostname(parsed.hostname)) {
          throw new Error(
            `OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`
          );
        }
        // Normalize away trailing slashes so path joins stay clean.
        this.baseUrl = baseUrl.replace(/\/+$/, "");
        this.timeout = timeout;
      }
      /**
       * Check if a hostname is blocked due to SSRF concerns.
       *
       * Blocks:
       * - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
       * - Link-local addresses (169.254.x.x)
       * - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
       * - IPv6 unique local addresses (fd00::/8)
       * - Cloud metadata services (*.internal)
       */
      isBlockedHostname(hostname) {
        if (!hostname) return true;
        // Strip IPv6 brackets and compare case-insensitively.
        const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, "");
        if (normalized === "metadata.google.internal" || normalized.endsWith(".internal")) {
          return true;
        }
        if (normalized === "localhost" || normalized === "localhost.localdomain") {
          return true;
        }
        if (normalized === "::1" || normalized === "0:0:0:0:0:0:0:1") {
          return true;
        }
        const ipv4Match = normalized.match(/^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/);
        if (ipv4Match) {
          const octets = ipv4Match.slice(1, 5).map(Number);
          // Not a real IPv4 address; treat it as an ordinary hostname.
          if (octets.some((octet) => octet > 255)) {
            return false;
          }
          const [a, b] = octets;
          if (a === 127) {
            return true;
          }
          if (a === 0) {
            return true;
          }
          if (a === 169 && b === 254) {
            return true;
          }
          if (a === 10) {
            return true;
          }
          if (a === 172 && b >= 16 && b <= 31) {
            return true;
          }
          if (a === 192 && b === 168) {
            return true;
          }
        }
        // IPv6 unique-local (fc00::/7) and link-local (fe80::/10) prefixes.
        if (normalized.startsWith("fd") || normalized.startsWith("fc")) {
          return true;
        }
        if (normalized.startsWith("fe80:")) {
          return true;
        }
        return false;
      }
      /**
       * Evaluate a policy rule against an input document via OPA REST API.
       *
       * @param input - The input document to evaluate
       * @param rulePath - OPA rule path (e.g., 'visor/check/execute')
       * @returns The result object from OPA, or undefined on error
       */
      async evaluate(input, rulePath) {
        const encodedPath = rulePath.split("/").map((s) => encodeURIComponent(s)).join("/");
        const url = `${this.baseUrl}/v1/data/${encodedPath}`;
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), this.timeout);
        try {
          const response = await fetch(url, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ input }),
            signal: controller.signal
          });
          if (!response.ok) {
            throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
          }
          let body;
          try {
            body = await response.json();
          } catch (jsonErr) {
            throw new Error(
              `OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`
            );
          }
          return body?.result;
        } finally {
          clearTimeout(timer);
        }
      }
      /** No persistent resources to release. */
      async shutdown() {
      }
    };
  }
});
55214
+
55215
// src/enterprise/policy/policy-input-builder.ts
var PolicyInputBuilder;
var init_policy_input_builder = __esm({
  "src/enterprise/policy/policy-input-builder.ts"() {
    "use strict";
    /**
     * Builds OPA input documents (check execution, tool invocation, capability
     * resolution) from the configured roles and the current actor / repository /
     * pull-request context.
     */
    PolicyInputBuilder = class {
      roles;
      actor;
      repository;
      pullRequest;
      constructor(policyConfig, actor, repository, pullRequest) {
        this.roles = policyConfig.roles || {};
        this.actor = actor;
        this.repository = repository;
        this.pullRequest = pullRequest;
      }
      /** Resolve which roles apply to the current actor. */
      resolveRoles() {
        const matched = [];
        for (const [roleName, roleConfig] of Object.entries(this.roles)) {
          // A role matches when any identity criterion matches...
          let identityMatch = false;
          if (roleConfig.author_association && this.actor.authorAssociation && roleConfig.author_association.includes(this.actor.authorAssociation)) {
            identityMatch = true;
          }
          if (!identityMatch && roleConfig.users && this.actor.login && roleConfig.users.includes(this.actor.login)) {
            identityMatch = true;
          }
          if (!identityMatch && roleConfig.slack_users && this.actor.slack?.userId && roleConfig.slack_users.includes(this.actor.slack.userId)) {
            identityMatch = true;
          }
          if (!identityMatch && roleConfig.emails && this.actor.slack?.email) {
            const actorEmail = this.actor.slack.email.toLowerCase();
            if (roleConfig.emails.some((e) => e.toLowerCase() === actorEmail)) {
              identityMatch = true;
            }
          }
          if (!identityMatch) continue;
          // ...and, when the role restricts Slack channels, the actor must be
          // acting from one of the allowed channels.
          if (roleConfig.slack_channels && roleConfig.slack_channels.length > 0) {
            if (!this.actor.slack?.channelId || !roleConfig.slack_channels.includes(this.actor.slack.channelId)) {
              continue;
            }
          }
          matched.push(roleName);
        }
        return matched;
      }
      // Shared actor fragment for every input document.
      buildActor() {
        return {
          authorAssociation: this.actor.authorAssociation,
          login: this.actor.login,
          roles: this.resolveRoles(),
          isLocalMode: this.actor.isLocalMode,
          ...this.actor.slack && { slack: this.actor.slack }
        };
      }
      /** Input document for the `check.execute` policy scope. */
      forCheckExecution(check) {
        return {
          scope: "check.execute",
          check: {
            id: check.id,
            type: check.type,
            group: check.group,
            tags: check.tags,
            criticality: check.criticality,
            sandbox: check.sandbox,
            policy: check.policy
          },
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      /** Input document for the `tool.invoke` policy scope. */
      forToolInvocation(serverName, methodName, transport) {
        return {
          scope: "tool.invoke",
          tool: { serverName, methodName, transport },
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      /** Input document for the `capability.resolve` policy scope. */
      forCapabilityResolve(checkId, capabilities) {
        return {
          scope: "capability.resolve",
          check: { id: checkId, type: "ai" },
          capability: capabilities,
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
    };
  }
});
55309
+
55310
// src/enterprise/policy/opa-policy-engine.ts
var opa_policy_engine_exports = {};
__export(opa_policy_engine_exports, {
  OpaPolicyEngine: () => OpaPolicyEngine
});
var OpaPolicyEngine;
var init_opa_policy_engine = __esm({
  "src/enterprise/policy/opa-policy-engine.ts"() {
    "use strict";
    init_opa_wasm_evaluator();
    init_opa_http_evaluator();
    init_policy_input_builder();
    /**
     * Policy engine backed by OPA, either in-process (WASM) or remote (HTTP).
     * Applies a configurable fallback ("deny" | "allow" | "warn") when
     * evaluation fails, times out, or yields no result.
     */
    OpaPolicyEngine = class {
      evaluator = null;
      fallback;
      timeout;
      config;
      inputBuilder = null;
      logger = null;
      constructor(config) {
        this.config = config;
        this.fallback = config.fallback || "deny";
        this.timeout = config.timeout || 5e3;
      }
      /**
       * Build the actor/repo/PR context from the environment and create the
       * configured evaluator ("local" WASM, "remote" HTTP, or none).
       */
      async initialize(config) {
        // Logger is best-effort; policy evaluation must work without it.
        try {
          this.logger = (init_logger(), __toCommonJS(logger_exports)).logger;
        } catch {
        }
        const actor = {
          authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
          login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
          isLocalMode: !process.env.GITHUB_ACTIONS
        };
        const repo = {
          owner: process.env.GITHUB_REPOSITORY_OWNER,
          name: process.env.GITHUB_REPOSITORY?.split("/")[1],
          branch: process.env.GITHUB_HEAD_REF,
          baseBranch: process.env.GITHUB_BASE_REF,
          event: process.env.GITHUB_EVENT_NAME
        };
        const prNum = process.env.GITHUB_PR_NUMBER ? parseInt(process.env.GITHUB_PR_NUMBER, 10) : void 0;
        const pullRequest = {
          number: prNum !== void 0 && Number.isFinite(prNum) ? prNum : void 0
        };
        this.inputBuilder = new PolicyInputBuilder(config, actor, repo, pullRequest);
        if (config.engine === "local") {
          if (!config.rules) {
            throw new Error("OPA local mode requires `policy.rules` path to .wasm or .rego files");
          }
          const wasm = new OpaWasmEvaluator();
          await wasm.initialize(config.rules);
          if (config.data) {
            wasm.loadData(config.data);
          }
          this.evaluator = wasm;
        } else if (config.engine === "remote") {
          if (!config.url) {
            throw new Error("OPA remote mode requires `policy.url` pointing to OPA server");
          }
          this.evaluator = new OpaHttpEvaluator(config.url, this.timeout);
        } else {
          this.evaluator = null;
        }
      }
      /**
       * Update actor/repo/PR context (e.g., after PR info becomes available).
       * Called by the enterprise loader when engine context is enriched.
       */
      setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new PolicyInputBuilder(this.config, actor, repo, pullRequest);
      }
      /** Evaluate whether a check may execute (default scope `check.execute`). */
      async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const cfg = checkConfig && typeof checkConfig === "object" ? checkConfig : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
          id: checkId,
          type: cfg.type || "ai",
          group: cfg.group,
          tags: cfg.tags,
          criticality: cfg.criticality,
          sandbox: cfg.sandbox,
          policy: policyOverride
        });
        return this.doEvaluate(input, this.resolveRulePath("check.execute", policyOverride?.rule));
      }
      /** Evaluate whether an MCP tool call is allowed. */
      async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, "visor/tool/invoke");
      }
      /** Evaluate a capability-resolution request for a check. */
      async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, "visor/capability/resolve");
      }
      /** Dispose of the evaluator and drop cached context. */
      async shutdown() {
        if (this.evaluator && "shutdown" in this.evaluator) {
          await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
      }
      // Map a scope (or per-check override) onto an OPA rule path under `visor/`.
      resolveRulePath(defaultScope, override) {
        if (override) {
          return override.startsWith("visor/") ? override : `visor/${override}`;
        }
        return `visor/${defaultScope.replace(/\./g, "/")}`;
      }
      // Run one evaluation with timeout, fallback handling, and debug logging.
      async doEvaluate(input, rulePath) {
        try {
          this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
          let timer;
          const timeoutPromise = new Promise((_resolve, reject) => {
            timer = setTimeout(() => reject(new Error("policy evaluation timeout")), this.timeout);
          });
          try {
            const result = await Promise.race([this.rawEvaluate(input, rulePath), timeoutPromise]);
            const decision = this.parseDecision(result);
            // "warn" fallback converts a denial into an audited allow.
            if (!decision.allowed && this.fallback === "warn") {
              decision.allowed = true;
              decision.warn = true;
              decision.reason = `audit: ${decision.reason || "policy denied"}`;
            }
            this.logger?.debug(
              `[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || "none"}`
            );
            return decision;
          } finally {
            if (timer) clearTimeout(timer);
          }
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: `policy evaluation failed, fallback=${this.fallback}`
          };
        }
      }
      // WASM results come back as the whole `visor` package tree; HTTP results
      // are already addressed by rule path.
      async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof OpaWasmEvaluator) {
          const result = await this.evaluator.evaluate(input);
          return this.navigateWasmResult(result, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
      }
      /**
       * Navigate nested OPA WASM result tree to reach the specific rule's output.
       * The WASM entrypoint `-e visor` means the result root IS the visor package,
       * so we strip the `visor/` prefix and walk the remaining segments.
       */
      navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== "object") return result;
        const segments = rulePath.replace(/^visor\//, "").split("/");
        let current = result;
        for (const seg of segments) {
          if (current && typeof current === "object" && seg in current) {
            current = current[seg];
          } else {
            return void 0;
          }
        }
        return current;
      }
      // Convert a raw rule output into a decision; missing output falls back.
      parseDecision(result) {
        if (result === void 0 || result === null) {
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: this.fallback === "warn" ? "audit: no policy result" : "no policy result"
          };
        }
        // Only an explicit `allowed: false` denies.
        const decision = {
          allowed: result.allowed !== false,
          reason: result.reason
        };
        if (result.capabilities) {
          decision.capabilities = result.capabilities;
        }
        return decision;
      }
    };
  }
});
55498
+
55499
+ // src/enterprise/scheduler/knex-store.ts
55500
+ var knex_store_exports = {};
55501
+ __export(knex_store_exports, {
55502
+ KnexStoreBackend: () => KnexStoreBackend
55503
+ });
55504
// Coerce a DB-driver value (number or numeric string) to a number;
// null/undefined map to undefined.
function toNum(val) {
  if (val === null || val === void 0) {
    return void 0;
  }
  if (typeof val === "string") {
    return parseInt(val, 10);
  }
  return val;
}
55508
// Parse a JSON string, returning undefined for empty/nullish input or
// on any parse error (DB text columns may hold null or malformed data).
function safeJsonParse2(value) {
  if (!value) {
    return void 0;
  }
  try {
    return JSON.parse(value);
  } catch {
    return void 0;
  }
}
55516
// Map a snake_case trigger DB row to the camelCase trigger record,
// decoding JSON columns and normalizing booleans that drivers may
// return as 0/1 integers.
function fromTriggerRow2(row) {
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: row.creator_context ?? void 0,
    creatorName: row.creator_name ?? void 0,
    description: row.description ?? void 0,
    channels: safeJsonParse2(row.channels),
    fromUsers: safeJsonParse2(row.from_users),
    fromBots: row.from_bots === true || row.from_bots === 1,
    contains: safeJsonParse2(row.contains),
    matchPattern: row.match_pattern ?? void 0,
    threads: row.threads,
    workflow: row.workflow,
    inputs: safeJsonParse2(row.inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    enabled: row.enabled === true || row.enabled === 1,
    createdAt: toNum(row.created_at)
  };
}
55537
// Map a camelCase trigger record to a snake_case DB insert row,
// JSON-encoding structured fields and storing explicit NULLs for
// absent optional values.
function toTriggerInsertRow(trigger) {
  const encode = (value) => value ? JSON.stringify(value) : null;
  return {
    id: trigger.id,
    creator_id: trigger.creatorId,
    creator_context: trigger.creatorContext ?? null,
    creator_name: trigger.creatorName ?? null,
    description: trigger.description ?? null,
    channels: encode(trigger.channels),
    from_users: encode(trigger.fromUsers),
    from_bots: trigger.fromBots,
    contains: encode(trigger.contains),
    match_pattern: trigger.matchPattern ?? null,
    threads: trigger.threads,
    workflow: trigger.workflow,
    inputs: encode(trigger.inputs),
    output_context: encode(trigger.outputContext),
    status: trigger.status,
    enabled: trigger.enabled,
    created_at: trigger.createdAt
  };
}
55558
function fromDbRow2(row) {
  // Convert a raw `schedules` row into the camelCase schedule object.
  // bigint columns may come back as strings (hence toNum) and booleans as
  // 1/0 depending on the driver.
  const orUndef = (v) => v ?? void 0;
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: orUndef(row.creator_context),
    creatorName: orUndef(row.creator_name),
    timezone: row.timezone,
    schedule: row.schedule_expr,
    runAt: toNum(row.run_at),
    isRecurring: row.is_recurring === true || row.is_recurring === 1,
    originalExpression: row.original_expression,
    workflow: orUndef(row.workflow),
    workflowInputs: safeJsonParse2(row.workflow_inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    createdAt: toNum(row.created_at),
    lastRunAt: toNum(row.last_run_at),
    nextRunAt: toNum(row.next_run_at),
    runCount: row.run_count,
    failureCount: row.failure_count,
    lastError: orUndef(row.last_error),
    previousResponse: orUndef(row.previous_response)
  };
}
55582
function toInsertRow(schedule) {
  // Serialize an in-memory schedule into a snake_case row for the
  // `schedules` table. Object fields are persisted as JSON text; absent
  // optionals become SQL NULL.
  const asJson = (v) => (v ? JSON.stringify(v) : null);
  const orNull = (v) => v ?? null;
  return {
    id: schedule.id,
    creator_id: schedule.creatorId,
    creator_context: orNull(schedule.creatorContext),
    creator_name: orNull(schedule.creatorName),
    timezone: schedule.timezone,
    schedule_expr: schedule.schedule,
    run_at: orNull(schedule.runAt),
    is_recurring: schedule.isRecurring,
    original_expression: schedule.originalExpression,
    workflow: orNull(schedule.workflow),
    workflow_inputs: asJson(schedule.workflowInputs),
    output_context: asJson(schedule.outputContext),
    status: schedule.status,
    created_at: schedule.createdAt,
    last_run_at: orNull(schedule.lastRunAt),
    next_run_at: orNull(schedule.nextRunAt),
    run_count: schedule.runCount,
    failure_count: schedule.failureCount,
    last_error: orNull(schedule.lastError),
    previous_response: orNull(schedule.previousResponse)
  };
}
55606
// Module-scope bindings populated lazily by init_knex_store below
// (esbuild's __esm lazy-module pattern: the body runs once, on first use).
var fs24, path28, import_uuid2, KnexStoreBackend;
var init_knex_store = __esm({
  "src/enterprise/scheduler/knex-store.ts"() {
    "use strict";
    fs24 = __toESM(require("fs"));
    path28 = __toESM(require("path"));
    import_uuid2 = require("uuid");
    init_logger();
    // Knex-backed schedule/trigger store for PostgreSQL, MySQL and MSSQL.
    // Provides CRUD for `schedules` and `message_triggers`, per-user limit
    // validation, and distributed locking via a `scheduler_locks` table.
    KnexStoreBackend = class {
      // Knex instance; null until initialize() succeeds.
      knex = null;
      // One of "postgresql" | "mysql" | "mssql" (keys of clientMap below).
      driver;
      // Raw connection settings from the storage config.
      connection;
      constructor(driver, storageConfig, _haConfig) {
        this.driver = driver;
        this.connection = storageConfig.connection || {};
      }
      // Lazily loads knex (an optional peer dependency), builds the
      // driver-specific connection config, and runs schema migration.
      async initialize() {
        // Use a runtime require so bundlers don't try to resolve "knex"
        // at build time; it is only needed when a SQL driver is configured.
        const { createRequire } = require("module");
        const runtimeRequire = createRequire(__filename);
        let knexFactory;
        try {
          knexFactory = runtimeRequire("knex");
        } catch (err) {
          const code = err?.code;
          if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
            throw new Error(
              "knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
            );
          }
          throw err;
        }
        // Map our driver names to knex client adapter names.
        const clientMap = {
          postgresql: "pg",
          mysql: "mysql2",
          mssql: "tedious"
        };
        const client = clientMap[this.driver];
        let connection;
        if (this.connection.connection_string) {
          // A full connection string takes precedence over discrete fields.
          connection = this.connection.connection_string;
        } else if (this.driver === "mssql") {
          connection = this.buildMssqlConnection();
        } else {
          connection = this.buildStandardConnection();
        }
        this.knex = knexFactory({
          client,
          connection,
          pool: {
            min: this.connection.pool?.min ?? 0,
            max: this.connection.pool?.max ?? 10
          }
        });
        await this.migrateSchema();
        logger.info(`[KnexStore] Initialized (${this.driver})`);
      }
      // Connection object for pg/mysql2 clients.
      buildStandardConnection() {
        return {
          host: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          ssl: this.resolveSslConfig()
        };
      }
      // MSSQL (tedious) uses `server` instead of `host` and nests TLS
      // settings under `options`.
      buildMssqlConnection() {
        const ssl = this.connection.ssl;
        const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
        return {
          server: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          options: {
            encrypt: sslEnabled,
            // Trust the server cert when verification is explicitly disabled,
            // or when encryption itself is off.
            trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
          }
        };
      }
      // Translate the config's ssl section (boolean or object with
      // enabled/reject_unauthorized/ca/cert/key file paths) into the
      // node TLS options shape expected by pg/mysql2.
      resolveSslConfig() {
        const ssl = this.connection.ssl;
        if (ssl === false || ssl === void 0) return false;
        if (ssl === true) return { rejectUnauthorized: true };
        if (ssl.enabled === false) return false;
        const result = {
          rejectUnauthorized: ssl.reject_unauthorized !== false
        };
        if (ssl.ca) {
          const caPath = this.validateSslPath(ssl.ca, "CA certificate");
          result.ca = fs24.readFileSync(caPath, "utf8");
        }
        if (ssl.cert) {
          const certPath = this.validateSslPath(ssl.cert, "client certificate");
          result.cert = fs24.readFileSync(certPath, "utf8");
        }
        if (ssl.key) {
          const keyPath = this.validateSslPath(ssl.key, "client key");
          result.key = fs24.readFileSync(keyPath, "utf8");
        }
        return result;
      }
      // Resolve an SSL material path, rejecting un-normalized paths
      // (e.g. containing ".." sequences) and missing files.
      validateSslPath(filePath, label) {
        const resolved = path28.resolve(filePath);
        if (resolved !== path28.normalize(resolved)) {
          throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
        }
        if (!fs24.existsSync(resolved)) {
          throw new Error(`SSL ${label} not found: ${filePath}`);
        }
        return resolved;
      }
      // Tear down the connection pool; safe to call when not initialized.
      async shutdown() {
        if (this.knex) {
          await this.knex.destroy();
          this.knex = null;
        }
      }
      // Idempotent schema creation: only creates tables that don't exist.
      // NOTE(review): no ALTER-based migrations — existing tables with an
      // older shape are left untouched.
      async migrateSchema() {
        const knex = this.getKnex();
        const exists = await knex.schema.hasTable("schedules");
        if (!exists) {
          await knex.schema.createTable("schedules", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.string("timezone", 64).notNullable().defaultTo("UTC");
            table.string("schedule_expr", 255);
            table.bigInteger("run_at");
            table.boolean("is_recurring").notNullable();
            table.text("original_expression");
            table.string("workflow", 255);
            table.text("workflow_inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().index();
            table.bigInteger("created_at").notNullable();
            table.bigInteger("last_run_at");
            table.bigInteger("next_run_at");
            table.integer("run_count").notNullable().defaultTo(0);
            table.integer("failure_count").notNullable().defaultTo(0);
            table.text("last_error");
            table.text("previous_response");
            // Composite index serving getDueSchedules' status+time scan.
            table.index(["status", "next_run_at"]);
          });
        }
        const triggersExist = await knex.schema.hasTable("message_triggers");
        if (!triggersExist) {
          await knex.schema.createTable("message_triggers", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.text("description");
            table.text("channels");
            table.text("from_users");
            table.boolean("from_bots").notNullable().defaultTo(false);
            table.text("contains");
            table.text("match_pattern");
            table.string("threads", 20).notNullable().defaultTo("any");
            table.string("workflow", 255).notNullable();
            table.text("inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().defaultTo("active").index();
            table.boolean("enabled").notNullable().defaultTo(true);
            table.bigInteger("created_at").notNullable();
          });
        }
        const locksExist = await knex.schema.hasTable("scheduler_locks");
        if (!locksExist) {
          await knex.schema.createTable("scheduler_locks", (table) => {
            table.string("lock_id", 255).primary();
            table.string("node_id", 255).notNullable();
            table.string("lock_token", 36).notNullable();
            table.bigInteger("acquired_at").notNullable();
            table.bigInteger("expires_at").notNullable();
          });
        }
      }
      // Guarded accessor: throws if used before initialize().
      getKnex() {
        if (!this.knex) {
          throw new Error("[KnexStore] Not initialized. Call initialize() first.");
        }
        return this.knex;
      }
      // --- CRUD ---
      // Create a schedule: assigns a fresh UUID and resets counters/status
      // regardless of what the caller passed for those fields.
      async create(schedule) {
        const knex = this.getKnex();
        const newSchedule = {
          ...schedule,
          id: (0, import_uuid2.v4)(),
          createdAt: Date.now(),
          runCount: 0,
          failureCount: 0,
          status: "active"
        };
        await knex("schedules").insert(toInsertRow(newSchedule));
        logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
        return newSchedule;
      }
      // Import a schedule as-is (keeps its id); no-op if the id already exists.
      async importSchedule(schedule) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", schedule.id).first();
        if (existing) return;
        await knex("schedules").insert(toInsertRow(schedule));
      }
      async get(id) {
        const knex = this.getKnex();
        const row = await knex("schedules").where("id", id).first();
        return row ? fromDbRow2(row) : void 0;
      }
      // Read-modify-write partial update; the id is never patched.
      // Returns the merged schedule, or undefined when the id is unknown.
      async update(id, patch) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", id).first();
        if (!existing) return void 0;
        const current = fromDbRow2(existing);
        const updated = { ...current, ...patch, id: current.id };
        const row = toInsertRow(updated);
        delete row.id;
        await knex("schedules").where("id", id).update(row);
        return updated;
      }
      async delete(id) {
        const knex = this.getKnex();
        const deleted = await knex("schedules").where("id", id).del();
        if (deleted > 0) {
          logger.info(`[KnexStore] Deleted schedule ${id}`);
          return true;
        }
        return false;
      }
      // --- Queries ---
      async getByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("creator_id", creatorId);
        return rows.map((r) => fromDbRow2(r));
      }
      async getActiveSchedules() {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("status", "active");
        return rows.map((r) => fromDbRow2(r));
      }
      // Active schedules whose trigger time has passed: one-shot rows match
      // on run_at, recurring rows on next_run_at. MSSQL has no boolean
      // literal, so 1/0 stand in for true/false there.
      async getDueSchedules(now) {
        const ts = now ?? Date.now();
        const knex = this.getKnex();
        const bFalse = this.driver === "mssql" ? 0 : false;
        const bTrue = this.driver === "mssql" ? 1 : true;
        const rows = await knex("schedules").where("status", "active").andWhere(function() {
          this.where(function() {
            this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
          }).orWhere(function() {
            this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
          });
        });
        return rows.map((r) => fromDbRow2(r));
      }
      // Case-insensitive substring match on workflow name. LIKE wildcards
      // in the needle are escaped so user input is matched literally.
      async findByWorkflow(creatorId, workflowName) {
        const knex = this.getKnex();
        const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
        const pattern = `%${escaped}%`;
        const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
        return rows.map((r) => fromDbRow2(r));
      }
      async getAll() {
        const knex = this.getKnex();
        const rows = await knex("schedules");
        return rows.map((r) => fromDbRow2(r));
      }
      // Aggregate counts by status and recurrence, computed in one query.
      // SUM results may come back as strings depending on the driver, hence
      // the Number(...) coercion with a 0 fallback.
      async getStats() {
        const knex = this.getKnex();
        const boolTrue = this.driver === "mssql" ? "1" : "true";
        const boolFalse = this.driver === "mssql" ? "0" : "false";
        const result = await knex("schedules").select(
          knex.raw("COUNT(*) as total"),
          knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
          knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
          knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
          knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
        ).first();
        return {
          total: Number(result.total) || 0,
          active: Number(result.active) || 0,
          paused: Number(result.paused) || 0,
          completed: Number(result.completed) || 0,
          failed: Number(result.failed) || 0,
          recurring: Number(result.recurring) || 0,
          oneTime: Number(result.one_time) || 0
        };
      }
      // Enforce global / per-user / per-user-recurring schedule quotas;
      // throws a descriptive error when a configured limit is reached.
      async validateLimits(creatorId, isRecurring, limits) {
        const knex = this.getKnex();
        if (limits.maxGlobal) {
          const result = await knex("schedules").count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxGlobal) {
            throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
          }
        }
        if (limits.maxPerUser) {
          const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxPerUser) {
            throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
          }
        }
        if (isRecurring && limits.maxRecurringPerUser) {
          const bTrue = this.driver === "mssql" ? 1 : true;
          const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
            throw new Error(
              `You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
            );
          }
        }
      }
      // --- HA Distributed Locking (via scheduler_locks table) ---
      // Try to take the lock: first steal an expired row, else insert a new
      // one (the primary key makes the insert race-safe — the loser's
      // insert throws and we return null). Returns the lock token on success.
      async tryAcquireLock(lockId, nodeId, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const token = (0, import_uuid2.v4)();
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
          node_id: nodeId,
          lock_token: token,
          acquired_at: now,
          expires_at: expiresAt
        });
        if (updated > 0) return token;
        try {
          await knex("scheduler_locks").insert({
            lock_id: lockId,
            node_id: nodeId,
            lock_token: token,
            acquired_at: now,
            expires_at: expiresAt
          });
          return token;
        } catch {
          // Another node holds the lock (duplicate-key on insert).
          return null;
        }
      }
      // Release only if we still hold it (token must match).
      async releaseLock(lockId, lockToken) {
        const knex = this.getKnex();
        await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
      }
      // Extend the TTL of a lock we hold; returns false if the token no
      // longer matches (lock lost or expired-and-stolen).
      async renewLock(lockId, lockToken, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
        return updated > 0;
      }
      // No-op: every write above is committed immediately by the database.
      async flush() {
      }
      // --- Message Trigger CRUD ---
      // Create a trigger with a fresh UUID and creation timestamp.
      async createTrigger(trigger) {
        const knex = this.getKnex();
        const newTrigger = {
          ...trigger,
          id: (0, import_uuid2.v4)(),
          createdAt: Date.now()
        };
        await knex("message_triggers").insert(toTriggerInsertRow(newTrigger));
        logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
        return newTrigger;
      }
      async getTrigger(id) {
        const knex = this.getKnex();
        const row = await knex("message_triggers").where("id", id).first();
        return row ? fromTriggerRow2(row) : void 0;
      }
      // Partial update; id and createdAt are immutable.
      async updateTrigger(id, patch) {
        const knex = this.getKnex();
        const existing = await knex("message_triggers").where("id", id).first();
        if (!existing) return void 0;
        const current = fromTriggerRow2(existing);
        const updated = {
          ...current,
          ...patch,
          id: current.id,
          createdAt: current.createdAt
        };
        const row = toTriggerInsertRow(updated);
        delete row.id;
        await knex("message_triggers").where("id", id).update(row);
        return updated;
      }
      async deleteTrigger(id) {
        const knex = this.getKnex();
        const deleted = await knex("message_triggers").where("id", id).del();
        if (deleted > 0) {
          logger.info(`[KnexStore] Deleted trigger ${id}`);
          return true;
        }
        return false;
      }
      async getTriggersByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex("message_triggers").where("creator_id", creatorId);
        return rows.map((r) => fromTriggerRow2(r));
      }
      // Triggers that are both status=active and enabled (1/0 on MSSQL).
      async getActiveTriggers() {
        const knex = this.getKnex();
        const rows = await knex("message_triggers").where("status", "active").where("enabled", this.driver === "mssql" ? 1 : true);
        return rows.map((r) => fromTriggerRow2(r));
      }
    };
  }
});
56016
+
56017
// src/enterprise/loader.ts
// Public surface of the enterprise loader module (esbuild __export shim).
var loader_exports = {};
__export(loader_exports, {
  loadEnterprisePolicyEngine: () => loadEnterprisePolicyEngine,
  loadEnterpriseStoreBackend: () => loadEnterpriseStoreBackend
});
56023
// Load the enterprise (OPA-based) policy engine when a valid license with
// the "policy" feature is present; otherwise — or on any initialization
// error — fall back to the DefaultPolicyEngine. Never throws.
async function loadEnterprisePolicyEngine(config) {
  try {
    // Lazy bundler-style import so license/OPA code is only evaluated
    // when policy support is actually requested.
    const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
    const validator = new LicenseValidator2();
    const license = await validator.loadAndValidate();
    if (!license || !validator.hasFeature("policy")) {
      // No license or feature not included: silently use the default engine.
      return new DefaultPolicyEngine();
    }
    if (validator.isInGracePeriod()) {
      console.warn(
        "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
      );
    }
    const { OpaPolicyEngine: OpaPolicyEngine2 } = await Promise.resolve().then(() => (init_opa_policy_engine(), opa_policy_engine_exports));
    const engine = new OpaPolicyEngine2(config);
    await engine.initialize(config);
    return engine;
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    try {
      // Best-effort logging: the logger module itself may fail to load.
      const { logger: logger2 } = (init_logger(), __toCommonJS(logger_exports));
      logger2.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
    } catch {
    }
    // Degrade gracefully rather than blocking startup.
    return new DefaultPolicyEngine();
  }
}
56050
// Load the SQL-backed schedule store (KnexStoreBackend). Unlike the policy
// loader this is strict: it throws when no valid license with the
// "scheduler-sql" feature is present, because there is no safe SQL fallback.
async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
  const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
  const validator = new LicenseValidator2();
  const license = await validator.loadAndValidate();
  if (!license || !validator.hasFeature("scheduler-sql")) {
    throw new Error(
      `The ${driver} schedule storage driver requires a Visor Enterprise license with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`
    );
  }
  if (validator.isInGracePeriod()) {
    console.warn(
      "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
    );
  }
  // Lazily pull in the knex-store module; the backend still needs
  // initialize() to be called by the caller before use.
  const { KnexStoreBackend: KnexStoreBackend2 } = await Promise.resolve().then(() => (init_knex_store(), knex_store_exports));
  return new KnexStoreBackend2(driver, storageConfig, haConfig);
}
56067
// Lazy initializer for the loader module: only eagerly wires the default
// policy engine; enterprise modules are imported on demand above.
var init_loader = __esm({
  "src/enterprise/loader.ts"() {
    "use strict";
    init_default_engine();
  }
});
56073
+
54601
56074
  // src/event-bus/event-bus.ts
54602
56075
  var event_bus_exports = {};
54603
56076
  __export(event_bus_exports, {
@@ -55504,8 +56977,8 @@ ${content}
55504
56977
  * Sleep utility
55505
56978
  */
55506
56979
  sleep(ms) {
55507
- return new Promise((resolve14) => {
55508
- const t = setTimeout(resolve14, ms);
56980
+ return new Promise((resolve19) => {
56981
+ const t = setTimeout(resolve19, ms);
55509
56982
  if (typeof t.unref === "function") {
55510
56983
  try {
55511
56984
  t.unref();
@@ -55790,8 +57263,8 @@ ${end}`);
55790
57263
  async updateGroupedComment(ctx, comments, group, changedIds) {
55791
57264
  const existingLock = this.updateLocks.get(group);
55792
57265
  let resolveLock;
55793
- const ourLock = new Promise((resolve14) => {
55794
- resolveLock = resolve14;
57266
+ const ourLock = new Promise((resolve19) => {
57267
+ resolveLock = resolve19;
55795
57268
  });
55796
57269
  this.updateLocks.set(group, ourLock);
55797
57270
  try {
@@ -56104,7 +57577,7 @@ ${blocks}
56104
57577
  * Sleep utility for enforcing delays
56105
57578
  */
56106
57579
  sleep(ms) {
56107
- return new Promise((resolve14) => setTimeout(resolve14, ms));
57580
+ return new Promise((resolve19) => setTimeout(resolve19, ms));
56108
57581
  }
56109
57582
  };
56110
57583
  }
@@ -57375,15 +58848,15 @@ function serializeRunState(state) {
57375
58848
  ])
57376
58849
  };
57377
58850
  }
57378
- var path26, fs22, StateMachineExecutionEngine;
58851
+ var path30, fs26, StateMachineExecutionEngine;
57379
58852
  var init_state_machine_execution_engine = __esm({
57380
58853
  "src/state-machine-execution-engine.ts"() {
57381
58854
  "use strict";
57382
58855
  init_runner();
57383
58856
  init_logger();
57384
58857
  init_sandbox_manager();
57385
- path26 = __toESM(require("path"));
57386
- fs22 = __toESM(require("fs"));
58858
+ path30 = __toESM(require("path"));
58859
+ fs26 = __toESM(require("fs"));
57387
58860
  StateMachineExecutionEngine = class _StateMachineExecutionEngine {
57388
58861
  workingDirectory;
57389
58862
  executionContext;
@@ -57615,8 +59088,8 @@ var init_state_machine_execution_engine = __esm({
57615
59088
  logger.debug(
57616
59089
  `[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`
57617
59090
  );
57618
- const { loadEnterprisePolicyEngine } = await import("./enterprise/loader");
57619
- context2.policyEngine = await loadEnterprisePolicyEngine(configWithTagFilter.policy);
59091
+ const { loadEnterprisePolicyEngine: loadEnterprisePolicyEngine2 } = await Promise.resolve().then(() => (init_loader(), loader_exports));
59092
+ context2.policyEngine = await loadEnterprisePolicyEngine2(configWithTagFilter.policy);
57620
59093
  logger.debug(
57621
59094
  `[PolicyEngine] Initialized: ${context2.policyEngine?.constructor?.name || "unknown"}`
57622
59095
  );
@@ -57768,9 +59241,9 @@ var init_state_machine_execution_engine = __esm({
57768
59241
  }
57769
59242
  const checkId = String(ev?.checkId || "unknown");
57770
59243
  const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
57771
- const baseDir = process.env.VISOR_SNAPSHOT_DIR || path26.resolve(process.cwd(), ".visor", "snapshots");
57772
- fs22.mkdirSync(baseDir, { recursive: true });
57773
- const filePath = path26.join(baseDir, `${threadKey}-${checkId}.json`);
59244
+ const baseDir = process.env.VISOR_SNAPSHOT_DIR || path30.resolve(process.cwd(), ".visor", "snapshots");
59245
+ fs26.mkdirSync(baseDir, { recursive: true });
59246
+ const filePath = path30.join(baseDir, `${threadKey}-${checkId}.json`);
57774
59247
  await this.saveSnapshotToFile(filePath);
57775
59248
  logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
57776
59249
  try {
@@ -57911,7 +59384,7 @@ var init_state_machine_execution_engine = __esm({
57911
59384
  * Does not include secrets. Intended for debugging and future resume support.
57912
59385
  */
57913
59386
  async saveSnapshotToFile(filePath) {
57914
- const fs23 = await import("fs/promises");
59387
+ const fs27 = await import("fs/promises");
57915
59388
  const ctx = this._lastContext;
57916
59389
  const runner = this._lastRunner;
57917
59390
  if (!ctx || !runner) {
@@ -57931,14 +59404,14 @@ var init_state_machine_execution_engine = __esm({
57931
59404
  journal: entries,
57932
59405
  requestedChecks: ctx.requestedChecks || []
57933
59406
  };
57934
- await fs23.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
59407
+ await fs27.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
57935
59408
  }
57936
59409
  /**
57937
59410
  * Load a snapshot JSON from file and return it. Resume support can build on this.
57938
59411
  */
57939
59412
  async loadSnapshotFromFile(filePath) {
57940
- const fs23 = await import("fs/promises");
57941
- const raw = await fs23.readFile(filePath, "utf8");
59413
+ const fs27 = await import("fs/promises");
59414
+ const raw = await fs27.readFile(filePath, "utf8");
57942
59415
  return JSON.parse(raw);
57943
59416
  }
57944
59417
  /**