@probelabs/visor 0.1.156 → 0.1.157-ee

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/defaults/assistant.yaml +76 -8
  2. package/defaults/code-talk.yaml +20 -6
  3. package/dist/defaults/assistant.yaml +76 -8
  4. package/dist/defaults/code-talk.yaml +20 -6
  5. package/dist/index.js +1842 -25
  6. package/dist/sdk/{check-provider-registry-GJ4EZAIO.mjs → check-provider-registry-OBUYAPPC.mjs} +2 -2
  7. package/dist/sdk/{chunk-D7STLGAD.mjs → chunk-2XSKH755.mjs} +9 -9
  8. package/dist/sdk/{chunk-2UN6C3VO.mjs.map → chunk-2XSKH755.mjs.map} +1 -1
  9. package/dist/sdk/{host-3CBVPE5U.mjs → host-MIHKJ63G.mjs} +2 -2
  10. package/dist/sdk/knex-store-CRORFJE6.mjs +527 -0
  11. package/dist/sdk/knex-store-CRORFJE6.mjs.map +1 -0
  12. package/dist/sdk/loader-NJCF7DUS.mjs +89 -0
  13. package/dist/sdk/loader-NJCF7DUS.mjs.map +1 -0
  14. package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs +655 -0
  15. package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs.map +1 -0
  16. package/dist/sdk/{schedule-tool-SLR7ZHBZ.mjs → schedule-tool-M2H7O5WL.mjs} +2 -2
  17. package/dist/sdk/{schedule-tool-handler-XVHYK4KT.mjs → schedule-tool-handler-J3CJWB6A.mjs} +2 -2
  18. package/dist/sdk/sdk.js +1633 -259
  19. package/dist/sdk/sdk.js.map +1 -1
  20. package/dist/sdk/sdk.mjs +4 -4
  21. package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
  22. package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
  23. package/dist/sdk/{workflow-check-provider-NYBQAYVH.mjs → workflow-check-provider-7RLM3T2I.mjs} +2 -2
  24. package/package.json +1 -1
  25. package/dist/output/traces/run-2026-03-04T18-51-17-205Z.ndjson +0 -138
  26. package/dist/output/traces/run-2026-03-04T18-52-01-841Z.ndjson +0 -2197
  27. package/dist/sdk/check-provider-registry-NXGF7OAN.mjs +0 -29
  28. package/dist/sdk/chunk-2UN6C3VO.mjs +0 -43774
  29. package/dist/sdk/chunk-D7STLGAD.mjs.map +0 -1
  30. package/dist/sdk/chunk-G5JCPBXD.mjs +0 -739
  31. package/dist/sdk/chunk-G5JCPBXD.mjs.map +0 -1
  32. package/dist/sdk/chunk-YNZCDEI5.mjs +0 -443
  33. package/dist/sdk/chunk-YNZCDEI5.mjs.map +0 -1
  34. package/dist/sdk/chunk-ZSXL4I4C.mjs +0 -1502
  35. package/dist/sdk/chunk-ZSXL4I4C.mjs.map +0 -1
  36. package/dist/sdk/failure-condition-evaluator-QGLA6IJ6.mjs +0 -17
  37. package/dist/sdk/github-frontend-G6NTMPEG.mjs +0 -1368
  38. package/dist/sdk/github-frontend-G6NTMPEG.mjs.map +0 -1
  39. package/dist/sdk/routing-GYSENS76.mjs +0 -25
  40. package/dist/sdk/schedule-tool-JLSHSHQA.mjs +0 -35
  41. package/dist/sdk/schedule-tool-JLSHSHQA.mjs.map +0 -1
  42. package/dist/sdk/schedule-tool-SLR7ZHBZ.mjs.map +0 -1
  43. package/dist/sdk/schedule-tool-handler-5A37VO62.mjs +0 -39
  44. package/dist/sdk/schedule-tool-handler-5A37VO62.mjs.map +0 -1
  45. package/dist/sdk/schedule-tool-handler-XVHYK4KT.mjs.map +0 -1
  46. package/dist/sdk/trace-helpers-HU65M6UT.mjs +0 -25
  47. package/dist/sdk/trace-helpers-HU65M6UT.mjs.map +0 -1
  48. package/dist/sdk/workflow-check-provider-M77ZHQDH.mjs +0 -29
  49. package/dist/sdk/workflow-check-provider-M77ZHQDH.mjs.map +0 -1
  50. package/dist/sdk/workflow-check-provider-NYBQAYVH.mjs.map +0 -1
  51. package/dist/traces/run-2026-03-04T18-51-17-205Z.ndjson +0 -138
  52. package/dist/traces/run-2026-03-04T18-52-01-841Z.ndjson +0 -2197
  53. /package/dist/sdk/{check-provider-registry-GJ4EZAIO.mjs.map → check-provider-registry-OBUYAPPC.mjs.map} +0 -0
  54. /package/dist/sdk/{host-3CBVPE5U.mjs.map → host-MIHKJ63G.mjs.map} +0 -0
  55. /package/dist/sdk/{check-provider-registry-NXGF7OAN.mjs.map → schedule-tool-M2H7O5WL.mjs.map} +0 -0
  56. /package/dist/sdk/{failure-condition-evaluator-QGLA6IJ6.mjs.map → schedule-tool-handler-J3CJWB6A.mjs.map} +0 -0
  57. /package/dist/sdk/{routing-GYSENS76.mjs.map → workflow-check-provider-7RLM3T2I.mjs.map} +0 -0
package/dist/sdk/sdk.js CHANGED
@@ -646,7 +646,7 @@ var require_package = __commonJS({
646
646
  "package.json"(exports2, module2) {
647
647
  module2.exports = {
648
648
  name: "@probelabs/visor",
649
- version: "0.1.156",
649
+ version: "0.1.42",
650
650
  main: "dist/index.js",
651
651
  bin: {
652
652
  visor: "./dist/index.js"
@@ -864,11 +864,11 @@ function getTracer() {
864
864
  }
865
865
  async function withActiveSpan(name, attrs, fn) {
866
866
  const tracer = getTracer();
867
- return await new Promise((resolve15, reject) => {
867
+ return await new Promise((resolve19, reject) => {
868
868
  const callback = async (span) => {
869
869
  try {
870
870
  const res = await fn(span);
871
- resolve15(res);
871
+ resolve19(res);
872
872
  } catch (err) {
873
873
  try {
874
874
  if (err instanceof Error) span.recordException(err);
@@ -945,19 +945,19 @@ function __getOrCreateNdjsonPath() {
945
945
  try {
946
946
  if (process.env.VISOR_TELEMETRY_SINK && process.env.VISOR_TELEMETRY_SINK !== "file")
947
947
  return null;
948
- const path27 = require("path");
949
- const fs23 = require("fs");
948
+ const path31 = require("path");
949
+ const fs27 = require("fs");
950
950
  if (process.env.VISOR_FALLBACK_TRACE_FILE) {
951
951
  __ndjsonPath = process.env.VISOR_FALLBACK_TRACE_FILE;
952
- const dir = path27.dirname(__ndjsonPath);
953
- if (!fs23.existsSync(dir)) fs23.mkdirSync(dir, { recursive: true });
952
+ const dir = path31.dirname(__ndjsonPath);
953
+ if (!fs27.existsSync(dir)) fs27.mkdirSync(dir, { recursive: true });
954
954
  return __ndjsonPath;
955
955
  }
956
- const outDir = process.env.VISOR_TRACE_DIR || path27.join(process.cwd(), "output", "traces");
957
- if (!fs23.existsSync(outDir)) fs23.mkdirSync(outDir, { recursive: true });
956
+ const outDir = process.env.VISOR_TRACE_DIR || path31.join(process.cwd(), "output", "traces");
957
+ if (!fs27.existsSync(outDir)) fs27.mkdirSync(outDir, { recursive: true });
958
958
  if (!__ndjsonPath) {
959
959
  const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
960
- __ndjsonPath = path27.join(outDir, `${ts}.ndjson`);
960
+ __ndjsonPath = path31.join(outDir, `${ts}.ndjson`);
961
961
  }
962
962
  return __ndjsonPath;
963
963
  } catch {
@@ -966,11 +966,11 @@ function __getOrCreateNdjsonPath() {
966
966
  }
967
967
  function _appendRunMarker() {
968
968
  try {
969
- const fs23 = require("fs");
969
+ const fs27 = require("fs");
970
970
  const p = __getOrCreateNdjsonPath();
971
971
  if (!p) return;
972
972
  const line = { name: "visor.run", attributes: { started: true } };
973
- fs23.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
973
+ fs27.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
974
974
  } catch {
975
975
  }
976
976
  }
@@ -3193,7 +3193,7 @@ var init_failure_condition_evaluator = __esm({
3193
3193
  */
3194
3194
  evaluateExpression(condition, context2) {
3195
3195
  try {
3196
- const normalize4 = (expr) => {
3196
+ const normalize8 = (expr) => {
3197
3197
  const trimmed = expr.trim();
3198
3198
  if (!/[\n;]/.test(trimmed)) return trimmed;
3199
3199
  const parts = trimmed.split(/[\n;]+/).map((s) => s.trim()).filter((s) => s.length > 0 && !s.startsWith("//"));
@@ -3351,7 +3351,7 @@ var init_failure_condition_evaluator = __esm({
3351
3351
  try {
3352
3352
  exec2 = this.sandbox.compile(`return (${raw});`);
3353
3353
  } catch {
3354
- const normalizedExpr = normalize4(condition);
3354
+ const normalizedExpr = normalize8(condition);
3355
3355
  exec2 = this.sandbox.compile(`return (${normalizedExpr});`);
3356
3356
  }
3357
3357
  const result = exec2(scope).run();
@@ -3734,9 +3734,9 @@ function configureLiquidWithExtensions(liquid) {
3734
3734
  });
3735
3735
  liquid.registerFilter("get", (obj, pathExpr) => {
3736
3736
  if (obj == null) return void 0;
3737
- const path27 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
3738
- if (!path27) return obj;
3739
- const parts = path27.split(".");
3737
+ const path31 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
3738
+ if (!path31) return obj;
3739
+ const parts = path31.split(".");
3740
3740
  let cur = obj;
3741
3741
  for (const p of parts) {
3742
3742
  if (cur == null) return void 0;
@@ -3855,9 +3855,9 @@ function configureLiquidWithExtensions(liquid) {
3855
3855
  }
3856
3856
  }
3857
3857
  const defaultRole = typeof rolesCfg.default === "string" && rolesCfg.default.trim() ? rolesCfg.default.trim() : void 0;
3858
- const getNested = (obj, path27) => {
3859
- if (!obj || !path27) return void 0;
3860
- const parts = path27.split(".");
3858
+ const getNested = (obj, path31) => {
3859
+ if (!obj || !path31) return void 0;
3860
+ const parts = path31.split(".");
3861
3861
  let cur = obj;
3862
3862
  for (const p of parts) {
3863
3863
  if (cur == null) return void 0;
@@ -6409,8 +6409,8 @@ var init_dependency_gating = __esm({
6409
6409
  async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
6410
6410
  try {
6411
6411
  const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
6412
- const fs23 = await import("fs/promises");
6413
- const path27 = await import("path");
6412
+ const fs27 = await import("fs/promises");
6413
+ const path31 = await import("path");
6414
6414
  const schemaRaw = checkConfig.schema || "plain";
6415
6415
  const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
6416
6416
  let templateContent;
@@ -6418,24 +6418,24 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
6418
6418
  templateContent = String(checkConfig.template.content);
6419
6419
  } else if (checkConfig.template && checkConfig.template.file) {
6420
6420
  const file = String(checkConfig.template.file);
6421
- const resolved = path27.resolve(process.cwd(), file);
6422
- templateContent = await fs23.readFile(resolved, "utf-8");
6421
+ const resolved = path31.resolve(process.cwd(), file);
6422
+ templateContent = await fs27.readFile(resolved, "utf-8");
6423
6423
  } else if (schema && schema !== "plain") {
6424
6424
  const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
6425
6425
  if (sanitized) {
6426
6426
  const candidatePaths = [
6427
- path27.join(__dirname, "output", sanitized, "template.liquid"),
6427
+ path31.join(__dirname, "output", sanitized, "template.liquid"),
6428
6428
  // bundled: dist/output/
6429
- path27.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
6429
+ path31.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
6430
6430
  // source: output/
6431
- path27.join(process.cwd(), "output", sanitized, "template.liquid"),
6431
+ path31.join(process.cwd(), "output", sanitized, "template.liquid"),
6432
6432
  // fallback: cwd/output/
6433
- path27.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
6433
+ path31.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
6434
6434
  // fallback: cwd/dist/output/
6435
6435
  ];
6436
6436
  for (const p of candidatePaths) {
6437
6437
  try {
6438
- templateContent = await fs23.readFile(p, "utf-8");
6438
+ templateContent = await fs27.readFile(p, "utf-8");
6439
6439
  if (templateContent) break;
6440
6440
  } catch {
6441
6441
  }
@@ -6840,7 +6840,7 @@ async function processDiffWithOutline(diffContent) {
6840
6840
  }
6841
6841
  try {
6842
6842
  const originalProbePath = process.env.PROBE_PATH;
6843
- const fs23 = require("fs");
6843
+ const fs27 = require("fs");
6844
6844
  const possiblePaths = [
6845
6845
  // Relative to current working directory (most common in production)
6846
6846
  path6.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
@@ -6851,7 +6851,7 @@ async function processDiffWithOutline(diffContent) {
6851
6851
  ];
6852
6852
  let probeBinaryPath;
6853
6853
  for (const candidatePath of possiblePaths) {
6854
- if (fs23.existsSync(candidatePath)) {
6854
+ if (fs27.existsSync(candidatePath)) {
6855
6855
  probeBinaryPath = candidatePath;
6856
6856
  break;
6857
6857
  }
@@ -6972,7 +6972,7 @@ async function renderMermaidToPng(mermaidCode) {
6972
6972
  if (chromiumPath) {
6973
6973
  env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;
6974
6974
  }
6975
- const result = await new Promise((resolve15) => {
6975
+ const result = await new Promise((resolve19) => {
6976
6976
  const proc = (0, import_child_process.spawn)(
6977
6977
  "npx",
6978
6978
  [
@@ -7002,13 +7002,13 @@ async function renderMermaidToPng(mermaidCode) {
7002
7002
  });
7003
7003
  proc.on("close", (code) => {
7004
7004
  if (code === 0) {
7005
- resolve15({ success: true });
7005
+ resolve19({ success: true });
7006
7006
  } else {
7007
- resolve15({ success: false, error: stderr || `Exit code ${code}` });
7007
+ resolve19({ success: false, error: stderr || `Exit code ${code}` });
7008
7008
  }
7009
7009
  });
7010
7010
  proc.on("error", (err) => {
7011
- resolve15({ success: false, error: err.message });
7011
+ resolve19({ success: false, error: err.message });
7012
7012
  });
7013
7013
  });
7014
7014
  if (!result.success) {
@@ -8170,8 +8170,8 @@ ${schemaString}`);
8170
8170
  }
8171
8171
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8172
8172
  try {
8173
- const fs23 = require("fs");
8174
- const path27 = require("path");
8173
+ const fs27 = require("fs");
8174
+ const path31 = require("path");
8175
8175
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8176
8176
  const provider = this.config.provider || "auto";
8177
8177
  const model = this.config.model || "default";
@@ -8285,20 +8285,20 @@ ${"=".repeat(60)}
8285
8285
  `;
8286
8286
  readableVersion += `${"=".repeat(60)}
8287
8287
  `;
8288
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8289
- if (!fs23.existsSync(debugArtifactsDir)) {
8290
- fs23.mkdirSync(debugArtifactsDir, { recursive: true });
8288
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8289
+ if (!fs27.existsSync(debugArtifactsDir)) {
8290
+ fs27.mkdirSync(debugArtifactsDir, { recursive: true });
8291
8291
  }
8292
- const debugFile = path27.join(
8292
+ const debugFile = path31.join(
8293
8293
  debugArtifactsDir,
8294
8294
  `prompt-${_checkName || "unknown"}-${timestamp}.json`
8295
8295
  );
8296
- fs23.writeFileSync(debugFile, debugJson, "utf-8");
8297
- const readableFile = path27.join(
8296
+ fs27.writeFileSync(debugFile, debugJson, "utf-8");
8297
+ const readableFile = path31.join(
8298
8298
  debugArtifactsDir,
8299
8299
  `prompt-${_checkName || "unknown"}-${timestamp}.txt`
8300
8300
  );
8301
- fs23.writeFileSync(readableFile, readableVersion, "utf-8");
8301
+ fs27.writeFileSync(readableFile, readableVersion, "utf-8");
8302
8302
  log(`
8303
8303
  \u{1F4BE} Full debug info saved to:`);
8304
8304
  log(` JSON: ${debugFile}`);
@@ -8331,8 +8331,8 @@ ${"=".repeat(60)}
8331
8331
  log(`\u{1F4E4} Response length: ${response.length} characters`);
8332
8332
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8333
8333
  try {
8334
- const fs23 = require("fs");
8335
- const path27 = require("path");
8334
+ const fs27 = require("fs");
8335
+ const path31 = require("path");
8336
8336
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8337
8337
  const agentAny2 = agent;
8338
8338
  let fullHistory = [];
@@ -8343,8 +8343,8 @@ ${"=".repeat(60)}
8343
8343
  } else if (agentAny2._messages) {
8344
8344
  fullHistory = agentAny2._messages;
8345
8345
  }
8346
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8347
- const sessionBase = path27.join(
8346
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8347
+ const sessionBase = path31.join(
8348
8348
  debugArtifactsDir,
8349
8349
  `session-${_checkName || "unknown"}-${timestamp}`
8350
8350
  );
@@ -8356,7 +8356,7 @@ ${"=".repeat(60)}
8356
8356
  schema: effectiveSchema,
8357
8357
  totalMessages: fullHistory.length
8358
8358
  };
8359
- fs23.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8359
+ fs27.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8360
8360
  let readable = `=============================================================
8361
8361
  `;
8362
8362
  readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
@@ -8383,7 +8383,7 @@ ${"=".repeat(60)}
8383
8383
  `;
8384
8384
  readable += content + "\n";
8385
8385
  });
8386
- fs23.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8386
+ fs27.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8387
8387
  log(`\u{1F4BE} Complete session history saved:`);
8388
8388
  log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
8389
8389
  } catch (error) {
@@ -8392,11 +8392,11 @@ ${"=".repeat(60)}
8392
8392
  }
8393
8393
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8394
8394
  try {
8395
- const fs23 = require("fs");
8396
- const path27 = require("path");
8395
+ const fs27 = require("fs");
8396
+ const path31 = require("path");
8397
8397
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8398
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8399
- const responseFile = path27.join(
8398
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8399
+ const responseFile = path31.join(
8400
8400
  debugArtifactsDir,
8401
8401
  `response-${_checkName || "unknown"}-${timestamp}.txt`
8402
8402
  );
@@ -8429,7 +8429,7 @@ ${"=".repeat(60)}
8429
8429
  `;
8430
8430
  responseContent += `${"=".repeat(60)}
8431
8431
  `;
8432
- fs23.writeFileSync(responseFile, responseContent, "utf-8");
8432
+ fs27.writeFileSync(responseFile, responseContent, "utf-8");
8433
8433
  log(`\u{1F4BE} Response saved to: ${responseFile}`);
8434
8434
  } catch (error) {
8435
8435
  log(`\u26A0\uFE0F Could not save response file: ${error}`);
@@ -8445,9 +8445,9 @@ ${"=".repeat(60)}
8445
8445
  await agentAny._telemetryConfig.shutdown();
8446
8446
  log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
8447
8447
  if (process.env.GITHUB_ACTIONS) {
8448
- const fs23 = require("fs");
8449
- if (fs23.existsSync(agentAny._traceFilePath)) {
8450
- const stats = fs23.statSync(agentAny._traceFilePath);
8448
+ const fs27 = require("fs");
8449
+ if (fs27.existsSync(agentAny._traceFilePath)) {
8450
+ const stats = fs27.statSync(agentAny._traceFilePath);
8451
8451
  console.log(
8452
8452
  `::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
8453
8453
  );
@@ -8654,9 +8654,9 @@ ${schemaString}`);
8654
8654
  const model = this.config.model || "default";
8655
8655
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8656
8656
  try {
8657
- const fs23 = require("fs");
8658
- const path27 = require("path");
8659
- const os2 = require("os");
8657
+ const fs27 = require("fs");
8658
+ const path31 = require("path");
8659
+ const os3 = require("os");
8660
8660
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8661
8661
  const debugData = {
8662
8662
  timestamp,
@@ -8728,19 +8728,19 @@ ${"=".repeat(60)}
8728
8728
  `;
8729
8729
  readableVersion += `${"=".repeat(60)}
8730
8730
  `;
8731
- const tempDir = os2.tmpdir();
8732
- const promptFile = path27.join(tempDir, `visor-prompt-${timestamp}.txt`);
8733
- fs23.writeFileSync(promptFile, prompt, "utf-8");
8731
+ const tempDir = os3.tmpdir();
8732
+ const promptFile = path31.join(tempDir, `visor-prompt-${timestamp}.txt`);
8733
+ fs27.writeFileSync(promptFile, prompt, "utf-8");
8734
8734
  log(`
8735
8735
  \u{1F4BE} Prompt saved to: ${promptFile}`);
8736
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8736
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8737
8737
  try {
8738
- const base = path27.join(
8738
+ const base = path31.join(
8739
8739
  debugArtifactsDir,
8740
8740
  `prompt-${_checkName || "unknown"}-${timestamp}`
8741
8741
  );
8742
- fs23.writeFileSync(base + ".json", debugJson, "utf-8");
8743
- fs23.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
8742
+ fs27.writeFileSync(base + ".json", debugJson, "utf-8");
8743
+ fs27.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
8744
8744
  log(`
8745
8745
  \u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
8746
8746
  } catch {
@@ -8785,8 +8785,8 @@ $ ${cliCommand}
8785
8785
  log(`\u{1F4E4} Response length: ${response.length} characters`);
8786
8786
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8787
8787
  try {
8788
- const fs23 = require("fs");
8789
- const path27 = require("path");
8788
+ const fs27 = require("fs");
8789
+ const path31 = require("path");
8790
8790
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8791
8791
  const agentAny = agent;
8792
8792
  let fullHistory = [];
@@ -8797,8 +8797,8 @@ $ ${cliCommand}
8797
8797
  } else if (agentAny._messages) {
8798
8798
  fullHistory = agentAny._messages;
8799
8799
  }
8800
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8801
- const sessionBase = path27.join(
8800
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8801
+ const sessionBase = path31.join(
8802
8802
  debugArtifactsDir,
8803
8803
  `session-${_checkName || "unknown"}-${timestamp}`
8804
8804
  );
@@ -8810,7 +8810,7 @@ $ ${cliCommand}
8810
8810
  schema: effectiveSchema,
8811
8811
  totalMessages: fullHistory.length
8812
8812
  };
8813
- fs23.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8813
+ fs27.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8814
8814
  let readable = `=============================================================
8815
8815
  `;
8816
8816
  readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
@@ -8837,7 +8837,7 @@ ${"=".repeat(60)}
8837
8837
  `;
8838
8838
  readable += content + "\n";
8839
8839
  });
8840
- fs23.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8840
+ fs27.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8841
8841
  log(`\u{1F4BE} Complete session history saved:`);
8842
8842
  log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
8843
8843
  } catch (error) {
@@ -8846,11 +8846,11 @@ ${"=".repeat(60)}
8846
8846
  }
8847
8847
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8848
8848
  try {
8849
- const fs23 = require("fs");
8850
- const path27 = require("path");
8849
+ const fs27 = require("fs");
8850
+ const path31 = require("path");
8851
8851
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8852
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8853
- const responseFile = path27.join(
8852
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8853
+ const responseFile = path31.join(
8854
8854
  debugArtifactsDir,
8855
8855
  `response-${_checkName || "unknown"}-${timestamp}.txt`
8856
8856
  );
@@ -8883,7 +8883,7 @@ ${"=".repeat(60)}
8883
8883
  `;
8884
8884
  responseContent += `${"=".repeat(60)}
8885
8885
  `;
8886
- fs23.writeFileSync(responseFile, responseContent, "utf-8");
8886
+ fs27.writeFileSync(responseFile, responseContent, "utf-8");
8887
8887
  log(`\u{1F4BE} Response saved to: ${responseFile}`);
8888
8888
  } catch (error) {
8889
8889
  log(`\u26A0\uFE0F Could not save response file: ${error}`);
@@ -8901,9 +8901,9 @@ ${"=".repeat(60)}
8901
8901
  await telemetry.shutdown();
8902
8902
  log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
8903
8903
  if (process.env.GITHUB_ACTIONS) {
8904
- const fs23 = require("fs");
8905
- if (fs23.existsSync(traceFilePath)) {
8906
- const stats = fs23.statSync(traceFilePath);
8904
+ const fs27 = require("fs");
8905
+ if (fs27.existsSync(traceFilePath)) {
8906
+ const stats = fs27.statSync(traceFilePath);
8907
8907
  console.log(
8908
8908
  `::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
8909
8909
  );
@@ -8941,8 +8941,8 @@ ${"=".repeat(60)}
8941
8941
  * Load schema content from schema files or inline definitions
8942
8942
  */
8943
8943
  async loadSchemaContent(schema) {
8944
- const fs23 = require("fs").promises;
8945
- const path27 = require("path");
8944
+ const fs27 = require("fs").promises;
8945
+ const path31 = require("path");
8946
8946
  if (typeof schema === "object" && schema !== null) {
8947
8947
  log("\u{1F4CB} Using inline schema object from configuration");
8948
8948
  return JSON.stringify(schema);
@@ -8955,14 +8955,14 @@ ${"=".repeat(60)}
8955
8955
  }
8956
8956
  } catch {
8957
8957
  }
8958
- if ((schema.startsWith("./") || schema.includes(".json")) && !path27.isAbsolute(schema)) {
8958
+ if ((schema.startsWith("./") || schema.includes(".json")) && !path31.isAbsolute(schema)) {
8959
8959
  if (schema.includes("..") || schema.includes("\0")) {
8960
8960
  throw new Error("Invalid schema path: path traversal not allowed");
8961
8961
  }
8962
8962
  try {
8963
- const schemaPath = path27.resolve(process.cwd(), schema);
8963
+ const schemaPath = path31.resolve(process.cwd(), schema);
8964
8964
  log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
8965
- const schemaContent = await fs23.readFile(schemaPath, "utf-8");
8965
+ const schemaContent = await fs27.readFile(schemaPath, "utf-8");
8966
8966
  return schemaContent.trim();
8967
8967
  } catch (error) {
8968
8968
  throw new Error(
@@ -8976,22 +8976,22 @@ ${"=".repeat(60)}
8976
8976
  }
8977
8977
  const candidatePaths = [
8978
8978
  // GitHub Action bundle location
8979
- path27.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
8979
+ path31.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
8980
8980
  // Historical fallback when src/output was inadvertently bundled as output1/
8981
- path27.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
8981
+ path31.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
8982
8982
  // Local dev (repo root)
8983
- path27.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
8983
+ path31.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
8984
8984
  ];
8985
8985
  for (const schemaPath of candidatePaths) {
8986
8986
  try {
8987
- const schemaContent = await fs23.readFile(schemaPath, "utf-8");
8987
+ const schemaContent = await fs27.readFile(schemaPath, "utf-8");
8988
8988
  return schemaContent.trim();
8989
8989
  } catch {
8990
8990
  }
8991
8991
  }
8992
- const distPath = path27.join(__dirname, "output", sanitizedSchemaName, "schema.json");
8993
- const distAltPath = path27.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
8994
- const cwdPath = path27.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
8992
+ const distPath = path31.join(__dirname, "output", sanitizedSchemaName, "schema.json");
8993
+ const distAltPath = path31.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
8994
+ const cwdPath = path31.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
8995
8995
  throw new Error(
8996
8996
  `Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
8997
8997
  );
@@ -9236,7 +9236,7 @@ ${"=".repeat(60)}
9236
9236
  * Generate mock response for testing
9237
9237
  */
9238
9238
  async generateMockResponse(_prompt, _checkName, _schema) {
9239
- await new Promise((resolve15) => setTimeout(resolve15, 500));
9239
+ await new Promise((resolve19) => setTimeout(resolve19, 500));
9240
9240
  const name = (_checkName || "").toLowerCase();
9241
9241
  if (name.includes("extract-facts")) {
9242
9242
  const arr = Array.from({ length: 6 }, (_, i) => ({
@@ -9597,7 +9597,7 @@ var init_command_executor = __esm({
9597
9597
  * Execute command with stdin input
9598
9598
  */
9599
9599
  executeWithStdin(command, options) {
9600
- return new Promise((resolve15, reject) => {
9600
+ return new Promise((resolve19, reject) => {
9601
9601
  const childProcess = (0, import_child_process2.exec)(
9602
9602
  command,
9603
9603
  {
@@ -9609,7 +9609,7 @@ var init_command_executor = __esm({
9609
9609
  if (error && error.killed && (error.code === "ETIMEDOUT" || error.signal === "SIGTERM")) {
9610
9610
  reject(new Error(`Command timed out after ${options.timeout || 3e4}ms`));
9611
9611
  } else {
9612
- resolve15({
9612
+ resolve19({
9613
9613
  stdout: stdout || "",
9614
9614
  stderr: stderr || "",
9615
9615
  exitCode: error ? error.code || 1 : 0
@@ -17615,17 +17615,17 @@ var init_workflow_check_provider = __esm({
17615
17615
  * so it can be executed by the state machine as a nested workflow.
17616
17616
  */
17617
17617
  async loadWorkflowFromConfigPath(sourcePath, baseDir) {
17618
- const path27 = require("path");
17619
- const fs23 = require("fs");
17618
+ const path31 = require("path");
17619
+ const fs27 = require("fs");
17620
17620
  const yaml5 = require("js-yaml");
17621
- const resolved = path27.isAbsolute(sourcePath) ? sourcePath : path27.resolve(baseDir, sourcePath);
17622
- if (!fs23.existsSync(resolved)) {
17621
+ const resolved = path31.isAbsolute(sourcePath) ? sourcePath : path31.resolve(baseDir, sourcePath);
17622
+ if (!fs27.existsSync(resolved)) {
17623
17623
  throw new Error(`Workflow config not found at: ${resolved}`);
17624
17624
  }
17625
- const rawContent = fs23.readFileSync(resolved, "utf8");
17625
+ const rawContent = fs27.readFileSync(resolved, "utf8");
17626
17626
  const rawData = yaml5.load(rawContent);
17627
17627
  if (rawData.imports && Array.isArray(rawData.imports)) {
17628
- const configDir = path27.dirname(resolved);
17628
+ const configDir = path31.dirname(resolved);
17629
17629
  for (const source of rawData.imports) {
17630
17630
  const results = await this.registry.import(source, {
17631
17631
  basePath: configDir,
@@ -17655,8 +17655,8 @@ ${errors}`);
17655
17655
  if (!steps || Object.keys(steps).length === 0) {
17656
17656
  throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
17657
17657
  }
17658
- const id = path27.basename(resolved).replace(/\.(ya?ml)$/i, "");
17659
- const name = loaded.name || `Workflow from ${path27.basename(resolved)}`;
17658
+ const id = path31.basename(resolved).replace(/\.(ya?ml)$/i, "");
17659
+ const name = loaded.name || `Workflow from ${path31.basename(resolved)}`;
17660
17660
  const workflowDef = {
17661
17661
  id,
17662
17662
  name,
@@ -18462,8 +18462,8 @@ async function createStoreBackend(storageConfig, haConfig) {
18462
18462
  case "mssql": {
18463
18463
  try {
18464
18464
  const loaderPath = "../../enterprise/loader";
18465
- const { loadEnterpriseStoreBackend } = await import(loaderPath);
18466
- return await loadEnterpriseStoreBackend(driver, storageConfig, haConfig);
18465
+ const { loadEnterpriseStoreBackend: loadEnterpriseStoreBackend2 } = await import(loaderPath);
18466
+ return await loadEnterpriseStoreBackend2(driver, storageConfig, haConfig);
18467
18467
  } catch (err) {
18468
18468
  const msg = err instanceof Error ? err.message : String(err);
18469
18469
  logger.error(`[StoreFactory] Failed to load enterprise ${driver} backend: ${msg}`);
@@ -21028,7 +21028,7 @@ var init_mcp_custom_sse_server = __esm({
21028
21028
  * Returns the actual bound port number
21029
21029
  */
21030
21030
  async start() {
21031
- return new Promise((resolve15, reject) => {
21031
+ return new Promise((resolve19, reject) => {
21032
21032
  try {
21033
21033
  this.server = import_http.default.createServer((req, res) => {
21034
21034
  this.handleRequest(req, res).catch((error) => {
@@ -21062,7 +21062,7 @@ var init_mcp_custom_sse_server = __esm({
21062
21062
  );
21063
21063
  }
21064
21064
  this.startKeepalive();
21065
- resolve15(this.port);
21065
+ resolve19(this.port);
21066
21066
  });
21067
21067
  } catch (error) {
21068
21068
  reject(error);
@@ -21125,7 +21125,7 @@ var init_mcp_custom_sse_server = __esm({
21125
21125
  logger.debug(
21126
21126
  `[CustomToolsSSEServer:${this.sessionId}] Grace period before stop: ${waitMs}ms (activeToolCalls=${this.activeToolCalls})`
21127
21127
  );
21128
- await new Promise((resolve15) => setTimeout(resolve15, waitMs));
21128
+ await new Promise((resolve19) => setTimeout(resolve19, waitMs));
21129
21129
  }
21130
21130
  }
21131
21131
  if (this.activeToolCalls > 0) {
@@ -21134,7 +21134,7 @@ var init_mcp_custom_sse_server = __esm({
21134
21134
  `[CustomToolsSSEServer:${this.sessionId}] Waiting for ${this.activeToolCalls} active tool call(s) before stop`
21135
21135
  );
21136
21136
  while (this.activeToolCalls > 0 && Date.now() - startedAt < effectiveDrainTimeoutMs) {
21137
- await new Promise((resolve15) => setTimeout(resolve15, 250));
21137
+ await new Promise((resolve19) => setTimeout(resolve19, 250));
21138
21138
  }
21139
21139
  if (this.activeToolCalls > 0) {
21140
21140
  logger.warn(
@@ -21159,21 +21159,21 @@ var init_mcp_custom_sse_server = __esm({
21159
21159
  }
21160
21160
  this.connections.clear();
21161
21161
  if (this.server) {
21162
- await new Promise((resolve15, reject) => {
21162
+ await new Promise((resolve19, reject) => {
21163
21163
  const timeout = setTimeout(() => {
21164
21164
  if (this.debug) {
21165
21165
  logger.debug(
21166
21166
  `[CustomToolsSSEServer:${this.sessionId}] Force closing server after timeout`
21167
21167
  );
21168
21168
  }
21169
- this.server?.close(() => resolve15());
21169
+ this.server?.close(() => resolve19());
21170
21170
  }, 5e3);
21171
21171
  this.server.close((error) => {
21172
21172
  clearTimeout(timeout);
21173
21173
  if (error) {
21174
21174
  reject(error);
21175
21175
  } else {
21176
- resolve15();
21176
+ resolve19();
21177
21177
  }
21178
21178
  });
21179
21179
  });
@@ -21608,7 +21608,7 @@ var init_mcp_custom_sse_server = __esm({
21608
21608
  logger.warn(
21609
21609
  `[CustomToolsSSEServer:${this.sessionId}] Tool ${toolName} failed (attempt ${attempt + 1}/${retryCount + 1}): ${errorMsg}. Retrying in ${delay}ms`
21610
21610
  );
21611
- await new Promise((resolve15) => setTimeout(resolve15, delay));
21611
+ await new Promise((resolve19) => setTimeout(resolve19, delay));
21612
21612
  attempt++;
21613
21613
  }
21614
21614
  }
@@ -21921,9 +21921,9 @@ var init_ai_check_provider = __esm({
21921
21921
  } else {
21922
21922
  resolvedPath = import_path7.default.resolve(process.cwd(), str);
21923
21923
  }
21924
- const fs23 = require("fs").promises;
21924
+ const fs27 = require("fs").promises;
21925
21925
  try {
21926
- const stat2 = await fs23.stat(resolvedPath);
21926
+ const stat2 = await fs27.stat(resolvedPath);
21927
21927
  return stat2.isFile();
21928
21928
  } catch {
21929
21929
  return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
@@ -27851,14 +27851,14 @@ var require_util = __commonJS({
27851
27851
  }
27852
27852
  const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
27853
27853
  let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
27854
- let path27 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
27854
+ let path31 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
27855
27855
  if (origin.endsWith("/")) {
27856
27856
  origin = origin.substring(0, origin.length - 1);
27857
27857
  }
27858
- if (path27 && !path27.startsWith("/")) {
27859
- path27 = `/${path27}`;
27858
+ if (path31 && !path31.startsWith("/")) {
27859
+ path31 = `/${path31}`;
27860
27860
  }
27861
- url = new URL(origin + path27);
27861
+ url = new URL(origin + path31);
27862
27862
  }
27863
27863
  return url;
27864
27864
  }
@@ -29472,20 +29472,20 @@ var require_parseParams = __commonJS({
29472
29472
  var require_basename = __commonJS({
29473
29473
  "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
29474
29474
  "use strict";
29475
- module2.exports = function basename4(path27) {
29476
- if (typeof path27 !== "string") {
29475
+ module2.exports = function basename4(path31) {
29476
+ if (typeof path31 !== "string") {
29477
29477
  return "";
29478
29478
  }
29479
- for (var i = path27.length - 1; i >= 0; --i) {
29480
- switch (path27.charCodeAt(i)) {
29479
+ for (var i = path31.length - 1; i >= 0; --i) {
29480
+ switch (path31.charCodeAt(i)) {
29481
29481
  case 47:
29482
29482
  // '/'
29483
29483
  case 92:
29484
- path27 = path27.slice(i + 1);
29485
- return path27 === ".." || path27 === "." ? "" : path27;
29484
+ path31 = path31.slice(i + 1);
29485
+ return path31 === ".." || path31 === "." ? "" : path31;
29486
29486
  }
29487
29487
  }
29488
- return path27 === ".." || path27 === "." ? "" : path27;
29488
+ return path31 === ".." || path31 === "." ? "" : path31;
29489
29489
  };
29490
29490
  }
29491
29491
  });
@@ -30489,11 +30489,11 @@ var require_util2 = __commonJS({
30489
30489
  var assert = require("assert");
30490
30490
  var { isUint8Array } = require("util/types");
30491
30491
  var supportedHashes = [];
30492
- var crypto2;
30492
+ var crypto4;
30493
30493
  try {
30494
- crypto2 = require("crypto");
30494
+ crypto4 = require("crypto");
30495
30495
  const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
30496
- supportedHashes = crypto2.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
30496
+ supportedHashes = crypto4.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
30497
30497
  } catch {
30498
30498
  }
30499
30499
  function responseURL(response) {
@@ -30770,7 +30770,7 @@ var require_util2 = __commonJS({
30770
30770
  }
30771
30771
  }
30772
30772
  function bytesMatch(bytes, metadataList) {
30773
- if (crypto2 === void 0) {
30773
+ if (crypto4 === void 0) {
30774
30774
  return true;
30775
30775
  }
30776
30776
  const parsedMetadata = parseMetadata(metadataList);
@@ -30785,7 +30785,7 @@ var require_util2 = __commonJS({
30785
30785
  for (const item of metadata) {
30786
30786
  const algorithm = item.algo;
30787
30787
  const expectedValue = item.hash;
30788
- let actualValue = crypto2.createHash(algorithm).update(bytes).digest("base64");
30788
+ let actualValue = crypto4.createHash(algorithm).update(bytes).digest("base64");
30789
30789
  if (actualValue[actualValue.length - 1] === "=") {
30790
30790
  if (actualValue[actualValue.length - 2] === "=") {
30791
30791
  actualValue = actualValue.slice(0, -2);
@@ -30878,8 +30878,8 @@ var require_util2 = __commonJS({
30878
30878
  function createDeferredPromise() {
30879
30879
  let res;
30880
30880
  let rej;
30881
- const promise = new Promise((resolve15, reject) => {
30882
- res = resolve15;
30881
+ const promise = new Promise((resolve19, reject) => {
30882
+ res = resolve19;
30883
30883
  rej = reject;
30884
30884
  });
30885
30885
  return { promise, resolve: res, reject: rej };
@@ -32132,8 +32132,8 @@ var require_body = __commonJS({
32132
32132
  var { parseMIMEType, serializeAMimeType } = require_dataURL();
32133
32133
  var random;
32134
32134
  try {
32135
- const crypto2 = require("crypto");
32136
- random = (max) => crypto2.randomInt(0, max);
32135
+ const crypto4 = require("crypto");
32136
+ random = (max) => crypto4.randomInt(0, max);
32137
32137
  } catch {
32138
32138
  random = (max) => Math.floor(Math.random(max));
32139
32139
  }
@@ -32384,8 +32384,8 @@ Content-Type: ${value.type || "application/octet-stream"}\r
32384
32384
  });
32385
32385
  }
32386
32386
  });
32387
- const busboyResolve = new Promise((resolve15, reject) => {
32388
- busboy.on("finish", resolve15);
32387
+ const busboyResolve = new Promise((resolve19, reject) => {
32388
+ busboy.on("finish", resolve19);
32389
32389
  busboy.on("error", (err) => reject(new TypeError(err)));
32390
32390
  });
32391
32391
  if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk);
@@ -32516,7 +32516,7 @@ var require_request = __commonJS({
32516
32516
  }
32517
32517
  var Request = class _Request {
32518
32518
  constructor(origin, {
32519
- path: path27,
32519
+ path: path31,
32520
32520
  method,
32521
32521
  body,
32522
32522
  headers,
@@ -32530,11 +32530,11 @@ var require_request = __commonJS({
32530
32530
  throwOnError,
32531
32531
  expectContinue
32532
32532
  }, handler) {
32533
- if (typeof path27 !== "string") {
32533
+ if (typeof path31 !== "string") {
32534
32534
  throw new InvalidArgumentError("path must be a string");
32535
- } else if (path27[0] !== "/" && !(path27.startsWith("http://") || path27.startsWith("https://")) && method !== "CONNECT") {
32535
+ } else if (path31[0] !== "/" && !(path31.startsWith("http://") || path31.startsWith("https://")) && method !== "CONNECT") {
32536
32536
  throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
32537
- } else if (invalidPathRegex.exec(path27) !== null) {
32537
+ } else if (invalidPathRegex.exec(path31) !== null) {
32538
32538
  throw new InvalidArgumentError("invalid request path");
32539
32539
  }
32540
32540
  if (typeof method !== "string") {
@@ -32597,7 +32597,7 @@ var require_request = __commonJS({
32597
32597
  this.completed = false;
32598
32598
  this.aborted = false;
32599
32599
  this.upgrade = upgrade || null;
32600
- this.path = query ? util.buildURL(path27, query) : path27;
32600
+ this.path = query ? util.buildURL(path31, query) : path31;
32601
32601
  this.origin = origin;
32602
32602
  this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
32603
32603
  this.blocking = blocking == null ? false : blocking;
@@ -32919,9 +32919,9 @@ var require_dispatcher_base = __commonJS({
32919
32919
  }
32920
32920
  close(callback) {
32921
32921
  if (callback === void 0) {
32922
- return new Promise((resolve15, reject) => {
32922
+ return new Promise((resolve19, reject) => {
32923
32923
  this.close((err, data) => {
32924
- return err ? reject(err) : resolve15(data);
32924
+ return err ? reject(err) : resolve19(data);
32925
32925
  });
32926
32926
  });
32927
32927
  }
@@ -32959,12 +32959,12 @@ var require_dispatcher_base = __commonJS({
32959
32959
  err = null;
32960
32960
  }
32961
32961
  if (callback === void 0) {
32962
- return new Promise((resolve15, reject) => {
32962
+ return new Promise((resolve19, reject) => {
32963
32963
  this.destroy(err, (err2, data) => {
32964
32964
  return err2 ? (
32965
32965
  /* istanbul ignore next: should never error */
32966
32966
  reject(err2)
32967
- ) : resolve15(data);
32967
+ ) : resolve19(data);
32968
32968
  });
32969
32969
  });
32970
32970
  }
@@ -33605,9 +33605,9 @@ var require_RedirectHandler = __commonJS({
33605
33605
  return this.handler.onHeaders(statusCode, headers, resume, statusText);
33606
33606
  }
33607
33607
  const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
33608
- const path27 = search ? `${pathname}${search}` : pathname;
33608
+ const path31 = search ? `${pathname}${search}` : pathname;
33609
33609
  this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
33610
- this.opts.path = path27;
33610
+ this.opts.path = path31;
33611
33611
  this.opts.origin = origin;
33612
33612
  this.opts.maxRedirections = 0;
33613
33613
  this.opts.query = null;
@@ -34026,16 +34026,16 @@ var require_client = __commonJS({
34026
34026
  return this[kNeedDrain] < 2;
34027
34027
  }
34028
34028
  async [kClose]() {
34029
- return new Promise((resolve15) => {
34029
+ return new Promise((resolve19) => {
34030
34030
  if (!this[kSize]) {
34031
- resolve15(null);
34031
+ resolve19(null);
34032
34032
  } else {
34033
- this[kClosedResolve] = resolve15;
34033
+ this[kClosedResolve] = resolve19;
34034
34034
  }
34035
34035
  });
34036
34036
  }
34037
34037
  async [kDestroy](err) {
34038
- return new Promise((resolve15) => {
34038
+ return new Promise((resolve19) => {
34039
34039
  const requests = this[kQueue].splice(this[kPendingIdx]);
34040
34040
  for (let i = 0; i < requests.length; i++) {
34041
34041
  const request = requests[i];
@@ -34046,7 +34046,7 @@ var require_client = __commonJS({
34046
34046
  this[kClosedResolve]();
34047
34047
  this[kClosedResolve] = null;
34048
34048
  }
34049
- resolve15();
34049
+ resolve19();
34050
34050
  };
34051
34051
  if (this[kHTTP2Session] != null) {
34052
34052
  util.destroy(this[kHTTP2Session], err);
@@ -34626,7 +34626,7 @@ var require_client = __commonJS({
34626
34626
  });
34627
34627
  }
34628
34628
  try {
34629
- const socket = await new Promise((resolve15, reject) => {
34629
+ const socket = await new Promise((resolve19, reject) => {
34630
34630
  client[kConnector]({
34631
34631
  host,
34632
34632
  hostname,
@@ -34638,7 +34638,7 @@ var require_client = __commonJS({
34638
34638
  if (err) {
34639
34639
  reject(err);
34640
34640
  } else {
34641
- resolve15(socket2);
34641
+ resolve19(socket2);
34642
34642
  }
34643
34643
  });
34644
34644
  });
@@ -34849,7 +34849,7 @@ var require_client = __commonJS({
34849
34849
  writeH2(client, client[kHTTP2Session], request);
34850
34850
  return;
34851
34851
  }
34852
- const { body, method, path: path27, host, upgrade, headers, blocking, reset } = request;
34852
+ const { body, method, path: path31, host, upgrade, headers, blocking, reset } = request;
34853
34853
  const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
34854
34854
  if (body && typeof body.read === "function") {
34855
34855
  body.read(0);
@@ -34899,7 +34899,7 @@ var require_client = __commonJS({
34899
34899
  if (blocking) {
34900
34900
  socket[kBlocking] = true;
34901
34901
  }
34902
- let header = `${method} ${path27} HTTP/1.1\r
34902
+ let header = `${method} ${path31} HTTP/1.1\r
34903
34903
  `;
34904
34904
  if (typeof host === "string") {
34905
34905
  header += `host: ${host}\r
@@ -34962,7 +34962,7 @@ upgrade: ${upgrade}\r
34962
34962
  return true;
34963
34963
  }
34964
34964
  function writeH2(client, session, request) {
34965
- const { body, method, path: path27, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
34965
+ const { body, method, path: path31, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
34966
34966
  let headers;
34967
34967
  if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim());
34968
34968
  else headers = reqHeaders;
@@ -35005,7 +35005,7 @@ upgrade: ${upgrade}\r
35005
35005
  });
35006
35006
  return true;
35007
35007
  }
35008
- headers[HTTP2_HEADER_PATH] = path27;
35008
+ headers[HTTP2_HEADER_PATH] = path31;
35009
35009
  headers[HTTP2_HEADER_SCHEME] = "https";
35010
35010
  const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
35011
35011
  if (body && typeof body.read === "function") {
@@ -35262,12 +35262,12 @@ upgrade: ${upgrade}\r
35262
35262
  cb();
35263
35263
  }
35264
35264
  }
35265
- const waitForDrain = () => new Promise((resolve15, reject) => {
35265
+ const waitForDrain = () => new Promise((resolve19, reject) => {
35266
35266
  assert(callback === null);
35267
35267
  if (socket[kError]) {
35268
35268
  reject(socket[kError]);
35269
35269
  } else {
35270
- callback = resolve15;
35270
+ callback = resolve19;
35271
35271
  }
35272
35272
  });
35273
35273
  if (client[kHTTPConnVersion] === "h2") {
@@ -35613,8 +35613,8 @@ var require_pool_base = __commonJS({
35613
35613
  if (this[kQueue].isEmpty()) {
35614
35614
  return Promise.all(this[kClients].map((c) => c.close()));
35615
35615
  } else {
35616
- return new Promise((resolve15) => {
35617
- this[kClosedResolve] = resolve15;
35616
+ return new Promise((resolve19) => {
35617
+ this[kClosedResolve] = resolve19;
35618
35618
  });
35619
35619
  }
35620
35620
  }
@@ -36192,7 +36192,7 @@ var require_readable = __commonJS({
36192
36192
  if (this.closed) {
36193
36193
  return Promise.resolve(null);
36194
36194
  }
36195
- return new Promise((resolve15, reject) => {
36195
+ return new Promise((resolve19, reject) => {
36196
36196
  const signalListenerCleanup = signal ? util.addAbortListener(signal, () => {
36197
36197
  this.destroy();
36198
36198
  }) : noop;
@@ -36201,7 +36201,7 @@ var require_readable = __commonJS({
36201
36201
  if (signal && signal.aborted) {
36202
36202
  reject(signal.reason || Object.assign(new Error("The operation was aborted"), { name: "AbortError" }));
36203
36203
  } else {
36204
- resolve15(null);
36204
+ resolve19(null);
36205
36205
  }
36206
36206
  }).on("error", noop).on("data", function(chunk) {
36207
36207
  limit -= chunk.length;
@@ -36223,11 +36223,11 @@ var require_readable = __commonJS({
36223
36223
  throw new TypeError("unusable");
36224
36224
  }
36225
36225
  assert(!stream[kConsume]);
36226
- return new Promise((resolve15, reject) => {
36226
+ return new Promise((resolve19, reject) => {
36227
36227
  stream[kConsume] = {
36228
36228
  type,
36229
36229
  stream,
36230
- resolve: resolve15,
36230
+ resolve: resolve19,
36231
36231
  reject,
36232
36232
  length: 0,
36233
36233
  body: []
@@ -36262,12 +36262,12 @@ var require_readable = __commonJS({
36262
36262
  }
36263
36263
  }
36264
36264
  function consumeEnd(consume2) {
36265
- const { type, body, resolve: resolve15, stream, length } = consume2;
36265
+ const { type, body, resolve: resolve19, stream, length } = consume2;
36266
36266
  try {
36267
36267
  if (type === "text") {
36268
- resolve15(toUSVString(Buffer.concat(body)));
36268
+ resolve19(toUSVString(Buffer.concat(body)));
36269
36269
  } else if (type === "json") {
36270
- resolve15(JSON.parse(Buffer.concat(body)));
36270
+ resolve19(JSON.parse(Buffer.concat(body)));
36271
36271
  } else if (type === "arrayBuffer") {
36272
36272
  const dst = new Uint8Array(length);
36273
36273
  let pos = 0;
@@ -36275,12 +36275,12 @@ var require_readable = __commonJS({
36275
36275
  dst.set(buf, pos);
36276
36276
  pos += buf.byteLength;
36277
36277
  }
36278
- resolve15(dst.buffer);
36278
+ resolve19(dst.buffer);
36279
36279
  } else if (type === "blob") {
36280
36280
  if (!Blob2) {
36281
36281
  Blob2 = require("buffer").Blob;
36282
36282
  }
36283
- resolve15(new Blob2(body, { type: stream[kContentType] }));
36283
+ resolve19(new Blob2(body, { type: stream[kContentType] }));
36284
36284
  }
36285
36285
  consumeFinish(consume2);
36286
36286
  } catch (err) {
@@ -36537,9 +36537,9 @@ var require_api_request = __commonJS({
36537
36537
  };
36538
36538
  function request(opts, callback) {
36539
36539
  if (callback === void 0) {
36540
- return new Promise((resolve15, reject) => {
36540
+ return new Promise((resolve19, reject) => {
36541
36541
  request.call(this, opts, (err, data) => {
36542
- return err ? reject(err) : resolve15(data);
36542
+ return err ? reject(err) : resolve19(data);
36543
36543
  });
36544
36544
  });
36545
36545
  }
@@ -36712,9 +36712,9 @@ var require_api_stream = __commonJS({
36712
36712
  };
36713
36713
  function stream(opts, factory, callback) {
36714
36714
  if (callback === void 0) {
36715
- return new Promise((resolve15, reject) => {
36715
+ return new Promise((resolve19, reject) => {
36716
36716
  stream.call(this, opts, factory, (err, data) => {
36717
- return err ? reject(err) : resolve15(data);
36717
+ return err ? reject(err) : resolve19(data);
36718
36718
  });
36719
36719
  });
36720
36720
  }
@@ -36995,9 +36995,9 @@ var require_api_upgrade = __commonJS({
36995
36995
  };
36996
36996
  function upgrade(opts, callback) {
36997
36997
  if (callback === void 0) {
36998
- return new Promise((resolve15, reject) => {
36998
+ return new Promise((resolve19, reject) => {
36999
36999
  upgrade.call(this, opts, (err, data) => {
37000
- return err ? reject(err) : resolve15(data);
37000
+ return err ? reject(err) : resolve19(data);
37001
37001
  });
37002
37002
  });
37003
37003
  }
@@ -37086,9 +37086,9 @@ var require_api_connect = __commonJS({
37086
37086
  };
37087
37087
  function connect(opts, callback) {
37088
37088
  if (callback === void 0) {
37089
- return new Promise((resolve15, reject) => {
37089
+ return new Promise((resolve19, reject) => {
37090
37090
  connect.call(this, opts, (err, data) => {
37091
- return err ? reject(err) : resolve15(data);
37091
+ return err ? reject(err) : resolve19(data);
37092
37092
  });
37093
37093
  });
37094
37094
  }
@@ -37248,20 +37248,20 @@ var require_mock_utils = __commonJS({
37248
37248
  }
37249
37249
  return true;
37250
37250
  }
37251
- function safeUrl(path27) {
37252
- if (typeof path27 !== "string") {
37253
- return path27;
37251
+ function safeUrl(path31) {
37252
+ if (typeof path31 !== "string") {
37253
+ return path31;
37254
37254
  }
37255
- const pathSegments = path27.split("?");
37255
+ const pathSegments = path31.split("?");
37256
37256
  if (pathSegments.length !== 2) {
37257
- return path27;
37257
+ return path31;
37258
37258
  }
37259
37259
  const qp = new URLSearchParams(pathSegments.pop());
37260
37260
  qp.sort();
37261
37261
  return [...pathSegments, qp.toString()].join("?");
37262
37262
  }
37263
- function matchKey(mockDispatch2, { path: path27, method, body, headers }) {
37264
- const pathMatch = matchValue(mockDispatch2.path, path27);
37263
+ function matchKey(mockDispatch2, { path: path31, method, body, headers }) {
37264
+ const pathMatch = matchValue(mockDispatch2.path, path31);
37265
37265
  const methodMatch = matchValue(mockDispatch2.method, method);
37266
37266
  const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
37267
37267
  const headersMatch = matchHeaders(mockDispatch2, headers);
@@ -37279,7 +37279,7 @@ var require_mock_utils = __commonJS({
37279
37279
  function getMockDispatch(mockDispatches, key) {
37280
37280
  const basePath = key.query ? buildURL(key.path, key.query) : key.path;
37281
37281
  const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
37282
- let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path27 }) => matchValue(safeUrl(path27), resolvedPath));
37282
+ let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path31 }) => matchValue(safeUrl(path31), resolvedPath));
37283
37283
  if (matchedMockDispatches.length === 0) {
37284
37284
  throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
37285
37285
  }
@@ -37316,9 +37316,9 @@ var require_mock_utils = __commonJS({
37316
37316
  }
37317
37317
  }
37318
37318
  function buildKey(opts) {
37319
- const { path: path27, method, body, headers, query } = opts;
37319
+ const { path: path31, method, body, headers, query } = opts;
37320
37320
  return {
37321
- path: path27,
37321
+ path: path31,
37322
37322
  method,
37323
37323
  body,
37324
37324
  headers,
@@ -37767,10 +37767,10 @@ var require_pending_interceptors_formatter = __commonJS({
37767
37767
  }
37768
37768
  format(pendingInterceptors) {
37769
37769
  const withPrettyHeaders = pendingInterceptors.map(
37770
- ({ method, path: path27, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
37770
+ ({ method, path: path31, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
37771
37771
  Method: method,
37772
37772
  Origin: origin,
37773
- Path: path27,
37773
+ Path: path31,
37774
37774
  "Status code": statusCode,
37775
37775
  Persistent: persist ? "\u2705" : "\u274C",
37776
37776
  Invocations: timesInvoked,
@@ -40711,7 +40711,7 @@ var require_fetch = __commonJS({
40711
40711
  async function dispatch({ body }) {
40712
40712
  const url = requestCurrentURL(request);
40713
40713
  const agent = fetchParams.controller.dispatcher;
40714
- return new Promise((resolve15, reject) => agent.dispatch(
40714
+ return new Promise((resolve19, reject) => agent.dispatch(
40715
40715
  {
40716
40716
  path: url.pathname + url.search,
40717
40717
  origin: url.origin,
@@ -40787,7 +40787,7 @@ var require_fetch = __commonJS({
40787
40787
  }
40788
40788
  }
40789
40789
  }
40790
- resolve15({
40790
+ resolve19({
40791
40791
  status,
40792
40792
  statusText,
40793
40793
  headersList: headers[kHeadersList],
@@ -40830,7 +40830,7 @@ var require_fetch = __commonJS({
40830
40830
  const val = headersList[n + 1].toString("latin1");
40831
40831
  headers[kHeadersList].append(key, val);
40832
40832
  }
40833
- resolve15({
40833
+ resolve19({
40834
40834
  status,
40835
40835
  statusText: STATUS_CODES[status],
40836
40836
  headersList: headers[kHeadersList],
@@ -42391,8 +42391,8 @@ var require_util6 = __commonJS({
42391
42391
  }
42392
42392
  }
42393
42393
  }
42394
- function validateCookiePath(path27) {
42395
- for (const char of path27) {
42394
+ function validateCookiePath(path31) {
42395
+ for (const char of path31) {
42396
42396
  const code = char.charCodeAt(0);
42397
42397
  if (code < 33 || char === ";") {
42398
42398
  throw new Error("Invalid cookie path");
@@ -43189,9 +43189,9 @@ var require_connection = __commonJS({
43189
43189
  channels.open = diagnosticsChannel.channel("undici:websocket:open");
43190
43190
  channels.close = diagnosticsChannel.channel("undici:websocket:close");
43191
43191
  channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error");
43192
- var crypto2;
43192
+ var crypto4;
43193
43193
  try {
43194
- crypto2 = require("crypto");
43194
+ crypto4 = require("crypto");
43195
43195
  } catch {
43196
43196
  }
43197
43197
  function establishWebSocketConnection(url, protocols, ws, onEstablish, options) {
@@ -43210,7 +43210,7 @@ var require_connection = __commonJS({
43210
43210
  const headersList = new Headers(options.headers)[kHeadersList];
43211
43211
  request.headersList = headersList;
43212
43212
  }
43213
- const keyValue = crypto2.randomBytes(16).toString("base64");
43213
+ const keyValue = crypto4.randomBytes(16).toString("base64");
43214
43214
  request.headersList.append("sec-websocket-key", keyValue);
43215
43215
  request.headersList.append("sec-websocket-version", "13");
43216
43216
  for (const protocol of protocols) {
@@ -43239,7 +43239,7 @@ var require_connection = __commonJS({
43239
43239
  return;
43240
43240
  }
43241
43241
  const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
43242
- const digest = crypto2.createHash("sha1").update(keyValue + uid).digest("base64");
43242
+ const digest = crypto4.createHash("sha1").update(keyValue + uid).digest("base64");
43243
43243
  if (secWSAccept !== digest) {
43244
43244
  failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
43245
43245
  return;
@@ -43319,9 +43319,9 @@ var require_frame = __commonJS({
43319
43319
  "node_modules/undici/lib/websocket/frame.js"(exports2, module2) {
43320
43320
  "use strict";
43321
43321
  var { maxUnsigned16Bit } = require_constants5();
43322
- var crypto2;
43322
+ var crypto4;
43323
43323
  try {
43324
- crypto2 = require("crypto");
43324
+ crypto4 = require("crypto");
43325
43325
  } catch {
43326
43326
  }
43327
43327
  var WebsocketFrameSend = class {
@@ -43330,7 +43330,7 @@ var require_frame = __commonJS({
43330
43330
  */
43331
43331
  constructor(data) {
43332
43332
  this.frameData = data;
43333
- this.maskKey = crypto2.randomBytes(4);
43333
+ this.maskKey = crypto4.randomBytes(4);
43334
43334
  }
43335
43335
  createFrame(opcode) {
43336
43336
  const bodyLength = this.frameData?.byteLength ?? 0;
@@ -44072,11 +44072,11 @@ var require_undici = __commonJS({
44072
44072
  if (typeof opts.path !== "string") {
44073
44073
  throw new InvalidArgumentError("invalid opts.path");
44074
44074
  }
44075
- let path27 = opts.path;
44075
+ let path31 = opts.path;
44076
44076
  if (!opts.path.startsWith("/")) {
44077
- path27 = `/${path27}`;
44077
+ path31 = `/${path31}`;
44078
44078
  }
44079
- url = new URL(util.parseOrigin(url).origin + path27);
44079
+ url = new URL(util.parseOrigin(url).origin + path31);
44080
44080
  } else {
44081
44081
  if (!opts) {
44082
44082
  opts = typeof url === "object" ? url : {};
@@ -44625,7 +44625,7 @@ var init_mcp_check_provider = __esm({
44625
44625
  logger.warn(
44626
44626
  `MCP ${transportName} failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${error instanceof Error ? error.message : String(error)}`
44627
44627
  );
44628
- await new Promise((resolve15) => setTimeout(resolve15, delay));
44628
+ await new Promise((resolve19) => setTimeout(resolve19, delay));
44629
44629
  attempt += 1;
44630
44630
  } finally {
44631
44631
  try {
@@ -44907,7 +44907,7 @@ async function acquirePromptLock() {
44907
44907
  activePrompt = true;
44908
44908
  return;
44909
44909
  }
44910
- await new Promise((resolve15) => waiters.push(resolve15));
44910
+ await new Promise((resolve19) => waiters.push(resolve19));
44911
44911
  activePrompt = true;
44912
44912
  }
44913
44913
  function releasePromptLock() {
@@ -44917,7 +44917,7 @@ function releasePromptLock() {
44917
44917
  }
44918
44918
  async function interactivePrompt(options) {
44919
44919
  await acquirePromptLock();
44920
- return new Promise((resolve15, reject) => {
44920
+ return new Promise((resolve19, reject) => {
44921
44921
  const dbg = process.env.VISOR_DEBUG === "true";
44922
44922
  try {
44923
44923
  if (dbg) {
@@ -45004,12 +45004,12 @@ async function interactivePrompt(options) {
45004
45004
  };
45005
45005
  const finish = (value) => {
45006
45006
  cleanup();
45007
- resolve15(value);
45007
+ resolve19(value);
45008
45008
  };
45009
45009
  if (options.timeout && options.timeout > 0) {
45010
45010
  timeoutId = setTimeout(() => {
45011
45011
  cleanup();
45012
- if (defaultValue !== void 0) return resolve15(defaultValue);
45012
+ if (defaultValue !== void 0) return resolve19(defaultValue);
45013
45013
  return reject(new Error("Input timeout"));
45014
45014
  }, options.timeout);
45015
45015
  }
@@ -45141,7 +45141,7 @@ async function interactivePrompt(options) {
45141
45141
  });
45142
45142
  }
45143
45143
  async function simplePrompt(prompt) {
45144
- return new Promise((resolve15) => {
45144
+ return new Promise((resolve19) => {
45145
45145
  const rl = readline.createInterface({
45146
45146
  input: process.stdin,
45147
45147
  output: process.stdout
@@ -45157,7 +45157,7 @@ async function simplePrompt(prompt) {
45157
45157
  rl.question(`${prompt}
45158
45158
  > `, (answer) => {
45159
45159
  rl.close();
45160
- resolve15(answer.trim());
45160
+ resolve19(answer.trim());
45161
45161
  });
45162
45162
  });
45163
45163
  }
@@ -45325,7 +45325,7 @@ function isStdinAvailable() {
45325
45325
  return !process.stdin.isTTY;
45326
45326
  }
45327
45327
  async function readStdin(timeout, maxSize = 1024 * 1024) {
45328
- return new Promise((resolve15, reject) => {
45328
+ return new Promise((resolve19, reject) => {
45329
45329
  let data = "";
45330
45330
  let timeoutId;
45331
45331
  if (timeout) {
@@ -45352,7 +45352,7 @@ async function readStdin(timeout, maxSize = 1024 * 1024) {
45352
45352
  };
45353
45353
  const onEnd = () => {
45354
45354
  cleanup();
45355
- resolve15(data.trim());
45355
+ resolve19(data.trim());
45356
45356
  };
45357
45357
  const onError = (err) => {
45358
45358
  cleanup();
@@ -49470,23 +49470,23 @@ __export(renderer_schema_exports, {
49470
49470
  });
49471
49471
  async function loadRendererSchema(name) {
49472
49472
  try {
49473
- const fs23 = await import("fs/promises");
49474
- const path27 = await import("path");
49473
+ const fs27 = await import("fs/promises");
49474
+ const path31 = await import("path");
49475
49475
  const sanitized = String(name).replace(/[^a-zA-Z0-9-]/g, "");
49476
49476
  if (!sanitized) return void 0;
49477
49477
  const candidates = [
49478
49478
  // When bundled with ncc, __dirname is dist/ and output/ is at dist/output/
49479
- path27.join(__dirname, "output", sanitized, "schema.json"),
49479
+ path31.join(__dirname, "output", sanitized, "schema.json"),
49480
49480
  // When running from source, __dirname is src/state-machine/dispatch/ and output/ is at output/
49481
- path27.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
49481
+ path31.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
49482
49482
  // When running from a checkout with output/ folder copied to CWD
49483
- path27.join(process.cwd(), "output", sanitized, "schema.json"),
49483
+ path31.join(process.cwd(), "output", sanitized, "schema.json"),
49484
49484
  // Fallback: cwd/dist/output/
49485
- path27.join(process.cwd(), "dist", "output", sanitized, "schema.json")
49485
+ path31.join(process.cwd(), "dist", "output", sanitized, "schema.json")
49486
49486
  ];
49487
49487
  for (const p of candidates) {
49488
49488
  try {
49489
- const raw = await fs23.readFile(p, "utf-8");
49489
+ const raw = await fs27.readFile(p, "utf-8");
49490
49490
  return JSON.parse(raw);
49491
49491
  } catch {
49492
49492
  }
@@ -51905,8 +51905,8 @@ function updateStats2(results, state, isForEachIteration = false) {
51905
51905
  async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
51906
51906
  try {
51907
51907
  const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
51908
- const fs23 = await import("fs/promises");
51909
- const path27 = await import("path");
51908
+ const fs27 = await import("fs/promises");
51909
+ const path31 = await import("path");
51910
51910
  const schemaRaw = checkConfig.schema || "plain";
51911
51911
  const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
51912
51912
  let templateContent;
@@ -51915,27 +51915,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
51915
51915
  logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
51916
51916
  } else if (checkConfig.template && checkConfig.template.file) {
51917
51917
  const file = String(checkConfig.template.file);
51918
- const resolved = path27.resolve(process.cwd(), file);
51919
- templateContent = await fs23.readFile(resolved, "utf-8");
51918
+ const resolved = path31.resolve(process.cwd(), file);
51919
+ templateContent = await fs27.readFile(resolved, "utf-8");
51920
51920
  logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
51921
51921
  } else if (schema && schema !== "plain") {
51922
51922
  const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
51923
51923
  if (sanitized) {
51924
51924
  const candidatePaths = [
51925
- path27.join(__dirname, "output", sanitized, "template.liquid"),
51925
+ path31.join(__dirname, "output", sanitized, "template.liquid"),
51926
51926
  // bundled: dist/output/
51927
- path27.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
51927
+ path31.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
51928
51928
  // source (from state-machine/states)
51929
- path27.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
51929
+ path31.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
51930
51930
  // source (alternate)
51931
- path27.join(process.cwd(), "output", sanitized, "template.liquid"),
51931
+ path31.join(process.cwd(), "output", sanitized, "template.liquid"),
51932
51932
  // fallback: cwd/output/
51933
- path27.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
51933
+ path31.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
51934
51934
  // fallback: cwd/dist/output/
51935
51935
  ];
51936
51936
  for (const p of candidatePaths) {
51937
51937
  try {
51938
- templateContent = await fs23.readFile(p, "utf-8");
51938
+ templateContent = await fs27.readFile(p, "utf-8");
51939
51939
  if (templateContent) {
51940
51940
  logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
51941
51941
  break;
@@ -54075,8 +54075,8 @@ var init_workspace_manager = __esm({
54075
54075
  );
54076
54076
  if (this.cleanupRequested && this.activeOperations === 0) {
54077
54077
  logger.debug(`[Workspace] All references released, proceeding with deferred cleanup`);
54078
- for (const resolve15 of this.cleanupResolvers) {
54079
- resolve15();
54078
+ for (const resolve19 of this.cleanupResolvers) {
54079
+ resolve19();
54080
54080
  }
54081
54081
  this.cleanupResolvers = [];
54082
54082
  }
@@ -54233,19 +54233,19 @@ var init_workspace_manager = __esm({
54233
54233
  );
54234
54234
  this.cleanupRequested = true;
54235
54235
  await Promise.race([
54236
- new Promise((resolve15) => {
54236
+ new Promise((resolve19) => {
54237
54237
  if (this.activeOperations === 0) {
54238
- resolve15();
54238
+ resolve19();
54239
54239
  } else {
54240
- this.cleanupResolvers.push(resolve15);
54240
+ this.cleanupResolvers.push(resolve19);
54241
54241
  }
54242
54242
  }),
54243
- new Promise((resolve15) => {
54243
+ new Promise((resolve19) => {
54244
54244
  setTimeout(() => {
54245
54245
  logger.warn(
54246
54246
  `[Workspace] Cleanup timeout after ${timeout}ms, proceeding anyway (${this.activeOperations} operations still active)`
54247
54247
  );
54248
- resolve15();
54248
+ resolve19();
54249
54249
  }, timeout);
54250
54250
  })
54251
54251
  ]);
@@ -54723,6 +54723,1380 @@ var init_build_engine_context = __esm({
54723
54723
  }
54724
54724
  });
54725
54725
 
54726
+ // src/policy/default-engine.ts
54727
+ var DefaultPolicyEngine;
54728
+ var init_default_engine = __esm({
54729
+ "src/policy/default-engine.ts"() {
54730
+ "use strict";
54731
+ DefaultPolicyEngine = class {
54732
+ async initialize(_config) {
54733
+ }
54734
+ async evaluateCheckExecution(_checkId, _checkConfig) {
54735
+ return { allowed: true };
54736
+ }
54737
+ async evaluateToolInvocation(_serverName, _methodName, _transport) {
54738
+ return { allowed: true };
54739
+ }
54740
+ async evaluateCapabilities(_checkId, _capabilities) {
54741
+ return { allowed: true };
54742
+ }
54743
+ async shutdown() {
54744
+ }
54745
+ };
54746
+ }
54747
+ });
54748
+
54749
+ // src/enterprise/license/validator.ts
54750
+ var validator_exports = {};
54751
+ __export(validator_exports, {
54752
+ LicenseValidator: () => LicenseValidator
54753
+ });
54754
+ var crypto2, fs21, path25, LicenseValidator;
54755
+ var init_validator = __esm({
54756
+ "src/enterprise/license/validator.ts"() {
54757
+ "use strict";
54758
+ crypto2 = __toESM(require("crypto"));
54759
+ fs21 = __toESM(require("fs"));
54760
+ path25 = __toESM(require("path"));
54761
+ LicenseValidator = class _LicenseValidator {
54762
+ /** Ed25519 public key for license verification (PEM format). */
54763
+ static PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n-----END PUBLIC KEY-----\n";
54764
+ cache = null;
54765
+ static CACHE_TTL = 5 * 60 * 1e3;
54766
+ // 5 minutes
54767
+ static GRACE_PERIOD = 72 * 3600 * 1e3;
54768
+ // 72 hours after expiry
54769
+ /**
54770
+ * Load and validate license from environment or file.
54771
+ *
54772
+ * Resolution order:
54773
+ * 1. VISOR_LICENSE env var (JWT string)
54774
+ * 2. VISOR_LICENSE_FILE env var (path to file)
54775
+ * 3. .visor-license in project root (cwd)
54776
+ * 4. .visor-license in ~/.config/visor/
54777
+ */
54778
+ async loadAndValidate() {
54779
+ if (this.cache && Date.now() - this.cache.validatedAt < _LicenseValidator.CACHE_TTL) {
54780
+ return this.cache.payload;
54781
+ }
54782
+ const token = this.resolveToken();
54783
+ if (!token) return null;
54784
+ const payload = this.verifyAndDecode(token);
54785
+ if (!payload) return null;
54786
+ this.cache = { payload, validatedAt: Date.now() };
54787
+ return payload;
54788
+ }
54789
+ /** Check if a specific feature is licensed */
54790
+ hasFeature(feature) {
54791
+ if (!this.cache) return false;
54792
+ return this.cache.payload.features.includes(feature);
54793
+ }
54794
+ /** Check if license is valid (with grace period) */
54795
+ isValid() {
54796
+ if (!this.cache) return false;
54797
+ const now = Date.now();
54798
+ const expiryMs = this.cache.payload.exp * 1e3;
54799
+ return now < expiryMs + _LicenseValidator.GRACE_PERIOD;
54800
+ }
54801
+ /** Check if the license is within its grace period (expired but still valid) */
54802
+ isInGracePeriod() {
54803
+ if (!this.cache) return false;
54804
+ const now = Date.now();
54805
+ const expiryMs = this.cache.payload.exp * 1e3;
54806
+ return now >= expiryMs && now < expiryMs + _LicenseValidator.GRACE_PERIOD;
54807
+ }
54808
+ resolveToken() {
54809
+ if (process.env.VISOR_LICENSE) {
54810
+ return process.env.VISOR_LICENSE.trim();
54811
+ }
54812
+ if (process.env.VISOR_LICENSE_FILE) {
54813
+ const resolved = path25.resolve(process.env.VISOR_LICENSE_FILE);
54814
+ const home2 = process.env.HOME || process.env.USERPROFILE || "";
54815
+ const allowedPrefixes = [path25.normalize(process.cwd())];
54816
+ if (home2) allowedPrefixes.push(path25.normalize(path25.join(home2, ".config", "visor")));
54817
+ let realPath;
54818
+ try {
54819
+ realPath = fs21.realpathSync(resolved);
54820
+ } catch {
54821
+ return null;
54822
+ }
54823
+ const isSafe = allowedPrefixes.some(
54824
+ (prefix) => realPath === prefix || realPath.startsWith(prefix + path25.sep)
54825
+ );
54826
+ if (!isSafe) return null;
54827
+ return this.readFile(realPath);
54828
+ }
54829
+ const cwdPath = path25.join(process.cwd(), ".visor-license");
54830
+ const cwdToken = this.readFile(cwdPath);
54831
+ if (cwdToken) return cwdToken;
54832
+ const home = process.env.HOME || process.env.USERPROFILE || "";
54833
+ if (home) {
54834
+ const configPath = path25.join(home, ".config", "visor", ".visor-license");
54835
+ const configToken = this.readFile(configPath);
54836
+ if (configToken) return configToken;
54837
+ }
54838
+ return null;
54839
+ }
54840
+ readFile(filePath) {
54841
+ try {
54842
+ return fs21.readFileSync(filePath, "utf-8").trim();
54843
+ } catch {
54844
+ return null;
54845
+ }
54846
+ }
54847
+ verifyAndDecode(token) {
54848
+ try {
54849
+ const parts = token.split(".");
54850
+ if (parts.length !== 3) return null;
54851
+ const [headerB64, payloadB64, signatureB64] = parts;
54852
+ const header = JSON.parse(Buffer.from(headerB64, "base64url").toString());
54853
+ if (header.alg !== "EdDSA") return null;
54854
+ const data = `${headerB64}.${payloadB64}`;
54855
+ const signature = Buffer.from(signatureB64, "base64url");
54856
+ const publicKey = crypto2.createPublicKey(_LicenseValidator.PUBLIC_KEY);
54857
+ if (publicKey.asymmetricKeyType !== "ed25519") {
54858
+ return null;
54859
+ }
54860
+ const isValid = crypto2.verify(null, Buffer.from(data), publicKey, signature);
54861
+ if (!isValid) return null;
54862
+ const payload = JSON.parse(Buffer.from(payloadB64, "base64url").toString());
54863
+ if (!payload.org || !Array.isArray(payload.features) || typeof payload.exp !== "number" || typeof payload.iat !== "number" || !payload.sub) {
54864
+ return null;
54865
+ }
54866
+ const now = Date.now();
54867
+ const expiryMs = payload.exp * 1e3;
54868
+ if (now >= expiryMs + _LicenseValidator.GRACE_PERIOD) {
54869
+ return null;
54870
+ }
54871
+ return payload;
54872
+ } catch {
54873
+ return null;
54874
+ }
54875
+ }
54876
+ };
54877
+ }
54878
+ });
54879
+
54880
+ // src/enterprise/policy/opa-compiler.ts
54881
+ var fs22, path26, os2, crypto3, import_child_process8, OpaCompiler;
54882
+ var init_opa_compiler = __esm({
54883
+ "src/enterprise/policy/opa-compiler.ts"() {
54884
+ "use strict";
54885
+ fs22 = __toESM(require("fs"));
54886
+ path26 = __toESM(require("path"));
54887
+ os2 = __toESM(require("os"));
54888
+ crypto3 = __toESM(require("crypto"));
54889
+ import_child_process8 = require("child_process");
54890
+ OpaCompiler = class _OpaCompiler {
54891
+ static CACHE_DIR = path26.join(os2.tmpdir(), "visor-opa-cache");
54892
+ /**
54893
+ * Resolve the input paths to WASM bytes.
54894
+ *
54895
+ * Strategy:
54896
+ * 1. If any path is a .wasm file, read it directly
54897
+ * 2. If a directory contains policy.wasm, read it
54898
+ * 3. Otherwise, collect all .rego files and auto-compile via `opa build`
54899
+ */
54900
+ async resolveWasmBytes(paths) {
54901
+ const regoFiles = [];
54902
+ for (const p of paths) {
54903
+ const resolved = path26.resolve(p);
54904
+ if (path26.normalize(resolved).includes("..")) {
54905
+ throw new Error(`Policy path contains traversal sequences: ${p}`);
54906
+ }
54907
+ if (resolved.endsWith(".wasm") && fs22.existsSync(resolved)) {
54908
+ return fs22.readFileSync(resolved);
54909
+ }
54910
+ if (!fs22.existsSync(resolved)) continue;
54911
+ const stat2 = fs22.statSync(resolved);
54912
+ if (stat2.isDirectory()) {
54913
+ const wasmCandidate = path26.join(resolved, "policy.wasm");
54914
+ if (fs22.existsSync(wasmCandidate)) {
54915
+ return fs22.readFileSync(wasmCandidate);
54916
+ }
54917
+ const files = fs22.readdirSync(resolved);
54918
+ for (const f of files) {
54919
+ if (f.endsWith(".rego")) {
54920
+ regoFiles.push(path26.join(resolved, f));
54921
+ }
54922
+ }
54923
+ } else if (resolved.endsWith(".rego")) {
54924
+ regoFiles.push(resolved);
54925
+ }
54926
+ }
54927
+ if (regoFiles.length === 0) {
54928
+ throw new Error(
54929
+ `OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(", ")}`
54930
+ );
54931
+ }
54932
+ return this.compileRego(regoFiles);
54933
+ }
54934
+ /**
54935
+ * Auto-compile .rego files to a WASM bundle using the `opa` CLI.
54936
+ *
54937
+ * Caches the compiled bundle based on a content hash of all input .rego files
54938
+ * so subsequent runs skip compilation if policies haven't changed.
54939
+ */
54940
+ compileRego(regoFiles) {
54941
+ try {
54942
+ (0, import_child_process8.execFileSync)("opa", ["version"], { stdio: "pipe" });
54943
+ } catch {
54944
+ throw new Error(
54945
+ "OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\nOr pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz " + regoFiles.join(" ")
54946
+ );
54947
+ }
54948
+ const hash = crypto3.createHash("sha256");
54949
+ for (const f of regoFiles.sort()) {
54950
+ hash.update(fs22.readFileSync(f));
54951
+ hash.update(f);
54952
+ }
54953
+ const cacheKey = hash.digest("hex").slice(0, 16);
54954
+ const cacheDir = _OpaCompiler.CACHE_DIR;
54955
+ const cachedWasm = path26.join(cacheDir, `${cacheKey}.wasm`);
54956
+ if (fs22.existsSync(cachedWasm)) {
54957
+ return fs22.readFileSync(cachedWasm);
54958
+ }
54959
+ fs22.mkdirSync(cacheDir, { recursive: true });
54960
+ const bundleTar = path26.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
54961
+ try {
54962
+ const args = [
54963
+ "build",
54964
+ "-t",
54965
+ "wasm",
54966
+ "-e",
54967
+ "visor",
54968
+ // entrypoint: the visor package tree
54969
+ "-o",
54970
+ bundleTar,
54971
+ ...regoFiles
54972
+ ];
54973
+ (0, import_child_process8.execFileSync)("opa", args, {
54974
+ stdio: "pipe",
54975
+ timeout: 3e4
54976
+ });
54977
+ } catch (err) {
54978
+ const stderr = err?.stderr?.toString() || "";
54979
+ throw new Error(
54980
+ `Failed to compile .rego files to WASM:
54981
+ ${stderr}
54982
+ Ensure your .rego files are valid and the \`opa\` CLI is installed.`
54983
+ );
54984
+ }
54985
+ try {
54986
+ (0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "/policy.wasm"], {
54987
+ stdio: "pipe"
54988
+ });
54989
+ const extractedWasm = path26.join(cacheDir, "policy.wasm");
54990
+ if (fs22.existsSync(extractedWasm)) {
54991
+ fs22.renameSync(extractedWasm, cachedWasm);
54992
+ }
54993
+ } catch {
54994
+ try {
54995
+ (0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "policy.wasm"], {
54996
+ stdio: "pipe"
54997
+ });
54998
+ const extractedWasm = path26.join(cacheDir, "policy.wasm");
54999
+ if (fs22.existsSync(extractedWasm)) {
55000
+ fs22.renameSync(extractedWasm, cachedWasm);
55001
+ }
55002
+ } catch (err2) {
55003
+ throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
55004
+ }
55005
+ }
55006
+ try {
55007
+ fs22.unlinkSync(bundleTar);
55008
+ } catch {
55009
+ }
55010
+ if (!fs22.existsSync(cachedWasm)) {
55011
+ throw new Error("OPA build succeeded but policy.wasm was not found in the bundle");
55012
+ }
55013
+ return fs22.readFileSync(cachedWasm);
55014
+ }
55015
+ };
55016
+ }
55017
+ });
55018
+
55019
+ // src/enterprise/policy/opa-wasm-evaluator.ts
55020
+ var fs23, path27, OpaWasmEvaluator;
55021
+ var init_opa_wasm_evaluator = __esm({
55022
+ "src/enterprise/policy/opa-wasm-evaluator.ts"() {
55023
+ "use strict";
55024
+ fs23 = __toESM(require("fs"));
55025
+ path27 = __toESM(require("path"));
55026
+ init_opa_compiler();
55027
+ OpaWasmEvaluator = class {
55028
+ policy = null;
55029
+ dataDocument = {};
55030
+ compiler = new OpaCompiler();
55031
+ async initialize(rulesPath) {
55032
+ const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
55033
+ const wasmBytes = await this.compiler.resolveWasmBytes(paths);
55034
+ try {
55035
+ const { createRequire } = require("module");
55036
+ const runtimeRequire = createRequire(__filename);
55037
+ const opaWasm = runtimeRequire("@open-policy-agent/opa-wasm");
55038
+ const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
55039
+ if (!loadPolicy) {
55040
+ throw new Error("loadPolicy not found in @open-policy-agent/opa-wasm");
55041
+ }
55042
+ this.policy = await loadPolicy(wasmBytes);
55043
+ } catch (err) {
55044
+ if (err?.code === "MODULE_NOT_FOUND" || err?.code === "ERR_MODULE_NOT_FOUND") {
55045
+ throw new Error(
55046
+ "OPA WASM evaluator requires @open-policy-agent/opa-wasm. Install it with: npm install @open-policy-agent/opa-wasm"
55047
+ );
55048
+ }
55049
+ throw err;
55050
+ }
55051
+ }
55052
+ /**
55053
+ * Load external data from a JSON file to use as the OPA data document.
55054
+ * The loaded data will be passed to `policy.setData()` during evaluation,
55055
+ * making it available in Rego via `data.<key>`.
55056
+ */
55057
+ loadData(dataPath) {
55058
+ const resolved = path27.resolve(dataPath);
55059
+ if (path27.normalize(resolved).includes("..")) {
55060
+ throw new Error(`Data path contains traversal sequences: ${dataPath}`);
55061
+ }
55062
+ if (!fs23.existsSync(resolved)) {
55063
+ throw new Error(`OPA data file not found: ${resolved}`);
55064
+ }
55065
+ const stat2 = fs23.statSync(resolved);
55066
+ if (stat2.size > 10 * 1024 * 1024) {
55067
+ throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat2.size} bytes)`);
55068
+ }
55069
+ const raw = fs23.readFileSync(resolved, "utf-8");
55070
+ try {
55071
+ const parsed = JSON.parse(raw);
55072
+ if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
55073
+ throw new Error("OPA data file must contain a JSON object (not an array or primitive)");
55074
+ }
55075
+ this.dataDocument = parsed;
55076
+ } catch (err) {
55077
+ if (err.message.startsWith("OPA data file must")) {
55078
+ throw err;
55079
+ }
55080
+ throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
55081
+ }
55082
+ }
55083
+ async evaluate(input) {
55084
+ if (!this.policy) {
55085
+ throw new Error("OPA WASM evaluator not initialized");
55086
+ }
55087
+ this.policy.setData(this.dataDocument);
55088
+ const resultSet = this.policy.evaluate(input);
55089
+ if (Array.isArray(resultSet) && resultSet.length > 0) {
55090
+ return resultSet[0].result;
55091
+ }
55092
+ return void 0;
55093
+ }
55094
+ async shutdown() {
55095
+ if (this.policy) {
55096
+ if (typeof this.policy.close === "function") {
55097
+ try {
55098
+ this.policy.close();
55099
+ } catch {
55100
+ }
55101
+ } else if (typeof this.policy.free === "function") {
55102
+ try {
55103
+ this.policy.free();
55104
+ } catch {
55105
+ }
55106
+ }
55107
+ }
55108
+ this.policy = null;
55109
+ }
55110
+ };
55111
+ }
55112
+ });
55113
+
55114
+ // src/enterprise/policy/opa-http-evaluator.ts
55115
+ var OpaHttpEvaluator;
55116
+ var init_opa_http_evaluator = __esm({
55117
+ "src/enterprise/policy/opa-http-evaluator.ts"() {
55118
+ "use strict";
55119
+ OpaHttpEvaluator = class {
55120
+ baseUrl;
55121
+ timeout;
55122
+ constructor(baseUrl, timeout = 5e3) {
55123
+ let parsed;
55124
+ try {
55125
+ parsed = new URL(baseUrl);
55126
+ } catch {
55127
+ throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
55128
+ }
55129
+ if (!["http:", "https:"].includes(parsed.protocol)) {
55130
+ throw new Error(
55131
+ `OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`
55132
+ );
55133
+ }
55134
+ const hostname = parsed.hostname;
55135
+ if (this.isBlockedHostname(hostname)) {
55136
+ throw new Error(
55137
+ `OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`
55138
+ );
55139
+ }
55140
+ this.baseUrl = baseUrl.replace(/\/+$/, "");
55141
+ this.timeout = timeout;
55142
+ }
55143
+ /**
55144
+ * Check if a hostname is blocked due to SSRF concerns.
55145
+ *
55146
+ * Blocks:
55147
+ * - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
55148
+ * - Link-local addresses (169.254.x.x)
55149
+ * - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
55150
+ * - IPv6 unique local addresses (fd00::/8)
55151
+ * - Cloud metadata services (*.internal)
55152
+ */
55153
+ isBlockedHostname(hostname) {
55154
+ if (!hostname) return true;
55155
+ const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, "");
55156
+ if (normalized === "metadata.google.internal" || normalized.endsWith(".internal")) {
55157
+ return true;
55158
+ }
55159
+ if (normalized === "localhost" || normalized === "localhost.localdomain") {
55160
+ return true;
55161
+ }
55162
+ if (normalized === "::1" || normalized === "0:0:0:0:0:0:0:1") {
55163
+ return true;
55164
+ }
55165
+ const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
55166
+ const ipv4Match = normalized.match(ipv4Pattern);
55167
+ if (ipv4Match) {
55168
+ const octets = ipv4Match.slice(1, 5).map(Number);
55169
+ if (octets.some((octet) => octet > 255)) {
55170
+ return false;
55171
+ }
55172
+ const [a, b] = octets;
55173
+ if (a === 127) {
55174
+ return true;
55175
+ }
55176
+ if (a === 0) {
55177
+ return true;
55178
+ }
55179
+ if (a === 169 && b === 254) {
55180
+ return true;
55181
+ }
55182
+ if (a === 10) {
55183
+ return true;
55184
+ }
55185
+ if (a === 172 && b >= 16 && b <= 31) {
55186
+ return true;
55187
+ }
55188
+ if (a === 192 && b === 168) {
55189
+ return true;
55190
+ }
55191
+ }
55192
+ if (normalized.startsWith("fd") || normalized.startsWith("fc")) {
55193
+ return true;
55194
+ }
55195
+ if (normalized.startsWith("fe80:")) {
55196
+ return true;
55197
+ }
55198
+ return false;
55199
+ }
55200
+ /**
55201
+ * Evaluate a policy rule against an input document via OPA REST API.
55202
+ *
55203
+ * @param input - The input document to evaluate
55204
+ * @param rulePath - OPA rule path (e.g., 'visor/check/execute')
55205
+ * @returns The result object from OPA, or undefined on error
55206
+ */
55207
+ async evaluate(input, rulePath) {
55208
+ const encodedPath = rulePath.split("/").map((s) => encodeURIComponent(s)).join("/");
55209
+ const url = `${this.baseUrl}/v1/data/${encodedPath}`;
55210
+ const controller = new AbortController();
55211
+ const timer = setTimeout(() => controller.abort(), this.timeout);
55212
+ try {
55213
+ const response = await fetch(url, {
55214
+ method: "POST",
55215
+ headers: { "Content-Type": "application/json" },
55216
+ body: JSON.stringify({ input }),
55217
+ signal: controller.signal
55218
+ });
55219
+ if (!response.ok) {
55220
+ throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
55221
+ }
55222
+ let body;
55223
+ try {
55224
+ body = await response.json();
55225
+ } catch (jsonErr) {
55226
+ throw new Error(
55227
+ `OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`
55228
+ );
55229
+ }
55230
+ return body?.result;
55231
+ } finally {
55232
+ clearTimeout(timer);
55233
+ }
55234
+ }
55235
+ async shutdown() {
55236
+ }
55237
+ };
55238
+ }
55239
+ });
55240
+
55241
+ // src/enterprise/policy/policy-input-builder.ts
55242
+ var PolicyInputBuilder;
55243
+ var init_policy_input_builder = __esm({
55244
+ "src/enterprise/policy/policy-input-builder.ts"() {
55245
+ "use strict";
55246
+ PolicyInputBuilder = class {
55247
+ roles;
55248
+ actor;
55249
+ repository;
55250
+ pullRequest;
55251
+ constructor(policyConfig, actor, repository, pullRequest) {
55252
+ this.roles = policyConfig.roles || {};
55253
+ this.actor = actor;
55254
+ this.repository = repository;
55255
+ this.pullRequest = pullRequest;
55256
+ }
55257
+ /** Resolve which roles apply to the current actor. */
55258
+ resolveRoles() {
55259
+ const matched = [];
55260
+ for (const [roleName, roleConfig] of Object.entries(this.roles)) {
55261
+ let identityMatch = false;
55262
+ if (roleConfig.author_association && this.actor.authorAssociation && roleConfig.author_association.includes(this.actor.authorAssociation)) {
55263
+ identityMatch = true;
55264
+ }
55265
+ if (!identityMatch && roleConfig.users && this.actor.login && roleConfig.users.includes(this.actor.login)) {
55266
+ identityMatch = true;
55267
+ }
55268
+ if (!identityMatch && roleConfig.slack_users && this.actor.slack?.userId && roleConfig.slack_users.includes(this.actor.slack.userId)) {
55269
+ identityMatch = true;
55270
+ }
55271
+ if (!identityMatch && roleConfig.emails && this.actor.slack?.email) {
55272
+ const actorEmail = this.actor.slack.email.toLowerCase();
55273
+ if (roleConfig.emails.some((e) => e.toLowerCase() === actorEmail)) {
55274
+ identityMatch = true;
55275
+ }
55276
+ }
55277
+ if (!identityMatch) continue;
55278
+ if (roleConfig.slack_channels && roleConfig.slack_channels.length > 0) {
55279
+ if (!this.actor.slack?.channelId || !roleConfig.slack_channels.includes(this.actor.slack.channelId)) {
55280
+ continue;
55281
+ }
55282
+ }
55283
+ matched.push(roleName);
55284
+ }
55285
+ return matched;
55286
+ }
55287
+ buildActor() {
55288
+ return {
55289
+ authorAssociation: this.actor.authorAssociation,
55290
+ login: this.actor.login,
55291
+ roles: this.resolveRoles(),
55292
+ isLocalMode: this.actor.isLocalMode,
55293
+ ...this.actor.slack && { slack: this.actor.slack }
55294
+ };
55295
+ }
55296
+ forCheckExecution(check) {
55297
+ return {
55298
+ scope: "check.execute",
55299
+ check: {
55300
+ id: check.id,
55301
+ type: check.type,
55302
+ group: check.group,
55303
+ tags: check.tags,
55304
+ criticality: check.criticality,
55305
+ sandbox: check.sandbox,
55306
+ policy: check.policy
55307
+ },
55308
+ actor: this.buildActor(),
55309
+ repository: this.repository,
55310
+ pullRequest: this.pullRequest
55311
+ };
55312
+ }
55313
+ forToolInvocation(serverName, methodName, transport) {
55314
+ return {
55315
+ scope: "tool.invoke",
55316
+ tool: { serverName, methodName, transport },
55317
+ actor: this.buildActor(),
55318
+ repository: this.repository,
55319
+ pullRequest: this.pullRequest
55320
+ };
55321
+ }
55322
+ forCapabilityResolve(checkId, capabilities) {
55323
+ return {
55324
+ scope: "capability.resolve",
55325
+ check: { id: checkId, type: "ai" },
55326
+ capability: capabilities,
55327
+ actor: this.buildActor(),
55328
+ repository: this.repository,
55329
+ pullRequest: this.pullRequest
55330
+ };
55331
+ }
55332
+ };
55333
+ }
55334
+ });
55335
+
55336
+ // src/enterprise/policy/opa-policy-engine.ts
55337
+ var opa_policy_engine_exports = {};
55338
+ __export(opa_policy_engine_exports, {
55339
+ OpaPolicyEngine: () => OpaPolicyEngine
55340
+ });
55341
+ var OpaPolicyEngine;
55342
+ var init_opa_policy_engine = __esm({
55343
+ "src/enterprise/policy/opa-policy-engine.ts"() {
55344
+ "use strict";
55345
+ init_opa_wasm_evaluator();
55346
+ init_opa_http_evaluator();
55347
+ init_policy_input_builder();
55348
+ OpaPolicyEngine = class {
55349
+ evaluator = null;
55350
+ fallback;
55351
+ timeout;
55352
+ config;
55353
+ inputBuilder = null;
55354
+ logger = null;
55355
+ constructor(config) {
55356
+ this.config = config;
55357
+ this.fallback = config.fallback || "deny";
55358
+ this.timeout = config.timeout || 5e3;
55359
+ }
55360
+ async initialize(config) {
55361
+ try {
55362
+ this.logger = (init_logger(), __toCommonJS(logger_exports)).logger;
55363
+ } catch {
55364
+ }
55365
+ const actor = {
55366
+ authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
55367
+ login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
55368
+ isLocalMode: !process.env.GITHUB_ACTIONS
55369
+ };
55370
+ const repo = {
55371
+ owner: process.env.GITHUB_REPOSITORY_OWNER,
55372
+ name: process.env.GITHUB_REPOSITORY?.split("/")[1],
55373
+ branch: process.env.GITHUB_HEAD_REF,
55374
+ baseBranch: process.env.GITHUB_BASE_REF,
55375
+ event: process.env.GITHUB_EVENT_NAME
55376
+ };
55377
+ const prNum = process.env.GITHUB_PR_NUMBER ? parseInt(process.env.GITHUB_PR_NUMBER, 10) : void 0;
55378
+ const pullRequest = {
55379
+ number: prNum !== void 0 && Number.isFinite(prNum) ? prNum : void 0
55380
+ };
55381
+ this.inputBuilder = new PolicyInputBuilder(config, actor, repo, pullRequest);
55382
+ if (config.engine === "local") {
55383
+ if (!config.rules) {
55384
+ throw new Error("OPA local mode requires `policy.rules` path to .wasm or .rego files");
55385
+ }
55386
+ const wasm = new OpaWasmEvaluator();
55387
+ await wasm.initialize(config.rules);
55388
+ if (config.data) {
55389
+ wasm.loadData(config.data);
55390
+ }
55391
+ this.evaluator = wasm;
55392
+ } else if (config.engine === "remote") {
55393
+ if (!config.url) {
55394
+ throw new Error("OPA remote mode requires `policy.url` pointing to OPA server");
55395
+ }
55396
+ this.evaluator = new OpaHttpEvaluator(config.url, this.timeout);
55397
+ } else {
55398
+ this.evaluator = null;
55399
+ }
55400
+ }
55401
+ /**
55402
+ * Update actor/repo/PR context (e.g., after PR info becomes available).
55403
+ * Called by the enterprise loader when engine context is enriched.
55404
+ */
55405
+ setActorContext(actor, repo, pullRequest) {
55406
+ this.inputBuilder = new PolicyInputBuilder(this.config, actor, repo, pullRequest);
55407
+ }
55408
+ async evaluateCheckExecution(checkId, checkConfig) {
55409
+ if (!this.evaluator || !this.inputBuilder) return { allowed: true };
55410
+ const cfg = checkConfig && typeof checkConfig === "object" ? checkConfig : {};
55411
+ const policyOverride = cfg.policy;
55412
+ const input = this.inputBuilder.forCheckExecution({
55413
+ id: checkId,
55414
+ type: cfg.type || "ai",
55415
+ group: cfg.group,
55416
+ tags: cfg.tags,
55417
+ criticality: cfg.criticality,
55418
+ sandbox: cfg.sandbox,
55419
+ policy: policyOverride
55420
+ });
55421
+ return this.doEvaluate(input, this.resolveRulePath("check.execute", policyOverride?.rule));
55422
+ }
55423
+ async evaluateToolInvocation(serverName, methodName, transport) {
55424
+ if (!this.evaluator || !this.inputBuilder) return { allowed: true };
55425
+ const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
55426
+ return this.doEvaluate(input, "visor/tool/invoke");
55427
+ }
55428
+ async evaluateCapabilities(checkId, capabilities) {
55429
+ if (!this.evaluator || !this.inputBuilder) return { allowed: true };
55430
+ const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
55431
+ return this.doEvaluate(input, "visor/capability/resolve");
55432
+ }
55433
+ async shutdown() {
55434
+ if (this.evaluator && "shutdown" in this.evaluator) {
55435
+ await this.evaluator.shutdown();
55436
+ }
55437
+ this.evaluator = null;
55438
+ this.inputBuilder = null;
55439
+ }
55440
+ resolveRulePath(defaultScope, override) {
55441
+ if (override) {
55442
+ return override.startsWith("visor/") ? override : `visor/${override}`;
55443
+ }
55444
+ return `visor/${defaultScope.replace(/\./g, "/")}`;
55445
+ }
55446
+ async doEvaluate(input, rulePath) {
55447
+ try {
55448
+ this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
55449
+ let timer;
55450
+ const timeoutPromise = new Promise((_resolve, reject) => {
55451
+ timer = setTimeout(() => reject(new Error("policy evaluation timeout")), this.timeout);
55452
+ });
55453
+ try {
55454
+ const result = await Promise.race([this.rawEvaluate(input, rulePath), timeoutPromise]);
55455
+ const decision = this.parseDecision(result);
55456
+ if (!decision.allowed && this.fallback === "warn") {
55457
+ decision.allowed = true;
55458
+ decision.warn = true;
55459
+ decision.reason = `audit: ${decision.reason || "policy denied"}`;
55460
+ }
55461
+ this.logger?.debug(
55462
+ `[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || "none"}`
55463
+ );
55464
+ return decision;
55465
+ } finally {
55466
+ if (timer) clearTimeout(timer);
55467
+ }
55468
+ } catch (err) {
55469
+ const msg = err instanceof Error ? err.message : String(err);
55470
+ this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
55471
+ return {
55472
+ allowed: this.fallback === "allow" || this.fallback === "warn",
55473
+ warn: this.fallback === "warn" ? true : void 0,
55474
+ reason: `policy evaluation failed, fallback=${this.fallback}`
55475
+ };
55476
+ }
55477
+ }
55478
+ async rawEvaluate(input, rulePath) {
55479
+ if (this.evaluator instanceof OpaWasmEvaluator) {
55480
+ const result = await this.evaluator.evaluate(input);
55481
+ return this.navigateWasmResult(result, rulePath);
55482
+ }
55483
+ return this.evaluator.evaluate(input, rulePath);
55484
+ }
55485
+ /**
55486
+ * Navigate nested OPA WASM result tree to reach the specific rule's output.
55487
+ * The WASM entrypoint `-e visor` means the result root IS the visor package,
55488
+ * so we strip the `visor/` prefix and walk the remaining segments.
55489
+ */
55490
+ navigateWasmResult(result, rulePath) {
55491
+ if (!result || typeof result !== "object") return result;
55492
+ const segments = rulePath.replace(/^visor\//, "").split("/");
55493
+ let current = result;
55494
+ for (const seg of segments) {
55495
+ if (current && typeof current === "object" && seg in current) {
55496
+ current = current[seg];
55497
+ } else {
55498
+ return void 0;
55499
+ }
55500
+ }
55501
+ return current;
55502
+ }
55503
+ parseDecision(result) {
55504
+ if (result === void 0 || result === null) {
55505
+ return {
55506
+ allowed: this.fallback === "allow" || this.fallback === "warn",
55507
+ warn: this.fallback === "warn" ? true : void 0,
55508
+ reason: this.fallback === "warn" ? "audit: no policy result" : "no policy result"
55509
+ };
55510
+ }
55511
+ const allowed = result.allowed !== false;
55512
+ const decision = {
55513
+ allowed,
55514
+ reason: result.reason
55515
+ };
55516
+ if (result.capabilities) {
55517
+ decision.capabilities = result.capabilities;
55518
+ }
55519
+ return decision;
55520
+ }
55521
+ };
55522
+ }
55523
+ });
55524
+
55525
+ // src/enterprise/scheduler/knex-store.ts
55526
+ var knex_store_exports = {};
55527
+ __export(knex_store_exports, {
55528
+ KnexStoreBackend: () => KnexStoreBackend
55529
+ });
55530
+ function toNum(val) {
55531
+ if (val === null || val === void 0) return void 0;
55532
+ return typeof val === "string" ? parseInt(val, 10) : val;
55533
+ }
55534
+ function safeJsonParse2(value) {
55535
+ if (!value) return void 0;
55536
+ try {
55537
+ return JSON.parse(value);
55538
+ } catch {
55539
+ return void 0;
55540
+ }
55541
+ }
55542
+ function fromTriggerRow2(row) {
55543
+ return {
55544
+ id: row.id,
55545
+ creatorId: row.creator_id,
55546
+ creatorContext: row.creator_context ?? void 0,
55547
+ creatorName: row.creator_name ?? void 0,
55548
+ description: row.description ?? void 0,
55549
+ channels: safeJsonParse2(row.channels),
55550
+ fromUsers: safeJsonParse2(row.from_users),
55551
+ fromBots: row.from_bots === true || row.from_bots === 1,
55552
+ contains: safeJsonParse2(row.contains),
55553
+ matchPattern: row.match_pattern ?? void 0,
55554
+ threads: row.threads,
55555
+ workflow: row.workflow,
55556
+ inputs: safeJsonParse2(row.inputs),
55557
+ outputContext: safeJsonParse2(row.output_context),
55558
+ status: row.status,
55559
+ enabled: row.enabled === true || row.enabled === 1,
55560
+ createdAt: toNum(row.created_at)
55561
+ };
55562
+ }
55563
+ function toTriggerInsertRow(trigger) {
55564
+ return {
55565
+ id: trigger.id,
55566
+ creator_id: trigger.creatorId,
55567
+ creator_context: trigger.creatorContext ?? null,
55568
+ creator_name: trigger.creatorName ?? null,
55569
+ description: trigger.description ?? null,
55570
+ channels: trigger.channels ? JSON.stringify(trigger.channels) : null,
55571
+ from_users: trigger.fromUsers ? JSON.stringify(trigger.fromUsers) : null,
55572
+ from_bots: trigger.fromBots,
55573
+ contains: trigger.contains ? JSON.stringify(trigger.contains) : null,
55574
+ match_pattern: trigger.matchPattern ?? null,
55575
+ threads: trigger.threads,
55576
+ workflow: trigger.workflow,
55577
+ inputs: trigger.inputs ? JSON.stringify(trigger.inputs) : null,
55578
+ output_context: trigger.outputContext ? JSON.stringify(trigger.outputContext) : null,
55579
+ status: trigger.status,
55580
+ enabled: trigger.enabled,
55581
+ created_at: trigger.createdAt
55582
+ };
55583
+ }
55584
+ function fromDbRow2(row) {
55585
+ return {
55586
+ id: row.id,
55587
+ creatorId: row.creator_id,
55588
+ creatorContext: row.creator_context ?? void 0,
55589
+ creatorName: row.creator_name ?? void 0,
55590
+ timezone: row.timezone,
55591
+ schedule: row.schedule_expr,
55592
+ runAt: toNum(row.run_at),
55593
+ isRecurring: row.is_recurring === true || row.is_recurring === 1,
55594
+ originalExpression: row.original_expression,
55595
+ workflow: row.workflow ?? void 0,
55596
+ workflowInputs: safeJsonParse2(row.workflow_inputs),
55597
+ outputContext: safeJsonParse2(row.output_context),
55598
+ status: row.status,
55599
+ createdAt: toNum(row.created_at),
55600
+ lastRunAt: toNum(row.last_run_at),
55601
+ nextRunAt: toNum(row.next_run_at),
55602
+ runCount: row.run_count,
55603
+ failureCount: row.failure_count,
55604
+ lastError: row.last_error ?? void 0,
55605
+ previousResponse: row.previous_response ?? void 0
55606
+ };
55607
+ }
55608
+ function toInsertRow(schedule) {
55609
+ return {
55610
+ id: schedule.id,
55611
+ creator_id: schedule.creatorId,
55612
+ creator_context: schedule.creatorContext ?? null,
55613
+ creator_name: schedule.creatorName ?? null,
55614
+ timezone: schedule.timezone,
55615
+ schedule_expr: schedule.schedule,
55616
+ run_at: schedule.runAt ?? null,
55617
+ is_recurring: schedule.isRecurring,
55618
+ original_expression: schedule.originalExpression,
55619
+ workflow: schedule.workflow ?? null,
55620
+ workflow_inputs: schedule.workflowInputs ? JSON.stringify(schedule.workflowInputs) : null,
55621
+ output_context: schedule.outputContext ? JSON.stringify(schedule.outputContext) : null,
55622
+ status: schedule.status,
55623
+ created_at: schedule.createdAt,
55624
+ last_run_at: schedule.lastRunAt ?? null,
55625
+ next_run_at: schedule.nextRunAt ?? null,
55626
+ run_count: schedule.runCount,
55627
+ failure_count: schedule.failureCount,
55628
+ last_error: schedule.lastError ?? null,
55629
+ previous_response: schedule.previousResponse ?? null
55630
+ };
55631
+ }
55632
+ var fs24, path28, import_uuid2, KnexStoreBackend;
55633
+ var init_knex_store = __esm({
55634
+ "src/enterprise/scheduler/knex-store.ts"() {
55635
+ "use strict";
55636
+ fs24 = __toESM(require("fs"));
55637
+ path28 = __toESM(require("path"));
55638
+ import_uuid2 = require("uuid");
55639
+ init_logger();
55640
+ KnexStoreBackend = class {
55641
+ knex = null;
55642
+ driver;
55643
+ connection;
55644
+ constructor(driver, storageConfig, _haConfig) {
55645
+ this.driver = driver;
55646
+ this.connection = storageConfig.connection || {};
55647
+ }
55648
+ async initialize() {
55649
+ const { createRequire } = require("module");
55650
+ const runtimeRequire = createRequire(__filename);
55651
+ let knexFactory;
55652
+ try {
55653
+ knexFactory = runtimeRequire("knex");
55654
+ } catch (err) {
55655
+ const code = err?.code;
55656
+ if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
55657
+ throw new Error(
55658
+ "knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
55659
+ );
55660
+ }
55661
+ throw err;
55662
+ }
55663
+ const clientMap = {
55664
+ postgresql: "pg",
55665
+ mysql: "mysql2",
55666
+ mssql: "tedious"
55667
+ };
55668
+ const client = clientMap[this.driver];
55669
+ let connection;
55670
+ if (this.connection.connection_string) {
55671
+ connection = this.connection.connection_string;
55672
+ } else if (this.driver === "mssql") {
55673
+ connection = this.buildMssqlConnection();
55674
+ } else {
55675
+ connection = this.buildStandardConnection();
55676
+ }
55677
+ this.knex = knexFactory({
55678
+ client,
55679
+ connection,
55680
+ pool: {
55681
+ min: this.connection.pool?.min ?? 0,
55682
+ max: this.connection.pool?.max ?? 10
55683
+ }
55684
+ });
55685
+ await this.migrateSchema();
55686
+ logger.info(`[KnexStore] Initialized (${this.driver})`);
55687
+ }
55688
+ buildStandardConnection() {
55689
+ return {
55690
+ host: this.connection.host || "localhost",
55691
+ port: this.connection.port,
55692
+ database: this.connection.database || "visor",
55693
+ user: this.connection.user,
55694
+ password: this.connection.password,
55695
+ ssl: this.resolveSslConfig()
55696
+ };
55697
+ }
55698
+ buildMssqlConnection() {
55699
+ const ssl = this.connection.ssl;
55700
+ const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
55701
+ return {
55702
+ server: this.connection.host || "localhost",
55703
+ port: this.connection.port,
55704
+ database: this.connection.database || "visor",
55705
+ user: this.connection.user,
55706
+ password: this.connection.password,
55707
+ options: {
55708
+ encrypt: sslEnabled,
55709
+ trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
55710
+ }
55711
+ };
55712
+ }
55713
+ resolveSslConfig() {
55714
+ const ssl = this.connection.ssl;
55715
+ if (ssl === false || ssl === void 0) return false;
55716
+ if (ssl === true) return { rejectUnauthorized: true };
55717
+ if (ssl.enabled === false) return false;
55718
+ const result = {
55719
+ rejectUnauthorized: ssl.reject_unauthorized !== false
55720
+ };
55721
+ if (ssl.ca) {
55722
+ const caPath = this.validateSslPath(ssl.ca, "CA certificate");
55723
+ result.ca = fs24.readFileSync(caPath, "utf8");
55724
+ }
55725
+ if (ssl.cert) {
55726
+ const certPath = this.validateSslPath(ssl.cert, "client certificate");
55727
+ result.cert = fs24.readFileSync(certPath, "utf8");
55728
+ }
55729
+ if (ssl.key) {
55730
+ const keyPath = this.validateSslPath(ssl.key, "client key");
55731
+ result.key = fs24.readFileSync(keyPath, "utf8");
55732
+ }
55733
+ return result;
55734
+ }
55735
+ validateSslPath(filePath, label) {
55736
+ const resolved = path28.resolve(filePath);
55737
+ if (resolved !== path28.normalize(resolved)) {
55738
+ throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
55739
+ }
55740
+ if (!fs24.existsSync(resolved)) {
55741
+ throw new Error(`SSL ${label} not found: ${filePath}`);
55742
+ }
55743
+ return resolved;
55744
+ }
55745
+ async shutdown() {
55746
+ if (this.knex) {
55747
+ await this.knex.destroy();
55748
+ this.knex = null;
55749
+ }
55750
+ }
55751
+ async migrateSchema() {
55752
+ const knex = this.getKnex();
55753
+ const exists = await knex.schema.hasTable("schedules");
55754
+ if (!exists) {
55755
+ await knex.schema.createTable("schedules", (table) => {
55756
+ table.string("id", 36).primary();
55757
+ table.string("creator_id", 255).notNullable().index();
55758
+ table.string("creator_context", 255);
55759
+ table.string("creator_name", 255);
55760
+ table.string("timezone", 64).notNullable().defaultTo("UTC");
55761
+ table.string("schedule_expr", 255);
55762
+ table.bigInteger("run_at");
55763
+ table.boolean("is_recurring").notNullable();
55764
+ table.text("original_expression");
55765
+ table.string("workflow", 255);
55766
+ table.text("workflow_inputs");
55767
+ table.text("output_context");
55768
+ table.string("status", 20).notNullable().index();
55769
+ table.bigInteger("created_at").notNullable();
55770
+ table.bigInteger("last_run_at");
55771
+ table.bigInteger("next_run_at");
55772
+ table.integer("run_count").notNullable().defaultTo(0);
55773
+ table.integer("failure_count").notNullable().defaultTo(0);
55774
+ table.text("last_error");
55775
+ table.text("previous_response");
55776
+ table.index(["status", "next_run_at"]);
55777
+ });
55778
+ }
55779
+ const triggersExist = await knex.schema.hasTable("message_triggers");
55780
+ if (!triggersExist) {
55781
+ await knex.schema.createTable("message_triggers", (table) => {
55782
+ table.string("id", 36).primary();
55783
+ table.string("creator_id", 255).notNullable().index();
55784
+ table.string("creator_context", 255);
55785
+ table.string("creator_name", 255);
55786
+ table.text("description");
55787
+ table.text("channels");
55788
+ table.text("from_users");
55789
+ table.boolean("from_bots").notNullable().defaultTo(false);
55790
+ table.text("contains");
55791
+ table.text("match_pattern");
55792
+ table.string("threads", 20).notNullable().defaultTo("any");
55793
+ table.string("workflow", 255).notNullable();
55794
+ table.text("inputs");
55795
+ table.text("output_context");
55796
+ table.string("status", 20).notNullable().defaultTo("active").index();
55797
+ table.boolean("enabled").notNullable().defaultTo(true);
55798
+ table.bigInteger("created_at").notNullable();
55799
+ });
55800
+ }
55801
+ const locksExist = await knex.schema.hasTable("scheduler_locks");
55802
+ if (!locksExist) {
55803
+ await knex.schema.createTable("scheduler_locks", (table) => {
55804
+ table.string("lock_id", 255).primary();
55805
+ table.string("node_id", 255).notNullable();
55806
+ table.string("lock_token", 36).notNullable();
55807
+ table.bigInteger("acquired_at").notNullable();
55808
+ table.bigInteger("expires_at").notNullable();
55809
+ });
55810
+ }
55811
+ }
55812
+ getKnex() {
55813
+ if (!this.knex) {
55814
+ throw new Error("[KnexStore] Not initialized. Call initialize() first.");
55815
+ }
55816
+ return this.knex;
55817
+ }
55818
+ // --- CRUD ---
55819
+ async create(schedule) {
55820
+ const knex = this.getKnex();
55821
+ const newSchedule = {
55822
+ ...schedule,
55823
+ id: (0, import_uuid2.v4)(),
55824
+ createdAt: Date.now(),
55825
+ runCount: 0,
55826
+ failureCount: 0,
55827
+ status: "active"
55828
+ };
55829
+ await knex("schedules").insert(toInsertRow(newSchedule));
55830
+ logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
55831
+ return newSchedule;
55832
+ }
55833
+ async importSchedule(schedule) {
55834
+ const knex = this.getKnex();
55835
+ const existing = await knex("schedules").where("id", schedule.id).first();
55836
+ if (existing) return;
55837
+ await knex("schedules").insert(toInsertRow(schedule));
55838
+ }
55839
+ async get(id) {
55840
+ const knex = this.getKnex();
55841
+ const row = await knex("schedules").where("id", id).first();
55842
+ return row ? fromDbRow2(row) : void 0;
55843
+ }
55844
+ async update(id, patch) {
55845
+ const knex = this.getKnex();
55846
+ const existing = await knex("schedules").where("id", id).first();
55847
+ if (!existing) return void 0;
55848
+ const current = fromDbRow2(existing);
55849
+ const updated = { ...current, ...patch, id: current.id };
55850
+ const row = toInsertRow(updated);
55851
+ delete row.id;
55852
+ await knex("schedules").where("id", id).update(row);
55853
+ return updated;
55854
+ }
55855
+ async delete(id) {
55856
+ const knex = this.getKnex();
55857
+ const deleted = await knex("schedules").where("id", id).del();
55858
+ if (deleted > 0) {
55859
+ logger.info(`[KnexStore] Deleted schedule ${id}`);
55860
+ return true;
55861
+ }
55862
+ return false;
55863
+ }
55864
+ // --- Queries ---
55865
+ async getByCreator(creatorId) {
55866
+ const knex = this.getKnex();
55867
+ const rows = await knex("schedules").where("creator_id", creatorId);
55868
+ return rows.map((r) => fromDbRow2(r));
55869
+ }
55870
+ async getActiveSchedules() {
55871
+ const knex = this.getKnex();
55872
+ const rows = await knex("schedules").where("status", "active");
55873
+ return rows.map((r) => fromDbRow2(r));
55874
+ }
55875
+ async getDueSchedules(now) {
55876
+ const ts = now ?? Date.now();
55877
+ const knex = this.getKnex();
55878
+ const bFalse = this.driver === "mssql" ? 0 : false;
55879
+ const bTrue = this.driver === "mssql" ? 1 : true;
55880
+ const rows = await knex("schedules").where("status", "active").andWhere(function() {
55881
+ this.where(function() {
55882
+ this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
55883
+ }).orWhere(function() {
55884
+ this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
55885
+ });
55886
+ });
55887
+ return rows.map((r) => fromDbRow2(r));
55888
+ }
55889
+ async findByWorkflow(creatorId, workflowName) {
55890
+ const knex = this.getKnex();
55891
+ const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
55892
+ const pattern = `%${escaped}%`;
55893
+ const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
55894
+ return rows.map((r) => fromDbRow2(r));
55895
+ }
55896
+ async getAll() {
55897
+ const knex = this.getKnex();
55898
+ const rows = await knex("schedules");
55899
+ return rows.map((r) => fromDbRow2(r));
55900
+ }
55901
+ async getStats() {
55902
+ const knex = this.getKnex();
55903
+ const boolTrue = this.driver === "mssql" ? "1" : "true";
55904
+ const boolFalse = this.driver === "mssql" ? "0" : "false";
55905
+ const result = await knex("schedules").select(
55906
+ knex.raw("COUNT(*) as total"),
55907
+ knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
55908
+ knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
55909
+ knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
55910
+ knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
55911
+ knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
55912
+ knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
55913
+ ).first();
55914
+ return {
55915
+ total: Number(result.total) || 0,
55916
+ active: Number(result.active) || 0,
55917
+ paused: Number(result.paused) || 0,
55918
+ completed: Number(result.completed) || 0,
55919
+ failed: Number(result.failed) || 0,
55920
+ recurring: Number(result.recurring) || 0,
55921
+ oneTime: Number(result.one_time) || 0
55922
+ };
55923
+ }
55924
+ async validateLimits(creatorId, isRecurring, limits) {
55925
+ const knex = this.getKnex();
55926
+ if (limits.maxGlobal) {
55927
+ const result = await knex("schedules").count("* as cnt").first();
55928
+ if (Number(result?.cnt) >= limits.maxGlobal) {
55929
+ throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
55930
+ }
55931
+ }
55932
+ if (limits.maxPerUser) {
55933
+ const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
55934
+ if (Number(result?.cnt) >= limits.maxPerUser) {
55935
+ throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
55936
+ }
55937
+ }
55938
+ if (isRecurring && limits.maxRecurringPerUser) {
55939
+ const bTrue = this.driver === "mssql" ? 1 : true;
55940
+ const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
55941
+ if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
55942
+ throw new Error(
55943
+ `You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
55944
+ );
55945
+ }
55946
+ }
55947
+ }
55948
+ // --- HA Distributed Locking (via scheduler_locks table) ---
55949
+ async tryAcquireLock(lockId, nodeId, ttlSeconds) {
55950
+ const knex = this.getKnex();
55951
+ const now = Date.now();
55952
+ const expiresAt = now + ttlSeconds * 1e3;
55953
+ const token = (0, import_uuid2.v4)();
55954
+ const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
55955
+ node_id: nodeId,
55956
+ lock_token: token,
55957
+ acquired_at: now,
55958
+ expires_at: expiresAt
55959
+ });
55960
+ if (updated > 0) return token;
55961
+ try {
55962
+ await knex("scheduler_locks").insert({
55963
+ lock_id: lockId,
55964
+ node_id: nodeId,
55965
+ lock_token: token,
55966
+ acquired_at: now,
55967
+ expires_at: expiresAt
55968
+ });
55969
+ return token;
55970
+ } catch {
55971
+ return null;
55972
+ }
55973
+ }
55974
+ async releaseLock(lockId, lockToken) {
55975
+ const knex = this.getKnex();
55976
+ await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
55977
+ }
55978
+ async renewLock(lockId, lockToken, ttlSeconds) {
55979
+ const knex = this.getKnex();
55980
+ const now = Date.now();
55981
+ const expiresAt = now + ttlSeconds * 1e3;
55982
+ const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
55983
+ return updated > 0;
55984
+ }
55985
+ async flush() {
55986
+ }
55987
+ // --- Message Trigger CRUD ---
55988
+ async createTrigger(trigger) {
55989
+ const knex = this.getKnex();
55990
+ const newTrigger = {
55991
+ ...trigger,
55992
+ id: (0, import_uuid2.v4)(),
55993
+ createdAt: Date.now()
55994
+ };
55995
+ await knex("message_triggers").insert(toTriggerInsertRow(newTrigger));
55996
+ logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
55997
+ return newTrigger;
55998
+ }
55999
+ async getTrigger(id) {
56000
+ const knex = this.getKnex();
56001
+ const row = await knex("message_triggers").where("id", id).first();
56002
+ return row ? fromTriggerRow2(row) : void 0;
56003
+ }
56004
+ async updateTrigger(id, patch) {
56005
+ const knex = this.getKnex();
56006
+ const existing = await knex("message_triggers").where("id", id).first();
56007
+ if (!existing) return void 0;
56008
+ const current = fromTriggerRow2(existing);
56009
+ const updated = {
56010
+ ...current,
56011
+ ...patch,
56012
+ id: current.id,
56013
+ createdAt: current.createdAt
56014
+ };
56015
+ const row = toTriggerInsertRow(updated);
56016
+ delete row.id;
56017
+ await knex("message_triggers").where("id", id).update(row);
56018
+ return updated;
56019
+ }
56020
+ async deleteTrigger(id) {
56021
+ const knex = this.getKnex();
56022
+ const deleted = await knex("message_triggers").where("id", id).del();
56023
+ if (deleted > 0) {
56024
+ logger.info(`[KnexStore] Deleted trigger ${id}`);
56025
+ return true;
56026
+ }
56027
+ return false;
56028
+ }
56029
+ async getTriggersByCreator(creatorId) {
56030
+ const knex = this.getKnex();
56031
+ const rows = await knex("message_triggers").where("creator_id", creatorId);
56032
+ return rows.map((r) => fromTriggerRow2(r));
56033
+ }
56034
+ async getActiveTriggers() {
56035
+ const knex = this.getKnex();
56036
+ const rows = await knex("message_triggers").where("status", "active").where("enabled", this.driver === "mssql" ? 1 : true);
56037
+ return rows.map((r) => fromTriggerRow2(r));
56038
+ }
56039
+ };
56040
+ }
56041
+ });
56042
+
56043
+ // src/enterprise/loader.ts
56044
+ var loader_exports = {};
56045
+ __export(loader_exports, {
56046
+ loadEnterprisePolicyEngine: () => loadEnterprisePolicyEngine,
56047
+ loadEnterpriseStoreBackend: () => loadEnterpriseStoreBackend
56048
+ });
56049
+ async function loadEnterprisePolicyEngine(config) {
56050
+ try {
56051
+ const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
56052
+ const validator = new LicenseValidator2();
56053
+ const license = await validator.loadAndValidate();
56054
+ if (!license || !validator.hasFeature("policy")) {
56055
+ return new DefaultPolicyEngine();
56056
+ }
56057
+ if (validator.isInGracePeriod()) {
56058
+ console.warn(
56059
+ "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
56060
+ );
56061
+ }
56062
+ const { OpaPolicyEngine: OpaPolicyEngine2 } = await Promise.resolve().then(() => (init_opa_policy_engine(), opa_policy_engine_exports));
56063
+ const engine = new OpaPolicyEngine2(config);
56064
+ await engine.initialize(config);
56065
+ return engine;
56066
+ } catch (err) {
56067
+ const msg = err instanceof Error ? err.message : String(err);
56068
+ try {
56069
+ const { logger: logger2 } = (init_logger(), __toCommonJS(logger_exports));
56070
+ logger2.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
56071
+ } catch {
56072
+ }
56073
+ return new DefaultPolicyEngine();
56074
+ }
56075
+ }
56076
+ async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
56077
+ const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
56078
+ const validator = new LicenseValidator2();
56079
+ const license = await validator.loadAndValidate();
56080
+ if (!license || !validator.hasFeature("scheduler-sql")) {
56081
+ throw new Error(
56082
+ `The ${driver} schedule storage driver requires a Visor Enterprise license with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`
56083
+ );
56084
+ }
56085
+ if (validator.isInGracePeriod()) {
56086
+ console.warn(
56087
+ "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
56088
+ );
56089
+ }
56090
+ const { KnexStoreBackend: KnexStoreBackend2 } = await Promise.resolve().then(() => (init_knex_store(), knex_store_exports));
56091
+ return new KnexStoreBackend2(driver, storageConfig, haConfig);
56092
+ }
56093
+ var init_loader = __esm({
56094
+ "src/enterprise/loader.ts"() {
56095
+ "use strict";
56096
+ init_default_engine();
56097
+ }
56098
+ });
56099
+
54726
56100
  // src/event-bus/event-bus.ts
54727
56101
  var event_bus_exports = {};
54728
56102
  __export(event_bus_exports, {
@@ -55629,8 +57003,8 @@ ${content}
55629
57003
  * Sleep utility
55630
57004
  */
55631
57005
  sleep(ms) {
55632
- return new Promise((resolve15) => {
55633
- const t = setTimeout(resolve15, ms);
57006
+ return new Promise((resolve19) => {
57007
+ const t = setTimeout(resolve19, ms);
55634
57008
  if (typeof t.unref === "function") {
55635
57009
  try {
55636
57010
  t.unref();
@@ -55915,8 +57289,8 @@ ${end}`);
55915
57289
  async updateGroupedComment(ctx, comments, group, changedIds) {
55916
57290
  const existingLock = this.updateLocks.get(group);
55917
57291
  let resolveLock;
55918
- const ourLock = new Promise((resolve15) => {
55919
- resolveLock = resolve15;
57292
+ const ourLock = new Promise((resolve19) => {
57293
+ resolveLock = resolve19;
55920
57294
  });
55921
57295
  this.updateLocks.set(group, ourLock);
55922
57296
  try {
@@ -56229,7 +57603,7 @@ ${blocks}
56229
57603
  * Sleep utility for enforcing delays
56230
57604
  */
56231
57605
  sleep(ms) {
56232
- return new Promise((resolve15) => setTimeout(resolve15, ms));
57606
+ return new Promise((resolve19) => setTimeout(resolve19, ms));
56233
57607
  }
56234
57608
  };
56235
57609
  }
@@ -57521,15 +58895,15 @@ function serializeRunState(state) {
57521
58895
  ])
57522
58896
  };
57523
58897
  }
57524
- var path26, fs22, StateMachineExecutionEngine;
58898
+ var path30, fs26, StateMachineExecutionEngine;
57525
58899
  var init_state_machine_execution_engine = __esm({
57526
58900
  "src/state-machine-execution-engine.ts"() {
57527
58901
  "use strict";
57528
58902
  init_runner();
57529
58903
  init_logger();
57530
58904
  init_sandbox_manager();
57531
- path26 = __toESM(require("path"));
57532
- fs22 = __toESM(require("fs"));
58905
+ path30 = __toESM(require("path"));
58906
+ fs26 = __toESM(require("fs"));
57533
58907
  StateMachineExecutionEngine = class _StateMachineExecutionEngine {
57534
58908
  workingDirectory;
57535
58909
  executionContext;
@@ -57761,8 +59135,8 @@ var init_state_machine_execution_engine = __esm({
57761
59135
  logger.debug(
57762
59136
  `[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`
57763
59137
  );
57764
- const { loadEnterprisePolicyEngine } = await import("./enterprise/loader");
57765
- context2.policyEngine = await loadEnterprisePolicyEngine(configWithTagFilter.policy);
59138
+ const { loadEnterprisePolicyEngine: loadEnterprisePolicyEngine2 } = await Promise.resolve().then(() => (init_loader(), loader_exports));
59139
+ context2.policyEngine = await loadEnterprisePolicyEngine2(configWithTagFilter.policy);
57766
59140
  logger.debug(
57767
59141
  `[PolicyEngine] Initialized: ${context2.policyEngine?.constructor?.name || "unknown"}`
57768
59142
  );
@@ -57914,9 +59288,9 @@ var init_state_machine_execution_engine = __esm({
57914
59288
  }
57915
59289
  const checkId = String(ev?.checkId || "unknown");
57916
59290
  const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
57917
- const baseDir = process.env.VISOR_SNAPSHOT_DIR || path26.resolve(process.cwd(), ".visor", "snapshots");
57918
- fs22.mkdirSync(baseDir, { recursive: true });
57919
- const filePath = path26.join(baseDir, `${threadKey}-${checkId}.json`);
59291
+ const baseDir = process.env.VISOR_SNAPSHOT_DIR || path30.resolve(process.cwd(), ".visor", "snapshots");
59292
+ fs26.mkdirSync(baseDir, { recursive: true });
59293
+ const filePath = path30.join(baseDir, `${threadKey}-${checkId}.json`);
57920
59294
  await this.saveSnapshotToFile(filePath);
57921
59295
  logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
57922
59296
  try {
@@ -58057,7 +59431,7 @@ var init_state_machine_execution_engine = __esm({
58057
59431
  * Does not include secrets. Intended for debugging and future resume support.
58058
59432
  */
58059
59433
  async saveSnapshotToFile(filePath) {
58060
- const fs23 = await import("fs/promises");
59434
+ const fs27 = await import("fs/promises");
58061
59435
  const ctx = this._lastContext;
58062
59436
  const runner = this._lastRunner;
58063
59437
  if (!ctx || !runner) {
@@ -58077,14 +59451,14 @@ var init_state_machine_execution_engine = __esm({
58077
59451
  journal: entries,
58078
59452
  requestedChecks: ctx.requestedChecks || []
58079
59453
  };
58080
- await fs23.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
59454
+ await fs27.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
58081
59455
  }
58082
59456
  /**
58083
59457
  * Load a snapshot JSON from file and return it. Resume support can build on this.
58084
59458
  */
58085
59459
  async loadSnapshotFromFile(filePath) {
58086
- const fs23 = await import("fs/promises");
58087
- const raw = await fs23.readFile(filePath, "utf8");
59460
+ const fs27 = await import("fs/promises");
59461
+ const raw = await fs27.readFile(filePath, "utf8");
58088
59462
  return JSON.parse(raw);
58089
59463
  }
58090
59464
  /**