@probelabs/visor 0.1.159 → 0.1.160-ee

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59) hide show
  1. package/defaults/assistant.yaml +143 -0
  2. package/defaults/skills/code-explorer.yaml +41 -0
  3. package/dist/defaults/assistant.yaml +143 -0
  4. package/dist/defaults/skills/code-explorer.yaml +41 -0
  5. package/dist/index.js +2012 -45
  6. package/dist/providers/workflow-check-provider.d.ts.map +1 -1
  7. package/dist/sdk/{check-provider-registry-UVLGKORU.mjs → check-provider-registry-EFBU3XPX.mjs} +5 -5
  8. package/dist/sdk/{chunk-5N33URMM.mjs → chunk-2LL6GIEU.mjs} +3 -3
  9. package/dist/sdk/{chunk-PZONH3RK.mjs → chunk-CYIKOFJZ.mjs} +3 -3
  10. package/dist/sdk/chunk-CYIKOFJZ.mjs.map +1 -0
  11. package/dist/sdk/{chunk-AW3IHBCI.mjs → chunk-FHWFG5AS.mjs} +112 -23
  12. package/dist/sdk/{chunk-AW3IHBCI.mjs.map → chunk-FHWFG5AS.mjs.map} +1 -1
  13. package/dist/sdk/{chunk-PSZI5NZL.mjs → chunk-HT3EFZ75.mjs} +2 -2
  14. package/dist/sdk/{failure-condition-evaluator-LQ5NUFZN.mjs → failure-condition-evaluator-3Q45T5EW.mjs} +3 -3
  15. package/dist/sdk/{github-frontend-YSRVB5FS.mjs → github-frontend-SZBGCH43.mjs} +3 -3
  16. package/dist/sdk/{host-YG6LPL6H.mjs → host-Z6OFA24O.mjs} +2 -2
  17. package/dist/sdk/knex-store-CRORFJE6.mjs +527 -0
  18. package/dist/sdk/knex-store-CRORFJE6.mjs.map +1 -0
  19. package/dist/sdk/loader-NJCF7DUS.mjs +89 -0
  20. package/dist/sdk/loader-NJCF7DUS.mjs.map +1 -0
  21. package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs +655 -0
  22. package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs.map +1 -0
  23. package/dist/sdk/{routing-ZJSROSUU.mjs → routing-GGZBWOLZ.mjs} +4 -4
  24. package/dist/sdk/{schedule-tool-2GSWLIWA.mjs → schedule-tool-DWHKTTEY.mjs} +5 -5
  25. package/dist/sdk/{schedule-tool-handler-HWHHAHJD.mjs → schedule-tool-handler-IZPIN2GK.mjs} +5 -5
  26. package/dist/sdk/sdk.js +1730 -267
  27. package/dist/sdk/sdk.js.map +1 -1
  28. package/dist/sdk/sdk.mjs +4 -4
  29. package/dist/sdk/{trace-helpers-23KC3NGK.mjs → trace-helpers-AXJZLLWE.mjs} +2 -2
  30. package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
  31. package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
  32. package/dist/sdk/{workflow-check-provider-DHNWJKYK.mjs → workflow-check-provider-XGW3WVPS.mjs} +5 -5
  33. package/package.json +2 -2
  34. package/dist/output/traces/run-2026-03-05T15-45-37-714Z.ndjson +0 -138
  35. package/dist/output/traces/run-2026-03-05T15-46-23-118Z.ndjson +0 -2197
  36. package/dist/sdk/check-provider-registry-74VLAETF.mjs +0 -29
  37. package/dist/sdk/chunk-IVTSD7ZW.mjs +0 -43780
  38. package/dist/sdk/chunk-IVTSD7ZW.mjs.map +0 -1
  39. package/dist/sdk/chunk-PZONH3RK.mjs.map +0 -1
  40. package/dist/sdk/schedule-tool-QCJWLYS3.mjs +0 -35
  41. package/dist/sdk/schedule-tool-handler-K2LO33KU.mjs +0 -39
  42. package/dist/sdk/schedule-tool-handler-K2LO33KU.mjs.map +0 -1
  43. package/dist/sdk/trace-helpers-23KC3NGK.mjs.map +0 -1
  44. package/dist/sdk/workflow-check-provider-DHNWJKYK.mjs.map +0 -1
  45. package/dist/sdk/workflow-check-provider-OUREVWAI.mjs +0 -29
  46. package/dist/sdk/workflow-check-provider-OUREVWAI.mjs.map +0 -1
  47. package/dist/traces/run-2026-03-05T15-45-37-714Z.ndjson +0 -138
  48. package/dist/traces/run-2026-03-05T15-46-23-118Z.ndjson +0 -2197
  49. /package/dist/sdk/{check-provider-registry-74VLAETF.mjs.map → check-provider-registry-EFBU3XPX.mjs.map} +0 -0
  50. /package/dist/sdk/{chunk-5N33URMM.mjs.map → chunk-2LL6GIEU.mjs.map} +0 -0
  51. /package/dist/sdk/{chunk-PSZI5NZL.mjs.map → chunk-HT3EFZ75.mjs.map} +0 -0
  52. /package/dist/sdk/{check-provider-registry-UVLGKORU.mjs.map → failure-condition-evaluator-3Q45T5EW.mjs.map} +0 -0
  53. /package/dist/sdk/{github-frontend-YSRVB5FS.mjs.map → github-frontend-SZBGCH43.mjs.map} +0 -0
  54. /package/dist/sdk/{host-YG6LPL6H.mjs.map → host-Z6OFA24O.mjs.map} +0 -0
  55. /package/dist/sdk/{failure-condition-evaluator-LQ5NUFZN.mjs.map → routing-GGZBWOLZ.mjs.map} +0 -0
  56. /package/dist/sdk/{routing-ZJSROSUU.mjs.map → schedule-tool-DWHKTTEY.mjs.map} +0 -0
  57. /package/dist/sdk/{schedule-tool-2GSWLIWA.mjs.map → schedule-tool-handler-IZPIN2GK.mjs.map} +0 -0
  58. /package/dist/sdk/{schedule-tool-QCJWLYS3.mjs.map → trace-helpers-AXJZLLWE.mjs.map} +0 -0
  59. /package/dist/sdk/{schedule-tool-handler-HWHHAHJD.mjs.map → workflow-check-provider-XGW3WVPS.mjs.map} +0 -0
package/dist/sdk/sdk.js CHANGED
@@ -646,7 +646,7 @@ var require_package = __commonJS({
646
646
  "package.json"(exports2, module2) {
647
647
  module2.exports = {
648
648
  name: "@probelabs/visor",
649
- version: "0.1.159",
649
+ version: "0.1.42",
650
650
  main: "dist/index.js",
651
651
  bin: {
652
652
  visor: "./dist/index.js"
@@ -760,7 +760,7 @@ var require_package = __commonJS({
760
760
  "@opentelemetry/sdk-node": "^0.203.0",
761
761
  "@opentelemetry/sdk-trace-base": "^1.30.1",
762
762
  "@opentelemetry/semantic-conventions": "^1.30.1",
763
- "@probelabs/probe": "^0.6.0-rc274",
763
+ "@probelabs/probe": "^0.6.0-rc275",
764
764
  "@types/commander": "^2.12.0",
765
765
  "@types/uuid": "^10.0.0",
766
766
  acorn: "^8.16.0",
@@ -864,11 +864,11 @@ function getTracer() {
864
864
  }
865
865
  async function withActiveSpan(name, attrs, fn) {
866
866
  const tracer = getTracer();
867
- return await new Promise((resolve15, reject) => {
867
+ return await new Promise((resolve19, reject) => {
868
868
  const callback = async (span) => {
869
869
  try {
870
870
  const res = await fn(span);
871
- resolve15(res);
871
+ resolve19(res);
872
872
  } catch (err) {
873
873
  try {
874
874
  if (err instanceof Error) span.recordException(err);
@@ -945,19 +945,19 @@ function __getOrCreateNdjsonPath() {
945
945
  try {
946
946
  if (process.env.VISOR_TELEMETRY_SINK && process.env.VISOR_TELEMETRY_SINK !== "file")
947
947
  return null;
948
- const path27 = require("path");
949
- const fs23 = require("fs");
948
+ const path31 = require("path");
949
+ const fs27 = require("fs");
950
950
  if (process.env.VISOR_FALLBACK_TRACE_FILE) {
951
951
  __ndjsonPath = process.env.VISOR_FALLBACK_TRACE_FILE;
952
- const dir = path27.dirname(__ndjsonPath);
953
- if (!fs23.existsSync(dir)) fs23.mkdirSync(dir, { recursive: true });
952
+ const dir = path31.dirname(__ndjsonPath);
953
+ if (!fs27.existsSync(dir)) fs27.mkdirSync(dir, { recursive: true });
954
954
  return __ndjsonPath;
955
955
  }
956
- const outDir = process.env.VISOR_TRACE_DIR || path27.join(process.cwd(), "output", "traces");
957
- if (!fs23.existsSync(outDir)) fs23.mkdirSync(outDir, { recursive: true });
956
+ const outDir = process.env.VISOR_TRACE_DIR || path31.join(process.cwd(), "output", "traces");
957
+ if (!fs27.existsSync(outDir)) fs27.mkdirSync(outDir, { recursive: true });
958
958
  if (!__ndjsonPath) {
959
959
  const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
960
- __ndjsonPath = path27.join(outDir, `${ts}.ndjson`);
960
+ __ndjsonPath = path31.join(outDir, `${ts}.ndjson`);
961
961
  }
962
962
  return __ndjsonPath;
963
963
  } catch {
@@ -966,11 +966,11 @@ function __getOrCreateNdjsonPath() {
966
966
  }
967
967
  function _appendRunMarker() {
968
968
  try {
969
- const fs23 = require("fs");
969
+ const fs27 = require("fs");
970
970
  const p = __getOrCreateNdjsonPath();
971
971
  if (!p) return;
972
972
  const line = { name: "visor.run", attributes: { started: true } };
973
- fs23.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
973
+ fs27.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
974
974
  } catch {
975
975
  }
976
976
  }
@@ -3193,7 +3193,7 @@ var init_failure_condition_evaluator = __esm({
3193
3193
  */
3194
3194
  evaluateExpression(condition, context2) {
3195
3195
  try {
3196
- const normalize4 = (expr) => {
3196
+ const normalize8 = (expr) => {
3197
3197
  const trimmed = expr.trim();
3198
3198
  if (!/[\n;]/.test(trimmed)) return trimmed;
3199
3199
  const parts = trimmed.split(/[\n;]+/).map((s) => s.trim()).filter((s) => s.length > 0 && !s.startsWith("//"));
@@ -3351,7 +3351,7 @@ var init_failure_condition_evaluator = __esm({
3351
3351
  try {
3352
3352
  exec2 = this.sandbox.compile(`return (${raw});`);
3353
3353
  } catch {
3354
- const normalizedExpr = normalize4(condition);
3354
+ const normalizedExpr = normalize8(condition);
3355
3355
  exec2 = this.sandbox.compile(`return (${normalizedExpr});`);
3356
3356
  }
3357
3357
  const result = exec2(scope).run();
@@ -3734,9 +3734,9 @@ function configureLiquidWithExtensions(liquid) {
3734
3734
  });
3735
3735
  liquid.registerFilter("get", (obj, pathExpr) => {
3736
3736
  if (obj == null) return void 0;
3737
- const path27 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
3738
- if (!path27) return obj;
3739
- const parts = path27.split(".");
3737
+ const path31 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
3738
+ if (!path31) return obj;
3739
+ const parts = path31.split(".");
3740
3740
  let cur = obj;
3741
3741
  for (const p of parts) {
3742
3742
  if (cur == null) return void 0;
@@ -3855,9 +3855,9 @@ function configureLiquidWithExtensions(liquid) {
3855
3855
  }
3856
3856
  }
3857
3857
  const defaultRole = typeof rolesCfg.default === "string" && rolesCfg.default.trim() ? rolesCfg.default.trim() : void 0;
3858
- const getNested = (obj, path27) => {
3859
- if (!obj || !path27) return void 0;
3860
- const parts = path27.split(".");
3858
+ const getNested = (obj, path31) => {
3859
+ if (!obj || !path31) return void 0;
3860
+ const parts = path31.split(".");
3861
3861
  let cur = obj;
3862
3862
  for (const p of parts) {
3863
3863
  if (cur == null) return void 0;
@@ -6409,8 +6409,8 @@ var init_dependency_gating = __esm({
6409
6409
  async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
6410
6410
  try {
6411
6411
  const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
6412
- const fs23 = await import("fs/promises");
6413
- const path27 = await import("path");
6412
+ const fs27 = await import("fs/promises");
6413
+ const path31 = await import("path");
6414
6414
  const schemaRaw = checkConfig.schema || "plain";
6415
6415
  const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
6416
6416
  let templateContent;
@@ -6418,24 +6418,24 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
6418
6418
  templateContent = String(checkConfig.template.content);
6419
6419
  } else if (checkConfig.template && checkConfig.template.file) {
6420
6420
  const file = String(checkConfig.template.file);
6421
- const resolved = path27.resolve(process.cwd(), file);
6422
- templateContent = await fs23.readFile(resolved, "utf-8");
6421
+ const resolved = path31.resolve(process.cwd(), file);
6422
+ templateContent = await fs27.readFile(resolved, "utf-8");
6423
6423
  } else if (schema && schema !== "plain") {
6424
6424
  const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
6425
6425
  if (sanitized) {
6426
6426
  const candidatePaths = [
6427
- path27.join(__dirname, "output", sanitized, "template.liquid"),
6427
+ path31.join(__dirname, "output", sanitized, "template.liquid"),
6428
6428
  // bundled: dist/output/
6429
- path27.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
6429
+ path31.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
6430
6430
  // source: output/
6431
- path27.join(process.cwd(), "output", sanitized, "template.liquid"),
6431
+ path31.join(process.cwd(), "output", sanitized, "template.liquid"),
6432
6432
  // fallback: cwd/output/
6433
- path27.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
6433
+ path31.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
6434
6434
  // fallback: cwd/dist/output/
6435
6435
  ];
6436
6436
  for (const p of candidatePaths) {
6437
6437
  try {
6438
- templateContent = await fs23.readFile(p, "utf-8");
6438
+ templateContent = await fs27.readFile(p, "utf-8");
6439
6439
  if (templateContent) break;
6440
6440
  } catch {
6441
6441
  }
@@ -6840,7 +6840,7 @@ async function processDiffWithOutline(diffContent) {
6840
6840
  }
6841
6841
  try {
6842
6842
  const originalProbePath = process.env.PROBE_PATH;
6843
- const fs23 = require("fs");
6843
+ const fs27 = require("fs");
6844
6844
  const possiblePaths = [
6845
6845
  // Relative to current working directory (most common in production)
6846
6846
  path6.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
@@ -6851,7 +6851,7 @@ async function processDiffWithOutline(diffContent) {
6851
6851
  ];
6852
6852
  let probeBinaryPath;
6853
6853
  for (const candidatePath of possiblePaths) {
6854
- if (fs23.existsSync(candidatePath)) {
6854
+ if (fs27.existsSync(candidatePath)) {
6855
6855
  probeBinaryPath = candidatePath;
6856
6856
  break;
6857
6857
  }
@@ -6958,7 +6958,7 @@ async function renderMermaidToPng(mermaidCode) {
6958
6958
  if (chromiumPath) {
6959
6959
  env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;
6960
6960
  }
6961
- const result = await new Promise((resolve15) => {
6961
+ const result = await new Promise((resolve19) => {
6962
6962
  const proc = (0, import_child_process.spawn)(
6963
6963
  "npx",
6964
6964
  [
@@ -6988,13 +6988,13 @@ async function renderMermaidToPng(mermaidCode) {
6988
6988
  });
6989
6989
  proc.on("close", (code) => {
6990
6990
  if (code === 0) {
6991
- resolve15({ success: true });
6991
+ resolve19({ success: true });
6992
6992
  } else {
6993
- resolve15({ success: false, error: stderr || `Exit code ${code}` });
6993
+ resolve19({ success: false, error: stderr || `Exit code ${code}` });
6994
6994
  }
6995
6995
  });
6996
6996
  proc.on("error", (err) => {
6997
- resolve15({ success: false, error: err.message });
6997
+ resolve19({ success: false, error: err.message });
6998
6998
  });
6999
6999
  });
7000
7000
  if (!result.success) {
@@ -8156,8 +8156,8 @@ ${schemaString}`);
8156
8156
  }
8157
8157
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8158
8158
  try {
8159
- const fs23 = require("fs");
8160
- const path27 = require("path");
8159
+ const fs27 = require("fs");
8160
+ const path31 = require("path");
8161
8161
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8162
8162
  const provider = this.config.provider || "auto";
8163
8163
  const model = this.config.model || "default";
@@ -8271,20 +8271,20 @@ ${"=".repeat(60)}
8271
8271
  `;
8272
8272
  readableVersion += `${"=".repeat(60)}
8273
8273
  `;
8274
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8275
- if (!fs23.existsSync(debugArtifactsDir)) {
8276
- fs23.mkdirSync(debugArtifactsDir, { recursive: true });
8274
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8275
+ if (!fs27.existsSync(debugArtifactsDir)) {
8276
+ fs27.mkdirSync(debugArtifactsDir, { recursive: true });
8277
8277
  }
8278
- const debugFile = path27.join(
8278
+ const debugFile = path31.join(
8279
8279
  debugArtifactsDir,
8280
8280
  `prompt-${_checkName || "unknown"}-${timestamp}.json`
8281
8281
  );
8282
- fs23.writeFileSync(debugFile, debugJson, "utf-8");
8283
- const readableFile = path27.join(
8282
+ fs27.writeFileSync(debugFile, debugJson, "utf-8");
8283
+ const readableFile = path31.join(
8284
8284
  debugArtifactsDir,
8285
8285
  `prompt-${_checkName || "unknown"}-${timestamp}.txt`
8286
8286
  );
8287
- fs23.writeFileSync(readableFile, readableVersion, "utf-8");
8287
+ fs27.writeFileSync(readableFile, readableVersion, "utf-8");
8288
8288
  log(`
8289
8289
  \u{1F4BE} Full debug info saved to:`);
8290
8290
  log(` JSON: ${debugFile}`);
@@ -8317,8 +8317,8 @@ ${"=".repeat(60)}
8317
8317
  log(`\u{1F4E4} Response length: ${response.length} characters`);
8318
8318
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8319
8319
  try {
8320
- const fs23 = require("fs");
8321
- const path27 = require("path");
8320
+ const fs27 = require("fs");
8321
+ const path31 = require("path");
8322
8322
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8323
8323
  const agentAny2 = agent;
8324
8324
  let fullHistory = [];
@@ -8329,8 +8329,8 @@ ${"=".repeat(60)}
8329
8329
  } else if (agentAny2._messages) {
8330
8330
  fullHistory = agentAny2._messages;
8331
8331
  }
8332
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8333
- const sessionBase = path27.join(
8332
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8333
+ const sessionBase = path31.join(
8334
8334
  debugArtifactsDir,
8335
8335
  `session-${_checkName || "unknown"}-${timestamp}`
8336
8336
  );
@@ -8342,7 +8342,7 @@ ${"=".repeat(60)}
8342
8342
  schema: effectiveSchema,
8343
8343
  totalMessages: fullHistory.length
8344
8344
  };
8345
- fs23.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8345
+ fs27.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8346
8346
  let readable = `=============================================================
8347
8347
  `;
8348
8348
  readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
@@ -8369,7 +8369,7 @@ ${"=".repeat(60)}
8369
8369
  `;
8370
8370
  readable += content + "\n";
8371
8371
  });
8372
- fs23.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8372
+ fs27.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8373
8373
  log(`\u{1F4BE} Complete session history saved:`);
8374
8374
  log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
8375
8375
  } catch (error) {
@@ -8378,11 +8378,11 @@ ${"=".repeat(60)}
8378
8378
  }
8379
8379
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8380
8380
  try {
8381
- const fs23 = require("fs");
8382
- const path27 = require("path");
8381
+ const fs27 = require("fs");
8382
+ const path31 = require("path");
8383
8383
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8384
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8385
- const responseFile = path27.join(
8384
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8385
+ const responseFile = path31.join(
8386
8386
  debugArtifactsDir,
8387
8387
  `response-${_checkName || "unknown"}-${timestamp}.txt`
8388
8388
  );
@@ -8415,7 +8415,7 @@ ${"=".repeat(60)}
8415
8415
  `;
8416
8416
  responseContent += `${"=".repeat(60)}
8417
8417
  `;
8418
- fs23.writeFileSync(responseFile, responseContent, "utf-8");
8418
+ fs27.writeFileSync(responseFile, responseContent, "utf-8");
8419
8419
  log(`\u{1F4BE} Response saved to: ${responseFile}`);
8420
8420
  } catch (error) {
8421
8421
  log(`\u26A0\uFE0F Could not save response file: ${error}`);
@@ -8431,9 +8431,9 @@ ${"=".repeat(60)}
8431
8431
  await agentAny._telemetryConfig.shutdown();
8432
8432
  log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
8433
8433
  if (process.env.GITHUB_ACTIONS) {
8434
- const fs23 = require("fs");
8435
- if (fs23.existsSync(agentAny._traceFilePath)) {
8436
- const stats = fs23.statSync(agentAny._traceFilePath);
8434
+ const fs27 = require("fs");
8435
+ if (fs27.existsSync(agentAny._traceFilePath)) {
8436
+ const stats = fs27.statSync(agentAny._traceFilePath);
8437
8437
  console.log(
8438
8438
  `::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
8439
8439
  );
@@ -8640,9 +8640,9 @@ ${schemaString}`);
8640
8640
  const model = this.config.model || "default";
8641
8641
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8642
8642
  try {
8643
- const fs23 = require("fs");
8644
- const path27 = require("path");
8645
- const os2 = require("os");
8643
+ const fs27 = require("fs");
8644
+ const path31 = require("path");
8645
+ const os3 = require("os");
8646
8646
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8647
8647
  const debugData = {
8648
8648
  timestamp,
@@ -8714,19 +8714,19 @@ ${"=".repeat(60)}
8714
8714
  `;
8715
8715
  readableVersion += `${"=".repeat(60)}
8716
8716
  `;
8717
- const tempDir = os2.tmpdir();
8718
- const promptFile = path27.join(tempDir, `visor-prompt-${timestamp}.txt`);
8719
- fs23.writeFileSync(promptFile, prompt, "utf-8");
8717
+ const tempDir = os3.tmpdir();
8718
+ const promptFile = path31.join(tempDir, `visor-prompt-${timestamp}.txt`);
8719
+ fs27.writeFileSync(promptFile, prompt, "utf-8");
8720
8720
  log(`
8721
8721
  \u{1F4BE} Prompt saved to: ${promptFile}`);
8722
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8722
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8723
8723
  try {
8724
- const base = path27.join(
8724
+ const base = path31.join(
8725
8725
  debugArtifactsDir,
8726
8726
  `prompt-${_checkName || "unknown"}-${timestamp}`
8727
8727
  );
8728
- fs23.writeFileSync(base + ".json", debugJson, "utf-8");
8729
- fs23.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
8728
+ fs27.writeFileSync(base + ".json", debugJson, "utf-8");
8729
+ fs27.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
8730
8730
  log(`
8731
8731
  \u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
8732
8732
  } catch {
@@ -8771,8 +8771,8 @@ $ ${cliCommand}
8771
8771
  log(`\u{1F4E4} Response length: ${response.length} characters`);
8772
8772
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8773
8773
  try {
8774
- const fs23 = require("fs");
8775
- const path27 = require("path");
8774
+ const fs27 = require("fs");
8775
+ const path31 = require("path");
8776
8776
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8777
8777
  const agentAny = agent;
8778
8778
  let fullHistory = [];
@@ -8783,8 +8783,8 @@ $ ${cliCommand}
8783
8783
  } else if (agentAny._messages) {
8784
8784
  fullHistory = agentAny._messages;
8785
8785
  }
8786
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8787
- const sessionBase = path27.join(
8786
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8787
+ const sessionBase = path31.join(
8788
8788
  debugArtifactsDir,
8789
8789
  `session-${_checkName || "unknown"}-${timestamp}`
8790
8790
  );
@@ -8796,7 +8796,7 @@ $ ${cliCommand}
8796
8796
  schema: effectiveSchema,
8797
8797
  totalMessages: fullHistory.length
8798
8798
  };
8799
- fs23.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8799
+ fs27.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8800
8800
  let readable = `=============================================================
8801
8801
  `;
8802
8802
  readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
@@ -8823,7 +8823,7 @@ ${"=".repeat(60)}
8823
8823
  `;
8824
8824
  readable += content + "\n";
8825
8825
  });
8826
- fs23.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8826
+ fs27.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8827
8827
  log(`\u{1F4BE} Complete session history saved:`);
8828
8828
  log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
8829
8829
  } catch (error) {
@@ -8832,11 +8832,11 @@ ${"=".repeat(60)}
8832
8832
  }
8833
8833
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8834
8834
  try {
8835
- const fs23 = require("fs");
8836
- const path27 = require("path");
8835
+ const fs27 = require("fs");
8836
+ const path31 = require("path");
8837
8837
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8838
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8839
- const responseFile = path27.join(
8838
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8839
+ const responseFile = path31.join(
8840
8840
  debugArtifactsDir,
8841
8841
  `response-${_checkName || "unknown"}-${timestamp}.txt`
8842
8842
  );
@@ -8869,7 +8869,7 @@ ${"=".repeat(60)}
8869
8869
  `;
8870
8870
  responseContent += `${"=".repeat(60)}
8871
8871
  `;
8872
- fs23.writeFileSync(responseFile, responseContent, "utf-8");
8872
+ fs27.writeFileSync(responseFile, responseContent, "utf-8");
8873
8873
  log(`\u{1F4BE} Response saved to: ${responseFile}`);
8874
8874
  } catch (error) {
8875
8875
  log(`\u26A0\uFE0F Could not save response file: ${error}`);
@@ -8887,9 +8887,9 @@ ${"=".repeat(60)}
8887
8887
  await telemetry.shutdown();
8888
8888
  log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
8889
8889
  if (process.env.GITHUB_ACTIONS) {
8890
- const fs23 = require("fs");
8891
- if (fs23.existsSync(traceFilePath)) {
8892
- const stats = fs23.statSync(traceFilePath);
8890
+ const fs27 = require("fs");
8891
+ if (fs27.existsSync(traceFilePath)) {
8892
+ const stats = fs27.statSync(traceFilePath);
8893
8893
  console.log(
8894
8894
  `::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
8895
8895
  );
@@ -8927,8 +8927,8 @@ ${"=".repeat(60)}
8927
8927
  * Load schema content from schema files or inline definitions
8928
8928
  */
8929
8929
  async loadSchemaContent(schema) {
8930
- const fs23 = require("fs").promises;
8931
- const path27 = require("path");
8930
+ const fs27 = require("fs").promises;
8931
+ const path31 = require("path");
8932
8932
  if (typeof schema === "object" && schema !== null) {
8933
8933
  log("\u{1F4CB} Using inline schema object from configuration");
8934
8934
  return JSON.stringify(schema);
@@ -8941,14 +8941,14 @@ ${"=".repeat(60)}
8941
8941
  }
8942
8942
  } catch {
8943
8943
  }
8944
- if ((schema.startsWith("./") || schema.includes(".json")) && !path27.isAbsolute(schema)) {
8944
+ if ((schema.startsWith("./") || schema.includes(".json")) && !path31.isAbsolute(schema)) {
8945
8945
  if (schema.includes("..") || schema.includes("\0")) {
8946
8946
  throw new Error("Invalid schema path: path traversal not allowed");
8947
8947
  }
8948
8948
  try {
8949
- const schemaPath = path27.resolve(process.cwd(), schema);
8949
+ const schemaPath = path31.resolve(process.cwd(), schema);
8950
8950
  log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
8951
- const schemaContent = await fs23.readFile(schemaPath, "utf-8");
8951
+ const schemaContent = await fs27.readFile(schemaPath, "utf-8");
8952
8952
  return schemaContent.trim();
8953
8953
  } catch (error) {
8954
8954
  throw new Error(
@@ -8962,22 +8962,22 @@ ${"=".repeat(60)}
8962
8962
  }
8963
8963
  const candidatePaths = [
8964
8964
  // GitHub Action bundle location
8965
- path27.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
8965
+ path31.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
8966
8966
  // Historical fallback when src/output was inadvertently bundled as output1/
8967
- path27.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
8967
+ path31.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
8968
8968
  // Local dev (repo root)
8969
- path27.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
8969
+ path31.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
8970
8970
  ];
8971
8971
  for (const schemaPath of candidatePaths) {
8972
8972
  try {
8973
- const schemaContent = await fs23.readFile(schemaPath, "utf-8");
8973
+ const schemaContent = await fs27.readFile(schemaPath, "utf-8");
8974
8974
  return schemaContent.trim();
8975
8975
  } catch {
8976
8976
  }
8977
8977
  }
8978
- const distPath = path27.join(__dirname, "output", sanitizedSchemaName, "schema.json");
8979
- const distAltPath = path27.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
8980
- const cwdPath = path27.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
8978
+ const distPath = path31.join(__dirname, "output", sanitizedSchemaName, "schema.json");
8979
+ const distAltPath = path31.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
8980
+ const cwdPath = path31.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
8981
8981
  throw new Error(
8982
8982
  `Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
8983
8983
  );
@@ -9222,7 +9222,7 @@ ${"=".repeat(60)}
9222
9222
  * Generate mock response for testing
9223
9223
  */
9224
9224
  async generateMockResponse(_prompt, _checkName, _schema) {
9225
- await new Promise((resolve15) => setTimeout(resolve15, 500));
9225
+ await new Promise((resolve19) => setTimeout(resolve19, 500));
9226
9226
  const name = (_checkName || "").toLowerCase();
9227
9227
  if (name.includes("extract-facts")) {
9228
9228
  const arr = Array.from({ length: 6 }, (_, i) => ({
@@ -9583,7 +9583,7 @@ var init_command_executor = __esm({
9583
9583
  * Execute command with stdin input
9584
9584
  */
9585
9585
  executeWithStdin(command, options) {
9586
- return new Promise((resolve15, reject) => {
9586
+ return new Promise((resolve19, reject) => {
9587
9587
  const childProcess = (0, import_child_process2.exec)(
9588
9588
  command,
9589
9589
  {
@@ -9595,7 +9595,7 @@ var init_command_executor = __esm({
9595
9595
  if (error && error.killed && (error.code === "ETIMEDOUT" || error.signal === "SIGTERM")) {
9596
9596
  reject(new Error(`Command timed out after ${options.timeout || 3e4}ms`));
9597
9597
  } else {
9598
- resolve15({
9598
+ resolve19({
9599
9599
  stdout: stdout || "",
9600
9600
  stderr: stderr || "",
9601
9601
  exitCode: error ? error.code || 1 : 0
@@ -17142,24 +17142,113 @@ var init_workflow_check_provider = __esm({
17142
17142
  const parentInputs = config.workflowInputs || {};
17143
17143
  const basePath = config.basePath || config._parentContext?.originalWorkingDirectory || config._parentContext?.workingDirectory || process.cwd();
17144
17144
  const loadConfigLiquid = createExtendedLiquid();
17145
+ const deepMerge = (base, override) => {
17146
+ if (!base || !override || typeof base !== "object" || typeof override !== "object" || Array.isArray(base) || Array.isArray(override)) {
17147
+ return override;
17148
+ }
17149
+ const result = { ...base };
17150
+ for (const key of Object.keys(override)) {
17151
+ if (key in result && typeof result[key] === "object" && result[key] !== null && !Array.isArray(result[key]) && typeof override[key] === "object" && override[key] !== null && !Array.isArray(override[key])) {
17152
+ result[key] = deepMerge(result[key], override[key]);
17153
+ } else {
17154
+ result[key] = override[key];
17155
+ }
17156
+ }
17157
+ return result;
17158
+ };
17159
+ const resolveVisorPath = (filePath) => {
17160
+ if (!filePath.startsWith("visor://") && !filePath.startsWith("visor-ee://")) {
17161
+ return null;
17162
+ }
17163
+ const relativePath = filePath.replace(/^visor(?:-ee)?:\/\//, "");
17164
+ const candidates = [
17165
+ path13.resolve(__dirname, "..", "defaults"),
17166
+ path13.resolve(__dirname, "..", "..", "defaults")
17167
+ ];
17168
+ let defaultsDir;
17169
+ for (const candidate of candidates) {
17170
+ if (require("fs").existsSync(candidate)) {
17171
+ defaultsDir = candidate;
17172
+ break;
17173
+ }
17174
+ }
17175
+ if (!defaultsDir) {
17176
+ throw new Error(`loadConfig: cannot find defaults directory for visor:// paths`);
17177
+ }
17178
+ const resolved = path13.resolve(defaultsDir, relativePath);
17179
+ if (!resolved.startsWith(defaultsDir + path13.sep) && resolved !== defaultsDir) {
17180
+ throw new Error(`loadConfig: visor:// path escapes defaults directory`);
17181
+ }
17182
+ return { resolvedPath: resolved, configDir: path13.dirname(resolved) };
17183
+ };
17184
+ let loadConfigCallDepth = 0;
17185
+ const resolveExpressions = (value, depth) => {
17186
+ if (depth > 10) {
17187
+ throw new Error("loadConfig: maximum expression nesting depth (10) exceeded");
17188
+ }
17189
+ if (Array.isArray(value)) {
17190
+ return value.map((item) => resolveExpressions(item, depth + 1));
17191
+ }
17192
+ if (value && typeof value === "object" && !Array.isArray(value)) {
17193
+ const obj = value;
17194
+ const keys = Object.keys(obj);
17195
+ if (keys.length === 1 && keys[0] === "expression" && typeof obj.expression === "string") {
17196
+ const sandbox = createSecureSandbox();
17197
+ const result2 = compileAndRun(sandbox, obj.expression, templateContext, {
17198
+ injectLog: true,
17199
+ logPrefix: "loadConfig.expression"
17200
+ });
17201
+ return resolveExpressions(result2, depth + 1);
17202
+ }
17203
+ if ("extends" in obj && typeof obj.extends === "string") {
17204
+ const extendsPath = obj.extends;
17205
+ const overrideObj = { ...obj };
17206
+ delete overrideObj.extends;
17207
+ const resolvedOverride = resolveExpressions(overrideObj, depth + 1);
17208
+ const baseConfig = loadConfig2(extendsPath);
17209
+ return deepMerge(baseConfig, resolvedOverride);
17210
+ }
17211
+ const result = {};
17212
+ for (const [k, v] of Object.entries(obj)) {
17213
+ result[k] = resolveExpressions(v, depth + 1);
17214
+ }
17215
+ return result;
17216
+ }
17217
+ return value;
17218
+ };
17145
17219
  const loadConfig2 = (filePath) => {
17146
17220
  try {
17147
- const normalizedBasePath = path13.normalize(basePath);
17148
- const resolvedPath = path13.isAbsolute(filePath) ? path13.normalize(filePath) : path13.normalize(path13.resolve(basePath, filePath));
17149
- const basePathWithSep = normalizedBasePath.endsWith(path13.sep) ? normalizedBasePath : normalizedBasePath + path13.sep;
17150
- if (!resolvedPath.startsWith(basePathWithSep) && resolvedPath !== normalizedBasePath) {
17151
- throw new Error(`Path '${filePath}' escapes base directory`);
17221
+ loadConfigCallDepth++;
17222
+ if (loadConfigCallDepth > 10) {
17223
+ throw new Error("maximum loadConfig nesting depth (10) exceeded");
17224
+ }
17225
+ let resolvedPath;
17226
+ let configDir;
17227
+ const visorResolved = resolveVisorPath(filePath);
17228
+ if (visorResolved) {
17229
+ resolvedPath = visorResolved.resolvedPath;
17230
+ configDir = visorResolved.configDir;
17231
+ } else {
17232
+ const normalizedBasePath = path13.normalize(basePath);
17233
+ resolvedPath = path13.isAbsolute(filePath) ? path13.normalize(filePath) : path13.normalize(path13.resolve(basePath, filePath));
17234
+ const basePathWithSep = normalizedBasePath.endsWith(path13.sep) ? normalizedBasePath : normalizedBasePath + path13.sep;
17235
+ if (!resolvedPath.startsWith(basePathWithSep) && resolvedPath !== normalizedBasePath) {
17236
+ throw new Error(`Path '${filePath}' escapes base directory`);
17237
+ }
17238
+ configDir = path13.dirname(resolvedPath);
17152
17239
  }
17153
- const configDir = path13.dirname(resolvedPath);
17154
17240
  const rawContent = require("fs").readFileSync(resolvedPath, "utf-8");
17155
17241
  const renderedContent = loadConfigLiquid.parseAndRenderSync(rawContent, {
17156
17242
  basePath: configDir
17157
17243
  });
17158
- return yaml4.load(renderedContent, { schema: yaml4.JSON_SCHEMA });
17244
+ const parsed = yaml4.load(renderedContent, { schema: yaml4.JSON_SCHEMA });
17245
+ return resolveExpressions(parsed, 0);
17159
17246
  } catch (error) {
17160
17247
  const msg = error instanceof Error ? error.message : String(error);
17161
17248
  logger.error(`[WorkflowProvider] loadConfig failed for '${filePath}': ${msg}`);
17162
17249
  throw new Error(`loadConfig('${filePath}') failed: ${msg}`);
17250
+ } finally {
17251
+ loadConfigCallDepth--;
17163
17252
  }
17164
17253
  };
17165
17254
  const templateContext = {
@@ -17601,17 +17690,17 @@ var init_workflow_check_provider = __esm({
17601
17690
  * so it can be executed by the state machine as a nested workflow.
17602
17691
  */
17603
17692
  async loadWorkflowFromConfigPath(sourcePath, baseDir) {
17604
- const path27 = require("path");
17605
- const fs23 = require("fs");
17693
+ const path31 = require("path");
17694
+ const fs27 = require("fs");
17606
17695
  const yaml5 = require("js-yaml");
17607
- const resolved = path27.isAbsolute(sourcePath) ? sourcePath : path27.resolve(baseDir, sourcePath);
17608
- if (!fs23.existsSync(resolved)) {
17696
+ const resolved = path31.isAbsolute(sourcePath) ? sourcePath : path31.resolve(baseDir, sourcePath);
17697
+ if (!fs27.existsSync(resolved)) {
17609
17698
  throw new Error(`Workflow config not found at: ${resolved}`);
17610
17699
  }
17611
- const rawContent = fs23.readFileSync(resolved, "utf8");
17700
+ const rawContent = fs27.readFileSync(resolved, "utf8");
17612
17701
  const rawData = yaml5.load(rawContent);
17613
17702
  if (rawData.imports && Array.isArray(rawData.imports)) {
17614
- const configDir = path27.dirname(resolved);
17703
+ const configDir = path31.dirname(resolved);
17615
17704
  for (const source of rawData.imports) {
17616
17705
  const results = await this.registry.import(source, {
17617
17706
  basePath: configDir,
@@ -17641,8 +17730,8 @@ ${errors}`);
17641
17730
  if (!steps || Object.keys(steps).length === 0) {
17642
17731
  throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
17643
17732
  }
17644
- const id = path27.basename(resolved).replace(/\.(ya?ml)$/i, "");
17645
- const name = loaded.name || `Workflow from ${path27.basename(resolved)}`;
17733
+ const id = path31.basename(resolved).replace(/\.(ya?ml)$/i, "");
17734
+ const name = loaded.name || `Workflow from ${path31.basename(resolved)}`;
17646
17735
  const workflowDef = {
17647
17736
  id,
17648
17737
  name,
@@ -18448,8 +18537,8 @@ async function createStoreBackend(storageConfig, haConfig) {
18448
18537
  case "mssql": {
18449
18538
  try {
18450
18539
  const loaderPath = "../../enterprise/loader";
18451
- const { loadEnterpriseStoreBackend } = await import(loaderPath);
18452
- return await loadEnterpriseStoreBackend(driver, storageConfig, haConfig);
18540
+ const { loadEnterpriseStoreBackend: loadEnterpriseStoreBackend2 } = await import(loaderPath);
18541
+ return await loadEnterpriseStoreBackend2(driver, storageConfig, haConfig);
18453
18542
  } catch (err) {
18454
18543
  const msg = err instanceof Error ? err.message : String(err);
18455
18544
  logger.error(`[StoreFactory] Failed to load enterprise ${driver} backend: ${msg}`);
@@ -21020,7 +21109,7 @@ var init_mcp_custom_sse_server = __esm({
21020
21109
  * Returns the actual bound port number
21021
21110
  */
21022
21111
  async start() {
21023
- return new Promise((resolve15, reject) => {
21112
+ return new Promise((resolve19, reject) => {
21024
21113
  try {
21025
21114
  this.server = import_http.default.createServer((req, res) => {
21026
21115
  this.handleRequest(req, res).catch((error) => {
@@ -21054,7 +21143,7 @@ var init_mcp_custom_sse_server = __esm({
21054
21143
  );
21055
21144
  }
21056
21145
  this.startKeepalive();
21057
- resolve15(this.port);
21146
+ resolve19(this.port);
21058
21147
  });
21059
21148
  } catch (error) {
21060
21149
  reject(error);
@@ -21117,7 +21206,7 @@ var init_mcp_custom_sse_server = __esm({
21117
21206
  logger.debug(
21118
21207
  `[CustomToolsSSEServer:${this.sessionId}] Grace period before stop: ${waitMs}ms (activeToolCalls=${this.activeToolCalls})`
21119
21208
  );
21120
- await new Promise((resolve15) => setTimeout(resolve15, waitMs));
21209
+ await new Promise((resolve19) => setTimeout(resolve19, waitMs));
21121
21210
  }
21122
21211
  }
21123
21212
  if (this.activeToolCalls > 0) {
@@ -21126,7 +21215,7 @@ var init_mcp_custom_sse_server = __esm({
21126
21215
  `[CustomToolsSSEServer:${this.sessionId}] Waiting for ${this.activeToolCalls} active tool call(s) before stop`
21127
21216
  );
21128
21217
  while (this.activeToolCalls > 0 && Date.now() - startedAt < effectiveDrainTimeoutMs) {
21129
- await new Promise((resolve15) => setTimeout(resolve15, 250));
21218
+ await new Promise((resolve19) => setTimeout(resolve19, 250));
21130
21219
  }
21131
21220
  if (this.activeToolCalls > 0) {
21132
21221
  logger.warn(
@@ -21151,21 +21240,21 @@ var init_mcp_custom_sse_server = __esm({
21151
21240
  }
21152
21241
  this.connections.clear();
21153
21242
  if (this.server) {
21154
- await new Promise((resolve15, reject) => {
21243
+ await new Promise((resolve19, reject) => {
21155
21244
  const timeout = setTimeout(() => {
21156
21245
  if (this.debug) {
21157
21246
  logger.debug(
21158
21247
  `[CustomToolsSSEServer:${this.sessionId}] Force closing server after timeout`
21159
21248
  );
21160
21249
  }
21161
- this.server?.close(() => resolve15());
21250
+ this.server?.close(() => resolve19());
21162
21251
  }, 5e3);
21163
21252
  this.server.close((error) => {
21164
21253
  clearTimeout(timeout);
21165
21254
  if (error) {
21166
21255
  reject(error);
21167
21256
  } else {
21168
- resolve15();
21257
+ resolve19();
21169
21258
  }
21170
21259
  });
21171
21260
  });
@@ -21600,7 +21689,7 @@ var init_mcp_custom_sse_server = __esm({
21600
21689
  logger.warn(
21601
21690
  `[CustomToolsSSEServer:${this.sessionId}] Tool ${toolName} failed (attempt ${attempt + 1}/${retryCount + 1}): ${errorMsg}. Retrying in ${delay}ms`
21602
21691
  );
21603
- await new Promise((resolve15) => setTimeout(resolve15, delay));
21692
+ await new Promise((resolve19) => setTimeout(resolve19, delay));
21604
21693
  attempt++;
21605
21694
  }
21606
21695
  }
@@ -21913,9 +22002,9 @@ var init_ai_check_provider = __esm({
21913
22002
  } else {
21914
22003
  resolvedPath = import_path7.default.resolve(process.cwd(), str);
21915
22004
  }
21916
- const fs23 = require("fs").promises;
22005
+ const fs27 = require("fs").promises;
21917
22006
  try {
21918
- const stat2 = await fs23.stat(resolvedPath);
22007
+ const stat2 = await fs27.stat(resolvedPath);
21919
22008
  return stat2.isFile();
21920
22009
  } catch {
21921
22010
  return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
@@ -27843,14 +27932,14 @@ var require_util = __commonJS({
27843
27932
  }
27844
27933
  const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
27845
27934
  let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
27846
- let path27 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
27935
+ let path31 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
27847
27936
  if (origin.endsWith("/")) {
27848
27937
  origin = origin.substring(0, origin.length - 1);
27849
27938
  }
27850
- if (path27 && !path27.startsWith("/")) {
27851
- path27 = `/${path27}`;
27939
+ if (path31 && !path31.startsWith("/")) {
27940
+ path31 = `/${path31}`;
27852
27941
  }
27853
- url = new URL(origin + path27);
27942
+ url = new URL(origin + path31);
27854
27943
  }
27855
27944
  return url;
27856
27945
  }
@@ -29464,20 +29553,20 @@ var require_parseParams = __commonJS({
29464
29553
  var require_basename = __commonJS({
29465
29554
  "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
29466
29555
  "use strict";
29467
- module2.exports = function basename4(path27) {
29468
- if (typeof path27 !== "string") {
29556
+ module2.exports = function basename4(path31) {
29557
+ if (typeof path31 !== "string") {
29469
29558
  return "";
29470
29559
  }
29471
- for (var i = path27.length - 1; i >= 0; --i) {
29472
- switch (path27.charCodeAt(i)) {
29560
+ for (var i = path31.length - 1; i >= 0; --i) {
29561
+ switch (path31.charCodeAt(i)) {
29473
29562
  case 47:
29474
29563
  // '/'
29475
29564
  case 92:
29476
- path27 = path27.slice(i + 1);
29477
- return path27 === ".." || path27 === "." ? "" : path27;
29565
+ path31 = path31.slice(i + 1);
29566
+ return path31 === ".." || path31 === "." ? "" : path31;
29478
29567
  }
29479
29568
  }
29480
- return path27 === ".." || path27 === "." ? "" : path27;
29569
+ return path31 === ".." || path31 === "." ? "" : path31;
29481
29570
  };
29482
29571
  }
29483
29572
  });
@@ -30481,11 +30570,11 @@ var require_util2 = __commonJS({
30481
30570
  var assert = require("assert");
30482
30571
  var { isUint8Array } = require("util/types");
30483
30572
  var supportedHashes = [];
30484
- var crypto2;
30573
+ var crypto4;
30485
30574
  try {
30486
- crypto2 = require("crypto");
30575
+ crypto4 = require("crypto");
30487
30576
  const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
30488
- supportedHashes = crypto2.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
30577
+ supportedHashes = crypto4.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
30489
30578
  } catch {
30490
30579
  }
30491
30580
  function responseURL(response) {
@@ -30762,7 +30851,7 @@ var require_util2 = __commonJS({
30762
30851
  }
30763
30852
  }
30764
30853
  function bytesMatch(bytes, metadataList) {
30765
- if (crypto2 === void 0) {
30854
+ if (crypto4 === void 0) {
30766
30855
  return true;
30767
30856
  }
30768
30857
  const parsedMetadata = parseMetadata(metadataList);
@@ -30777,7 +30866,7 @@ var require_util2 = __commonJS({
30777
30866
  for (const item of metadata) {
30778
30867
  const algorithm = item.algo;
30779
30868
  const expectedValue = item.hash;
30780
- let actualValue = crypto2.createHash(algorithm).update(bytes).digest("base64");
30869
+ let actualValue = crypto4.createHash(algorithm).update(bytes).digest("base64");
30781
30870
  if (actualValue[actualValue.length - 1] === "=") {
30782
30871
  if (actualValue[actualValue.length - 2] === "=") {
30783
30872
  actualValue = actualValue.slice(0, -2);
@@ -30870,8 +30959,8 @@ var require_util2 = __commonJS({
30870
30959
  function createDeferredPromise() {
30871
30960
  let res;
30872
30961
  let rej;
30873
- const promise = new Promise((resolve15, reject) => {
30874
- res = resolve15;
30962
+ const promise = new Promise((resolve19, reject) => {
30963
+ res = resolve19;
30875
30964
  rej = reject;
30876
30965
  });
30877
30966
  return { promise, resolve: res, reject: rej };
@@ -32124,8 +32213,8 @@ var require_body = __commonJS({
32124
32213
  var { parseMIMEType, serializeAMimeType } = require_dataURL();
32125
32214
  var random;
32126
32215
  try {
32127
- const crypto2 = require("crypto");
32128
- random = (max) => crypto2.randomInt(0, max);
32216
+ const crypto4 = require("crypto");
32217
+ random = (max) => crypto4.randomInt(0, max);
32129
32218
  } catch {
32130
32219
  random = (max) => Math.floor(Math.random(max));
32131
32220
  }
@@ -32376,8 +32465,8 @@ Content-Type: ${value.type || "application/octet-stream"}\r
32376
32465
  });
32377
32466
  }
32378
32467
  });
32379
- const busboyResolve = new Promise((resolve15, reject) => {
32380
- busboy.on("finish", resolve15);
32468
+ const busboyResolve = new Promise((resolve19, reject) => {
32469
+ busboy.on("finish", resolve19);
32381
32470
  busboy.on("error", (err) => reject(new TypeError(err)));
32382
32471
  });
32383
32472
  if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk);
@@ -32508,7 +32597,7 @@ var require_request = __commonJS({
32508
32597
  }
32509
32598
  var Request = class _Request {
32510
32599
  constructor(origin, {
32511
- path: path27,
32600
+ path: path31,
32512
32601
  method,
32513
32602
  body,
32514
32603
  headers,
@@ -32522,11 +32611,11 @@ var require_request = __commonJS({
32522
32611
  throwOnError,
32523
32612
  expectContinue
32524
32613
  }, handler) {
32525
- if (typeof path27 !== "string") {
32614
+ if (typeof path31 !== "string") {
32526
32615
  throw new InvalidArgumentError("path must be a string");
32527
- } else if (path27[0] !== "/" && !(path27.startsWith("http://") || path27.startsWith("https://")) && method !== "CONNECT") {
32616
+ } else if (path31[0] !== "/" && !(path31.startsWith("http://") || path31.startsWith("https://")) && method !== "CONNECT") {
32528
32617
  throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
32529
- } else if (invalidPathRegex.exec(path27) !== null) {
32618
+ } else if (invalidPathRegex.exec(path31) !== null) {
32530
32619
  throw new InvalidArgumentError("invalid request path");
32531
32620
  }
32532
32621
  if (typeof method !== "string") {
@@ -32589,7 +32678,7 @@ var require_request = __commonJS({
32589
32678
  this.completed = false;
32590
32679
  this.aborted = false;
32591
32680
  this.upgrade = upgrade || null;
32592
- this.path = query ? util.buildURL(path27, query) : path27;
32681
+ this.path = query ? util.buildURL(path31, query) : path31;
32593
32682
  this.origin = origin;
32594
32683
  this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
32595
32684
  this.blocking = blocking == null ? false : blocking;
@@ -32911,9 +33000,9 @@ var require_dispatcher_base = __commonJS({
32911
33000
  }
32912
33001
  close(callback) {
32913
33002
  if (callback === void 0) {
32914
- return new Promise((resolve15, reject) => {
33003
+ return new Promise((resolve19, reject) => {
32915
33004
  this.close((err, data) => {
32916
- return err ? reject(err) : resolve15(data);
33005
+ return err ? reject(err) : resolve19(data);
32917
33006
  });
32918
33007
  });
32919
33008
  }
@@ -32951,12 +33040,12 @@ var require_dispatcher_base = __commonJS({
32951
33040
  err = null;
32952
33041
  }
32953
33042
  if (callback === void 0) {
32954
- return new Promise((resolve15, reject) => {
33043
+ return new Promise((resolve19, reject) => {
32955
33044
  this.destroy(err, (err2, data) => {
32956
33045
  return err2 ? (
32957
33046
  /* istanbul ignore next: should never error */
32958
33047
  reject(err2)
32959
- ) : resolve15(data);
33048
+ ) : resolve19(data);
32960
33049
  });
32961
33050
  });
32962
33051
  }
@@ -33597,9 +33686,9 @@ var require_RedirectHandler = __commonJS({
33597
33686
  return this.handler.onHeaders(statusCode, headers, resume, statusText);
33598
33687
  }
33599
33688
  const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
33600
- const path27 = search ? `${pathname}${search}` : pathname;
33689
+ const path31 = search ? `${pathname}${search}` : pathname;
33601
33690
  this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
33602
- this.opts.path = path27;
33691
+ this.opts.path = path31;
33603
33692
  this.opts.origin = origin;
33604
33693
  this.opts.maxRedirections = 0;
33605
33694
  this.opts.query = null;
@@ -34018,16 +34107,16 @@ var require_client = __commonJS({
34018
34107
  return this[kNeedDrain] < 2;
34019
34108
  }
34020
34109
  async [kClose]() {
34021
- return new Promise((resolve15) => {
34110
+ return new Promise((resolve19) => {
34022
34111
  if (!this[kSize]) {
34023
- resolve15(null);
34112
+ resolve19(null);
34024
34113
  } else {
34025
- this[kClosedResolve] = resolve15;
34114
+ this[kClosedResolve] = resolve19;
34026
34115
  }
34027
34116
  });
34028
34117
  }
34029
34118
  async [kDestroy](err) {
34030
- return new Promise((resolve15) => {
34119
+ return new Promise((resolve19) => {
34031
34120
  const requests = this[kQueue].splice(this[kPendingIdx]);
34032
34121
  for (let i = 0; i < requests.length; i++) {
34033
34122
  const request = requests[i];
@@ -34038,7 +34127,7 @@ var require_client = __commonJS({
34038
34127
  this[kClosedResolve]();
34039
34128
  this[kClosedResolve] = null;
34040
34129
  }
34041
- resolve15();
34130
+ resolve19();
34042
34131
  };
34043
34132
  if (this[kHTTP2Session] != null) {
34044
34133
  util.destroy(this[kHTTP2Session], err);
@@ -34618,7 +34707,7 @@ var require_client = __commonJS({
34618
34707
  });
34619
34708
  }
34620
34709
  try {
34621
- const socket = await new Promise((resolve15, reject) => {
34710
+ const socket = await new Promise((resolve19, reject) => {
34622
34711
  client[kConnector]({
34623
34712
  host,
34624
34713
  hostname,
@@ -34630,7 +34719,7 @@ var require_client = __commonJS({
34630
34719
  if (err) {
34631
34720
  reject(err);
34632
34721
  } else {
34633
- resolve15(socket2);
34722
+ resolve19(socket2);
34634
34723
  }
34635
34724
  });
34636
34725
  });
@@ -34841,7 +34930,7 @@ var require_client = __commonJS({
34841
34930
  writeH2(client, client[kHTTP2Session], request);
34842
34931
  return;
34843
34932
  }
34844
- const { body, method, path: path27, host, upgrade, headers, blocking, reset } = request;
34933
+ const { body, method, path: path31, host, upgrade, headers, blocking, reset } = request;
34845
34934
  const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
34846
34935
  if (body && typeof body.read === "function") {
34847
34936
  body.read(0);
@@ -34891,7 +34980,7 @@ var require_client = __commonJS({
34891
34980
  if (blocking) {
34892
34981
  socket[kBlocking] = true;
34893
34982
  }
34894
- let header = `${method} ${path27} HTTP/1.1\r
34983
+ let header = `${method} ${path31} HTTP/1.1\r
34895
34984
  `;
34896
34985
  if (typeof host === "string") {
34897
34986
  header += `host: ${host}\r
@@ -34954,7 +35043,7 @@ upgrade: ${upgrade}\r
34954
35043
  return true;
34955
35044
  }
34956
35045
  function writeH2(client, session, request) {
34957
- const { body, method, path: path27, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
35046
+ const { body, method, path: path31, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
34958
35047
  let headers;
34959
35048
  if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim());
34960
35049
  else headers = reqHeaders;
@@ -34997,7 +35086,7 @@ upgrade: ${upgrade}\r
34997
35086
  });
34998
35087
  return true;
34999
35088
  }
35000
- headers[HTTP2_HEADER_PATH] = path27;
35089
+ headers[HTTP2_HEADER_PATH] = path31;
35001
35090
  headers[HTTP2_HEADER_SCHEME] = "https";
35002
35091
  const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
35003
35092
  if (body && typeof body.read === "function") {
@@ -35254,12 +35343,12 @@ upgrade: ${upgrade}\r
35254
35343
  cb();
35255
35344
  }
35256
35345
  }
35257
- const waitForDrain = () => new Promise((resolve15, reject) => {
35346
+ const waitForDrain = () => new Promise((resolve19, reject) => {
35258
35347
  assert(callback === null);
35259
35348
  if (socket[kError]) {
35260
35349
  reject(socket[kError]);
35261
35350
  } else {
35262
- callback = resolve15;
35351
+ callback = resolve19;
35263
35352
  }
35264
35353
  });
35265
35354
  if (client[kHTTPConnVersion] === "h2") {
@@ -35605,8 +35694,8 @@ var require_pool_base = __commonJS({
35605
35694
  if (this[kQueue].isEmpty()) {
35606
35695
  return Promise.all(this[kClients].map((c) => c.close()));
35607
35696
  } else {
35608
- return new Promise((resolve15) => {
35609
- this[kClosedResolve] = resolve15;
35697
+ return new Promise((resolve19) => {
35698
+ this[kClosedResolve] = resolve19;
35610
35699
  });
35611
35700
  }
35612
35701
  }
@@ -36184,7 +36273,7 @@ var require_readable = __commonJS({
36184
36273
  if (this.closed) {
36185
36274
  return Promise.resolve(null);
36186
36275
  }
36187
- return new Promise((resolve15, reject) => {
36276
+ return new Promise((resolve19, reject) => {
36188
36277
  const signalListenerCleanup = signal ? util.addAbortListener(signal, () => {
36189
36278
  this.destroy();
36190
36279
  }) : noop;
@@ -36193,7 +36282,7 @@ var require_readable = __commonJS({
36193
36282
  if (signal && signal.aborted) {
36194
36283
  reject(signal.reason || Object.assign(new Error("The operation was aborted"), { name: "AbortError" }));
36195
36284
  } else {
36196
- resolve15(null);
36285
+ resolve19(null);
36197
36286
  }
36198
36287
  }).on("error", noop).on("data", function(chunk) {
36199
36288
  limit -= chunk.length;
@@ -36215,11 +36304,11 @@ var require_readable = __commonJS({
36215
36304
  throw new TypeError("unusable");
36216
36305
  }
36217
36306
  assert(!stream[kConsume]);
36218
- return new Promise((resolve15, reject) => {
36307
+ return new Promise((resolve19, reject) => {
36219
36308
  stream[kConsume] = {
36220
36309
  type,
36221
36310
  stream,
36222
- resolve: resolve15,
36311
+ resolve: resolve19,
36223
36312
  reject,
36224
36313
  length: 0,
36225
36314
  body: []
@@ -36254,12 +36343,12 @@ var require_readable = __commonJS({
36254
36343
  }
36255
36344
  }
36256
36345
  function consumeEnd(consume2) {
36257
- const { type, body, resolve: resolve15, stream, length } = consume2;
36346
+ const { type, body, resolve: resolve19, stream, length } = consume2;
36258
36347
  try {
36259
36348
  if (type === "text") {
36260
- resolve15(toUSVString(Buffer.concat(body)));
36349
+ resolve19(toUSVString(Buffer.concat(body)));
36261
36350
  } else if (type === "json") {
36262
- resolve15(JSON.parse(Buffer.concat(body)));
36351
+ resolve19(JSON.parse(Buffer.concat(body)));
36263
36352
  } else if (type === "arrayBuffer") {
36264
36353
  const dst = new Uint8Array(length);
36265
36354
  let pos = 0;
@@ -36267,12 +36356,12 @@ var require_readable = __commonJS({
36267
36356
  dst.set(buf, pos);
36268
36357
  pos += buf.byteLength;
36269
36358
  }
36270
- resolve15(dst.buffer);
36359
+ resolve19(dst.buffer);
36271
36360
  } else if (type === "blob") {
36272
36361
  if (!Blob2) {
36273
36362
  Blob2 = require("buffer").Blob;
36274
36363
  }
36275
- resolve15(new Blob2(body, { type: stream[kContentType] }));
36364
+ resolve19(new Blob2(body, { type: stream[kContentType] }));
36276
36365
  }
36277
36366
  consumeFinish(consume2);
36278
36367
  } catch (err) {
@@ -36529,9 +36618,9 @@ var require_api_request = __commonJS({
36529
36618
  };
36530
36619
  function request(opts, callback) {
36531
36620
  if (callback === void 0) {
36532
- return new Promise((resolve15, reject) => {
36621
+ return new Promise((resolve19, reject) => {
36533
36622
  request.call(this, opts, (err, data) => {
36534
- return err ? reject(err) : resolve15(data);
36623
+ return err ? reject(err) : resolve19(data);
36535
36624
  });
36536
36625
  });
36537
36626
  }
@@ -36704,9 +36793,9 @@ var require_api_stream = __commonJS({
36704
36793
  };
36705
36794
  function stream(opts, factory, callback) {
36706
36795
  if (callback === void 0) {
36707
- return new Promise((resolve15, reject) => {
36796
+ return new Promise((resolve19, reject) => {
36708
36797
  stream.call(this, opts, factory, (err, data) => {
36709
- return err ? reject(err) : resolve15(data);
36798
+ return err ? reject(err) : resolve19(data);
36710
36799
  });
36711
36800
  });
36712
36801
  }
@@ -36987,9 +37076,9 @@ var require_api_upgrade = __commonJS({
36987
37076
  };
36988
37077
  function upgrade(opts, callback) {
36989
37078
  if (callback === void 0) {
36990
- return new Promise((resolve15, reject) => {
37079
+ return new Promise((resolve19, reject) => {
36991
37080
  upgrade.call(this, opts, (err, data) => {
36992
- return err ? reject(err) : resolve15(data);
37081
+ return err ? reject(err) : resolve19(data);
36993
37082
  });
36994
37083
  });
36995
37084
  }
@@ -37078,9 +37167,9 @@ var require_api_connect = __commonJS({
37078
37167
  };
37079
37168
  function connect(opts, callback) {
37080
37169
  if (callback === void 0) {
37081
- return new Promise((resolve15, reject) => {
37170
+ return new Promise((resolve19, reject) => {
37082
37171
  connect.call(this, opts, (err, data) => {
37083
- return err ? reject(err) : resolve15(data);
37172
+ return err ? reject(err) : resolve19(data);
37084
37173
  });
37085
37174
  });
37086
37175
  }
@@ -37240,20 +37329,20 @@ var require_mock_utils = __commonJS({
37240
37329
  }
37241
37330
  return true;
37242
37331
  }
37243
- function safeUrl(path27) {
37244
- if (typeof path27 !== "string") {
37245
- return path27;
37332
+ function safeUrl(path31) {
37333
+ if (typeof path31 !== "string") {
37334
+ return path31;
37246
37335
  }
37247
- const pathSegments = path27.split("?");
37336
+ const pathSegments = path31.split("?");
37248
37337
  if (pathSegments.length !== 2) {
37249
- return path27;
37338
+ return path31;
37250
37339
  }
37251
37340
  const qp = new URLSearchParams(pathSegments.pop());
37252
37341
  qp.sort();
37253
37342
  return [...pathSegments, qp.toString()].join("?");
37254
37343
  }
37255
- function matchKey(mockDispatch2, { path: path27, method, body, headers }) {
37256
- const pathMatch = matchValue(mockDispatch2.path, path27);
37344
+ function matchKey(mockDispatch2, { path: path31, method, body, headers }) {
37345
+ const pathMatch = matchValue(mockDispatch2.path, path31);
37257
37346
  const methodMatch = matchValue(mockDispatch2.method, method);
37258
37347
  const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
37259
37348
  const headersMatch = matchHeaders(mockDispatch2, headers);
@@ -37271,7 +37360,7 @@ var require_mock_utils = __commonJS({
37271
37360
  function getMockDispatch(mockDispatches, key) {
37272
37361
  const basePath = key.query ? buildURL(key.path, key.query) : key.path;
37273
37362
  const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
37274
- let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path27 }) => matchValue(safeUrl(path27), resolvedPath));
37363
+ let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path31 }) => matchValue(safeUrl(path31), resolvedPath));
37275
37364
  if (matchedMockDispatches.length === 0) {
37276
37365
  throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
37277
37366
  }
@@ -37308,9 +37397,9 @@ var require_mock_utils = __commonJS({
37308
37397
  }
37309
37398
  }
37310
37399
  function buildKey(opts) {
37311
- const { path: path27, method, body, headers, query } = opts;
37400
+ const { path: path31, method, body, headers, query } = opts;
37312
37401
  return {
37313
- path: path27,
37402
+ path: path31,
37314
37403
  method,
37315
37404
  body,
37316
37405
  headers,
@@ -37759,10 +37848,10 @@ var require_pending_interceptors_formatter = __commonJS({
37759
37848
  }
37760
37849
  format(pendingInterceptors) {
37761
37850
  const withPrettyHeaders = pendingInterceptors.map(
37762
- ({ method, path: path27, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
37851
+ ({ method, path: path31, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
37763
37852
  Method: method,
37764
37853
  Origin: origin,
37765
- Path: path27,
37854
+ Path: path31,
37766
37855
  "Status code": statusCode,
37767
37856
  Persistent: persist ? "\u2705" : "\u274C",
37768
37857
  Invocations: timesInvoked,
@@ -40703,7 +40792,7 @@ var require_fetch = __commonJS({
40703
40792
  async function dispatch({ body }) {
40704
40793
  const url = requestCurrentURL(request);
40705
40794
  const agent = fetchParams.controller.dispatcher;
40706
- return new Promise((resolve15, reject) => agent.dispatch(
40795
+ return new Promise((resolve19, reject) => agent.dispatch(
40707
40796
  {
40708
40797
  path: url.pathname + url.search,
40709
40798
  origin: url.origin,
@@ -40779,7 +40868,7 @@ var require_fetch = __commonJS({
40779
40868
  }
40780
40869
  }
40781
40870
  }
40782
- resolve15({
40871
+ resolve19({
40783
40872
  status,
40784
40873
  statusText,
40785
40874
  headersList: headers[kHeadersList],
@@ -40822,7 +40911,7 @@ var require_fetch = __commonJS({
40822
40911
  const val = headersList[n + 1].toString("latin1");
40823
40912
  headers[kHeadersList].append(key, val);
40824
40913
  }
40825
- resolve15({
40914
+ resolve19({
40826
40915
  status,
40827
40916
  statusText: STATUS_CODES[status],
40828
40917
  headersList: headers[kHeadersList],
@@ -42383,8 +42472,8 @@ var require_util6 = __commonJS({
42383
42472
  }
42384
42473
  }
42385
42474
  }
42386
- function validateCookiePath(path27) {
42387
- for (const char of path27) {
42475
+ function validateCookiePath(path31) {
42476
+ for (const char of path31) {
42388
42477
  const code = char.charCodeAt(0);
42389
42478
  if (code < 33 || char === ";") {
42390
42479
  throw new Error("Invalid cookie path");
@@ -43181,9 +43270,9 @@ var require_connection = __commonJS({
43181
43270
  channels.open = diagnosticsChannel.channel("undici:websocket:open");
43182
43271
  channels.close = diagnosticsChannel.channel("undici:websocket:close");
43183
43272
  channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error");
43184
- var crypto2;
43273
+ var crypto4;
43185
43274
  try {
43186
- crypto2 = require("crypto");
43275
+ crypto4 = require("crypto");
43187
43276
  } catch {
43188
43277
  }
43189
43278
  function establishWebSocketConnection(url, protocols, ws, onEstablish, options) {
@@ -43202,7 +43291,7 @@ var require_connection = __commonJS({
43202
43291
  const headersList = new Headers(options.headers)[kHeadersList];
43203
43292
  request.headersList = headersList;
43204
43293
  }
43205
- const keyValue = crypto2.randomBytes(16).toString("base64");
43294
+ const keyValue = crypto4.randomBytes(16).toString("base64");
43206
43295
  request.headersList.append("sec-websocket-key", keyValue);
43207
43296
  request.headersList.append("sec-websocket-version", "13");
43208
43297
  for (const protocol of protocols) {
@@ -43231,7 +43320,7 @@ var require_connection = __commonJS({
43231
43320
  return;
43232
43321
  }
43233
43322
  const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
43234
- const digest = crypto2.createHash("sha1").update(keyValue + uid).digest("base64");
43323
+ const digest = crypto4.createHash("sha1").update(keyValue + uid).digest("base64");
43235
43324
  if (secWSAccept !== digest) {
43236
43325
  failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
43237
43326
  return;
@@ -43311,9 +43400,9 @@ var require_frame = __commonJS({
43311
43400
  "node_modules/undici/lib/websocket/frame.js"(exports2, module2) {
43312
43401
  "use strict";
43313
43402
  var { maxUnsigned16Bit } = require_constants5();
43314
- var crypto2;
43403
+ var crypto4;
43315
43404
  try {
43316
- crypto2 = require("crypto");
43405
+ crypto4 = require("crypto");
43317
43406
  } catch {
43318
43407
  }
43319
43408
  var WebsocketFrameSend = class {
@@ -43322,7 +43411,7 @@ var require_frame = __commonJS({
43322
43411
  */
43323
43412
  constructor(data) {
43324
43413
  this.frameData = data;
43325
- this.maskKey = crypto2.randomBytes(4);
43414
+ this.maskKey = crypto4.randomBytes(4);
43326
43415
  }
43327
43416
  createFrame(opcode) {
43328
43417
  const bodyLength = this.frameData?.byteLength ?? 0;
@@ -44064,11 +44153,11 @@ var require_undici = __commonJS({
44064
44153
  if (typeof opts.path !== "string") {
44065
44154
  throw new InvalidArgumentError("invalid opts.path");
44066
44155
  }
44067
- let path27 = opts.path;
44156
+ let path31 = opts.path;
44068
44157
  if (!opts.path.startsWith("/")) {
44069
- path27 = `/${path27}`;
44158
+ path31 = `/${path31}`;
44070
44159
  }
44071
- url = new URL(util.parseOrigin(url).origin + path27);
44160
+ url = new URL(util.parseOrigin(url).origin + path31);
44072
44161
  } else {
44073
44162
  if (!opts) {
44074
44163
  opts = typeof url === "object" ? url : {};
@@ -44617,7 +44706,7 @@ var init_mcp_check_provider = __esm({
44617
44706
  logger.warn(
44618
44707
  `MCP ${transportName} failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${error instanceof Error ? error.message : String(error)}`
44619
44708
  );
44620
- await new Promise((resolve15) => setTimeout(resolve15, delay));
44709
+ await new Promise((resolve19) => setTimeout(resolve19, delay));
44621
44710
  attempt += 1;
44622
44711
  } finally {
44623
44712
  try {
@@ -44899,7 +44988,7 @@ async function acquirePromptLock() {
44899
44988
  activePrompt = true;
44900
44989
  return;
44901
44990
  }
44902
- await new Promise((resolve15) => waiters.push(resolve15));
44991
+ await new Promise((resolve19) => waiters.push(resolve19));
44903
44992
  activePrompt = true;
44904
44993
  }
44905
44994
  function releasePromptLock() {
@@ -44909,7 +44998,7 @@ function releasePromptLock() {
44909
44998
  }
44910
44999
  async function interactivePrompt(options) {
44911
45000
  await acquirePromptLock();
44912
- return new Promise((resolve15, reject) => {
45001
+ return new Promise((resolve19, reject) => {
44913
45002
  const dbg = process.env.VISOR_DEBUG === "true";
44914
45003
  try {
44915
45004
  if (dbg) {
@@ -44996,12 +45085,12 @@ async function interactivePrompt(options) {
44996
45085
  };
44997
45086
  const finish = (value) => {
44998
45087
  cleanup();
44999
- resolve15(value);
45088
+ resolve19(value);
45000
45089
  };
45001
45090
  if (options.timeout && options.timeout > 0) {
45002
45091
  timeoutId = setTimeout(() => {
45003
45092
  cleanup();
45004
- if (defaultValue !== void 0) return resolve15(defaultValue);
45093
+ if (defaultValue !== void 0) return resolve19(defaultValue);
45005
45094
  return reject(new Error("Input timeout"));
45006
45095
  }, options.timeout);
45007
45096
  }
@@ -45133,7 +45222,7 @@ async function interactivePrompt(options) {
45133
45222
  });
45134
45223
  }
45135
45224
  async function simplePrompt(prompt) {
45136
- return new Promise((resolve15) => {
45225
+ return new Promise((resolve19) => {
45137
45226
  const rl = readline.createInterface({
45138
45227
  input: process.stdin,
45139
45228
  output: process.stdout
@@ -45149,7 +45238,7 @@ async function simplePrompt(prompt) {
45149
45238
  rl.question(`${prompt}
45150
45239
  > `, (answer) => {
45151
45240
  rl.close();
45152
- resolve15(answer.trim());
45241
+ resolve19(answer.trim());
45153
45242
  });
45154
45243
  });
45155
45244
  }
@@ -45317,7 +45406,7 @@ function isStdinAvailable() {
45317
45406
  return !process.stdin.isTTY;
45318
45407
  }
45319
45408
  async function readStdin(timeout, maxSize = 1024 * 1024) {
45320
- return new Promise((resolve15, reject) => {
45409
+ return new Promise((resolve19, reject) => {
45321
45410
  let data = "";
45322
45411
  let timeoutId;
45323
45412
  if (timeout) {
@@ -45344,7 +45433,7 @@ async function readStdin(timeout, maxSize = 1024 * 1024) {
45344
45433
  };
45345
45434
  const onEnd = () => {
45346
45435
  cleanup();
45347
- resolve15(data.trim());
45436
+ resolve19(data.trim());
45348
45437
  };
45349
45438
  const onError = (err) => {
45350
45439
  cleanup();
@@ -49462,23 +49551,23 @@ __export(renderer_schema_exports, {
49462
49551
  });
49463
49552
  async function loadRendererSchema(name) {
49464
49553
  try {
49465
- const fs23 = await import("fs/promises");
49466
- const path27 = await import("path");
49554
+ const fs27 = await import("fs/promises");
49555
+ const path31 = await import("path");
49467
49556
  const sanitized = String(name).replace(/[^a-zA-Z0-9-]/g, "");
49468
49557
  if (!sanitized) return void 0;
49469
49558
  const candidates = [
49470
49559
  // When bundled with ncc, __dirname is dist/ and output/ is at dist/output/
49471
- path27.join(__dirname, "output", sanitized, "schema.json"),
49560
+ path31.join(__dirname, "output", sanitized, "schema.json"),
49472
49561
  // When running from source, __dirname is src/state-machine/dispatch/ and output/ is at output/
49473
- path27.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
49562
+ path31.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
49474
49563
  // When running from a checkout with output/ folder copied to CWD
49475
- path27.join(process.cwd(), "output", sanitized, "schema.json"),
49564
+ path31.join(process.cwd(), "output", sanitized, "schema.json"),
49476
49565
  // Fallback: cwd/dist/output/
49477
- path27.join(process.cwd(), "dist", "output", sanitized, "schema.json")
49566
+ path31.join(process.cwd(), "dist", "output", sanitized, "schema.json")
49478
49567
  ];
49479
49568
  for (const p of candidates) {
49480
49569
  try {
49481
- const raw = await fs23.readFile(p, "utf-8");
49570
+ const raw = await fs27.readFile(p, "utf-8");
49482
49571
  return JSON.parse(raw);
49483
49572
  } catch {
49484
49573
  }
@@ -51897,8 +51986,8 @@ function updateStats2(results, state, isForEachIteration = false) {
51897
51986
  async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
51898
51987
  try {
51899
51988
  const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
51900
- const fs23 = await import("fs/promises");
51901
- const path27 = await import("path");
51989
+ const fs27 = await import("fs/promises");
51990
+ const path31 = await import("path");
51902
51991
  const schemaRaw = checkConfig.schema || "plain";
51903
51992
  const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
51904
51993
  let templateContent;
@@ -51907,27 +51996,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
51907
51996
  logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
51908
51997
  } else if (checkConfig.template && checkConfig.template.file) {
51909
51998
  const file = String(checkConfig.template.file);
51910
- const resolved = path27.resolve(process.cwd(), file);
51911
- templateContent = await fs23.readFile(resolved, "utf-8");
51999
+ const resolved = path31.resolve(process.cwd(), file);
52000
+ templateContent = await fs27.readFile(resolved, "utf-8");
51912
52001
  logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
51913
52002
  } else if (schema && schema !== "plain") {
51914
52003
  const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
51915
52004
  if (sanitized) {
51916
52005
  const candidatePaths = [
51917
- path27.join(__dirname, "output", sanitized, "template.liquid"),
52006
+ path31.join(__dirname, "output", sanitized, "template.liquid"),
51918
52007
  // bundled: dist/output/
51919
- path27.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
52008
+ path31.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
51920
52009
  // source (from state-machine/states)
51921
- path27.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
52010
+ path31.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
51922
52011
  // source (alternate)
51923
- path27.join(process.cwd(), "output", sanitized, "template.liquid"),
52012
+ path31.join(process.cwd(), "output", sanitized, "template.liquid"),
51924
52013
  // fallback: cwd/output/
51925
- path27.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
52014
+ path31.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
51926
52015
  // fallback: cwd/dist/output/
51927
52016
  ];
51928
52017
  for (const p of candidatePaths) {
51929
52018
  try {
51930
- templateContent = await fs23.readFile(p, "utf-8");
52019
+ templateContent = await fs27.readFile(p, "utf-8");
51931
52020
  if (templateContent) {
51932
52021
  logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
51933
52022
  break;
@@ -54067,8 +54156,8 @@ var init_workspace_manager = __esm({
54067
54156
  );
54068
54157
  if (this.cleanupRequested && this.activeOperations === 0) {
54069
54158
  logger.debug(`[Workspace] All references released, proceeding with deferred cleanup`);
54070
- for (const resolve15 of this.cleanupResolvers) {
54071
- resolve15();
54159
+ for (const resolve19 of this.cleanupResolvers) {
54160
+ resolve19();
54072
54161
  }
54073
54162
  this.cleanupResolvers = [];
54074
54163
  }
@@ -54225,19 +54314,19 @@ var init_workspace_manager = __esm({
54225
54314
  );
54226
54315
  this.cleanupRequested = true;
54227
54316
  await Promise.race([
54228
- new Promise((resolve15) => {
54317
+ new Promise((resolve19) => {
54229
54318
  if (this.activeOperations === 0) {
54230
- resolve15();
54319
+ resolve19();
54231
54320
  } else {
54232
- this.cleanupResolvers.push(resolve15);
54321
+ this.cleanupResolvers.push(resolve19);
54233
54322
  }
54234
54323
  }),
54235
- new Promise((resolve15) => {
54324
+ new Promise((resolve19) => {
54236
54325
  setTimeout(() => {
54237
54326
  logger.warn(
54238
54327
  `[Workspace] Cleanup timeout after ${timeout}ms, proceeding anyway (${this.activeOperations} operations still active)`
54239
54328
  );
54240
- resolve15();
54329
+ resolve19();
54241
54330
  }, timeout);
54242
54331
  })
54243
54332
  ]);
@@ -54715,6 +54804,1380 @@ var init_build_engine_context = __esm({
54715
54804
  }
54716
54805
  });
54717
54806
 
54807
+ // src/policy/default-engine.ts
54808
+ var DefaultPolicyEngine;
54809
+ var init_default_engine = __esm({
54810
+ "src/policy/default-engine.ts"() {
54811
+ "use strict";
54812
+ DefaultPolicyEngine = class {
54813
+ async initialize(_config) {
54814
+ }
54815
+ async evaluateCheckExecution(_checkId, _checkConfig) {
54816
+ return { allowed: true };
54817
+ }
54818
+ async evaluateToolInvocation(_serverName, _methodName, _transport) {
54819
+ return { allowed: true };
54820
+ }
54821
+ async evaluateCapabilities(_checkId, _capabilities) {
54822
+ return { allowed: true };
54823
+ }
54824
+ async shutdown() {
54825
+ }
54826
+ };
54827
+ }
54828
+ });
54829
+
54830
+ // src/enterprise/license/validator.ts
54831
+ var validator_exports = {};
54832
+ __export(validator_exports, {
54833
+ LicenseValidator: () => LicenseValidator
54834
+ });
54835
+ var crypto2, fs21, path25, LicenseValidator;
54836
+ var init_validator = __esm({
54837
+ "src/enterprise/license/validator.ts"() {
54838
+ "use strict";
54839
+ crypto2 = __toESM(require("crypto"));
54840
+ fs21 = __toESM(require("fs"));
54841
+ path25 = __toESM(require("path"));
54842
+ LicenseValidator = class _LicenseValidator {
54843
+ /** Ed25519 public key for license verification (PEM format). */
54844
+ static PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n-----END PUBLIC KEY-----\n";
54845
+ cache = null;
54846
+ static CACHE_TTL = 5 * 60 * 1e3;
54847
+ // 5 minutes
54848
+ static GRACE_PERIOD = 72 * 3600 * 1e3;
54849
+ // 72 hours after expiry
54850
+ /**
54851
+ * Load and validate license from environment or file.
54852
+ *
54853
+ * Resolution order:
54854
+ * 1. VISOR_LICENSE env var (JWT string)
54855
+ * 2. VISOR_LICENSE_FILE env var (path to file)
54856
+ * 3. .visor-license in project root (cwd)
54857
+ * 4. .visor-license in ~/.config/visor/
54858
+ */
54859
+ async loadAndValidate() {
54860
+ if (this.cache && Date.now() - this.cache.validatedAt < _LicenseValidator.CACHE_TTL) {
54861
+ return this.cache.payload;
54862
+ }
54863
+ const token = this.resolveToken();
54864
+ if (!token) return null;
54865
+ const payload = this.verifyAndDecode(token);
54866
+ if (!payload) return null;
54867
+ this.cache = { payload, validatedAt: Date.now() };
54868
+ return payload;
54869
+ }
54870
+ /** Check if a specific feature is licensed */
54871
+ hasFeature(feature) {
54872
+ if (!this.cache) return false;
54873
+ return this.cache.payload.features.includes(feature);
54874
+ }
54875
+ /** Check if license is valid (with grace period) */
54876
+ isValid() {
54877
+ if (!this.cache) return false;
54878
+ const now = Date.now();
54879
+ const expiryMs = this.cache.payload.exp * 1e3;
54880
+ return now < expiryMs + _LicenseValidator.GRACE_PERIOD;
54881
+ }
54882
+ /** Check if the license is within its grace period (expired but still valid) */
54883
+ isInGracePeriod() {
54884
+ if (!this.cache) return false;
54885
+ const now = Date.now();
54886
+ const expiryMs = this.cache.payload.exp * 1e3;
54887
+ return now >= expiryMs && now < expiryMs + _LicenseValidator.GRACE_PERIOD;
54888
+ }
54889
+ resolveToken() {
54890
+ if (process.env.VISOR_LICENSE) {
54891
+ return process.env.VISOR_LICENSE.trim();
54892
+ }
54893
+ if (process.env.VISOR_LICENSE_FILE) {
54894
+ const resolved = path25.resolve(process.env.VISOR_LICENSE_FILE);
54895
+ const home2 = process.env.HOME || process.env.USERPROFILE || "";
54896
+ const allowedPrefixes = [path25.normalize(process.cwd())];
54897
+ if (home2) allowedPrefixes.push(path25.normalize(path25.join(home2, ".config", "visor")));
54898
+ let realPath;
54899
+ try {
54900
+ realPath = fs21.realpathSync(resolved);
54901
+ } catch {
54902
+ return null;
54903
+ }
54904
+ const isSafe = allowedPrefixes.some(
54905
+ (prefix) => realPath === prefix || realPath.startsWith(prefix + path25.sep)
54906
+ );
54907
+ if (!isSafe) return null;
54908
+ return this.readFile(realPath);
54909
+ }
54910
+ const cwdPath = path25.join(process.cwd(), ".visor-license");
54911
+ const cwdToken = this.readFile(cwdPath);
54912
+ if (cwdToken) return cwdToken;
54913
+ const home = process.env.HOME || process.env.USERPROFILE || "";
54914
+ if (home) {
54915
+ const configPath = path25.join(home, ".config", "visor", ".visor-license");
54916
+ const configToken = this.readFile(configPath);
54917
+ if (configToken) return configToken;
54918
+ }
54919
+ return null;
54920
+ }
54921
+ readFile(filePath) {
54922
+ try {
54923
+ return fs21.readFileSync(filePath, "utf-8").trim();
54924
+ } catch {
54925
+ return null;
54926
+ }
54927
+ }
54928
+ verifyAndDecode(token) {
54929
+ try {
54930
+ const parts = token.split(".");
54931
+ if (parts.length !== 3) return null;
54932
+ const [headerB64, payloadB64, signatureB64] = parts;
54933
+ const header = JSON.parse(Buffer.from(headerB64, "base64url").toString());
54934
+ if (header.alg !== "EdDSA") return null;
54935
+ const data = `${headerB64}.${payloadB64}`;
54936
+ const signature = Buffer.from(signatureB64, "base64url");
54937
+ const publicKey = crypto2.createPublicKey(_LicenseValidator.PUBLIC_KEY);
54938
+ if (publicKey.asymmetricKeyType !== "ed25519") {
54939
+ return null;
54940
+ }
54941
+ const isValid = crypto2.verify(null, Buffer.from(data), publicKey, signature);
54942
+ if (!isValid) return null;
54943
+ const payload = JSON.parse(Buffer.from(payloadB64, "base64url").toString());
54944
+ if (!payload.org || !Array.isArray(payload.features) || typeof payload.exp !== "number" || typeof payload.iat !== "number" || !payload.sub) {
54945
+ return null;
54946
+ }
54947
+ const now = Date.now();
54948
+ const expiryMs = payload.exp * 1e3;
54949
+ if (now >= expiryMs + _LicenseValidator.GRACE_PERIOD) {
54950
+ return null;
54951
+ }
54952
+ return payload;
54953
+ } catch {
54954
+ return null;
54955
+ }
54956
+ }
54957
+ };
54958
+ }
54959
+ });
54960
+
54961
+ // src/enterprise/policy/opa-compiler.ts
54962
+ var fs22, path26, os2, crypto3, import_child_process8, OpaCompiler;
54963
+ var init_opa_compiler = __esm({
54964
+ "src/enterprise/policy/opa-compiler.ts"() {
54965
+ "use strict";
54966
+ fs22 = __toESM(require("fs"));
54967
+ path26 = __toESM(require("path"));
54968
+ os2 = __toESM(require("os"));
54969
+ crypto3 = __toESM(require("crypto"));
54970
+ import_child_process8 = require("child_process");
54971
+ OpaCompiler = class _OpaCompiler {
54972
+ static CACHE_DIR = path26.join(os2.tmpdir(), "visor-opa-cache");
54973
+ /**
54974
+ * Resolve the input paths to WASM bytes.
54975
+ *
54976
+ * Strategy:
54977
+ * 1. If any path is a .wasm file, read it directly
54978
+ * 2. If a directory contains policy.wasm, read it
54979
+ * 3. Otherwise, collect all .rego files and auto-compile via `opa build`
54980
+ */
54981
+ async resolveWasmBytes(paths) {
54982
+ const regoFiles = [];
54983
+ for (const p of paths) {
54984
+ const resolved = path26.resolve(p);
54985
+ if (path26.normalize(resolved).includes("..")) {
54986
+ throw new Error(`Policy path contains traversal sequences: ${p}`);
54987
+ }
54988
+ if (resolved.endsWith(".wasm") && fs22.existsSync(resolved)) {
54989
+ return fs22.readFileSync(resolved);
54990
+ }
54991
+ if (!fs22.existsSync(resolved)) continue;
54992
+ const stat2 = fs22.statSync(resolved);
54993
+ if (stat2.isDirectory()) {
54994
+ const wasmCandidate = path26.join(resolved, "policy.wasm");
54995
+ if (fs22.existsSync(wasmCandidate)) {
54996
+ return fs22.readFileSync(wasmCandidate);
54997
+ }
54998
+ const files = fs22.readdirSync(resolved);
54999
+ for (const f of files) {
55000
+ if (f.endsWith(".rego")) {
55001
+ regoFiles.push(path26.join(resolved, f));
55002
+ }
55003
+ }
55004
+ } else if (resolved.endsWith(".rego")) {
55005
+ regoFiles.push(resolved);
55006
+ }
55007
+ }
55008
+ if (regoFiles.length === 0) {
55009
+ throw new Error(
55010
+ `OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(", ")}`
55011
+ );
55012
+ }
55013
+ return this.compileRego(regoFiles);
55014
+ }
55015
+ /**
55016
+ * Auto-compile .rego files to a WASM bundle using the `opa` CLI.
55017
+ *
55018
+ * Caches the compiled bundle based on a content hash of all input .rego files
55019
+ * so subsequent runs skip compilation if policies haven't changed.
55020
+ */
55021
+ compileRego(regoFiles) {
55022
+ try {
55023
+ (0, import_child_process8.execFileSync)("opa", ["version"], { stdio: "pipe" });
55024
+ } catch {
55025
+ throw new Error(
55026
+ "OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\nOr pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz " + regoFiles.join(" ")
55027
+ );
55028
+ }
55029
+ const hash = crypto3.createHash("sha256");
55030
+ for (const f of regoFiles.sort()) {
55031
+ hash.update(fs22.readFileSync(f));
55032
+ hash.update(f);
55033
+ }
55034
+ const cacheKey = hash.digest("hex").slice(0, 16);
55035
+ const cacheDir = _OpaCompiler.CACHE_DIR;
55036
+ const cachedWasm = path26.join(cacheDir, `${cacheKey}.wasm`);
55037
+ if (fs22.existsSync(cachedWasm)) {
55038
+ return fs22.readFileSync(cachedWasm);
55039
+ }
55040
+ fs22.mkdirSync(cacheDir, { recursive: true });
55041
+ const bundleTar = path26.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
55042
+ try {
55043
+ const args = [
55044
+ "build",
55045
+ "-t",
55046
+ "wasm",
55047
+ "-e",
55048
+ "visor",
55049
+ // entrypoint: the visor package tree
55050
+ "-o",
55051
+ bundleTar,
55052
+ ...regoFiles
55053
+ ];
55054
+ (0, import_child_process8.execFileSync)("opa", args, {
55055
+ stdio: "pipe",
55056
+ timeout: 3e4
55057
+ });
55058
+ } catch (err) {
55059
+ const stderr = err?.stderr?.toString() || "";
55060
+ throw new Error(
55061
+ `Failed to compile .rego files to WASM:
55062
+ ${stderr}
55063
+ Ensure your .rego files are valid and the \`opa\` CLI is installed.`
55064
+ );
55065
+ }
55066
+ try {
55067
+ (0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "/policy.wasm"], {
55068
+ stdio: "pipe"
55069
+ });
55070
+ const extractedWasm = path26.join(cacheDir, "policy.wasm");
55071
+ if (fs22.existsSync(extractedWasm)) {
55072
+ fs22.renameSync(extractedWasm, cachedWasm);
55073
+ }
55074
+ } catch {
55075
+ try {
55076
+ (0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "policy.wasm"], {
55077
+ stdio: "pipe"
55078
+ });
55079
+ const extractedWasm = path26.join(cacheDir, "policy.wasm");
55080
+ if (fs22.existsSync(extractedWasm)) {
55081
+ fs22.renameSync(extractedWasm, cachedWasm);
55082
+ }
55083
+ } catch (err2) {
55084
+ throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
55085
+ }
55086
+ }
55087
+ try {
55088
+ fs22.unlinkSync(bundleTar);
55089
+ } catch {
55090
+ }
55091
+ if (!fs22.existsSync(cachedWasm)) {
55092
+ throw new Error("OPA build succeeded but policy.wasm was not found in the bundle");
55093
+ }
55094
+ return fs22.readFileSync(cachedWasm);
55095
+ }
55096
+ };
55097
+ }
55098
+ });
55099
+
55100
+ // src/enterprise/policy/opa-wasm-evaluator.ts
55101
+ var fs23, path27, OpaWasmEvaluator;
55102
+ var init_opa_wasm_evaluator = __esm({
55103
+ "src/enterprise/policy/opa-wasm-evaluator.ts"() {
55104
+ "use strict";
55105
+ fs23 = __toESM(require("fs"));
55106
+ path27 = __toESM(require("path"));
55107
+ init_opa_compiler();
55108
+ OpaWasmEvaluator = class {
55109
+ policy = null;
55110
+ dataDocument = {};
55111
+ compiler = new OpaCompiler();
55112
+ async initialize(rulesPath) {
55113
+ const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
55114
+ const wasmBytes = await this.compiler.resolveWasmBytes(paths);
55115
+ try {
55116
+ const { createRequire } = require("module");
55117
+ const runtimeRequire = createRequire(__filename);
55118
+ const opaWasm = runtimeRequire("@open-policy-agent/opa-wasm");
55119
+ const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
55120
+ if (!loadPolicy) {
55121
+ throw new Error("loadPolicy not found in @open-policy-agent/opa-wasm");
55122
+ }
55123
+ this.policy = await loadPolicy(wasmBytes);
55124
+ } catch (err) {
55125
+ if (err?.code === "MODULE_NOT_FOUND" || err?.code === "ERR_MODULE_NOT_FOUND") {
55126
+ throw new Error(
55127
+ "OPA WASM evaluator requires @open-policy-agent/opa-wasm. Install it with: npm install @open-policy-agent/opa-wasm"
55128
+ );
55129
+ }
55130
+ throw err;
55131
+ }
55132
+ }
55133
+ /**
55134
+ * Load external data from a JSON file to use as the OPA data document.
55135
+ * The loaded data will be passed to `policy.setData()` during evaluation,
55136
+ * making it available in Rego via `data.<key>`.
55137
+ */
55138
+ loadData(dataPath) {
55139
+ const resolved = path27.resolve(dataPath);
55140
+ if (path27.normalize(resolved).includes("..")) {
55141
+ throw new Error(`Data path contains traversal sequences: ${dataPath}`);
55142
+ }
55143
+ if (!fs23.existsSync(resolved)) {
55144
+ throw new Error(`OPA data file not found: ${resolved}`);
55145
+ }
55146
+ const stat2 = fs23.statSync(resolved);
55147
+ if (stat2.size > 10 * 1024 * 1024) {
55148
+ throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat2.size} bytes)`);
55149
+ }
55150
+ const raw = fs23.readFileSync(resolved, "utf-8");
55151
+ try {
55152
+ const parsed = JSON.parse(raw);
55153
+ if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
55154
+ throw new Error("OPA data file must contain a JSON object (not an array or primitive)");
55155
+ }
55156
+ this.dataDocument = parsed;
55157
+ } catch (err) {
55158
+ if (err.message.startsWith("OPA data file must")) {
55159
+ throw err;
55160
+ }
55161
+ throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
55162
+ }
55163
+ }
55164
+ async evaluate(input) {
55165
+ if (!this.policy) {
55166
+ throw new Error("OPA WASM evaluator not initialized");
55167
+ }
55168
+ this.policy.setData(this.dataDocument);
55169
+ const resultSet = this.policy.evaluate(input);
55170
+ if (Array.isArray(resultSet) && resultSet.length > 0) {
55171
+ return resultSet[0].result;
55172
+ }
55173
+ return void 0;
55174
+ }
55175
+ async shutdown() {
55176
+ if (this.policy) {
55177
+ if (typeof this.policy.close === "function") {
55178
+ try {
55179
+ this.policy.close();
55180
+ } catch {
55181
+ }
55182
+ } else if (typeof this.policy.free === "function") {
55183
+ try {
55184
+ this.policy.free();
55185
+ } catch {
55186
+ }
55187
+ }
55188
+ }
55189
+ this.policy = null;
55190
+ }
55191
+ };
55192
+ }
55193
+ });
55194
+
55195
+ // src/enterprise/policy/opa-http-evaluator.ts
55196
+ var OpaHttpEvaluator;
55197
+ var init_opa_http_evaluator = __esm({
55198
+ "src/enterprise/policy/opa-http-evaluator.ts"() {
55199
+ "use strict";
55200
+ OpaHttpEvaluator = class {
55201
+ baseUrl;
55202
+ timeout;
55203
+ constructor(baseUrl, timeout = 5e3) {
55204
+ let parsed;
55205
+ try {
55206
+ parsed = new URL(baseUrl);
55207
+ } catch {
55208
+ throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
55209
+ }
55210
+ if (!["http:", "https:"].includes(parsed.protocol)) {
55211
+ throw new Error(
55212
+ `OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`
55213
+ );
55214
+ }
55215
+ const hostname = parsed.hostname;
55216
+ if (this.isBlockedHostname(hostname)) {
55217
+ throw new Error(
55218
+ `OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`
55219
+ );
55220
+ }
55221
+ this.baseUrl = baseUrl.replace(/\/+$/, "");
55222
+ this.timeout = timeout;
55223
+ }
55224
+ /**
55225
+ * Check if a hostname is blocked due to SSRF concerns.
55226
+ *
55227
+ * Blocks:
55228
+ * - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
55229
+ * - Link-local addresses (169.254.x.x)
55230
+ * - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
55231
+ * - IPv6 unique local addresses (fd00::/8)
55232
+ * - Cloud metadata services (*.internal)
55233
+ */
55234
+ isBlockedHostname(hostname) {
55235
+ if (!hostname) return true;
55236
+ const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, "");
55237
+ if (normalized === "metadata.google.internal" || normalized.endsWith(".internal")) {
55238
+ return true;
55239
+ }
55240
+ if (normalized === "localhost" || normalized === "localhost.localdomain") {
55241
+ return true;
55242
+ }
55243
+ if (normalized === "::1" || normalized === "0:0:0:0:0:0:0:1") {
55244
+ return true;
55245
+ }
55246
+ const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
55247
+ const ipv4Match = normalized.match(ipv4Pattern);
55248
+ if (ipv4Match) {
55249
+ const octets = ipv4Match.slice(1, 5).map(Number);
55250
+ if (octets.some((octet) => octet > 255)) {
55251
+ return false;
55252
+ }
55253
+ const [a, b] = octets;
55254
+ if (a === 127) {
55255
+ return true;
55256
+ }
55257
+ if (a === 0) {
55258
+ return true;
55259
+ }
55260
+ if (a === 169 && b === 254) {
55261
+ return true;
55262
+ }
55263
+ if (a === 10) {
55264
+ return true;
55265
+ }
55266
+ if (a === 172 && b >= 16 && b <= 31) {
55267
+ return true;
55268
+ }
55269
+ if (a === 192 && b === 168) {
55270
+ return true;
55271
+ }
55272
+ }
55273
+ if (normalized.startsWith("fd") || normalized.startsWith("fc")) {
55274
+ return true;
55275
+ }
55276
+ if (normalized.startsWith("fe80:")) {
55277
+ return true;
55278
+ }
55279
+ return false;
55280
+ }
55281
+ /**
55282
+ * Evaluate a policy rule against an input document via OPA REST API.
55283
+ *
55284
+ * @param input - The input document to evaluate
55285
+ * @param rulePath - OPA rule path (e.g., 'visor/check/execute')
55286
+ * @returns The result object from OPA, or undefined on error
55287
+ */
55288
+ async evaluate(input, rulePath) {
55289
+ const encodedPath = rulePath.split("/").map((s) => encodeURIComponent(s)).join("/");
55290
+ const url = `${this.baseUrl}/v1/data/${encodedPath}`;
55291
+ const controller = new AbortController();
55292
+ const timer = setTimeout(() => controller.abort(), this.timeout);
55293
+ try {
55294
+ const response = await fetch(url, {
55295
+ method: "POST",
55296
+ headers: { "Content-Type": "application/json" },
55297
+ body: JSON.stringify({ input }),
55298
+ signal: controller.signal
55299
+ });
55300
+ if (!response.ok) {
55301
+ throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
55302
+ }
55303
+ let body;
55304
+ try {
55305
+ body = await response.json();
55306
+ } catch (jsonErr) {
55307
+ throw new Error(
55308
+ `OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`
55309
+ );
55310
+ }
55311
+ return body?.result;
55312
+ } finally {
55313
+ clearTimeout(timer);
55314
+ }
55315
+ }
55316
+ /** No-op: this evaluator holds no resources that need releasing. */
+ async shutdown() {
55317
+ }
55318
+ };
55319
+ }
55320
+ });
55321
+
55322
+ // src/enterprise/policy/policy-input-builder.ts
55323
var PolicyInputBuilder;
var init_policy_input_builder = __esm({
  "src/enterprise/policy/policy-input-builder.ts"() {
    "use strict";
    /**
     * Builds the OPA input documents for the different policy scopes
     * (check execution, tool invocation, capability resolution) from the
     * configured roles and the current actor/repository/PR context.
     */
    PolicyInputBuilder = class {
      roles;
      actor;
      repository;
      pullRequest;
      constructor(policyConfig, actor, repository, pullRequest) {
        this.roles = policyConfig.roles || {};
        this.actor = actor;
        this.repository = repository;
        this.pullRequest = pullRequest;
      }
      /** Resolve which roles apply to the current actor. */
      resolveRoles() {
        const matchedRoles = [];
        for (const [roleName, roleConfig] of Object.entries(this.roles)) {
          if (!this.actorMatchesRole(roleConfig)) continue;
          // A role restricted to specific Slack channels only applies when
          // the actor's current channel is one of them.
          const channelList = roleConfig.slack_channels;
          if (channelList && channelList.length > 0) {
            const channelId = this.actor.slack?.channelId;
            if (!channelId || !channelList.includes(channelId)) {
              continue;
            }
          }
          matchedRoles.push(roleName);
        }
        return matchedRoles;
      }
      /** True when the actor matches any identity selector of the role. */
      actorMatchesRole(roleConfig) {
        const { authorAssociation, login, slack } = this.actor;
        if (roleConfig.author_association && authorAssociation && roleConfig.author_association.includes(authorAssociation)) {
          return true;
        }
        if (roleConfig.users && login && roleConfig.users.includes(login)) {
          return true;
        }
        if (roleConfig.slack_users && slack?.userId && roleConfig.slack_users.includes(slack.userId)) {
          return true;
        }
        if (roleConfig.emails && slack?.email) {
          // Email comparison is case-insensitive.
          const actorEmail = slack.email.toLowerCase();
          return roleConfig.emails.some((candidate) => candidate.toLowerCase() === actorEmail);
        }
        return false;
      }
      /** Actor view shared by every policy input document. */
      buildActor() {
        const actorView = {
          authorAssociation: this.actor.authorAssociation,
          login: this.actor.login,
          roles: this.resolveRoles(),
          isLocalMode: this.actor.isLocalMode
        };
        if (this.actor.slack) {
          actorView.slack = this.actor.slack;
        }
        return actorView;
      }
      /** Envelope fields common to all scopes. */
      buildEnvelope() {
        return {
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      forCheckExecution(check) {
        return {
          scope: "check.execute",
          check: {
            id: check.id,
            type: check.type,
            group: check.group,
            tags: check.tags,
            criticality: check.criticality,
            sandbox: check.sandbox,
            policy: check.policy
          },
          ...this.buildEnvelope()
        };
      }
      forToolInvocation(serverName, methodName, transport) {
        return {
          scope: "tool.invoke",
          tool: { serverName, methodName, transport },
          ...this.buildEnvelope()
        };
      }
      forCapabilityResolve(checkId, capabilities) {
        return {
          scope: "capability.resolve",
          check: { id: checkId, type: "ai" },
          capability: capabilities,
          ...this.buildEnvelope()
        };
      }
    };
  }
});
55416
+
55417
+ // src/enterprise/policy/opa-policy-engine.ts
55418
var opa_policy_engine_exports = {};
__export(opa_policy_engine_exports, {
  OpaPolicyEngine: () => OpaPolicyEngine
});
var OpaPolicyEngine;
var init_opa_policy_engine = __esm({
  "src/enterprise/policy/opa-policy-engine.ts"() {
    "use strict";
    init_opa_wasm_evaluator();
    init_opa_http_evaluator();
    init_policy_input_builder();
    /**
     * Policy engine that evaluates Visor authorization decisions with OPA,
     * either via an embedded WASM bundle (`engine: "local"`) or a remote
     * OPA server (`engine: "remote"`). Any other engine value leaves the
     * evaluator null and every decision is allowed.
     */
    OpaPolicyEngine = class {
      evaluator = null;
      fallback;
      timeout;
      config;
      inputBuilder = null;
      logger = null;
      constructor(config) {
        this.config = config;
        // fallback controls behavior when evaluation fails or yields no
        // result: "deny" (default), "allow", or "warn" (allow but flag).
        this.fallback = config.fallback || "deny";
        this.timeout = config.timeout || 5e3;
      }
      /**
       * Build actor/repo/PR context from environment variables and
       * construct the configured evaluator (WASM or HTTP).
       * @throws when the selected engine is missing its required config key.
       */
      async initialize(config) {
        try {
          this.logger = (init_logger(), __toCommonJS(logger_exports)).logger;
        } catch {
          // Logger is optional; evaluation proceeds without debug output.
        }
        const actor = {
          authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
          login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
          isLocalMode: !process.env.GITHUB_ACTIONS
        };
        const repo = {
          owner: process.env.GITHUB_REPOSITORY_OWNER,
          name: process.env.GITHUB_REPOSITORY?.split("/")[1],
          branch: process.env.GITHUB_HEAD_REF,
          baseBranch: process.env.GITHUB_BASE_REF,
          event: process.env.GITHUB_EVENT_NAME
        };
        // Only forward a PR number that parses to a finite integer.
        const prNum = process.env.GITHUB_PR_NUMBER ? parseInt(process.env.GITHUB_PR_NUMBER, 10) : void 0;
        const pullRequest = {
          number: prNum !== void 0 && Number.isFinite(prNum) ? prNum : void 0
        };
        this.inputBuilder = new PolicyInputBuilder(config, actor, repo, pullRequest);
        if (config.engine === "local") {
          if (!config.rules) {
            throw new Error("OPA local mode requires `policy.rules` path to .wasm or .rego files");
          }
          const wasm = new OpaWasmEvaluator();
          await wasm.initialize(config.rules);
          if (config.data) {
            wasm.loadData(config.data);
          }
          this.evaluator = wasm;
        } else if (config.engine === "remote") {
          if (!config.url) {
            throw new Error("OPA remote mode requires `policy.url` pointing to OPA server");
          }
          this.evaluator = new OpaHttpEvaluator(config.url, this.timeout);
        } else {
          this.evaluator = null;
        }
      }
      /**
       * Update actor/repo/PR context (e.g., after PR info becomes available).
       * Called by the enterprise loader when engine context is enriched.
       */
      setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new PolicyInputBuilder(this.config, actor, repo, pullRequest);
      }
      /** Decide whether a check may execute; allows when no evaluator is configured. */
      async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const cfg = checkConfig && typeof checkConfig === "object" ? checkConfig : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
          id: checkId,
          type: cfg.type || "ai",
          group: cfg.group,
          tags: cfg.tags,
          criticality: cfg.criticality,
          sandbox: cfg.sandbox,
          policy: policyOverride
        });
        return this.doEvaluate(input, this.resolveRulePath("check.execute", policyOverride?.rule));
      }
      /** Decide whether an MCP tool call may proceed. */
      async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, "visor/tool/invoke");
      }
      /** Decide which capabilities a check may resolve. */
      async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, "visor/capability/resolve");
      }
      async shutdown() {
        if (this.evaluator && "shutdown" in this.evaluator) {
          await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
      }
      /** Map a scope (or per-check override) to a full OPA rule path. */
      resolveRulePath(defaultScope, override) {
        if (override) {
          return override.startsWith("visor/") ? override : `visor/${override}`;
        }
        return `visor/${defaultScope.replace(/\./g, "/")}`;
      }
      /**
       * Run one evaluation with a timeout and map the raw result to a
       * decision, applying the configured fallback on failure.
       */
      async doEvaluate(input, rulePath) {
        try {
          this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
          let timer;
          const timeoutPromise = new Promise((_resolve, reject) => {
            timer = setTimeout(() => reject(new Error("policy evaluation timeout")), this.timeout);
          });
          // Keep a handle on the evaluation promise and attach a no-op
          // rejection handler: if the timeout wins the race, a later
          // rejection from rawEvaluate would otherwise surface as an
          // unhandled promise rejection.
          const evalPromise = this.rawEvaluate(input, rulePath);
          evalPromise.catch(() => {});
          try {
            const result = await Promise.race([evalPromise, timeoutPromise]);
            const decision = this.parseDecision(result);
            if (!decision.allowed && this.fallback === "warn") {
              // "warn" fallback converts denials into flagged allowances.
              decision.allowed = true;
              decision.warn = true;
              decision.reason = `audit: ${decision.reason || "policy denied"}`;
            }
            this.logger?.debug(
              `[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || "none"}`
            );
            return decision;
          } finally {
            if (timer) clearTimeout(timer);
          }
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: `policy evaluation failed, fallback=${this.fallback}`
          };
        }
      }
      /** Dispatch to the WASM or HTTP evaluator. */
      async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof OpaWasmEvaluator) {
          const result = await this.evaluator.evaluate(input);
          return this.navigateWasmResult(result, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
      }
      /**
       * Navigate nested OPA WASM result tree to reach the specific rule's output.
       * The WASM entrypoint `-e visor` means the result root IS the visor package,
       * so we strip the `visor/` prefix and walk the remaining segments.
       */
      navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== "object") return result;
        const segments = rulePath.replace(/^visor\//, "").split("/");
        let current = result;
        for (const seg of segments) {
          if (current && typeof current === "object" && seg in current) {
            current = current[seg];
          } else {
            return void 0;
          }
        }
        return current;
      }
      /** Convert a raw rule output into a {allowed, warn?, reason?, capabilities?} decision. */
      parseDecision(result) {
        if (result === void 0 || result === null) {
          // No rule matched: fall back per configuration.
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: this.fallback === "warn" ? "audit: no policy result" : "no policy result"
          };
        }
        // A result object without `allowed: false` counts as allowed.
        const allowed = result.allowed !== false;
        const decision = {
          allowed,
          reason: result.reason
        };
        if (result.capabilities) {
          decision.capabilities = result.capabilities;
        }
        return decision;
      }
    };
  }
});
55605
+
55606
+ // src/enterprise/scheduler/knex-store.ts
55607
+ // Bundler export table for src/enterprise/scheduler/knex-store.ts
+ // (exposes KnexStoreBackend via the __export helper).
+ var knex_store_exports = {};
55608
+ __export(knex_store_exports, {
55609
+ KnexStoreBackend: () => KnexStoreBackend
55610
+ });
55611
/** Coerce a DB numeric column (which some drivers return as string) to a number. */
function toNum(val) {
  if (val === null || val === void 0) return void 0;
  if (typeof val === "string") {
    return parseInt(val, 10);
  }
  return val;
}
/** Parse a JSON text column, returning undefined for empty or malformed values. */
function safeJsonParse2(value) {
  if (!value) return void 0;
  try {
    return JSON.parse(value);
  } catch {
    return void 0;
  }
}
/** Map a message_triggers row (snake_case columns) to a trigger object. */
function fromTriggerRow2(row) {
  const trigger = {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: row.creator_context ?? undefined,
    creatorName: row.creator_name ?? undefined,
    description: row.description ?? undefined,
    channels: safeJsonParse2(row.channels),
    fromUsers: safeJsonParse2(row.from_users),
    // Boolean columns come back as true/false (pg) or 1/0 (mysql/mssql).
    fromBots: row.from_bots === true || row.from_bots === 1,
    contains: safeJsonParse2(row.contains),
    matchPattern: row.match_pattern ?? undefined,
    threads: row.threads,
    workflow: row.workflow,
    inputs: safeJsonParse2(row.inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    enabled: row.enabled === true || row.enabled === 1,
    createdAt: toNum(row.created_at)
  };
  return trigger;
}
/** Map a trigger object to a message_triggers insert row. */
function toTriggerInsertRow(trigger) {
  const jsonOrNull = (value) => (value ? JSON.stringify(value) : null);
  return {
    id: trigger.id,
    creator_id: trigger.creatorId,
    creator_context: trigger.creatorContext ?? null,
    creator_name: trigger.creatorName ?? null,
    description: trigger.description ?? null,
    channels: jsonOrNull(trigger.channels),
    from_users: jsonOrNull(trigger.fromUsers),
    from_bots: trigger.fromBots,
    contains: jsonOrNull(trigger.contains),
    match_pattern: trigger.matchPattern ?? null,
    threads: trigger.threads,
    workflow: trigger.workflow,
    inputs: jsonOrNull(trigger.inputs),
    output_context: jsonOrNull(trigger.outputContext),
    status: trigger.status,
    enabled: trigger.enabled,
    created_at: trigger.createdAt
  };
}
/** Map a schedules row (snake_case columns) to a schedule object. */
function fromDbRow2(row) {
  const schedule = {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: row.creator_context ?? undefined,
    creatorName: row.creator_name ?? undefined,
    timezone: row.timezone,
    schedule: row.schedule_expr,
    runAt: toNum(row.run_at),
    isRecurring: row.is_recurring === true || row.is_recurring === 1,
    originalExpression: row.original_expression,
    workflow: row.workflow ?? undefined,
    workflowInputs: safeJsonParse2(row.workflow_inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    createdAt: toNum(row.created_at),
    lastRunAt: toNum(row.last_run_at),
    nextRunAt: toNum(row.next_run_at),
    runCount: row.run_count,
    failureCount: row.failure_count,
    lastError: row.last_error ?? undefined,
    previousResponse: row.previous_response ?? undefined
  };
  return schedule;
}
/** Map a schedule object to a schedules insert row. */
function toInsertRow(schedule) {
  const jsonOrNull = (value) => (value ? JSON.stringify(value) : null);
  return {
    id: schedule.id,
    creator_id: schedule.creatorId,
    creator_context: schedule.creatorContext ?? null,
    creator_name: schedule.creatorName ?? null,
    timezone: schedule.timezone,
    schedule_expr: schedule.schedule,
    run_at: schedule.runAt ?? null,
    is_recurring: schedule.isRecurring,
    original_expression: schedule.originalExpression,
    workflow: schedule.workflow ?? null,
    workflow_inputs: jsonOrNull(schedule.workflowInputs),
    output_context: jsonOrNull(schedule.outputContext),
    status: schedule.status,
    created_at: schedule.createdAt,
    last_run_at: schedule.lastRunAt ?? null,
    next_run_at: schedule.nextRunAt ?? null,
    run_count: schedule.runCount,
    failure_count: schedule.failureCount,
    last_error: schedule.lastError ?? null,
    previous_response: schedule.previousResponse ?? null
  };
}
55713
var fs24, path28, import_uuid2, KnexStoreBackend;
var init_knex_store = __esm({
  "src/enterprise/scheduler/knex-store.ts"() {
    "use strict";
    fs24 = __toESM(require("fs"));
    path28 = __toESM(require("path"));
    import_uuid2 = require("uuid");
    init_logger();
    /**
     * Schedule/trigger store backed by a SQL database via knex
     * (PostgreSQL, MySQL, or MSSQL). Also provides a distributed lock
     * table for HA scheduler coordination.
     */
    KnexStoreBackend = class {
      knex = null;
      driver;
      connection;
      constructor(driver, storageConfig, _haConfig) {
        this.driver = driver;
        this.connection = storageConfig.connection || {};
      }
      /**
       * Lazily require knex (optional peer dependency), open the pool,
       * and ensure the schema exists.
       * @throws with an actionable message when knex is not installed.
       */
      async initialize() {
        const { createRequire } = require("module");
        const runtimeRequire = createRequire(__filename);
        let knexFactory;
        try {
          knexFactory = runtimeRequire("knex");
        } catch (err) {
          const code = err?.code;
          if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
            throw new Error(
              "knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
            );
          }
          throw err;
        }
        const clientMap = {
          postgresql: "pg",
          mysql: "mysql2",
          mssql: "tedious"
        };
        const client = clientMap[this.driver];
        let connection;
        if (this.connection.connection_string) {
          connection = this.connection.connection_string;
        } else if (this.driver === "mssql") {
          // MSSQL/tedious uses a different connection shape (server/options).
          connection = this.buildMssqlConnection();
        } else {
          connection = this.buildStandardConnection();
        }
        this.knex = knexFactory({
          client,
          connection,
          pool: {
            min: this.connection.pool?.min ?? 0,
            max: this.connection.pool?.max ?? 10
          }
        });
        await this.migrateSchema();
        logger.info(`[KnexStore] Initialized (${this.driver})`);
      }
      /** Connection object for pg/mysql2 drivers. */
      buildStandardConnection() {
        return {
          host: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          ssl: this.resolveSslConfig()
        };
      }
      /** Connection object for the tedious (MSSQL) driver. */
      buildMssqlConnection() {
        const ssl = this.connection.ssl;
        const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
        return {
          server: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          options: {
            encrypt: sslEnabled,
            trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
          }
        };
      }
      /**
       * Translate the `ssl` config into the node driver's TLS options,
       * reading CA/cert/key files from validated paths.
       */
      resolveSslConfig() {
        const ssl = this.connection.ssl;
        if (ssl === false || ssl === void 0) return false;
        if (ssl === true) return { rejectUnauthorized: true };
        if (ssl.enabled === false) return false;
        const result = {
          rejectUnauthorized: ssl.reject_unauthorized !== false
        };
        if (ssl.ca) {
          const caPath = this.validateSslPath(ssl.ca, "CA certificate");
          result.ca = fs24.readFileSync(caPath, "utf8");
        }
        if (ssl.cert) {
          const certPath = this.validateSslPath(ssl.cert, "client certificate");
          result.cert = fs24.readFileSync(certPath, "utf8");
        }
        if (ssl.key) {
          const keyPath = this.validateSslPath(ssl.key, "client key");
          result.key = fs24.readFileSync(keyPath, "utf8");
        }
        return result;
      }
      /**
       * Validate and resolve an SSL material path.
       *
       * Note: the previous `resolved !== path.normalize(resolved)` check was
       * dead code — path.resolve() always returns a normalized path — so it
       * could never fire. Guard against embedded NUL bytes instead, which
       * would truncate the path at the OS level.
       */
      validateSslPath(filePath, label) {
        if (typeof filePath !== "string" || filePath.includes("\0")) {
          throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
        }
        const resolved = path28.resolve(filePath);
        if (!fs24.existsSync(resolved)) {
          throw new Error(`SSL ${label} not found: ${filePath}`);
        }
        return resolved;
      }
      async shutdown() {
        if (this.knex) {
          await this.knex.destroy();
          this.knex = null;
        }
      }
      /** Create the schedules, message_triggers, and scheduler_locks tables if missing. */
      async migrateSchema() {
        const knex = this.getKnex();
        const exists = await knex.schema.hasTable("schedules");
        if (!exists) {
          await knex.schema.createTable("schedules", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.string("timezone", 64).notNullable().defaultTo("UTC");
            table.string("schedule_expr", 255);
            table.bigInteger("run_at");
            table.boolean("is_recurring").notNullable();
            table.text("original_expression");
            table.string("workflow", 255);
            table.text("workflow_inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().index();
            table.bigInteger("created_at").notNullable();
            table.bigInteger("last_run_at");
            table.bigInteger("next_run_at");
            table.integer("run_count").notNullable().defaultTo(0);
            table.integer("failure_count").notNullable().defaultTo(0);
            table.text("last_error");
            table.text("previous_response");
            // Composite index for the due-schedule poll query.
            table.index(["status", "next_run_at"]);
          });
        }
        const triggersExist = await knex.schema.hasTable("message_triggers");
        if (!triggersExist) {
          await knex.schema.createTable("message_triggers", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.text("description");
            table.text("channels");
            table.text("from_users");
            table.boolean("from_bots").notNullable().defaultTo(false);
            table.text("contains");
            table.text("match_pattern");
            table.string("threads", 20).notNullable().defaultTo("any");
            table.string("workflow", 255).notNullable();
            table.text("inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().defaultTo("active").index();
            table.boolean("enabled").notNullable().defaultTo(true);
            table.bigInteger("created_at").notNullable();
          });
        }
        const locksExist = await knex.schema.hasTable("scheduler_locks");
        if (!locksExist) {
          await knex.schema.createTable("scheduler_locks", (table) => {
            table.string("lock_id", 255).primary();
            table.string("node_id", 255).notNullable();
            table.string("lock_token", 36).notNullable();
            table.bigInteger("acquired_at").notNullable();
            table.bigInteger("expires_at").notNullable();
          });
        }
      }
      /** @throws when used before initialize(). */
      getKnex() {
        if (!this.knex) {
          throw new Error("[KnexStore] Not initialized. Call initialize() first.");
        }
        return this.knex;
      }
      // --- CRUD ---
      async create(schedule) {
        const knex = this.getKnex();
        const newSchedule = {
          ...schedule,
          id: (0, import_uuid2.v4)(),
          createdAt: Date.now(),
          runCount: 0,
          failureCount: 0,
          status: "active"
        };
        await knex("schedules").insert(toInsertRow(newSchedule));
        logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
        return newSchedule;
      }
      /** Idempotent import: skips rows whose id already exists. */
      async importSchedule(schedule) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", schedule.id).first();
        if (existing) return;
        await knex("schedules").insert(toInsertRow(schedule));
      }
      async get(id) {
        const knex = this.getKnex();
        const row = await knex("schedules").where("id", id).first();
        return row ? fromDbRow2(row) : void 0;
      }
      /** Merge a partial patch over the stored row; id is never patched. */
      async update(id, patch) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", id).first();
        if (!existing) return void 0;
        const current = fromDbRow2(existing);
        const updated = { ...current, ...patch, id: current.id };
        const row = toInsertRow(updated);
        delete row.id;
        await knex("schedules").where("id", id).update(row);
        return updated;
      }
      async delete(id) {
        const knex = this.getKnex();
        const deleted = await knex("schedules").where("id", id).del();
        if (deleted > 0) {
          logger.info(`[KnexStore] Deleted schedule ${id}`);
          return true;
        }
        return false;
      }
      // --- Queries ---
      async getByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("creator_id", creatorId);
        return rows.map((r) => fromDbRow2(r));
      }
      async getActiveSchedules() {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("status", "active");
        return rows.map((r) => fromDbRow2(r));
      }
      /**
       * Fetch active schedules that are due: one-shot rows past run_at,
       * plus recurring rows past next_run_at. MSSQL stores booleans as 0/1.
       */
      async getDueSchedules(now) {
        const ts = now ?? Date.now();
        const knex = this.getKnex();
        const bFalse = this.driver === "mssql" ? 0 : false;
        const bTrue = this.driver === "mssql" ? 1 : true;
        const rows = await knex("schedules").where("status", "active").andWhere(function() {
          this.where(function() {
            this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
          }).orWhere(function() {
            this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
          });
        });
        return rows.map((r) => fromDbRow2(r));
      }
      /** Case-insensitive substring match on workflow name (LIKE wildcards escaped). */
      async findByWorkflow(creatorId, workflowName) {
        const knex = this.getKnex();
        const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
        const pattern = `%${escaped}%`;
        const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
        return rows.map((r) => fromDbRow2(r));
      }
      async getAll() {
        const knex = this.getKnex();
        const rows = await knex("schedules");
        return rows.map((r) => fromDbRow2(r));
      }
      /** Aggregate counts by status and recurrence in a single query. */
      async getStats() {
        const knex = this.getKnex();
        const boolTrue = this.driver === "mssql" ? "1" : "true";
        const boolFalse = this.driver === "mssql" ? "0" : "false";
        const result = await knex("schedules").select(
          knex.raw("COUNT(*) as total"),
          knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
          knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
          knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
          knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
        ).first();
        return {
          total: Number(result.total) || 0,
          active: Number(result.active) || 0,
          paused: Number(result.paused) || 0,
          completed: Number(result.completed) || 0,
          failed: Number(result.failed) || 0,
          recurring: Number(result.recurring) || 0,
          oneTime: Number(result.one_time) || 0
        };
      }
      /** @throws when a global/per-user/recurring quota would be exceeded. */
      async validateLimits(creatorId, isRecurring, limits) {
        const knex = this.getKnex();
        if (limits.maxGlobal) {
          const result = await knex("schedules").count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxGlobal) {
            throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
          }
        }
        if (limits.maxPerUser) {
          const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxPerUser) {
            throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
          }
        }
        if (isRecurring && limits.maxRecurringPerUser) {
          const bTrue = this.driver === "mssql" ? 1 : true;
          const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
            throw new Error(
              `You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
            );
          }
        }
      }
      // --- HA Distributed Locking (via scheduler_locks table) ---
      /**
       * Try to acquire a lease: first take over an expired row, otherwise
       * insert a fresh one. Returns the lock token, or null if another
       * node holds an unexpired lock (insert hits the primary key).
       */
      async tryAcquireLock(lockId, nodeId, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const token = (0, import_uuid2.v4)();
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
          node_id: nodeId,
          lock_token: token,
          acquired_at: now,
          expires_at: expiresAt
        });
        if (updated > 0) return token;
        try {
          await knex("scheduler_locks").insert({
            lock_id: lockId,
            node_id: nodeId,
            lock_token: token,
            acquired_at: now,
            expires_at: expiresAt
          });
          return token;
        } catch {
          // Expected duplicate-key when another node holds the lock.
          return null;
        }
      }
      /** Token check prevents releasing a lock re-acquired by another node. */
      async releaseLock(lockId, lockToken) {
        const knex = this.getKnex();
        await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
      }
      /** Extend the lease; returns false if the token no longer owns the lock. */
      async renewLock(lockId, lockToken, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
        return updated > 0;
      }
      /** No-op: writes go straight to the database. */
      async flush() {
      }
      // --- Message Trigger CRUD ---
      async createTrigger(trigger) {
        const knex = this.getKnex();
        const newTrigger = {
          ...trigger,
          id: (0, import_uuid2.v4)(),
          createdAt: Date.now()
        };
        await knex("message_triggers").insert(toTriggerInsertRow(newTrigger));
        logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
        return newTrigger;
      }
      async getTrigger(id) {
        const knex = this.getKnex();
        const row = await knex("message_triggers").where("id", id).first();
        return row ? fromTriggerRow2(row) : void 0;
      }
      /** Merge a partial patch; id and createdAt are never patched. */
      async updateTrigger(id, patch) {
        const knex = this.getKnex();
        const existing = await knex("message_triggers").where("id", id).first();
        if (!existing) return void 0;
        const current = fromTriggerRow2(existing);
        const updated = {
          ...current,
          ...patch,
          id: current.id,
          createdAt: current.createdAt
        };
        const row = toTriggerInsertRow(updated);
        delete row.id;
        await knex("message_triggers").where("id", id).update(row);
        return updated;
      }
      async deleteTrigger(id) {
        const knex = this.getKnex();
        const deleted = await knex("message_triggers").where("id", id).del();
        if (deleted > 0) {
          logger.info(`[KnexStore] Deleted trigger ${id}`);
          return true;
        }
        return false;
      }
      async getTriggersByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex("message_triggers").where("creator_id", creatorId);
        return rows.map((r) => fromTriggerRow2(r));
      }
      async getActiveTriggers() {
        const knex = this.getKnex();
        const rows = await knex("message_triggers").where("status", "active").where("enabled", this.driver === "mssql" ? 1 : true);
        return rows.map((r) => fromTriggerRow2(r));
      }
    };
  }
});
56123
+
56124
// src/enterprise/loader.ts
// esbuild-generated module-exports table for the enterprise loader module:
// registers the two loader entry points on the lazily-populated namespace.
var loader_exports = {};
__export(loader_exports, {
  loadEnterprisePolicyEngine: () => loadEnterprisePolicyEngine,
  loadEnterpriseStoreBackend: () => loadEnterpriseStoreBackend
});
56130
/**
 * Resolve the policy engine to use at runtime.
 *
 * The OPA-backed enterprise engine is returned only when a valid license
 * carrying the "policy" feature is present; in every other case (no license,
 * missing feature, or any initialization failure) this degrades gracefully
 * to DefaultPolicyEngine. An expired-but-in-grace license still loads the
 * enterprise engine, with a renewal warning.
 *
 * @param {object} config - Policy configuration handed to the engine.
 * @returns {Promise<object>} An initialized policy engine instance.
 */
async function loadEnterprisePolicyEngine(config) {
  try {
    const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
    const validator = new LicenseValidator2();
    const license = await validator.loadAndValidate();
    const entitled = Boolean(license) && validator.hasFeature("policy");
    if (!entitled) {
      return new DefaultPolicyEngine();
    }
    if (validator.isInGracePeriod()) {
      console.warn(
        "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
      );
    }
    const { OpaPolicyEngine: OpaPolicyEngine2 } = await Promise.resolve().then(() => (init_opa_policy_engine(), opa_policy_engine_exports));
    const engine = new OpaPolicyEngine2(config);
    await engine.initialize(config);
    return engine;
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    // Logging is best-effort only: a logger failure must not mask the fallback.
    try {
      const { logger: logger2 } = (init_logger(), __toCommonJS(logger_exports));
      logger2.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
    } catch {
    }
    return new DefaultPolicyEngine();
  }
}
56157
/**
 * Construct the SQL-backed scheduler store, gated on an enterprise license.
 *
 * Unlike the policy-engine loader, this does NOT fall back silently: using a
 * SQL driver without the "scheduler-sql" feature is a hard configuration
 * error. An expired-but-in-grace license is accepted with a renewal warning.
 *
 * @param {string} driver - SQL driver name (e.g. "postgres", "mssql").
 * @param {object} storageConfig - Storage/connection configuration.
 * @param {object} haConfig - High-availability configuration.
 * @returns {Promise<object>} A KnexStoreBackend instance.
 * @throws {Error} When no valid license with the "scheduler-sql" feature exists.
 */
async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
  const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
  const validator = new LicenseValidator2();
  const license = await validator.loadAndValidate();
  const entitled = Boolean(license) && validator.hasFeature("scheduler-sql");
  if (!entitled) {
    throw new Error(
      `The ${driver} schedule storage driver requires a Visor Enterprise license with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`
    );
  }
  if (validator.isInGracePeriod()) {
    console.warn(
      "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
    );
  }
  const { KnexStoreBackend: KnexStoreBackend2 } = await Promise.resolve().then(() => (init_knex_store(), knex_store_exports));
  return new KnexStoreBackend2(driver, storageConfig, haConfig);
}
56174
// esbuild __esm wrapper: lazily initializes src/enterprise/loader.ts on first
// use, pulling in the default policy engine it may fall back to.
var init_loader = __esm({
  "src/enterprise/loader.ts"() {
    "use strict";
    init_default_engine();
  }
});
56180
+
54718
56181
  // src/event-bus/event-bus.ts
54719
56182
  var event_bus_exports = {};
54720
56183
  __export(event_bus_exports, {
@@ -55621,8 +57084,8 @@ ${content}
55621
57084
  * Sleep utility
55622
57085
  */
55623
57086
  sleep(ms) {
55624
- return new Promise((resolve15) => {
55625
- const t = setTimeout(resolve15, ms);
57087
+ return new Promise((resolve19) => {
57088
+ const t = setTimeout(resolve19, ms);
55626
57089
  if (typeof t.unref === "function") {
55627
57090
  try {
55628
57091
  t.unref();
@@ -55907,8 +57370,8 @@ ${end}`);
55907
57370
  async updateGroupedComment(ctx, comments, group, changedIds) {
55908
57371
  const existingLock = this.updateLocks.get(group);
55909
57372
  let resolveLock;
55910
- const ourLock = new Promise((resolve15) => {
55911
- resolveLock = resolve15;
57373
+ const ourLock = new Promise((resolve19) => {
57374
+ resolveLock = resolve19;
55912
57375
  });
55913
57376
  this.updateLocks.set(group, ourLock);
55914
57377
  try {
@@ -56221,7 +57684,7 @@ ${blocks}
56221
57684
  * Sleep utility for enforcing delays
56222
57685
  */
56223
57686
  sleep(ms) {
56224
- return new Promise((resolve15) => setTimeout(resolve15, ms));
57687
+ return new Promise((resolve19) => setTimeout(resolve19, ms));
56225
57688
  }
56226
57689
  };
56227
57690
  }
@@ -57513,15 +58976,15 @@ function serializeRunState(state) {
57513
58976
  ])
57514
58977
  };
57515
58978
  }
57516
- var path26, fs22, StateMachineExecutionEngine;
58979
+ var path30, fs26, StateMachineExecutionEngine;
57517
58980
  var init_state_machine_execution_engine = __esm({
57518
58981
  "src/state-machine-execution-engine.ts"() {
57519
58982
  "use strict";
57520
58983
  init_runner();
57521
58984
  init_logger();
57522
58985
  init_sandbox_manager();
57523
- path26 = __toESM(require("path"));
57524
- fs22 = __toESM(require("fs"));
58986
+ path30 = __toESM(require("path"));
58987
+ fs26 = __toESM(require("fs"));
57525
58988
  StateMachineExecutionEngine = class _StateMachineExecutionEngine {
57526
58989
  workingDirectory;
57527
58990
  executionContext;
@@ -57753,8 +59216,8 @@ var init_state_machine_execution_engine = __esm({
57753
59216
  logger.debug(
57754
59217
  `[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`
57755
59218
  );
57756
- const { loadEnterprisePolicyEngine } = await import("./enterprise/loader");
57757
- context2.policyEngine = await loadEnterprisePolicyEngine(configWithTagFilter.policy);
59219
+ const { loadEnterprisePolicyEngine: loadEnterprisePolicyEngine2 } = await Promise.resolve().then(() => (init_loader(), loader_exports));
59220
+ context2.policyEngine = await loadEnterprisePolicyEngine2(configWithTagFilter.policy);
57758
59221
  logger.debug(
57759
59222
  `[PolicyEngine] Initialized: ${context2.policyEngine?.constructor?.name || "unknown"}`
57760
59223
  );
@@ -57906,9 +59369,9 @@ var init_state_machine_execution_engine = __esm({
57906
59369
  }
57907
59370
  const checkId = String(ev?.checkId || "unknown");
57908
59371
  const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
57909
- const baseDir = process.env.VISOR_SNAPSHOT_DIR || path26.resolve(process.cwd(), ".visor", "snapshots");
57910
- fs22.mkdirSync(baseDir, { recursive: true });
57911
- const filePath = path26.join(baseDir, `${threadKey}-${checkId}.json`);
59372
+ const baseDir = process.env.VISOR_SNAPSHOT_DIR || path30.resolve(process.cwd(), ".visor", "snapshots");
59373
+ fs26.mkdirSync(baseDir, { recursive: true });
59374
+ const filePath = path30.join(baseDir, `${threadKey}-${checkId}.json`);
57912
59375
  await this.saveSnapshotToFile(filePath);
57913
59376
  logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
57914
59377
  try {
@@ -58049,7 +59512,7 @@ var init_state_machine_execution_engine = __esm({
58049
59512
  * Does not include secrets. Intended for debugging and future resume support.
58050
59513
  */
58051
59514
  async saveSnapshotToFile(filePath) {
58052
- const fs23 = await import("fs/promises");
59515
+ const fs27 = await import("fs/promises");
58053
59516
  const ctx = this._lastContext;
58054
59517
  const runner = this._lastRunner;
58055
59518
  if (!ctx || !runner) {
@@ -58069,14 +59532,14 @@ var init_state_machine_execution_engine = __esm({
58069
59532
  journal: entries,
58070
59533
  requestedChecks: ctx.requestedChecks || []
58071
59534
  };
58072
- await fs23.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
59535
+ await fs27.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
58073
59536
  }
58074
59537
  /**
58075
59538
  * Load a snapshot JSON from file and return it. Resume support can build on this.
58076
59539
  */
58077
59540
  async loadSnapshotFromFile(filePath) {
58078
- const fs23 = await import("fs/promises");
58079
- const raw = await fs23.readFile(filePath, "utf8");
59541
+ const fs27 = await import("fs/promises");
59542
+ const raw = await fs27.readFile(filePath, "utf8");
58080
59543
  return JSON.parse(raw);
58081
59544
  }
58082
59545
  /**