@probelabs/visor 0.1.165 → 0.1.166-ee

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88) hide show
  1. package/dist/index.js +2363 -141
  2. package/dist/pr-analyzer.d.ts +2 -1
  3. package/dist/pr-analyzer.d.ts.map +1 -1
  4. package/dist/providers/http-client-provider.d.ts.map +1 -1
  5. package/dist/sdk/{check-provider-registry-6P2KJ423.mjs → check-provider-registry-PU67PWTU.mjs} +5 -5
  6. package/dist/sdk/{check-provider-registry-TTVN3V2O.mjs → check-provider-registry-TGPICTHD.mjs} +5 -5
  7. package/dist/sdk/{chunk-EO4IJNM7.mjs → chunk-E7NRUDWL.mjs} +2 -2
  8. package/dist/sdk/{chunk-G5JBWW3O.mjs → chunk-P5P6BOO7.mjs} +161 -21
  9. package/dist/sdk/chunk-P5P6BOO7.mjs.map +1 -0
  10. package/dist/sdk/{chunk-GMHSXC5K.mjs → chunk-RV5SK4FZ.mjs} +3 -3
  11. package/dist/sdk/{chunk-S47KBQQK.mjs → chunk-T5USZCCM.mjs} +2 -2
  12. package/dist/sdk/{chunk-S47KBQQK.mjs.map → chunk-T5USZCCM.mjs.map} +1 -1
  13. package/dist/sdk/{chunk-GOJRNYTV.mjs → chunk-WSYVK6ML.mjs} +188 -22
  14. package/dist/sdk/chunk-WSYVK6ML.mjs.map +1 -0
  15. package/dist/sdk/{failure-condition-evaluator-N3VNLWZD.mjs → failure-condition-evaluator-GPANOHP2.mjs} +3 -3
  16. package/dist/sdk/{github-frontend-ATORHHF6.mjs → github-frontend-P274ISBJ.mjs} +3 -3
  17. package/dist/sdk/{host-JROON6IT.mjs → host-AIMRV5YL.mjs} +2 -2
  18. package/dist/sdk/{host-OBXKDFT7.mjs → host-QYPOS4R6.mjs} +2 -2
  19. package/dist/sdk/knex-store-CRORFJE6.mjs +527 -0
  20. package/dist/sdk/knex-store-CRORFJE6.mjs.map +1 -0
  21. package/dist/sdk/loader-NJCF7DUS.mjs +89 -0
  22. package/dist/sdk/loader-NJCF7DUS.mjs.map +1 -0
  23. package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs +655 -0
  24. package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs.map +1 -0
  25. package/dist/sdk/{routing-QCDX43XD.mjs → routing-BXHP2E62.mjs} +4 -4
  26. package/dist/sdk/{schedule-tool-D5TSTGP2.mjs → schedule-tool-5FVFYH2A.mjs} +5 -5
  27. package/dist/sdk/{schedule-tool-XCGJI2VB.mjs → schedule-tool-MQHISNJ6.mjs} +5 -5
  28. package/dist/sdk/{schedule-tool-handler-DKHHPZAG.mjs → schedule-tool-handler-4TCT2P7A.mjs} +5 -5
  29. package/dist/sdk/{schedule-tool-handler-OKZ53WMC.mjs → schedule-tool-handler-TZYXM664.mjs} +5 -5
  30. package/dist/sdk/sdk.js +1779 -265
  31. package/dist/sdk/sdk.js.map +1 -1
  32. package/dist/sdk/sdk.mjs +4 -4
  33. package/dist/sdk/{trace-helpers-J5CJ4PUN.mjs → trace-helpers-UG6FOWVV.mjs} +2 -2
  34. package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
  35. package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
  36. package/dist/sdk/{workflow-check-provider-T6WFK4RB.mjs → workflow-check-provider-BE2SVYWW.mjs} +5 -5
  37. package/dist/sdk/{workflow-check-provider-WLUAJPAS.mjs → workflow-check-provider-QKHL6AFT.mjs} +5 -5
  38. package/dist/slack/socket-runner.d.ts +14 -0
  39. package/dist/slack/socket-runner.d.ts.map +1 -1
  40. package/dist/utils/oauth2-token-cache.d.ts +44 -0
  41. package/dist/utils/oauth2-token-cache.d.ts.map +1 -0
  42. package/package.json +2 -2
  43. package/dist/output/traces/run-2026-03-06T06-08-10-897Z.ndjson +0 -138
  44. package/dist/output/traces/run-2026-03-06T06-08-55-016Z.ndjson +0 -2235
  45. package/dist/sdk/check-provider-registry-4SHN3GSH.mjs +0 -29
  46. package/dist/sdk/chunk-G5JBWW3O.mjs.map +0 -1
  47. package/dist/sdk/chunk-GOJRNYTV.mjs.map +0 -1
  48. package/dist/sdk/chunk-J236ZVYX.mjs +0 -1502
  49. package/dist/sdk/chunk-J236ZVYX.mjs.map +0 -1
  50. package/dist/sdk/chunk-LDE33FGE.mjs +0 -443
  51. package/dist/sdk/chunk-LDE33FGE.mjs.map +0 -1
  52. package/dist/sdk/chunk-MYROK4LB.mjs +0 -43917
  53. package/dist/sdk/chunk-MYROK4LB.mjs.map +0 -1
  54. package/dist/sdk/chunk-XDIBL7QB.mjs +0 -739
  55. package/dist/sdk/chunk-XDIBL7QB.mjs.map +0 -1
  56. package/dist/sdk/failure-condition-evaluator-M6SIUQF4.mjs +0 -17
  57. package/dist/sdk/github-frontend-MHXL2Q2V.mjs +0 -1368
  58. package/dist/sdk/github-frontend-MHXL2Q2V.mjs.map +0 -1
  59. package/dist/sdk/routing-TGJD66Q5.mjs +0 -25
  60. package/dist/sdk/schedule-tool-C5QN5OQU.mjs +0 -35
  61. package/dist/sdk/schedule-tool-handler-OKZ53WMC.mjs.map +0 -1
  62. package/dist/sdk/schedule-tool-handler-ZUMPNAVY.mjs +0 -39
  63. package/dist/sdk/schedule-tool-handler-ZUMPNAVY.mjs.map +0 -1
  64. package/dist/sdk/trace-helpers-J5CJ4PUN.mjs.map +0 -1
  65. package/dist/sdk/trace-helpers-KFQJ7IAG.mjs +0 -25
  66. package/dist/sdk/trace-helpers-KFQJ7IAG.mjs.map +0 -1
  67. package/dist/sdk/workflow-check-provider-RBYA6ZGU.mjs +0 -29
  68. package/dist/sdk/workflow-check-provider-RBYA6ZGU.mjs.map +0 -1
  69. package/dist/sdk/workflow-check-provider-T6WFK4RB.mjs.map +0 -1
  70. package/dist/sdk/workflow-check-provider-WLUAJPAS.mjs.map +0 -1
  71. package/dist/traces/run-2026-03-06T06-08-10-897Z.ndjson +0 -138
  72. package/dist/traces/run-2026-03-06T06-08-55-016Z.ndjson +0 -2235
  73. /package/dist/sdk/{check-provider-registry-4SHN3GSH.mjs.map → check-provider-registry-PU67PWTU.mjs.map} +0 -0
  74. /package/dist/sdk/{check-provider-registry-6P2KJ423.mjs.map → check-provider-registry-TGPICTHD.mjs.map} +0 -0
  75. /package/dist/sdk/{chunk-EO4IJNM7.mjs.map → chunk-E7NRUDWL.mjs.map} +0 -0
  76. /package/dist/sdk/{chunk-GMHSXC5K.mjs.map → chunk-RV5SK4FZ.mjs.map} +0 -0
  77. /package/dist/sdk/{check-provider-registry-TTVN3V2O.mjs.map → failure-condition-evaluator-GPANOHP2.mjs.map} +0 -0
  78. /package/dist/sdk/{github-frontend-ATORHHF6.mjs.map → github-frontend-P274ISBJ.mjs.map} +0 -0
  79. /package/dist/sdk/{host-JROON6IT.mjs.map → host-AIMRV5YL.mjs.map} +0 -0
  80. /package/dist/sdk/{host-OBXKDFT7.mjs.map → host-QYPOS4R6.mjs.map} +0 -0
  81. /package/dist/sdk/{failure-condition-evaluator-M6SIUQF4.mjs.map → routing-BXHP2E62.mjs.map} +0 -0
  82. /package/dist/sdk/{failure-condition-evaluator-N3VNLWZD.mjs.map → schedule-tool-5FVFYH2A.mjs.map} +0 -0
  83. /package/dist/sdk/{routing-QCDX43XD.mjs.map → schedule-tool-MQHISNJ6.mjs.map} +0 -0
  84. /package/dist/sdk/{routing-TGJD66Q5.mjs.map → schedule-tool-handler-4TCT2P7A.mjs.map} +0 -0
  85. /package/dist/sdk/{schedule-tool-C5QN5OQU.mjs.map → schedule-tool-handler-TZYXM664.mjs.map} +0 -0
  86. /package/dist/sdk/{schedule-tool-D5TSTGP2.mjs.map → trace-helpers-UG6FOWVV.mjs.map} +0 -0
  87. /package/dist/sdk/{schedule-tool-XCGJI2VB.mjs.map → workflow-check-provider-BE2SVYWW.mjs.map} +0 -0
  88. /package/dist/sdk/{schedule-tool-handler-DKHHPZAG.mjs.map → workflow-check-provider-QKHL6AFT.mjs.map} +0 -0
package/dist/sdk/sdk.js CHANGED
@@ -646,7 +646,7 @@ var require_package = __commonJS({
646
646
  "package.json"(exports2, module2) {
647
647
  module2.exports = {
648
648
  name: "@probelabs/visor",
649
- version: "0.1.165",
649
+ version: "0.1.42",
650
650
  main: "dist/index.js",
651
651
  bin: {
652
652
  visor: "./dist/index.js"
@@ -760,7 +760,7 @@ var require_package = __commonJS({
760
760
  "@opentelemetry/sdk-node": "^0.203.0",
761
761
  "@opentelemetry/sdk-trace-base": "^1.30.1",
762
762
  "@opentelemetry/semantic-conventions": "^1.30.1",
763
- "@probelabs/probe": "^0.6.0-rc278",
763
+ "@probelabs/probe": "^0.6.0-rc280",
764
764
  "@types/commander": "^2.12.0",
765
765
  "@types/uuid": "^10.0.0",
766
766
  acorn: "^8.16.0",
@@ -864,11 +864,11 @@ function getTracer() {
864
864
  }
865
865
  async function withActiveSpan(name, attrs, fn) {
866
866
  const tracer = getTracer();
867
- return await new Promise((resolve15, reject) => {
867
+ return await new Promise((resolve19, reject) => {
868
868
  const callback = async (span) => {
869
869
  try {
870
870
  const res = await fn(span);
871
- resolve15(res);
871
+ resolve19(res);
872
872
  } catch (err) {
873
873
  try {
874
874
  if (err instanceof Error) span.recordException(err);
@@ -945,19 +945,19 @@ function __getOrCreateNdjsonPath() {
945
945
  try {
946
946
  if (process.env.VISOR_TELEMETRY_SINK && process.env.VISOR_TELEMETRY_SINK !== "file")
947
947
  return null;
948
- const path27 = require("path");
949
- const fs23 = require("fs");
948
+ const path31 = require("path");
949
+ const fs27 = require("fs");
950
950
  if (process.env.VISOR_FALLBACK_TRACE_FILE) {
951
951
  __ndjsonPath = process.env.VISOR_FALLBACK_TRACE_FILE;
952
- const dir = path27.dirname(__ndjsonPath);
953
- if (!fs23.existsSync(dir)) fs23.mkdirSync(dir, { recursive: true });
952
+ const dir = path31.dirname(__ndjsonPath);
953
+ if (!fs27.existsSync(dir)) fs27.mkdirSync(dir, { recursive: true });
954
954
  return __ndjsonPath;
955
955
  }
956
- const outDir = process.env.VISOR_TRACE_DIR || path27.join(process.cwd(), "output", "traces");
957
- if (!fs23.existsSync(outDir)) fs23.mkdirSync(outDir, { recursive: true });
956
+ const outDir = process.env.VISOR_TRACE_DIR || path31.join(process.cwd(), "output", "traces");
957
+ if (!fs27.existsSync(outDir)) fs27.mkdirSync(outDir, { recursive: true });
958
958
  if (!__ndjsonPath) {
959
959
  const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
960
- __ndjsonPath = path27.join(outDir, `${ts}.ndjson`);
960
+ __ndjsonPath = path31.join(outDir, `${ts}.ndjson`);
961
961
  }
962
962
  return __ndjsonPath;
963
963
  } catch {
@@ -966,11 +966,11 @@ function __getOrCreateNdjsonPath() {
966
966
  }
967
967
  function _appendRunMarker() {
968
968
  try {
969
- const fs23 = require("fs");
969
+ const fs27 = require("fs");
970
970
  const p = __getOrCreateNdjsonPath();
971
971
  if (!p) return;
972
972
  const line = { name: "visor.run", attributes: { started: true } };
973
- fs23.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
973
+ fs27.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
974
974
  } catch {
975
975
  }
976
976
  }
@@ -3193,7 +3193,7 @@ var init_failure_condition_evaluator = __esm({
3193
3193
  */
3194
3194
  evaluateExpression(condition, context2) {
3195
3195
  try {
3196
- const normalize4 = (expr) => {
3196
+ const normalize8 = (expr) => {
3197
3197
  const trimmed = expr.trim();
3198
3198
  if (!/[\n;]/.test(trimmed)) return trimmed;
3199
3199
  const parts = trimmed.split(/[\n;]+/).map((s) => s.trim()).filter((s) => s.length > 0 && !s.startsWith("//"));
@@ -3351,7 +3351,7 @@ var init_failure_condition_evaluator = __esm({
3351
3351
  try {
3352
3352
  exec2 = this.sandbox.compile(`return (${raw});`);
3353
3353
  } catch {
3354
- const normalizedExpr = normalize4(condition);
3354
+ const normalizedExpr = normalize8(condition);
3355
3355
  exec2 = this.sandbox.compile(`return (${normalizedExpr});`);
3356
3356
  }
3357
3357
  const result = exec2(scope).run();
@@ -3734,9 +3734,9 @@ function configureLiquidWithExtensions(liquid) {
3734
3734
  });
3735
3735
  liquid.registerFilter("get", (obj, pathExpr) => {
3736
3736
  if (obj == null) return void 0;
3737
- const path27 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
3738
- if (!path27) return obj;
3739
- const parts = path27.split(".");
3737
+ const path31 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
3738
+ if (!path31) return obj;
3739
+ const parts = path31.split(".");
3740
3740
  let cur = obj;
3741
3741
  for (const p of parts) {
3742
3742
  if (cur == null) return void 0;
@@ -3855,9 +3855,9 @@ function configureLiquidWithExtensions(liquid) {
3855
3855
  }
3856
3856
  }
3857
3857
  const defaultRole = typeof rolesCfg.default === "string" && rolesCfg.default.trim() ? rolesCfg.default.trim() : void 0;
3858
- const getNested = (obj, path27) => {
3859
- if (!obj || !path27) return void 0;
3860
- const parts = path27.split(".");
3858
+ const getNested = (obj, path31) => {
3859
+ if (!obj || !path31) return void 0;
3860
+ const parts = path31.split(".");
3861
3861
  let cur = obj;
3862
3862
  for (const p of parts) {
3863
3863
  if (cur == null) return void 0;
@@ -6409,8 +6409,8 @@ var init_dependency_gating = __esm({
6409
6409
  async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
6410
6410
  try {
6411
6411
  const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
6412
- const fs23 = await import("fs/promises");
6413
- const path27 = await import("path");
6412
+ const fs27 = await import("fs/promises");
6413
+ const path31 = await import("path");
6414
6414
  const schemaRaw = checkConfig.schema || "plain";
6415
6415
  const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
6416
6416
  let templateContent;
@@ -6418,24 +6418,24 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
6418
6418
  templateContent = String(checkConfig.template.content);
6419
6419
  } else if (checkConfig.template && checkConfig.template.file) {
6420
6420
  const file = String(checkConfig.template.file);
6421
- const resolved = path27.resolve(process.cwd(), file);
6422
- templateContent = await fs23.readFile(resolved, "utf-8");
6421
+ const resolved = path31.resolve(process.cwd(), file);
6422
+ templateContent = await fs27.readFile(resolved, "utf-8");
6423
6423
  } else if (schema && schema !== "plain") {
6424
6424
  const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
6425
6425
  if (sanitized) {
6426
6426
  const candidatePaths = [
6427
- path27.join(__dirname, "output", sanitized, "template.liquid"),
6427
+ path31.join(__dirname, "output", sanitized, "template.liquid"),
6428
6428
  // bundled: dist/output/
6429
- path27.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
6429
+ path31.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
6430
6430
  // source: output/
6431
- path27.join(process.cwd(), "output", sanitized, "template.liquid"),
6431
+ path31.join(process.cwd(), "output", sanitized, "template.liquid"),
6432
6432
  // fallback: cwd/output/
6433
- path27.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
6433
+ path31.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
6434
6434
  // fallback: cwd/dist/output/
6435
6435
  ];
6436
6436
  for (const p of candidatePaths) {
6437
6437
  try {
6438
- templateContent = await fs23.readFile(p, "utf-8");
6438
+ templateContent = await fs27.readFile(p, "utf-8");
6439
6439
  if (templateContent) break;
6440
6440
  } catch {
6441
6441
  }
@@ -6840,7 +6840,7 @@ async function processDiffWithOutline(diffContent) {
6840
6840
  }
6841
6841
  try {
6842
6842
  const originalProbePath = process.env.PROBE_PATH;
6843
- const fs23 = require("fs");
6843
+ const fs27 = require("fs");
6844
6844
  const possiblePaths = [
6845
6845
  // Relative to current working directory (most common in production)
6846
6846
  path6.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
@@ -6851,7 +6851,7 @@ async function processDiffWithOutline(diffContent) {
6851
6851
  ];
6852
6852
  let probeBinaryPath;
6853
6853
  for (const candidatePath of possiblePaths) {
6854
- if (fs23.existsSync(candidatePath)) {
6854
+ if (fs27.existsSync(candidatePath)) {
6855
6855
  probeBinaryPath = candidatePath;
6856
6856
  break;
6857
6857
  }
@@ -6958,7 +6958,7 @@ async function renderMermaidToPng(mermaidCode) {
6958
6958
  if (chromiumPath) {
6959
6959
  env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;
6960
6960
  }
6961
- const result = await new Promise((resolve15) => {
6961
+ const result = await new Promise((resolve19) => {
6962
6962
  const proc = (0, import_child_process.spawn)(
6963
6963
  "npx",
6964
6964
  [
@@ -6988,13 +6988,13 @@ async function renderMermaidToPng(mermaidCode) {
6988
6988
  });
6989
6989
  proc.on("close", (code) => {
6990
6990
  if (code === 0) {
6991
- resolve15({ success: true });
6991
+ resolve19({ success: true });
6992
6992
  } else {
6993
- resolve15({ success: false, error: stderr || `Exit code ${code}` });
6993
+ resolve19({ success: false, error: stderr || `Exit code ${code}` });
6994
6994
  }
6995
6995
  });
6996
6996
  proc.on("error", (err) => {
6997
- resolve15({ success: false, error: err.message });
6997
+ resolve19({ success: false, error: err.message });
6998
6998
  });
6999
6999
  });
7000
7000
  if (!result.success) {
@@ -8156,8 +8156,8 @@ ${schemaString}`);
8156
8156
  }
8157
8157
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8158
8158
  try {
8159
- const fs23 = require("fs");
8160
- const path27 = require("path");
8159
+ const fs27 = require("fs");
8160
+ const path31 = require("path");
8161
8161
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8162
8162
  const provider = this.config.provider || "auto";
8163
8163
  const model = this.config.model || "default";
@@ -8271,20 +8271,20 @@ ${"=".repeat(60)}
8271
8271
  `;
8272
8272
  readableVersion += `${"=".repeat(60)}
8273
8273
  `;
8274
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8275
- if (!fs23.existsSync(debugArtifactsDir)) {
8276
- fs23.mkdirSync(debugArtifactsDir, { recursive: true });
8274
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8275
+ if (!fs27.existsSync(debugArtifactsDir)) {
8276
+ fs27.mkdirSync(debugArtifactsDir, { recursive: true });
8277
8277
  }
8278
- const debugFile = path27.join(
8278
+ const debugFile = path31.join(
8279
8279
  debugArtifactsDir,
8280
8280
  `prompt-${_checkName || "unknown"}-${timestamp}.json`
8281
8281
  );
8282
- fs23.writeFileSync(debugFile, debugJson, "utf-8");
8283
- const readableFile = path27.join(
8282
+ fs27.writeFileSync(debugFile, debugJson, "utf-8");
8283
+ const readableFile = path31.join(
8284
8284
  debugArtifactsDir,
8285
8285
  `prompt-${_checkName || "unknown"}-${timestamp}.txt`
8286
8286
  );
8287
- fs23.writeFileSync(readableFile, readableVersion, "utf-8");
8287
+ fs27.writeFileSync(readableFile, readableVersion, "utf-8");
8288
8288
  log(`
8289
8289
  \u{1F4BE} Full debug info saved to:`);
8290
8290
  log(` JSON: ${debugFile}`);
@@ -8317,8 +8317,8 @@ ${"=".repeat(60)}
8317
8317
  log(`\u{1F4E4} Response length: ${response.length} characters`);
8318
8318
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8319
8319
  try {
8320
- const fs23 = require("fs");
8321
- const path27 = require("path");
8320
+ const fs27 = require("fs");
8321
+ const path31 = require("path");
8322
8322
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8323
8323
  const agentAny2 = agent;
8324
8324
  let fullHistory = [];
@@ -8329,8 +8329,8 @@ ${"=".repeat(60)}
8329
8329
  } else if (agentAny2._messages) {
8330
8330
  fullHistory = agentAny2._messages;
8331
8331
  }
8332
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8333
- const sessionBase = path27.join(
8332
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8333
+ const sessionBase = path31.join(
8334
8334
  debugArtifactsDir,
8335
8335
  `session-${_checkName || "unknown"}-${timestamp}`
8336
8336
  );
@@ -8342,7 +8342,7 @@ ${"=".repeat(60)}
8342
8342
  schema: effectiveSchema,
8343
8343
  totalMessages: fullHistory.length
8344
8344
  };
8345
- fs23.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8345
+ fs27.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8346
8346
  let readable = `=============================================================
8347
8347
  `;
8348
8348
  readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
@@ -8369,7 +8369,7 @@ ${"=".repeat(60)}
8369
8369
  `;
8370
8370
  readable += content + "\n";
8371
8371
  });
8372
- fs23.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8372
+ fs27.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8373
8373
  log(`\u{1F4BE} Complete session history saved:`);
8374
8374
  log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
8375
8375
  } catch (error) {
@@ -8378,11 +8378,11 @@ ${"=".repeat(60)}
8378
8378
  }
8379
8379
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8380
8380
  try {
8381
- const fs23 = require("fs");
8382
- const path27 = require("path");
8381
+ const fs27 = require("fs");
8382
+ const path31 = require("path");
8383
8383
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8384
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8385
- const responseFile = path27.join(
8384
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8385
+ const responseFile = path31.join(
8386
8386
  debugArtifactsDir,
8387
8387
  `response-${_checkName || "unknown"}-${timestamp}.txt`
8388
8388
  );
@@ -8415,7 +8415,7 @@ ${"=".repeat(60)}
8415
8415
  `;
8416
8416
  responseContent += `${"=".repeat(60)}
8417
8417
  `;
8418
- fs23.writeFileSync(responseFile, responseContent, "utf-8");
8418
+ fs27.writeFileSync(responseFile, responseContent, "utf-8");
8419
8419
  log(`\u{1F4BE} Response saved to: ${responseFile}`);
8420
8420
  } catch (error) {
8421
8421
  log(`\u26A0\uFE0F Could not save response file: ${error}`);
@@ -8431,9 +8431,9 @@ ${"=".repeat(60)}
8431
8431
  await agentAny._telemetryConfig.shutdown();
8432
8432
  log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
8433
8433
  if (process.env.GITHUB_ACTIONS) {
8434
- const fs23 = require("fs");
8435
- if (fs23.existsSync(agentAny._traceFilePath)) {
8436
- const stats = fs23.statSync(agentAny._traceFilePath);
8434
+ const fs27 = require("fs");
8435
+ if (fs27.existsSync(agentAny._traceFilePath)) {
8436
+ const stats = fs27.statSync(agentAny._traceFilePath);
8437
8437
  console.log(
8438
8438
  `::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
8439
8439
  );
@@ -8646,9 +8646,9 @@ ${schemaString}`);
8646
8646
  const model = this.config.model || "default";
8647
8647
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8648
8648
  try {
8649
- const fs23 = require("fs");
8650
- const path27 = require("path");
8651
- const os2 = require("os");
8649
+ const fs27 = require("fs");
8650
+ const path31 = require("path");
8651
+ const os3 = require("os");
8652
8652
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8653
8653
  const debugData = {
8654
8654
  timestamp,
@@ -8720,19 +8720,19 @@ ${"=".repeat(60)}
8720
8720
  `;
8721
8721
  readableVersion += `${"=".repeat(60)}
8722
8722
  `;
8723
- const tempDir = os2.tmpdir();
8724
- const promptFile = path27.join(tempDir, `visor-prompt-${timestamp}.txt`);
8725
- fs23.writeFileSync(promptFile, prompt, "utf-8");
8723
+ const tempDir = os3.tmpdir();
8724
+ const promptFile = path31.join(tempDir, `visor-prompt-${timestamp}.txt`);
8725
+ fs27.writeFileSync(promptFile, prompt, "utf-8");
8726
8726
  log(`
8727
8727
  \u{1F4BE} Prompt saved to: ${promptFile}`);
8728
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8728
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8729
8729
  try {
8730
- const base = path27.join(
8730
+ const base = path31.join(
8731
8731
  debugArtifactsDir,
8732
8732
  `prompt-${_checkName || "unknown"}-${timestamp}`
8733
8733
  );
8734
- fs23.writeFileSync(base + ".json", debugJson, "utf-8");
8735
- fs23.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
8734
+ fs27.writeFileSync(base + ".json", debugJson, "utf-8");
8735
+ fs27.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
8736
8736
  log(`
8737
8737
  \u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
8738
8738
  } catch {
@@ -8777,8 +8777,8 @@ $ ${cliCommand}
8777
8777
  log(`\u{1F4E4} Response length: ${response.length} characters`);
8778
8778
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8779
8779
  try {
8780
- const fs23 = require("fs");
8781
- const path27 = require("path");
8780
+ const fs27 = require("fs");
8781
+ const path31 = require("path");
8782
8782
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8783
8783
  const agentAny = agent;
8784
8784
  let fullHistory = [];
@@ -8789,8 +8789,8 @@ $ ${cliCommand}
8789
8789
  } else if (agentAny._messages) {
8790
8790
  fullHistory = agentAny._messages;
8791
8791
  }
8792
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8793
- const sessionBase = path27.join(
8792
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8793
+ const sessionBase = path31.join(
8794
8794
  debugArtifactsDir,
8795
8795
  `session-${_checkName || "unknown"}-${timestamp}`
8796
8796
  );
@@ -8802,7 +8802,7 @@ $ ${cliCommand}
8802
8802
  schema: effectiveSchema,
8803
8803
  totalMessages: fullHistory.length
8804
8804
  };
8805
- fs23.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8805
+ fs27.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8806
8806
  let readable = `=============================================================
8807
8807
  `;
8808
8808
  readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
@@ -8829,7 +8829,7 @@ ${"=".repeat(60)}
8829
8829
  `;
8830
8830
  readable += content + "\n";
8831
8831
  });
8832
- fs23.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8832
+ fs27.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8833
8833
  log(`\u{1F4BE} Complete session history saved:`);
8834
8834
  log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
8835
8835
  } catch (error) {
@@ -8838,11 +8838,11 @@ ${"=".repeat(60)}
8838
8838
  }
8839
8839
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8840
8840
  try {
8841
- const fs23 = require("fs");
8842
- const path27 = require("path");
8841
+ const fs27 = require("fs");
8842
+ const path31 = require("path");
8843
8843
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8844
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
8845
- const responseFile = path27.join(
8844
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
8845
+ const responseFile = path31.join(
8846
8846
  debugArtifactsDir,
8847
8847
  `response-${_checkName || "unknown"}-${timestamp}.txt`
8848
8848
  );
@@ -8875,7 +8875,7 @@ ${"=".repeat(60)}
8875
8875
  `;
8876
8876
  responseContent += `${"=".repeat(60)}
8877
8877
  `;
8878
- fs23.writeFileSync(responseFile, responseContent, "utf-8");
8878
+ fs27.writeFileSync(responseFile, responseContent, "utf-8");
8879
8879
  log(`\u{1F4BE} Response saved to: ${responseFile}`);
8880
8880
  } catch (error) {
8881
8881
  log(`\u26A0\uFE0F Could not save response file: ${error}`);
@@ -8893,9 +8893,9 @@ ${"=".repeat(60)}
8893
8893
  await telemetry.shutdown();
8894
8894
  log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
8895
8895
  if (process.env.GITHUB_ACTIONS) {
8896
- const fs23 = require("fs");
8897
- if (fs23.existsSync(traceFilePath)) {
8898
- const stats = fs23.statSync(traceFilePath);
8896
+ const fs27 = require("fs");
8897
+ if (fs27.existsSync(traceFilePath)) {
8898
+ const stats = fs27.statSync(traceFilePath);
8899
8899
  console.log(
8900
8900
  `::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
8901
8901
  );
@@ -8933,8 +8933,8 @@ ${"=".repeat(60)}
8933
8933
  * Load schema content from schema files or inline definitions
8934
8934
  */
8935
8935
  async loadSchemaContent(schema) {
8936
- const fs23 = require("fs").promises;
8937
- const path27 = require("path");
8936
+ const fs27 = require("fs").promises;
8937
+ const path31 = require("path");
8938
8938
  if (typeof schema === "object" && schema !== null) {
8939
8939
  log("\u{1F4CB} Using inline schema object from configuration");
8940
8940
  return JSON.stringify(schema);
@@ -8947,14 +8947,14 @@ ${"=".repeat(60)}
8947
8947
  }
8948
8948
  } catch {
8949
8949
  }
8950
- if ((schema.startsWith("./") || schema.includes(".json")) && !path27.isAbsolute(schema)) {
8950
+ if ((schema.startsWith("./") || schema.includes(".json")) && !path31.isAbsolute(schema)) {
8951
8951
  if (schema.includes("..") || schema.includes("\0")) {
8952
8952
  throw new Error("Invalid schema path: path traversal not allowed");
8953
8953
  }
8954
8954
  try {
8955
- const schemaPath = path27.resolve(process.cwd(), schema);
8955
+ const schemaPath = path31.resolve(process.cwd(), schema);
8956
8956
  log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
8957
- const schemaContent = await fs23.readFile(schemaPath, "utf-8");
8957
+ const schemaContent = await fs27.readFile(schemaPath, "utf-8");
8958
8958
  return schemaContent.trim();
8959
8959
  } catch (error) {
8960
8960
  throw new Error(
@@ -8968,22 +8968,22 @@ ${"=".repeat(60)}
8968
8968
  }
8969
8969
  const candidatePaths = [
8970
8970
  // GitHub Action bundle location
8971
- path27.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
8971
+ path31.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
8972
8972
  // Historical fallback when src/output was inadvertently bundled as output1/
8973
- path27.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
8973
+ path31.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
8974
8974
  // Local dev (repo root)
8975
- path27.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
8975
+ path31.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
8976
8976
  ];
8977
8977
  for (const schemaPath of candidatePaths) {
8978
8978
  try {
8979
- const schemaContent = await fs23.readFile(schemaPath, "utf-8");
8979
+ const schemaContent = await fs27.readFile(schemaPath, "utf-8");
8980
8980
  return schemaContent.trim();
8981
8981
  } catch {
8982
8982
  }
8983
8983
  }
8984
- const distPath = path27.join(__dirname, "output", sanitizedSchemaName, "schema.json");
8985
- const distAltPath = path27.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
8986
- const cwdPath = path27.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
8984
+ const distPath = path31.join(__dirname, "output", sanitizedSchemaName, "schema.json");
8985
+ const distAltPath = path31.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
8986
+ const cwdPath = path31.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
8987
8987
  throw new Error(
8988
8988
  `Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
8989
8989
  );
@@ -9228,7 +9228,7 @@ ${"=".repeat(60)}
9228
9228
  * Generate mock response for testing
9229
9229
  */
9230
9230
  async generateMockResponse(_prompt, _checkName, _schema) {
9231
- await new Promise((resolve15) => setTimeout(resolve15, 500));
9231
+ await new Promise((resolve19) => setTimeout(resolve19, 500));
9232
9232
  const name = (_checkName || "").toLowerCase();
9233
9233
  if (name.includes("extract-facts")) {
9234
9234
  const arr = Array.from({ length: 6 }, (_, i) => ({
@@ -9589,7 +9589,7 @@ var init_command_executor = __esm({
9589
9589
  * Execute command with stdin input
9590
9590
  */
9591
9591
  executeWithStdin(command, options) {
9592
- return new Promise((resolve15, reject) => {
9592
+ return new Promise((resolve19, reject) => {
9593
9593
  const childProcess = (0, import_child_process2.exec)(
9594
9594
  command,
9595
9595
  {
@@ -9601,7 +9601,7 @@ var init_command_executor = __esm({
9601
9601
  if (error && error.killed && (error.code === "ETIMEDOUT" || error.signal === "SIGTERM")) {
9602
9602
  reject(new Error(`Command timed out after ${options.timeout || 3e4}ms`));
9603
9603
  } else {
9604
- resolve15({
9604
+ resolve19({
9605
9605
  stdout: stdout || "",
9606
9606
  stderr: stderr || "",
9607
9607
  exitCode: error ? error.code || 1 : 0
@@ -17704,17 +17704,17 @@ var init_workflow_check_provider = __esm({
17704
17704
  * so it can be executed by the state machine as a nested workflow.
17705
17705
  */
17706
17706
  async loadWorkflowFromConfigPath(sourcePath, baseDir) {
17707
- const path27 = require("path");
17708
- const fs23 = require("fs");
17707
+ const path31 = require("path");
17708
+ const fs27 = require("fs");
17709
17709
  const yaml5 = require("js-yaml");
17710
- const resolved = path27.isAbsolute(sourcePath) ? sourcePath : path27.resolve(baseDir, sourcePath);
17711
- if (!fs23.existsSync(resolved)) {
17710
+ const resolved = path31.isAbsolute(sourcePath) ? sourcePath : path31.resolve(baseDir, sourcePath);
17711
+ if (!fs27.existsSync(resolved)) {
17712
17712
  throw new Error(`Workflow config not found at: ${resolved}`);
17713
17713
  }
17714
- const rawContent = fs23.readFileSync(resolved, "utf8");
17714
+ const rawContent = fs27.readFileSync(resolved, "utf8");
17715
17715
  const rawData = yaml5.load(rawContent);
17716
17716
  if (rawData.imports && Array.isArray(rawData.imports)) {
17717
- const configDir = path27.dirname(resolved);
17717
+ const configDir = path31.dirname(resolved);
17718
17718
  for (const source of rawData.imports) {
17719
17719
  const results = await this.registry.import(source, {
17720
17720
  basePath: configDir,
@@ -17744,8 +17744,8 @@ ${errors}`);
17744
17744
  if (!steps || Object.keys(steps).length === 0) {
17745
17745
  throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
17746
17746
  }
17747
- const id = path27.basename(resolved).replace(/\.(ya?ml)$/i, "");
17748
- const name = loaded.name || `Workflow from ${path27.basename(resolved)}`;
17747
+ const id = path31.basename(resolved).replace(/\.(ya?ml)$/i, "");
17748
+ const name = loaded.name || `Workflow from ${path31.basename(resolved)}`;
17749
17749
  const workflowDef = {
17750
17750
  id,
17751
17751
  name,
@@ -18551,8 +18551,8 @@ async function createStoreBackend(storageConfig, haConfig) {
18551
18551
  case "mssql": {
18552
18552
  try {
18553
18553
  const loaderPath = "../../enterprise/loader";
18554
- const { loadEnterpriseStoreBackend } = await import(loaderPath);
18555
- return await loadEnterpriseStoreBackend(driver, storageConfig, haConfig);
18554
+ const { loadEnterpriseStoreBackend: loadEnterpriseStoreBackend2 } = await import(loaderPath);
18555
+ return await loadEnterpriseStoreBackend2(driver, storageConfig, haConfig);
18556
18556
  } catch (err) {
18557
18557
  const msg = err instanceof Error ? err.message : String(err);
18558
18558
  logger.error(`[StoreFactory] Failed to load enterprise ${driver} backend: ${msg}`);
@@ -21123,7 +21123,7 @@ var init_mcp_custom_sse_server = __esm({
21123
21123
  * Returns the actual bound port number
21124
21124
  */
21125
21125
  async start() {
21126
- return new Promise((resolve15, reject) => {
21126
+ return new Promise((resolve19, reject) => {
21127
21127
  try {
21128
21128
  this.server = import_http.default.createServer((req, res) => {
21129
21129
  this.handleRequest(req, res).catch((error) => {
@@ -21157,7 +21157,7 @@ var init_mcp_custom_sse_server = __esm({
21157
21157
  );
21158
21158
  }
21159
21159
  this.startKeepalive();
21160
- resolve15(this.port);
21160
+ resolve19(this.port);
21161
21161
  });
21162
21162
  } catch (error) {
21163
21163
  reject(error);
@@ -21220,7 +21220,7 @@ var init_mcp_custom_sse_server = __esm({
21220
21220
  logger.debug(
21221
21221
  `[CustomToolsSSEServer:${this.sessionId}] Grace period before stop: ${waitMs}ms (activeToolCalls=${this.activeToolCalls})`
21222
21222
  );
21223
- await new Promise((resolve15) => setTimeout(resolve15, waitMs));
21223
+ await new Promise((resolve19) => setTimeout(resolve19, waitMs));
21224
21224
  }
21225
21225
  }
21226
21226
  if (this.activeToolCalls > 0) {
@@ -21229,7 +21229,7 @@ var init_mcp_custom_sse_server = __esm({
21229
21229
  `[CustomToolsSSEServer:${this.sessionId}] Waiting for ${this.activeToolCalls} active tool call(s) before stop`
21230
21230
  );
21231
21231
  while (this.activeToolCalls > 0 && Date.now() - startedAt < effectiveDrainTimeoutMs) {
21232
- await new Promise((resolve15) => setTimeout(resolve15, 250));
21232
+ await new Promise((resolve19) => setTimeout(resolve19, 250));
21233
21233
  }
21234
21234
  if (this.activeToolCalls > 0) {
21235
21235
  logger.warn(
@@ -21254,21 +21254,21 @@ var init_mcp_custom_sse_server = __esm({
21254
21254
  }
21255
21255
  this.connections.clear();
21256
21256
  if (this.server) {
21257
- await new Promise((resolve15, reject) => {
21257
+ await new Promise((resolve19, reject) => {
21258
21258
  const timeout = setTimeout(() => {
21259
21259
  if (this.debug) {
21260
21260
  logger.debug(
21261
21261
  `[CustomToolsSSEServer:${this.sessionId}] Force closing server after timeout`
21262
21262
  );
21263
21263
  }
21264
- this.server?.close(() => resolve15());
21264
+ this.server?.close(() => resolve19());
21265
21265
  }, 5e3);
21266
21266
  this.server.close((error) => {
21267
21267
  clearTimeout(timeout);
21268
21268
  if (error) {
21269
21269
  reject(error);
21270
21270
  } else {
21271
- resolve15();
21271
+ resolve19();
21272
21272
  }
21273
21273
  });
21274
21274
  });
@@ -21703,7 +21703,7 @@ var init_mcp_custom_sse_server = __esm({
21703
21703
  logger.warn(
21704
21704
  `[CustomToolsSSEServer:${this.sessionId}] Tool ${toolName} failed (attempt ${attempt + 1}/${retryCount + 1}): ${errorMsg}. Retrying in ${delay}ms`
21705
21705
  );
21706
- await new Promise((resolve15) => setTimeout(resolve15, delay));
21706
+ await new Promise((resolve19) => setTimeout(resolve19, delay));
21707
21707
  attempt++;
21708
21708
  }
21709
21709
  }
@@ -22016,9 +22016,9 @@ var init_ai_check_provider = __esm({
22016
22016
  } else {
22017
22017
  resolvedPath = import_path7.default.resolve(process.cwd(), str);
22018
22018
  }
22019
- const fs23 = require("fs").promises;
22019
+ const fs27 = require("fs").promises;
22020
22020
  try {
22021
- const stat2 = await fs23.stat(resolvedPath);
22021
+ const stat2 = await fs27.stat(resolvedPath);
22022
22022
  return stat2.isFile();
22023
22023
  } catch {
22024
22024
  return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
@@ -24114,6 +24114,112 @@ var init_template_context = __esm({
24114
24114
  }
24115
24115
  });
24116
24116
 
24117
+ // src/utils/oauth2-token-cache.ts
24118
+ var OAuth2TokenCache;
24119
+ var init_oauth2_token_cache = __esm({
24120
+ "src/utils/oauth2-token-cache.ts"() {
24121
+ "use strict";
24122
+ init_logger();
24123
+ init_env_resolver();
24124
+ OAuth2TokenCache = class _OAuth2TokenCache {
24125
+ static instance;
24126
+ cache = /* @__PURE__ */ new Map();
24127
+ static getInstance() {
24128
+ if (!_OAuth2TokenCache.instance) {
24129
+ _OAuth2TokenCache.instance = new _OAuth2TokenCache();
24130
+ }
24131
+ return _OAuth2TokenCache.instance;
24132
+ }
24133
+ /** Visible for testing */
24134
+ static resetInstance() {
24135
+ _OAuth2TokenCache.instance = void 0;
24136
+ }
24137
+ /**
24138
+ * Get a valid Bearer token for the given config.
24139
+ * Returns a cached token if still valid, otherwise fetches a new one.
24140
+ */
24141
+ async getToken(config) {
24142
+ const clientId = String(EnvironmentResolver.resolveValue(config.client_id));
24143
+ const clientSecret = String(EnvironmentResolver.resolveValue(config.client_secret));
24144
+ const tokenUrl = String(EnvironmentResolver.resolveValue(config.token_url));
24145
+ const bufferMs = (config.token_ttl_buffer ?? 300) * 1e3;
24146
+ const cacheKey = `${tokenUrl}|${clientId}`;
24147
+ const cached = this.cache.get(cacheKey);
24148
+ if (cached && cached.expires_at - bufferMs > Date.now()) {
24149
+ logger.verbose("[oauth2] Using cached token");
24150
+ return cached.access_token;
24151
+ }
24152
+ if (cached?.refreshPromise) {
24153
+ logger.verbose("[oauth2] Awaiting in-flight token refresh");
24154
+ return cached.refreshPromise;
24155
+ }
24156
+ const refreshPromise = this.fetchToken(tokenUrl, clientId, clientSecret, config.scopes);
24157
+ if (cached) {
24158
+ cached.refreshPromise = refreshPromise;
24159
+ } else {
24160
+ this.cache.set(cacheKey, {
24161
+ access_token: "",
24162
+ expires_at: 0,
24163
+ refreshPromise
24164
+ });
24165
+ }
24166
+ try {
24167
+ const token = await refreshPromise;
24168
+ return token;
24169
+ } finally {
24170
+ const entry = this.cache.get(cacheKey);
24171
+ if (entry) {
24172
+ entry.refreshPromise = void 0;
24173
+ }
24174
+ }
24175
+ }
24176
+ async fetchToken(tokenUrl, clientId, clientSecret, scopes) {
24177
+ logger.verbose(`[oauth2] Fetching token from ${tokenUrl}`);
24178
+ const credentials = Buffer.from(`${clientId}:${clientSecret}`).toString("base64");
24179
+ const bodyParams = new URLSearchParams({ grant_type: "client_credentials" });
24180
+ if (scopes?.length) {
24181
+ bodyParams.set("scope", scopes.join(" "));
24182
+ }
24183
+ const response = await fetch(tokenUrl, {
24184
+ method: "POST",
24185
+ headers: {
24186
+ "Content-Type": "application/x-www-form-urlencoded",
24187
+ Authorization: `Basic ${credentials}`
24188
+ },
24189
+ body: bodyParams.toString()
24190
+ });
24191
+ if (!response.ok) {
24192
+ let errorDetail = "";
24193
+ try {
24194
+ errorDetail = await response.text();
24195
+ } catch {
24196
+ }
24197
+ throw new Error(
24198
+ `OAuth2 token request failed: HTTP ${response.status} ${response.statusText}${errorDetail ? ` - ${errorDetail.substring(0, 200)}` : ""}`
24199
+ );
24200
+ }
24201
+ const data = await response.json();
24202
+ if (!data.access_token) {
24203
+ throw new Error("OAuth2 token response missing access_token");
24204
+ }
24205
+ const expiresIn = data.expires_in ?? 3600;
24206
+ const expiresAt = Date.now() + expiresIn * 1e3;
24207
+ const cacheKey = `${tokenUrl}|${clientId}`;
24208
+ this.cache.set(cacheKey, {
24209
+ access_token: data.access_token,
24210
+ expires_at: expiresAt
24211
+ });
24212
+ logger.verbose(`[oauth2] Token acquired, expires in ${expiresIn}s`);
24213
+ return data.access_token;
24214
+ }
24215
+ /** Clear all cached tokens (for testing or credential rotation) */
24216
+ clear() {
24217
+ this.cache.clear();
24218
+ }
24219
+ };
24220
+ }
24221
+ });
24222
+
24117
24223
  // src/providers/http-client-provider.ts
24118
24224
  var fs15, path18, HttpClientProvider;
24119
24225
  var init_http_client_provider = __esm({
@@ -24124,6 +24230,7 @@ var init_http_client_provider = __esm({
24124
24230
  init_env_resolver();
24125
24231
  init_sandbox();
24126
24232
  init_template_context();
24233
+ init_oauth2_token_cache();
24127
24234
  init_logger();
24128
24235
  fs15 = __toESM(require("fs"));
24129
24236
  path18 = __toESM(require("path"));
@@ -24151,24 +24258,43 @@ var init_http_client_provider = __esm({
24151
24258
  if (cfg.type !== "http_client") {
24152
24259
  return false;
24153
24260
  }
24154
- if (typeof cfg.url !== "string" || !cfg.url) {
24261
+ const hasUrl = typeof cfg.url === "string" && cfg.url;
24262
+ const hasBaseUrl = typeof cfg.base_url === "string" && cfg.base_url;
24263
+ if (!hasUrl && !hasBaseUrl) {
24155
24264
  return false;
24156
24265
  }
24157
24266
  try {
24158
- new URL(cfg.url);
24267
+ new URL(hasUrl ? cfg.url : cfg.base_url);
24159
24268
  return true;
24160
24269
  } catch {
24161
24270
  return false;
24162
24271
  }
24163
24272
  }
24164
24273
  async execute(prInfo, config, dependencyResults, context2) {
24165
- const url = config.url;
24274
+ const baseUrl = config.base_url;
24275
+ const rawPath = config.path;
24276
+ const pathParams = config.params || {};
24277
+ const queryParams = config.query || {};
24278
+ const authConfig = config.auth;
24279
+ let url;
24280
+ if (baseUrl && rawPath) {
24281
+ let resolvedPath = rawPath;
24282
+ for (const [key, value] of Object.entries(pathParams)) {
24283
+ resolvedPath = resolvedPath.replace(`{${key}}`, encodeURIComponent(value));
24284
+ }
24285
+ url = `${baseUrl.replace(/\/+$/, "")}/${resolvedPath.replace(/^\/+/, "")}`;
24286
+ if (Object.keys(queryParams).length > 0) {
24287
+ const qs = new URLSearchParams(queryParams).toString();
24288
+ url += `${url.includes("?") ? "&" : "?"}${qs}`;
24289
+ }
24290
+ } else {
24291
+ url = config.url;
24292
+ }
24166
24293
  const method = config.method || "GET";
24167
24294
  const headers = config.headers || {};
24168
24295
  const timeout = config.timeout || 3e4;
24169
24296
  const transform = config.transform;
24170
24297
  const transformJs = config.transform_js;
24171
- const bodyTemplate = config.body;
24172
24298
  const outputFileTemplate = config.output_file;
24173
24299
  const skipIfExists = config.skip_if_exists !== false;
24174
24300
  let resolvedUrlForErrors = url;
@@ -24192,7 +24318,11 @@ var init_http_client_provider = __esm({
24192
24318
  resolvedUrlForErrors = renderedUrl;
24193
24319
  }
24194
24320
  let requestBody;
24195
- if (bodyTemplate) {
24321
+ const rawBody = config.body;
24322
+ const bodyTemplate = typeof rawBody === "string" ? rawBody : void 0;
24323
+ if (rawBody && typeof rawBody === "object") {
24324
+ requestBody = JSON.stringify(rawBody);
24325
+ } else if (bodyTemplate) {
24196
24326
  let resolvedBody = String(EnvironmentResolver.resolveValue(bodyTemplate));
24197
24327
  if (resolvedBody.includes("{{") || resolvedBody.includes("{%")) {
24198
24328
  resolvedBody = await this.liquid.parseAndRender(resolvedBody, templateContext);
@@ -24211,6 +24341,11 @@ var init_http_client_provider = __esm({
24211
24341
  logger.verbose(`[http_client] ${key}: ${maskedValue}`);
24212
24342
  }
24213
24343
  }
24344
+ if (authConfig?.type === "oauth2_client_credentials") {
24345
+ const tokenCache = OAuth2TokenCache.getInstance();
24346
+ const token = await tokenCache.getToken(authConfig);
24347
+ resolvedHeaders["Authorization"] = `Bearer ${token}`;
24348
+ }
24214
24349
  let resolvedOutputFile;
24215
24350
  if (outputFileTemplate) {
24216
24351
  let outputPath = String(EnvironmentResolver.resolveValue(outputFileTemplate));
@@ -24489,6 +24624,11 @@ var init_http_client_provider = __esm({
24489
24624
  return [
24490
24625
  "type",
24491
24626
  "url",
24627
+ "base_url",
24628
+ "path",
24629
+ "params",
24630
+ "query",
24631
+ "auth",
24492
24632
  "method",
24493
24633
  "headers",
24494
24634
  "body",
@@ -27980,14 +28120,14 @@ var require_util = __commonJS({
27980
28120
  }
27981
28121
  const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
27982
28122
  let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
27983
- let path27 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
28123
+ let path31 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
27984
28124
  if (origin.endsWith("/")) {
27985
28125
  origin = origin.substring(0, origin.length - 1);
27986
28126
  }
27987
- if (path27 && !path27.startsWith("/")) {
27988
- path27 = `/${path27}`;
28127
+ if (path31 && !path31.startsWith("/")) {
28128
+ path31 = `/${path31}`;
27989
28129
  }
27990
- url = new URL(origin + path27);
28130
+ url = new URL(origin + path31);
27991
28131
  }
27992
28132
  return url;
27993
28133
  }
@@ -29601,20 +29741,20 @@ var require_parseParams = __commonJS({
29601
29741
  var require_basename = __commonJS({
29602
29742
  "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
29603
29743
  "use strict";
29604
- module2.exports = function basename4(path27) {
29605
- if (typeof path27 !== "string") {
29744
+ module2.exports = function basename4(path31) {
29745
+ if (typeof path31 !== "string") {
29606
29746
  return "";
29607
29747
  }
29608
- for (var i = path27.length - 1; i >= 0; --i) {
29609
- switch (path27.charCodeAt(i)) {
29748
+ for (var i = path31.length - 1; i >= 0; --i) {
29749
+ switch (path31.charCodeAt(i)) {
29610
29750
  case 47:
29611
29751
  // '/'
29612
29752
  case 92:
29613
- path27 = path27.slice(i + 1);
29614
- return path27 === ".." || path27 === "." ? "" : path27;
29753
+ path31 = path31.slice(i + 1);
29754
+ return path31 === ".." || path31 === "." ? "" : path31;
29615
29755
  }
29616
29756
  }
29617
- return path27 === ".." || path27 === "." ? "" : path27;
29757
+ return path31 === ".." || path31 === "." ? "" : path31;
29618
29758
  };
29619
29759
  }
29620
29760
  });
@@ -30618,11 +30758,11 @@ var require_util2 = __commonJS({
30618
30758
  var assert = require("assert");
30619
30759
  var { isUint8Array } = require("util/types");
30620
30760
  var supportedHashes = [];
30621
- var crypto2;
30761
+ var crypto4;
30622
30762
  try {
30623
- crypto2 = require("crypto");
30763
+ crypto4 = require("crypto");
30624
30764
  const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
30625
- supportedHashes = crypto2.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
30765
+ supportedHashes = crypto4.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
30626
30766
  } catch {
30627
30767
  }
30628
30768
  function responseURL(response) {
@@ -30899,7 +31039,7 @@ var require_util2 = __commonJS({
30899
31039
  }
30900
31040
  }
30901
31041
  function bytesMatch(bytes, metadataList) {
30902
- if (crypto2 === void 0) {
31042
+ if (crypto4 === void 0) {
30903
31043
  return true;
30904
31044
  }
30905
31045
  const parsedMetadata = parseMetadata(metadataList);
@@ -30914,7 +31054,7 @@ var require_util2 = __commonJS({
30914
31054
  for (const item of metadata) {
30915
31055
  const algorithm = item.algo;
30916
31056
  const expectedValue = item.hash;
30917
- let actualValue = crypto2.createHash(algorithm).update(bytes).digest("base64");
31057
+ let actualValue = crypto4.createHash(algorithm).update(bytes).digest("base64");
30918
31058
  if (actualValue[actualValue.length - 1] === "=") {
30919
31059
  if (actualValue[actualValue.length - 2] === "=") {
30920
31060
  actualValue = actualValue.slice(0, -2);
@@ -31007,8 +31147,8 @@ var require_util2 = __commonJS({
31007
31147
  function createDeferredPromise() {
31008
31148
  let res;
31009
31149
  let rej;
31010
- const promise = new Promise((resolve15, reject) => {
31011
- res = resolve15;
31150
+ const promise = new Promise((resolve19, reject) => {
31151
+ res = resolve19;
31012
31152
  rej = reject;
31013
31153
  });
31014
31154
  return { promise, resolve: res, reject: rej };
@@ -32261,8 +32401,8 @@ var require_body = __commonJS({
32261
32401
  var { parseMIMEType, serializeAMimeType } = require_dataURL();
32262
32402
  var random;
32263
32403
  try {
32264
- const crypto2 = require("crypto");
32265
- random = (max) => crypto2.randomInt(0, max);
32404
+ const crypto4 = require("crypto");
32405
+ random = (max) => crypto4.randomInt(0, max);
32266
32406
  } catch {
32267
32407
  random = (max) => Math.floor(Math.random(max));
32268
32408
  }
@@ -32513,8 +32653,8 @@ Content-Type: ${value.type || "application/octet-stream"}\r
32513
32653
  });
32514
32654
  }
32515
32655
  });
32516
- const busboyResolve = new Promise((resolve15, reject) => {
32517
- busboy.on("finish", resolve15);
32656
+ const busboyResolve = new Promise((resolve19, reject) => {
32657
+ busboy.on("finish", resolve19);
32518
32658
  busboy.on("error", (err) => reject(new TypeError(err)));
32519
32659
  });
32520
32660
  if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk);
@@ -32645,7 +32785,7 @@ var require_request = __commonJS({
32645
32785
  }
32646
32786
  var Request = class _Request {
32647
32787
  constructor(origin, {
32648
- path: path27,
32788
+ path: path31,
32649
32789
  method,
32650
32790
  body,
32651
32791
  headers,
@@ -32659,11 +32799,11 @@ var require_request = __commonJS({
32659
32799
  throwOnError,
32660
32800
  expectContinue
32661
32801
  }, handler) {
32662
- if (typeof path27 !== "string") {
32802
+ if (typeof path31 !== "string") {
32663
32803
  throw new InvalidArgumentError("path must be a string");
32664
- } else if (path27[0] !== "/" && !(path27.startsWith("http://") || path27.startsWith("https://")) && method !== "CONNECT") {
32804
+ } else if (path31[0] !== "/" && !(path31.startsWith("http://") || path31.startsWith("https://")) && method !== "CONNECT") {
32665
32805
  throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
32666
- } else if (invalidPathRegex.exec(path27) !== null) {
32806
+ } else if (invalidPathRegex.exec(path31) !== null) {
32667
32807
  throw new InvalidArgumentError("invalid request path");
32668
32808
  }
32669
32809
  if (typeof method !== "string") {
@@ -32726,7 +32866,7 @@ var require_request = __commonJS({
32726
32866
  this.completed = false;
32727
32867
  this.aborted = false;
32728
32868
  this.upgrade = upgrade || null;
32729
- this.path = query ? util.buildURL(path27, query) : path27;
32869
+ this.path = query ? util.buildURL(path31, query) : path31;
32730
32870
  this.origin = origin;
32731
32871
  this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
32732
32872
  this.blocking = blocking == null ? false : blocking;
@@ -33048,9 +33188,9 @@ var require_dispatcher_base = __commonJS({
33048
33188
  }
33049
33189
  close(callback) {
33050
33190
  if (callback === void 0) {
33051
- return new Promise((resolve15, reject) => {
33191
+ return new Promise((resolve19, reject) => {
33052
33192
  this.close((err, data) => {
33053
- return err ? reject(err) : resolve15(data);
33193
+ return err ? reject(err) : resolve19(data);
33054
33194
  });
33055
33195
  });
33056
33196
  }
@@ -33088,12 +33228,12 @@ var require_dispatcher_base = __commonJS({
33088
33228
  err = null;
33089
33229
  }
33090
33230
  if (callback === void 0) {
33091
- return new Promise((resolve15, reject) => {
33231
+ return new Promise((resolve19, reject) => {
33092
33232
  this.destroy(err, (err2, data) => {
33093
33233
  return err2 ? (
33094
33234
  /* istanbul ignore next: should never error */
33095
33235
  reject(err2)
33096
- ) : resolve15(data);
33236
+ ) : resolve19(data);
33097
33237
  });
33098
33238
  });
33099
33239
  }
@@ -33734,9 +33874,9 @@ var require_RedirectHandler = __commonJS({
33734
33874
  return this.handler.onHeaders(statusCode, headers, resume, statusText);
33735
33875
  }
33736
33876
  const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
33737
- const path27 = search ? `${pathname}${search}` : pathname;
33877
+ const path31 = search ? `${pathname}${search}` : pathname;
33738
33878
  this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
33739
- this.opts.path = path27;
33879
+ this.opts.path = path31;
33740
33880
  this.opts.origin = origin;
33741
33881
  this.opts.maxRedirections = 0;
33742
33882
  this.opts.query = null;
@@ -34155,16 +34295,16 @@ var require_client = __commonJS({
34155
34295
  return this[kNeedDrain] < 2;
34156
34296
  }
34157
34297
  async [kClose]() {
34158
- return new Promise((resolve15) => {
34298
+ return new Promise((resolve19) => {
34159
34299
  if (!this[kSize]) {
34160
- resolve15(null);
34300
+ resolve19(null);
34161
34301
  } else {
34162
- this[kClosedResolve] = resolve15;
34302
+ this[kClosedResolve] = resolve19;
34163
34303
  }
34164
34304
  });
34165
34305
  }
34166
34306
  async [kDestroy](err) {
34167
- return new Promise((resolve15) => {
34307
+ return new Promise((resolve19) => {
34168
34308
  const requests = this[kQueue].splice(this[kPendingIdx]);
34169
34309
  for (let i = 0; i < requests.length; i++) {
34170
34310
  const request = requests[i];
@@ -34175,7 +34315,7 @@ var require_client = __commonJS({
34175
34315
  this[kClosedResolve]();
34176
34316
  this[kClosedResolve] = null;
34177
34317
  }
34178
- resolve15();
34318
+ resolve19();
34179
34319
  };
34180
34320
  if (this[kHTTP2Session] != null) {
34181
34321
  util.destroy(this[kHTTP2Session], err);
@@ -34755,7 +34895,7 @@ var require_client = __commonJS({
34755
34895
  });
34756
34896
  }
34757
34897
  try {
34758
- const socket = await new Promise((resolve15, reject) => {
34898
+ const socket = await new Promise((resolve19, reject) => {
34759
34899
  client[kConnector]({
34760
34900
  host,
34761
34901
  hostname,
@@ -34767,7 +34907,7 @@ var require_client = __commonJS({
34767
34907
  if (err) {
34768
34908
  reject(err);
34769
34909
  } else {
34770
- resolve15(socket2);
34910
+ resolve19(socket2);
34771
34911
  }
34772
34912
  });
34773
34913
  });
@@ -34978,7 +35118,7 @@ var require_client = __commonJS({
34978
35118
  writeH2(client, client[kHTTP2Session], request);
34979
35119
  return;
34980
35120
  }
34981
- const { body, method, path: path27, host, upgrade, headers, blocking, reset } = request;
35121
+ const { body, method, path: path31, host, upgrade, headers, blocking, reset } = request;
34982
35122
  const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
34983
35123
  if (body && typeof body.read === "function") {
34984
35124
  body.read(0);
@@ -35028,7 +35168,7 @@ var require_client = __commonJS({
35028
35168
  if (blocking) {
35029
35169
  socket[kBlocking] = true;
35030
35170
  }
35031
- let header = `${method} ${path27} HTTP/1.1\r
35171
+ let header = `${method} ${path31} HTTP/1.1\r
35032
35172
  `;
35033
35173
  if (typeof host === "string") {
35034
35174
  header += `host: ${host}\r
@@ -35091,7 +35231,7 @@ upgrade: ${upgrade}\r
35091
35231
  return true;
35092
35232
  }
35093
35233
  function writeH2(client, session, request) {
35094
- const { body, method, path: path27, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
35234
+ const { body, method, path: path31, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
35095
35235
  let headers;
35096
35236
  if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim());
35097
35237
  else headers = reqHeaders;
@@ -35134,7 +35274,7 @@ upgrade: ${upgrade}\r
35134
35274
  });
35135
35275
  return true;
35136
35276
  }
35137
- headers[HTTP2_HEADER_PATH] = path27;
35277
+ headers[HTTP2_HEADER_PATH] = path31;
35138
35278
  headers[HTTP2_HEADER_SCHEME] = "https";
35139
35279
  const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
35140
35280
  if (body && typeof body.read === "function") {
@@ -35391,12 +35531,12 @@ upgrade: ${upgrade}\r
35391
35531
  cb();
35392
35532
  }
35393
35533
  }
35394
- const waitForDrain = () => new Promise((resolve15, reject) => {
35534
+ const waitForDrain = () => new Promise((resolve19, reject) => {
35395
35535
  assert(callback === null);
35396
35536
  if (socket[kError]) {
35397
35537
  reject(socket[kError]);
35398
35538
  } else {
35399
- callback = resolve15;
35539
+ callback = resolve19;
35400
35540
  }
35401
35541
  });
35402
35542
  if (client[kHTTPConnVersion] === "h2") {
@@ -35742,8 +35882,8 @@ var require_pool_base = __commonJS({
35742
35882
  if (this[kQueue].isEmpty()) {
35743
35883
  return Promise.all(this[kClients].map((c) => c.close()));
35744
35884
  } else {
35745
- return new Promise((resolve15) => {
35746
- this[kClosedResolve] = resolve15;
35885
+ return new Promise((resolve19) => {
35886
+ this[kClosedResolve] = resolve19;
35747
35887
  });
35748
35888
  }
35749
35889
  }
@@ -36321,7 +36461,7 @@ var require_readable = __commonJS({
36321
36461
  if (this.closed) {
36322
36462
  return Promise.resolve(null);
36323
36463
  }
36324
- return new Promise((resolve15, reject) => {
36464
+ return new Promise((resolve19, reject) => {
36325
36465
  const signalListenerCleanup = signal ? util.addAbortListener(signal, () => {
36326
36466
  this.destroy();
36327
36467
  }) : noop;
@@ -36330,7 +36470,7 @@ var require_readable = __commonJS({
36330
36470
  if (signal && signal.aborted) {
36331
36471
  reject(signal.reason || Object.assign(new Error("The operation was aborted"), { name: "AbortError" }));
36332
36472
  } else {
36333
- resolve15(null);
36473
+ resolve19(null);
36334
36474
  }
36335
36475
  }).on("error", noop).on("data", function(chunk) {
36336
36476
  limit -= chunk.length;
@@ -36352,11 +36492,11 @@ var require_readable = __commonJS({
36352
36492
  throw new TypeError("unusable");
36353
36493
  }
36354
36494
  assert(!stream[kConsume]);
36355
- return new Promise((resolve15, reject) => {
36495
+ return new Promise((resolve19, reject) => {
36356
36496
  stream[kConsume] = {
36357
36497
  type,
36358
36498
  stream,
36359
- resolve: resolve15,
36499
+ resolve: resolve19,
36360
36500
  reject,
36361
36501
  length: 0,
36362
36502
  body: []
@@ -36391,12 +36531,12 @@ var require_readable = __commonJS({
36391
36531
  }
36392
36532
  }
36393
36533
  function consumeEnd(consume2) {
36394
- const { type, body, resolve: resolve15, stream, length } = consume2;
36534
+ const { type, body, resolve: resolve19, stream, length } = consume2;
36395
36535
  try {
36396
36536
  if (type === "text") {
36397
- resolve15(toUSVString(Buffer.concat(body)));
36537
+ resolve19(toUSVString(Buffer.concat(body)));
36398
36538
  } else if (type === "json") {
36399
- resolve15(JSON.parse(Buffer.concat(body)));
36539
+ resolve19(JSON.parse(Buffer.concat(body)));
36400
36540
  } else if (type === "arrayBuffer") {
36401
36541
  const dst = new Uint8Array(length);
36402
36542
  let pos = 0;
@@ -36404,12 +36544,12 @@ var require_readable = __commonJS({
36404
36544
  dst.set(buf, pos);
36405
36545
  pos += buf.byteLength;
36406
36546
  }
36407
- resolve15(dst.buffer);
36547
+ resolve19(dst.buffer);
36408
36548
  } else if (type === "blob") {
36409
36549
  if (!Blob2) {
36410
36550
  Blob2 = require("buffer").Blob;
36411
36551
  }
36412
- resolve15(new Blob2(body, { type: stream[kContentType] }));
36552
+ resolve19(new Blob2(body, { type: stream[kContentType] }));
36413
36553
  }
36414
36554
  consumeFinish(consume2);
36415
36555
  } catch (err) {
@@ -36666,9 +36806,9 @@ var require_api_request = __commonJS({
36666
36806
  };
36667
36807
  function request(opts, callback) {
36668
36808
  if (callback === void 0) {
36669
- return new Promise((resolve15, reject) => {
36809
+ return new Promise((resolve19, reject) => {
36670
36810
  request.call(this, opts, (err, data) => {
36671
- return err ? reject(err) : resolve15(data);
36811
+ return err ? reject(err) : resolve19(data);
36672
36812
  });
36673
36813
  });
36674
36814
  }
@@ -36841,9 +36981,9 @@ var require_api_stream = __commonJS({
36841
36981
  };
36842
36982
  function stream(opts, factory, callback) {
36843
36983
  if (callback === void 0) {
36844
- return new Promise((resolve15, reject) => {
36984
+ return new Promise((resolve19, reject) => {
36845
36985
  stream.call(this, opts, factory, (err, data) => {
36846
- return err ? reject(err) : resolve15(data);
36986
+ return err ? reject(err) : resolve19(data);
36847
36987
  });
36848
36988
  });
36849
36989
  }
@@ -37124,9 +37264,9 @@ var require_api_upgrade = __commonJS({
37124
37264
  };
37125
37265
  function upgrade(opts, callback) {
37126
37266
  if (callback === void 0) {
37127
- return new Promise((resolve15, reject) => {
37267
+ return new Promise((resolve19, reject) => {
37128
37268
  upgrade.call(this, opts, (err, data) => {
37129
- return err ? reject(err) : resolve15(data);
37269
+ return err ? reject(err) : resolve19(data);
37130
37270
  });
37131
37271
  });
37132
37272
  }
@@ -37215,9 +37355,9 @@ var require_api_connect = __commonJS({
37215
37355
  };
37216
37356
  function connect(opts, callback) {
37217
37357
  if (callback === void 0) {
37218
- return new Promise((resolve15, reject) => {
37358
+ return new Promise((resolve19, reject) => {
37219
37359
  connect.call(this, opts, (err, data) => {
37220
- return err ? reject(err) : resolve15(data);
37360
+ return err ? reject(err) : resolve19(data);
37221
37361
  });
37222
37362
  });
37223
37363
  }
@@ -37377,20 +37517,20 @@ var require_mock_utils = __commonJS({
37377
37517
  }
37378
37518
  return true;
37379
37519
  }
37380
- function safeUrl(path27) {
37381
- if (typeof path27 !== "string") {
37382
- return path27;
37520
+ function safeUrl(path31) {
37521
+ if (typeof path31 !== "string") {
37522
+ return path31;
37383
37523
  }
37384
- const pathSegments = path27.split("?");
37524
+ const pathSegments = path31.split("?");
37385
37525
  if (pathSegments.length !== 2) {
37386
- return path27;
37526
+ return path31;
37387
37527
  }
37388
37528
  const qp = new URLSearchParams(pathSegments.pop());
37389
37529
  qp.sort();
37390
37530
  return [...pathSegments, qp.toString()].join("?");
37391
37531
  }
37392
- function matchKey(mockDispatch2, { path: path27, method, body, headers }) {
37393
- const pathMatch = matchValue(mockDispatch2.path, path27);
37532
+ function matchKey(mockDispatch2, { path: path31, method, body, headers }) {
37533
+ const pathMatch = matchValue(mockDispatch2.path, path31);
37394
37534
  const methodMatch = matchValue(mockDispatch2.method, method);
37395
37535
  const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
37396
37536
  const headersMatch = matchHeaders(mockDispatch2, headers);
@@ -37408,7 +37548,7 @@ var require_mock_utils = __commonJS({
37408
37548
  function getMockDispatch(mockDispatches, key) {
37409
37549
  const basePath = key.query ? buildURL(key.path, key.query) : key.path;
37410
37550
  const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
37411
- let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path27 }) => matchValue(safeUrl(path27), resolvedPath));
37551
+ let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path31 }) => matchValue(safeUrl(path31), resolvedPath));
37412
37552
  if (matchedMockDispatches.length === 0) {
37413
37553
  throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
37414
37554
  }
@@ -37445,9 +37585,9 @@ var require_mock_utils = __commonJS({
37445
37585
  }
37446
37586
  }
37447
37587
  function buildKey(opts) {
37448
- const { path: path27, method, body, headers, query } = opts;
37588
+ const { path: path31, method, body, headers, query } = opts;
37449
37589
  return {
37450
- path: path27,
37590
+ path: path31,
37451
37591
  method,
37452
37592
  body,
37453
37593
  headers,
@@ -37896,10 +38036,10 @@ var require_pending_interceptors_formatter = __commonJS({
37896
38036
  }
37897
38037
  format(pendingInterceptors) {
37898
38038
  const withPrettyHeaders = pendingInterceptors.map(
37899
- ({ method, path: path27, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
38039
+ ({ method, path: path31, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
37900
38040
  Method: method,
37901
38041
  Origin: origin,
37902
- Path: path27,
38042
+ Path: path31,
37903
38043
  "Status code": statusCode,
37904
38044
  Persistent: persist ? "\u2705" : "\u274C",
37905
38045
  Invocations: timesInvoked,
@@ -40840,7 +40980,7 @@ var require_fetch = __commonJS({
40840
40980
  async function dispatch({ body }) {
40841
40981
  const url = requestCurrentURL(request);
40842
40982
  const agent = fetchParams.controller.dispatcher;
40843
- return new Promise((resolve15, reject) => agent.dispatch(
40983
+ return new Promise((resolve19, reject) => agent.dispatch(
40844
40984
  {
40845
40985
  path: url.pathname + url.search,
40846
40986
  origin: url.origin,
@@ -40916,7 +41056,7 @@ var require_fetch = __commonJS({
40916
41056
  }
40917
41057
  }
40918
41058
  }
40919
- resolve15({
41059
+ resolve19({
40920
41060
  status,
40921
41061
  statusText,
40922
41062
  headersList: headers[kHeadersList],
@@ -40959,7 +41099,7 @@ var require_fetch = __commonJS({
40959
41099
  const val = headersList[n + 1].toString("latin1");
40960
41100
  headers[kHeadersList].append(key, val);
40961
41101
  }
40962
- resolve15({
41102
+ resolve19({
40963
41103
  status,
40964
41104
  statusText: STATUS_CODES[status],
40965
41105
  headersList: headers[kHeadersList],
@@ -42520,8 +42660,8 @@ var require_util6 = __commonJS({
42520
42660
  }
42521
42661
  }
42522
42662
  }
42523
- function validateCookiePath(path27) {
42524
- for (const char of path27) {
42663
+ function validateCookiePath(path31) {
42664
+ for (const char of path31) {
42525
42665
  const code = char.charCodeAt(0);
42526
42666
  if (code < 33 || char === ";") {
42527
42667
  throw new Error("Invalid cookie path");
@@ -43318,9 +43458,9 @@ var require_connection = __commonJS({
43318
43458
  channels.open = diagnosticsChannel.channel("undici:websocket:open");
43319
43459
  channels.close = diagnosticsChannel.channel("undici:websocket:close");
43320
43460
  channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error");
43321
- var crypto2;
43461
+ var crypto4;
43322
43462
  try {
43323
- crypto2 = require("crypto");
43463
+ crypto4 = require("crypto");
43324
43464
  } catch {
43325
43465
  }
43326
43466
  function establishWebSocketConnection(url, protocols, ws, onEstablish, options) {
@@ -43339,7 +43479,7 @@ var require_connection = __commonJS({
43339
43479
  const headersList = new Headers(options.headers)[kHeadersList];
43340
43480
  request.headersList = headersList;
43341
43481
  }
43342
- const keyValue = crypto2.randomBytes(16).toString("base64");
43482
+ const keyValue = crypto4.randomBytes(16).toString("base64");
43343
43483
  request.headersList.append("sec-websocket-key", keyValue);
43344
43484
  request.headersList.append("sec-websocket-version", "13");
43345
43485
  for (const protocol of protocols) {
@@ -43368,7 +43508,7 @@ var require_connection = __commonJS({
43368
43508
  return;
43369
43509
  }
43370
43510
  const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
43371
- const digest = crypto2.createHash("sha1").update(keyValue + uid).digest("base64");
43511
+ const digest = crypto4.createHash("sha1").update(keyValue + uid).digest("base64");
43372
43512
  if (secWSAccept !== digest) {
43373
43513
  failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
43374
43514
  return;
@@ -43448,9 +43588,9 @@ var require_frame = __commonJS({
43448
43588
  "node_modules/undici/lib/websocket/frame.js"(exports2, module2) {
43449
43589
  "use strict";
43450
43590
  var { maxUnsigned16Bit } = require_constants5();
43451
- var crypto2;
43591
+ var crypto4;
43452
43592
  try {
43453
- crypto2 = require("crypto");
43593
+ crypto4 = require("crypto");
43454
43594
  } catch {
43455
43595
  }
43456
43596
  var WebsocketFrameSend = class {
@@ -43459,7 +43599,7 @@ var require_frame = __commonJS({
43459
43599
  */
43460
43600
  constructor(data) {
43461
43601
  this.frameData = data;
43462
- this.maskKey = crypto2.randomBytes(4);
43602
+ this.maskKey = crypto4.randomBytes(4);
43463
43603
  }
43464
43604
  createFrame(opcode) {
43465
43605
  const bodyLength = this.frameData?.byteLength ?? 0;
@@ -44201,11 +44341,11 @@ var require_undici = __commonJS({
44201
44341
  if (typeof opts.path !== "string") {
44202
44342
  throw new InvalidArgumentError("invalid opts.path");
44203
44343
  }
44204
- let path27 = opts.path;
44344
+ let path31 = opts.path;
44205
44345
  if (!opts.path.startsWith("/")) {
44206
- path27 = `/${path27}`;
44346
+ path31 = `/${path31}`;
44207
44347
  }
44208
- url = new URL(util.parseOrigin(url).origin + path27);
44348
+ url = new URL(util.parseOrigin(url).origin + path31);
44209
44349
  } else {
44210
44350
  if (!opts) {
44211
44351
  opts = typeof url === "object" ? url : {};
@@ -44754,7 +44894,7 @@ var init_mcp_check_provider = __esm({
44754
44894
  logger.warn(
44755
44895
  `MCP ${transportName} failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${error instanceof Error ? error.message : String(error)}`
44756
44896
  );
44757
- await new Promise((resolve15) => setTimeout(resolve15, delay));
44897
+ await new Promise((resolve19) => setTimeout(resolve19, delay));
44758
44898
  attempt += 1;
44759
44899
  } finally {
44760
44900
  try {
@@ -45036,7 +45176,7 @@ async function acquirePromptLock() {
45036
45176
  activePrompt = true;
45037
45177
  return;
45038
45178
  }
45039
- await new Promise((resolve15) => waiters.push(resolve15));
45179
+ await new Promise((resolve19) => waiters.push(resolve19));
45040
45180
  activePrompt = true;
45041
45181
  }
45042
45182
  function releasePromptLock() {
@@ -45046,7 +45186,7 @@ function releasePromptLock() {
45046
45186
  }
45047
45187
  async function interactivePrompt(options) {
45048
45188
  await acquirePromptLock();
45049
- return new Promise((resolve15, reject) => {
45189
+ return new Promise((resolve19, reject) => {
45050
45190
  const dbg = process.env.VISOR_DEBUG === "true";
45051
45191
  try {
45052
45192
  if (dbg) {
@@ -45133,12 +45273,12 @@ async function interactivePrompt(options) {
45133
45273
  };
45134
45274
  const finish = (value) => {
45135
45275
  cleanup();
45136
- resolve15(value);
45276
+ resolve19(value);
45137
45277
  };
45138
45278
  if (options.timeout && options.timeout > 0) {
45139
45279
  timeoutId = setTimeout(() => {
45140
45280
  cleanup();
45141
- if (defaultValue !== void 0) return resolve15(defaultValue);
45281
+ if (defaultValue !== void 0) return resolve19(defaultValue);
45142
45282
  return reject(new Error("Input timeout"));
45143
45283
  }, options.timeout);
45144
45284
  }
@@ -45270,7 +45410,7 @@ async function interactivePrompt(options) {
45270
45410
  });
45271
45411
  }
45272
45412
  async function simplePrompt(prompt) {
45273
- return new Promise((resolve15) => {
45413
+ return new Promise((resolve19) => {
45274
45414
  const rl = readline.createInterface({
45275
45415
  input: process.stdin,
45276
45416
  output: process.stdout
@@ -45286,7 +45426,7 @@ async function simplePrompt(prompt) {
45286
45426
  rl.question(`${prompt}
45287
45427
  > `, (answer) => {
45288
45428
  rl.close();
45289
- resolve15(answer.trim());
45429
+ resolve19(answer.trim());
45290
45430
  });
45291
45431
  });
45292
45432
  }
@@ -45454,7 +45594,7 @@ function isStdinAvailable() {
45454
45594
  return !process.stdin.isTTY;
45455
45595
  }
45456
45596
  async function readStdin(timeout, maxSize = 1024 * 1024) {
45457
- return new Promise((resolve15, reject) => {
45597
+ return new Promise((resolve19, reject) => {
45458
45598
  let data = "";
45459
45599
  let timeoutId;
45460
45600
  if (timeout) {
@@ -45481,7 +45621,7 @@ async function readStdin(timeout, maxSize = 1024 * 1024) {
45481
45621
  };
45482
45622
  const onEnd = () => {
45483
45623
  cleanup();
45484
- resolve15(data.trim());
45624
+ resolve19(data.trim());
45485
45625
  };
45486
45626
  const onError = (err) => {
45487
45627
  cleanup();
@@ -49607,23 +49747,23 @@ __export(renderer_schema_exports, {
49607
49747
  });
49608
49748
  async function loadRendererSchema(name) {
49609
49749
  try {
49610
- const fs23 = await import("fs/promises");
49611
- const path27 = await import("path");
49750
+ const fs27 = await import("fs/promises");
49751
+ const path31 = await import("path");
49612
49752
  const sanitized = String(name).replace(/[^a-zA-Z0-9-]/g, "");
49613
49753
  if (!sanitized) return void 0;
49614
49754
  const candidates = [
49615
49755
  // When bundled with ncc, __dirname is dist/ and output/ is at dist/output/
49616
- path27.join(__dirname, "output", sanitized, "schema.json"),
49756
+ path31.join(__dirname, "output", sanitized, "schema.json"),
49617
49757
  // When running from source, __dirname is src/state-machine/dispatch/ and output/ is at output/
49618
- path27.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
49758
+ path31.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
49619
49759
  // When running from a checkout with output/ folder copied to CWD
49620
- path27.join(process.cwd(), "output", sanitized, "schema.json"),
49760
+ path31.join(process.cwd(), "output", sanitized, "schema.json"),
49621
49761
  // Fallback: cwd/dist/output/
49622
- path27.join(process.cwd(), "dist", "output", sanitized, "schema.json")
49762
+ path31.join(process.cwd(), "dist", "output", sanitized, "schema.json")
49623
49763
  ];
49624
49764
  for (const p of candidates) {
49625
49765
  try {
49626
- const raw = await fs23.readFile(p, "utf-8");
49766
+ const raw = await fs27.readFile(p, "utf-8");
49627
49767
  return JSON.parse(raw);
49628
49768
  } catch {
49629
49769
  }
@@ -52042,8 +52182,8 @@ function updateStats2(results, state, isForEachIteration = false) {
52042
52182
  async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
52043
52183
  try {
52044
52184
  const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
52045
- const fs23 = await import("fs/promises");
52046
- const path27 = await import("path");
52185
+ const fs27 = await import("fs/promises");
52186
+ const path31 = await import("path");
52047
52187
  const schemaRaw = checkConfig.schema || "plain";
52048
52188
  const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
52049
52189
  let templateContent;
@@ -52052,27 +52192,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
52052
52192
  logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
52053
52193
  } else if (checkConfig.template && checkConfig.template.file) {
52054
52194
  const file = String(checkConfig.template.file);
52055
- const resolved = path27.resolve(process.cwd(), file);
52056
- templateContent = await fs23.readFile(resolved, "utf-8");
52195
+ const resolved = path31.resolve(process.cwd(), file);
52196
+ templateContent = await fs27.readFile(resolved, "utf-8");
52057
52197
  logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
52058
52198
  } else if (schema && schema !== "plain") {
52059
52199
  const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
52060
52200
  if (sanitized) {
52061
52201
  const candidatePaths = [
52062
- path27.join(__dirname, "output", sanitized, "template.liquid"),
52202
+ path31.join(__dirname, "output", sanitized, "template.liquid"),
52063
52203
  // bundled: dist/output/
52064
- path27.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
52204
+ path31.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
52065
52205
  // source (from state-machine/states)
52066
- path27.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
52206
+ path31.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
52067
52207
  // source (alternate)
52068
- path27.join(process.cwd(), "output", sanitized, "template.liquid"),
52208
+ path31.join(process.cwd(), "output", sanitized, "template.liquid"),
52069
52209
  // fallback: cwd/output/
52070
- path27.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
52210
+ path31.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
52071
52211
  // fallback: cwd/dist/output/
52072
52212
  ];
52073
52213
  for (const p of candidatePaths) {
52074
52214
  try {
52075
- templateContent = await fs23.readFile(p, "utf-8");
52215
+ templateContent = await fs27.readFile(p, "utf-8");
52076
52216
  if (templateContent) {
52077
52217
  logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
52078
52218
  break;
@@ -54212,8 +54352,8 @@ var init_workspace_manager = __esm({
54212
54352
  );
54213
54353
  if (this.cleanupRequested && this.activeOperations === 0) {
54214
54354
  logger.debug(`[Workspace] All references released, proceeding with deferred cleanup`);
54215
- for (const resolve15 of this.cleanupResolvers) {
54216
- resolve15();
54355
+ for (const resolve19 of this.cleanupResolvers) {
54356
+ resolve19();
54217
54357
  }
54218
54358
  this.cleanupResolvers = [];
54219
54359
  }
@@ -54370,19 +54510,19 @@ var init_workspace_manager = __esm({
54370
54510
  );
54371
54511
  this.cleanupRequested = true;
54372
54512
  await Promise.race([
54373
- new Promise((resolve15) => {
54513
+ new Promise((resolve19) => {
54374
54514
  if (this.activeOperations === 0) {
54375
- resolve15();
54515
+ resolve19();
54376
54516
  } else {
54377
- this.cleanupResolvers.push(resolve15);
54517
+ this.cleanupResolvers.push(resolve19);
54378
54518
  }
54379
54519
  }),
54380
- new Promise((resolve15) => {
54520
+ new Promise((resolve19) => {
54381
54521
  setTimeout(() => {
54382
54522
  logger.warn(
54383
54523
  `[Workspace] Cleanup timeout after ${timeout}ms, proceeding anyway (${this.activeOperations} operations still active)`
54384
54524
  );
54385
- resolve15();
54525
+ resolve19();
54386
54526
  }, timeout);
54387
54527
  })
54388
54528
  ]);
@@ -54860,6 +55000,1380 @@ var init_build_engine_context = __esm({
54860
55000
  }
54861
55001
  });
54862
55002
 
55003
+ // src/policy/default-engine.ts
55004
+ var DefaultPolicyEngine;
55005
+ var init_default_engine = __esm({
55006
+ "src/policy/default-engine.ts"() {
55007
+ "use strict";
55008
+ DefaultPolicyEngine = class {
55009
+ async initialize(_config) {
55010
+ }
55011
+ async evaluateCheckExecution(_checkId, _checkConfig) {
55012
+ return { allowed: true };
55013
+ }
55014
+ async evaluateToolInvocation(_serverName, _methodName, _transport) {
55015
+ return { allowed: true };
55016
+ }
55017
+ async evaluateCapabilities(_checkId, _capabilities) {
55018
+ return { allowed: true };
55019
+ }
55020
+ async shutdown() {
55021
+ }
55022
+ };
55023
+ }
55024
+ });
55025
+
55026
+ // src/enterprise/license/validator.ts
55027
+ var validator_exports = {};
55028
+ __export(validator_exports, {
55029
+ LicenseValidator: () => LicenseValidator
55030
+ });
55031
+ var crypto2, fs21, path25, LicenseValidator;
55032
+ var init_validator = __esm({
55033
+ "src/enterprise/license/validator.ts"() {
55034
+ "use strict";
55035
+ crypto2 = __toESM(require("crypto"));
55036
+ fs21 = __toESM(require("fs"));
55037
+ path25 = __toESM(require("path"));
55038
+ LicenseValidator = class _LicenseValidator {
55039
+ /** Ed25519 public key for license verification (PEM format). */
55040
+ static PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n-----END PUBLIC KEY-----\n";
55041
+ cache = null;
55042
+ static CACHE_TTL = 5 * 60 * 1e3;
55043
+ // 5 minutes
55044
+ static GRACE_PERIOD = 72 * 3600 * 1e3;
55045
+ // 72 hours after expiry
55046
+ /**
55047
+ * Load and validate license from environment or file.
55048
+ *
55049
+ * Resolution order:
55050
+ * 1. VISOR_LICENSE env var (JWT string)
55051
+ * 2. VISOR_LICENSE_FILE env var (path to file)
55052
+ * 3. .visor-license in project root (cwd)
55053
+ * 4. .visor-license in ~/.config/visor/
55054
+ */
55055
+ async loadAndValidate() {
55056
+ if (this.cache && Date.now() - this.cache.validatedAt < _LicenseValidator.CACHE_TTL) {
55057
+ return this.cache.payload;
55058
+ }
55059
+ const token = this.resolveToken();
55060
+ if (!token) return null;
55061
+ const payload = this.verifyAndDecode(token);
55062
+ if (!payload) return null;
55063
+ this.cache = { payload, validatedAt: Date.now() };
55064
+ return payload;
55065
+ }
55066
+ /** Check if a specific feature is licensed */
55067
+ hasFeature(feature) {
55068
+ if (!this.cache) return false;
55069
+ return this.cache.payload.features.includes(feature);
55070
+ }
55071
+ /** Check if license is valid (with grace period) */
55072
+ isValid() {
55073
+ if (!this.cache) return false;
55074
+ const now = Date.now();
55075
+ const expiryMs = this.cache.payload.exp * 1e3;
55076
+ return now < expiryMs + _LicenseValidator.GRACE_PERIOD;
55077
+ }
55078
+ /** Check if the license is within its grace period (expired but still valid) */
55079
+ isInGracePeriod() {
55080
+ if (!this.cache) return false;
55081
+ const now = Date.now();
55082
+ const expiryMs = this.cache.payload.exp * 1e3;
55083
+ return now >= expiryMs && now < expiryMs + _LicenseValidator.GRACE_PERIOD;
55084
+ }
55085
+ resolveToken() {
55086
+ if (process.env.VISOR_LICENSE) {
55087
+ return process.env.VISOR_LICENSE.trim();
55088
+ }
55089
+ if (process.env.VISOR_LICENSE_FILE) {
55090
+ const resolved = path25.resolve(process.env.VISOR_LICENSE_FILE);
55091
+ const home2 = process.env.HOME || process.env.USERPROFILE || "";
55092
+ const allowedPrefixes = [path25.normalize(process.cwd())];
55093
+ if (home2) allowedPrefixes.push(path25.normalize(path25.join(home2, ".config", "visor")));
55094
+ let realPath;
55095
+ try {
55096
+ realPath = fs21.realpathSync(resolved);
55097
+ } catch {
55098
+ return null;
55099
+ }
55100
+ const isSafe = allowedPrefixes.some(
55101
+ (prefix) => realPath === prefix || realPath.startsWith(prefix + path25.sep)
55102
+ );
55103
+ if (!isSafe) return null;
55104
+ return this.readFile(realPath);
55105
+ }
55106
+ const cwdPath = path25.join(process.cwd(), ".visor-license");
55107
+ const cwdToken = this.readFile(cwdPath);
55108
+ if (cwdToken) return cwdToken;
55109
+ const home = process.env.HOME || process.env.USERPROFILE || "";
55110
+ if (home) {
55111
+ const configPath = path25.join(home, ".config", "visor", ".visor-license");
55112
+ const configToken = this.readFile(configPath);
55113
+ if (configToken) return configToken;
55114
+ }
55115
+ return null;
55116
+ }
55117
+ readFile(filePath) {
55118
+ try {
55119
+ return fs21.readFileSync(filePath, "utf-8").trim();
55120
+ } catch {
55121
+ return null;
55122
+ }
55123
+ }
55124
+ verifyAndDecode(token) {
55125
+ try {
55126
+ const parts = token.split(".");
55127
+ if (parts.length !== 3) return null;
55128
+ const [headerB64, payloadB64, signatureB64] = parts;
55129
+ const header = JSON.parse(Buffer.from(headerB64, "base64url").toString());
55130
+ if (header.alg !== "EdDSA") return null;
55131
+ const data = `${headerB64}.${payloadB64}`;
55132
+ const signature = Buffer.from(signatureB64, "base64url");
55133
+ const publicKey = crypto2.createPublicKey(_LicenseValidator.PUBLIC_KEY);
55134
+ if (publicKey.asymmetricKeyType !== "ed25519") {
55135
+ return null;
55136
+ }
55137
+ const isValid = crypto2.verify(null, Buffer.from(data), publicKey, signature);
55138
+ if (!isValid) return null;
55139
+ const payload = JSON.parse(Buffer.from(payloadB64, "base64url").toString());
55140
+ if (!payload.org || !Array.isArray(payload.features) || typeof payload.exp !== "number" || typeof payload.iat !== "number" || !payload.sub) {
55141
+ return null;
55142
+ }
55143
+ const now = Date.now();
55144
+ const expiryMs = payload.exp * 1e3;
55145
+ if (now >= expiryMs + _LicenseValidator.GRACE_PERIOD) {
55146
+ return null;
55147
+ }
55148
+ return payload;
55149
+ } catch {
55150
+ return null;
55151
+ }
55152
+ }
55153
+ };
55154
+ }
55155
+ });
55156
+
55157
+ // src/enterprise/policy/opa-compiler.ts
55158
+ var fs22, path26, os2, crypto3, import_child_process8, OpaCompiler;
55159
+ var init_opa_compiler = __esm({
55160
+ "src/enterprise/policy/opa-compiler.ts"() {
55161
+ "use strict";
55162
+ fs22 = __toESM(require("fs"));
55163
+ path26 = __toESM(require("path"));
55164
+ os2 = __toESM(require("os"));
55165
+ crypto3 = __toESM(require("crypto"));
55166
+ import_child_process8 = require("child_process");
55167
+ OpaCompiler = class _OpaCompiler {
55168
+ static CACHE_DIR = path26.join(os2.tmpdir(), "visor-opa-cache");
55169
+ /**
55170
+ * Resolve the input paths to WASM bytes.
55171
+ *
55172
+ * Strategy:
55173
+ * 1. If any path is a .wasm file, read it directly
55174
+ * 2. If a directory contains policy.wasm, read it
55175
+ * 3. Otherwise, collect all .rego files and auto-compile via `opa build`
55176
+ */
55177
+ async resolveWasmBytes(paths) {
55178
+ const regoFiles = [];
55179
+ for (const p of paths) {
55180
+ const resolved = path26.resolve(p);
55181
+ if (path26.normalize(resolved).includes("..")) {
55182
+ throw new Error(`Policy path contains traversal sequences: ${p}`);
55183
+ }
55184
+ if (resolved.endsWith(".wasm") && fs22.existsSync(resolved)) {
55185
+ return fs22.readFileSync(resolved);
55186
+ }
55187
+ if (!fs22.existsSync(resolved)) continue;
55188
+ const stat2 = fs22.statSync(resolved);
55189
+ if (stat2.isDirectory()) {
55190
+ const wasmCandidate = path26.join(resolved, "policy.wasm");
55191
+ if (fs22.existsSync(wasmCandidate)) {
55192
+ return fs22.readFileSync(wasmCandidate);
55193
+ }
55194
+ const files = fs22.readdirSync(resolved);
55195
+ for (const f of files) {
55196
+ if (f.endsWith(".rego")) {
55197
+ regoFiles.push(path26.join(resolved, f));
55198
+ }
55199
+ }
55200
+ } else if (resolved.endsWith(".rego")) {
55201
+ regoFiles.push(resolved);
55202
+ }
55203
+ }
55204
+ if (regoFiles.length === 0) {
55205
+ throw new Error(
55206
+ `OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(", ")}`
55207
+ );
55208
+ }
55209
+ return this.compileRego(regoFiles);
55210
+ }
55211
+ /**
55212
+ * Auto-compile .rego files to a WASM bundle using the `opa` CLI.
55213
+ *
55214
+ * Caches the compiled bundle based on a content hash of all input .rego files
55215
+ * so subsequent runs skip compilation if policies haven't changed.
55216
+ */
55217
+ compileRego(regoFiles) {
55218
+ try {
55219
+ (0, import_child_process8.execFileSync)("opa", ["version"], { stdio: "pipe" });
55220
+ } catch {
55221
+ throw new Error(
55222
+ "OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\nOr pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz " + regoFiles.join(" ")
55223
+ );
55224
+ }
55225
+ const hash = crypto3.createHash("sha256");
55226
+ for (const f of regoFiles.sort()) {
55227
+ hash.update(fs22.readFileSync(f));
55228
+ hash.update(f);
55229
+ }
55230
+ const cacheKey = hash.digest("hex").slice(0, 16);
55231
+ const cacheDir = _OpaCompiler.CACHE_DIR;
55232
+ const cachedWasm = path26.join(cacheDir, `${cacheKey}.wasm`);
55233
+ if (fs22.existsSync(cachedWasm)) {
55234
+ return fs22.readFileSync(cachedWasm);
55235
+ }
55236
+ fs22.mkdirSync(cacheDir, { recursive: true });
55237
+ const bundleTar = path26.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
55238
+ try {
55239
+ const args = [
55240
+ "build",
55241
+ "-t",
55242
+ "wasm",
55243
+ "-e",
55244
+ "visor",
55245
+ // entrypoint: the visor package tree
55246
+ "-o",
55247
+ bundleTar,
55248
+ ...regoFiles
55249
+ ];
55250
+ (0, import_child_process8.execFileSync)("opa", args, {
55251
+ stdio: "pipe",
55252
+ timeout: 3e4
55253
+ });
55254
+ } catch (err) {
55255
+ const stderr = err?.stderr?.toString() || "";
55256
+ throw new Error(
55257
+ `Failed to compile .rego files to WASM:
55258
+ ${stderr}
55259
+ Ensure your .rego files are valid and the \`opa\` CLI is installed.`
55260
+ );
55261
+ }
55262
+ try {
55263
+ (0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "/policy.wasm"], {
55264
+ stdio: "pipe"
55265
+ });
55266
+ const extractedWasm = path26.join(cacheDir, "policy.wasm");
55267
+ if (fs22.existsSync(extractedWasm)) {
55268
+ fs22.renameSync(extractedWasm, cachedWasm);
55269
+ }
55270
+ } catch {
55271
+ try {
55272
+ (0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "policy.wasm"], {
55273
+ stdio: "pipe"
55274
+ });
55275
+ const extractedWasm = path26.join(cacheDir, "policy.wasm");
55276
+ if (fs22.existsSync(extractedWasm)) {
55277
+ fs22.renameSync(extractedWasm, cachedWasm);
55278
+ }
55279
+ } catch (err2) {
55280
+ throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
55281
+ }
55282
+ }
55283
+ try {
55284
+ fs22.unlinkSync(bundleTar);
55285
+ } catch {
55286
+ }
55287
+ if (!fs22.existsSync(cachedWasm)) {
55288
+ throw new Error("OPA build succeeded but policy.wasm was not found in the bundle");
55289
+ }
55290
+ return fs22.readFileSync(cachedWasm);
55291
+ }
55292
+ };
55293
+ }
55294
+ });
55295
+
55296
+ // src/enterprise/policy/opa-wasm-evaluator.ts
55297
+ var fs23, path27, OpaWasmEvaluator;
55298
+ var init_opa_wasm_evaluator = __esm({
55299
+ "src/enterprise/policy/opa-wasm-evaluator.ts"() {
55300
+ "use strict";
55301
+ fs23 = __toESM(require("fs"));
55302
+ path27 = __toESM(require("path"));
55303
+ init_opa_compiler();
55304
+ OpaWasmEvaluator = class {
55305
+ policy = null;
55306
+ dataDocument = {};
55307
+ compiler = new OpaCompiler();
55308
+ async initialize(rulesPath) {
55309
+ const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
55310
+ const wasmBytes = await this.compiler.resolveWasmBytes(paths);
55311
+ try {
55312
+ const { createRequire } = require("module");
55313
+ const runtimeRequire = createRequire(__filename);
55314
+ const opaWasm = runtimeRequire("@open-policy-agent/opa-wasm");
55315
+ const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
55316
+ if (!loadPolicy) {
55317
+ throw new Error("loadPolicy not found in @open-policy-agent/opa-wasm");
55318
+ }
55319
+ this.policy = await loadPolicy(wasmBytes);
55320
+ } catch (err) {
55321
+ if (err?.code === "MODULE_NOT_FOUND" || err?.code === "ERR_MODULE_NOT_FOUND") {
55322
+ throw new Error(
55323
+ "OPA WASM evaluator requires @open-policy-agent/opa-wasm. Install it with: npm install @open-policy-agent/opa-wasm"
55324
+ );
55325
+ }
55326
+ throw err;
55327
+ }
55328
+ }
55329
+ /**
55330
+ * Load external data from a JSON file to use as the OPA data document.
55331
+ * The loaded data will be passed to `policy.setData()` during evaluation,
55332
+ * making it available in Rego via `data.<key>`.
55333
+ */
55334
+ loadData(dataPath) {
55335
+ const resolved = path27.resolve(dataPath);
55336
+ if (path27.normalize(resolved).includes("..")) {
55337
+ throw new Error(`Data path contains traversal sequences: ${dataPath}`);
55338
+ }
55339
+ if (!fs23.existsSync(resolved)) {
55340
+ throw new Error(`OPA data file not found: ${resolved}`);
55341
+ }
55342
+ const stat2 = fs23.statSync(resolved);
55343
+ if (stat2.size > 10 * 1024 * 1024) {
55344
+ throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat2.size} bytes)`);
55345
+ }
55346
+ const raw = fs23.readFileSync(resolved, "utf-8");
55347
+ try {
55348
+ const parsed = JSON.parse(raw);
55349
+ if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
55350
+ throw new Error("OPA data file must contain a JSON object (not an array or primitive)");
55351
+ }
55352
+ this.dataDocument = parsed;
55353
+ } catch (err) {
55354
+ if (err.message.startsWith("OPA data file must")) {
55355
+ throw err;
55356
+ }
55357
+ throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
55358
+ }
55359
+ }
55360
+ async evaluate(input) {
55361
+ if (!this.policy) {
55362
+ throw new Error("OPA WASM evaluator not initialized");
55363
+ }
55364
+ this.policy.setData(this.dataDocument);
55365
+ const resultSet = this.policy.evaluate(input);
55366
+ if (Array.isArray(resultSet) && resultSet.length > 0) {
55367
+ return resultSet[0].result;
55368
+ }
55369
+ return void 0;
55370
+ }
55371
+ async shutdown() {
55372
+ if (this.policy) {
55373
+ if (typeof this.policy.close === "function") {
55374
+ try {
55375
+ this.policy.close();
55376
+ } catch {
55377
+ }
55378
+ } else if (typeof this.policy.free === "function") {
55379
+ try {
55380
+ this.policy.free();
55381
+ } catch {
55382
+ }
55383
+ }
55384
+ }
55385
+ this.policy = null;
55386
+ }
55387
+ };
55388
+ }
55389
+ });
55390
+
55391
// src/enterprise/policy/opa-http-evaluator.ts
var OpaHttpEvaluator;
var init_opa_http_evaluator = __esm({
  "src/enterprise/policy/opa-http-evaluator.ts"() {
    "use strict";
    /**
     * Evaluates policy decisions against a remote OPA server via its REST
     * data API. The base URL is validated at construction time: only
     * http(s) URLs are accepted and hostnames that look like loopback,
     * link-local, private-network, or cloud-metadata addresses are rejected
     * (basic SSRF hardening).
     */
    OpaHttpEvaluator = class {
      baseUrl;
      timeout;
      /**
       * @param baseUrl - OPA server base URL (e.g. "https://opa.example.com")
       * @param timeout - per-request timeout in milliseconds (default 5000)
       * @throws if the URL is malformed, uses a non-http(s) protocol, or
       *         points at a blocked (internal) address
       */
      constructor(baseUrl, timeout = 5e3) {
        let parsed;
        try {
          parsed = new URL(baseUrl);
        } catch {
          throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
        }
        if (!["http:", "https:"].includes(parsed.protocol)) {
          throw new Error(
            `OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`
          );
        }
        const hostname = parsed.hostname;
        if (this.isBlockedHostname(hostname)) {
          throw new Error(
            `OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`
          );
        }
        // Strip trailing slashes so path concatenation below is predictable.
        this.baseUrl = baseUrl.replace(/\/+$/, "");
        this.timeout = timeout;
      }
      /**
       * Check if a hostname is blocked due to SSRF concerns.
       *
       * Blocks:
       * - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
       * - Link-local addresses (169.254.x.x, fe80::/10)
       * - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
       * - IPv6 unique local addresses (fc00::/7)
       * - Cloud metadata services (*.internal)
       *
       * NOTE(review): this is a string-level check only — the hostname is not
       * DNS-resolved, so only decimal dotted-quad IPv4 literals are caught;
       * a DNS name resolving to a private address is not detected here.
       */
      isBlockedHostname(hostname) {
        if (!hostname) return true;
        // Strip IPv6 brackets ("[::1]" -> "::1") and lowercase for matching.
        const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, "");
        if (normalized === "metadata.google.internal" || normalized.endsWith(".internal")) {
          return true;
        }
        if (normalized === "localhost" || normalized === "localhost.localdomain") {
          return true;
        }
        if (normalized === "::1" || normalized === "0:0:0:0:0:0:0:1") {
          return true;
        }
        const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
        const ipv4Match = normalized.match(ipv4Pattern);
        if (ipv4Match) {
          const octets = ipv4Match.slice(1, 5).map(Number);
          if (octets.some((octet) => octet > 255)) {
            // Not a valid IPv4 literal; treat it as an ordinary hostname.
            return false;
          }
          const [a, b] = octets;
          if (a === 127) {
            return true; // loopback
          }
          if (a === 0) {
            return true; // "this network" (0.0.0.0 etc.)
          }
          if (a === 169 && b === 254) {
            return true; // link-local / cloud metadata (169.254.169.254)
          }
          if (a === 10) {
            return true; // RFC1918 private
          }
          if (a === 172 && b >= 16 && b <= 31) {
            return true; // RFC1918 private
          }
          if (a === 192 && b === 168) {
            return true; // RFC1918 private
          }
        }
        // FIX: IPv6 prefix checks must only apply to IPv6 literals (which
        // always contain ":"). Previously a plain DNS name beginning with
        // "fd"/"fc" (e.g. "fdn.example.com") was incorrectly blocked.
        if (normalized.includes(":")) {
          if (normalized.startsWith("fd") || normalized.startsWith("fc")) {
            return true; // IPv6 unique local fc00::/7
          }
          if (normalized.startsWith("fe80:")) {
            return true; // IPv6 link-local
          }
        }
        return false;
      }
      /**
       * Evaluate a policy rule against an input document via OPA REST API.
       *
       * @param input - The input document to evaluate
       * @param rulePath - OPA rule path (e.g., 'visor/check/execute')
       * @returns The `result` field of the OPA response (may be undefined
       *          when OPA returns no result for the rule)
       * @throws on timeout (AbortError), non-2xx HTTP status, or a
       *         response body that is not valid JSON
       */
      async evaluate(input, rulePath) {
        // Encode each path segment separately so "/" separators survive.
        const encodedPath = rulePath.split("/").map((s) => encodeURIComponent(s)).join("/");
        const url = `${this.baseUrl}/v1/data/${encodedPath}`;
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), this.timeout);
        try {
          const response = await fetch(url, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ input }),
            signal: controller.signal
          });
          if (!response.ok) {
            throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
          }
          let body;
          try {
            body = await response.json();
          } catch (jsonErr) {
            throw new Error(
              `OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`
            );
          }
          return body?.result;
        } finally {
          // Always release the timer, whether the fetch succeeded or threw.
          clearTimeout(timer);
        }
      }
      /** No resources to release for the HTTP evaluator. */
      async shutdown() {
      }
    };
  }
});
55517
+
55518
// src/enterprise/policy/policy-input-builder.ts
var PolicyInputBuilder;
var init_policy_input_builder = __esm({
  "src/enterprise/policy/policy-input-builder.ts"() {
    "use strict";
    /**
     * Builds OPA input documents for the different policy scopes
     * (check execution, tool invocation, capability resolution) from the
     * configured roles plus the current actor/repository/PR context.
     */
    PolicyInputBuilder = class {
      roles;
      actor;
      repository;
      pullRequest;
      constructor(policyConfig, actor, repository, pullRequest) {
        this.roles = policyConfig.roles || {};
        this.actor = actor;
        this.repository = repository;
        this.pullRequest = pullRequest;
      }
      /** Resolve which roles apply to the current actor. */
      resolveRoles() {
        const matchedRoles = [];
        for (const [roleName, roleConfig] of Object.entries(this.roles)) {
          if (!this.#matchesIdentity(roleConfig)) {
            continue;
          }
          // A role may additionally be restricted to specific Slack channels;
          // an empty/missing list means no channel restriction.
          const channelAllowList = roleConfig.slack_channels;
          if (channelAllowList && channelAllowList.length > 0) {
            const channelId = this.actor.slack?.channelId;
            if (!channelId || !channelAllowList.includes(channelId)) {
              continue;
            }
          }
          matchedRoles.push(roleName);
        }
        return matchedRoles;
      }
      /**
       * True when the actor matches at least one identity criterion of the
       * role: GitHub author association, GitHub login, Slack user id, or
       * (case-insensitive) Slack email.
       */
      #matchesIdentity(roleConfig) {
        const actor = this.actor;
        if (roleConfig.author_association && actor.authorAssociation && roleConfig.author_association.includes(actor.authorAssociation)) {
          return true;
        }
        if (roleConfig.users && actor.login && roleConfig.users.includes(actor.login)) {
          return true;
        }
        if (roleConfig.slack_users && actor.slack?.userId && roleConfig.slack_users.includes(actor.slack.userId)) {
          return true;
        }
        if (roleConfig.emails && actor.slack?.email) {
          const actorEmail = actor.slack.email.toLowerCase();
          if (roleConfig.emails.some((candidate) => candidate.toLowerCase() === actorEmail)) {
            return true;
          }
        }
        return false;
      }
      /** Assemble the actor document, including resolved role names. */
      buildActor() {
        const { authorAssociation, login, isLocalMode, slack } = this.actor;
        const actorDoc = {
          authorAssociation,
          login,
          roles: this.resolveRoles(),
          isLocalMode
        };
        if (slack) {
          actorDoc.slack = slack;
        }
        return actorDoc;
      }
      /** Input document for the `check.execute` policy scope. */
      forCheckExecution(check) {
        const checkDoc = {
          id: check.id,
          type: check.type,
          group: check.group,
          tags: check.tags,
          criticality: check.criticality,
          sandbox: check.sandbox,
          policy: check.policy
        };
        return {
          scope: "check.execute",
          check: checkDoc,
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      /** Input document for the `tool.invoke` policy scope. */
      forToolInvocation(serverName, methodName, transport) {
        return {
          scope: "tool.invoke",
          tool: { serverName, methodName, transport },
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      /** Input document for the `capability.resolve` policy scope. */
      forCapabilityResolve(checkId, capabilities) {
        return {
          scope: "capability.resolve",
          check: { id: checkId, type: "ai" },
          capability: capabilities,
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
    };
  }
});
55612
+
55613
// src/enterprise/policy/opa-policy-engine.ts
var opa_policy_engine_exports = {};
__export(opa_policy_engine_exports, {
  OpaPolicyEngine: () => OpaPolicyEngine
});
var OpaPolicyEngine;
var init_opa_policy_engine = __esm({
  "src/enterprise/policy/opa-policy-engine.ts"() {
    "use strict";
    init_opa_wasm_evaluator();
    init_opa_http_evaluator();
    init_policy_input_builder();
    // Orchestrates OPA policy evaluation for check execution, tool
    // invocation and capability resolution. Delegates to either a local
    // WASM evaluator or a remote OPA HTTP server, and applies the
    // configured fallback ("deny" | "allow" | "warn") whenever evaluation
    // fails, times out, or yields no result.
    OpaPolicyEngine = class {
      evaluator = null; // OpaWasmEvaluator | OpaHttpEvaluator | null (null = policy disabled)
      fallback; // behaviour on failure/no-result: "deny" | "allow" | "warn"
      timeout; // per-evaluation timeout in milliseconds
      config;
      inputBuilder = null; // builds scope-specific OPA input documents
      logger = null; // best-effort; resolved lazily in initialize()
      constructor(config) {
        this.config = config;
        // NOTE: `||` means falsy config values (e.g. timeout 0) fall back
        // to the defaults "deny" / 5000ms.
        this.fallback = config.fallback || "deny";
        this.timeout = config.timeout || 5e3;
      }
      /**
       * Build actor/repo/PR context from environment variables and construct
       * the evaluator selected by `config.engine` ("local" WASM, "remote"
       * HTTP, or anything else = disabled).
       * @throws if "local" lacks `policy.rules` or "remote" lacks `policy.url`.
       */
      async initialize(config) {
        // Logger is optional infrastructure; swallow failures deliberately.
        try {
          this.logger = (init_logger(), __toCommonJS(logger_exports)).logger;
        } catch {
        }
        // Actor identity comes from Visor/GitHub Actions env vars; absence
        // of GITHUB_ACTIONS marks a local (non-CI) run.
        const actor = {
          authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
          login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
          isLocalMode: !process.env.GITHUB_ACTIONS
        };
        const repo = {
          owner: process.env.GITHUB_REPOSITORY_OWNER,
          name: process.env.GITHUB_REPOSITORY?.split("/")[1],
          branch: process.env.GITHUB_HEAD_REF,
          baseBranch: process.env.GITHUB_BASE_REF,
          event: process.env.GITHUB_EVENT_NAME
        };
        // Only propagate the PR number when it parses to a finite integer.
        const prNum = process.env.GITHUB_PR_NUMBER ? parseInt(process.env.GITHUB_PR_NUMBER, 10) : void 0;
        const pullRequest = {
          number: prNum !== void 0 && Number.isFinite(prNum) ? prNum : void 0
        };
        this.inputBuilder = new PolicyInputBuilder(config, actor, repo, pullRequest);
        if (config.engine === "local") {
          if (!config.rules) {
            throw new Error("OPA local mode requires `policy.rules` path to .wasm or .rego files");
          }
          const wasm = new OpaWasmEvaluator();
          await wasm.initialize(config.rules);
          // Optional external data document made available as `data.*` in Rego.
          if (config.data) {
            wasm.loadData(config.data);
          }
          this.evaluator = wasm;
        } else if (config.engine === "remote") {
          if (!config.url) {
            throw new Error("OPA remote mode requires `policy.url` pointing to OPA server");
          }
          this.evaluator = new OpaHttpEvaluator(config.url, this.timeout);
        } else {
          // Unknown/absent engine: policy checks become no-ops (allow all).
          this.evaluator = null;
        }
      }
      /**
       * Update actor/repo/PR context (e.g., after PR info becomes available).
       * Called by the enterprise loader when engine context is enriched.
       */
      setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new PolicyInputBuilder(this.config, actor, repo, pullRequest);
      }
      /**
       * Decide whether a check may run. Allows unconditionally when no
       * evaluator is configured. A per-check `policy.rule` override may
       * redirect evaluation to a custom rule path.
       */
      async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const cfg = checkConfig && typeof checkConfig === "object" ? checkConfig : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
          id: checkId,
          type: cfg.type || "ai",
          group: cfg.group,
          tags: cfg.tags,
          criticality: cfg.criticality,
          sandbox: cfg.sandbox,
          policy: policyOverride
        });
        return this.doEvaluate(input, this.resolveRulePath("check.execute", policyOverride?.rule));
      }
      /** Decide whether a tool (MCP server method) invocation is allowed. */
      async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, "visor/tool/invoke");
      }
      /** Decide whether the requested capabilities may be granted to a check. */
      async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, "visor/capability/resolve");
      }
      /** Release the underlying evaluator and drop all context. */
      async shutdown() {
        if (this.evaluator && "shutdown" in this.evaluator) {
          await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
      }
      /**
       * Map a scope name like "check.execute" to the OPA rule path
       * "visor/check/execute", honouring an explicit override (which gets a
       * "visor/" prefix added unless already present).
       */
      resolveRulePath(defaultScope, override) {
        if (override) {
          return override.startsWith("visor/") ? override : `visor/${override}`;
        }
        return `visor/${defaultScope.replace(/\./g, "/")}`;
      }
      /**
       * Run one evaluation with a timeout race, normalize the decision, and
       * translate failures into the configured fallback.
       * In "warn" fallback mode a denial is downgraded to an allow with
       * `warn: true` and an "audit:"-prefixed reason.
       */
      async doEvaluate(input, rulePath) {
        try {
          this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
          let timer;
          const timeoutPromise = new Promise((_resolve, reject) => {
            timer = setTimeout(() => reject(new Error("policy evaluation timeout")), this.timeout);
          });
          try {
            const result = await Promise.race([this.rawEvaluate(input, rulePath), timeoutPromise]);
            const decision = this.parseDecision(result);
            if (!decision.allowed && this.fallback === "warn") {
              // Audit mode: record the denial but do not block execution.
              decision.allowed = true;
              decision.warn = true;
              decision.reason = `audit: ${decision.reason || "policy denied"}`;
            }
            this.logger?.debug(
              `[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || "none"}`
            );
            return decision;
          } finally {
            // Clear the timer even when rawEvaluate wins the race or throws.
            if (timer) clearTimeout(timer);
          }
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
          // Evaluation error/timeout: apply the configured fallback.
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: `policy evaluation failed, fallback=${this.fallback}`
          };
        }
      }
      /**
       * Dispatch to the concrete evaluator. The WASM evaluator returns the
       * whole policy tree, so its result must be navigated down to the rule;
       * the HTTP evaluator takes the rule path directly.
       */
      async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof OpaWasmEvaluator) {
          const result = await this.evaluator.evaluate(input);
          return this.navigateWasmResult(result, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
      }
      /**
       * Navigate nested OPA WASM result tree to reach the specific rule's output.
       * The WASM entrypoint `-e visor` means the result root IS the visor package,
       * so we strip the `visor/` prefix and walk the remaining segments.
       * Returns undefined when any segment is missing.
       */
      navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== "object") return result;
        const segments = rulePath.replace(/^visor\//, "").split("/");
        let current = result;
        for (const seg of segments) {
          if (current && typeof current === "object" && seg in current) {
            current = current[seg];
          } else {
            return void 0;
          }
        }
        return current;
      }
      /**
       * Normalize a raw rule result into a decision object.
       * A missing result applies the fallback; otherwise the rule is treated
       * as allowed unless it explicitly sets `allowed: false`, and any
       * `reason`/`capabilities` fields are passed through.
       */
      parseDecision(result) {
        if (result === void 0 || result === null) {
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: this.fallback === "warn" ? "audit: no policy result" : "no policy result"
          };
        }
        // Absent `allowed` counts as allowed; only an explicit false denies.
        const allowed = result.allowed !== false;
        const decision = {
          allowed,
          reason: result.reason
        };
        if (result.capabilities) {
          decision.capabilities = result.capabilities;
        }
        return decision;
      }
    };
  }
});
55801
+
55802
// src/enterprise/scheduler/knex-store.ts
var knex_store_exports = {};
// Bundler export map: KnexStoreBackend is exposed through a thunk so the
// binding is read at access time, after init_knex_store() has assigned it.
__export(knex_store_exports, {
  KnexStoreBackend: () => KnexStoreBackend
});
55807
/**
 * Normalize a numeric DB column value to a JS number.
 * Some drivers return bigint columns as strings; those are parsed base-10.
 * null/undefined become undefined; other values pass through unchanged.
 */
function toNum(val) {
  if (val == null) {
    return undefined;
  }
  if (typeof val !== "string") {
    return val;
  }
  return parseInt(val, 10);
}
55811
/**
 * Parse a JSON text column, returning undefined instead of throwing.
 * Falsy inputs (null, undefined, "") also yield undefined.
 */
function safeJsonParse2(value) {
  if (!value) {
    return undefined;
  }
  let parsed;
  try {
    parsed = JSON.parse(value);
  } catch {
    return undefined;
  }
  return parsed;
}
55819
/**
 * Map a `message_triggers` DB row (snake_case columns, JSON stored as text)
 * back into a camelCase trigger object.
 */
function fromTriggerRow2(row) {
  // Boolean columns may arrive as true/false or as 0/1 depending on driver.
  const asBool = (value) => value === true || value === 1;
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: row.creator_context ?? undefined,
    creatorName: row.creator_name ?? undefined,
    description: row.description ?? undefined,
    channels: safeJsonParse2(row.channels),
    fromUsers: safeJsonParse2(row.from_users),
    fromBots: asBool(row.from_bots),
    contains: safeJsonParse2(row.contains),
    matchPattern: row.match_pattern ?? undefined,
    threads: row.threads,
    workflow: row.workflow,
    inputs: safeJsonParse2(row.inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    enabled: asBool(row.enabled),
    createdAt: toNum(row.created_at)
  };
}
55840
/**
 * Map a camelCase trigger object to a snake_case row for insertion into the
 * `message_triggers` table. Array/object fields are serialized to JSON text;
 * missing optional fields are stored as NULL.
 */
function toTriggerInsertRow(trigger) {
  // Serialize truthy structured values; anything falsy is stored as NULL.
  const jsonOrNull = (value) => (value ? JSON.stringify(value) : null);
  return {
    id: trigger.id,
    creator_id: trigger.creatorId,
    creator_context: trigger.creatorContext ?? null,
    creator_name: trigger.creatorName ?? null,
    description: trigger.description ?? null,
    channels: jsonOrNull(trigger.channels),
    from_users: jsonOrNull(trigger.fromUsers),
    from_bots: trigger.fromBots,
    contains: jsonOrNull(trigger.contains),
    match_pattern: trigger.matchPattern ?? null,
    threads: trigger.threads,
    workflow: trigger.workflow,
    inputs: jsonOrNull(trigger.inputs),
    output_context: jsonOrNull(trigger.outputContext),
    status: trigger.status,
    enabled: trigger.enabled,
    created_at: trigger.createdAt
  };
}
55861
/**
 * Map a `schedules` DB row (snake_case columns, JSON stored as text) back
 * into a camelCase schedule object. Note `schedule_expr` maps to `schedule`
 * to avoid the reserved-sounding column name.
 */
function fromDbRow2(row) {
  // Boolean columns may arrive as true/false or as 0/1 depending on driver.
  const asBool = (value) => value === true || value === 1;
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: row.creator_context ?? undefined,
    creatorName: row.creator_name ?? undefined,
    timezone: row.timezone,
    schedule: row.schedule_expr,
    runAt: toNum(row.run_at),
    isRecurring: asBool(row.is_recurring),
    originalExpression: row.original_expression,
    workflow: row.workflow ?? undefined,
    workflowInputs: safeJsonParse2(row.workflow_inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    createdAt: toNum(row.created_at),
    lastRunAt: toNum(row.last_run_at),
    nextRunAt: toNum(row.next_run_at),
    runCount: row.run_count,
    failureCount: row.failure_count,
    lastError: row.last_error ?? undefined,
    previousResponse: row.previous_response ?? undefined
  };
}
55885
/**
 * Map a camelCase schedule object to a snake_case row for insertion into the
 * `schedules` table. Structured fields are serialized to JSON text; missing
 * optional fields are stored as NULL. `schedule` maps to the `schedule_expr`
 * column.
 */
function toInsertRow(schedule) {
  // Serialize truthy structured values; anything falsy is stored as NULL.
  const jsonOrNull = (value) => (value ? JSON.stringify(value) : null);
  return {
    id: schedule.id,
    creator_id: schedule.creatorId,
    creator_context: schedule.creatorContext ?? null,
    creator_name: schedule.creatorName ?? null,
    timezone: schedule.timezone,
    schedule_expr: schedule.schedule,
    run_at: schedule.runAt ?? null,
    is_recurring: schedule.isRecurring,
    original_expression: schedule.originalExpression,
    workflow: schedule.workflow ?? null,
    workflow_inputs: jsonOrNull(schedule.workflowInputs),
    output_context: jsonOrNull(schedule.outputContext),
    status: schedule.status,
    created_at: schedule.createdAt,
    last_run_at: schedule.lastRunAt ?? null,
    next_run_at: schedule.nextRunAt ?? null,
    run_count: schedule.runCount,
    failure_count: schedule.failureCount,
    last_error: schedule.lastError ?? null,
    previous_response: schedule.previousResponse ?? null
  };
}
55909
+ var fs24, path28, import_uuid2, KnexStoreBackend;
55910
+ var init_knex_store = __esm({
55911
+ "src/enterprise/scheduler/knex-store.ts"() {
55912
+ "use strict";
55913
+ fs24 = __toESM(require("fs"));
55914
+ path28 = __toESM(require("path"));
55915
+ import_uuid2 = require("uuid");
55916
+ init_logger();
55917
+ KnexStoreBackend = class {
55918
+ knex = null;
55919
+ driver;
55920
+ connection;
55921
+ constructor(driver, storageConfig, _haConfig) {
55922
+ this.driver = driver;
55923
+ this.connection = storageConfig.connection || {};
55924
+ }
55925
+ async initialize() {
55926
+ const { createRequire } = require("module");
55927
+ const runtimeRequire = createRequire(__filename);
55928
+ let knexFactory;
55929
+ try {
55930
+ knexFactory = runtimeRequire("knex");
55931
+ } catch (err) {
55932
+ const code = err?.code;
55933
+ if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
55934
+ throw new Error(
55935
+ "knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
55936
+ );
55937
+ }
55938
+ throw err;
55939
+ }
55940
+ const clientMap = {
55941
+ postgresql: "pg",
55942
+ mysql: "mysql2",
55943
+ mssql: "tedious"
55944
+ };
55945
+ const client = clientMap[this.driver];
55946
+ let connection;
55947
+ if (this.connection.connection_string) {
55948
+ connection = this.connection.connection_string;
55949
+ } else if (this.driver === "mssql") {
55950
+ connection = this.buildMssqlConnection();
55951
+ } else {
55952
+ connection = this.buildStandardConnection();
55953
+ }
55954
+ this.knex = knexFactory({
55955
+ client,
55956
+ connection,
55957
+ pool: {
55958
+ min: this.connection.pool?.min ?? 0,
55959
+ max: this.connection.pool?.max ?? 10
55960
+ }
55961
+ });
55962
+ await this.migrateSchema();
55963
+ logger.info(`[KnexStore] Initialized (${this.driver})`);
55964
+ }
55965
+ buildStandardConnection() {
55966
+ return {
55967
+ host: this.connection.host || "localhost",
55968
+ port: this.connection.port,
55969
+ database: this.connection.database || "visor",
55970
+ user: this.connection.user,
55971
+ password: this.connection.password,
55972
+ ssl: this.resolveSslConfig()
55973
+ };
55974
+ }
55975
+ buildMssqlConnection() {
55976
+ const ssl = this.connection.ssl;
55977
+ const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
55978
+ return {
55979
+ server: this.connection.host || "localhost",
55980
+ port: this.connection.port,
55981
+ database: this.connection.database || "visor",
55982
+ user: this.connection.user,
55983
+ password: this.connection.password,
55984
+ options: {
55985
+ encrypt: sslEnabled,
55986
+ trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
55987
+ }
55988
+ };
55989
+ }
55990
+ resolveSslConfig() {
55991
+ const ssl = this.connection.ssl;
55992
+ if (ssl === false || ssl === void 0) return false;
55993
+ if (ssl === true) return { rejectUnauthorized: true };
55994
+ if (ssl.enabled === false) return false;
55995
+ const result = {
55996
+ rejectUnauthorized: ssl.reject_unauthorized !== false
55997
+ };
55998
+ if (ssl.ca) {
55999
+ const caPath = this.validateSslPath(ssl.ca, "CA certificate");
56000
+ result.ca = fs24.readFileSync(caPath, "utf8");
56001
+ }
56002
+ if (ssl.cert) {
56003
+ const certPath = this.validateSslPath(ssl.cert, "client certificate");
56004
+ result.cert = fs24.readFileSync(certPath, "utf8");
56005
+ }
56006
+ if (ssl.key) {
56007
+ const keyPath = this.validateSslPath(ssl.key, "client key");
56008
+ result.key = fs24.readFileSync(keyPath, "utf8");
56009
+ }
56010
+ return result;
56011
+ }
56012
+ validateSslPath(filePath, label) {
56013
+ const resolved = path28.resolve(filePath);
56014
+ if (resolved !== path28.normalize(resolved)) {
56015
+ throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
56016
+ }
56017
+ if (!fs24.existsSync(resolved)) {
56018
+ throw new Error(`SSL ${label} not found: ${filePath}`);
56019
+ }
56020
+ return resolved;
56021
+ }
56022
+ async shutdown() {
56023
+ if (this.knex) {
56024
+ await this.knex.destroy();
56025
+ this.knex = null;
56026
+ }
56027
+ }
56028
+ async migrateSchema() {
56029
+ const knex = this.getKnex();
56030
+ const exists = await knex.schema.hasTable("schedules");
56031
+ if (!exists) {
56032
+ await knex.schema.createTable("schedules", (table) => {
56033
+ table.string("id", 36).primary();
56034
+ table.string("creator_id", 255).notNullable().index();
56035
+ table.string("creator_context", 255);
56036
+ table.string("creator_name", 255);
56037
+ table.string("timezone", 64).notNullable().defaultTo("UTC");
56038
+ table.string("schedule_expr", 255);
56039
+ table.bigInteger("run_at");
56040
+ table.boolean("is_recurring").notNullable();
56041
+ table.text("original_expression");
56042
+ table.string("workflow", 255);
56043
+ table.text("workflow_inputs");
56044
+ table.text("output_context");
56045
+ table.string("status", 20).notNullable().index();
56046
+ table.bigInteger("created_at").notNullable();
56047
+ table.bigInteger("last_run_at");
56048
+ table.bigInteger("next_run_at");
56049
+ table.integer("run_count").notNullable().defaultTo(0);
56050
+ table.integer("failure_count").notNullable().defaultTo(0);
56051
+ table.text("last_error");
56052
+ table.text("previous_response");
56053
+ table.index(["status", "next_run_at"]);
56054
+ });
56055
+ }
56056
+ const triggersExist = await knex.schema.hasTable("message_triggers");
56057
+ if (!triggersExist) {
56058
+ await knex.schema.createTable("message_triggers", (table) => {
56059
+ table.string("id", 36).primary();
56060
+ table.string("creator_id", 255).notNullable().index();
56061
+ table.string("creator_context", 255);
56062
+ table.string("creator_name", 255);
56063
+ table.text("description");
56064
+ table.text("channels");
56065
+ table.text("from_users");
56066
+ table.boolean("from_bots").notNullable().defaultTo(false);
56067
+ table.text("contains");
56068
+ table.text("match_pattern");
56069
+ table.string("threads", 20).notNullable().defaultTo("any");
56070
+ table.string("workflow", 255).notNullable();
56071
+ table.text("inputs");
56072
+ table.text("output_context");
56073
+ table.string("status", 20).notNullable().defaultTo("active").index();
56074
+ table.boolean("enabled").notNullable().defaultTo(true);
56075
+ table.bigInteger("created_at").notNullable();
56076
+ });
56077
+ }
56078
+ const locksExist = await knex.schema.hasTable("scheduler_locks");
56079
+ if (!locksExist) {
56080
+ await knex.schema.createTable("scheduler_locks", (table) => {
56081
+ table.string("lock_id", 255).primary();
56082
+ table.string("node_id", 255).notNullable();
56083
+ table.string("lock_token", 36).notNullable();
56084
+ table.bigInteger("acquired_at").notNullable();
56085
+ table.bigInteger("expires_at").notNullable();
56086
+ });
56087
+ }
56088
+ }
56089
+ getKnex() {
56090
+ if (!this.knex) {
56091
+ throw new Error("[KnexStore] Not initialized. Call initialize() first.");
56092
+ }
56093
+ return this.knex;
56094
+ }
56095
+ // --- CRUD ---
56096
+ async create(schedule) {
56097
+ const knex = this.getKnex();
56098
+ const newSchedule = {
56099
+ ...schedule,
56100
+ id: (0, import_uuid2.v4)(),
56101
+ createdAt: Date.now(),
56102
+ runCount: 0,
56103
+ failureCount: 0,
56104
+ status: "active"
56105
+ };
56106
+ await knex("schedules").insert(toInsertRow(newSchedule));
56107
+ logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
56108
+ return newSchedule;
56109
+ }
56110
+ async importSchedule(schedule) {
56111
+ const knex = this.getKnex();
56112
+ const existing = await knex("schedules").where("id", schedule.id).first();
56113
+ if (existing) return;
56114
+ await knex("schedules").insert(toInsertRow(schedule));
56115
+ }
56116
+ async get(id) {
56117
+ const knex = this.getKnex();
56118
+ const row = await knex("schedules").where("id", id).first();
56119
+ return row ? fromDbRow2(row) : void 0;
56120
+ }
56121
+ async update(id, patch) {
56122
+ const knex = this.getKnex();
56123
+ const existing = await knex("schedules").where("id", id).first();
56124
+ if (!existing) return void 0;
56125
+ const current = fromDbRow2(existing);
56126
+ const updated = { ...current, ...patch, id: current.id };
56127
+ const row = toInsertRow(updated);
56128
+ delete row.id;
56129
+ await knex("schedules").where("id", id).update(row);
56130
+ return updated;
56131
+ }
56132
+ async delete(id) {
56133
+ const knex = this.getKnex();
56134
+ const deleted = await knex("schedules").where("id", id).del();
56135
+ if (deleted > 0) {
56136
+ logger.info(`[KnexStore] Deleted schedule ${id}`);
56137
+ return true;
56138
+ }
56139
+ return false;
56140
+ }
56141
+ // --- Queries ---
56142
+ async getByCreator(creatorId) {
56143
+ const knex = this.getKnex();
56144
+ const rows = await knex("schedules").where("creator_id", creatorId);
56145
+ return rows.map((r) => fromDbRow2(r));
56146
+ }
56147
+ async getActiveSchedules() {
56148
+ const knex = this.getKnex();
56149
+ const rows = await knex("schedules").where("status", "active");
56150
+ return rows.map((r) => fromDbRow2(r));
56151
+ }
56152
+ async getDueSchedules(now) {
56153
+ const ts = now ?? Date.now();
56154
+ const knex = this.getKnex();
56155
+ const bFalse = this.driver === "mssql" ? 0 : false;
56156
+ const bTrue = this.driver === "mssql" ? 1 : true;
56157
+ const rows = await knex("schedules").where("status", "active").andWhere(function() {
56158
+ this.where(function() {
56159
+ this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
56160
+ }).orWhere(function() {
56161
+ this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
56162
+ });
56163
+ });
56164
+ return rows.map((r) => fromDbRow2(r));
56165
+ }
56166
+ async findByWorkflow(creatorId, workflowName) {
56167
+ const knex = this.getKnex();
56168
+ const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
56169
+ const pattern = `%${escaped}%`;
56170
+ const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
56171
+ return rows.map((r) => fromDbRow2(r));
56172
+ }
56173
+ async getAll() {
56174
+ const knex = this.getKnex();
56175
+ const rows = await knex("schedules");
56176
+ return rows.map((r) => fromDbRow2(r));
56177
+ }
56178
+ async getStats() {
56179
+ const knex = this.getKnex();
56180
+ const boolTrue = this.driver === "mssql" ? "1" : "true";
56181
+ const boolFalse = this.driver === "mssql" ? "0" : "false";
56182
+ const result = await knex("schedules").select(
56183
+ knex.raw("COUNT(*) as total"),
56184
+ knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
56185
+ knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
56186
+ knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
56187
+ knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
56188
+ knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
56189
+ knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
56190
+ ).first();
56191
+ return {
56192
+ total: Number(result.total) || 0,
56193
+ active: Number(result.active) || 0,
56194
+ paused: Number(result.paused) || 0,
56195
+ completed: Number(result.completed) || 0,
56196
+ failed: Number(result.failed) || 0,
56197
+ recurring: Number(result.recurring) || 0,
56198
+ oneTime: Number(result.one_time) || 0
56199
+ };
56200
+ }
56201
+ async validateLimits(creatorId, isRecurring, limits) {
56202
+ const knex = this.getKnex();
56203
+ if (limits.maxGlobal) {
56204
+ const result = await knex("schedules").count("* as cnt").first();
56205
+ if (Number(result?.cnt) >= limits.maxGlobal) {
56206
+ throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
56207
+ }
56208
+ }
56209
+ if (limits.maxPerUser) {
56210
+ const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
56211
+ if (Number(result?.cnt) >= limits.maxPerUser) {
56212
+ throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
56213
+ }
56214
+ }
56215
+ if (isRecurring && limits.maxRecurringPerUser) {
56216
+ const bTrue = this.driver === "mssql" ? 1 : true;
56217
+ const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
56218
+ if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
56219
+ throw new Error(
56220
+ `You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
56221
+ );
56222
+ }
56223
+ }
56224
+ }
56225
+ // --- HA Distributed Locking (via scheduler_locks table) ---
56226
+ async tryAcquireLock(lockId, nodeId, ttlSeconds) {
56227
+ const knex = this.getKnex();
56228
+ const now = Date.now();
56229
+ const expiresAt = now + ttlSeconds * 1e3;
56230
+ const token = (0, import_uuid2.v4)();
56231
+ const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
56232
+ node_id: nodeId,
56233
+ lock_token: token,
56234
+ acquired_at: now,
56235
+ expires_at: expiresAt
56236
+ });
56237
+ if (updated > 0) return token;
56238
+ try {
56239
+ await knex("scheduler_locks").insert({
56240
+ lock_id: lockId,
56241
+ node_id: nodeId,
56242
+ lock_token: token,
56243
+ acquired_at: now,
56244
+ expires_at: expiresAt
56245
+ });
56246
+ return token;
56247
+ } catch {
56248
+ return null;
56249
+ }
56250
+ }
56251
+ async releaseLock(lockId, lockToken) {
56252
+ const knex = this.getKnex();
56253
+ await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
56254
+ }
56255
+ async renewLock(lockId, lockToken, ttlSeconds) {
56256
+ const knex = this.getKnex();
56257
+ const now = Date.now();
56258
+ const expiresAt = now + ttlSeconds * 1e3;
56259
+ const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
56260
+ return updated > 0;
56261
+ }
56262
+ async flush() {
56263
+ }
56264
+ // --- Message Trigger CRUD ---
56265
+ async createTrigger(trigger) {
56266
+ const knex = this.getKnex();
56267
+ const newTrigger = {
56268
+ ...trigger,
56269
+ id: (0, import_uuid2.v4)(),
56270
+ createdAt: Date.now()
56271
+ };
56272
+ await knex("message_triggers").insert(toTriggerInsertRow(newTrigger));
56273
+ logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
56274
+ return newTrigger;
56275
+ }
56276
+ async getTrigger(id) {
56277
+ const knex = this.getKnex();
56278
+ const row = await knex("message_triggers").where("id", id).first();
56279
+ return row ? fromTriggerRow2(row) : void 0;
56280
+ }
56281
+ async updateTrigger(id, patch) {
56282
+ const knex = this.getKnex();
56283
+ const existing = await knex("message_triggers").where("id", id).first();
56284
+ if (!existing) return void 0;
56285
+ const current = fromTriggerRow2(existing);
56286
+ const updated = {
56287
+ ...current,
56288
+ ...patch,
56289
+ id: current.id,
56290
+ createdAt: current.createdAt
56291
+ };
56292
+ const row = toTriggerInsertRow(updated);
56293
+ delete row.id;
56294
+ await knex("message_triggers").where("id", id).update(row);
56295
+ return updated;
56296
+ }
56297
+ async deleteTrigger(id) {
56298
+ const knex = this.getKnex();
56299
+ const deleted = await knex("message_triggers").where("id", id).del();
56300
+ if (deleted > 0) {
56301
+ logger.info(`[KnexStore] Deleted trigger ${id}`);
56302
+ return true;
56303
+ }
56304
+ return false;
56305
+ }
56306
+ async getTriggersByCreator(creatorId) {
56307
+ const knex = this.getKnex();
56308
+ const rows = await knex("message_triggers").where("creator_id", creatorId);
56309
+ return rows.map((r) => fromTriggerRow2(r));
56310
+ }
56311
+ async getActiveTriggers() {
56312
+ const knex = this.getKnex();
56313
+ const rows = await knex("message_triggers").where("status", "active").where("enabled", this.driver === "mssql" ? 1 : true);
56314
+ return rows.map((r) => fromTriggerRow2(r));
56315
+ }
56316
+ };
56317
+ }
56318
+ });
56319
+
56320
+ // src/enterprise/loader.ts
56321
+ var loader_exports = {};
56322
+ __export(loader_exports, {
56323
+ loadEnterprisePolicyEngine: () => loadEnterprisePolicyEngine,
56324
+ loadEnterpriseStoreBackend: () => loadEnterpriseStoreBackend
56325
+ });
56326
+ async function loadEnterprisePolicyEngine(config) {
56327
+ try {
56328
+ const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
56329
+ const validator = new LicenseValidator2();
56330
+ const license = await validator.loadAndValidate();
56331
+ if (!license || !validator.hasFeature("policy")) {
56332
+ return new DefaultPolicyEngine();
56333
+ }
56334
+ if (validator.isInGracePeriod()) {
56335
+ console.warn(
56336
+ "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
56337
+ );
56338
+ }
56339
+ const { OpaPolicyEngine: OpaPolicyEngine2 } = await Promise.resolve().then(() => (init_opa_policy_engine(), opa_policy_engine_exports));
56340
+ const engine = new OpaPolicyEngine2(config);
56341
+ await engine.initialize(config);
56342
+ return engine;
56343
+ } catch (err) {
56344
+ const msg = err instanceof Error ? err.message : String(err);
56345
+ try {
56346
+ const { logger: logger2 } = (init_logger(), __toCommonJS(logger_exports));
56347
+ logger2.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
56348
+ } catch {
56349
+ }
56350
+ return new DefaultPolicyEngine();
56351
+ }
56352
+ }
56353
+ async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
56354
+ const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
56355
+ const validator = new LicenseValidator2();
56356
+ const license = await validator.loadAndValidate();
56357
+ if (!license || !validator.hasFeature("scheduler-sql")) {
56358
+ throw new Error(
56359
+ `The ${driver} schedule storage driver requires a Visor Enterprise license with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`
56360
+ );
56361
+ }
56362
+ if (validator.isInGracePeriod()) {
56363
+ console.warn(
56364
+ "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
56365
+ );
56366
+ }
56367
+ const { KnexStoreBackend: KnexStoreBackend2 } = await Promise.resolve().then(() => (init_knex_store(), knex_store_exports));
56368
+ return new KnexStoreBackend2(driver, storageConfig, haConfig);
56369
+ }
56370
+ var init_loader = __esm({
56371
+ "src/enterprise/loader.ts"() {
56372
+ "use strict";
56373
+ init_default_engine();
56374
+ }
56375
+ });
56376
+
54863
56377
  // src/event-bus/event-bus.ts
54864
56378
  var event_bus_exports = {};
54865
56379
  __export(event_bus_exports, {
@@ -55766,8 +57280,8 @@ ${content}
55766
57280
  * Sleep utility
55767
57281
  */
55768
57282
  sleep(ms) {
55769
- return new Promise((resolve15) => {
55770
- const t = setTimeout(resolve15, ms);
57283
+ return new Promise((resolve19) => {
57284
+ const t = setTimeout(resolve19, ms);
55771
57285
  if (typeof t.unref === "function") {
55772
57286
  try {
55773
57287
  t.unref();
@@ -56052,8 +57566,8 @@ ${end}`);
56052
57566
  async updateGroupedComment(ctx, comments, group, changedIds) {
56053
57567
  const existingLock = this.updateLocks.get(group);
56054
57568
  let resolveLock;
56055
- const ourLock = new Promise((resolve15) => {
56056
- resolveLock = resolve15;
57569
+ const ourLock = new Promise((resolve19) => {
57570
+ resolveLock = resolve19;
56057
57571
  });
56058
57572
  this.updateLocks.set(group, ourLock);
56059
57573
  try {
@@ -56366,7 +57880,7 @@ ${blocks}
56366
57880
  * Sleep utility for enforcing delays
56367
57881
  */
56368
57882
  sleep(ms) {
56369
- return new Promise((resolve15) => setTimeout(resolve15, ms));
57883
+ return new Promise((resolve19) => setTimeout(resolve19, ms));
56370
57884
  }
56371
57885
  };
56372
57886
  }
@@ -57658,15 +59172,15 @@ function serializeRunState(state) {
57658
59172
  ])
57659
59173
  };
57660
59174
  }
57661
- var path26, fs22, StateMachineExecutionEngine;
59175
+ var path30, fs26, StateMachineExecutionEngine;
57662
59176
  var init_state_machine_execution_engine = __esm({
57663
59177
  "src/state-machine-execution-engine.ts"() {
57664
59178
  "use strict";
57665
59179
  init_runner();
57666
59180
  init_logger();
57667
59181
  init_sandbox_manager();
57668
- path26 = __toESM(require("path"));
57669
- fs22 = __toESM(require("fs"));
59182
+ path30 = __toESM(require("path"));
59183
+ fs26 = __toESM(require("fs"));
57670
59184
  StateMachineExecutionEngine = class _StateMachineExecutionEngine {
57671
59185
  workingDirectory;
57672
59186
  executionContext;
@@ -57898,8 +59412,8 @@ var init_state_machine_execution_engine = __esm({
57898
59412
  logger.debug(
57899
59413
  `[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`
57900
59414
  );
57901
- const { loadEnterprisePolicyEngine } = await import("./enterprise/loader");
57902
- context2.policyEngine = await loadEnterprisePolicyEngine(configWithTagFilter.policy);
59415
+ const { loadEnterprisePolicyEngine: loadEnterprisePolicyEngine2 } = await Promise.resolve().then(() => (init_loader(), loader_exports));
59416
+ context2.policyEngine = await loadEnterprisePolicyEngine2(configWithTagFilter.policy);
57903
59417
  logger.debug(
57904
59418
  `[PolicyEngine] Initialized: ${context2.policyEngine?.constructor?.name || "unknown"}`
57905
59419
  );
@@ -58051,9 +59565,9 @@ var init_state_machine_execution_engine = __esm({
58051
59565
  }
58052
59566
  const checkId = String(ev?.checkId || "unknown");
58053
59567
  const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
58054
- const baseDir = process.env.VISOR_SNAPSHOT_DIR || path26.resolve(process.cwd(), ".visor", "snapshots");
58055
- fs22.mkdirSync(baseDir, { recursive: true });
58056
- const filePath = path26.join(baseDir, `${threadKey}-${checkId}.json`);
59568
+ const baseDir = process.env.VISOR_SNAPSHOT_DIR || path30.resolve(process.cwd(), ".visor", "snapshots");
59569
+ fs26.mkdirSync(baseDir, { recursive: true });
59570
+ const filePath = path30.join(baseDir, `${threadKey}-${checkId}.json`);
58057
59571
  await this.saveSnapshotToFile(filePath);
58058
59572
  logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
58059
59573
  try {
@@ -58194,7 +59708,7 @@ var init_state_machine_execution_engine = __esm({
58194
59708
  * Does not include secrets. Intended for debugging and future resume support.
58195
59709
  */
58196
59710
  async saveSnapshotToFile(filePath) {
58197
- const fs23 = await import("fs/promises");
59711
+ const fs27 = await import("fs/promises");
58198
59712
  const ctx = this._lastContext;
58199
59713
  const runner = this._lastRunner;
58200
59714
  if (!ctx || !runner) {
@@ -58214,14 +59728,14 @@ var init_state_machine_execution_engine = __esm({
58214
59728
  journal: entries,
58215
59729
  requestedChecks: ctx.requestedChecks || []
58216
59730
  };
58217
- await fs23.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
59731
+ await fs27.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
58218
59732
  }
58219
59733
  /**
58220
59734
  * Load a snapshot JSON from file and return it. Resume support can build on this.
58221
59735
  */
58222
59736
  async loadSnapshotFromFile(filePath) {
58223
- const fs23 = await import("fs/promises");
58224
- const raw = await fs23.readFile(filePath, "utf8");
59737
+ const fs27 = await import("fs/promises");
59738
+ const raw = await fs27.readFile(filePath, "utf8");
58225
59739
  return JSON.parse(raw);
58226
59740
  }
58227
59741
  /**