@probelabs/visor 0.1.175 → 0.1.176-ee

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80) hide show
  1. package/defaults/assistant.yaml +16 -0
  2. package/defaults/code-talk.yaml +5 -6
  3. package/dist/ai-review-service.d.ts.map +1 -1
  4. package/dist/config.d.ts.map +1 -1
  5. package/dist/defaults/assistant.yaml +16 -0
  6. package/dist/defaults/code-talk.yaml +5 -6
  7. package/dist/index.js +1955 -53
  8. package/dist/providers/ai-check-provider.d.ts.map +1 -1
  9. package/dist/sdk/{check-provider-registry-7JPPJHVM.mjs → check-provider-registry-VE6LQPLY.mjs} +3 -3
  10. package/dist/sdk/{check-provider-registry-O36CQEGD.mjs → check-provider-registry-X7WH3PXQ.mjs} +3 -3
  11. package/dist/sdk/{chunk-TAK5HLAR.mjs → chunk-KQAT6H3S.mjs} +71 -32
  12. package/dist/sdk/chunk-KQAT6H3S.mjs.map +1 -0
  13. package/dist/sdk/{chunk-MLXGCLZJ.mjs → chunk-MM3TGVQ4.mjs} +6 -2
  14. package/dist/sdk/chunk-MM3TGVQ4.mjs.map +1 -0
  15. package/dist/sdk/{chunk-FZPCP444.mjs → chunk-OK4MLC3R.mjs} +69 -30
  16. package/dist/sdk/chunk-OK4MLC3R.mjs.map +1 -0
  17. package/dist/sdk/{config-4JMBJKWS.mjs → config-OOUMTCEA.mjs} +2 -2
  18. package/dist/sdk/{host-WTJBWO4T.mjs → host-LRWIKURZ.mjs} +3 -3
  19. package/dist/sdk/knex-store-QCEW4I4R.mjs +527 -0
  20. package/dist/sdk/knex-store-QCEW4I4R.mjs.map +1 -0
  21. package/dist/sdk/loader-Q7K76ZIY.mjs +89 -0
  22. package/dist/sdk/loader-Q7K76ZIY.mjs.map +1 -0
  23. package/dist/sdk/opa-policy-engine-QCSSIMUF.mjs +655 -0
  24. package/dist/sdk/opa-policy-engine-QCSSIMUF.mjs.map +1 -0
  25. package/dist/sdk/{schedule-tool-XVSYLH4Z.mjs → schedule-tool-INVLVX3G.mjs} +3 -3
  26. package/dist/sdk/{schedule-tool-DF5WUVYV.mjs → schedule-tool-R7NSHTPJ.mjs} +3 -3
  27. package/dist/sdk/{schedule-tool-handler-CFMFHDUL.mjs → schedule-tool-handler-4SSRQXFJ.mjs} +3 -3
  28. package/dist/sdk/{schedule-tool-handler-JGWA4N3C.mjs → schedule-tool-handler-5GTQ6SFI.mjs} +3 -3
  29. package/dist/sdk/sdk.js +1720 -303
  30. package/dist/sdk/sdk.js.map +1 -1
  31. package/dist/sdk/sdk.mjs +5 -5
  32. package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
  33. package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
  34. package/dist/sdk/{workflow-check-provider-Z6U7FZAF.mjs → workflow-check-provider-EY6VSMNG.mjs} +3 -3
  35. package/dist/sdk/{workflow-check-provider-ETM452BO.mjs → workflow-check-provider-IWZSZQ7N.mjs} +3 -3
  36. package/dist/test-runner/index.d.ts.map +1 -1
  37. package/package.json +1 -1
  38. package/dist/output/traces/run-2026-03-09T18-49-07-663Z.ndjson +0 -138
  39. package/dist/output/traces/run-2026-03-09T18-49-46-345Z.ndjson +0 -2296
  40. package/dist/sdk/a2a-frontend-ORLAU5GK.mjs +0 -1658
  41. package/dist/sdk/a2a-frontend-ORLAU5GK.mjs.map +0 -1
  42. package/dist/sdk/check-provider-registry-QCDV3SI6.mjs +0 -30
  43. package/dist/sdk/chunk-4FGX4SA6.mjs +0 -516
  44. package/dist/sdk/chunk-4FGX4SA6.mjs.map +0 -1
  45. package/dist/sdk/chunk-FZPCP444.mjs.map +0 -1
  46. package/dist/sdk/chunk-HNK5ZJ2L.mjs +0 -739
  47. package/dist/sdk/chunk-HNK5ZJ2L.mjs.map +0 -1
  48. package/dist/sdk/chunk-MLXGCLZJ.mjs.map +0 -1
  49. package/dist/sdk/chunk-QAO73GUX.mjs +0 -1502
  50. package/dist/sdk/chunk-QAO73GUX.mjs.map +0 -1
  51. package/dist/sdk/chunk-TAK5HLAR.mjs.map +0 -1
  52. package/dist/sdk/chunk-YVVOG7RP.mjs +0 -45155
  53. package/dist/sdk/chunk-YVVOG7RP.mjs.map +0 -1
  54. package/dist/sdk/failure-condition-evaluator-RM5JJS4Q.mjs +0 -18
  55. package/dist/sdk/github-frontend-O5IAWXL5.mjs +0 -1386
  56. package/dist/sdk/github-frontend-O5IAWXL5.mjs.map +0 -1
  57. package/dist/sdk/routing-AWOHU2WP.mjs +0 -26
  58. package/dist/sdk/schedule-tool-L5G2BRIG.mjs +0 -36
  59. package/dist/sdk/schedule-tool-handler-CFMFHDUL.mjs.map +0 -1
  60. package/dist/sdk/schedule-tool-handler-JGWA4N3C.mjs.map +0 -1
  61. package/dist/sdk/schedule-tool-handler-UJ4RFTW2.mjs +0 -40
  62. package/dist/sdk/schedule-tool-handler-UJ4RFTW2.mjs.map +0 -1
  63. package/dist/sdk/trace-helpers-4ERTVCZG.mjs +0 -29
  64. package/dist/sdk/trace-helpers-4ERTVCZG.mjs.map +0 -1
  65. package/dist/sdk/workflow-check-provider-ETM452BO.mjs.map +0 -1
  66. package/dist/sdk/workflow-check-provider-I3XLJP6V.mjs +0 -30
  67. package/dist/sdk/workflow-check-provider-I3XLJP6V.mjs.map +0 -1
  68. package/dist/sdk/workflow-check-provider-Z6U7FZAF.mjs.map +0 -1
  69. package/dist/traces/run-2026-03-09T18-49-07-663Z.ndjson +0 -138
  70. package/dist/traces/run-2026-03-09T18-49-46-345Z.ndjson +0 -2296
  71. /package/dist/sdk/{check-provider-registry-7JPPJHVM.mjs.map → check-provider-registry-VE6LQPLY.mjs.map} +0 -0
  72. /package/dist/sdk/{check-provider-registry-O36CQEGD.mjs.map → check-provider-registry-X7WH3PXQ.mjs.map} +0 -0
  73. /package/dist/sdk/{check-provider-registry-QCDV3SI6.mjs.map → config-OOUMTCEA.mjs.map} +0 -0
  74. /package/dist/sdk/{host-WTJBWO4T.mjs.map → host-LRWIKURZ.mjs.map} +0 -0
  75. /package/dist/sdk/{config-4JMBJKWS.mjs.map → schedule-tool-INVLVX3G.mjs.map} +0 -0
  76. /package/dist/sdk/{failure-condition-evaluator-RM5JJS4Q.mjs.map → schedule-tool-R7NSHTPJ.mjs.map} +0 -0
  77. /package/dist/sdk/{routing-AWOHU2WP.mjs.map → schedule-tool-handler-4SSRQXFJ.mjs.map} +0 -0
  78. /package/dist/sdk/{schedule-tool-DF5WUVYV.mjs.map → schedule-tool-handler-5GTQ6SFI.mjs.map} +0 -0
  79. /package/dist/sdk/{schedule-tool-L5G2BRIG.mjs.map → workflow-check-provider-EY6VSMNG.mjs.map} +0 -0
  80. /package/dist/sdk/{schedule-tool-XVSYLH4Z.mjs.map → workflow-check-provider-IWZSZQ7N.mjs.map} +0 -0
package/dist/sdk/sdk.js CHANGED
@@ -704,7 +704,7 @@ var require_package = __commonJS({
704
704
  "package.json"(exports2, module2) {
705
705
  module2.exports = {
706
706
  name: "@probelabs/visor",
707
- version: "0.1.175",
707
+ version: "0.1.42",
708
708
  main: "dist/index.js",
709
709
  bin: {
710
710
  visor: "./dist/index.js"
@@ -1152,11 +1152,11 @@ function getTracer() {
1152
1152
  }
1153
1153
  async function withActiveSpan(name, attrs, fn) {
1154
1154
  const tracer = getTracer();
1155
- return await new Promise((resolve15, reject) => {
1155
+ return await new Promise((resolve19, reject) => {
1156
1156
  const callback = async (span) => {
1157
1157
  try {
1158
1158
  const res = await fn(span);
1159
- resolve15(res);
1159
+ resolve19(res);
1160
1160
  } catch (err) {
1161
1161
  try {
1162
1162
  if (err instanceof Error) span.recordException(err);
@@ -1281,19 +1281,19 @@ function __getOrCreateNdjsonPath() {
1281
1281
  try {
1282
1282
  if (process.env.VISOR_TELEMETRY_SINK && process.env.VISOR_TELEMETRY_SINK !== "file")
1283
1283
  return null;
1284
- const path29 = require("path");
1285
- const fs25 = require("fs");
1284
+ const path33 = require("path");
1285
+ const fs29 = require("fs");
1286
1286
  if (process.env.VISOR_FALLBACK_TRACE_FILE) {
1287
1287
  __ndjsonPath = process.env.VISOR_FALLBACK_TRACE_FILE;
1288
- const dir = path29.dirname(__ndjsonPath);
1289
- if (!fs25.existsSync(dir)) fs25.mkdirSync(dir, { recursive: true });
1288
+ const dir = path33.dirname(__ndjsonPath);
1289
+ if (!fs29.existsSync(dir)) fs29.mkdirSync(dir, { recursive: true });
1290
1290
  return __ndjsonPath;
1291
1291
  }
1292
- const outDir = process.env.VISOR_TRACE_DIR || path29.join(process.cwd(), "output", "traces");
1293
- if (!fs25.existsSync(outDir)) fs25.mkdirSync(outDir, { recursive: true });
1292
+ const outDir = process.env.VISOR_TRACE_DIR || path33.join(process.cwd(), "output", "traces");
1293
+ if (!fs29.existsSync(outDir)) fs29.mkdirSync(outDir, { recursive: true });
1294
1294
  if (!__ndjsonPath) {
1295
1295
  const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
1296
- __ndjsonPath = path29.join(outDir, `${ts}.ndjson`);
1296
+ __ndjsonPath = path33.join(outDir, `${ts}.ndjson`);
1297
1297
  }
1298
1298
  return __ndjsonPath;
1299
1299
  } catch {
@@ -1302,11 +1302,11 @@ function __getOrCreateNdjsonPath() {
1302
1302
  }
1303
1303
  function _appendRunMarker() {
1304
1304
  try {
1305
- const fs25 = require("fs");
1305
+ const fs29 = require("fs");
1306
1306
  const p = __getOrCreateNdjsonPath();
1307
1307
  if (!p) return;
1308
1308
  const line = { name: "visor.run", attributes: { started: true } };
1309
- fs25.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
1309
+ fs29.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
1310
1310
  } catch {
1311
1311
  }
1312
1312
  }
@@ -3393,7 +3393,7 @@ var init_failure_condition_evaluator = __esm({
3393
3393
  */
3394
3394
  evaluateExpression(condition, context2) {
3395
3395
  try {
3396
- const normalize4 = (expr) => {
3396
+ const normalize8 = (expr) => {
3397
3397
  const trimmed = expr.trim();
3398
3398
  if (!/[\n;]/.test(trimmed)) return trimmed;
3399
3399
  const parts = trimmed.split(/[\n;]+/).map((s) => s.trim()).filter((s) => s.length > 0 && !s.startsWith("//"));
@@ -3551,7 +3551,7 @@ var init_failure_condition_evaluator = __esm({
3551
3551
  try {
3552
3552
  exec2 = this.sandbox.compile(`return (${raw});`);
3553
3553
  } catch {
3554
- const normalizedExpr = normalize4(condition);
3554
+ const normalizedExpr = normalize8(condition);
3555
3555
  exec2 = this.sandbox.compile(`return (${normalizedExpr});`);
3556
3556
  }
3557
3557
  const result = exec2(scope).run();
@@ -3934,9 +3934,9 @@ function configureLiquidWithExtensions(liquid) {
3934
3934
  });
3935
3935
  liquid.registerFilter("get", (obj, pathExpr) => {
3936
3936
  if (obj == null) return void 0;
3937
- const path29 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
3938
- if (!path29) return obj;
3939
- const parts = path29.split(".");
3937
+ const path33 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
3938
+ if (!path33) return obj;
3939
+ const parts = path33.split(".");
3940
3940
  let cur = obj;
3941
3941
  for (const p of parts) {
3942
3942
  if (cur == null) return void 0;
@@ -4055,9 +4055,9 @@ function configureLiquidWithExtensions(liquid) {
4055
4055
  }
4056
4056
  }
4057
4057
  const defaultRole = typeof rolesCfg.default === "string" && rolesCfg.default.trim() ? rolesCfg.default.trim() : void 0;
4058
- const getNested = (obj, path29) => {
4059
- if (!obj || !path29) return void 0;
4060
- const parts = path29.split(".");
4058
+ const getNested = (obj, path33) => {
4059
+ if (!obj || !path33) return void 0;
4060
+ const parts = path33.split(".");
4061
4061
  let cur = obj;
4062
4062
  for (const p of parts) {
4063
4063
  if (cur == null) return void 0;
@@ -6609,8 +6609,8 @@ var init_dependency_gating = __esm({
6609
6609
  async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
6610
6610
  try {
6611
6611
  const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
6612
- const fs25 = await import("fs/promises");
6613
- const path29 = await import("path");
6612
+ const fs29 = await import("fs/promises");
6613
+ const path33 = await import("path");
6614
6614
  const schemaRaw = checkConfig.schema || "plain";
6615
6615
  const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
6616
6616
  let templateContent;
@@ -6618,24 +6618,24 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
6618
6618
  templateContent = String(checkConfig.template.content);
6619
6619
  } else if (checkConfig.template && checkConfig.template.file) {
6620
6620
  const file = String(checkConfig.template.file);
6621
- const resolved = path29.resolve(process.cwd(), file);
6622
- templateContent = await fs25.readFile(resolved, "utf-8");
6621
+ const resolved = path33.resolve(process.cwd(), file);
6622
+ templateContent = await fs29.readFile(resolved, "utf-8");
6623
6623
  } else if (schema && schema !== "plain") {
6624
6624
  const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
6625
6625
  if (sanitized) {
6626
6626
  const candidatePaths = [
6627
- path29.join(__dirname, "output", sanitized, "template.liquid"),
6627
+ path33.join(__dirname, "output", sanitized, "template.liquid"),
6628
6628
  // bundled: dist/output/
6629
- path29.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
6629
+ path33.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
6630
6630
  // source: output/
6631
- path29.join(process.cwd(), "output", sanitized, "template.liquid"),
6631
+ path33.join(process.cwd(), "output", sanitized, "template.liquid"),
6632
6632
  // fallback: cwd/output/
6633
- path29.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
6633
+ path33.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
6634
6634
  // fallback: cwd/dist/output/
6635
6635
  ];
6636
6636
  for (const p of candidatePaths) {
6637
6637
  try {
6638
- templateContent = await fs25.readFile(p, "utf-8");
6638
+ templateContent = await fs29.readFile(p, "utf-8");
6639
6639
  if (templateContent) break;
6640
6640
  } catch {
6641
6641
  }
@@ -7040,7 +7040,7 @@ async function processDiffWithOutline(diffContent) {
7040
7040
  }
7041
7041
  try {
7042
7042
  const originalProbePath = process.env.PROBE_PATH;
7043
- const fs25 = require("fs");
7043
+ const fs29 = require("fs");
7044
7044
  const possiblePaths = [
7045
7045
  // Relative to current working directory (most common in production)
7046
7046
  path6.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
@@ -7051,7 +7051,7 @@ async function processDiffWithOutline(diffContent) {
7051
7051
  ];
7052
7052
  let probeBinaryPath;
7053
7053
  for (const candidatePath of possiblePaths) {
7054
- if (fs25.existsSync(candidatePath)) {
7054
+ if (fs29.existsSync(candidatePath)) {
7055
7055
  probeBinaryPath = candidatePath;
7056
7056
  break;
7057
7057
  }
@@ -7158,7 +7158,7 @@ async function renderMermaidToPng(mermaidCode) {
7158
7158
  if (chromiumPath) {
7159
7159
  env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;
7160
7160
  }
7161
- const result = await new Promise((resolve15) => {
7161
+ const result = await new Promise((resolve19) => {
7162
7162
  const proc = (0, import_child_process.spawn)(
7163
7163
  "npx",
7164
7164
  [
@@ -7188,13 +7188,13 @@ async function renderMermaidToPng(mermaidCode) {
7188
7188
  });
7189
7189
  proc.on("close", (code) => {
7190
7190
  if (code === 0) {
7191
- resolve15({ success: true });
7191
+ resolve19({ success: true });
7192
7192
  } else {
7193
- resolve15({ success: false, error: stderr || `Exit code ${code}` });
7193
+ resolve19({ success: false, error: stderr || `Exit code ${code}` });
7194
7194
  }
7195
7195
  });
7196
7196
  proc.on("error", (err) => {
7197
- resolve15({ success: false, error: err.message });
7197
+ resolve19({ success: false, error: err.message });
7198
7198
  });
7199
7199
  });
7200
7200
  if (!result.success) {
@@ -7339,11 +7339,32 @@ function getCurrentDateXml() {
7339
7339
  }
7340
7340
  function createProbeTracerAdapter(fallbackTracer) {
7341
7341
  const fallback = fallbackTracer && typeof fallbackTracer === "object" ? fallbackTracer : null;
7342
+ const flattenAttrs = (attrs) => {
7343
+ if (!attrs) return attrs;
7344
+ const out = {};
7345
+ for (const [k, v] of Object.entries(attrs)) {
7346
+ if (v === null || v === void 0) continue;
7347
+ if (typeof v === "string" || typeof v === "number" || typeof v === "boolean") {
7348
+ out[k] = v;
7349
+ } else if (Array.isArray(v)) {
7350
+ if (v.length > 0 && typeof v[0] === "object") {
7351
+ out[k] = JSON.stringify(v);
7352
+ } else {
7353
+ out[k] = v;
7354
+ }
7355
+ } else if (typeof v === "object") {
7356
+ out[k] = JSON.stringify(v);
7357
+ } else {
7358
+ out[k] = v;
7359
+ }
7360
+ }
7361
+ return out;
7362
+ };
7342
7363
  const emitEvent = (name, attrs) => {
7343
7364
  try {
7344
7365
  const span = trace.getActiveSpan();
7345
7366
  if (span && typeof span.addEvent === "function") {
7346
- span.addEvent(name, attrs);
7367
+ span.addEvent(name, flattenAttrs(attrs));
7347
7368
  }
7348
7369
  } catch {
7349
7370
  }
@@ -7395,6 +7416,21 @@ function createProbeTracerAdapter(fallbackTracer) {
7395
7416
  }
7396
7417
  }
7397
7418
  },
7419
+ recordToolDecision: (toolName, params, metadata) => {
7420
+ const paramsStr = typeof params === "string" ? params : JSON.stringify(params || {});
7421
+ emitEvent("tool.decision", {
7422
+ "tool.name": toolName,
7423
+ "tool.params": paramsStr.substring(0, 5e3),
7424
+ "tool.params.length": paramsStr.length,
7425
+ ...metadata || {}
7426
+ });
7427
+ if (fallback && typeof fallback.recordToolDecision === "function") {
7428
+ try {
7429
+ fallback.recordToolDecision(toolName, params, metadata);
7430
+ } catch {
7431
+ }
7432
+ }
7433
+ },
7398
7434
  recordDelegationEvent: (phase, attrs) => {
7399
7435
  emitEvent(`delegation.${phase}`, attrs);
7400
7436
  if (fallback && typeof fallback.recordDelegationEvent === "function") {
@@ -8356,8 +8392,8 @@ ${schemaString}`);
8356
8392
  }
8357
8393
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8358
8394
  try {
8359
- const fs25 = require("fs");
8360
- const path29 = require("path");
8395
+ const fs29 = require("fs");
8396
+ const path33 = require("path");
8361
8397
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8362
8398
  const provider = this.config.provider || "auto";
8363
8399
  const model = this.config.model || "default";
@@ -8471,20 +8507,20 @@ ${"=".repeat(60)}
8471
8507
  `;
8472
8508
  readableVersion += `${"=".repeat(60)}
8473
8509
  `;
8474
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path29.join(process.cwd(), "debug-artifacts");
8475
- if (!fs25.existsSync(debugArtifactsDir)) {
8476
- fs25.mkdirSync(debugArtifactsDir, { recursive: true });
8510
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
8511
+ if (!fs29.existsSync(debugArtifactsDir)) {
8512
+ fs29.mkdirSync(debugArtifactsDir, { recursive: true });
8477
8513
  }
8478
- const debugFile = path29.join(
8514
+ const debugFile = path33.join(
8479
8515
  debugArtifactsDir,
8480
8516
  `prompt-${_checkName || "unknown"}-${timestamp}.json`
8481
8517
  );
8482
- fs25.writeFileSync(debugFile, debugJson, "utf-8");
8483
- const readableFile = path29.join(
8518
+ fs29.writeFileSync(debugFile, debugJson, "utf-8");
8519
+ const readableFile = path33.join(
8484
8520
  debugArtifactsDir,
8485
8521
  `prompt-${_checkName || "unknown"}-${timestamp}.txt`
8486
8522
  );
8487
- fs25.writeFileSync(readableFile, readableVersion, "utf-8");
8523
+ fs29.writeFileSync(readableFile, readableVersion, "utf-8");
8488
8524
  log(`
8489
8525
  \u{1F4BE} Full debug info saved to:`);
8490
8526
  log(` JSON: ${debugFile}`);
@@ -8522,8 +8558,8 @@ ${"=".repeat(60)}
8522
8558
  log(`\u{1F4E4} Response length: ${response.length} characters`);
8523
8559
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8524
8560
  try {
8525
- const fs25 = require("fs");
8526
- const path29 = require("path");
8561
+ const fs29 = require("fs");
8562
+ const path33 = require("path");
8527
8563
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8528
8564
  const agentAny2 = agent;
8529
8565
  let fullHistory = [];
@@ -8534,8 +8570,8 @@ ${"=".repeat(60)}
8534
8570
  } else if (agentAny2._messages) {
8535
8571
  fullHistory = agentAny2._messages;
8536
8572
  }
8537
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path29.join(process.cwd(), "debug-artifacts");
8538
- const sessionBase = path29.join(
8573
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
8574
+ const sessionBase = path33.join(
8539
8575
  debugArtifactsDir,
8540
8576
  `session-${_checkName || "unknown"}-${timestamp}`
8541
8577
  );
@@ -8547,7 +8583,7 @@ ${"=".repeat(60)}
8547
8583
  schema: effectiveSchema,
8548
8584
  totalMessages: fullHistory.length
8549
8585
  };
8550
- fs25.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8586
+ fs29.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8551
8587
  let readable = `=============================================================
8552
8588
  `;
8553
8589
  readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
@@ -8574,7 +8610,7 @@ ${"=".repeat(60)}
8574
8610
  `;
8575
8611
  readable += content + "\n";
8576
8612
  });
8577
- fs25.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8613
+ fs29.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8578
8614
  log(`\u{1F4BE} Complete session history saved:`);
8579
8615
  log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
8580
8616
  } catch (error) {
@@ -8583,11 +8619,11 @@ ${"=".repeat(60)}
8583
8619
  }
8584
8620
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8585
8621
  try {
8586
- const fs25 = require("fs");
8587
- const path29 = require("path");
8622
+ const fs29 = require("fs");
8623
+ const path33 = require("path");
8588
8624
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8589
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path29.join(process.cwd(), "debug-artifacts");
8590
- const responseFile = path29.join(
8625
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
8626
+ const responseFile = path33.join(
8591
8627
  debugArtifactsDir,
8592
8628
  `response-${_checkName || "unknown"}-${timestamp}.txt`
8593
8629
  );
@@ -8620,7 +8656,7 @@ ${"=".repeat(60)}
8620
8656
  `;
8621
8657
  responseContent += `${"=".repeat(60)}
8622
8658
  `;
8623
- fs25.writeFileSync(responseFile, responseContent, "utf-8");
8659
+ fs29.writeFileSync(responseFile, responseContent, "utf-8");
8624
8660
  log(`\u{1F4BE} Response saved to: ${responseFile}`);
8625
8661
  } catch (error) {
8626
8662
  log(`\u26A0\uFE0F Could not save response file: ${error}`);
@@ -8636,9 +8672,9 @@ ${"=".repeat(60)}
8636
8672
  await agentAny._telemetryConfig.shutdown();
8637
8673
  log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
8638
8674
  if (process.env.GITHUB_ACTIONS) {
8639
- const fs25 = require("fs");
8640
- if (fs25.existsSync(agentAny._traceFilePath)) {
8641
- const stats = fs25.statSync(agentAny._traceFilePath);
8675
+ const fs29 = require("fs");
8676
+ if (fs29.existsSync(agentAny._traceFilePath)) {
8677
+ const stats = fs29.statSync(agentAny._traceFilePath);
8642
8678
  console.log(
8643
8679
  `::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
8644
8680
  );
@@ -8851,9 +8887,9 @@ ${schemaString}`);
8851
8887
  const model = this.config.model || "default";
8852
8888
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8853
8889
  try {
8854
- const fs25 = require("fs");
8855
- const path29 = require("path");
8856
- const os2 = require("os");
8890
+ const fs29 = require("fs");
8891
+ const path33 = require("path");
8892
+ const os3 = require("os");
8857
8893
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8858
8894
  const debugData = {
8859
8895
  timestamp,
@@ -8925,19 +8961,19 @@ ${"=".repeat(60)}
8925
8961
  `;
8926
8962
  readableVersion += `${"=".repeat(60)}
8927
8963
  `;
8928
- const tempDir = os2.tmpdir();
8929
- const promptFile = path29.join(tempDir, `visor-prompt-${timestamp}.txt`);
8930
- fs25.writeFileSync(promptFile, prompt, "utf-8");
8964
+ const tempDir = os3.tmpdir();
8965
+ const promptFile = path33.join(tempDir, `visor-prompt-${timestamp}.txt`);
8966
+ fs29.writeFileSync(promptFile, prompt, "utf-8");
8931
8967
  log(`
8932
8968
  \u{1F4BE} Prompt saved to: ${promptFile}`);
8933
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path29.join(process.cwd(), "debug-artifacts");
8969
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
8934
8970
  try {
8935
- const base = path29.join(
8971
+ const base = path33.join(
8936
8972
  debugArtifactsDir,
8937
8973
  `prompt-${_checkName || "unknown"}-${timestamp}`
8938
8974
  );
8939
- fs25.writeFileSync(base + ".json", debugJson, "utf-8");
8940
- fs25.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
8975
+ fs29.writeFileSync(base + ".json", debugJson, "utf-8");
8976
+ fs29.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
8941
8977
  log(`
8942
8978
  \u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
8943
8979
  } catch {
@@ -8987,8 +9023,8 @@ $ ${cliCommand}
8987
9023
  log(`\u{1F4E4} Response length: ${response.length} characters`);
8988
9024
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8989
9025
  try {
8990
- const fs25 = require("fs");
8991
- const path29 = require("path");
9026
+ const fs29 = require("fs");
9027
+ const path33 = require("path");
8992
9028
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8993
9029
  const agentAny = agent;
8994
9030
  let fullHistory = [];
@@ -8999,8 +9035,8 @@ $ ${cliCommand}
8999
9035
  } else if (agentAny._messages) {
9000
9036
  fullHistory = agentAny._messages;
9001
9037
  }
9002
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path29.join(process.cwd(), "debug-artifacts");
9003
- const sessionBase = path29.join(
9038
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
9039
+ const sessionBase = path33.join(
9004
9040
  debugArtifactsDir,
9005
9041
  `session-${_checkName || "unknown"}-${timestamp}`
9006
9042
  );
@@ -9012,7 +9048,7 @@ $ ${cliCommand}
9012
9048
  schema: effectiveSchema,
9013
9049
  totalMessages: fullHistory.length
9014
9050
  };
9015
- fs25.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
9051
+ fs29.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
9016
9052
  let readable = `=============================================================
9017
9053
  `;
9018
9054
  readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
@@ -9039,7 +9075,7 @@ ${"=".repeat(60)}
9039
9075
  `;
9040
9076
  readable += content + "\n";
9041
9077
  });
9042
- fs25.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
9078
+ fs29.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
9043
9079
  log(`\u{1F4BE} Complete session history saved:`);
9044
9080
  log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
9045
9081
  } catch (error) {
@@ -9048,11 +9084,11 @@ ${"=".repeat(60)}
9048
9084
  }
9049
9085
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
9050
9086
  try {
9051
- const fs25 = require("fs");
9052
- const path29 = require("path");
9087
+ const fs29 = require("fs");
9088
+ const path33 = require("path");
9053
9089
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
9054
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path29.join(process.cwd(), "debug-artifacts");
9055
- const responseFile = path29.join(
9090
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
9091
+ const responseFile = path33.join(
9056
9092
  debugArtifactsDir,
9057
9093
  `response-${_checkName || "unknown"}-${timestamp}.txt`
9058
9094
  );
@@ -9085,7 +9121,7 @@ ${"=".repeat(60)}
9085
9121
  `;
9086
9122
  responseContent += `${"=".repeat(60)}
9087
9123
  `;
9088
- fs25.writeFileSync(responseFile, responseContent, "utf-8");
9124
+ fs29.writeFileSync(responseFile, responseContent, "utf-8");
9089
9125
  log(`\u{1F4BE} Response saved to: ${responseFile}`);
9090
9126
  } catch (error) {
9091
9127
  log(`\u26A0\uFE0F Could not save response file: ${error}`);
@@ -9103,9 +9139,9 @@ ${"=".repeat(60)}
9103
9139
  await telemetry.shutdown();
9104
9140
  log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
9105
9141
  if (process.env.GITHUB_ACTIONS) {
9106
- const fs25 = require("fs");
9107
- if (fs25.existsSync(traceFilePath)) {
9108
- const stats = fs25.statSync(traceFilePath);
9142
+ const fs29 = require("fs");
9143
+ if (fs29.existsSync(traceFilePath)) {
9144
+ const stats = fs29.statSync(traceFilePath);
9109
9145
  console.log(
9110
9146
  `::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
9111
9147
  );
@@ -9143,8 +9179,8 @@ ${"=".repeat(60)}
9143
9179
  * Load schema content from schema files or inline definitions
9144
9180
  */
9145
9181
  async loadSchemaContent(schema) {
9146
- const fs25 = require("fs").promises;
9147
- const path29 = require("path");
9182
+ const fs29 = require("fs").promises;
9183
+ const path33 = require("path");
9148
9184
  if (typeof schema === "object" && schema !== null) {
9149
9185
  log("\u{1F4CB} Using inline schema object from configuration");
9150
9186
  return JSON.stringify(schema);
@@ -9157,14 +9193,14 @@ ${"=".repeat(60)}
9157
9193
  }
9158
9194
  } catch {
9159
9195
  }
9160
- if ((schema.startsWith("./") || schema.includes(".json")) && !path29.isAbsolute(schema)) {
9196
+ if ((schema.startsWith("./") || schema.includes(".json")) && !path33.isAbsolute(schema)) {
9161
9197
  if (schema.includes("..") || schema.includes("\0")) {
9162
9198
  throw new Error("Invalid schema path: path traversal not allowed");
9163
9199
  }
9164
9200
  try {
9165
- const schemaPath = path29.resolve(process.cwd(), schema);
9201
+ const schemaPath = path33.resolve(process.cwd(), schema);
9166
9202
  log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
9167
- const schemaContent = await fs25.readFile(schemaPath, "utf-8");
9203
+ const schemaContent = await fs29.readFile(schemaPath, "utf-8");
9168
9204
  return schemaContent.trim();
9169
9205
  } catch (error) {
9170
9206
  throw new Error(
@@ -9178,22 +9214,22 @@ ${"=".repeat(60)}
9178
9214
  }
9179
9215
  const candidatePaths = [
9180
9216
  // GitHub Action bundle location
9181
- path29.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
9217
+ path33.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
9182
9218
  // Historical fallback when src/output was inadvertently bundled as output1/
9183
- path29.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
9219
+ path33.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
9184
9220
  // Local dev (repo root)
9185
- path29.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
9221
+ path33.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
9186
9222
  ];
9187
9223
  for (const schemaPath of candidatePaths) {
9188
9224
  try {
9189
- const schemaContent = await fs25.readFile(schemaPath, "utf-8");
9225
+ const schemaContent = await fs29.readFile(schemaPath, "utf-8");
9190
9226
  return schemaContent.trim();
9191
9227
  } catch {
9192
9228
  }
9193
9229
  }
9194
- const distPath = path29.join(__dirname, "output", sanitizedSchemaName, "schema.json");
9195
- const distAltPath = path29.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
9196
- const cwdPath = path29.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
9230
+ const distPath = path33.join(__dirname, "output", sanitizedSchemaName, "schema.json");
9231
+ const distAltPath = path33.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
9232
+ const cwdPath = path33.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
9197
9233
  throw new Error(
9198
9234
  `Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
9199
9235
  );
@@ -9435,7 +9471,7 @@ ${"=".repeat(60)}
9435
9471
  * Generate mock response for testing
9436
9472
  */
9437
9473
  async generateMockResponse(_prompt, _checkName, _schema) {
9438
- await new Promise((resolve15) => setTimeout(resolve15, 500));
9474
+ await new Promise((resolve19) => setTimeout(resolve19, 500));
9439
9475
  const name = (_checkName || "").toLowerCase();
9440
9476
  if (name.includes("extract-facts")) {
9441
9477
  const arr = Array.from({ length: 6 }, (_, i) => ({
@@ -9796,7 +9832,7 @@ var init_command_executor = __esm({
9796
9832
  * Execute command with stdin input
9797
9833
  */
9798
9834
  executeWithStdin(command, options) {
9799
- return new Promise((resolve15, reject) => {
9835
+ return new Promise((resolve19, reject) => {
9800
9836
  const childProcess = (0, import_child_process2.exec)(
9801
9837
  command,
9802
9838
  {
@@ -9808,7 +9844,7 @@ var init_command_executor = __esm({
9808
9844
  if (error && error.killed && (error.code === "ETIMEDOUT" || error.signal === "SIGTERM")) {
9809
9845
  reject(new Error(`Command timed out after ${options.timeout || 3e4}ms`));
9810
9846
  } else {
9811
- resolve15({
9847
+ resolve19({
9812
9848
  stdout: stdout || "",
9813
9849
  stderr: stderr || "",
9814
9850
  exitCode: error ? error.code || 1 : 0
@@ -16136,7 +16172,11 @@ var init_config = __esm({
16136
16172
  "manual",
16137
16173
  "schedule",
16138
16174
  "webhook_received",
16139
- "slack_message"
16175
+ "slack_message",
16176
+ "telegram_message",
16177
+ "email_message",
16178
+ "whatsapp_message",
16179
+ "teams_message"
16140
16180
  ];
16141
16181
  ConfigManager = class {
16142
16182
  validCheckTypes = [
@@ -18596,17 +18636,17 @@ var init_workflow_check_provider = __esm({
18596
18636
  * so it can be executed by the state machine as a nested workflow.
18597
18637
  */
18598
18638
  async loadWorkflowFromConfigPath(sourcePath, baseDir) {
18599
- const path29 = require("path");
18600
- const fs25 = require("fs");
18639
+ const path33 = require("path");
18640
+ const fs29 = require("fs");
18601
18641
  const yaml5 = require("js-yaml");
18602
- const resolved = path29.isAbsolute(sourcePath) ? sourcePath : path29.resolve(baseDir, sourcePath);
18603
- if (!fs25.existsSync(resolved)) {
18642
+ const resolved = path33.isAbsolute(sourcePath) ? sourcePath : path33.resolve(baseDir, sourcePath);
18643
+ if (!fs29.existsSync(resolved)) {
18604
18644
  throw new Error(`Workflow config not found at: ${resolved}`);
18605
18645
  }
18606
- const rawContent = fs25.readFileSync(resolved, "utf8");
18646
+ const rawContent = fs29.readFileSync(resolved, "utf8");
18607
18647
  const rawData = yaml5.load(rawContent);
18608
18648
  if (rawData.imports && Array.isArray(rawData.imports)) {
18609
- const configDir = path29.dirname(resolved);
18649
+ const configDir = path33.dirname(resolved);
18610
18650
  for (const source of rawData.imports) {
18611
18651
  const results = await this.registry.import(source, {
18612
18652
  basePath: configDir,
@@ -18636,8 +18676,8 @@ ${errors}`);
18636
18676
  if (!steps || Object.keys(steps).length === 0) {
18637
18677
  throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
18638
18678
  }
18639
- const id = path29.basename(resolved).replace(/\.(ya?ml)$/i, "");
18640
- const name = loaded.name || `Workflow from ${path29.basename(resolved)}`;
18679
+ const id = path33.basename(resolved).replace(/\.(ya?ml)$/i, "");
18680
+ const name = loaded.name || `Workflow from ${path33.basename(resolved)}`;
18641
18681
  const workflowDef = {
18642
18682
  id,
18643
18683
  name,
@@ -19446,8 +19486,8 @@ async function createStoreBackend(storageConfig, haConfig) {
19446
19486
  case "mssql": {
19447
19487
  try {
19448
19488
  const loaderPath = "../../enterprise/loader";
19449
- const { loadEnterpriseStoreBackend } = await import(loaderPath);
19450
- return await loadEnterpriseStoreBackend(driver, storageConfig, haConfig);
19489
+ const { loadEnterpriseStoreBackend: loadEnterpriseStoreBackend2 } = await import(loaderPath);
19490
+ return await loadEnterpriseStoreBackend2(driver, storageConfig, haConfig);
19451
19491
  } catch (err) {
19452
19492
  const msg = err instanceof Error ? err.message : String(err);
19453
19493
  logger.error(`[StoreFactory] Failed to load enterprise ${driver} backend: ${msg}`);
@@ -22141,7 +22181,7 @@ var init_mcp_custom_sse_server = __esm({
22141
22181
  * Returns the actual bound port number
22142
22182
  */
22143
22183
  async start() {
22144
- return new Promise((resolve15, reject) => {
22184
+ return new Promise((resolve19, reject) => {
22145
22185
  try {
22146
22186
  this.server = import_http.default.createServer((req, res) => {
22147
22187
  this.handleRequest(req, res).catch((error) => {
@@ -22175,7 +22215,7 @@ var init_mcp_custom_sse_server = __esm({
22175
22215
  );
22176
22216
  }
22177
22217
  this.startKeepalive();
22178
- resolve15(this.port);
22218
+ resolve19(this.port);
22179
22219
  });
22180
22220
  } catch (error) {
22181
22221
  reject(error);
@@ -22238,7 +22278,7 @@ var init_mcp_custom_sse_server = __esm({
22238
22278
  logger.debug(
22239
22279
  `[CustomToolsSSEServer:${this.sessionId}] Grace period before stop: ${waitMs}ms (activeToolCalls=${this.activeToolCalls})`
22240
22280
  );
22241
- await new Promise((resolve15) => setTimeout(resolve15, waitMs));
22281
+ await new Promise((resolve19) => setTimeout(resolve19, waitMs));
22242
22282
  }
22243
22283
  }
22244
22284
  if (this.activeToolCalls > 0) {
@@ -22247,7 +22287,7 @@ var init_mcp_custom_sse_server = __esm({
22247
22287
  `[CustomToolsSSEServer:${this.sessionId}] Waiting for ${this.activeToolCalls} active tool call(s) before stop`
22248
22288
  );
22249
22289
  while (this.activeToolCalls > 0 && Date.now() - startedAt < effectiveDrainTimeoutMs) {
22250
- await new Promise((resolve15) => setTimeout(resolve15, 250));
22290
+ await new Promise((resolve19) => setTimeout(resolve19, 250));
22251
22291
  }
22252
22292
  if (this.activeToolCalls > 0) {
22253
22293
  logger.warn(
@@ -22272,21 +22312,21 @@ var init_mcp_custom_sse_server = __esm({
22272
22312
  }
22273
22313
  this.connections.clear();
22274
22314
  if (this.server) {
22275
- await new Promise((resolve15, reject) => {
22315
+ await new Promise((resolve19, reject) => {
22276
22316
  const timeout = setTimeout(() => {
22277
22317
  if (this.debug) {
22278
22318
  logger.debug(
22279
22319
  `[CustomToolsSSEServer:${this.sessionId}] Force closing server after timeout`
22280
22320
  );
22281
22321
  }
22282
- this.server?.close(() => resolve15());
22322
+ this.server?.close(() => resolve19());
22283
22323
  }, 5e3);
22284
22324
  this.server.close((error) => {
22285
22325
  clearTimeout(timeout);
22286
22326
  if (error) {
22287
22327
  reject(error);
22288
22328
  } else {
22289
- resolve15();
22329
+ resolve19();
22290
22330
  }
22291
22331
  });
22292
22332
  });
@@ -22743,7 +22783,7 @@ var init_mcp_custom_sse_server = __esm({
22743
22783
  logger.warn(
22744
22784
  `[CustomToolsSSEServer:${this.sessionId}] Tool ${toolName} failed (attempt ${attempt + 1}/${retryCount + 1}): ${errorMsg}. Retrying in ${delay}ms`
22745
22785
  );
22746
- await new Promise((resolve15) => setTimeout(resolve15, delay));
22786
+ await new Promise((resolve19) => setTimeout(resolve19, delay));
22747
22787
  attempt++;
22748
22788
  }
22749
22789
  }
@@ -23077,7 +23117,9 @@ var init_ai_check_provider = __esm({
23077
23117
  const first = Array.from(map.values())[0];
23078
23118
  if (!first || typeof first !== "object") return {};
23079
23119
  const ev = first.event;
23080
- const conv = first.slack_conversation;
23120
+ const slackConv = first.slack_conversation;
23121
+ const telegramConv = first.telegram_conversation;
23122
+ const conv = slackConv || telegramConv;
23081
23123
  if (!ev && !conv) return {};
23082
23124
  if (conv && prInfo) {
23083
23125
  try {
@@ -23085,7 +23127,8 @@ var init_ai_check_provider = __esm({
23085
23127
  } catch {
23086
23128
  }
23087
23129
  }
23088
- return { slack: { event: ev, conversation: conv } };
23130
+ const transportCtx = slackConv ? { slack: { event: ev, conversation: slackConv } } : { telegram: { event: ev, conversation: telegramConv } };
23131
+ return { ...transportCtx, conversation: conv };
23089
23132
  } catch {
23090
23133
  return {};
23091
23134
  }
@@ -23212,9 +23255,9 @@ var init_ai_check_provider = __esm({
23212
23255
  } else {
23213
23256
  resolvedPath = import_path8.default.resolve(process.cwd(), str);
23214
23257
  }
23215
- const fs25 = require("fs").promises;
23258
+ const fs29 = require("fs").promises;
23216
23259
  try {
23217
- const stat2 = await fs25.stat(resolvedPath);
23260
+ const stat2 = await fs29.stat(resolvedPath);
23218
23261
  return stat2.isFile();
23219
23262
  } catch {
23220
23263
  return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
@@ -29365,14 +29408,14 @@ var require_util = __commonJS({
29365
29408
  }
29366
29409
  const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
29367
29410
  let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
29368
- let path29 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
29411
+ let path33 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
29369
29412
  if (origin.endsWith("/")) {
29370
29413
  origin = origin.substring(0, origin.length - 1);
29371
29414
  }
29372
- if (path29 && !path29.startsWith("/")) {
29373
- path29 = `/${path29}`;
29415
+ if (path33 && !path33.startsWith("/")) {
29416
+ path33 = `/${path33}`;
29374
29417
  }
29375
- url = new URL(origin + path29);
29418
+ url = new URL(origin + path33);
29376
29419
  }
29377
29420
  return url;
29378
29421
  }
@@ -30986,20 +31029,20 @@ var require_parseParams = __commonJS({
30986
31029
  var require_basename = __commonJS({
30987
31030
  "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
30988
31031
  "use strict";
30989
- module2.exports = function basename4(path29) {
30990
- if (typeof path29 !== "string") {
31032
+ module2.exports = function basename4(path33) {
31033
+ if (typeof path33 !== "string") {
30991
31034
  return "";
30992
31035
  }
30993
- for (var i = path29.length - 1; i >= 0; --i) {
30994
- switch (path29.charCodeAt(i)) {
31036
+ for (var i = path33.length - 1; i >= 0; --i) {
31037
+ switch (path33.charCodeAt(i)) {
30995
31038
  case 47:
30996
31039
  // '/'
30997
31040
  case 92:
30998
- path29 = path29.slice(i + 1);
30999
- return path29 === ".." || path29 === "." ? "" : path29;
31041
+ path33 = path33.slice(i + 1);
31042
+ return path33 === ".." || path33 === "." ? "" : path33;
31000
31043
  }
31001
31044
  }
31002
- return path29 === ".." || path29 === "." ? "" : path29;
31045
+ return path33 === ".." || path33 === "." ? "" : path33;
31003
31046
  };
31004
31047
  }
31005
31048
  });
@@ -32003,11 +32046,11 @@ var require_util2 = __commonJS({
32003
32046
  var assert = require("assert");
32004
32047
  var { isUint8Array } = require("util/types");
32005
32048
  var supportedHashes = [];
32006
- var crypto7;
32049
+ var crypto9;
32007
32050
  try {
32008
- crypto7 = require("crypto");
32051
+ crypto9 = require("crypto");
32009
32052
  const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
32010
- supportedHashes = crypto7.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
32053
+ supportedHashes = crypto9.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
32011
32054
  } catch {
32012
32055
  }
32013
32056
  function responseURL(response) {
@@ -32284,7 +32327,7 @@ var require_util2 = __commonJS({
32284
32327
  }
32285
32328
  }
32286
32329
  function bytesMatch(bytes, metadataList) {
32287
- if (crypto7 === void 0) {
32330
+ if (crypto9 === void 0) {
32288
32331
  return true;
32289
32332
  }
32290
32333
  const parsedMetadata = parseMetadata(metadataList);
@@ -32299,7 +32342,7 @@ var require_util2 = __commonJS({
32299
32342
  for (const item of metadata) {
32300
32343
  const algorithm = item.algo;
32301
32344
  const expectedValue = item.hash;
32302
- let actualValue = crypto7.createHash(algorithm).update(bytes).digest("base64");
32345
+ let actualValue = crypto9.createHash(algorithm).update(bytes).digest("base64");
32303
32346
  if (actualValue[actualValue.length - 1] === "=") {
32304
32347
  if (actualValue[actualValue.length - 2] === "=") {
32305
32348
  actualValue = actualValue.slice(0, -2);
@@ -32392,8 +32435,8 @@ var require_util2 = __commonJS({
32392
32435
  function createDeferredPromise() {
32393
32436
  let res;
32394
32437
  let rej;
32395
- const promise = new Promise((resolve15, reject) => {
32396
- res = resolve15;
32438
+ const promise = new Promise((resolve19, reject) => {
32439
+ res = resolve19;
32397
32440
  rej = reject;
32398
32441
  });
32399
32442
  return { promise, resolve: res, reject: rej };
@@ -33646,8 +33689,8 @@ var require_body = __commonJS({
33646
33689
  var { parseMIMEType, serializeAMimeType } = require_dataURL();
33647
33690
  var random;
33648
33691
  try {
33649
- const crypto7 = require("crypto");
33650
- random = (max) => crypto7.randomInt(0, max);
33692
+ const crypto9 = require("crypto");
33693
+ random = (max) => crypto9.randomInt(0, max);
33651
33694
  } catch {
33652
33695
  random = (max) => Math.floor(Math.random(max));
33653
33696
  }
@@ -33898,8 +33941,8 @@ Content-Type: ${value.type || "application/octet-stream"}\r
33898
33941
  });
33899
33942
  }
33900
33943
  });
33901
- const busboyResolve = new Promise((resolve15, reject) => {
33902
- busboy.on("finish", resolve15);
33944
+ const busboyResolve = new Promise((resolve19, reject) => {
33945
+ busboy.on("finish", resolve19);
33903
33946
  busboy.on("error", (err) => reject(new TypeError(err)));
33904
33947
  });
33905
33948
  if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk);
@@ -34030,7 +34073,7 @@ var require_request = __commonJS({
34030
34073
  }
34031
34074
  var Request2 = class _Request {
34032
34075
  constructor(origin, {
34033
- path: path29,
34076
+ path: path33,
34034
34077
  method,
34035
34078
  body,
34036
34079
  headers,
@@ -34044,11 +34087,11 @@ var require_request = __commonJS({
34044
34087
  throwOnError,
34045
34088
  expectContinue
34046
34089
  }, handler) {
34047
- if (typeof path29 !== "string") {
34090
+ if (typeof path33 !== "string") {
34048
34091
  throw new InvalidArgumentError("path must be a string");
34049
- } else if (path29[0] !== "/" && !(path29.startsWith("http://") || path29.startsWith("https://")) && method !== "CONNECT") {
34092
+ } else if (path33[0] !== "/" && !(path33.startsWith("http://") || path33.startsWith("https://")) && method !== "CONNECT") {
34050
34093
  throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
34051
- } else if (invalidPathRegex.exec(path29) !== null) {
34094
+ } else if (invalidPathRegex.exec(path33) !== null) {
34052
34095
  throw new InvalidArgumentError("invalid request path");
34053
34096
  }
34054
34097
  if (typeof method !== "string") {
@@ -34111,7 +34154,7 @@ var require_request = __commonJS({
34111
34154
  this.completed = false;
34112
34155
  this.aborted = false;
34113
34156
  this.upgrade = upgrade || null;
34114
- this.path = query ? util.buildURL(path29, query) : path29;
34157
+ this.path = query ? util.buildURL(path33, query) : path33;
34115
34158
  this.origin = origin;
34116
34159
  this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
34117
34160
  this.blocking = blocking == null ? false : blocking;
@@ -34433,9 +34476,9 @@ var require_dispatcher_base = __commonJS({
34433
34476
  }
34434
34477
  close(callback) {
34435
34478
  if (callback === void 0) {
34436
- return new Promise((resolve15, reject) => {
34479
+ return new Promise((resolve19, reject) => {
34437
34480
  this.close((err, data) => {
34438
- return err ? reject(err) : resolve15(data);
34481
+ return err ? reject(err) : resolve19(data);
34439
34482
  });
34440
34483
  });
34441
34484
  }
@@ -34473,12 +34516,12 @@ var require_dispatcher_base = __commonJS({
34473
34516
  err = null;
34474
34517
  }
34475
34518
  if (callback === void 0) {
34476
- return new Promise((resolve15, reject) => {
34519
+ return new Promise((resolve19, reject) => {
34477
34520
  this.destroy(err, (err2, data) => {
34478
34521
  return err2 ? (
34479
34522
  /* istanbul ignore next: should never error */
34480
34523
  reject(err2)
34481
- ) : resolve15(data);
34524
+ ) : resolve19(data);
34482
34525
  });
34483
34526
  });
34484
34527
  }
@@ -35119,9 +35162,9 @@ var require_RedirectHandler = __commonJS({
35119
35162
  return this.handler.onHeaders(statusCode, headers, resume, statusText);
35120
35163
  }
35121
35164
  const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
35122
- const path29 = search ? `${pathname}${search}` : pathname;
35165
+ const path33 = search ? `${pathname}${search}` : pathname;
35123
35166
  this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
35124
- this.opts.path = path29;
35167
+ this.opts.path = path33;
35125
35168
  this.opts.origin = origin;
35126
35169
  this.opts.maxRedirections = 0;
35127
35170
  this.opts.query = null;
@@ -35540,16 +35583,16 @@ var require_client = __commonJS({
35540
35583
  return this[kNeedDrain] < 2;
35541
35584
  }
35542
35585
  async [kClose]() {
35543
- return new Promise((resolve15) => {
35586
+ return new Promise((resolve19) => {
35544
35587
  if (!this[kSize]) {
35545
- resolve15(null);
35588
+ resolve19(null);
35546
35589
  } else {
35547
- this[kClosedResolve] = resolve15;
35590
+ this[kClosedResolve] = resolve19;
35548
35591
  }
35549
35592
  });
35550
35593
  }
35551
35594
  async [kDestroy](err) {
35552
- return new Promise((resolve15) => {
35595
+ return new Promise((resolve19) => {
35553
35596
  const requests = this[kQueue].splice(this[kPendingIdx]);
35554
35597
  for (let i = 0; i < requests.length; i++) {
35555
35598
  const request = requests[i];
@@ -35560,7 +35603,7 @@ var require_client = __commonJS({
35560
35603
  this[kClosedResolve]();
35561
35604
  this[kClosedResolve] = null;
35562
35605
  }
35563
- resolve15();
35606
+ resolve19();
35564
35607
  };
35565
35608
  if (this[kHTTP2Session] != null) {
35566
35609
  util.destroy(this[kHTTP2Session], err);
@@ -36140,7 +36183,7 @@ var require_client = __commonJS({
36140
36183
  });
36141
36184
  }
36142
36185
  try {
36143
- const socket = await new Promise((resolve15, reject) => {
36186
+ const socket = await new Promise((resolve19, reject) => {
36144
36187
  client[kConnector]({
36145
36188
  host,
36146
36189
  hostname,
@@ -36152,7 +36195,7 @@ var require_client = __commonJS({
36152
36195
  if (err) {
36153
36196
  reject(err);
36154
36197
  } else {
36155
- resolve15(socket2);
36198
+ resolve19(socket2);
36156
36199
  }
36157
36200
  });
36158
36201
  });
@@ -36363,7 +36406,7 @@ var require_client = __commonJS({
36363
36406
  writeH2(client, client[kHTTP2Session], request);
36364
36407
  return;
36365
36408
  }
36366
- const { body, method, path: path29, host, upgrade, headers, blocking, reset } = request;
36409
+ const { body, method, path: path33, host, upgrade, headers, blocking, reset } = request;
36367
36410
  const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
36368
36411
  if (body && typeof body.read === "function") {
36369
36412
  body.read(0);
@@ -36413,7 +36456,7 @@ var require_client = __commonJS({
36413
36456
  if (blocking) {
36414
36457
  socket[kBlocking] = true;
36415
36458
  }
36416
- let header = `${method} ${path29} HTTP/1.1\r
36459
+ let header = `${method} ${path33} HTTP/1.1\r
36417
36460
  `;
36418
36461
  if (typeof host === "string") {
36419
36462
  header += `host: ${host}\r
@@ -36476,7 +36519,7 @@ upgrade: ${upgrade}\r
36476
36519
  return true;
36477
36520
  }
36478
36521
  function writeH2(client, session, request) {
36479
- const { body, method, path: path29, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
36522
+ const { body, method, path: path33, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
36480
36523
  let headers;
36481
36524
  if (typeof reqHeaders === "string") headers = Request2[kHTTP2CopyHeaders](reqHeaders.trim());
36482
36525
  else headers = reqHeaders;
@@ -36519,7 +36562,7 @@ upgrade: ${upgrade}\r
36519
36562
  });
36520
36563
  return true;
36521
36564
  }
36522
- headers[HTTP2_HEADER_PATH] = path29;
36565
+ headers[HTTP2_HEADER_PATH] = path33;
36523
36566
  headers[HTTP2_HEADER_SCHEME] = "https";
36524
36567
  const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
36525
36568
  if (body && typeof body.read === "function") {
@@ -36776,12 +36819,12 @@ upgrade: ${upgrade}\r
36776
36819
  cb();
36777
36820
  }
36778
36821
  }
36779
- const waitForDrain = () => new Promise((resolve15, reject) => {
36822
+ const waitForDrain = () => new Promise((resolve19, reject) => {
36780
36823
  assert(callback === null);
36781
36824
  if (socket[kError]) {
36782
36825
  reject(socket[kError]);
36783
36826
  } else {
36784
- callback = resolve15;
36827
+ callback = resolve19;
36785
36828
  }
36786
36829
  });
36787
36830
  if (client[kHTTPConnVersion] === "h2") {
@@ -37127,8 +37170,8 @@ var require_pool_base = __commonJS({
37127
37170
  if (this[kQueue].isEmpty()) {
37128
37171
  return Promise.all(this[kClients].map((c) => c.close()));
37129
37172
  } else {
37130
- return new Promise((resolve15) => {
37131
- this[kClosedResolve] = resolve15;
37173
+ return new Promise((resolve19) => {
37174
+ this[kClosedResolve] = resolve19;
37132
37175
  });
37133
37176
  }
37134
37177
  }
@@ -37706,7 +37749,7 @@ var require_readable = __commonJS({
37706
37749
  if (this.closed) {
37707
37750
  return Promise.resolve(null);
37708
37751
  }
37709
- return new Promise((resolve15, reject) => {
37752
+ return new Promise((resolve19, reject) => {
37710
37753
  const signalListenerCleanup = signal ? util.addAbortListener(signal, () => {
37711
37754
  this.destroy();
37712
37755
  }) : noop;
@@ -37715,7 +37758,7 @@ var require_readable = __commonJS({
37715
37758
  if (signal && signal.aborted) {
37716
37759
  reject(signal.reason || Object.assign(new Error("The operation was aborted"), { name: "AbortError" }));
37717
37760
  } else {
37718
- resolve15(null);
37761
+ resolve19(null);
37719
37762
  }
37720
37763
  }).on("error", noop).on("data", function(chunk) {
37721
37764
  limit -= chunk.length;
@@ -37737,11 +37780,11 @@ var require_readable = __commonJS({
37737
37780
  throw new TypeError("unusable");
37738
37781
  }
37739
37782
  assert(!stream[kConsume]);
37740
- return new Promise((resolve15, reject) => {
37783
+ return new Promise((resolve19, reject) => {
37741
37784
  stream[kConsume] = {
37742
37785
  type,
37743
37786
  stream,
37744
- resolve: resolve15,
37787
+ resolve: resolve19,
37745
37788
  reject,
37746
37789
  length: 0,
37747
37790
  body: []
@@ -37776,12 +37819,12 @@ var require_readable = __commonJS({
37776
37819
  }
37777
37820
  }
37778
37821
  function consumeEnd(consume2) {
37779
- const { type, body, resolve: resolve15, stream, length } = consume2;
37822
+ const { type, body, resolve: resolve19, stream, length } = consume2;
37780
37823
  try {
37781
37824
  if (type === "text") {
37782
- resolve15(toUSVString(Buffer.concat(body)));
37825
+ resolve19(toUSVString(Buffer.concat(body)));
37783
37826
  } else if (type === "json") {
37784
- resolve15(JSON.parse(Buffer.concat(body)));
37827
+ resolve19(JSON.parse(Buffer.concat(body)));
37785
37828
  } else if (type === "arrayBuffer") {
37786
37829
  const dst = new Uint8Array(length);
37787
37830
  let pos = 0;
@@ -37789,12 +37832,12 @@ var require_readable = __commonJS({
37789
37832
  dst.set(buf, pos);
37790
37833
  pos += buf.byteLength;
37791
37834
  }
37792
- resolve15(dst.buffer);
37835
+ resolve19(dst.buffer);
37793
37836
  } else if (type === "blob") {
37794
37837
  if (!Blob2) {
37795
37838
  Blob2 = require("buffer").Blob;
37796
37839
  }
37797
- resolve15(new Blob2(body, { type: stream[kContentType] }));
37840
+ resolve19(new Blob2(body, { type: stream[kContentType] }));
37798
37841
  }
37799
37842
  consumeFinish(consume2);
37800
37843
  } catch (err) {
@@ -38051,9 +38094,9 @@ var require_api_request = __commonJS({
38051
38094
  };
38052
38095
  function request(opts, callback) {
38053
38096
  if (callback === void 0) {
38054
- return new Promise((resolve15, reject) => {
38097
+ return new Promise((resolve19, reject) => {
38055
38098
  request.call(this, opts, (err, data) => {
38056
- return err ? reject(err) : resolve15(data);
38099
+ return err ? reject(err) : resolve19(data);
38057
38100
  });
38058
38101
  });
38059
38102
  }
@@ -38226,9 +38269,9 @@ var require_api_stream = __commonJS({
38226
38269
  };
38227
38270
  function stream(opts, factory, callback) {
38228
38271
  if (callback === void 0) {
38229
- return new Promise((resolve15, reject) => {
38272
+ return new Promise((resolve19, reject) => {
38230
38273
  stream.call(this, opts, factory, (err, data) => {
38231
- return err ? reject(err) : resolve15(data);
38274
+ return err ? reject(err) : resolve19(data);
38232
38275
  });
38233
38276
  });
38234
38277
  }
@@ -38509,9 +38552,9 @@ var require_api_upgrade = __commonJS({
38509
38552
  };
38510
38553
  function upgrade(opts, callback) {
38511
38554
  if (callback === void 0) {
38512
- return new Promise((resolve15, reject) => {
38555
+ return new Promise((resolve19, reject) => {
38513
38556
  upgrade.call(this, opts, (err, data) => {
38514
- return err ? reject(err) : resolve15(data);
38557
+ return err ? reject(err) : resolve19(data);
38515
38558
  });
38516
38559
  });
38517
38560
  }
@@ -38600,9 +38643,9 @@ var require_api_connect = __commonJS({
38600
38643
  };
38601
38644
  function connect(opts, callback) {
38602
38645
  if (callback === void 0) {
38603
- return new Promise((resolve15, reject) => {
38646
+ return new Promise((resolve19, reject) => {
38604
38647
  connect.call(this, opts, (err, data) => {
38605
- return err ? reject(err) : resolve15(data);
38648
+ return err ? reject(err) : resolve19(data);
38606
38649
  });
38607
38650
  });
38608
38651
  }
@@ -38762,20 +38805,20 @@ var require_mock_utils = __commonJS({
38762
38805
  }
38763
38806
  return true;
38764
38807
  }
38765
- function safeUrl(path29) {
38766
- if (typeof path29 !== "string") {
38767
- return path29;
38808
+ function safeUrl(path33) {
38809
+ if (typeof path33 !== "string") {
38810
+ return path33;
38768
38811
  }
38769
- const pathSegments = path29.split("?");
38812
+ const pathSegments = path33.split("?");
38770
38813
  if (pathSegments.length !== 2) {
38771
- return path29;
38814
+ return path33;
38772
38815
  }
38773
38816
  const qp = new URLSearchParams(pathSegments.pop());
38774
38817
  qp.sort();
38775
38818
  return [...pathSegments, qp.toString()].join("?");
38776
38819
  }
38777
- function matchKey(mockDispatch2, { path: path29, method, body, headers }) {
38778
- const pathMatch = matchValue(mockDispatch2.path, path29);
38820
+ function matchKey(mockDispatch2, { path: path33, method, body, headers }) {
38821
+ const pathMatch = matchValue(mockDispatch2.path, path33);
38779
38822
  const methodMatch = matchValue(mockDispatch2.method, method);
38780
38823
  const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
38781
38824
  const headersMatch = matchHeaders(mockDispatch2, headers);
@@ -38793,7 +38836,7 @@ var require_mock_utils = __commonJS({
38793
38836
  function getMockDispatch(mockDispatches, key) {
38794
38837
  const basePath = key.query ? buildURL(key.path, key.query) : key.path;
38795
38838
  const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
38796
- let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path29 }) => matchValue(safeUrl(path29), resolvedPath));
38839
+ let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path33 }) => matchValue(safeUrl(path33), resolvedPath));
38797
38840
  if (matchedMockDispatches.length === 0) {
38798
38841
  throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
38799
38842
  }
@@ -38830,9 +38873,9 @@ var require_mock_utils = __commonJS({
38830
38873
  }
38831
38874
  }
38832
38875
  function buildKey(opts) {
38833
- const { path: path29, method, body, headers, query } = opts;
38876
+ const { path: path33, method, body, headers, query } = opts;
38834
38877
  return {
38835
- path: path29,
38878
+ path: path33,
38836
38879
  method,
38837
38880
  body,
38838
38881
  headers,
@@ -39281,10 +39324,10 @@ var require_pending_interceptors_formatter = __commonJS({
39281
39324
  }
39282
39325
  format(pendingInterceptors) {
39283
39326
  const withPrettyHeaders = pendingInterceptors.map(
39284
- ({ method, path: path29, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
39327
+ ({ method, path: path33, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
39285
39328
  Method: method,
39286
39329
  Origin: origin,
39287
- Path: path29,
39330
+ Path: path33,
39288
39331
  "Status code": statusCode,
39289
39332
  Persistent: persist ? "\u2705" : "\u274C",
39290
39333
  Invocations: timesInvoked,
@@ -42225,7 +42268,7 @@ var require_fetch = __commonJS({
42225
42268
  async function dispatch({ body }) {
42226
42269
  const url = requestCurrentURL(request);
42227
42270
  const agent = fetchParams.controller.dispatcher;
42228
- return new Promise((resolve15, reject) => agent.dispatch(
42271
+ return new Promise((resolve19, reject) => agent.dispatch(
42229
42272
  {
42230
42273
  path: url.pathname + url.search,
42231
42274
  origin: url.origin,
@@ -42301,7 +42344,7 @@ var require_fetch = __commonJS({
42301
42344
  }
42302
42345
  }
42303
42346
  }
42304
- resolve15({
42347
+ resolve19({
42305
42348
  status,
42306
42349
  statusText,
42307
42350
  headersList: headers[kHeadersList],
@@ -42344,7 +42387,7 @@ var require_fetch = __commonJS({
42344
42387
  const val = headersList[n + 1].toString("latin1");
42345
42388
  headers[kHeadersList].append(key, val);
42346
42389
  }
42347
- resolve15({
42390
+ resolve19({
42348
42391
  status,
42349
42392
  statusText: STATUS_CODES[status],
42350
42393
  headersList: headers[kHeadersList],
@@ -43905,8 +43948,8 @@ var require_util6 = __commonJS({
43905
43948
  }
43906
43949
  }
43907
43950
  }
43908
- function validateCookiePath(path29) {
43909
- for (const char of path29) {
43951
+ function validateCookiePath(path33) {
43952
+ for (const char of path33) {
43910
43953
  const code = char.charCodeAt(0);
43911
43954
  if (code < 33 || char === ";") {
43912
43955
  throw new Error("Invalid cookie path");
@@ -44703,9 +44746,9 @@ var require_connection = __commonJS({
44703
44746
  channels.open = diagnosticsChannel.channel("undici:websocket:open");
44704
44747
  channels.close = diagnosticsChannel.channel("undici:websocket:close");
44705
44748
  channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error");
44706
- var crypto7;
44749
+ var crypto9;
44707
44750
  try {
44708
- crypto7 = require("crypto");
44751
+ crypto9 = require("crypto");
44709
44752
  } catch {
44710
44753
  }
44711
44754
  function establishWebSocketConnection(url, protocols, ws, onEstablish, options) {
@@ -44724,7 +44767,7 @@ var require_connection = __commonJS({
44724
44767
  const headersList = new Headers(options.headers)[kHeadersList];
44725
44768
  request.headersList = headersList;
44726
44769
  }
44727
- const keyValue = crypto7.randomBytes(16).toString("base64");
44770
+ const keyValue = crypto9.randomBytes(16).toString("base64");
44728
44771
  request.headersList.append("sec-websocket-key", keyValue);
44729
44772
  request.headersList.append("sec-websocket-version", "13");
44730
44773
  for (const protocol of protocols) {
@@ -44753,7 +44796,7 @@ var require_connection = __commonJS({
44753
44796
  return;
44754
44797
  }
44755
44798
  const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
44756
- const digest = crypto7.createHash("sha1").update(keyValue + uid).digest("base64");
44799
+ const digest = crypto9.createHash("sha1").update(keyValue + uid).digest("base64");
44757
44800
  if (secWSAccept !== digest) {
44758
44801
  failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
44759
44802
  return;
@@ -44833,9 +44876,9 @@ var require_frame = __commonJS({
44833
44876
  "node_modules/undici/lib/websocket/frame.js"(exports2, module2) {
44834
44877
  "use strict";
44835
44878
  var { maxUnsigned16Bit } = require_constants5();
44836
- var crypto7;
44879
+ var crypto9;
44837
44880
  try {
44838
- crypto7 = require("crypto");
44881
+ crypto9 = require("crypto");
44839
44882
  } catch {
44840
44883
  }
44841
44884
  var WebsocketFrameSend = class {
@@ -44844,7 +44887,7 @@ var require_frame = __commonJS({
44844
44887
  */
44845
44888
  constructor(data) {
44846
44889
  this.frameData = data;
44847
- this.maskKey = crypto7.randomBytes(4);
44890
+ this.maskKey = crypto9.randomBytes(4);
44848
44891
  }
44849
44892
  createFrame(opcode) {
44850
44893
  const bodyLength = this.frameData?.byteLength ?? 0;
@@ -45586,11 +45629,11 @@ var require_undici = __commonJS({
45586
45629
  if (typeof opts.path !== "string") {
45587
45630
  throw new InvalidArgumentError("invalid opts.path");
45588
45631
  }
45589
- let path29 = opts.path;
45632
+ let path33 = opts.path;
45590
45633
  if (!opts.path.startsWith("/")) {
45591
- path29 = `/${path29}`;
45634
+ path33 = `/${path33}`;
45592
45635
  }
45593
- url = new URL(util.parseOrigin(url).origin + path29);
45636
+ url = new URL(util.parseOrigin(url).origin + path33);
45594
45637
  } else {
45595
45638
  if (!opts) {
45596
45639
  opts = typeof url === "object" ? url : {};
@@ -46159,7 +46202,7 @@ var init_mcp_check_provider = __esm({
46159
46202
  logger.warn(
46160
46203
  `MCP ${transportName} failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${error instanceof Error ? error.message : String(error)}`
46161
46204
  );
46162
- await new Promise((resolve15) => setTimeout(resolve15, delay));
46205
+ await new Promise((resolve19) => setTimeout(resolve19, delay));
46163
46206
  attempt += 1;
46164
46207
  } finally {
46165
46208
  try {
@@ -46452,7 +46495,7 @@ async function acquirePromptLock() {
46452
46495
  );
46453
46496
  }, 1e4);
46454
46497
  try {
46455
- await new Promise((resolve15) => waiters.push(resolve15));
46498
+ await new Promise((resolve19) => waiters.push(resolve19));
46456
46499
  } finally {
46457
46500
  clearInterval(reminder);
46458
46501
  const waitedMs = Date.now() - queuedAt;
@@ -46471,7 +46514,7 @@ function releasePromptLock() {
46471
46514
  }
46472
46515
  async function interactivePrompt(options) {
46473
46516
  await acquirePromptLock();
46474
- return new Promise((resolve15, reject) => {
46517
+ return new Promise((resolve19, reject) => {
46475
46518
  const dbg = process.env.VISOR_DEBUG === "true";
46476
46519
  try {
46477
46520
  if (dbg) {
@@ -46558,12 +46601,12 @@ async function interactivePrompt(options) {
46558
46601
  };
46559
46602
  const finish = (value) => {
46560
46603
  cleanup();
46561
- resolve15(value);
46604
+ resolve19(value);
46562
46605
  };
46563
46606
  if (options.timeout && options.timeout > 0) {
46564
46607
  timeoutId = setTimeout(() => {
46565
46608
  cleanup();
46566
- if (defaultValue !== void 0) return resolve15(defaultValue);
46609
+ if (defaultValue !== void 0) return resolve19(defaultValue);
46567
46610
  return reject(new Error("Input timeout"));
46568
46611
  }, options.timeout);
46569
46612
  }
@@ -46695,7 +46738,7 @@ async function interactivePrompt(options) {
46695
46738
  });
46696
46739
  }
46697
46740
  async function simplePrompt(prompt) {
46698
- return new Promise((resolve15) => {
46741
+ return new Promise((resolve19) => {
46699
46742
  const rl = readline.createInterface({
46700
46743
  input: process.stdin,
46701
46744
  output: process.stdout
@@ -46711,7 +46754,7 @@ async function simplePrompt(prompt) {
46711
46754
  rl.question(`${prompt}
46712
46755
  > `, (answer) => {
46713
46756
  rl.close();
46714
- resolve15(answer.trim());
46757
+ resolve19(answer.trim());
46715
46758
  });
46716
46759
  });
46717
46760
  }
@@ -46879,7 +46922,7 @@ function isStdinAvailable() {
46879
46922
  return !process.stdin.isTTY;
46880
46923
  }
46881
46924
  async function readStdin(timeout, maxSize = 1024 * 1024) {
46882
- return new Promise((resolve15, reject) => {
46925
+ return new Promise((resolve19, reject) => {
46883
46926
  let data = "";
46884
46927
  let timeoutId;
46885
46928
  if (timeout) {
@@ -46906,7 +46949,7 @@ async function readStdin(timeout, maxSize = 1024 * 1024) {
46906
46949
  };
46907
46950
  const onEnd = () => {
46908
46951
  cleanup();
46909
- resolve15(data.trim());
46952
+ resolve19(data.trim());
46910
46953
  };
46911
46954
  const onError = (err) => {
46912
46955
  cleanup();
@@ -51606,23 +51649,23 @@ __export(renderer_schema_exports, {
51606
51649
  });
51607
51650
  async function loadRendererSchema(name) {
51608
51651
  try {
51609
- const fs25 = await import("fs/promises");
51610
- const path29 = await import("path");
51652
+ const fs29 = await import("fs/promises");
51653
+ const path33 = await import("path");
51611
51654
  const sanitized = String(name).replace(/[^a-zA-Z0-9-]/g, "");
51612
51655
  if (!sanitized) return void 0;
51613
51656
  const candidates = [
51614
51657
  // When bundled with ncc, __dirname is dist/ and output/ is at dist/output/
51615
- path29.join(__dirname, "output", sanitized, "schema.json"),
51658
+ path33.join(__dirname, "output", sanitized, "schema.json"),
51616
51659
  // When running from source, __dirname is src/state-machine/dispatch/ and output/ is at output/
51617
- path29.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
51660
+ path33.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
51618
51661
  // When running from a checkout with output/ folder copied to CWD
51619
- path29.join(process.cwd(), "output", sanitized, "schema.json"),
51662
+ path33.join(process.cwd(), "output", sanitized, "schema.json"),
51620
51663
  // Fallback: cwd/dist/output/
51621
- path29.join(process.cwd(), "dist", "output", sanitized, "schema.json")
51664
+ path33.join(process.cwd(), "dist", "output", sanitized, "schema.json")
51622
51665
  ];
51623
51666
  for (const p of candidates) {
51624
51667
  try {
51625
- const raw = await fs25.readFile(p, "utf-8");
51668
+ const raw = await fs29.readFile(p, "utf-8");
51626
51669
  return JSON.parse(raw);
51627
51670
  } catch {
51628
51671
  }
@@ -52189,22 +52232,22 @@ async function executeCheckWithForEachItems2(checkId, forEachParent, forEachItem
52189
52232
  if (webhookData && webhookData.size > 0) {
52190
52233
  for (const payload of webhookData.values()) {
52191
52234
  const slackConv = payload?.slack_conversation;
52192
- if (slackConv) {
52235
+ const telegramConv = payload?.telegram_conversation;
52236
+ const conv = slackConv || telegramConv;
52237
+ if (conv) {
52193
52238
  const event = payload?.event;
52194
- const messageCount = Array.isArray(slackConv?.messages) ? slackConv.messages.length : 0;
52239
+ const messageCount = Array.isArray(conv?.messages) ? conv.messages.length : 0;
52195
52240
  if (context2.debug) {
52196
52241
  logger.info(
52197
- `[LevelDispatch] Slack conversation extracted: ${messageCount} messages`
52242
+ `[LevelDispatch] Conversation extracted (${conv?.transport || "unknown"}): ${messageCount} messages`
52198
52243
  );
52199
52244
  }
52245
+ const transportCtx = slackConv ? { slack: { event: event || {}, conversation: slackConv } } : { telegram: { event: event || {}, conversation: telegramConv }, webhook: payload };
52200
52246
  providerConfig.eventContext = {
52201
52247
  ...providerConfig.eventContext,
52202
- slack: {
52203
- event: event || {},
52204
- conversation: slackConv
52205
- },
52206
- conversation: slackConv
52207
- // Also expose at top level for convenience
52248
+ ...transportCtx,
52249
+ conversation: conv
52250
+ // Expose at top level for all transports
52208
52251
  };
52209
52252
  break;
52210
52253
  }
@@ -53317,20 +53360,20 @@ async function executeSingleCheck2(checkId, context2, state, emitEvent, transiti
53317
53360
  if (webhookData && webhookData.size > 0) {
53318
53361
  for (const payload of webhookData.values()) {
53319
53362
  const slackConv = payload?.slack_conversation;
53320
- if (slackConv) {
53363
+ const telegramConv = payload?.telegram_conversation;
53364
+ const conv = slackConv || telegramConv;
53365
+ if (conv) {
53321
53366
  const event = payload?.event;
53322
- const messageCount = Array.isArray(slackConv?.messages) ? slackConv.messages.length : 0;
53367
+ const messageCount = Array.isArray(conv?.messages) ? conv.messages.length : 0;
53323
53368
  if (context2.debug) {
53324
- logger.info(`[LevelDispatch] Slack conversation extracted: ${messageCount} messages`);
53369
+ logger.info(`[LevelDispatch] Conversation extracted (${conv?.transport || "unknown"}): ${messageCount} messages`);
53325
53370
  }
53371
+ const transportCtx = slackConv ? { slack: { event: event || {}, conversation: slackConv } } : { telegram: { event: event || {}, conversation: telegramConv }, webhook: payload };
53326
53372
  providerConfig.eventContext = {
53327
53373
  ...providerConfig.eventContext,
53328
- slack: {
53329
- event: event || {},
53330
- conversation: slackConv
53331
- },
53332
- conversation: slackConv
53333
- // Also expose at top level for convenience
53374
+ ...transportCtx,
53375
+ conversation: conv
53376
+ // Expose at top level for all transports
53334
53377
  };
53335
53378
  break;
53336
53379
  }
@@ -54067,8 +54110,8 @@ function updateStats2(results, state, isForEachIteration = false) {
54067
54110
  async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
54068
54111
  try {
54069
54112
  const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
54070
- const fs25 = await import("fs/promises");
54071
- const path29 = await import("path");
54113
+ const fs29 = await import("fs/promises");
54114
+ const path33 = await import("path");
54072
54115
  const schemaRaw = checkConfig.schema || "plain";
54073
54116
  const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
54074
54117
  let templateContent;
@@ -54077,27 +54120,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
54077
54120
  logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
54078
54121
  } else if (checkConfig.template && checkConfig.template.file) {
54079
54122
  const file = String(checkConfig.template.file);
54080
- const resolved = path29.resolve(process.cwd(), file);
54081
- templateContent = await fs25.readFile(resolved, "utf-8");
54123
+ const resolved = path33.resolve(process.cwd(), file);
54124
+ templateContent = await fs29.readFile(resolved, "utf-8");
54082
54125
  logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
54083
54126
  } else if (schema && schema !== "plain") {
54084
54127
  const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
54085
54128
  if (sanitized) {
54086
54129
  const candidatePaths = [
54087
- path29.join(__dirname, "output", sanitized, "template.liquid"),
54130
+ path33.join(__dirname, "output", sanitized, "template.liquid"),
54088
54131
  // bundled: dist/output/
54089
- path29.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
54132
+ path33.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
54090
54133
  // source (from state-machine/states)
54091
- path29.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
54134
+ path33.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
54092
54135
  // source (alternate)
54093
- path29.join(process.cwd(), "output", sanitized, "template.liquid"),
54136
+ path33.join(process.cwd(), "output", sanitized, "template.liquid"),
54094
54137
  // fallback: cwd/output/
54095
- path29.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
54138
+ path33.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
54096
54139
  // fallback: cwd/dist/output/
54097
54140
  ];
54098
54141
  for (const p of candidatePaths) {
54099
54142
  try {
54100
- templateContent = await fs25.readFile(p, "utf-8");
54143
+ templateContent = await fs29.readFile(p, "utf-8");
54101
54144
  if (templateContent) {
54102
54145
  logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
54103
54146
  break;
@@ -56237,8 +56280,8 @@ var init_workspace_manager = __esm({
56237
56280
  );
56238
56281
  if (this.cleanupRequested && this.activeOperations === 0) {
56239
56282
  logger.debug(`[Workspace] All references released, proceeding with deferred cleanup`);
56240
- for (const resolve15 of this.cleanupResolvers) {
56241
- resolve15();
56283
+ for (const resolve19 of this.cleanupResolvers) {
56284
+ resolve19();
56242
56285
  }
56243
56286
  this.cleanupResolvers = [];
56244
56287
  }
@@ -56417,19 +56460,19 @@ var init_workspace_manager = __esm({
56417
56460
  );
56418
56461
  this.cleanupRequested = true;
56419
56462
  await Promise.race([
56420
- new Promise((resolve15) => {
56463
+ new Promise((resolve19) => {
56421
56464
  if (this.activeOperations === 0) {
56422
- resolve15();
56465
+ resolve19();
56423
56466
  } else {
56424
- this.cleanupResolvers.push(resolve15);
56467
+ this.cleanupResolvers.push(resolve19);
56425
56468
  }
56426
56469
  }),
56427
- new Promise((resolve15) => {
56470
+ new Promise((resolve19) => {
56428
56471
  setTimeout(() => {
56429
56472
  logger.warn(
56430
56473
  `[Workspace] Cleanup timeout after ${timeout}ms, proceeding anyway (${this.activeOperations} operations still active)`
56431
56474
  );
56432
- resolve15();
56475
+ resolve19();
56433
56476
  }, timeout);
56434
56477
  })
56435
56478
  ]);
@@ -56823,8 +56866,8 @@ var init_fair_concurrency_limiter = __esm({
56823
56866
  );
56824
56867
  const queuedAt = Date.now();
56825
56868
  const effectiveTimeout = queueTimeout ?? 12e4;
56826
- return new Promise((resolve15, reject) => {
56827
- const entry = { resolve: resolve15, reject, queuedAt };
56869
+ return new Promise((resolve19, reject) => {
56870
+ const entry = { resolve: resolve19, reject, queuedAt };
56828
56871
  entry.reminder = setInterval(() => {
56829
56872
  const waited = Math.round((Date.now() - queuedAt) / 1e3);
56830
56873
  const curQueued = this._totalQueued();
@@ -57131,6 +57174,1380 @@ var init_build_engine_context = __esm({
57131
57174
  }
57132
57175
  });
57133
57176
 
57177
+ // src/policy/default-engine.ts
57178
+ var DefaultPolicyEngine;
57179
+ var init_default_engine = __esm({
57180
+ "src/policy/default-engine.ts"() {
57181
+ "use strict";
57182
+ DefaultPolicyEngine = class {
57183
+ async initialize(_config) {
57184
+ }
57185
+ async evaluateCheckExecution(_checkId, _checkConfig) {
57186
+ return { allowed: true };
57187
+ }
57188
+ async evaluateToolInvocation(_serverName, _methodName, _transport) {
57189
+ return { allowed: true };
57190
+ }
57191
+ async evaluateCapabilities(_checkId, _capabilities) {
57192
+ return { allowed: true };
57193
+ }
57194
+ async shutdown() {
57195
+ }
57196
+ };
57197
+ }
57198
+ });
57199
+
57200
+ // src/enterprise/license/validator.ts
57201
+ var validator_exports = {};
57202
+ __export(validator_exports, {
57203
+ LicenseValidator: () => LicenseValidator
57204
+ });
57205
+ var crypto3, fs21, path26, LicenseValidator;
57206
+ var init_validator = __esm({
57207
+ "src/enterprise/license/validator.ts"() {
57208
+ "use strict";
57209
+ crypto3 = __toESM(require("crypto"));
57210
+ fs21 = __toESM(require("fs"));
57211
+ path26 = __toESM(require("path"));
57212
+ LicenseValidator = class _LicenseValidator {
57213
+ /** Ed25519 public key for license verification (PEM format). */
57214
+ static PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n-----END PUBLIC KEY-----\n";
57215
+ cache = null;
57216
+ static CACHE_TTL = 5 * 60 * 1e3;
57217
+ // 5 minutes
57218
+ static GRACE_PERIOD = 72 * 3600 * 1e3;
57219
+ // 72 hours after expiry
57220
+ /**
57221
+ * Load and validate license from environment or file.
57222
+ *
57223
+ * Resolution order:
57224
+ * 1. VISOR_LICENSE env var (JWT string)
57225
+ * 2. VISOR_LICENSE_FILE env var (path to file)
57226
+ * 3. .visor-license in project root (cwd)
57227
+ * 4. .visor-license in ~/.config/visor/
57228
+ */
57229
+ async loadAndValidate() {
57230
+ if (this.cache && Date.now() - this.cache.validatedAt < _LicenseValidator.CACHE_TTL) {
57231
+ return this.cache.payload;
57232
+ }
57233
+ const token = this.resolveToken();
57234
+ if (!token) return null;
57235
+ const payload = this.verifyAndDecode(token);
57236
+ if (!payload) return null;
57237
+ this.cache = { payload, validatedAt: Date.now() };
57238
+ return payload;
57239
+ }
57240
+ /** Check if a specific feature is licensed */
57241
+ hasFeature(feature) {
57242
+ if (!this.cache) return false;
57243
+ return this.cache.payload.features.includes(feature);
57244
+ }
57245
+ /** Check if license is valid (with grace period) */
57246
+ isValid() {
57247
+ if (!this.cache) return false;
57248
+ const now = Date.now();
57249
+ const expiryMs = this.cache.payload.exp * 1e3;
57250
+ return now < expiryMs + _LicenseValidator.GRACE_PERIOD;
57251
+ }
57252
+ /** Check if the license is within its grace period (expired but still valid) */
57253
+ isInGracePeriod() {
57254
+ if (!this.cache) return false;
57255
+ const now = Date.now();
57256
+ const expiryMs = this.cache.payload.exp * 1e3;
57257
+ return now >= expiryMs && now < expiryMs + _LicenseValidator.GRACE_PERIOD;
57258
+ }
57259
+ resolveToken() {
57260
+ if (process.env.VISOR_LICENSE) {
57261
+ return process.env.VISOR_LICENSE.trim();
57262
+ }
57263
+ if (process.env.VISOR_LICENSE_FILE) {
57264
+ const resolved = path26.resolve(process.env.VISOR_LICENSE_FILE);
57265
+ const home2 = process.env.HOME || process.env.USERPROFILE || "";
57266
+ const allowedPrefixes = [path26.normalize(process.cwd())];
57267
+ if (home2) allowedPrefixes.push(path26.normalize(path26.join(home2, ".config", "visor")));
57268
+ let realPath;
57269
+ try {
57270
+ realPath = fs21.realpathSync(resolved);
57271
+ } catch {
57272
+ return null;
57273
+ }
57274
+ const isSafe = allowedPrefixes.some(
57275
+ (prefix) => realPath === prefix || realPath.startsWith(prefix + path26.sep)
57276
+ );
57277
+ if (!isSafe) return null;
57278
+ return this.readFile(realPath);
57279
+ }
57280
+ const cwdPath = path26.join(process.cwd(), ".visor-license");
57281
+ const cwdToken = this.readFile(cwdPath);
57282
+ if (cwdToken) return cwdToken;
57283
+ const home = process.env.HOME || process.env.USERPROFILE || "";
57284
+ if (home) {
57285
+ const configPath = path26.join(home, ".config", "visor", ".visor-license");
57286
+ const configToken = this.readFile(configPath);
57287
+ if (configToken) return configToken;
57288
+ }
57289
+ return null;
57290
+ }
57291
+ readFile(filePath) {
57292
+ try {
57293
+ return fs21.readFileSync(filePath, "utf-8").trim();
57294
+ } catch {
57295
+ return null;
57296
+ }
57297
+ }
57298
+ verifyAndDecode(token) {
57299
+ try {
57300
+ const parts = token.split(".");
57301
+ if (parts.length !== 3) return null;
57302
+ const [headerB64, payloadB64, signatureB64] = parts;
57303
+ const header = JSON.parse(Buffer.from(headerB64, "base64url").toString());
57304
+ if (header.alg !== "EdDSA") return null;
57305
+ const data = `${headerB64}.${payloadB64}`;
57306
+ const signature = Buffer.from(signatureB64, "base64url");
57307
+ const publicKey = crypto3.createPublicKey(_LicenseValidator.PUBLIC_KEY);
57308
+ if (publicKey.asymmetricKeyType !== "ed25519") {
57309
+ return null;
57310
+ }
57311
+ const isValid = crypto3.verify(null, Buffer.from(data), publicKey, signature);
57312
+ if (!isValid) return null;
57313
+ const payload = JSON.parse(Buffer.from(payloadB64, "base64url").toString());
57314
+ if (!payload.org || !Array.isArray(payload.features) || typeof payload.exp !== "number" || typeof payload.iat !== "number" || !payload.sub) {
57315
+ return null;
57316
+ }
57317
+ const now = Date.now();
57318
+ const expiryMs = payload.exp * 1e3;
57319
+ if (now >= expiryMs + _LicenseValidator.GRACE_PERIOD) {
57320
+ return null;
57321
+ }
57322
+ return payload;
57323
+ } catch {
57324
+ return null;
57325
+ }
57326
+ }
57327
+ };
57328
+ }
57329
+ });
57330
+
57331
+ // src/enterprise/policy/opa-compiler.ts
57332
+ var fs22, path27, os2, crypto4, import_child_process8, OpaCompiler;
57333
+ var init_opa_compiler = __esm({
57334
+ "src/enterprise/policy/opa-compiler.ts"() {
57335
+ "use strict";
57336
+ fs22 = __toESM(require("fs"));
57337
+ path27 = __toESM(require("path"));
57338
+ os2 = __toESM(require("os"));
57339
+ crypto4 = __toESM(require("crypto"));
57340
+ import_child_process8 = require("child_process");
57341
+ OpaCompiler = class _OpaCompiler {
57342
+ static CACHE_DIR = path27.join(os2.tmpdir(), "visor-opa-cache");
57343
+ /**
57344
+ * Resolve the input paths to WASM bytes.
57345
+ *
57346
+ * Strategy:
57347
+ * 1. If any path is a .wasm file, read it directly
57348
+ * 2. If a directory contains policy.wasm, read it
57349
+ * 3. Otherwise, collect all .rego files and auto-compile via `opa build`
57350
+ */
57351
+ async resolveWasmBytes(paths) {
57352
+ const regoFiles = [];
57353
+ for (const p of paths) {
57354
+ const resolved = path27.resolve(p);
57355
+ if (path27.normalize(resolved).includes("..")) {
57356
+ throw new Error(`Policy path contains traversal sequences: ${p}`);
57357
+ }
57358
+ if (resolved.endsWith(".wasm") && fs22.existsSync(resolved)) {
57359
+ return fs22.readFileSync(resolved);
57360
+ }
57361
+ if (!fs22.existsSync(resolved)) continue;
57362
+ const stat2 = fs22.statSync(resolved);
57363
+ if (stat2.isDirectory()) {
57364
+ const wasmCandidate = path27.join(resolved, "policy.wasm");
57365
+ if (fs22.existsSync(wasmCandidate)) {
57366
+ return fs22.readFileSync(wasmCandidate);
57367
+ }
57368
+ const files = fs22.readdirSync(resolved);
57369
+ for (const f of files) {
57370
+ if (f.endsWith(".rego")) {
57371
+ regoFiles.push(path27.join(resolved, f));
57372
+ }
57373
+ }
57374
+ } else if (resolved.endsWith(".rego")) {
57375
+ regoFiles.push(resolved);
57376
+ }
57377
+ }
57378
+ if (regoFiles.length === 0) {
57379
+ throw new Error(
57380
+ `OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(", ")}`
57381
+ );
57382
+ }
57383
+ return this.compileRego(regoFiles);
57384
+ }
57385
+ /**
57386
+ * Auto-compile .rego files to a WASM bundle using the `opa` CLI.
57387
+ *
57388
+ * Caches the compiled bundle based on a content hash of all input .rego files
57389
+ * so subsequent runs skip compilation if policies haven't changed.
57390
+ */
57391
+ compileRego(regoFiles) {
57392
+ try {
57393
+ (0, import_child_process8.execFileSync)("opa", ["version"], { stdio: "pipe" });
57394
+ } catch {
57395
+ throw new Error(
57396
+ "OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\nOr pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz " + regoFiles.join(" ")
57397
+ );
57398
+ }
57399
+ const hash = crypto4.createHash("sha256");
57400
+ for (const f of regoFiles.sort()) {
57401
+ hash.update(fs22.readFileSync(f));
57402
+ hash.update(f);
57403
+ }
57404
+ const cacheKey = hash.digest("hex").slice(0, 16);
57405
+ const cacheDir = _OpaCompiler.CACHE_DIR;
57406
+ const cachedWasm = path27.join(cacheDir, `${cacheKey}.wasm`);
57407
+ if (fs22.existsSync(cachedWasm)) {
57408
+ return fs22.readFileSync(cachedWasm);
57409
+ }
57410
+ fs22.mkdirSync(cacheDir, { recursive: true });
57411
+ const bundleTar = path27.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
57412
+ try {
57413
+ const args = [
57414
+ "build",
57415
+ "-t",
57416
+ "wasm",
57417
+ "-e",
57418
+ "visor",
57419
+ // entrypoint: the visor package tree
57420
+ "-o",
57421
+ bundleTar,
57422
+ ...regoFiles
57423
+ ];
57424
+ (0, import_child_process8.execFileSync)("opa", args, {
57425
+ stdio: "pipe",
57426
+ timeout: 3e4
57427
+ });
57428
+ } catch (err) {
57429
+ const stderr = err?.stderr?.toString() || "";
57430
+ throw new Error(
57431
+ `Failed to compile .rego files to WASM:
57432
+ ${stderr}
57433
+ Ensure your .rego files are valid and the \`opa\` CLI is installed.`
57434
+ );
57435
+ }
57436
+ try {
57437
+ (0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "/policy.wasm"], {
57438
+ stdio: "pipe"
57439
+ });
57440
+ const extractedWasm = path27.join(cacheDir, "policy.wasm");
57441
+ if (fs22.existsSync(extractedWasm)) {
57442
+ fs22.renameSync(extractedWasm, cachedWasm);
57443
+ }
57444
+ } catch {
57445
+ try {
57446
+ (0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "policy.wasm"], {
57447
+ stdio: "pipe"
57448
+ });
57449
+ const extractedWasm = path27.join(cacheDir, "policy.wasm");
57450
+ if (fs22.existsSync(extractedWasm)) {
57451
+ fs22.renameSync(extractedWasm, cachedWasm);
57452
+ }
57453
+ } catch (err2) {
57454
+ throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
57455
+ }
57456
+ }
57457
+ try {
57458
+ fs22.unlinkSync(bundleTar);
57459
+ } catch {
57460
+ }
57461
+ if (!fs22.existsSync(cachedWasm)) {
57462
+ throw new Error("OPA build succeeded but policy.wasm was not found in the bundle");
57463
+ }
57464
+ return fs22.readFileSync(cachedWasm);
57465
+ }
57466
+ };
57467
+ }
57468
+ });
57469
+
57470
+ // src/enterprise/policy/opa-wasm-evaluator.ts
57471
+ var fs23, path28, OpaWasmEvaluator;
57472
+ var init_opa_wasm_evaluator = __esm({
57473
+ "src/enterprise/policy/opa-wasm-evaluator.ts"() {
57474
+ "use strict";
57475
+ fs23 = __toESM(require("fs"));
57476
+ path28 = __toESM(require("path"));
57477
+ init_opa_compiler();
57478
+ OpaWasmEvaluator = class {
57479
+ policy = null;
57480
+ dataDocument = {};
57481
+ compiler = new OpaCompiler();
57482
+ async initialize(rulesPath) {
57483
+ const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
57484
+ const wasmBytes = await this.compiler.resolveWasmBytes(paths);
57485
+ try {
57486
+ const { createRequire } = require("module");
57487
+ const runtimeRequire = createRequire(__filename);
57488
+ const opaWasm = runtimeRequire("@open-policy-agent/opa-wasm");
57489
+ const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
57490
+ if (!loadPolicy) {
57491
+ throw new Error("loadPolicy not found in @open-policy-agent/opa-wasm");
57492
+ }
57493
+ this.policy = await loadPolicy(wasmBytes);
57494
+ } catch (err) {
57495
+ if (err?.code === "MODULE_NOT_FOUND" || err?.code === "ERR_MODULE_NOT_FOUND") {
57496
+ throw new Error(
57497
+ "OPA WASM evaluator requires @open-policy-agent/opa-wasm. Install it with: npm install @open-policy-agent/opa-wasm"
57498
+ );
57499
+ }
57500
+ throw err;
57501
+ }
57502
+ }
57503
+ /**
57504
+ * Load external data from a JSON file to use as the OPA data document.
57505
+ * The loaded data will be passed to `policy.setData()` during evaluation,
57506
+ * making it available in Rego via `data.<key>`.
57507
+ */
57508
+ loadData(dataPath) {
57509
+ const resolved = path28.resolve(dataPath);
57510
+ if (path28.normalize(resolved).includes("..")) {
57511
+ throw new Error(`Data path contains traversal sequences: ${dataPath}`);
57512
+ }
57513
+ if (!fs23.existsSync(resolved)) {
57514
+ throw new Error(`OPA data file not found: ${resolved}`);
57515
+ }
57516
+ const stat2 = fs23.statSync(resolved);
57517
+ if (stat2.size > 10 * 1024 * 1024) {
57518
+ throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat2.size} bytes)`);
57519
+ }
57520
+ const raw = fs23.readFileSync(resolved, "utf-8");
57521
+ try {
57522
+ const parsed = JSON.parse(raw);
57523
+ if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
57524
+ throw new Error("OPA data file must contain a JSON object (not an array or primitive)");
57525
+ }
57526
+ this.dataDocument = parsed;
57527
+ } catch (err) {
57528
+ if (err.message.startsWith("OPA data file must")) {
57529
+ throw err;
57530
+ }
57531
+ throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
57532
+ }
57533
+ }
57534
+ async evaluate(input) {
57535
+ if (!this.policy) {
57536
+ throw new Error("OPA WASM evaluator not initialized");
57537
+ }
57538
+ this.policy.setData(this.dataDocument);
57539
+ const resultSet = this.policy.evaluate(input);
57540
+ if (Array.isArray(resultSet) && resultSet.length > 0) {
57541
+ return resultSet[0].result;
57542
+ }
57543
+ return void 0;
57544
+ }
57545
+ async shutdown() {
57546
+ if (this.policy) {
57547
+ if (typeof this.policy.close === "function") {
57548
+ try {
57549
+ this.policy.close();
57550
+ } catch {
57551
+ }
57552
+ } else if (typeof this.policy.free === "function") {
57553
+ try {
57554
+ this.policy.free();
57555
+ } catch {
57556
+ }
57557
+ }
57558
+ }
57559
+ this.policy = null;
57560
+ }
57561
+ };
57562
+ }
57563
+ });
57564
+
57565
+ // src/enterprise/policy/opa-http-evaluator.ts
57566
+ var OpaHttpEvaluator;
57567
+ var init_opa_http_evaluator = __esm({
57568
+ "src/enterprise/policy/opa-http-evaluator.ts"() {
57569
+ "use strict";
57570
+ OpaHttpEvaluator = class {
57571
+ baseUrl;
57572
+ timeout;
57573
+ constructor(baseUrl, timeout = 5e3) {
57574
+ let parsed;
57575
+ try {
57576
+ parsed = new URL(baseUrl);
57577
+ } catch {
57578
+ throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
57579
+ }
57580
+ if (!["http:", "https:"].includes(parsed.protocol)) {
57581
+ throw new Error(
57582
+ `OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`
57583
+ );
57584
+ }
57585
+ const hostname = parsed.hostname;
57586
+ if (this.isBlockedHostname(hostname)) {
57587
+ throw new Error(
57588
+ `OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`
57589
+ );
57590
+ }
57591
+ this.baseUrl = baseUrl.replace(/\/+$/, "");
57592
+ this.timeout = timeout;
57593
+ }
57594
+ /**
57595
+ * Check if a hostname is blocked due to SSRF concerns.
57596
+ *
57597
+ * Blocks:
57598
+ * - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
57599
+ * - Link-local addresses (169.254.x.x)
57600
+ * - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
57601
+ * - IPv6 unique local addresses (fd00::/8)
57602
+ * - Cloud metadata services (*.internal)
57603
+ */
57604
+ isBlockedHostname(hostname) {
57605
+ if (!hostname) return true;
57606
+ const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, "");
57607
+ if (normalized === "metadata.google.internal" || normalized.endsWith(".internal")) {
57608
+ return true;
57609
+ }
57610
+ if (normalized === "localhost" || normalized === "localhost.localdomain") {
57611
+ return true;
57612
+ }
57613
+ if (normalized === "::1" || normalized === "0:0:0:0:0:0:0:1") {
57614
+ return true;
57615
+ }
57616
+ const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
57617
+ const ipv4Match = normalized.match(ipv4Pattern);
57618
+ if (ipv4Match) {
57619
+ const octets = ipv4Match.slice(1, 5).map(Number);
57620
+ if (octets.some((octet) => octet > 255)) {
57621
+ return false;
57622
+ }
57623
+ const [a, b] = octets;
57624
+ if (a === 127) {
57625
+ return true;
57626
+ }
57627
+ if (a === 0) {
57628
+ return true;
57629
+ }
57630
+ if (a === 169 && b === 254) {
57631
+ return true;
57632
+ }
57633
+ if (a === 10) {
57634
+ return true;
57635
+ }
57636
+ if (a === 172 && b >= 16 && b <= 31) {
57637
+ return true;
57638
+ }
57639
+ if (a === 192 && b === 168) {
57640
+ return true;
57641
+ }
57642
+ }
57643
+ if (normalized.startsWith("fd") || normalized.startsWith("fc")) {
57644
+ return true;
57645
+ }
57646
+ if (normalized.startsWith("fe80:")) {
57647
+ return true;
57648
+ }
57649
+ return false;
57650
+ }
57651
/**
 * Evaluate a policy rule against an input document via OPA REST API.
 *
 * POSTs `{ input }` to `<baseUrl>/v1/data/<rulePath>` and returns the
 * `result` field of OPA's JSON response (may be undefined if OPA returns
 * no result for the path).
 *
 * @param input - The input document to evaluate
 * @param rulePath - OPA rule path (e.g., 'visor/check/execute')
 * @returns The `result` object from OPA's response, or undefined
 * @throws Error on non-2xx HTTP status, unparseable JSON, or when the
 *         request is aborted by the timeout (errors are handled by the
 *         caller).
 */
async evaluate(input, rulePath) {
  // Encode each path segment separately so the '/' separators survive.
  const encodedPath = rulePath.split("/").map((s) => encodeURIComponent(s)).join("/");
  const url = `${this.baseUrl}/v1/data/${encodedPath}`;
  // Abort the fetch once this.timeout elapses.
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), this.timeout);
  try {
    const response = await fetch(url, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ input }),
      signal: controller.signal
    });
    if (!response.ok) {
      throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
    }
    let body;
    try {
      body = await response.json();
    } catch (jsonErr) {
      throw new Error(
        `OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`
      );
    }
    return body?.result;
  } finally {
    // Always clear the timer so it cannot fire after completion.
    clearTimeout(timer);
  }
}
57686
/** No persistent connections are held by the HTTP evaluator; shutdown is a no-op. */
async shutdown() {
}
57688
+ };
57689
+ }
57690
+ });
57691
+
57692
// src/enterprise/policy/policy-input-builder.ts
var PolicyInputBuilder;
var init_policy_input_builder = __esm({
  "src/enterprise/policy/policy-input-builder.ts"() {
    "use strict";
    // Builds the OPA `input` documents for the three policy scopes
    // (check.execute, tool.invoke, capability.resolve), attaching the
    // actor (with resolved roles), repository, and pull-request context.
    PolicyInputBuilder = class {
      roles;        // role-name -> role config, from policyConfig.roles
      actor;        // identity of the requesting user (GitHub/Slack fields)
      repository;   // repo context passed through into every input
      pullRequest;  // PR context passed through into every input
      constructor(policyConfig, actor, repository, pullRequest) {
        this.roles = policyConfig.roles || {};
        this.actor = actor;
        this.repository = repository;
        this.pullRequest = pullRequest;
      }
      /**
       * Resolve which roles apply to the current actor.
       *
       * A role matches if ANY identity criterion matches (author
       * association, GitHub login, Slack user id, or email,
       * case-insensitive). If the role also lists slack_channels, the
       * actor's current channel must additionally be one of them.
       */
      resolveRoles() {
        const matched = [];
        for (const [roleName, roleConfig] of Object.entries(this.roles)) {
          let identityMatch = false;
          if (roleConfig.author_association && this.actor.authorAssociation && roleConfig.author_association.includes(this.actor.authorAssociation)) {
            identityMatch = true;
          }
          if (!identityMatch && roleConfig.users && this.actor.login && roleConfig.users.includes(this.actor.login)) {
            identityMatch = true;
          }
          if (!identityMatch && roleConfig.slack_users && this.actor.slack?.userId && roleConfig.slack_users.includes(this.actor.slack.userId)) {
            identityMatch = true;
          }
          if (!identityMatch && roleConfig.emails && this.actor.slack?.email) {
            // Emails are compared case-insensitively.
            const actorEmail = this.actor.slack.email.toLowerCase();
            if (roleConfig.emails.some((e) => e.toLowerCase() === actorEmail)) {
              identityMatch = true;
            }
          }
          if (!identityMatch) continue;
          // slack_channels is a restriction on top of the identity match.
          if (roleConfig.slack_channels && roleConfig.slack_channels.length > 0) {
            if (!this.actor.slack?.channelId || !roleConfig.slack_channels.includes(this.actor.slack.channelId)) {
              continue;
            }
          }
          matched.push(roleName);
        }
        return matched;
      }
      /** Build the `actor` section shared by all policy inputs. */
      buildActor() {
        return {
          authorAssociation: this.actor.authorAssociation,
          login: this.actor.login,
          roles: this.resolveRoles(),
          isLocalMode: this.actor.isLocalMode,
          // Only include the slack sub-object when present.
          ...this.actor.slack && { slack: this.actor.slack }
        };
      }
      /** Input document for the check.execute policy scope. */
      forCheckExecution(check) {
        return {
          scope: "check.execute",
          check: {
            id: check.id,
            type: check.type,
            group: check.group,
            tags: check.tags,
            criticality: check.criticality,
            sandbox: check.sandbox,
            policy: check.policy
          },
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      /** Input document for the tool.invoke policy scope. */
      forToolInvocation(serverName, methodName, transport) {
        return {
          scope: "tool.invoke",
          tool: { serverName, methodName, transport },
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      /** Input document for the capability.resolve policy scope. */
      forCapabilityResolve(checkId, capabilities) {
        return {
          scope: "capability.resolve",
          check: { id: checkId, type: "ai" },
          capability: capabilities,
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
    };
  }
});
57786
+
57787
// src/enterprise/policy/opa-policy-engine.ts
var opa_policy_engine_exports = {};
__export(opa_policy_engine_exports, {
  OpaPolicyEngine: () => OpaPolicyEngine
});
var OpaPolicyEngine;
var init_opa_policy_engine = __esm({
  "src/enterprise/policy/opa-policy-engine.ts"() {
    "use strict";
    init_opa_wasm_evaluator();
    init_opa_http_evaluator();
    init_policy_input_builder();
    // Facade over the OPA evaluators. Selects local (WASM) or remote
    // (HTTP) evaluation, builds policy inputs, enforces a timeout, and
    // applies the configured fallback (deny/warn/allow) on failure.
    OpaPolicyEngine = class {
      evaluator = null;     // OpaWasmEvaluator | OpaHttpEvaluator | null (disabled)
      fallback;             // behavior when evaluation fails or yields no result
      timeout;              // evaluation timeout in ms
      config;
      inputBuilder = null;  // PolicyInputBuilder, set during initialize()
      logger = null;        // lazily loaded; engine works without it
      constructor(config) {
        this.config = config;
        // Default to fail-closed: deny when the policy cannot be evaluated.
        this.fallback = config.fallback || "deny";
        this.timeout = config.timeout || 5e3;
      }
      async initialize(config) {
        // Logger is optional — swallow load failures deliberately.
        try {
          this.logger = (init_logger(), __toCommonJS(logger_exports)).logger;
        } catch {
        }
        // Derive actor/repo/PR context from the CI environment.
        const actor = {
          authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
          login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
          isLocalMode: !process.env.GITHUB_ACTIONS
        };
        const repo = {
          owner: process.env.GITHUB_REPOSITORY_OWNER,
          name: process.env.GITHUB_REPOSITORY?.split("/")[1],
          branch: process.env.GITHUB_HEAD_REF,
          baseBranch: process.env.GITHUB_BASE_REF,
          event: process.env.GITHUB_EVENT_NAME
        };
        const prNum = process.env.GITHUB_PR_NUMBER ? parseInt(process.env.GITHUB_PR_NUMBER, 10) : void 0;
        const pullRequest = {
          // Guard against a non-numeric GITHUB_PR_NUMBER.
          number: prNum !== void 0 && Number.isFinite(prNum) ? prNum : void 0
        };
        this.inputBuilder = new PolicyInputBuilder(config, actor, repo, pullRequest);
        if (config.engine === "local") {
          if (!config.rules) {
            throw new Error("OPA local mode requires `policy.rules` path to .wasm or .rego files");
          }
          const wasm = new OpaWasmEvaluator();
          await wasm.initialize(config.rules);
          if (config.data) {
            wasm.loadData(config.data);
          }
          this.evaluator = wasm;
        } else if (config.engine === "remote") {
          if (!config.url) {
            throw new Error("OPA remote mode requires `policy.url` pointing to OPA server");
          }
          this.evaluator = new OpaHttpEvaluator(config.url, this.timeout);
        } else {
          // Unknown engine: policy checks become no-ops (allowed: true).
          this.evaluator = null;
        }
      }
      /**
       * Update actor/repo/PR context (e.g., after PR info becomes available).
       * Called by the enterprise loader when engine context is enriched.
       */
      setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new PolicyInputBuilder(this.config, actor, repo, pullRequest);
      }
      /** Evaluate whether a check may execute; allows when the engine is disabled. */
      async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const cfg = checkConfig && typeof checkConfig === "object" ? checkConfig : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
          id: checkId,
          type: cfg.type || "ai",
          group: cfg.group,
          tags: cfg.tags,
          criticality: cfg.criticality,
          sandbox: cfg.sandbox,
          policy: policyOverride
        });
        return this.doEvaluate(input, this.resolveRulePath("check.execute", policyOverride?.rule));
      }
      /** Evaluate whether an MCP tool invocation is permitted. */
      async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, "visor/tool/invoke");
      }
      /** Evaluate which capabilities a check may receive. */
      async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, "visor/capability/resolve");
      }
      /** Release the active evaluator (if any) and clear state. */
      async shutdown() {
        if (this.evaluator && "shutdown" in this.evaluator) {
          await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
      }
      /**
       * Map a scope (e.g. 'check.execute') to an OPA rule path, honoring a
       * per-check override; bare overrides get the 'visor/' prefix added.
       */
      resolveRulePath(defaultScope, override) {
        if (override) {
          return override.startsWith("visor/") ? override : `visor/${override}`;
        }
        return `visor/${defaultScope.replace(/\./g, "/")}`;
      }
      /**
       * Run one evaluation with a timeout, then apply fallback semantics:
       * - 'warn' converts denials into allowed-with-warning ("audit" mode);
       * - on any failure, 'allow'/'warn' permit, 'deny' blocks.
       */
      async doEvaluate(input, rulePath) {
        try {
          this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
          let timer;
          // Race the evaluation against a timeout rejection.
          const timeoutPromise = new Promise((_resolve, reject) => {
            timer = setTimeout(() => reject(new Error("policy evaluation timeout")), this.timeout);
          });
          try {
            const result = await Promise.race([this.rawEvaluate(input, rulePath), timeoutPromise]);
            const decision = this.parseDecision(result);
            if (!decision.allowed && this.fallback === "warn") {
              decision.allowed = true;
              decision.warn = true;
              decision.reason = `audit: ${decision.reason || "policy denied"}`;
            }
            this.logger?.debug(
              `[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || "none"}`
            );
            return decision;
          } finally {
            if (timer) clearTimeout(timer);
          }
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: `policy evaluation failed, fallback=${this.fallback}`
          };
        }
      }
      /**
       * Dispatch to the active evaluator. WASM evaluates the whole package
       * and we navigate into the rule's subtree; HTTP takes the path itself.
       */
      async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof OpaWasmEvaluator) {
          const result = await this.evaluator.evaluate(input);
          return this.navigateWasmResult(result, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
      }
      /**
       * Navigate nested OPA WASM result tree to reach the specific rule's output.
       * The WASM entrypoint `-e visor` means the result root IS the visor package,
       * so we strip the `visor/` prefix and walk the remaining segments.
       */
      navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== "object") return result;
        const segments = rulePath.replace(/^visor\//, "").split("/");
        let current = result;
        for (const seg of segments) {
          if (current && typeof current === "object" && seg in current) {
            current = current[seg];
          } else {
            // Missing segment: rule produced no output for this path.
            return void 0;
          }
        }
        return current;
      }
      /**
       * Translate a raw rule output into a decision. Missing results follow
       * the fallback; otherwise a result is allowed unless it explicitly
       * sets `allowed: false`. Capabilities pass through when present.
       */
      parseDecision(result) {
        if (result === void 0 || result === null) {
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: this.fallback === "warn" ? "audit: no policy result" : "no policy result"
          };
        }
        const allowed = result.allowed !== false;
        const decision = {
          allowed,
          reason: result.reason
        };
        if (result.capabilities) {
          decision.capabilities = result.capabilities;
        }
        return decision;
      }
    };
  }
});
57975
+
57976
// src/enterprise/scheduler/knex-store.ts
// Lazily-populated module namespace for the knex-backed scheduler store.
var knex_store_exports = {};
__export(knex_store_exports, {
  KnexStoreBackend: () => KnexStoreBackend
});
57981
/**
 * Coerce a numeric DB column value to a JS number.
 * Some drivers return BIGINT columns as strings; those are parsed in
 * base 10. null/undefined map to undefined; numbers pass through.
 */
function toNum(val) {
  if (val == null) return undefined;
  if (typeof val !== "string") return val;
  return parseInt(val, 10);
}
57985
/**
 * Parse a JSON text column, returning undefined instead of throwing on
 * empty/null values or malformed JSON.
 */
function safeJsonParse2(value) {
  let parsed;
  if (value) {
    try {
      parsed = JSON.parse(value);
    } catch {
      // Malformed JSON in the DB — treat as absent.
    }
  }
  return parsed;
}
57993
/**
 * Hydrate a message-trigger domain object from its raw DB row.
 * JSON text columns are parsed leniently; boolean columns accept both
 * the native true and the numeric 1 encodings used by some drivers.
 */
function fromTriggerRow2(row) {
  const asBool = (value) => value === true || value === 1;
  const optional = (value) => value ?? undefined;
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: optional(row.creator_context),
    creatorName: optional(row.creator_name),
    description: optional(row.description),
    channels: safeJsonParse2(row.channels),
    fromUsers: safeJsonParse2(row.from_users),
    fromBots: asBool(row.from_bots),
    contains: safeJsonParse2(row.contains),
    matchPattern: optional(row.match_pattern),
    threads: row.threads,
    workflow: row.workflow,
    inputs: safeJsonParse2(row.inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    enabled: asBool(row.enabled),
    createdAt: toNum(row.created_at)
  };
}
58014
/**
 * Flatten a message-trigger domain object into its snake_case DB row.
 * Structured fields are JSON-encoded; absent optionals become SQL NULL.
 */
function toTriggerInsertRow(trigger) {
  const asJson = (value) => (value ? JSON.stringify(value) : null);
  const orNull = (value) => value ?? null;
  return {
    id: trigger.id,
    creator_id: trigger.creatorId,
    creator_context: orNull(trigger.creatorContext),
    creator_name: orNull(trigger.creatorName),
    description: orNull(trigger.description),
    channels: asJson(trigger.channels),
    from_users: asJson(trigger.fromUsers),
    from_bots: trigger.fromBots,
    contains: asJson(trigger.contains),
    match_pattern: orNull(trigger.matchPattern),
    threads: trigger.threads,
    workflow: trigger.workflow,
    inputs: asJson(trigger.inputs),
    output_context: asJson(trigger.outputContext),
    status: trigger.status,
    enabled: trigger.enabled,
    created_at: trigger.createdAt
  };
}
58035
/**
 * Hydrate a schedule domain object from its raw DB row.
 * Timestamp columns may arrive as strings (BIGINT) and are normalized;
 * JSON text columns are parsed leniently.
 */
function fromDbRow2(row) {
  const optional = (value) => value ?? undefined;
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: optional(row.creator_context),
    creatorName: optional(row.creator_name),
    timezone: row.timezone,
    schedule: row.schedule_expr,
    runAt: toNum(row.run_at),
    isRecurring: row.is_recurring === true || row.is_recurring === 1,
    originalExpression: row.original_expression,
    workflow: optional(row.workflow),
    workflowInputs: safeJsonParse2(row.workflow_inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    createdAt: toNum(row.created_at),
    lastRunAt: toNum(row.last_run_at),
    nextRunAt: toNum(row.next_run_at),
    runCount: row.run_count,
    failureCount: row.failure_count,
    lastError: optional(row.last_error),
    previousResponse: optional(row.previous_response)
  };
}
58059
/**
 * Flatten a schedule domain object into its snake_case DB row.
 * Optional scalars become SQL NULL; structured fields are JSON-encoded.
 */
function toInsertRow(schedule) {
  const asJson = (value) => (value ? JSON.stringify(value) : null);
  const orNull = (value) => value ?? null;
  return {
    id: schedule.id,
    creator_id: schedule.creatorId,
    creator_context: orNull(schedule.creatorContext),
    creator_name: orNull(schedule.creatorName),
    timezone: schedule.timezone,
    schedule_expr: schedule.schedule,
    run_at: orNull(schedule.runAt),
    is_recurring: schedule.isRecurring,
    original_expression: schedule.originalExpression,
    workflow: orNull(schedule.workflow),
    workflow_inputs: asJson(schedule.workflowInputs),
    output_context: asJson(schedule.outputContext),
    status: schedule.status,
    created_at: schedule.createdAt,
    last_run_at: orNull(schedule.lastRunAt),
    next_run_at: orNull(schedule.nextRunAt),
    run_count: schedule.runCount,
    failure_count: schedule.failureCount,
    last_error: orNull(schedule.lastError),
    previous_response: orNull(schedule.previousResponse)
  };
}
58083
var fs24, path29, import_uuid2, KnexStoreBackend;
var init_knex_store = __esm({
  "src/enterprise/scheduler/knex-store.ts"() {
    "use strict";
    fs24 = __toESM(require("fs"));
    path29 = __toESM(require("path"));
    import_uuid2 = require("uuid");
    init_logger();
    // Schedule/trigger store backed by a SQL database via knex.
    // Supports postgresql, mysql, and mssql; knex itself is an optional
    // dependency resolved at runtime in initialize().
    KnexStoreBackend = class {
      knex = null;   // knex instance; null until initialize()
      driver;        // 'postgresql' | 'mysql' | 'mssql'
      connection;    // raw connection config from storageConfig
      constructor(driver, storageConfig, _haConfig) {
        this.driver = driver;
        this.connection = storageConfig.connection || {};
      }
      /**
       * Load knex at runtime, build the driver-specific connection config,
       * create the pool, and run schema migration.
       * @throws a friendly error if knex is not installed.
       */
      async initialize() {
        const { createRequire } = require("module");
        const runtimeRequire = createRequire(__filename);
        let knexFactory;
        try {
          knexFactory = runtimeRequire("knex");
        } catch (err) {
          const code = err?.code;
          if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
            throw new Error(
              "knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
            );
          }
          throw err;
        }
        // Map our driver names to the knex client packages.
        const clientMap = {
          postgresql: "pg",
          mysql: "mysql2",
          mssql: "tedious"
        };
        const client = clientMap[this.driver];
        let connection;
        if (this.connection.connection_string) {
          // A full connection string overrides individual fields.
          connection = this.connection.connection_string;
        } else if (this.driver === "mssql") {
          connection = this.buildMssqlConnection();
        } else {
          connection = this.buildStandardConnection();
        }
        this.knex = knexFactory({
          client,
          connection,
          pool: {
            min: this.connection.pool?.min ?? 0,
            max: this.connection.pool?.max ?? 10
          }
        });
        await this.migrateSchema();
        logger.info(`[KnexStore] Initialized (${this.driver})`);
      }
      /** Connection object for pg/mysql2 (shared field names). */
      buildStandardConnection() {
        return {
          host: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          ssl: this.resolveSslConfig()
        };
      }
      /** Connection object for MSSQL/tedious, which uses a different shape. */
      buildMssqlConnection() {
        const ssl = this.connection.ssl;
        const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
        return {
          server: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          options: {
            encrypt: sslEnabled,
            // Trust the server cert only when explicitly opted out of
            // verification, or when SSL is off entirely.
            trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
          }
        };
      }
      /**
       * Translate the ssl config (false | true | object) into the pg/mysql2
       * ssl option, loading CA/cert/key files from disk when configured.
       */
      resolveSslConfig() {
        const ssl = this.connection.ssl;
        if (ssl === false || ssl === void 0) return false;
        if (ssl === true) return { rejectUnauthorized: true };
        if (ssl.enabled === false) return false;
        const result = {
          rejectUnauthorized: ssl.reject_unauthorized !== false
        };
        if (ssl.ca) {
          const caPath = this.validateSslPath(ssl.ca, "CA certificate");
          result.ca = fs24.readFileSync(caPath, "utf8");
        }
        if (ssl.cert) {
          const certPath = this.validateSslPath(ssl.cert, "client certificate");
          result.cert = fs24.readFileSync(certPath, "utf8");
        }
        if (ssl.key) {
          const keyPath = this.validateSslPath(ssl.key, "client key");
          result.key = fs24.readFileSync(keyPath, "utf8");
        }
        return result;
      }
      /** Resolve and sanity-check an SSL file path; throws if missing. */
      validateSslPath(filePath, label) {
        const resolved = path29.resolve(filePath);
        if (resolved !== path29.normalize(resolved)) {
          throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
        }
        if (!fs24.existsSync(resolved)) {
          throw new Error(`SSL ${label} not found: ${filePath}`);
        }
        return resolved;
      }
      /** Destroy the connection pool. Safe to call when not initialized. */
      async shutdown() {
        if (this.knex) {
          await this.knex.destroy();
          this.knex = null;
        }
      }
      /**
       * Create the schedules, message_triggers, and scheduler_locks tables
       * if they do not already exist. Timestamps are stored as epoch-ms
       * bigints; booleans use the driver's native encoding.
       */
      async migrateSchema() {
        const knex = this.getKnex();
        const exists = await knex.schema.hasTable("schedules");
        if (!exists) {
          await knex.schema.createTable("schedules", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.string("timezone", 64).notNullable().defaultTo("UTC");
            table.string("schedule_expr", 255);
            table.bigInteger("run_at");
            table.boolean("is_recurring").notNullable();
            table.text("original_expression");
            table.string("workflow", 255);
            table.text("workflow_inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().index();
            table.bigInteger("created_at").notNullable();
            table.bigInteger("last_run_at");
            table.bigInteger("next_run_at");
            table.integer("run_count").notNullable().defaultTo(0);
            table.integer("failure_count").notNullable().defaultTo(0);
            table.text("last_error");
            table.text("previous_response");
            // Composite index for the due-schedule polling query.
            table.index(["status", "next_run_at"]);
          });
        }
        const triggersExist = await knex.schema.hasTable("message_triggers");
        if (!triggersExist) {
          await knex.schema.createTable("message_triggers", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.text("description");
            table.text("channels");
            table.text("from_users");
            table.boolean("from_bots").notNullable().defaultTo(false);
            table.text("contains");
            table.text("match_pattern");
            table.string("threads", 20).notNullable().defaultTo("any");
            table.string("workflow", 255).notNullable();
            table.text("inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().defaultTo("active").index();
            table.boolean("enabled").notNullable().defaultTo(true);
            table.bigInteger("created_at").notNullable();
          });
        }
        const locksExist = await knex.schema.hasTable("scheduler_locks");
        if (!locksExist) {
          await knex.schema.createTable("scheduler_locks", (table) => {
            table.string("lock_id", 255).primary();
            table.string("node_id", 255).notNullable();
            table.string("lock_token", 36).notNullable();
            table.bigInteger("acquired_at").notNullable();
            table.bigInteger("expires_at").notNullable();
          });
        }
      }
      /** Return the knex instance or throw if initialize() was not called. */
      getKnex() {
        if (!this.knex) {
          throw new Error("[KnexStore] Not initialized. Call initialize() first.");
        }
        return this.knex;
      }
      // --- CRUD ---
      /** Insert a new schedule with a fresh UUID and zeroed counters. */
      async create(schedule) {
        const knex = this.getKnex();
        const newSchedule = {
          ...schedule,
          id: (0, import_uuid2.v4)(),
          createdAt: Date.now(),
          runCount: 0,
          failureCount: 0,
          status: "active"
        };
        await knex("schedules").insert(toInsertRow(newSchedule));
        logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
        return newSchedule;
      }
      /** Insert a schedule with its existing id; no-op if it already exists. */
      async importSchedule(schedule) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", schedule.id).first();
        if (existing) return;
        await knex("schedules").insert(toInsertRow(schedule));
      }
      /** Fetch a schedule by id, or undefined if not found. */
      async get(id) {
        const knex = this.getKnex();
        const row = await knex("schedules").where("id", id).first();
        return row ? fromDbRow2(row) : void 0;
      }
      /** Merge a partial patch into a schedule; id cannot be changed. */
      async update(id, patch) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", id).first();
        if (!existing) return void 0;
        const current = fromDbRow2(existing);
        const updated = { ...current, ...patch, id: current.id };
        const row = toInsertRow(updated);
        delete row.id;
        await knex("schedules").where("id", id).update(row);
        return updated;
      }
      /** Delete a schedule; returns true if a row was removed. */
      async delete(id) {
        const knex = this.getKnex();
        const deleted = await knex("schedules").where("id", id).del();
        if (deleted > 0) {
          logger.info(`[KnexStore] Deleted schedule ${id}`);
          return true;
        }
        return false;
      }
      // --- Queries ---
      /** All schedules owned by the given creator. */
      async getByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("creator_id", creatorId);
        return rows.map((r) => fromDbRow2(r));
      }
      /** All schedules currently in 'active' status. */
      async getActiveSchedules() {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("status", "active");
        return rows.map((r) => fromDbRow2(r));
      }
      /**
       * Active schedules due at `now` (default: current time): one-time
       * schedules via run_at, recurring ones via next_run_at. MSSQL stores
       * booleans as 0/1, hence the driver-specific literals.
       */
      async getDueSchedules(now) {
        const ts = now ?? Date.now();
        const knex = this.getKnex();
        const bFalse = this.driver === "mssql" ? 0 : false;
        const bTrue = this.driver === "mssql" ? 1 : true;
        const rows = await knex("schedules").where("status", "active").andWhere(function() {
          this.where(function() {
            this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
          }).orWhere(function() {
            this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
          });
        });
        return rows.map((r) => fromDbRow2(r));
      }
      /**
       * Case-insensitive substring search of a creator's active schedules
       * by workflow name; LIKE wildcards in the input are escaped.
       */
      async findByWorkflow(creatorId, workflowName) {
        const knex = this.getKnex();
        const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
        const pattern = `%${escaped}%`;
        const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
        return rows.map((r) => fromDbRow2(r));
      }
      /** Every schedule in the table, regardless of status. */
      async getAll() {
        const knex = this.getKnex();
        const rows = await knex("schedules");
        return rows.map((r) => fromDbRow2(r));
      }
      /** Aggregate counts by status and recurrence, computed in a single query. */
      async getStats() {
        const knex = this.getKnex();
        // MSSQL has no boolean literals; compare against 0/1 instead.
        const boolTrue = this.driver === "mssql" ? "1" : "true";
        const boolFalse = this.driver === "mssql" ? "0" : "false";
        const result = await knex("schedules").select(
          knex.raw("COUNT(*) as total"),
          knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
          knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
          knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
          knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
        ).first();
        return {
          total: Number(result.total) || 0,
          active: Number(result.active) || 0,
          paused: Number(result.paused) || 0,
          completed: Number(result.completed) || 0,
          failed: Number(result.failed) || 0,
          recurring: Number(result.recurring) || 0,
          oneTime: Number(result.one_time) || 0
        };
      }
      /**
       * Enforce global/per-user/recurring schedule quotas before creation;
       * throws a user-facing error when a limit is reached.
       */
      async validateLimits(creatorId, isRecurring, limits) {
        const knex = this.getKnex();
        if (limits.maxGlobal) {
          const result = await knex("schedules").count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxGlobal) {
            throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
          }
        }
        if (limits.maxPerUser) {
          const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxPerUser) {
            throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
          }
        }
        if (isRecurring && limits.maxRecurringPerUser) {
          const bTrue = this.driver === "mssql" ? 1 : true;
          const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
            throw new Error(
              `You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
            );
          }
        }
      }
      // --- HA Distributed Locking (via scheduler_locks table) ---
      /**
       * Try to acquire a TTL-based lock: first steal an expired row via a
       * conditional UPDATE, else INSERT a new row (a duplicate-key failure
       * means another node holds it). Returns the lock token or null.
       */
      async tryAcquireLock(lockId, nodeId, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const token = (0, import_uuid2.v4)();
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
          node_id: nodeId,
          lock_token: token,
          acquired_at: now,
          expires_at: expiresAt
        });
        if (updated > 0) return token;
        try {
          await knex("scheduler_locks").insert({
            lock_id: lockId,
            node_id: nodeId,
            lock_token: token,
            acquired_at: now,
            expires_at: expiresAt
          });
          return token;
        } catch {
          // Insert conflict: lock is held by another node.
          return null;
        }
      }
      /** Release a lock only if the caller still holds its token. */
      async releaseLock(lockId, lockToken) {
        const knex = this.getKnex();
        await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
      }
      /** Extend a held lock's TTL; returns false if the token no longer matches. */
      async renewLock(lockId, lockToken, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
        return updated > 0;
      }
      /** Writes go straight to the DB, so flush is a no-op. */
      async flush() {
      }
      // --- Message Trigger CRUD ---
      /** Insert a new message trigger with a fresh UUID. */
      async createTrigger(trigger) {
        const knex = this.getKnex();
        const newTrigger = {
          ...trigger,
          id: (0, import_uuid2.v4)(),
          createdAt: Date.now()
        };
        await knex("message_triggers").insert(toTriggerInsertRow(newTrigger));
        logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
        return newTrigger;
      }
      /** Fetch a trigger by id, or undefined if not found. */
      async getTrigger(id) {
        const knex = this.getKnex();
        const row = await knex("message_triggers").where("id", id).first();
        return row ? fromTriggerRow2(row) : void 0;
      }
      /** Merge a partial patch into a trigger; id and createdAt are immutable. */
      async updateTrigger(id, patch) {
        const knex = this.getKnex();
        const existing = await knex("message_triggers").where("id", id).first();
        if (!existing) return void 0;
        const current = fromTriggerRow2(existing);
        const updated = {
          ...current,
          ...patch,
          id: current.id,
          createdAt: current.createdAt
        };
        const row = toTriggerInsertRow(updated);
        delete row.id;
        await knex("message_triggers").where("id", id).update(row);
        return updated;
      }
      /** Delete a trigger; returns true if a row was removed. */
      async deleteTrigger(id) {
        const knex = this.getKnex();
        const deleted = await knex("message_triggers").where("id", id).del();
        if (deleted > 0) {
          logger.info(`[KnexStore] Deleted trigger ${id}`);
          return true;
        }
        return false;
      }
      /** All triggers owned by the given creator. */
      async getTriggersByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex("message_triggers").where("creator_id", creatorId);
        return rows.map((r) => fromTriggerRow2(r));
      }
      /** Triggers that are both status='active' and enabled. */
      async getActiveTriggers() {
        const knex = this.getKnex();
        const rows = await knex("message_triggers").where("status", "active").where("enabled", this.driver === "mssql" ? 1 : true);
        return rows.map((r) => fromTriggerRow2(r));
      }
    };
  }
});
58493
+
58494
+ // src/enterprise/loader.ts
58495
+ var loader_exports = {};
58496
+ __export(loader_exports, {
58497
+ loadEnterprisePolicyEngine: () => loadEnterprisePolicyEngine,
58498
+ loadEnterpriseStoreBackend: () => loadEnterpriseStoreBackend
58499
+ });
58500
+ async function loadEnterprisePolicyEngine(config) {
58501
+ try {
58502
+ const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
58503
+ const validator = new LicenseValidator2();
58504
+ const license = await validator.loadAndValidate();
58505
+ if (!license || !validator.hasFeature("policy")) {
58506
+ return new DefaultPolicyEngine();
58507
+ }
58508
+ if (validator.isInGracePeriod()) {
58509
+ console.warn(
58510
+ "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
58511
+ );
58512
+ }
58513
+ const { OpaPolicyEngine: OpaPolicyEngine2 } = await Promise.resolve().then(() => (init_opa_policy_engine(), opa_policy_engine_exports));
58514
+ const engine = new OpaPolicyEngine2(config);
58515
+ await engine.initialize(config);
58516
+ return engine;
58517
+ } catch (err) {
58518
+ const msg = err instanceof Error ? err.message : String(err);
58519
+ try {
58520
+ const { logger: logger2 } = (init_logger(), __toCommonJS(logger_exports));
58521
+ logger2.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
58522
+ } catch {
58523
+ }
58524
+ return new DefaultPolicyEngine();
58525
+ }
58526
+ }
58527
+ async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
58528
+ const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
58529
+ const validator = new LicenseValidator2();
58530
+ const license = await validator.loadAndValidate();
58531
+ if (!license || !validator.hasFeature("scheduler-sql")) {
58532
+ throw new Error(
58533
+ `The ${driver} schedule storage driver requires a Visor Enterprise license with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`
58534
+ );
58535
+ }
58536
+ if (validator.isInGracePeriod()) {
58537
+ console.warn(
58538
+ "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
58539
+ );
58540
+ }
58541
+ const { KnexStoreBackend: KnexStoreBackend2 } = await Promise.resolve().then(() => (init_knex_store(), knex_store_exports));
58542
+ return new KnexStoreBackend2(driver, storageConfig, haConfig);
58543
+ }
58544
+ var init_loader = __esm({
58545
+ "src/enterprise/loader.ts"() {
58546
+ "use strict";
58547
+ init_default_engine();
58548
+ }
58549
+ });
58550
+
57134
58551
  // src/event-bus/event-bus.ts
57135
58552
  var event_bus_exports = {};
57136
58553
  __export(event_bus_exports, {
@@ -58037,8 +59454,8 @@ ${content}
58037
59454
  * Sleep utility
58038
59455
  */
58039
59456
  sleep(ms) {
58040
- return new Promise((resolve15) => {
58041
- const t = setTimeout(resolve15, ms);
59457
+ return new Promise((resolve19) => {
59458
+ const t = setTimeout(resolve19, ms);
58042
59459
  if (typeof t.unref === "function") {
58043
59460
  try {
58044
59461
  t.unref();
@@ -58323,8 +59740,8 @@ ${end}`);
58323
59740
  async updateGroupedComment(ctx, comments, group, changedIds) {
58324
59741
  const existingLock = this.updateLocks.get(group);
58325
59742
  let resolveLock;
58326
- const ourLock = new Promise((resolve15) => {
58327
- resolveLock = resolve15;
59743
+ const ourLock = new Promise((resolve19) => {
59744
+ resolveLock = resolve19;
58328
59745
  });
58329
59746
  this.updateLocks.set(group, ourLock);
58330
59747
  try {
@@ -58655,7 +60072,7 @@ ${blocks}
58655
60072
  * Sleep utility for enforcing delays
58656
60073
  */
58657
60074
  sleep(ms) {
58658
- return new Promise((resolve15) => setTimeout(resolve15, ms));
60075
+ return new Promise((resolve19) => setTimeout(resolve19, ms));
58659
60076
  }
58660
60077
  };
58661
60078
  }
@@ -60502,11 +61919,11 @@ var require_request3 = __commonJS({
60502
61919
  "use strict";
60503
61920
  var __awaiter = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) {
60504
61921
  function adopt(value) {
60505
- return value instanceof P ? value : new P(function(resolve15) {
60506
- resolve15(value);
61922
+ return value instanceof P ? value : new P(function(resolve19) {
61923
+ resolve19(value);
60507
61924
  });
60508
61925
  }
60509
- return new (P || (P = Promise))(function(resolve15, reject) {
61926
+ return new (P || (P = Promise))(function(resolve19, reject) {
60510
61927
  function fulfilled(value) {
60511
61928
  try {
60512
61929
  step(generator.next(value));
@@ -60522,7 +61939,7 @@ var require_request3 = __commonJS({
60522
61939
  }
60523
61940
  }
60524
61941
  function step(result) {
60525
- result.done ? resolve15(result.value) : adopt(result.value).then(fulfilled, rejected);
61942
+ result.done ? resolve19(result.value) : adopt(result.value).then(fulfilled, rejected);
60526
61943
  }
60527
61944
  step((generator = generator.apply(thisArg, _arguments || [])).next());
60528
61945
  });
@@ -60546,9 +61963,9 @@ var require_request3 = __commonJS({
60546
61963
  HttpMethod2["PATCH"] = "PATCH";
60547
61964
  })(HttpMethod = exports2.HttpMethod || (exports2.HttpMethod = {}));
60548
61965
  var SvixRequest = class {
60549
- constructor(method, path29) {
61966
+ constructor(method, path33) {
60550
61967
  this.method = method;
60551
- this.path = path29;
61968
+ this.path = path33;
60552
61969
  this.queryParams = {};
60553
61970
  this.headerParams = {};
60554
61971
  }
@@ -60651,7 +62068,7 @@ var require_request3 = __commonJS({
60651
62068
  }
60652
62069
  function sendWithRetry(url, init, retryScheduleInMs, nextInterval = 50, triesLeft = 2, fetchImpl = fetch, retryCount = 1) {
60653
62070
  return __awaiter(this, void 0, void 0, function* () {
60654
- const sleep = (interval) => new Promise((resolve15) => setTimeout(resolve15, interval));
62071
+ const sleep = (interval) => new Promise((resolve19) => setTimeout(resolve19, interval));
60655
62072
  try {
60656
62073
  const response = yield fetchImpl(url, init);
60657
62074
  if (triesLeft <= 0 || response.status < 500) {
@@ -69725,7 +71142,7 @@ ${message}`;
69725
71142
  });
69726
71143
 
69727
71144
  // src/agent-protocol/task-store.ts
69728
- function safeJsonParse2(value) {
71145
+ function safeJsonParse3(value) {
69729
71146
  if (!value) return void 0;
69730
71147
  try {
69731
71148
  return JSON.parse(value);
@@ -69742,12 +71159,12 @@ function taskRowToAgentTask(row) {
69742
71159
  context_id: row.context_id,
69743
71160
  status: {
69744
71161
  state: row.state,
69745
- message: safeJsonParse2(row.status_message),
71162
+ message: safeJsonParse3(row.status_message),
69746
71163
  timestamp: row.updated_at
69747
71164
  },
69748
- artifacts: safeJsonParse2(row.artifacts) ?? [],
69749
- history: safeJsonParse2(row.history) ?? [],
69750
- metadata: safeJsonParse2(row.request_metadata),
71165
+ artifacts: safeJsonParse3(row.artifacts) ?? [],
71166
+ history: safeJsonParse3(row.history) ?? [],
71167
+ metadata: safeJsonParse3(row.request_metadata),
69751
71168
  workflow_id: row.workflow_id ?? void 0
69752
71169
  };
69753
71170
  }
@@ -69984,7 +71401,7 @@ var init_task_store = __esm({
69984
71401
  const db = this.getDb();
69985
71402
  const row = db.prepare("SELECT artifacts FROM agent_tasks WHERE id = ?").get(taskId);
69986
71403
  if (!row) throw new TaskNotFoundError(taskId);
69987
- const artifacts = safeJsonParse2(row.artifacts) ?? [];
71404
+ const artifacts = safeJsonParse3(row.artifacts) ?? [];
69988
71405
  artifacts.push(artifact);
69989
71406
  db.prepare("UPDATE agent_tasks SET artifacts = ?, updated_at = ? WHERE id = ?").run(
69990
71407
  JSON.stringify(artifacts),
@@ -69996,7 +71413,7 @@ var init_task_store = __esm({
69996
71413
  const db = this.getDb();
69997
71414
  const row = db.prepare("SELECT history FROM agent_tasks WHERE id = ?").get(taskId);
69998
71415
  if (!row) throw new TaskNotFoundError(taskId);
69999
- const history = safeJsonParse2(row.history) ?? [];
71416
+ const history = safeJsonParse3(row.history) ?? [];
70000
71417
  history.push(message);
70001
71418
  db.prepare("UPDATE agent_tasks SET history = ?, updated_at = ? WHERE id = ?").run(
70002
71419
  JSON.stringify(history),
@@ -70508,13 +71925,13 @@ __export(a2a_frontend_exports, {
70508
71925
  resultToArtifacts: () => resultToArtifacts
70509
71926
  });
70510
71927
  function readJsonBody(req) {
70511
- return new Promise((resolve15, reject) => {
71928
+ return new Promise((resolve19, reject) => {
70512
71929
  const chunks = [];
70513
71930
  req.on("data", (chunk) => chunks.push(chunk));
70514
71931
  req.on("end", () => {
70515
71932
  try {
70516
71933
  const body = Buffer.concat(chunks).toString("utf8");
70517
- resolve15(body ? JSON.parse(body) : {});
71934
+ resolve19(body ? JSON.parse(body) : {});
70518
71935
  } catch {
70519
71936
  reject(new ParseError("Malformed JSON body"));
70520
71937
  }
@@ -70757,12 +72174,12 @@ var init_a2a_frontend = __esm({
70757
72174
  }
70758
72175
  const port = this.config.port ?? 9e3;
70759
72176
  const host = this.config.host ?? "0.0.0.0";
70760
- await new Promise((resolve15) => {
72177
+ await new Promise((resolve19) => {
70761
72178
  this.server.listen(port, host, () => {
70762
72179
  const addr = this.server.address();
70763
72180
  this._boundPort = typeof addr === "object" && addr ? addr.port : port;
70764
72181
  logger.info(`A2A server listening on ${host}:${this._boundPort}`);
70765
- resolve15();
72182
+ resolve19();
70766
72183
  });
70767
72184
  });
70768
72185
  if (this.agentCard) {
@@ -70786,8 +72203,8 @@ var init_a2a_frontend = __esm({
70786
72203
  }
70787
72204
  this.streamManager.shutdown();
70788
72205
  if (this.server) {
70789
- await new Promise((resolve15, reject) => {
70790
- this.server.close((err) => err ? reject(err) : resolve15());
72206
+ await new Promise((resolve19, reject) => {
72207
+ this.server.close((err) => err ? reject(err) : resolve19());
70791
72208
  });
70792
72209
  this.server = null;
70793
72210
  }
@@ -71504,15 +72921,15 @@ function serializeRunState(state) {
71504
72921
  ])
71505
72922
  };
71506
72923
  }
71507
- var path28, fs24, StateMachineExecutionEngine;
72924
+ var path32, fs28, StateMachineExecutionEngine;
71508
72925
  var init_state_machine_execution_engine = __esm({
71509
72926
  "src/state-machine-execution-engine.ts"() {
71510
72927
  "use strict";
71511
72928
  init_runner();
71512
72929
  init_logger();
71513
72930
  init_sandbox_manager();
71514
- path28 = __toESM(require("path"));
71515
- fs24 = __toESM(require("fs"));
72931
+ path32 = __toESM(require("path"));
72932
+ fs28 = __toESM(require("fs"));
71516
72933
  StateMachineExecutionEngine = class _StateMachineExecutionEngine {
71517
72934
  workingDirectory;
71518
72935
  executionContext;
@@ -71744,8 +73161,8 @@ var init_state_machine_execution_engine = __esm({
71744
73161
  logger.debug(
71745
73162
  `[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`
71746
73163
  );
71747
- const { loadEnterprisePolicyEngine } = await import("./enterprise/loader");
71748
- context2.policyEngine = await loadEnterprisePolicyEngine(configWithTagFilter.policy);
73164
+ const { loadEnterprisePolicyEngine: loadEnterprisePolicyEngine2 } = await Promise.resolve().then(() => (init_loader(), loader_exports));
73165
+ context2.policyEngine = await loadEnterprisePolicyEngine2(configWithTagFilter.policy);
71749
73166
  logger.debug(
71750
73167
  `[PolicyEngine] Initialized: ${context2.policyEngine?.constructor?.name || "unknown"}`
71751
73168
  );
@@ -71899,9 +73316,9 @@ var init_state_machine_execution_engine = __esm({
71899
73316
  }
71900
73317
  const checkId = String(ev?.checkId || "unknown");
71901
73318
  const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
71902
- const baseDir = process.env.VISOR_SNAPSHOT_DIR || path28.resolve(process.cwd(), ".visor", "snapshots");
71903
- fs24.mkdirSync(baseDir, { recursive: true });
71904
- const filePath = path28.join(baseDir, `${threadKey}-${checkId}.json`);
73319
+ const baseDir = process.env.VISOR_SNAPSHOT_DIR || path32.resolve(process.cwd(), ".visor", "snapshots");
73320
+ fs28.mkdirSync(baseDir, { recursive: true });
73321
+ const filePath = path32.join(baseDir, `${threadKey}-${checkId}.json`);
71905
73322
  await this.saveSnapshotToFile(filePath);
71906
73323
  logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
71907
73324
  try {
@@ -72042,7 +73459,7 @@ var init_state_machine_execution_engine = __esm({
72042
73459
  * Does not include secrets. Intended for debugging and future resume support.
72043
73460
  */
72044
73461
  async saveSnapshotToFile(filePath) {
72045
- const fs25 = await import("fs/promises");
73462
+ const fs29 = await import("fs/promises");
72046
73463
  const ctx = this._lastContext;
72047
73464
  const runner = this._lastRunner;
72048
73465
  if (!ctx || !runner) {
@@ -72062,14 +73479,14 @@ var init_state_machine_execution_engine = __esm({
72062
73479
  journal: entries,
72063
73480
  requestedChecks: ctx.requestedChecks || []
72064
73481
  };
72065
- await fs25.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
73482
+ await fs29.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
72066
73483
  }
72067
73484
  /**
72068
73485
  * Load a snapshot JSON from file and return it. Resume support can build on this.
72069
73486
  */
72070
73487
  async loadSnapshotFromFile(filePath) {
72071
- const fs25 = await import("fs/promises");
72072
- const raw = await fs25.readFile(filePath, "utf8");
73488
+ const fs29 = await import("fs/promises");
73489
+ const raw = await fs29.readFile(filePath, "utf8");
72073
73490
  return JSON.parse(raw);
72074
73491
  }
72075
73492
  /**