@probelabs/visor 0.1.178 → 0.1.179-ee

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101) hide show
  1. package/defaults/assistant.yaml +38 -16
  2. package/defaults/skills/code-explorer.yaml +8 -8
  3. package/dist/agent-protocol/tasks-cli-handler.d.ts.map +1 -1
  4. package/dist/agent-protocol/track-execution.d.ts.map +1 -1
  5. package/dist/defaults/assistant.yaml +38 -16
  6. package/dist/defaults/skills/code-explorer.yaml +8 -8
  7. package/dist/frontends/slack-frontend.d.ts +6 -0
  8. package/dist/frontends/slack-frontend.d.ts.map +1 -1
  9. package/dist/index.js +2143 -108
  10. package/dist/providers/ai-check-provider.d.ts.map +1 -1
  11. package/dist/sdk/{a2a-frontend-WYBMBBYG.mjs → a2a-frontend-KJFLIZJT.mjs} +2 -2
  12. package/dist/sdk/{check-provider-registry-3DZOXYIA.mjs → check-provider-registry-J27YX4IT.mjs} +5 -5
  13. package/dist/sdk/{check-provider-registry-T5J3H2N7.mjs → check-provider-registry-SYAHJMWJ.mjs} +5 -5
  14. package/dist/sdk/{chunk-6YGCACBF.mjs → chunk-CHARL3TY.mjs} +2 -2
  15. package/dist/sdk/{chunk-6YGCACBF.mjs.map → chunk-CHARL3TY.mjs.map} +1 -1
  16. package/dist/sdk/{chunk-B7XHSG3L.mjs → chunk-FTPLYUQ3.mjs} +163 -124
  17. package/dist/sdk/chunk-FTPLYUQ3.mjs.map +1 -0
  18. package/dist/sdk/{chunk-AK64Y6Y2.mjs → chunk-KWHLB5E3.mjs} +164 -125
  19. package/dist/sdk/chunk-KWHLB5E3.mjs.map +1 -0
  20. package/dist/sdk/{chunk-4ECMTCOM.mjs → chunk-OYHDBTKY.mjs} +2 -2
  21. package/dist/sdk/{chunk-ENSZDV3O.mjs → chunk-ZJYQMNPA.mjs} +3 -3
  22. package/dist/sdk/{failure-condition-evaluator-P3MS5DRL.mjs → failure-condition-evaluator-V2YGFRKO.mjs} +3 -3
  23. package/dist/sdk/{github-frontend-7RLEBJWG.mjs → github-frontend-4LM4NAZK.mjs} +3 -3
  24. package/dist/sdk/{host-I2TBBKD5.mjs → host-GBXJKNHL.mjs} +4 -4
  25. package/dist/sdk/{host-SE3MQHWG.mjs → host-XXPPPC76.mjs} +4 -4
  26. package/dist/sdk/knex-store-QCEW4I4R.mjs +527 -0
  27. package/dist/sdk/knex-store-QCEW4I4R.mjs.map +1 -0
  28. package/dist/sdk/loader-Q7K76ZIY.mjs +89 -0
  29. package/dist/sdk/loader-Q7K76ZIY.mjs.map +1 -0
  30. package/dist/sdk/opa-policy-engine-QCSSIMUF.mjs +655 -0
  31. package/dist/sdk/opa-policy-engine-QCSSIMUF.mjs.map +1 -0
  32. package/dist/sdk/{routing-2X6QF5IW.mjs → routing-YAYBIVPL.mjs} +4 -4
  33. package/dist/sdk/{schedule-tool-R6JJIDZ6.mjs → schedule-tool-OIVJDIDK.mjs} +5 -5
  34. package/dist/sdk/{schedule-tool-W4SQ334O.mjs → schedule-tool-WACIV77L.mjs} +5 -5
  35. package/dist/sdk/{schedule-tool-handler-AOMZV3Q3.mjs → schedule-tool-handler-ODKY57FO.mjs} +5 -5
  36. package/dist/sdk/{schedule-tool-handler-MPJFLH4J.mjs → schedule-tool-handler-SJF4ZKSB.mjs} +5 -5
  37. package/dist/sdk/sdk.js +1778 -328
  38. package/dist/sdk/sdk.js.map +1 -1
  39. package/dist/sdk/sdk.mjs +4 -4
  40. package/dist/sdk/{slack-frontend-XKSIOUXB.mjs → slack-frontend-OWD7BSWF.mjs} +22 -3
  41. package/dist/sdk/slack-frontend-OWD7BSWF.mjs.map +1 -0
  42. package/dist/sdk/{trace-helpers-4ADQ4GB3.mjs → trace-helpers-QL2B75AK.mjs} +2 -2
  43. package/dist/sdk/{track-execution-XTCZBUWX.mjs → track-execution-2Q66SXBZ.mjs} +20 -2
  44. package/dist/sdk/{track-execution-XTCZBUWX.mjs.map → track-execution-2Q66SXBZ.mjs.map} +1 -1
  45. package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
  46. package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
  47. package/dist/sdk/{workflow-check-provider-WHZP7BDF.mjs → workflow-check-provider-IXW6BMQA.mjs} +5 -5
  48. package/dist/sdk/{workflow-check-provider-WZN3B2S2.mjs → workflow-check-provider-UZQZYPOE.mjs} +5 -5
  49. package/dist/utils/workspace-manager.d.ts +2 -0
  50. package/dist/utils/workspace-manager.d.ts.map +1 -1
  51. package/package.json +2 -2
  52. package/dist/output/traces/run-2026-03-11T06-33-05-398Z.ndjson +0 -138
  53. package/dist/output/traces/run-2026-03-11T06-33-47-884Z.ndjson +0 -2296
  54. package/dist/sdk/a2a-frontend-U3PTNCLR.mjs +0 -1658
  55. package/dist/sdk/a2a-frontend-WYBMBBYG.mjs.map +0 -1
  56. package/dist/sdk/check-provider-registry-ZX76MY2L.mjs +0 -30
  57. package/dist/sdk/chunk-AK64Y6Y2.mjs.map +0 -1
  58. package/dist/sdk/chunk-ANEKFNAS.mjs +0 -45424
  59. package/dist/sdk/chunk-ANEKFNAS.mjs.map +0 -1
  60. package/dist/sdk/chunk-B7XHSG3L.mjs.map +0 -1
  61. package/dist/sdk/chunk-CDRKH5HH.mjs +0 -739
  62. package/dist/sdk/chunk-CDRKH5HH.mjs.map +0 -1
  63. package/dist/sdk/chunk-KG6PM4OL.mjs +0 -516
  64. package/dist/sdk/chunk-KG6PM4OL.mjs.map +0 -1
  65. package/dist/sdk/chunk-WZS4ARZB.mjs +0 -1502
  66. package/dist/sdk/chunk-WZS4ARZB.mjs.map +0 -1
  67. package/dist/sdk/failure-condition-evaluator-MMPKQGUA.mjs +0 -18
  68. package/dist/sdk/github-frontend-QTKOYB56.mjs +0 -1394
  69. package/dist/sdk/github-frontend-QTKOYB56.mjs.map +0 -1
  70. package/dist/sdk/routing-QHXBQS6X.mjs +0 -26
  71. package/dist/sdk/schedule-tool-MKT5FZ6J.mjs +0 -36
  72. package/dist/sdk/schedule-tool-handler-MPJFLH4J.mjs.map +0 -1
  73. package/dist/sdk/schedule-tool-handler-WY7WCFE5.mjs +0 -40
  74. package/dist/sdk/schedule-tool-handler-WY7WCFE5.mjs.map +0 -1
  75. package/dist/sdk/slack-frontend-XKSIOUXB.mjs.map +0 -1
  76. package/dist/sdk/trace-helpers-4ADQ4GB3.mjs.map +0 -1
  77. package/dist/sdk/trace-helpers-K47ZVJSU.mjs +0 -29
  78. package/dist/sdk/trace-helpers-K47ZVJSU.mjs.map +0 -1
  79. package/dist/sdk/workflow-check-provider-A3YH2UZJ.mjs +0 -30
  80. package/dist/sdk/workflow-check-provider-A3YH2UZJ.mjs.map +0 -1
  81. package/dist/sdk/workflow-check-provider-WHZP7BDF.mjs.map +0 -1
  82. package/dist/sdk/workflow-check-provider-WZN3B2S2.mjs.map +0 -1
  83. package/dist/traces/run-2026-03-11T06-33-05-398Z.ndjson +0 -138
  84. package/dist/traces/run-2026-03-11T06-33-47-884Z.ndjson +0 -2296
  85. /package/dist/sdk/{a2a-frontend-U3PTNCLR.mjs.map → a2a-frontend-KJFLIZJT.mjs.map} +0 -0
  86. /package/dist/sdk/{check-provider-registry-3DZOXYIA.mjs.map → check-provider-registry-J27YX4IT.mjs.map} +0 -0
  87. /package/dist/sdk/{check-provider-registry-T5J3H2N7.mjs.map → check-provider-registry-SYAHJMWJ.mjs.map} +0 -0
  88. /package/dist/sdk/{chunk-4ECMTCOM.mjs.map → chunk-OYHDBTKY.mjs.map} +0 -0
  89. /package/dist/sdk/{chunk-ENSZDV3O.mjs.map → chunk-ZJYQMNPA.mjs.map} +0 -0
  90. /package/dist/sdk/{check-provider-registry-ZX76MY2L.mjs.map → failure-condition-evaluator-V2YGFRKO.mjs.map} +0 -0
  91. /package/dist/sdk/{github-frontend-7RLEBJWG.mjs.map → github-frontend-4LM4NAZK.mjs.map} +0 -0
  92. /package/dist/sdk/{host-I2TBBKD5.mjs.map → host-GBXJKNHL.mjs.map} +0 -0
  93. /package/dist/sdk/{host-SE3MQHWG.mjs.map → host-XXPPPC76.mjs.map} +0 -0
  94. /package/dist/sdk/{failure-condition-evaluator-MMPKQGUA.mjs.map → routing-YAYBIVPL.mjs.map} +0 -0
  95. /package/dist/sdk/{failure-condition-evaluator-P3MS5DRL.mjs.map → schedule-tool-OIVJDIDK.mjs.map} +0 -0
  96. /package/dist/sdk/{routing-2X6QF5IW.mjs.map → schedule-tool-WACIV77L.mjs.map} +0 -0
  97. /package/dist/sdk/{routing-QHXBQS6X.mjs.map → schedule-tool-handler-ODKY57FO.mjs.map} +0 -0
  98. /package/dist/sdk/{schedule-tool-MKT5FZ6J.mjs.map → schedule-tool-handler-SJF4ZKSB.mjs.map} +0 -0
  99. /package/dist/sdk/{schedule-tool-R6JJIDZ6.mjs.map → trace-helpers-QL2B75AK.mjs.map} +0 -0
  100. /package/dist/sdk/{schedule-tool-W4SQ334O.mjs.map → workflow-check-provider-IXW6BMQA.mjs.map} +0 -0
  101. /package/dist/sdk/{schedule-tool-handler-AOMZV3Q3.mjs.map → workflow-check-provider-UZQZYPOE.mjs.map} +0 -0
package/dist/sdk/sdk.js CHANGED
@@ -704,7 +704,7 @@ var require_package = __commonJS({
704
704
  "package.json"(exports2, module2) {
705
705
  module2.exports = {
706
706
  name: "@probelabs/visor",
707
- version: "0.1.178",
707
+ version: "0.1.42",
708
708
  main: "dist/index.js",
709
709
  bin: {
710
710
  visor: "./dist/index.js"
@@ -823,7 +823,7 @@ var require_package = __commonJS({
823
823
  "@opentelemetry/sdk-node": "^0.203.0",
824
824
  "@opentelemetry/sdk-trace-base": "^1.30.1",
825
825
  "@opentelemetry/semantic-conventions": "^1.30.1",
826
- "@probelabs/probe": "^0.6.0-rc293",
826
+ "@probelabs/probe": "^0.6.0-rc294",
827
827
  "@types/commander": "^2.12.0",
828
828
  "@types/uuid": "^10.0.0",
829
829
  acorn: "^8.16.0",
@@ -1152,11 +1152,11 @@ function getTracer() {
1152
1152
  }
1153
1153
  async function withActiveSpan(name, attrs, fn) {
1154
1154
  const tracer = getTracer();
1155
- return await new Promise((resolve15, reject) => {
1155
+ return await new Promise((resolve20, reject) => {
1156
1156
  const callback = async (span) => {
1157
1157
  try {
1158
1158
  const res = await fn(span);
1159
- resolve15(res);
1159
+ resolve20(res);
1160
1160
  } catch (err) {
1161
1161
  try {
1162
1162
  if (err instanceof Error) span.recordException(err);
@@ -1281,19 +1281,19 @@ function __getOrCreateNdjsonPath() {
1281
1281
  try {
1282
1282
  if (process.env.VISOR_TELEMETRY_SINK && process.env.VISOR_TELEMETRY_SINK !== "file")
1283
1283
  return null;
1284
- const path29 = require("path");
1285
- const fs25 = require("fs");
1284
+ const path33 = require("path");
1285
+ const fs29 = require("fs");
1286
1286
  if (process.env.VISOR_FALLBACK_TRACE_FILE) {
1287
1287
  __ndjsonPath = process.env.VISOR_FALLBACK_TRACE_FILE;
1288
- const dir = path29.dirname(__ndjsonPath);
1289
- if (!fs25.existsSync(dir)) fs25.mkdirSync(dir, { recursive: true });
1288
+ const dir = path33.dirname(__ndjsonPath);
1289
+ if (!fs29.existsSync(dir)) fs29.mkdirSync(dir, { recursive: true });
1290
1290
  return __ndjsonPath;
1291
1291
  }
1292
- const outDir = process.env.VISOR_TRACE_DIR || path29.join(process.cwd(), "output", "traces");
1293
- if (!fs25.existsSync(outDir)) fs25.mkdirSync(outDir, { recursive: true });
1292
+ const outDir = process.env.VISOR_TRACE_DIR || path33.join(process.cwd(), "output", "traces");
1293
+ if (!fs29.existsSync(outDir)) fs29.mkdirSync(outDir, { recursive: true });
1294
1294
  if (!__ndjsonPath) {
1295
1295
  const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
1296
- __ndjsonPath = path29.join(outDir, `${ts}.ndjson`);
1296
+ __ndjsonPath = path33.join(outDir, `${ts}.ndjson`);
1297
1297
  }
1298
1298
  return __ndjsonPath;
1299
1299
  } catch {
@@ -1302,11 +1302,11 @@ function __getOrCreateNdjsonPath() {
1302
1302
  }
1303
1303
  function _appendRunMarker() {
1304
1304
  try {
1305
- const fs25 = require("fs");
1305
+ const fs29 = require("fs");
1306
1306
  const p = __getOrCreateNdjsonPath();
1307
1307
  if (!p) return;
1308
1308
  const line = { name: "visor.run", attributes: { started: true } };
1309
- fs25.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
1309
+ fs29.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
1310
1310
  } catch {
1311
1311
  }
1312
1312
  }
@@ -3393,7 +3393,7 @@ var init_failure_condition_evaluator = __esm({
3393
3393
  */
3394
3394
  evaluateExpression(condition, context2) {
3395
3395
  try {
3396
- const normalize4 = (expr) => {
3396
+ const normalize8 = (expr) => {
3397
3397
  const trimmed = expr.trim();
3398
3398
  if (!/[\n;]/.test(trimmed)) return trimmed;
3399
3399
  const parts = trimmed.split(/[\n;]+/).map((s) => s.trim()).filter((s) => s.length > 0 && !s.startsWith("//"));
@@ -3551,7 +3551,7 @@ var init_failure_condition_evaluator = __esm({
3551
3551
  try {
3552
3552
  exec2 = this.sandbox.compile(`return (${raw});`);
3553
3553
  } catch {
3554
- const normalizedExpr = normalize4(condition);
3554
+ const normalizedExpr = normalize8(condition);
3555
3555
  exec2 = this.sandbox.compile(`return (${normalizedExpr});`);
3556
3556
  }
3557
3557
  const result = exec2(scope).run();
@@ -3934,9 +3934,9 @@ function configureLiquidWithExtensions(liquid) {
3934
3934
  });
3935
3935
  liquid.registerFilter("get", (obj, pathExpr) => {
3936
3936
  if (obj == null) return void 0;
3937
- const path29 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
3938
- if (!path29) return obj;
3939
- const parts = path29.split(".");
3937
+ const path33 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
3938
+ if (!path33) return obj;
3939
+ const parts = path33.split(".");
3940
3940
  let cur = obj;
3941
3941
  for (const p of parts) {
3942
3942
  if (cur == null) return void 0;
@@ -4055,9 +4055,9 @@ function configureLiquidWithExtensions(liquid) {
4055
4055
  }
4056
4056
  }
4057
4057
  const defaultRole = typeof rolesCfg.default === "string" && rolesCfg.default.trim() ? rolesCfg.default.trim() : void 0;
4058
- const getNested = (obj, path29) => {
4059
- if (!obj || !path29) return void 0;
4060
- const parts = path29.split(".");
4058
+ const getNested = (obj, path33) => {
4059
+ if (!obj || !path33) return void 0;
4060
+ const parts = path33.split(".");
4061
4061
  let cur = obj;
4062
4062
  for (const p of parts) {
4063
4063
  if (cur == null) return void 0;
@@ -6609,8 +6609,8 @@ var init_dependency_gating = __esm({
6609
6609
  async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
6610
6610
  try {
6611
6611
  const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
6612
- const fs25 = await import("fs/promises");
6613
- const path29 = await import("path");
6612
+ const fs29 = await import("fs/promises");
6613
+ const path33 = await import("path");
6614
6614
  const schemaRaw = checkConfig.schema || "plain";
6615
6615
  const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
6616
6616
  let templateContent;
@@ -6618,24 +6618,24 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
6618
6618
  templateContent = String(checkConfig.template.content);
6619
6619
  } else if (checkConfig.template && checkConfig.template.file) {
6620
6620
  const file = String(checkConfig.template.file);
6621
- const resolved = path29.resolve(process.cwd(), file);
6622
- templateContent = await fs25.readFile(resolved, "utf-8");
6621
+ const resolved = path33.resolve(process.cwd(), file);
6622
+ templateContent = await fs29.readFile(resolved, "utf-8");
6623
6623
  } else if (schema && schema !== "plain") {
6624
6624
  const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
6625
6625
  if (sanitized) {
6626
6626
  const candidatePaths = [
6627
- path29.join(__dirname, "output", sanitized, "template.liquid"),
6627
+ path33.join(__dirname, "output", sanitized, "template.liquid"),
6628
6628
  // bundled: dist/output/
6629
- path29.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
6629
+ path33.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
6630
6630
  // source: output/
6631
- path29.join(process.cwd(), "output", sanitized, "template.liquid"),
6631
+ path33.join(process.cwd(), "output", sanitized, "template.liquid"),
6632
6632
  // fallback: cwd/output/
6633
- path29.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
6633
+ path33.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
6634
6634
  // fallback: cwd/dist/output/
6635
6635
  ];
6636
6636
  for (const p of candidatePaths) {
6637
6637
  try {
6638
- templateContent = await fs25.readFile(p, "utf-8");
6638
+ templateContent = await fs29.readFile(p, "utf-8");
6639
6639
  if (templateContent) break;
6640
6640
  } catch {
6641
6641
  }
@@ -7040,7 +7040,7 @@ async function processDiffWithOutline(diffContent) {
7040
7040
  }
7041
7041
  try {
7042
7042
  const originalProbePath = process.env.PROBE_PATH;
7043
- const fs25 = require("fs");
7043
+ const fs29 = require("fs");
7044
7044
  const possiblePaths = [
7045
7045
  // Relative to current working directory (most common in production)
7046
7046
  path6.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
@@ -7051,7 +7051,7 @@ async function processDiffWithOutline(diffContent) {
7051
7051
  ];
7052
7052
  let probeBinaryPath;
7053
7053
  for (const candidatePath of possiblePaths) {
7054
- if (fs25.existsSync(candidatePath)) {
7054
+ if (fs29.existsSync(candidatePath)) {
7055
7055
  probeBinaryPath = candidatePath;
7056
7056
  break;
7057
7057
  }
@@ -7158,7 +7158,7 @@ async function renderMermaidToPng(mermaidCode) {
7158
7158
  if (chromiumPath) {
7159
7159
  env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;
7160
7160
  }
7161
- const result = await new Promise((resolve15) => {
7161
+ const result = await new Promise((resolve20) => {
7162
7162
  const proc = (0, import_child_process.spawn)(
7163
7163
  "npx",
7164
7164
  [
@@ -7188,13 +7188,13 @@ async function renderMermaidToPng(mermaidCode) {
7188
7188
  });
7189
7189
  proc.on("close", (code) => {
7190
7190
  if (code === 0) {
7191
- resolve15({ success: true });
7191
+ resolve20({ success: true });
7192
7192
  } else {
7193
- resolve15({ success: false, error: stderr || `Exit code ${code}` });
7193
+ resolve20({ success: false, error: stderr || `Exit code ${code}` });
7194
7194
  }
7195
7195
  });
7196
7196
  proc.on("error", (err) => {
7197
- resolve15({ success: false, error: err.message });
7197
+ resolve20({ success: false, error: err.message });
7198
7198
  });
7199
7199
  });
7200
7200
  if (!result.success) {
@@ -8392,8 +8392,8 @@ ${schemaString}`);
8392
8392
  }
8393
8393
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8394
8394
  try {
8395
- const fs25 = require("fs");
8396
- const path29 = require("path");
8395
+ const fs29 = require("fs");
8396
+ const path33 = require("path");
8397
8397
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8398
8398
  const provider = this.config.provider || "auto";
8399
8399
  const model = this.config.model || "default";
@@ -8507,20 +8507,20 @@ ${"=".repeat(60)}
8507
8507
  `;
8508
8508
  readableVersion += `${"=".repeat(60)}
8509
8509
  `;
8510
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path29.join(process.cwd(), "debug-artifacts");
8511
- if (!fs25.existsSync(debugArtifactsDir)) {
8512
- fs25.mkdirSync(debugArtifactsDir, { recursive: true });
8510
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
8511
+ if (!fs29.existsSync(debugArtifactsDir)) {
8512
+ fs29.mkdirSync(debugArtifactsDir, { recursive: true });
8513
8513
  }
8514
- const debugFile = path29.join(
8514
+ const debugFile = path33.join(
8515
8515
  debugArtifactsDir,
8516
8516
  `prompt-${_checkName || "unknown"}-${timestamp}.json`
8517
8517
  );
8518
- fs25.writeFileSync(debugFile, debugJson, "utf-8");
8519
- const readableFile = path29.join(
8518
+ fs29.writeFileSync(debugFile, debugJson, "utf-8");
8519
+ const readableFile = path33.join(
8520
8520
  debugArtifactsDir,
8521
8521
  `prompt-${_checkName || "unknown"}-${timestamp}.txt`
8522
8522
  );
8523
- fs25.writeFileSync(readableFile, readableVersion, "utf-8");
8523
+ fs29.writeFileSync(readableFile, readableVersion, "utf-8");
8524
8524
  log(`
8525
8525
  \u{1F4BE} Full debug info saved to:`);
8526
8526
  log(` JSON: ${debugFile}`);
@@ -8558,8 +8558,8 @@ ${"=".repeat(60)}
8558
8558
  log(`\u{1F4E4} Response length: ${response.length} characters`);
8559
8559
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8560
8560
  try {
8561
- const fs25 = require("fs");
8562
- const path29 = require("path");
8561
+ const fs29 = require("fs");
8562
+ const path33 = require("path");
8563
8563
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8564
8564
  const agentAny2 = agent;
8565
8565
  let fullHistory = [];
@@ -8570,8 +8570,8 @@ ${"=".repeat(60)}
8570
8570
  } else if (agentAny2._messages) {
8571
8571
  fullHistory = agentAny2._messages;
8572
8572
  }
8573
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path29.join(process.cwd(), "debug-artifacts");
8574
- const sessionBase = path29.join(
8573
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
8574
+ const sessionBase = path33.join(
8575
8575
  debugArtifactsDir,
8576
8576
  `session-${_checkName || "unknown"}-${timestamp}`
8577
8577
  );
@@ -8583,7 +8583,7 @@ ${"=".repeat(60)}
8583
8583
  schema: effectiveSchema,
8584
8584
  totalMessages: fullHistory.length
8585
8585
  };
8586
- fs25.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8586
+ fs29.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
8587
8587
  let readable = `=============================================================
8588
8588
  `;
8589
8589
  readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
@@ -8610,7 +8610,7 @@ ${"=".repeat(60)}
8610
8610
  `;
8611
8611
  readable += content + "\n";
8612
8612
  });
8613
- fs25.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8613
+ fs29.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
8614
8614
  log(`\u{1F4BE} Complete session history saved:`);
8615
8615
  log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
8616
8616
  } catch (error) {
@@ -8619,11 +8619,11 @@ ${"=".repeat(60)}
8619
8619
  }
8620
8620
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8621
8621
  try {
8622
- const fs25 = require("fs");
8623
- const path29 = require("path");
8622
+ const fs29 = require("fs");
8623
+ const path33 = require("path");
8624
8624
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8625
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path29.join(process.cwd(), "debug-artifacts");
8626
- const responseFile = path29.join(
8625
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
8626
+ const responseFile = path33.join(
8627
8627
  debugArtifactsDir,
8628
8628
  `response-${_checkName || "unknown"}-${timestamp}.txt`
8629
8629
  );
@@ -8656,7 +8656,7 @@ ${"=".repeat(60)}
8656
8656
  `;
8657
8657
  responseContent += `${"=".repeat(60)}
8658
8658
  `;
8659
- fs25.writeFileSync(responseFile, responseContent, "utf-8");
8659
+ fs29.writeFileSync(responseFile, responseContent, "utf-8");
8660
8660
  log(`\u{1F4BE} Response saved to: ${responseFile}`);
8661
8661
  } catch (error) {
8662
8662
  log(`\u26A0\uFE0F Could not save response file: ${error}`);
@@ -8672,9 +8672,9 @@ ${"=".repeat(60)}
8672
8672
  await agentAny._telemetryConfig.shutdown();
8673
8673
  log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
8674
8674
  if (process.env.GITHUB_ACTIONS) {
8675
- const fs25 = require("fs");
8676
- if (fs25.existsSync(agentAny._traceFilePath)) {
8677
- const stats = fs25.statSync(agentAny._traceFilePath);
8675
+ const fs29 = require("fs");
8676
+ if (fs29.existsSync(agentAny._traceFilePath)) {
8677
+ const stats = fs29.statSync(agentAny._traceFilePath);
8678
8678
  console.log(
8679
8679
  `::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
8680
8680
  );
@@ -8887,9 +8887,9 @@ ${schemaString}`);
8887
8887
  const model = this.config.model || "default";
8888
8888
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
8889
8889
  try {
8890
- const fs25 = require("fs");
8891
- const path29 = require("path");
8892
- const os2 = require("os");
8890
+ const fs29 = require("fs");
8891
+ const path33 = require("path");
8892
+ const os3 = require("os");
8893
8893
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
8894
8894
  const debugData = {
8895
8895
  timestamp,
@@ -8961,19 +8961,19 @@ ${"=".repeat(60)}
8961
8961
  `;
8962
8962
  readableVersion += `${"=".repeat(60)}
8963
8963
  `;
8964
- const tempDir = os2.tmpdir();
8965
- const promptFile = path29.join(tempDir, `visor-prompt-${timestamp}.txt`);
8966
- fs25.writeFileSync(promptFile, prompt, "utf-8");
8964
+ const tempDir = os3.tmpdir();
8965
+ const promptFile = path33.join(tempDir, `visor-prompt-${timestamp}.txt`);
8966
+ fs29.writeFileSync(promptFile, prompt, "utf-8");
8967
8967
  log(`
8968
8968
  \u{1F4BE} Prompt saved to: ${promptFile}`);
8969
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path29.join(process.cwd(), "debug-artifacts");
8969
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
8970
8970
  try {
8971
- const base = path29.join(
8971
+ const base = path33.join(
8972
8972
  debugArtifactsDir,
8973
8973
  `prompt-${_checkName || "unknown"}-${timestamp}`
8974
8974
  );
8975
- fs25.writeFileSync(base + ".json", debugJson, "utf-8");
8976
- fs25.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
8975
+ fs29.writeFileSync(base + ".json", debugJson, "utf-8");
8976
+ fs29.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
8977
8977
  log(`
8978
8978
  \u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
8979
8979
  } catch {
@@ -9023,8 +9023,8 @@ $ ${cliCommand}
9023
9023
  log(`\u{1F4E4} Response length: ${response.length} characters`);
9024
9024
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
9025
9025
  try {
9026
- const fs25 = require("fs");
9027
- const path29 = require("path");
9026
+ const fs29 = require("fs");
9027
+ const path33 = require("path");
9028
9028
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
9029
9029
  const agentAny = agent;
9030
9030
  let fullHistory = [];
@@ -9035,8 +9035,8 @@ $ ${cliCommand}
9035
9035
  } else if (agentAny._messages) {
9036
9036
  fullHistory = agentAny._messages;
9037
9037
  }
9038
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path29.join(process.cwd(), "debug-artifacts");
9039
- const sessionBase = path29.join(
9038
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
9039
+ const sessionBase = path33.join(
9040
9040
  debugArtifactsDir,
9041
9041
  `session-${_checkName || "unknown"}-${timestamp}`
9042
9042
  );
@@ -9048,7 +9048,7 @@ $ ${cliCommand}
9048
9048
  schema: effectiveSchema,
9049
9049
  totalMessages: fullHistory.length
9050
9050
  };
9051
- fs25.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
9051
+ fs29.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
9052
9052
  let readable = `=============================================================
9053
9053
  `;
9054
9054
  readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
@@ -9075,7 +9075,7 @@ ${"=".repeat(60)}
9075
9075
  `;
9076
9076
  readable += content + "\n";
9077
9077
  });
9078
- fs25.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
9078
+ fs29.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
9079
9079
  log(`\u{1F4BE} Complete session history saved:`);
9080
9080
  log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
9081
9081
  } catch (error) {
@@ -9084,11 +9084,11 @@ ${"=".repeat(60)}
9084
9084
  }
9085
9085
  if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
9086
9086
  try {
9087
- const fs25 = require("fs");
9088
- const path29 = require("path");
9087
+ const fs29 = require("fs");
9088
+ const path33 = require("path");
9089
9089
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
9090
- const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path29.join(process.cwd(), "debug-artifacts");
9091
- const responseFile = path29.join(
9090
+ const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
9091
+ const responseFile = path33.join(
9092
9092
  debugArtifactsDir,
9093
9093
  `response-${_checkName || "unknown"}-${timestamp}.txt`
9094
9094
  );
@@ -9121,7 +9121,7 @@ ${"=".repeat(60)}
9121
9121
  `;
9122
9122
  responseContent += `${"=".repeat(60)}
9123
9123
  `;
9124
- fs25.writeFileSync(responseFile, responseContent, "utf-8");
9124
+ fs29.writeFileSync(responseFile, responseContent, "utf-8");
9125
9125
  log(`\u{1F4BE} Response saved to: ${responseFile}`);
9126
9126
  } catch (error) {
9127
9127
  log(`\u26A0\uFE0F Could not save response file: ${error}`);
@@ -9139,9 +9139,9 @@ ${"=".repeat(60)}
9139
9139
  await telemetry.shutdown();
9140
9140
  log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
9141
9141
  if (process.env.GITHUB_ACTIONS) {
9142
- const fs25 = require("fs");
9143
- if (fs25.existsSync(traceFilePath)) {
9144
- const stats = fs25.statSync(traceFilePath);
9142
+ const fs29 = require("fs");
9143
+ if (fs29.existsSync(traceFilePath)) {
9144
+ const stats = fs29.statSync(traceFilePath);
9145
9145
  console.log(
9146
9146
  `::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
9147
9147
  );
@@ -9179,8 +9179,8 @@ ${"=".repeat(60)}
9179
9179
  * Load schema content from schema files or inline definitions
9180
9180
  */
9181
9181
  async loadSchemaContent(schema) {
9182
- const fs25 = require("fs").promises;
9183
- const path29 = require("path");
9182
+ const fs29 = require("fs").promises;
9183
+ const path33 = require("path");
9184
9184
  if (typeof schema === "object" && schema !== null) {
9185
9185
  log("\u{1F4CB} Using inline schema object from configuration");
9186
9186
  return JSON.stringify(schema);
@@ -9193,14 +9193,14 @@ ${"=".repeat(60)}
9193
9193
  }
9194
9194
  } catch {
9195
9195
  }
9196
- if ((schema.startsWith("./") || schema.includes(".json")) && !path29.isAbsolute(schema)) {
9196
+ if ((schema.startsWith("./") || schema.includes(".json")) && !path33.isAbsolute(schema)) {
9197
9197
  if (schema.includes("..") || schema.includes("\0")) {
9198
9198
  throw new Error("Invalid schema path: path traversal not allowed");
9199
9199
  }
9200
9200
  try {
9201
- const schemaPath = path29.resolve(process.cwd(), schema);
9201
+ const schemaPath = path33.resolve(process.cwd(), schema);
9202
9202
  log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
9203
- const schemaContent = await fs25.readFile(schemaPath, "utf-8");
9203
+ const schemaContent = await fs29.readFile(schemaPath, "utf-8");
9204
9204
  return schemaContent.trim();
9205
9205
  } catch (error) {
9206
9206
  throw new Error(
@@ -9214,22 +9214,22 @@ ${"=".repeat(60)}
9214
9214
  }
9215
9215
  const candidatePaths = [
9216
9216
  // GitHub Action bundle location
9217
- path29.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
9217
+ path33.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
9218
9218
  // Historical fallback when src/output was inadvertently bundled as output1/
9219
- path29.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
9219
+ path33.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
9220
9220
  // Local dev (repo root)
9221
- path29.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
9221
+ path33.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
9222
9222
  ];
9223
9223
  for (const schemaPath of candidatePaths) {
9224
9224
  try {
9225
- const schemaContent = await fs25.readFile(schemaPath, "utf-8");
9225
+ const schemaContent = await fs29.readFile(schemaPath, "utf-8");
9226
9226
  return schemaContent.trim();
9227
9227
  } catch {
9228
9228
  }
9229
9229
  }
9230
- const distPath = path29.join(__dirname, "output", sanitizedSchemaName, "schema.json");
9231
- const distAltPath = path29.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
9232
- const cwdPath = path29.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
9230
+ const distPath = path33.join(__dirname, "output", sanitizedSchemaName, "schema.json");
9231
+ const distAltPath = path33.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
9232
+ const cwdPath = path33.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
9233
9233
  throw new Error(
9234
9234
  `Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
9235
9235
  );
@@ -9471,7 +9471,7 @@ ${"=".repeat(60)}
9471
9471
  * Generate mock response for testing
9472
9472
  */
9473
9473
  async generateMockResponse(_prompt, _checkName, _schema) {
9474
- await new Promise((resolve15) => setTimeout(resolve15, 500));
9474
+ await new Promise((resolve20) => setTimeout(resolve20, 500));
9475
9475
  const name = (_checkName || "").toLowerCase();
9476
9476
  if (name.includes("extract-facts")) {
9477
9477
  const arr = Array.from({ length: 6 }, (_, i) => ({
@@ -9832,7 +9832,7 @@ var init_command_executor = __esm({
9832
9832
  * Execute command with stdin input
9833
9833
  */
9834
9834
  executeWithStdin(command, options) {
9835
- return new Promise((resolve15, reject) => {
9835
+ return new Promise((resolve20, reject) => {
9836
9836
  const childProcess = (0, import_child_process2.exec)(
9837
9837
  command,
9838
9838
  {
@@ -9844,7 +9844,7 @@ var init_command_executor = __esm({
9844
9844
  if (error && error.killed && (error.code === "ETIMEDOUT" || error.signal === "SIGTERM")) {
9845
9845
  reject(new Error(`Command timed out after ${options.timeout || 3e4}ms`));
9846
9846
  } else {
9847
- resolve15({
9847
+ resolve20({
9848
9848
  stdout: stdout || "",
9849
9849
  stderr: stderr || "",
9850
9850
  exitCode: error ? error.code || 1 : 0
@@ -9973,7 +9973,7 @@ async function rateLimitedFetch(url, options, rateLimitConfig) {
9973
9973
  logger.verbose(
9974
9974
  `[rate-limiter] 429 on ${url} (bucket: ${key}), retry ${attempt + 1}/${maxRetries} in ${delayMs}ms`
9975
9975
  );
9976
- await new Promise((resolve15) => setTimeout(resolve15, delayMs));
9976
+ await new Promise((resolve20) => setTimeout(resolve20, delayMs));
9977
9977
  }
9978
9978
  return fetch(url, options);
9979
9979
  }
@@ -10022,8 +10022,8 @@ var init_rate_limiter = __esm({
10022
10022
  return;
10023
10023
  }
10024
10024
  const waitMs = Math.ceil((1 - this.tokens) / this.refillRate);
10025
- return new Promise((resolve15) => {
10026
- const entry = { resolve: resolve15 };
10025
+ return new Promise((resolve20) => {
10026
+ const entry = { resolve: resolve20 };
10027
10027
  this.waitQueue.push(entry);
10028
10028
  setTimeout(() => {
10029
10029
  const idx = this.waitQueue.indexOf(entry);
@@ -10034,7 +10034,7 @@ var init_rate_limiter = __esm({
10034
10034
  if (this.tokens >= 1) {
10035
10035
  this.tokens -= 1;
10036
10036
  }
10037
- resolve15();
10037
+ resolve20();
10038
10038
  }, waitMs);
10039
10039
  });
10040
10040
  }
@@ -18851,17 +18851,17 @@ var init_workflow_check_provider = __esm({
18851
18851
  * so it can be executed by the state machine as a nested workflow.
18852
18852
  */
18853
18853
  async loadWorkflowFromConfigPath(sourcePath, baseDir) {
18854
- const path29 = require("path");
18855
- const fs25 = require("fs");
18854
+ const path33 = require("path");
18855
+ const fs29 = require("fs");
18856
18856
  const yaml5 = require("js-yaml");
18857
- const resolved = path29.isAbsolute(sourcePath) ? sourcePath : path29.resolve(baseDir, sourcePath);
18858
- if (!fs25.existsSync(resolved)) {
18857
+ const resolved = path33.isAbsolute(sourcePath) ? sourcePath : path33.resolve(baseDir, sourcePath);
18858
+ if (!fs29.existsSync(resolved)) {
18859
18859
  throw new Error(`Workflow config not found at: ${resolved}`);
18860
18860
  }
18861
- const rawContent = fs25.readFileSync(resolved, "utf8");
18861
+ const rawContent = fs29.readFileSync(resolved, "utf8");
18862
18862
  const rawData = yaml5.load(rawContent);
18863
18863
  if (rawData.imports && Array.isArray(rawData.imports)) {
18864
- const configDir = path29.dirname(resolved);
18864
+ const configDir = path33.dirname(resolved);
18865
18865
  for (const source of rawData.imports) {
18866
18866
  const results = await this.registry.import(source, {
18867
18867
  basePath: configDir,
@@ -18891,8 +18891,8 @@ ${errors}`);
18891
18891
  if (!steps || Object.keys(steps).length === 0) {
18892
18892
  throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
18893
18893
  }
18894
- const id = path29.basename(resolved).replace(/\.(ya?ml)$/i, "");
18895
- const name = loaded.name || `Workflow from ${path29.basename(resolved)}`;
18894
+ const id = path33.basename(resolved).replace(/\.(ya?ml)$/i, "");
18895
+ const name = loaded.name || `Workflow from ${path33.basename(resolved)}`;
18896
18896
  const workflowDef = {
18897
18897
  id,
18898
18898
  name,
@@ -19701,8 +19701,8 @@ async function createStoreBackend(storageConfig, haConfig) {
19701
19701
  case "mssql": {
19702
19702
  try {
19703
19703
  const loaderPath = "../../enterprise/loader";
19704
- const { loadEnterpriseStoreBackend } = await import(loaderPath);
19705
- return await loadEnterpriseStoreBackend(driver, storageConfig, haConfig);
19704
+ const { loadEnterpriseStoreBackend: loadEnterpriseStoreBackend2 } = await import(loaderPath);
19705
+ return await loadEnterpriseStoreBackend2(driver, storageConfig, haConfig);
19706
19706
  } catch (err) {
19707
19707
  const msg = err instanceof Error ? err.message : String(err);
19708
19708
  logger.error(`[StoreFactory] Failed to load enterprise ${driver} backend: ${msg}`);
@@ -20341,10 +20341,28 @@ async function trackExecution(opts, executor) {
20341
20341
  );
20342
20342
  try {
20343
20343
  const result = await executor();
20344
+ let responseText = "Execution completed";
20345
+ try {
20346
+ const history = result?.reviewSummary?.history;
20347
+ if (history) {
20348
+ for (const outputs of Object.values(history)) {
20349
+ if (!Array.isArray(outputs)) continue;
20350
+ for (const out of outputs) {
20351
+ const text = out?.text;
20352
+ if (typeof text === "string" && text.trim().length > 0) {
20353
+ responseText = text.trim();
20354
+ break;
20355
+ }
20356
+ }
20357
+ if (responseText !== "Execution completed") break;
20358
+ }
20359
+ }
20360
+ } catch {
20361
+ }
20344
20362
  const completedMsg = {
20345
20363
  message_id: import_crypto2.default.randomUUID(),
20346
20364
  role: "agent",
20347
- parts: [{ text: "Execution completed" }]
20365
+ parts: [{ text: responseText }]
20348
20366
  };
20349
20367
  taskStore.updateTaskState(task.id, "completed", completedMsg);
20350
20368
  logger.info(`[TaskTracking] Task ${task.id} completed`);
@@ -22397,7 +22415,7 @@ var init_mcp_custom_sse_server = __esm({
22397
22415
  * Returns the actual bound port number
22398
22416
  */
22399
22417
  async start() {
22400
- return new Promise((resolve15, reject) => {
22418
+ return new Promise((resolve20, reject) => {
22401
22419
  try {
22402
22420
  this.server = import_http.default.createServer((req, res) => {
22403
22421
  this.handleRequest(req, res).catch((error) => {
@@ -22431,7 +22449,7 @@ var init_mcp_custom_sse_server = __esm({
22431
22449
  );
22432
22450
  }
22433
22451
  this.startKeepalive();
22434
- resolve15(this.port);
22452
+ resolve20(this.port);
22435
22453
  });
22436
22454
  } catch (error) {
22437
22455
  reject(error);
@@ -22494,7 +22512,7 @@ var init_mcp_custom_sse_server = __esm({
22494
22512
  logger.debug(
22495
22513
  `[CustomToolsSSEServer:${this.sessionId}] Grace period before stop: ${waitMs}ms (activeToolCalls=${this.activeToolCalls})`
22496
22514
  );
22497
- await new Promise((resolve15) => setTimeout(resolve15, waitMs));
22515
+ await new Promise((resolve20) => setTimeout(resolve20, waitMs));
22498
22516
  }
22499
22517
  }
22500
22518
  if (this.activeToolCalls > 0) {
@@ -22503,7 +22521,7 @@ var init_mcp_custom_sse_server = __esm({
22503
22521
  `[CustomToolsSSEServer:${this.sessionId}] Waiting for ${this.activeToolCalls} active tool call(s) before stop`
22504
22522
  );
22505
22523
  while (this.activeToolCalls > 0 && Date.now() - startedAt < effectiveDrainTimeoutMs) {
22506
- await new Promise((resolve15) => setTimeout(resolve15, 250));
22524
+ await new Promise((resolve20) => setTimeout(resolve20, 250));
22507
22525
  }
22508
22526
  if (this.activeToolCalls > 0) {
22509
22527
  logger.warn(
@@ -22528,21 +22546,21 @@ var init_mcp_custom_sse_server = __esm({
22528
22546
  }
22529
22547
  this.connections.clear();
22530
22548
  if (this.server) {
22531
- await new Promise((resolve15, reject) => {
22549
+ await new Promise((resolve20, reject) => {
22532
22550
  const timeout = setTimeout(() => {
22533
22551
  if (this.debug) {
22534
22552
  logger.debug(
22535
22553
  `[CustomToolsSSEServer:${this.sessionId}] Force closing server after timeout`
22536
22554
  );
22537
22555
  }
22538
- this.server?.close(() => resolve15());
22556
+ this.server?.close(() => resolve20());
22539
22557
  }, 5e3);
22540
22558
  this.server.close((error) => {
22541
22559
  clearTimeout(timeout);
22542
22560
  if (error) {
22543
22561
  reject(error);
22544
22562
  } else {
22545
- resolve15();
22563
+ resolve20();
22546
22564
  }
22547
22565
  });
22548
22566
  });
@@ -22999,7 +23017,7 @@ var init_mcp_custom_sse_server = __esm({
22999
23017
  logger.warn(
23000
23018
  `[CustomToolsSSEServer:${this.sessionId}] Tool ${toolName} failed (attempt ${attempt + 1}/${retryCount + 1}): ${errorMsg}. Retrying in ${delay}ms`
23001
23019
  );
23002
- await new Promise((resolve15) => setTimeout(resolve15, delay));
23020
+ await new Promise((resolve20) => setTimeout(resolve20, delay));
23003
23021
  attempt++;
23004
23022
  }
23005
23023
  }
@@ -23472,9 +23490,9 @@ var init_ai_check_provider = __esm({
23472
23490
  } else {
23473
23491
  resolvedPath = import_path8.default.resolve(process.cwd(), str);
23474
23492
  }
23475
- const fs25 = require("fs").promises;
23493
+ const fs29 = require("fs").promises;
23476
23494
  try {
23477
- const stat2 = await fs25.stat(resolvedPath);
23495
+ const stat2 = await fs29.stat(resolvedPath);
23478
23496
  return stat2.isFile();
23479
23497
  } catch {
23480
23498
  return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
@@ -23858,45 +23876,44 @@ ${preview}`);
23858
23876
  const aiConfig = {};
23859
23877
  if (config.ai) {
23860
23878
  const aiAny2 = config.ai;
23879
+ const resolveLiquid = async (val) => {
23880
+ if (typeof val !== "string" || !val.includes("{{")) return void 0;
23881
+ try {
23882
+ return (await this.liquidEngine.parseAndRender(val, {
23883
+ inputs: config.workflowInputs || {},
23884
+ env: process.env
23885
+ })).trim();
23886
+ } catch {
23887
+ return void 0;
23888
+ }
23889
+ };
23890
+ const resolveBool = async (val) => {
23891
+ const resolved = await resolveLiquid(val) ?? val;
23892
+ if (typeof resolved === "boolean") return resolved;
23893
+ if (typeof resolved === "string") return resolved === "true";
23894
+ return !!resolved;
23895
+ };
23861
23896
  const skipTransport = aiAny2.skip_transport_context === true;
23862
23897
  if (aiAny2.apiKey !== void 0) {
23863
23898
  aiConfig.apiKey = aiAny2.apiKey;
23864
23899
  }
23865
23900
  if (aiAny2.model !== void 0) {
23866
- let modelVal = String(aiAny2.model);
23867
- if (modelVal.includes("{{")) {
23868
- try {
23869
- const rendered = await this.liquidEngine.parseAndRender(modelVal, {
23870
- inputs: config.workflowInputs || {},
23871
- env: process.env
23872
- });
23873
- modelVal = rendered.trim();
23874
- } catch {
23875
- }
23876
- }
23901
+ const modelVal = await resolveLiquid(aiAny2.model) ?? String(aiAny2.model);
23877
23902
  if (modelVal) {
23878
23903
  aiConfig.model = modelVal;
23879
23904
  }
23880
23905
  }
23881
23906
  if (aiAny2.timeout !== void 0) {
23882
- aiConfig.timeout = aiAny2.timeout;
23907
+ const resolvedTimeout = await resolveLiquid(aiAny2.timeout) ?? aiAny2.timeout;
23908
+ aiConfig.timeout = Number(resolvedTimeout);
23883
23909
  }
23884
23910
  if (aiAny2.max_iterations !== void 0 || aiAny2.maxIterations !== void 0) {
23885
23911
  const raw = aiAny2.max_iterations ?? aiAny2.maxIterations;
23886
- aiConfig.maxIterations = Number(raw);
23912
+ const resolved = await resolveLiquid(raw) ?? raw;
23913
+ aiConfig.maxIterations = Number(resolved);
23887
23914
  }
23888
23915
  if (aiAny2.provider !== void 0) {
23889
- let providerVal = String(aiAny2.provider);
23890
- if (providerVal.includes("{{")) {
23891
- try {
23892
- const rendered = await this.liquidEngine.parseAndRender(providerVal, {
23893
- inputs: config.workflowInputs || {},
23894
- env: process.env
23895
- });
23896
- providerVal = rendered.trim();
23897
- } catch {
23898
- }
23899
- }
23916
+ const providerVal = await resolveLiquid(aiAny2.provider) ?? String(aiAny2.provider);
23900
23917
  if (providerVal) {
23901
23918
  aiConfig.provider = providerVal;
23902
23919
  }
@@ -23905,16 +23922,16 @@ ${preview}`);
23905
23922
  aiConfig.debug = aiAny2.debug;
23906
23923
  }
23907
23924
  if (aiAny2.enableDelegate !== void 0) {
23908
- aiConfig.enableDelegate = aiAny2.enableDelegate;
23925
+ aiConfig.enableDelegate = await resolveBool(aiAny2.enableDelegate);
23909
23926
  }
23910
23927
  if (aiAny2.enableTasks !== void 0) {
23911
- aiConfig.enableTasks = aiAny2.enableTasks;
23928
+ aiConfig.enableTasks = await resolveBool(aiAny2.enableTasks);
23912
23929
  }
23913
23930
  if (aiAny2.enableExecutePlan !== void 0) {
23914
- aiConfig.enableExecutePlan = aiAny2.enableExecutePlan;
23931
+ aiConfig.enableExecutePlan = await resolveBool(aiAny2.enableExecutePlan);
23915
23932
  }
23916
23933
  if (aiAny2.allowEdit !== void 0) {
23917
- aiConfig.allowEdit = aiAny2.allowEdit;
23934
+ aiConfig.allowEdit = await resolveBool(aiAny2.allowEdit);
23918
23935
  }
23919
23936
  if (aiAny2.allowedTools !== void 0) {
23920
23937
  aiConfig.allowedTools = aiAny2.allowedTools;
@@ -23923,20 +23940,20 @@ ${preview}`);
23923
23940
  );
23924
23941
  }
23925
23942
  if (aiAny2.disableTools !== void 0) {
23926
- aiConfig.disableTools = aiAny2.disableTools;
23943
+ aiConfig.disableTools = await resolveBool(aiAny2.disableTools);
23927
23944
  this.logDebug(`[AI Provider] Read disableTools from YAML: ${aiAny2.disableTools}`);
23928
23945
  }
23929
23946
  if (aiAny2.allowBash !== void 0) {
23930
- aiConfig.allowBash = aiAny2.allowBash;
23947
+ aiConfig.allowBash = await resolveBool(aiAny2.allowBash);
23931
23948
  }
23932
23949
  if (aiAny2.bashConfig !== void 0) {
23933
23950
  aiConfig.bashConfig = aiAny2.bashConfig;
23934
23951
  }
23935
23952
  if (aiAny2.search_delegate_provider !== void 0) {
23936
- aiConfig.search_delegate_provider = aiAny2.search_delegate_provider;
23953
+ aiConfig.search_delegate_provider = await resolveLiquid(aiAny2.search_delegate_provider) ?? aiAny2.search_delegate_provider;
23937
23954
  }
23938
23955
  if (aiAny2.search_delegate_model !== void 0) {
23939
- aiConfig.search_delegate_model = aiAny2.search_delegate_model;
23956
+ aiConfig.search_delegate_model = await resolveLiquid(aiAny2.search_delegate_model) ?? aiAny2.search_delegate_model;
23940
23957
  }
23941
23958
  if (aiAny2.completion_prompt !== void 0) {
23942
23959
  aiConfig.completionPrompt = aiAny2.completion_prompt;
@@ -24047,6 +24064,9 @@ ${preview}`);
24047
24064
  if (config.ai_max_iterations !== void 0 && aiConfig.maxIterations === void 0) {
24048
24065
  aiConfig.maxIterations = config.ai_max_iterations;
24049
24066
  }
24067
+ if (aiConfig.maxIterations === void 0 || Number.isNaN(aiConfig.maxIterations)) {
24068
+ aiConfig.maxIterations = 100;
24069
+ }
24050
24070
  const sharedLimiter = sessionInfo?._parentContext?.sharedConcurrencyLimiter;
24051
24071
  if (sharedLimiter) {
24052
24072
  aiConfig.concurrencyLimiter = sharedLimiter;
@@ -29635,14 +29655,14 @@ var require_util = __commonJS({
29635
29655
  }
29636
29656
  const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
29637
29657
  let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
29638
- let path29 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
29658
+ let path33 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
29639
29659
  if (origin.endsWith("/")) {
29640
29660
  origin = origin.substring(0, origin.length - 1);
29641
29661
  }
29642
- if (path29 && !path29.startsWith("/")) {
29643
- path29 = `/${path29}`;
29662
+ if (path33 && !path33.startsWith("/")) {
29663
+ path33 = `/${path33}`;
29644
29664
  }
29645
- url = new URL(origin + path29);
29665
+ url = new URL(origin + path33);
29646
29666
  }
29647
29667
  return url;
29648
29668
  }
@@ -31256,20 +31276,20 @@ var require_parseParams = __commonJS({
31256
31276
  var require_basename = __commonJS({
31257
31277
  "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
31258
31278
  "use strict";
31259
- module2.exports = function basename4(path29) {
31260
- if (typeof path29 !== "string") {
31279
+ module2.exports = function basename4(path33) {
31280
+ if (typeof path33 !== "string") {
31261
31281
  return "";
31262
31282
  }
31263
- for (var i = path29.length - 1; i >= 0; --i) {
31264
- switch (path29.charCodeAt(i)) {
31283
+ for (var i = path33.length - 1; i >= 0; --i) {
31284
+ switch (path33.charCodeAt(i)) {
31265
31285
  case 47:
31266
31286
  // '/'
31267
31287
  case 92:
31268
- path29 = path29.slice(i + 1);
31269
- return path29 === ".." || path29 === "." ? "" : path29;
31288
+ path33 = path33.slice(i + 1);
31289
+ return path33 === ".." || path33 === "." ? "" : path33;
31270
31290
  }
31271
31291
  }
31272
- return path29 === ".." || path29 === "." ? "" : path29;
31292
+ return path33 === ".." || path33 === "." ? "" : path33;
31273
31293
  };
31274
31294
  }
31275
31295
  });
@@ -32273,11 +32293,11 @@ var require_util2 = __commonJS({
32273
32293
  var assert = require("assert");
32274
32294
  var { isUint8Array } = require("util/types");
32275
32295
  var supportedHashes = [];
32276
- var crypto7;
32296
+ var crypto9;
32277
32297
  try {
32278
- crypto7 = require("crypto");
32298
+ crypto9 = require("crypto");
32279
32299
  const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
32280
- supportedHashes = crypto7.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
32300
+ supportedHashes = crypto9.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
32281
32301
  } catch {
32282
32302
  }
32283
32303
  function responseURL(response) {
@@ -32554,7 +32574,7 @@ var require_util2 = __commonJS({
32554
32574
  }
32555
32575
  }
32556
32576
  function bytesMatch(bytes, metadataList) {
32557
- if (crypto7 === void 0) {
32577
+ if (crypto9 === void 0) {
32558
32578
  return true;
32559
32579
  }
32560
32580
  const parsedMetadata = parseMetadata(metadataList);
@@ -32569,7 +32589,7 @@ var require_util2 = __commonJS({
32569
32589
  for (const item of metadata) {
32570
32590
  const algorithm = item.algo;
32571
32591
  const expectedValue = item.hash;
32572
- let actualValue = crypto7.createHash(algorithm).update(bytes).digest("base64");
32592
+ let actualValue = crypto9.createHash(algorithm).update(bytes).digest("base64");
32573
32593
  if (actualValue[actualValue.length - 1] === "=") {
32574
32594
  if (actualValue[actualValue.length - 2] === "=") {
32575
32595
  actualValue = actualValue.slice(0, -2);
@@ -32662,8 +32682,8 @@ var require_util2 = __commonJS({
32662
32682
  function createDeferredPromise() {
32663
32683
  let res;
32664
32684
  let rej;
32665
- const promise = new Promise((resolve15, reject) => {
32666
- res = resolve15;
32685
+ const promise = new Promise((resolve20, reject) => {
32686
+ res = resolve20;
32667
32687
  rej = reject;
32668
32688
  });
32669
32689
  return { promise, resolve: res, reject: rej };
@@ -33916,8 +33936,8 @@ var require_body = __commonJS({
33916
33936
  var { parseMIMEType, serializeAMimeType } = require_dataURL();
33917
33937
  var random;
33918
33938
  try {
33919
- const crypto7 = require("crypto");
33920
- random = (max) => crypto7.randomInt(0, max);
33939
+ const crypto9 = require("crypto");
33940
+ random = (max) => crypto9.randomInt(0, max);
33921
33941
  } catch {
33922
33942
  random = (max) => Math.floor(Math.random(max));
33923
33943
  }
@@ -34168,8 +34188,8 @@ Content-Type: ${value.type || "application/octet-stream"}\r
34168
34188
  });
34169
34189
  }
34170
34190
  });
34171
- const busboyResolve = new Promise((resolve15, reject) => {
34172
- busboy.on("finish", resolve15);
34191
+ const busboyResolve = new Promise((resolve20, reject) => {
34192
+ busboy.on("finish", resolve20);
34173
34193
  busboy.on("error", (err) => reject(new TypeError(err)));
34174
34194
  });
34175
34195
  if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk);
@@ -34300,7 +34320,7 @@ var require_request = __commonJS({
34300
34320
  }
34301
34321
  var Request2 = class _Request {
34302
34322
  constructor(origin, {
34303
- path: path29,
34323
+ path: path33,
34304
34324
  method,
34305
34325
  body,
34306
34326
  headers,
@@ -34314,11 +34334,11 @@ var require_request = __commonJS({
34314
34334
  throwOnError,
34315
34335
  expectContinue
34316
34336
  }, handler) {
34317
- if (typeof path29 !== "string") {
34337
+ if (typeof path33 !== "string") {
34318
34338
  throw new InvalidArgumentError("path must be a string");
34319
- } else if (path29[0] !== "/" && !(path29.startsWith("http://") || path29.startsWith("https://")) && method !== "CONNECT") {
34339
+ } else if (path33[0] !== "/" && !(path33.startsWith("http://") || path33.startsWith("https://")) && method !== "CONNECT") {
34320
34340
  throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
34321
- } else if (invalidPathRegex.exec(path29) !== null) {
34341
+ } else if (invalidPathRegex.exec(path33) !== null) {
34322
34342
  throw new InvalidArgumentError("invalid request path");
34323
34343
  }
34324
34344
  if (typeof method !== "string") {
@@ -34381,7 +34401,7 @@ var require_request = __commonJS({
34381
34401
  this.completed = false;
34382
34402
  this.aborted = false;
34383
34403
  this.upgrade = upgrade || null;
34384
- this.path = query ? util.buildURL(path29, query) : path29;
34404
+ this.path = query ? util.buildURL(path33, query) : path33;
34385
34405
  this.origin = origin;
34386
34406
  this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
34387
34407
  this.blocking = blocking == null ? false : blocking;
@@ -34703,9 +34723,9 @@ var require_dispatcher_base = __commonJS({
34703
34723
  }
34704
34724
  close(callback) {
34705
34725
  if (callback === void 0) {
34706
- return new Promise((resolve15, reject) => {
34726
+ return new Promise((resolve20, reject) => {
34707
34727
  this.close((err, data) => {
34708
- return err ? reject(err) : resolve15(data);
34728
+ return err ? reject(err) : resolve20(data);
34709
34729
  });
34710
34730
  });
34711
34731
  }
@@ -34743,12 +34763,12 @@ var require_dispatcher_base = __commonJS({
34743
34763
  err = null;
34744
34764
  }
34745
34765
  if (callback === void 0) {
34746
- return new Promise((resolve15, reject) => {
34766
+ return new Promise((resolve20, reject) => {
34747
34767
  this.destroy(err, (err2, data) => {
34748
34768
  return err2 ? (
34749
34769
  /* istanbul ignore next: should never error */
34750
34770
  reject(err2)
34751
- ) : resolve15(data);
34771
+ ) : resolve20(data);
34752
34772
  });
34753
34773
  });
34754
34774
  }
@@ -35389,9 +35409,9 @@ var require_RedirectHandler = __commonJS({
35389
35409
  return this.handler.onHeaders(statusCode, headers, resume, statusText);
35390
35410
  }
35391
35411
  const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
35392
- const path29 = search ? `${pathname}${search}` : pathname;
35412
+ const path33 = search ? `${pathname}${search}` : pathname;
35393
35413
  this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
35394
- this.opts.path = path29;
35414
+ this.opts.path = path33;
35395
35415
  this.opts.origin = origin;
35396
35416
  this.opts.maxRedirections = 0;
35397
35417
  this.opts.query = null;
@@ -35810,16 +35830,16 @@ var require_client = __commonJS({
35810
35830
  return this[kNeedDrain] < 2;
35811
35831
  }
35812
35832
  async [kClose]() {
35813
- return new Promise((resolve15) => {
35833
+ return new Promise((resolve20) => {
35814
35834
  if (!this[kSize]) {
35815
- resolve15(null);
35835
+ resolve20(null);
35816
35836
  } else {
35817
- this[kClosedResolve] = resolve15;
35837
+ this[kClosedResolve] = resolve20;
35818
35838
  }
35819
35839
  });
35820
35840
  }
35821
35841
  async [kDestroy](err) {
35822
- return new Promise((resolve15) => {
35842
+ return new Promise((resolve20) => {
35823
35843
  const requests = this[kQueue].splice(this[kPendingIdx]);
35824
35844
  for (let i = 0; i < requests.length; i++) {
35825
35845
  const request = requests[i];
@@ -35830,7 +35850,7 @@ var require_client = __commonJS({
35830
35850
  this[kClosedResolve]();
35831
35851
  this[kClosedResolve] = null;
35832
35852
  }
35833
- resolve15();
35853
+ resolve20();
35834
35854
  };
35835
35855
  if (this[kHTTP2Session] != null) {
35836
35856
  util.destroy(this[kHTTP2Session], err);
@@ -36410,7 +36430,7 @@ var require_client = __commonJS({
36410
36430
  });
36411
36431
  }
36412
36432
  try {
36413
- const socket = await new Promise((resolve15, reject) => {
36433
+ const socket = await new Promise((resolve20, reject) => {
36414
36434
  client[kConnector]({
36415
36435
  host,
36416
36436
  hostname,
@@ -36422,7 +36442,7 @@ var require_client = __commonJS({
36422
36442
  if (err) {
36423
36443
  reject(err);
36424
36444
  } else {
36425
- resolve15(socket2);
36445
+ resolve20(socket2);
36426
36446
  }
36427
36447
  });
36428
36448
  });
@@ -36633,7 +36653,7 @@ var require_client = __commonJS({
36633
36653
  writeH2(client, client[kHTTP2Session], request);
36634
36654
  return;
36635
36655
  }
36636
- const { body, method, path: path29, host, upgrade, headers, blocking, reset } = request;
36656
+ const { body, method, path: path33, host, upgrade, headers, blocking, reset } = request;
36637
36657
  const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
36638
36658
  if (body && typeof body.read === "function") {
36639
36659
  body.read(0);
@@ -36683,7 +36703,7 @@ var require_client = __commonJS({
36683
36703
  if (blocking) {
36684
36704
  socket[kBlocking] = true;
36685
36705
  }
36686
- let header = `${method} ${path29} HTTP/1.1\r
36706
+ let header = `${method} ${path33} HTTP/1.1\r
36687
36707
  `;
36688
36708
  if (typeof host === "string") {
36689
36709
  header += `host: ${host}\r
@@ -36746,7 +36766,7 @@ upgrade: ${upgrade}\r
36746
36766
  return true;
36747
36767
  }
36748
36768
  function writeH2(client, session, request) {
36749
- const { body, method, path: path29, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
36769
+ const { body, method, path: path33, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
36750
36770
  let headers;
36751
36771
  if (typeof reqHeaders === "string") headers = Request2[kHTTP2CopyHeaders](reqHeaders.trim());
36752
36772
  else headers = reqHeaders;
@@ -36789,7 +36809,7 @@ upgrade: ${upgrade}\r
36789
36809
  });
36790
36810
  return true;
36791
36811
  }
36792
- headers[HTTP2_HEADER_PATH] = path29;
36812
+ headers[HTTP2_HEADER_PATH] = path33;
36793
36813
  headers[HTTP2_HEADER_SCHEME] = "https";
36794
36814
  const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
36795
36815
  if (body && typeof body.read === "function") {
@@ -37046,12 +37066,12 @@ upgrade: ${upgrade}\r
37046
37066
  cb();
37047
37067
  }
37048
37068
  }
37049
- const waitForDrain = () => new Promise((resolve15, reject) => {
37069
+ const waitForDrain = () => new Promise((resolve20, reject) => {
37050
37070
  assert(callback === null);
37051
37071
  if (socket[kError]) {
37052
37072
  reject(socket[kError]);
37053
37073
  } else {
37054
- callback = resolve15;
37074
+ callback = resolve20;
37055
37075
  }
37056
37076
  });
37057
37077
  if (client[kHTTPConnVersion] === "h2") {
@@ -37397,8 +37417,8 @@ var require_pool_base = __commonJS({
37397
37417
  if (this[kQueue].isEmpty()) {
37398
37418
  return Promise.all(this[kClients].map((c) => c.close()));
37399
37419
  } else {
37400
- return new Promise((resolve15) => {
37401
- this[kClosedResolve] = resolve15;
37420
+ return new Promise((resolve20) => {
37421
+ this[kClosedResolve] = resolve20;
37402
37422
  });
37403
37423
  }
37404
37424
  }
@@ -37976,7 +37996,7 @@ var require_readable = __commonJS({
37976
37996
  if (this.closed) {
37977
37997
  return Promise.resolve(null);
37978
37998
  }
37979
- return new Promise((resolve15, reject) => {
37999
+ return new Promise((resolve20, reject) => {
37980
38000
  const signalListenerCleanup = signal ? util.addAbortListener(signal, () => {
37981
38001
  this.destroy();
37982
38002
  }) : noop;
@@ -37985,7 +38005,7 @@ var require_readable = __commonJS({
37985
38005
  if (signal && signal.aborted) {
37986
38006
  reject(signal.reason || Object.assign(new Error("The operation was aborted"), { name: "AbortError" }));
37987
38007
  } else {
37988
- resolve15(null);
38008
+ resolve20(null);
37989
38009
  }
37990
38010
  }).on("error", noop).on("data", function(chunk) {
37991
38011
  limit -= chunk.length;
@@ -38007,11 +38027,11 @@ var require_readable = __commonJS({
38007
38027
  throw new TypeError("unusable");
38008
38028
  }
38009
38029
  assert(!stream[kConsume]);
38010
- return new Promise((resolve15, reject) => {
38030
+ return new Promise((resolve20, reject) => {
38011
38031
  stream[kConsume] = {
38012
38032
  type,
38013
38033
  stream,
38014
- resolve: resolve15,
38034
+ resolve: resolve20,
38015
38035
  reject,
38016
38036
  length: 0,
38017
38037
  body: []
@@ -38046,12 +38066,12 @@ var require_readable = __commonJS({
38046
38066
  }
38047
38067
  }
38048
38068
  function consumeEnd(consume2) {
38049
- const { type, body, resolve: resolve15, stream, length } = consume2;
38069
+ const { type, body, resolve: resolve20, stream, length } = consume2;
38050
38070
  try {
38051
38071
  if (type === "text") {
38052
- resolve15(toUSVString(Buffer.concat(body)));
38072
+ resolve20(toUSVString(Buffer.concat(body)));
38053
38073
  } else if (type === "json") {
38054
- resolve15(JSON.parse(Buffer.concat(body)));
38074
+ resolve20(JSON.parse(Buffer.concat(body)));
38055
38075
  } else if (type === "arrayBuffer") {
38056
38076
  const dst = new Uint8Array(length);
38057
38077
  let pos = 0;
@@ -38059,12 +38079,12 @@ var require_readable = __commonJS({
38059
38079
  dst.set(buf, pos);
38060
38080
  pos += buf.byteLength;
38061
38081
  }
38062
- resolve15(dst.buffer);
38082
+ resolve20(dst.buffer);
38063
38083
  } else if (type === "blob") {
38064
38084
  if (!Blob2) {
38065
38085
  Blob2 = require("buffer").Blob;
38066
38086
  }
38067
- resolve15(new Blob2(body, { type: stream[kContentType] }));
38087
+ resolve20(new Blob2(body, { type: stream[kContentType] }));
38068
38088
  }
38069
38089
  consumeFinish(consume2);
38070
38090
  } catch (err) {
@@ -38321,9 +38341,9 @@ var require_api_request = __commonJS({
38321
38341
  };
38322
38342
  function request(opts, callback) {
38323
38343
  if (callback === void 0) {
38324
- return new Promise((resolve15, reject) => {
38344
+ return new Promise((resolve20, reject) => {
38325
38345
  request.call(this, opts, (err, data) => {
38326
- return err ? reject(err) : resolve15(data);
38346
+ return err ? reject(err) : resolve20(data);
38327
38347
  });
38328
38348
  });
38329
38349
  }
@@ -38496,9 +38516,9 @@ var require_api_stream = __commonJS({
38496
38516
  };
38497
38517
  function stream(opts, factory, callback) {
38498
38518
  if (callback === void 0) {
38499
- return new Promise((resolve15, reject) => {
38519
+ return new Promise((resolve20, reject) => {
38500
38520
  stream.call(this, opts, factory, (err, data) => {
38501
- return err ? reject(err) : resolve15(data);
38521
+ return err ? reject(err) : resolve20(data);
38502
38522
  });
38503
38523
  });
38504
38524
  }
@@ -38779,9 +38799,9 @@ var require_api_upgrade = __commonJS({
38779
38799
  };
38780
38800
  function upgrade(opts, callback) {
38781
38801
  if (callback === void 0) {
38782
- return new Promise((resolve15, reject) => {
38802
+ return new Promise((resolve20, reject) => {
38783
38803
  upgrade.call(this, opts, (err, data) => {
38784
- return err ? reject(err) : resolve15(data);
38804
+ return err ? reject(err) : resolve20(data);
38785
38805
  });
38786
38806
  });
38787
38807
  }
@@ -38870,9 +38890,9 @@ var require_api_connect = __commonJS({
38870
38890
  };
38871
38891
  function connect(opts, callback) {
38872
38892
  if (callback === void 0) {
38873
- return new Promise((resolve15, reject) => {
38893
+ return new Promise((resolve20, reject) => {
38874
38894
  connect.call(this, opts, (err, data) => {
38875
- return err ? reject(err) : resolve15(data);
38895
+ return err ? reject(err) : resolve20(data);
38876
38896
  });
38877
38897
  });
38878
38898
  }
@@ -39032,20 +39052,20 @@ var require_mock_utils = __commonJS({
39032
39052
  }
39033
39053
  return true;
39034
39054
  }
39035
- function safeUrl(path29) {
39036
- if (typeof path29 !== "string") {
39037
- return path29;
39055
+ function safeUrl(path33) {
39056
+ if (typeof path33 !== "string") {
39057
+ return path33;
39038
39058
  }
39039
- const pathSegments = path29.split("?");
39059
+ const pathSegments = path33.split("?");
39040
39060
  if (pathSegments.length !== 2) {
39041
- return path29;
39061
+ return path33;
39042
39062
  }
39043
39063
  const qp = new URLSearchParams(pathSegments.pop());
39044
39064
  qp.sort();
39045
39065
  return [...pathSegments, qp.toString()].join("?");
39046
39066
  }
39047
- function matchKey(mockDispatch2, { path: path29, method, body, headers }) {
39048
- const pathMatch = matchValue(mockDispatch2.path, path29);
39067
+ function matchKey(mockDispatch2, { path: path33, method, body, headers }) {
39068
+ const pathMatch = matchValue(mockDispatch2.path, path33);
39049
39069
  const methodMatch = matchValue(mockDispatch2.method, method);
39050
39070
  const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
39051
39071
  const headersMatch = matchHeaders(mockDispatch2, headers);
@@ -39063,7 +39083,7 @@ var require_mock_utils = __commonJS({
39063
39083
  function getMockDispatch(mockDispatches, key) {
39064
39084
  const basePath = key.query ? buildURL(key.path, key.query) : key.path;
39065
39085
  const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
39066
- let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path29 }) => matchValue(safeUrl(path29), resolvedPath));
39086
+ let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path33 }) => matchValue(safeUrl(path33), resolvedPath));
39067
39087
  if (matchedMockDispatches.length === 0) {
39068
39088
  throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
39069
39089
  }
@@ -39100,9 +39120,9 @@ var require_mock_utils = __commonJS({
39100
39120
  }
39101
39121
  }
39102
39122
  function buildKey(opts) {
39103
- const { path: path29, method, body, headers, query } = opts;
39123
+ const { path: path33, method, body, headers, query } = opts;
39104
39124
  return {
39105
- path: path29,
39125
+ path: path33,
39106
39126
  method,
39107
39127
  body,
39108
39128
  headers,
@@ -39551,10 +39571,10 @@ var require_pending_interceptors_formatter = __commonJS({
39551
39571
  }
39552
39572
  format(pendingInterceptors) {
39553
39573
  const withPrettyHeaders = pendingInterceptors.map(
39554
- ({ method, path: path29, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
39574
+ ({ method, path: path33, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
39555
39575
  Method: method,
39556
39576
  Origin: origin,
39557
- Path: path29,
39577
+ Path: path33,
39558
39578
  "Status code": statusCode,
39559
39579
  Persistent: persist ? "\u2705" : "\u274C",
39560
39580
  Invocations: timesInvoked,
@@ -42495,7 +42515,7 @@ var require_fetch = __commonJS({
42495
42515
  async function dispatch({ body }) {
42496
42516
  const url = requestCurrentURL(request);
42497
42517
  const agent = fetchParams.controller.dispatcher;
42498
- return new Promise((resolve15, reject) => agent.dispatch(
42518
+ return new Promise((resolve20, reject) => agent.dispatch(
42499
42519
  {
42500
42520
  path: url.pathname + url.search,
42501
42521
  origin: url.origin,
@@ -42571,7 +42591,7 @@ var require_fetch = __commonJS({
42571
42591
  }
42572
42592
  }
42573
42593
  }
42574
- resolve15({
42594
+ resolve20({
42575
42595
  status,
42576
42596
  statusText,
42577
42597
  headersList: headers[kHeadersList],
@@ -42614,7 +42634,7 @@ var require_fetch = __commonJS({
42614
42634
  const val = headersList[n + 1].toString("latin1");
42615
42635
  headers[kHeadersList].append(key, val);
42616
42636
  }
42617
- resolve15({
42637
+ resolve20({
42618
42638
  status,
42619
42639
  statusText: STATUS_CODES[status],
42620
42640
  headersList: headers[kHeadersList],
@@ -44175,8 +44195,8 @@ var require_util6 = __commonJS({
44175
44195
  }
44176
44196
  }
44177
44197
  }
44178
- function validateCookiePath(path29) {
44179
- for (const char of path29) {
44198
+ function validateCookiePath(path33) {
44199
+ for (const char of path33) {
44180
44200
  const code = char.charCodeAt(0);
44181
44201
  if (code < 33 || char === ";") {
44182
44202
  throw new Error("Invalid cookie path");
@@ -44973,9 +44993,9 @@ var require_connection = __commonJS({
44973
44993
  channels.open = diagnosticsChannel.channel("undici:websocket:open");
44974
44994
  channels.close = diagnosticsChannel.channel("undici:websocket:close");
44975
44995
  channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error");
44976
- var crypto7;
44996
+ var crypto9;
44977
44997
  try {
44978
- crypto7 = require("crypto");
44998
+ crypto9 = require("crypto");
44979
44999
  } catch {
44980
45000
  }
44981
45001
  function establishWebSocketConnection(url, protocols, ws, onEstablish, options) {
@@ -44994,7 +45014,7 @@ var require_connection = __commonJS({
44994
45014
  const headersList = new Headers(options.headers)[kHeadersList];
44995
45015
  request.headersList = headersList;
44996
45016
  }
44997
- const keyValue = crypto7.randomBytes(16).toString("base64");
45017
+ const keyValue = crypto9.randomBytes(16).toString("base64");
44998
45018
  request.headersList.append("sec-websocket-key", keyValue);
44999
45019
  request.headersList.append("sec-websocket-version", "13");
45000
45020
  for (const protocol of protocols) {
@@ -45023,7 +45043,7 @@ var require_connection = __commonJS({
45023
45043
  return;
45024
45044
  }
45025
45045
  const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
45026
- const digest = crypto7.createHash("sha1").update(keyValue + uid).digest("base64");
45046
+ const digest = crypto9.createHash("sha1").update(keyValue + uid).digest("base64");
45027
45047
  if (secWSAccept !== digest) {
45028
45048
  failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
45029
45049
  return;
@@ -45103,9 +45123,9 @@ var require_frame = __commonJS({
45103
45123
  "node_modules/undici/lib/websocket/frame.js"(exports2, module2) {
45104
45124
  "use strict";
45105
45125
  var { maxUnsigned16Bit } = require_constants5();
45106
- var crypto7;
45126
+ var crypto9;
45107
45127
  try {
45108
- crypto7 = require("crypto");
45128
+ crypto9 = require("crypto");
45109
45129
  } catch {
45110
45130
  }
45111
45131
  var WebsocketFrameSend = class {
@@ -45114,7 +45134,7 @@ var require_frame = __commonJS({
45114
45134
  */
45115
45135
  constructor(data) {
45116
45136
  this.frameData = data;
45117
- this.maskKey = crypto7.randomBytes(4);
45137
+ this.maskKey = crypto9.randomBytes(4);
45118
45138
  }
45119
45139
  createFrame(opcode) {
45120
45140
  const bodyLength = this.frameData?.byteLength ?? 0;
@@ -45856,11 +45876,11 @@ var require_undici = __commonJS({
45856
45876
  if (typeof opts.path !== "string") {
45857
45877
  throw new InvalidArgumentError("invalid opts.path");
45858
45878
  }
45859
- let path29 = opts.path;
45879
+ let path33 = opts.path;
45860
45880
  if (!opts.path.startsWith("/")) {
45861
- path29 = `/${path29}`;
45881
+ path33 = `/${path33}`;
45862
45882
  }
45863
- url = new URL(util.parseOrigin(url).origin + path29);
45883
+ url = new URL(util.parseOrigin(url).origin + path33);
45864
45884
  } else {
45865
45885
  if (!opts) {
45866
45886
  opts = typeof url === "object" ? url : {};
@@ -46429,7 +46449,7 @@ var init_mcp_check_provider = __esm({
46429
46449
  logger.warn(
46430
46450
  `MCP ${transportName} failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${error instanceof Error ? error.message : String(error)}`
46431
46451
  );
46432
- await new Promise((resolve15) => setTimeout(resolve15, delay));
46452
+ await new Promise((resolve20) => setTimeout(resolve20, delay));
46433
46453
  attempt += 1;
46434
46454
  } finally {
46435
46455
  try {
@@ -46722,7 +46742,7 @@ async function acquirePromptLock() {
46722
46742
  );
46723
46743
  }, 1e4);
46724
46744
  try {
46725
- await new Promise((resolve15) => waiters.push(resolve15));
46745
+ await new Promise((resolve20) => waiters.push(resolve20));
46726
46746
  } finally {
46727
46747
  clearInterval(reminder);
46728
46748
  const waitedMs = Date.now() - queuedAt;
@@ -46741,7 +46761,7 @@ function releasePromptLock() {
46741
46761
  }
46742
46762
  async function interactivePrompt(options) {
46743
46763
  await acquirePromptLock();
46744
- return new Promise((resolve15, reject) => {
46764
+ return new Promise((resolve20, reject) => {
46745
46765
  const dbg = process.env.VISOR_DEBUG === "true";
46746
46766
  try {
46747
46767
  if (dbg) {
@@ -46828,12 +46848,12 @@ async function interactivePrompt(options) {
46828
46848
  };
46829
46849
  const finish = (value) => {
46830
46850
  cleanup();
46831
- resolve15(value);
46851
+ resolve20(value);
46832
46852
  };
46833
46853
  if (options.timeout && options.timeout > 0) {
46834
46854
  timeoutId = setTimeout(() => {
46835
46855
  cleanup();
46836
- if (defaultValue !== void 0) return resolve15(defaultValue);
46856
+ if (defaultValue !== void 0) return resolve20(defaultValue);
46837
46857
  return reject(new Error("Input timeout"));
46838
46858
  }, options.timeout);
46839
46859
  }
@@ -46965,7 +46985,7 @@ async function interactivePrompt(options) {
46965
46985
  });
46966
46986
  }
46967
46987
  async function simplePrompt(prompt) {
46968
- return new Promise((resolve15) => {
46988
+ return new Promise((resolve20) => {
46969
46989
  const rl = readline.createInterface({
46970
46990
  input: process.stdin,
46971
46991
  output: process.stdout
@@ -46981,7 +47001,7 @@ async function simplePrompt(prompt) {
46981
47001
  rl.question(`${prompt}
46982
47002
  > `, (answer) => {
46983
47003
  rl.close();
46984
- resolve15(answer.trim());
47004
+ resolve20(answer.trim());
46985
47005
  });
46986
47006
  });
46987
47007
  }
@@ -47149,7 +47169,7 @@ function isStdinAvailable() {
47149
47169
  return !process.stdin.isTTY;
47150
47170
  }
47151
47171
  async function readStdin(timeout, maxSize = 1024 * 1024) {
47152
- return new Promise((resolve15, reject) => {
47172
+ return new Promise((resolve20, reject) => {
47153
47173
  let data = "";
47154
47174
  let timeoutId;
47155
47175
  if (timeout) {
@@ -47176,7 +47196,7 @@ async function readStdin(timeout, maxSize = 1024 * 1024) {
47176
47196
  };
47177
47197
  const onEnd = () => {
47178
47198
  cleanup();
47179
- resolve15(data.trim());
47199
+ resolve20(data.trim());
47180
47200
  };
47181
47201
  const onError = (err) => {
47182
47202
  cleanup();
@@ -51895,23 +51915,23 @@ __export(renderer_schema_exports, {
51895
51915
  });
51896
51916
  async function loadRendererSchema(name) {
51897
51917
  try {
51898
- const fs25 = await import("fs/promises");
51899
- const path29 = await import("path");
51918
+ const fs29 = await import("fs/promises");
51919
+ const path33 = await import("path");
51900
51920
  const sanitized = String(name).replace(/[^a-zA-Z0-9-]/g, "");
51901
51921
  if (!sanitized) return void 0;
51902
51922
  const candidates = [
51903
51923
  // When bundled with ncc, __dirname is dist/ and output/ is at dist/output/
51904
- path29.join(__dirname, "output", sanitized, "schema.json"),
51924
+ path33.join(__dirname, "output", sanitized, "schema.json"),
51905
51925
  // When running from source, __dirname is src/state-machine/dispatch/ and output/ is at output/
51906
- path29.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
51926
+ path33.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
51907
51927
  // When running from a checkout with output/ folder copied to CWD
51908
- path29.join(process.cwd(), "output", sanitized, "schema.json"),
51928
+ path33.join(process.cwd(), "output", sanitized, "schema.json"),
51909
51929
  // Fallback: cwd/dist/output/
51910
- path29.join(process.cwd(), "dist", "output", sanitized, "schema.json")
51930
+ path33.join(process.cwd(), "dist", "output", sanitized, "schema.json")
51911
51931
  ];
51912
51932
  for (const p of candidates) {
51913
51933
  try {
51914
- const raw = await fs25.readFile(p, "utf-8");
51934
+ const raw = await fs29.readFile(p, "utf-8");
51915
51935
  return JSON.parse(raw);
51916
51936
  } catch {
51917
51937
  }
@@ -54363,8 +54383,8 @@ function updateStats2(results, state, isForEachIteration = false) {
54363
54383
  async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
54364
54384
  try {
54365
54385
  const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
54366
- const fs25 = await import("fs/promises");
54367
- const path29 = await import("path");
54386
+ const fs29 = await import("fs/promises");
54387
+ const path33 = await import("path");
54368
54388
  const schemaRaw = checkConfig.schema || "plain";
54369
54389
  const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
54370
54390
  let templateContent;
@@ -54373,27 +54393,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
54373
54393
  logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
54374
54394
  } else if (checkConfig.template && checkConfig.template.file) {
54375
54395
  const file = String(checkConfig.template.file);
54376
- const resolved = path29.resolve(process.cwd(), file);
54377
- templateContent = await fs25.readFile(resolved, "utf-8");
54396
+ const resolved = path33.resolve(process.cwd(), file);
54397
+ templateContent = await fs29.readFile(resolved, "utf-8");
54378
54398
  logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
54379
54399
  } else if (schema && schema !== "plain") {
54380
54400
  const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
54381
54401
  if (sanitized) {
54382
54402
  const candidatePaths = [
54383
- path29.join(__dirname, "output", sanitized, "template.liquid"),
54403
+ path33.join(__dirname, "output", sanitized, "template.liquid"),
54384
54404
  // bundled: dist/output/
54385
- path29.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
54405
+ path33.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
54386
54406
  // source (from state-machine/states)
54387
- path29.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
54407
+ path33.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
54388
54408
  // source (alternate)
54389
- path29.join(process.cwd(), "output", sanitized, "template.liquid"),
54409
+ path33.join(process.cwd(), "output", sanitized, "template.liquid"),
54390
54410
  // fallback: cwd/output/
54391
- path29.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
54411
+ path33.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
54392
54412
  // fallback: cwd/dist/output/
54393
54413
  ];
54394
54414
  for (const p of candidatePaths) {
54395
54415
  try {
54396
- templateContent = await fs25.readFile(p, "utf-8");
54416
+ templateContent = await fs29.readFile(p, "utf-8");
54397
54417
  if (templateContent) {
54398
54418
  logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
54399
54419
  break;
@@ -56533,8 +56553,8 @@ var init_workspace_manager = __esm({
56533
56553
  );
56534
56554
  if (this.cleanupRequested && this.activeOperations === 0) {
56535
56555
  logger.debug(`[Workspace] All references released, proceeding with deferred cleanup`);
56536
- for (const resolve15 of this.cleanupResolvers) {
56537
- resolve15();
56556
+ for (const resolve20 of this.cleanupResolvers) {
56557
+ resolve20();
56538
56558
  }
56539
56559
  this.cleanupResolvers = [];
56540
56560
  }
@@ -56581,8 +56601,32 @@ var init_workspace_manager = __esm({
56581
56601
  configuredMainProjectName || this.extractProjectName(this.originalPath)
56582
56602
  );
56583
56603
  this.usedNames.add(mainProjectName);
56584
- const mainProjectPath = path25.join(this.workspacePath, mainProjectName);
56604
+ let mainProjectPath = path25.join(this.workspacePath, mainProjectName);
56585
56605
  const isGitRepo = await this.isGitRepository(this.originalPath);
56606
+ if (isGitRepo) {
56607
+ try {
56608
+ await commandExecutor.execute(`git -C ${shellEscape(this.originalPath)} worktree prune`, {
56609
+ timeout: 15e3
56610
+ });
56611
+ } catch {
56612
+ }
56613
+ }
56614
+ let subdirOffset = "";
56615
+ if (isGitRepo) {
56616
+ const gitRootResult = await commandExecutor.execute(
56617
+ `git -C ${shellEscape(this.originalPath)} rev-parse --show-toplevel`,
56618
+ { timeout: 5e3 }
56619
+ );
56620
+ if (gitRootResult.exitCode === 0) {
56621
+ const gitRoot = gitRootResult.stdout.trim();
56622
+ const normalizedOriginal = path25.resolve(this.originalPath);
56623
+ const normalizedRoot = path25.resolve(gitRoot);
56624
+ if (normalizedOriginal !== normalizedRoot) {
56625
+ subdirOffset = path25.relative(normalizedRoot, normalizedOriginal);
56626
+ logger.info(`[Workspace] Original path is a subdirectory of git repo: ${subdirOffset}`);
56627
+ }
56628
+ }
56629
+ }
56586
56630
  if (isGitRepo) {
56587
56631
  const exists = await this.pathExists(mainProjectPath);
56588
56632
  if (exists) {
@@ -56616,6 +56660,18 @@ var init_workspace_manager = __esm({
56616
56660
  }
56617
56661
  }
56618
56662
  }
56663
+ const worktreeRootPath = mainProjectPath;
56664
+ if (subdirOffset) {
56665
+ mainProjectPath = path25.join(mainProjectPath, subdirOffset);
56666
+ logger.info(`[Workspace] Adjusted main project path to subdirectory: ${mainProjectPath}`);
56667
+ const subdirExists = await this.pathExists(mainProjectPath);
56668
+ if (!subdirExists) {
56669
+ logger.warn(
56670
+ `[Workspace] Subdirectory '${subdirOffset}' not found in worktree \u2014 falling back to worktree root`
56671
+ );
56672
+ mainProjectPath = path25.join(this.workspacePath, mainProjectName);
56673
+ }
56674
+ }
56619
56675
  try {
56620
56676
  const entries = await fsp2.readdir(this.workspacePath, { withFileTypes: true });
56621
56677
  for (const entry of entries) {
@@ -56631,7 +56687,8 @@ var init_workspace_manager = __esm({
56631
56687
  workspacePath: this.workspacePath,
56632
56688
  mainProjectPath,
56633
56689
  mainProjectName,
56634
- originalPath: this.originalPath
56690
+ originalPath: this.originalPath,
56691
+ worktreeRootPath
56635
56692
  };
56636
56693
  this.initialized = true;
56637
56694
  logger.info(`Workspace initialized: ${this.workspacePath}`);
@@ -56713,30 +56770,30 @@ var init_workspace_manager = __esm({
56713
56770
  );
56714
56771
  this.cleanupRequested = true;
56715
56772
  await Promise.race([
56716
- new Promise((resolve15) => {
56773
+ new Promise((resolve20) => {
56717
56774
  if (this.activeOperations === 0) {
56718
- resolve15();
56775
+ resolve20();
56719
56776
  } else {
56720
- this.cleanupResolvers.push(resolve15);
56777
+ this.cleanupResolvers.push(resolve20);
56721
56778
  }
56722
56779
  }),
56723
- new Promise((resolve15) => {
56780
+ new Promise((resolve20) => {
56724
56781
  setTimeout(() => {
56725
56782
  logger.warn(
56726
56783
  `[Workspace] Cleanup timeout after ${timeout}ms, proceeding anyway (${this.activeOperations} operations still active)`
56727
56784
  );
56728
- resolve15();
56785
+ resolve20();
56729
56786
  }, timeout);
56730
56787
  })
56731
56788
  ]);
56732
56789
  }
56733
56790
  try {
56734
56791
  if (this.mainProjectInfo) {
56735
- const mainProjectPath = this.mainProjectInfo.mainProjectPath;
56792
+ const worktreePath = this.mainProjectInfo.worktreeRootPath || this.mainProjectInfo.mainProjectPath;
56736
56793
  try {
56737
- const stats = await fsp2.lstat(mainProjectPath);
56794
+ const stats = await fsp2.lstat(worktreePath);
56738
56795
  if (!stats.isSymbolicLink()) {
56739
- await this.removeMainProjectWorktree(mainProjectPath);
56796
+ await this.removeMainProjectWorktree(worktreePath);
56740
56797
  }
56741
56798
  } catch {
56742
56799
  }
@@ -57140,8 +57197,8 @@ var init_fair_concurrency_limiter = __esm({
57140
57197
  );
57141
57198
  const queuedAt = Date.now();
57142
57199
  const effectiveTimeout = queueTimeout ?? 12e4;
57143
- return new Promise((resolve15, reject) => {
57144
- const entry = { resolve: resolve15, reject, queuedAt };
57200
+ return new Promise((resolve20, reject) => {
57201
+ const entry = { resolve: resolve20, reject, queuedAt };
57145
57202
  entry.reminder = setInterval(() => {
57146
57203
  const waited = Math.round((Date.now() - queuedAt) / 1e3);
57147
57204
  const curQueued = this._totalQueued();
@@ -57449,6 +57506,1380 @@ var init_build_engine_context = __esm({
57449
57506
  }
57450
57507
  });
57451
57508
 
57509
+ // src/policy/default-engine.ts
57510
+ var DefaultPolicyEngine;
57511
+ var init_default_engine = __esm({
57512
+ "src/policy/default-engine.ts"() {
57513
+ "use strict";
57514
+ DefaultPolicyEngine = class {
57515
+ async initialize(_config) {
57516
+ }
57517
+ async evaluateCheckExecution(_checkId, _checkConfig) {
57518
+ return { allowed: true };
57519
+ }
57520
+ async evaluateToolInvocation(_serverName, _methodName, _transport) {
57521
+ return { allowed: true };
57522
+ }
57523
+ async evaluateCapabilities(_checkId, _capabilities) {
57524
+ return { allowed: true };
57525
+ }
57526
+ async shutdown() {
57527
+ }
57528
+ };
57529
+ }
57530
+ });
57531
+
57532
+ // src/enterprise/license/validator.ts
57533
+ var validator_exports = {};
57534
+ __export(validator_exports, {
57535
+ LicenseValidator: () => LicenseValidator
57536
+ });
57537
+ var crypto3, fs21, path26, LicenseValidator;
57538
+ var init_validator = __esm({
57539
+ "src/enterprise/license/validator.ts"() {
57540
+ "use strict";
57541
+ crypto3 = __toESM(require("crypto"));
57542
+ fs21 = __toESM(require("fs"));
57543
+ path26 = __toESM(require("path"));
57544
+ LicenseValidator = class _LicenseValidator {
57545
+ /** Ed25519 public key for license verification (PEM format). */
57546
+ static PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n-----END PUBLIC KEY-----\n";
57547
+ cache = null;
57548
+ static CACHE_TTL = 5 * 60 * 1e3;
57549
+ // 5 minutes
57550
+ static GRACE_PERIOD = 72 * 3600 * 1e3;
57551
+ // 72 hours after expiry
57552
+ /**
57553
+ * Load and validate license from environment or file.
57554
+ *
57555
+ * Resolution order:
57556
+ * 1. VISOR_LICENSE env var (JWT string)
57557
+ * 2. VISOR_LICENSE_FILE env var (path to file)
57558
+ * 3. .visor-license in project root (cwd)
57559
+ * 4. .visor-license in ~/.config/visor/
57560
+ */
57561
+ async loadAndValidate() {
57562
+ if (this.cache && Date.now() - this.cache.validatedAt < _LicenseValidator.CACHE_TTL) {
57563
+ return this.cache.payload;
57564
+ }
57565
+ const token = this.resolveToken();
57566
+ if (!token) return null;
57567
+ const payload = this.verifyAndDecode(token);
57568
+ if (!payload) return null;
57569
+ this.cache = { payload, validatedAt: Date.now() };
57570
+ return payload;
57571
+ }
57572
+ /** Check if a specific feature is licensed */
57573
+ hasFeature(feature) {
57574
+ if (!this.cache) return false;
57575
+ return this.cache.payload.features.includes(feature);
57576
+ }
57577
+ /** Check if license is valid (with grace period) */
57578
+ isValid() {
57579
+ if (!this.cache) return false;
57580
+ const now = Date.now();
57581
+ const expiryMs = this.cache.payload.exp * 1e3;
57582
+ return now < expiryMs + _LicenseValidator.GRACE_PERIOD;
57583
+ }
57584
+ /** Check if the license is within its grace period (expired but still valid) */
57585
+ isInGracePeriod() {
57586
+ if (!this.cache) return false;
57587
+ const now = Date.now();
57588
+ const expiryMs = this.cache.payload.exp * 1e3;
57589
+ return now >= expiryMs && now < expiryMs + _LicenseValidator.GRACE_PERIOD;
57590
+ }
57591
+ resolveToken() {
57592
+ if (process.env.VISOR_LICENSE) {
57593
+ return process.env.VISOR_LICENSE.trim();
57594
+ }
57595
+ if (process.env.VISOR_LICENSE_FILE) {
57596
+ const resolved = path26.resolve(process.env.VISOR_LICENSE_FILE);
57597
+ const home2 = process.env.HOME || process.env.USERPROFILE || "";
57598
+ const allowedPrefixes = [path26.normalize(process.cwd())];
57599
+ if (home2) allowedPrefixes.push(path26.normalize(path26.join(home2, ".config", "visor")));
57600
+ let realPath;
57601
+ try {
57602
+ realPath = fs21.realpathSync(resolved);
57603
+ } catch {
57604
+ return null;
57605
+ }
57606
+ const isSafe = allowedPrefixes.some(
57607
+ (prefix) => realPath === prefix || realPath.startsWith(prefix + path26.sep)
57608
+ );
57609
+ if (!isSafe) return null;
57610
+ return this.readFile(realPath);
57611
+ }
57612
+ const cwdPath = path26.join(process.cwd(), ".visor-license");
57613
+ const cwdToken = this.readFile(cwdPath);
57614
+ if (cwdToken) return cwdToken;
57615
+ const home = process.env.HOME || process.env.USERPROFILE || "";
57616
+ if (home) {
57617
+ const configPath = path26.join(home, ".config", "visor", ".visor-license");
57618
+ const configToken = this.readFile(configPath);
57619
+ if (configToken) return configToken;
57620
+ }
57621
+ return null;
57622
+ }
57623
+ readFile(filePath) {
57624
+ try {
57625
+ return fs21.readFileSync(filePath, "utf-8").trim();
57626
+ } catch {
57627
+ return null;
57628
+ }
57629
+ }
57630
+ verifyAndDecode(token) {
57631
+ try {
57632
+ const parts = token.split(".");
57633
+ if (parts.length !== 3) return null;
57634
+ const [headerB64, payloadB64, signatureB64] = parts;
57635
+ const header = JSON.parse(Buffer.from(headerB64, "base64url").toString());
57636
+ if (header.alg !== "EdDSA") return null;
57637
+ const data = `${headerB64}.${payloadB64}`;
57638
+ const signature = Buffer.from(signatureB64, "base64url");
57639
+ const publicKey = crypto3.createPublicKey(_LicenseValidator.PUBLIC_KEY);
57640
+ if (publicKey.asymmetricKeyType !== "ed25519") {
57641
+ return null;
57642
+ }
57643
+ const isValid = crypto3.verify(null, Buffer.from(data), publicKey, signature);
57644
+ if (!isValid) return null;
57645
+ const payload = JSON.parse(Buffer.from(payloadB64, "base64url").toString());
57646
+ if (!payload.org || !Array.isArray(payload.features) || typeof payload.exp !== "number" || typeof payload.iat !== "number" || !payload.sub) {
57647
+ return null;
57648
+ }
57649
+ const now = Date.now();
57650
+ const expiryMs = payload.exp * 1e3;
57651
+ if (now >= expiryMs + _LicenseValidator.GRACE_PERIOD) {
57652
+ return null;
57653
+ }
57654
+ return payload;
57655
+ } catch {
57656
+ return null;
57657
+ }
57658
+ }
57659
+ };
57660
+ }
57661
+ });
57662
+
57663
+ // src/enterprise/policy/opa-compiler.ts
57664
+ var fs22, path27, os2, crypto4, import_child_process8, OpaCompiler;
57665
+ var init_opa_compiler = __esm({
57666
+ "src/enterprise/policy/opa-compiler.ts"() {
57667
+ "use strict";
57668
+ fs22 = __toESM(require("fs"));
57669
+ path27 = __toESM(require("path"));
57670
+ os2 = __toESM(require("os"));
57671
+ crypto4 = __toESM(require("crypto"));
57672
+ import_child_process8 = require("child_process");
57673
+ OpaCompiler = class _OpaCompiler {
57674
+ static CACHE_DIR = path27.join(os2.tmpdir(), "visor-opa-cache");
57675
+ /**
57676
+ * Resolve the input paths to WASM bytes.
57677
+ *
57678
+ * Strategy:
57679
+ * 1. If any path is a .wasm file, read it directly
57680
+ * 2. If a directory contains policy.wasm, read it
57681
+ * 3. Otherwise, collect all .rego files and auto-compile via `opa build`
57682
+ */
57683
+ async resolveWasmBytes(paths) {
57684
+ const regoFiles = [];
57685
+ for (const p of paths) {
57686
+ const resolved = path27.resolve(p);
57687
+ if (path27.normalize(resolved).includes("..")) {
57688
+ throw new Error(`Policy path contains traversal sequences: ${p}`);
57689
+ }
57690
+ if (resolved.endsWith(".wasm") && fs22.existsSync(resolved)) {
57691
+ return fs22.readFileSync(resolved);
57692
+ }
57693
+ if (!fs22.existsSync(resolved)) continue;
57694
+ const stat2 = fs22.statSync(resolved);
57695
+ if (stat2.isDirectory()) {
57696
+ const wasmCandidate = path27.join(resolved, "policy.wasm");
57697
+ if (fs22.existsSync(wasmCandidate)) {
57698
+ return fs22.readFileSync(wasmCandidate);
57699
+ }
57700
+ const files = fs22.readdirSync(resolved);
57701
+ for (const f of files) {
57702
+ if (f.endsWith(".rego")) {
57703
+ regoFiles.push(path27.join(resolved, f));
57704
+ }
57705
+ }
57706
+ } else if (resolved.endsWith(".rego")) {
57707
+ regoFiles.push(resolved);
57708
+ }
57709
+ }
57710
+ if (regoFiles.length === 0) {
57711
+ throw new Error(
57712
+ `OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(", ")}`
57713
+ );
57714
+ }
57715
+ return this.compileRego(regoFiles);
57716
+ }
57717
/**
 * Auto-compile .rego files to a WASM bundle using the `opa` CLI.
 *
 * Caches the compiled bundle based on a content hash of all input .rego files
 * so subsequent runs skip compilation if policies haven't changed.
 *
 * @param regoFiles - paths of the .rego policy source files
 * @returns Buffer with the compiled `policy.wasm` bytes
 * @throws if the `opa` CLI is missing, compilation fails, or the produced
 *         bundle does not contain a `policy.wasm` entry
 */
compileRego(regoFiles) {
  // Probe for the `opa` binary up front so the user gets an actionable
  // message instead of a cryptic spawn error from the build step below.
  try {
    (0, import_child_process8.execFileSync)("opa", ["version"], { stdio: "pipe" });
  } catch {
    throw new Error(
      "OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\nOr pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz " + regoFiles.join(" ")
    );
  }
  // Cache key: sha256 over each file's bytes AND its path, truncated to 16
  // hex chars. Hashing the path too means a rename invalidates the cache.
  // NOTE(review): `regoFiles.sort()` sorts the caller's array in place.
  const hash = crypto4.createHash("sha256");
  for (const f of regoFiles.sort()) {
    hash.update(fs22.readFileSync(f));
    hash.update(f);
  }
  const cacheKey = hash.digest("hex").slice(0, 16);
  const cacheDir = _OpaCompiler.CACHE_DIR;
  const cachedWasm = path27.join(cacheDir, `${cacheKey}.wasm`);
  // Cache hit: skip compilation entirely.
  if (fs22.existsSync(cachedWasm)) {
    return fs22.readFileSync(cachedWasm);
  }
  fs22.mkdirSync(cacheDir, { recursive: true });
  const bundleTar = path27.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
  try {
    const args = [
      "build",
      "-t",
      "wasm",
      "-e",
      "visor",
      // entrypoint: the visor package tree
      "-o",
      bundleTar,
      ...regoFiles
    ];
    (0, import_child_process8.execFileSync)("opa", args, {
      stdio: "pipe",
      timeout: 3e4 // cap the external compile step at 30s
    });
  } catch (err) {
    const stderr = err?.stderr?.toString() || "";
    throw new Error(
      `Failed to compile .rego files to WASM:
${stderr}
Ensure your .rego files are valid and the \`opa\` CLI is installed.`
    );
  }
  // OPA bundles store the module as "/policy.wasm". Some tar implementations
  // address the member with the leading slash, others without — try both.
  try {
    (0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "/policy.wasm"], {
      stdio: "pipe"
    });
    const extractedWasm = path27.join(cacheDir, "policy.wasm");
    if (fs22.existsSync(extractedWasm)) {
      fs22.renameSync(extractedWasm, cachedWasm);
    }
  } catch {
    try {
      (0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "policy.wasm"], {
        stdio: "pipe"
      });
      const extractedWasm = path27.join(cacheDir, "policy.wasm");
      if (fs22.existsSync(extractedWasm)) {
        fs22.renameSync(extractedWasm, cachedWasm);
      }
    } catch (err2) {
      throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
    }
  }
  // Best-effort cleanup of the intermediate tarball; failure is harmless.
  try {
    fs22.unlinkSync(bundleTar);
  } catch {
  }
  // Extraction may have "succeeded" without producing the file (e.g. member
  // name mismatch) — verify before reading.
  if (!fs22.existsSync(cachedWasm)) {
    throw new Error("OPA build succeeded but policy.wasm was not found in the bundle");
  }
  return fs22.readFileSync(cachedWasm);
}
57798
+ };
57799
+ }
57800
+ });
57801
+
57802
// src/enterprise/policy/opa-wasm-evaluator.ts
var fs23, path28, OpaWasmEvaluator;
var init_opa_wasm_evaluator = __esm({
  "src/enterprise/policy/opa-wasm-evaluator.ts"() {
    "use strict";
    fs23 = __toESM(require("fs"));
    path28 = __toESM(require("path"));
    init_opa_compiler();
    /**
     * Policy evaluator that runs a compiled OPA WASM bundle in-process.
     * The @open-policy-agent/opa-wasm runtime is resolved lazily so the
     * dependency stays optional until the evaluator is actually used.
     */
    OpaWasmEvaluator = class {
      policy = null;
      dataDocument = {};
      compiler = new OpaCompiler();
      /**
       * Resolve the rules path(s) to WASM bytes and load them into an OPA
       * policy instance.
       * @param rulesPath - a single path or array of paths to .wasm/.rego files
       */
      async initialize(rulesPath) {
        const ruleSources = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
        const wasmBytes = await this.compiler.resolveWasmBytes(ruleSources);
        try {
          // Resolve the runtime dynamically relative to this file.
          const { createRequire } = require("module");
          const runtimeRequire = createRequire(__filename);
          const opaWasm = runtimeRequire("@open-policy-agent/opa-wasm");
          const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
          if (!loadPolicy) {
            throw new Error("loadPolicy not found in @open-policy-agent/opa-wasm");
          }
          this.policy = await loadPolicy(wasmBytes);
        } catch (err) {
          const code = err?.code;
          if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
            throw new Error(
              "OPA WASM evaluator requires @open-policy-agent/opa-wasm. Install it with: npm install @open-policy-agent/opa-wasm"
            );
          }
          throw err;
        }
      }
      /**
       * Load external data from a JSON file to use as the OPA data document.
       * The loaded data will be passed to `policy.setData()` during evaluation,
       * making it available in Rego via `data.<key>`.
       */
      loadData(dataPath) {
        const resolvedPath = path28.resolve(dataPath);
        // Defensive traversal check (path.resolve already collapses "..").
        if (path28.normalize(resolvedPath).includes("..")) {
          throw new Error(`Data path contains traversal sequences: ${dataPath}`);
        }
        if (!fs23.existsSync(resolvedPath)) {
          throw new Error(`OPA data file not found: ${resolvedPath}`);
        }
        const fileInfo = fs23.statSync(resolvedPath);
        // Hard 10MB ceiling on the data document.
        if (fileInfo.size > 10 * 1024 * 1024) {
          throw new Error(`OPA data file exceeds 10MB limit: ${resolvedPath} (${fileInfo.size} bytes)`);
        }
        const jsonText = fs23.readFileSync(resolvedPath, "utf-8");
        try {
          const doc = JSON.parse(jsonText);
          const isPlainObject = doc !== null && typeof doc === "object" && !Array.isArray(doc);
          if (!isPlainObject) {
            throw new Error("OPA data file must contain a JSON object (not an array or primitive)");
          }
          this.dataDocument = doc;
        } catch (err) {
          // Re-throw our own shape error untouched; wrap JSON parse failures.
          if (err.message.startsWith("OPA data file must")) {
            throw err;
          }
          throw new Error(`Failed to parse OPA data file ${resolvedPath}: ${err.message}`);
        }
      }
      /**
       * Evaluate the loaded policy against an input document.
       * @returns the first result's `result` field, or undefined when the
       *          result set is empty
       */
      async evaluate(input) {
        if (!this.policy) {
          throw new Error("OPA WASM evaluator not initialized");
        }
        this.policy.setData(this.dataDocument);
        const resultSet = this.policy.evaluate(input);
        const hasResult = Array.isArray(resultSet) && resultSet.length > 0;
        return hasResult ? resultSet[0].result : void 0;
      }
      /** Release the WASM policy instance (best-effort close/free). */
      async shutdown() {
        const loaded = this.policy;
        this.policy = null;
        if (!loaded) return;
        const release = typeof loaded.close === "function" ? loaded.close : typeof loaded.free === "function" ? loaded.free : null;
        if (release) {
          try {
            release.call(loaded);
          } catch {
            // best-effort resource release; ignore errors
          }
        }
      }
    };
  }
});
57896
+
57897
// src/enterprise/policy/opa-http-evaluator.ts
var OpaHttpEvaluator;
var init_opa_http_evaluator = __esm({
  "src/enterprise/policy/opa-http-evaluator.ts"() {
    "use strict";
    /**
     * Policy evaluator that POSTs inputs to a remote OPA server's REST API
     * (`/v1/data/<rule path>`). The constructor validates the URL and rejects
     * SSRF-prone targets (loopback, link-local, private ranges, *.internal).
     */
    OpaHttpEvaluator = class {
      baseUrl;
      timeout;
      constructor(baseUrl, timeout = 5e3) {
        let parsedUrl;
        try {
          parsedUrl = new URL(baseUrl);
        } catch {
          throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
        }
        const protocolOk = parsedUrl.protocol === "http:" || parsedUrl.protocol === "https:";
        if (!protocolOk) {
          throw new Error(
            `OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`
          );
        }
        if (this.isBlockedHostname(parsedUrl.hostname)) {
          throw new Error(
            `OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`
          );
        }
        // Strip trailing slashes so path concatenation stays clean.
        this.baseUrl = baseUrl.replace(/\/+$/, "");
        this.timeout = timeout;
      }
      /**
       * Check if a hostname is blocked due to SSRF concerns.
       *
       * Blocks:
       * - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
       * - Link-local addresses (169.254.x.x)
       * - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
       * - IPv6 unique local addresses (fd00::/8)
       * - Cloud metadata services (*.internal)
       */
      isBlockedHostname(hostname) {
        if (!hostname) return true;
        // IPv6 literals in URLs arrive bracketed ([::1]); strip the brackets.
        const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, "");
        if (normalized === "metadata.google.internal" || normalized.endsWith(".internal")) {
          return true;
        }
        if (normalized === "localhost" || normalized === "localhost.localdomain") {
          return true;
        }
        if (normalized === "::1" || normalized === "0:0:0:0:0:0:0:1") {
          return true;
        }
        const ipv4Match = normalized.match(/^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/);
        if (ipv4Match) {
          const octets = ipv4Match.slice(1, 5).map(Number);
          // Out-of-range octet → not a real IPv4 literal; treat as a name.
          if (octets.some((octet) => octet > 255)) {
            return false;
          }
          const [first, second] = octets;
          if (first === 127 || first === 0 || first === 10) {
            return true; // loopback, "this network", private 10/8
          }
          if (first === 169 && second === 254) {
            return true; // link-local
          }
          if (first === 172 && second >= 16 && second <= 31) {
            return true; // private 172.16/12
          }
          if (first === 192 && second === 168) {
            return true; // private 192.168/16
          }
        }
        // IPv6 unique-local (fc.. / fd..) and link-local (fe80:) prefixes.
        if (normalized.startsWith("fd") || normalized.startsWith("fc")) {
          return true;
        }
        if (normalized.startsWith("fe80:")) {
          return true;
        }
        return false;
      }
      /**
       * Evaluate a policy rule against an input document via OPA REST API.
       *
       * @param input - The input document to evaluate
       * @param rulePath - OPA rule path (e.g., 'visor/check/execute')
       * @returns The result object from OPA, or undefined on error
       */
      async evaluate(input, rulePath) {
        const encodedPath = rulePath.split("/").map((segment) => encodeURIComponent(segment)).join("/");
        const url = `${this.baseUrl}/v1/data/${encodedPath}`;
        // Enforce the configured timeout by aborting the in-flight fetch.
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), this.timeout);
        try {
          const response = await fetch(url, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ input }),
            signal: controller.signal
          });
          if (!response.ok) {
            throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
          }
          let body;
          try {
            body = await response.json();
          } catch (jsonErr) {
            throw new Error(
              `OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`
            );
          }
          return body?.result;
        } finally {
          clearTimeout(timer);
        }
      }
      /** Stateless HTTP client: nothing to release. */
      async shutdown() {
      }
    };
  }
});
58023
+
58024
// src/enterprise/policy/policy-input-builder.ts
var PolicyInputBuilder;
var init_policy_input_builder = __esm({
  "src/enterprise/policy/policy-input-builder.ts"() {
    "use strict";
    /**
     * Builds the `input` documents handed to the OPA policy engine for each
     * decision scope: check execution, tool invocation, and capability
     * resolution. Also resolves which configured roles apply to the actor.
     */
    PolicyInputBuilder = class {
      roles;
      actor;
      repository;
      pullRequest;
      constructor(policyConfig, actor, repository, pullRequest) {
        this.roles = policyConfig.roles || {};
        this.actor = actor;
        this.repository = repository;
        this.pullRequest = pullRequest;
      }
      /** Resolve which roles apply to the current actor. */
      resolveRoles() {
        const matched = [];
        for (const [roleName, roleConfig] of Object.entries(this.roles)) {
          if (!this.actorMatchesIdentity(roleConfig)) continue;
          // A slack_channels restriction is an extra gate on top of identity.
          if (roleConfig.slack_channels && roleConfig.slack_channels.length > 0) {
            const channelId = this.actor.slack?.channelId;
            if (!channelId || !roleConfig.slack_channels.includes(channelId)) {
              continue;
            }
          }
          matched.push(roleName);
        }
        return matched;
      }
      /**
       * True when the actor satisfies any one identity criterion of the role:
       * GitHub author association, GitHub login, Slack user id, or email
       * (case-insensitive).
       */
      actorMatchesIdentity(roleConfig) {
        const { authorAssociation, login, slack } = this.actor;
        if (roleConfig.author_association && authorAssociation && roleConfig.author_association.includes(authorAssociation)) {
          return true;
        }
        if (roleConfig.users && login && roleConfig.users.includes(login)) {
          return true;
        }
        if (roleConfig.slack_users && slack?.userId && roleConfig.slack_users.includes(slack.userId)) {
          return true;
        }
        if (roleConfig.emails && slack?.email) {
          const actorEmail = slack.email.toLowerCase();
          if (roleConfig.emails.some((candidate) => candidate.toLowerCase() === actorEmail)) {
            return true;
          }
        }
        return false;
      }
      /** Snapshot of the actor with resolved role names attached. */
      buildActor() {
        return {
          authorAssociation: this.actor.authorAssociation,
          login: this.actor.login,
          roles: this.resolveRoles(),
          isLocalMode: this.actor.isLocalMode,
          ...this.actor.slack && { slack: this.actor.slack }
        };
      }
      /** Input document for the `check.execute` decision scope. */
      forCheckExecution(check) {
        return {
          scope: "check.execute",
          check: {
            id: check.id,
            type: check.type,
            group: check.group,
            tags: check.tags,
            criticality: check.criticality,
            sandbox: check.sandbox,
            policy: check.policy
          },
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      /** Input document for the `tool.invoke` decision scope. */
      forToolInvocation(serverName, methodName, transport) {
        return {
          scope: "tool.invoke",
          tool: { serverName, methodName, transport },
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      /** Input document for the `capability.resolve` decision scope. */
      forCapabilityResolve(checkId, capabilities) {
        return {
          scope: "capability.resolve",
          check: { id: checkId, type: "ai" },
          capability: capabilities,
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
    };
  }
});
58118
+
58119
// src/enterprise/policy/opa-policy-engine.ts
var opa_policy_engine_exports = {};
__export(opa_policy_engine_exports, {
  OpaPolicyEngine: () => OpaPolicyEngine
});
var OpaPolicyEngine;
var init_opa_policy_engine = __esm({
  "src/enterprise/policy/opa-policy-engine.ts"() {
    "use strict";
    init_opa_wasm_evaluator();
    init_opa_http_evaluator();
    init_policy_input_builder();
    /**
     * Orchestrates OPA policy evaluation over either a local WASM evaluator
     * ("local" engine) or a remote OPA HTTP server ("remote" engine), and
     * applies the configured fallback ("deny" by default, "allow", or "warn")
     * whenever evaluation yields no result, fails, or times out.
     */
    OpaPolicyEngine = class {
      evaluator = null;
      fallback; // "deny" (default) | "allow" | "warn" — see constructor
      timeout; // per-evaluation budget in ms (default 5000)
      config;
      inputBuilder = null;
      logger = null;
      constructor(config) {
        this.config = config;
        this.fallback = config.fallback || "deny";
        this.timeout = config.timeout || 5e3;
      }
      /**
       * Build actor/repo/PR context from environment variables and construct
       * the evaluator selected by `config.engine`. Any engine value other
       * than "local"/"remote" disables policy checks (everything allowed).
       */
      async initialize(config) {
        // Logger is loaded lazily and best-effort; policy evaluation must not
        // fail just because logging is unavailable.
        try {
          this.logger = (init_logger(), __toCommonJS(logger_exports)).logger;
        } catch {
        }
        const actor = {
          authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
          login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
          isLocalMode: !process.env.GITHUB_ACTIONS
        };
        const repo = {
          owner: process.env.GITHUB_REPOSITORY_OWNER,
          name: process.env.GITHUB_REPOSITORY?.split("/")[1],
          branch: process.env.GITHUB_HEAD_REF,
          baseBranch: process.env.GITHUB_BASE_REF,
          event: process.env.GITHUB_EVENT_NAME
        };
        const prNum = process.env.GITHUB_PR_NUMBER ? parseInt(process.env.GITHUB_PR_NUMBER, 10) : void 0;
        const pullRequest = {
          // Guard against a non-numeric GITHUB_PR_NUMBER (parseInt → NaN).
          number: prNum !== void 0 && Number.isFinite(prNum) ? prNum : void 0
        };
        this.inputBuilder = new PolicyInputBuilder(config, actor, repo, pullRequest);
        if (config.engine === "local") {
          if (!config.rules) {
            throw new Error("OPA local mode requires `policy.rules` path to .wasm or .rego files");
          }
          const wasm = new OpaWasmEvaluator();
          await wasm.initialize(config.rules);
          if (config.data) {
            wasm.loadData(config.data);
          }
          this.evaluator = wasm;
        } else if (config.engine === "remote") {
          if (!config.url) {
            throw new Error("OPA remote mode requires `policy.url` pointing to OPA server");
          }
          this.evaluator = new OpaHttpEvaluator(config.url, this.timeout);
        } else {
          // No/unknown engine: all evaluate* methods short-circuit to allow.
          this.evaluator = null;
        }
      }
      /**
       * Update actor/repo/PR context (e.g., after PR info becomes available).
       * Called by the enterprise loader when engine context is enriched.
       */
      setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new PolicyInputBuilder(this.config, actor, repo, pullRequest);
      }
      /** Decide whether a check may execute. Allows when engine is disabled. */
      async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const cfg = checkConfig && typeof checkConfig === "object" ? checkConfig : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
          id: checkId,
          type: cfg.type || "ai",
          group: cfg.group,
          tags: cfg.tags,
          criticality: cfg.criticality,
          sandbox: cfg.sandbox,
          policy: policyOverride
        });
        return this.doEvaluate(input, this.resolveRulePath("check.execute", policyOverride?.rule));
      }
      /** Decide whether a tool call on a given server/method is permitted. */
      async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, "visor/tool/invoke");
      }
      /** Decide whether the requested capabilities may be resolved for a check. */
      async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, "visor/capability/resolve");
      }
      /** Release the underlying evaluator and drop the cached context. */
      async shutdown() {
        if (this.evaluator && "shutdown" in this.evaluator) {
          await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
      }
      // Per-check rule overrides may be given with or without the `visor/`
      // package prefix; default scopes map dots to path segments
      // (e.g. "check.execute" → "visor/check/execute").
      resolveRulePath(defaultScope, override) {
        if (override) {
          return override.startsWith("visor/") ? override : `visor/${override}`;
        }
        return `visor/${defaultScope.replace(/\./g, "/")}`;
      }
      // Runs one evaluation under this.timeout, then applies fallback
      // semantics: "warn" converts denials into allowed-with-warning, and any
      // evaluation error maps to allow iff fallback is "allow" or "warn".
      async doEvaluate(input, rulePath) {
        try {
          this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
          let timer;
          const timeoutPromise = new Promise((_resolve, reject) => {
            timer = setTimeout(() => reject(new Error("policy evaluation timeout")), this.timeout);
          });
          try {
            const result = await Promise.race([this.rawEvaluate(input, rulePath), timeoutPromise]);
            const decision = this.parseDecision(result);
            if (!decision.allowed && this.fallback === "warn") {
              // Warn mode: audit the denial but let the operation proceed.
              decision.allowed = true;
              decision.warn = true;
              decision.reason = `audit: ${decision.reason || "policy denied"}`;
            }
            this.logger?.debug(
              `[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || "none"}`
            );
            return decision;
          } finally {
            // Always cancel the pending timeout to avoid a stray rejection.
            if (timer) clearTimeout(timer);
          }
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: `policy evaluation failed, fallback=${this.fallback}`
          };
        }
      }
      // The WASM evaluator returns the whole `visor` package document, so its
      // output must be navigated down to the requested rule; the HTTP
      // evaluator queries the rule path on the server directly.
      async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof OpaWasmEvaluator) {
          const result = await this.evaluator.evaluate(input);
          return this.navigateWasmResult(result, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
      }
      /**
       * Navigate nested OPA WASM result tree to reach the specific rule's output.
       * The WASM entrypoint `-e visor` means the result root IS the visor package,
       * so we strip the `visor/` prefix and walk the remaining segments.
       */
      navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== "object") return result;
        const segments = rulePath.replace(/^visor\//, "").split("/");
        let current = result;
        for (const seg of segments) {
          if (current && typeof current === "object" && seg in current) {
            current = current[seg];
          } else {
            // Rule not defined in the policy tree → treated as "no result".
            return void 0;
          }
        }
        return current;
      }
      // Interpret a raw rule output as an allow/deny decision.
      // NOTE(review): a non-object result (e.g. a bare boolean `false` from a
      // simple `allow` rule) falls through to `result.allowed !== false` and
      // is therefore treated as allowed; rules appear to be expected to return
      // objects with an `allowed` field — confirm against the policy contract.
      parseDecision(result) {
        if (result === void 0 || result === null) {
          // No result at all: apply the configured fallback.
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: this.fallback === "warn" ? "audit: no policy result" : "no policy result"
          };
        }
        const allowed = result.allowed !== false;
        const decision = {
          allowed,
          reason: result.reason
        };
        if (result.capabilities) {
          decision.capabilities = result.capabilities;
        }
        return decision;
      }
    };
  }
});
58307
+
58308
// src/enterprise/scheduler/knex-store.ts
// Export registration for the knex-backed schedule store module.
var knex_store_exports = {};
__export(knex_store_exports, {
  KnexStoreBackend: () => KnexStoreBackend
});
58313
/**
 * Normalize a numeric DB column to a JS number.
 * Some drivers return bigint columns as strings; those are parsed base-10.
 * `null` and `undefined` both map to `undefined`.
 */
function toNum(val) {
  if (val == null) return void 0; // matches both null and undefined
  if (typeof val === "string") return parseInt(val, 10);
  return val;
}
58317
/**
 * Parse a JSON text column, returning `undefined` instead of throwing for
 * empty/null values or malformed JSON.
 */
function safeJsonParse2(value) {
  if (!value) {
    return void 0;
  }
  let parsed;
  try {
    parsed = JSON.parse(value);
  } catch {
    parsed = void 0; // malformed JSON is treated as absent
  }
  return parsed;
}
58325
/**
 * Map a `message_triggers` DB row (snake_case columns; JSON fields stored as
 * text; booleans possibly stored as 0/1 depending on the driver) into a
 * trigger object. JSON and numeric columns are parsed leniently via the
 * module helpers safeJsonParse2/toNum.
 */
function fromTriggerRow2(row) {
  // Drivers differ on boolean representation (true/false vs 1/0).
  const truthy = (flag) => flag === true || flag === 1;
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: row.creator_context ?? void 0,
    creatorName: row.creator_name ?? void 0,
    description: row.description ?? void 0,
    channels: safeJsonParse2(row.channels),
    fromUsers: safeJsonParse2(row.from_users),
    fromBots: truthy(row.from_bots),
    contains: safeJsonParse2(row.contains),
    matchPattern: row.match_pattern ?? void 0,
    threads: row.threads,
    workflow: row.workflow,
    inputs: safeJsonParse2(row.inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    enabled: truthy(row.enabled),
    createdAt: toNum(row.created_at)
  };
}
58346
/**
 * Serialize a trigger object into a `message_triggers` insert row:
 * camelCase → snake_case, structured fields JSON-encoded as text, and
 * absent optionals written as SQL NULL.
 */
function toTriggerInsertRow(trigger) {
  // JSON-encode a structured field, or store NULL when it is absent.
  const asJsonColumn = (value) => value ? JSON.stringify(value) : null;
  return {
    id: trigger.id,
    creator_id: trigger.creatorId,
    creator_context: trigger.creatorContext ?? null,
    creator_name: trigger.creatorName ?? null,
    description: trigger.description ?? null,
    channels: asJsonColumn(trigger.channels),
    from_users: asJsonColumn(trigger.fromUsers),
    from_bots: trigger.fromBots,
    contains: asJsonColumn(trigger.contains),
    match_pattern: trigger.matchPattern ?? null,
    threads: trigger.threads,
    workflow: trigger.workflow,
    inputs: asJsonColumn(trigger.inputs),
    output_context: asJsonColumn(trigger.outputContext),
    status: trigger.status,
    enabled: trigger.enabled,
    created_at: trigger.createdAt
  };
}
58367
/**
 * Map a `schedules` DB row into a Schedule object. Timestamp columns may come
 * back as strings (bigint), JSON columns as text, and booleans as 0/1 — all
 * normalized via the module helpers toNum/safeJsonParse2.
 */
function fromDbRow2(row) {
  // Drivers differ on boolean representation (true/false vs 1/0).
  const recurring = row.is_recurring === true || row.is_recurring === 1;
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: row.creator_context ?? void 0,
    creatorName: row.creator_name ?? void 0,
    timezone: row.timezone,
    schedule: row.schedule_expr,
    runAt: toNum(row.run_at),
    isRecurring: recurring,
    originalExpression: row.original_expression,
    workflow: row.workflow ?? void 0,
    workflowInputs: safeJsonParse2(row.workflow_inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    createdAt: toNum(row.created_at),
    lastRunAt: toNum(row.last_run_at),
    nextRunAt: toNum(row.next_run_at),
    runCount: row.run_count,
    failureCount: row.failure_count,
    lastError: row.last_error ?? void 0,
    previousResponse: row.previous_response ?? void 0
  };
}
58391
/**
 * Serialize a Schedule object into a `schedules` insert row:
 * camelCase → snake_case, structured fields JSON-encoded as text, and
 * absent optionals written as SQL NULL.
 */
function toInsertRow(schedule) {
  // JSON-encode a structured field, or store NULL when it is absent.
  const asJsonColumn = (value) => value ? JSON.stringify(value) : null;
  return {
    id: schedule.id,
    creator_id: schedule.creatorId,
    creator_context: schedule.creatorContext ?? null,
    creator_name: schedule.creatorName ?? null,
    timezone: schedule.timezone,
    schedule_expr: schedule.schedule,
    run_at: schedule.runAt ?? null,
    is_recurring: schedule.isRecurring,
    original_expression: schedule.originalExpression,
    workflow: schedule.workflow ?? null,
    workflow_inputs: asJsonColumn(schedule.workflowInputs),
    output_context: asJsonColumn(schedule.outputContext),
    status: schedule.status,
    created_at: schedule.createdAt,
    last_run_at: schedule.lastRunAt ?? null,
    next_run_at: schedule.nextRunAt ?? null,
    run_count: schedule.runCount,
    failure_count: schedule.failureCount,
    last_error: schedule.lastError ?? null,
    previous_response: schedule.previousResponse ?? null
  };
}
58415
+ var fs24, path29, import_uuid2, KnexStoreBackend;
58416
+ var init_knex_store = __esm({
58417
+ "src/enterprise/scheduler/knex-store.ts"() {
58418
+ "use strict";
58419
+ fs24 = __toESM(require("fs"));
58420
+ path29 = __toESM(require("path"));
58421
+ import_uuid2 = require("uuid");
58422
+ init_logger();
58423
+ KnexStoreBackend = class {
58424
+ knex = null;
58425
+ driver;
58426
+ connection;
58427
+ constructor(driver, storageConfig, _haConfig) {
58428
+ this.driver = driver;
58429
+ this.connection = storageConfig.connection || {};
58430
+ }
58431
+ async initialize() {
58432
+ const { createRequire } = require("module");
58433
+ const runtimeRequire = createRequire(__filename);
58434
+ let knexFactory;
58435
+ try {
58436
+ knexFactory = runtimeRequire("knex");
58437
+ } catch (err) {
58438
+ const code = err?.code;
58439
+ if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
58440
+ throw new Error(
58441
+ "knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
58442
+ );
58443
+ }
58444
+ throw err;
58445
+ }
58446
+ const clientMap = {
58447
+ postgresql: "pg",
58448
+ mysql: "mysql2",
58449
+ mssql: "tedious"
58450
+ };
58451
+ const client = clientMap[this.driver];
58452
+ let connection;
58453
+ if (this.connection.connection_string) {
58454
+ connection = this.connection.connection_string;
58455
+ } else if (this.driver === "mssql") {
58456
+ connection = this.buildMssqlConnection();
58457
+ } else {
58458
+ connection = this.buildStandardConnection();
58459
+ }
58460
+ this.knex = knexFactory({
58461
+ client,
58462
+ connection,
58463
+ pool: {
58464
+ min: this.connection.pool?.min ?? 0,
58465
+ max: this.connection.pool?.max ?? 10
58466
+ }
58467
+ });
58468
+ await this.migrateSchema();
58469
+ logger.info(`[KnexStore] Initialized (${this.driver})`);
58470
+ }
58471
+ buildStandardConnection() {
58472
+ return {
58473
+ host: this.connection.host || "localhost",
58474
+ port: this.connection.port,
58475
+ database: this.connection.database || "visor",
58476
+ user: this.connection.user,
58477
+ password: this.connection.password,
58478
+ ssl: this.resolveSslConfig()
58479
+ };
58480
+ }
58481
+ buildMssqlConnection() {
58482
+ const ssl = this.connection.ssl;
58483
+ const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
58484
+ return {
58485
+ server: this.connection.host || "localhost",
58486
+ port: this.connection.port,
58487
+ database: this.connection.database || "visor",
58488
+ user: this.connection.user,
58489
+ password: this.connection.password,
58490
+ options: {
58491
+ encrypt: sslEnabled,
58492
+ trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
58493
+ }
58494
+ };
58495
+ }
58496
+ resolveSslConfig() {
58497
+ const ssl = this.connection.ssl;
58498
+ if (ssl === false || ssl === void 0) return false;
58499
+ if (ssl === true) return { rejectUnauthorized: true };
58500
+ if (ssl.enabled === false) return false;
58501
+ const result = {
58502
+ rejectUnauthorized: ssl.reject_unauthorized !== false
58503
+ };
58504
+ if (ssl.ca) {
58505
+ const caPath = this.validateSslPath(ssl.ca, "CA certificate");
58506
+ result.ca = fs24.readFileSync(caPath, "utf8");
58507
+ }
58508
+ if (ssl.cert) {
58509
+ const certPath = this.validateSslPath(ssl.cert, "client certificate");
58510
+ result.cert = fs24.readFileSync(certPath, "utf8");
58511
+ }
58512
+ if (ssl.key) {
58513
+ const keyPath = this.validateSslPath(ssl.key, "client key");
58514
+ result.key = fs24.readFileSync(keyPath, "utf8");
58515
+ }
58516
+ return result;
58517
+ }
58518
+ validateSslPath(filePath, label) {
58519
+ const resolved = path29.resolve(filePath);
58520
+ if (resolved !== path29.normalize(resolved)) {
58521
+ throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
58522
+ }
58523
+ if (!fs24.existsSync(resolved)) {
58524
+ throw new Error(`SSL ${label} not found: ${filePath}`);
58525
+ }
58526
+ return resolved;
58527
+ }
58528
+ async shutdown() {
58529
+ if (this.knex) {
58530
+ await this.knex.destroy();
58531
+ this.knex = null;
58532
+ }
58533
+ }
58534
+ async migrateSchema() {
58535
+ const knex = this.getKnex();
58536
+ const exists = await knex.schema.hasTable("schedules");
58537
+ if (!exists) {
58538
+ await knex.schema.createTable("schedules", (table) => {
58539
+ table.string("id", 36).primary();
58540
+ table.string("creator_id", 255).notNullable().index();
58541
+ table.string("creator_context", 255);
58542
+ table.string("creator_name", 255);
58543
+ table.string("timezone", 64).notNullable().defaultTo("UTC");
58544
+ table.string("schedule_expr", 255);
58545
+ table.bigInteger("run_at");
58546
+ table.boolean("is_recurring").notNullable();
58547
+ table.text("original_expression");
58548
+ table.string("workflow", 255);
58549
+ table.text("workflow_inputs");
58550
+ table.text("output_context");
58551
+ table.string("status", 20).notNullable().index();
58552
+ table.bigInteger("created_at").notNullable();
58553
+ table.bigInteger("last_run_at");
58554
+ table.bigInteger("next_run_at");
58555
+ table.integer("run_count").notNullable().defaultTo(0);
58556
+ table.integer("failure_count").notNullable().defaultTo(0);
58557
+ table.text("last_error");
58558
+ table.text("previous_response");
58559
+ table.index(["status", "next_run_at"]);
58560
+ });
58561
+ }
58562
+ const triggersExist = await knex.schema.hasTable("message_triggers");
58563
+ if (!triggersExist) {
58564
+ await knex.schema.createTable("message_triggers", (table) => {
58565
+ table.string("id", 36).primary();
58566
+ table.string("creator_id", 255).notNullable().index();
58567
+ table.string("creator_context", 255);
58568
+ table.string("creator_name", 255);
58569
+ table.text("description");
58570
+ table.text("channels");
58571
+ table.text("from_users");
58572
+ table.boolean("from_bots").notNullable().defaultTo(false);
58573
+ table.text("contains");
58574
+ table.text("match_pattern");
58575
+ table.string("threads", 20).notNullable().defaultTo("any");
58576
+ table.string("workflow", 255).notNullable();
58577
+ table.text("inputs");
58578
+ table.text("output_context");
58579
+ table.string("status", 20).notNullable().defaultTo("active").index();
58580
+ table.boolean("enabled").notNullable().defaultTo(true);
58581
+ table.bigInteger("created_at").notNullable();
58582
+ });
58583
+ }
58584
+ const locksExist = await knex.schema.hasTable("scheduler_locks");
58585
+ if (!locksExist) {
58586
+ await knex.schema.createTable("scheduler_locks", (table) => {
58587
+ table.string("lock_id", 255).primary();
58588
+ table.string("node_id", 255).notNullable();
58589
+ table.string("lock_token", 36).notNullable();
58590
+ table.bigInteger("acquired_at").notNullable();
58591
+ table.bigInteger("expires_at").notNullable();
58592
+ });
58593
+ }
58594
+ }
58595
+ getKnex() {
58596
+ if (!this.knex) {
58597
+ throw new Error("[KnexStore] Not initialized. Call initialize() first.");
58598
+ }
58599
+ return this.knex;
58600
+ }
58601
+ // --- CRUD ---
58602
+ async create(schedule) {
58603
+ const knex = this.getKnex();
58604
+ const newSchedule = {
58605
+ ...schedule,
58606
+ id: (0, import_uuid2.v4)(),
58607
+ createdAt: Date.now(),
58608
+ runCount: 0,
58609
+ failureCount: 0,
58610
+ status: "active"
58611
+ };
58612
+ await knex("schedules").insert(toInsertRow(newSchedule));
58613
+ logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
58614
+ return newSchedule;
58615
+ }
58616
+ async importSchedule(schedule) {
58617
+ const knex = this.getKnex();
58618
+ const existing = await knex("schedules").where("id", schedule.id).first();
58619
+ if (existing) return;
58620
+ await knex("schedules").insert(toInsertRow(schedule));
58621
+ }
58622
+ async get(id) {
58623
+ const knex = this.getKnex();
58624
+ const row = await knex("schedules").where("id", id).first();
58625
+ return row ? fromDbRow2(row) : void 0;
58626
+ }
58627
+ async update(id, patch) {
58628
+ const knex = this.getKnex();
58629
+ const existing = await knex("schedules").where("id", id).first();
58630
+ if (!existing) return void 0;
58631
+ const current = fromDbRow2(existing);
58632
+ const updated = { ...current, ...patch, id: current.id };
58633
+ const row = toInsertRow(updated);
58634
+ delete row.id;
58635
+ await knex("schedules").where("id", id).update(row);
58636
+ return updated;
58637
+ }
58638
+ async delete(id) {
58639
+ const knex = this.getKnex();
58640
+ const deleted = await knex("schedules").where("id", id).del();
58641
+ if (deleted > 0) {
58642
+ logger.info(`[KnexStore] Deleted schedule ${id}`);
58643
+ return true;
58644
+ }
58645
+ return false;
58646
+ }
58647
+ // --- Queries ---
58648
+ async getByCreator(creatorId) {
58649
+ const knex = this.getKnex();
58650
+ const rows = await knex("schedules").where("creator_id", creatorId);
58651
+ return rows.map((r) => fromDbRow2(r));
58652
+ }
58653
+ async getActiveSchedules() {
58654
+ const knex = this.getKnex();
58655
+ const rows = await knex("schedules").where("status", "active");
58656
+ return rows.map((r) => fromDbRow2(r));
58657
+ }
58658
+ async getDueSchedules(now) {
58659
+ const ts = now ?? Date.now();
58660
+ const knex = this.getKnex();
58661
+ const bFalse = this.driver === "mssql" ? 0 : false;
58662
+ const bTrue = this.driver === "mssql" ? 1 : true;
58663
+ const rows = await knex("schedules").where("status", "active").andWhere(function() {
58664
+ this.where(function() {
58665
+ this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
58666
+ }).orWhere(function() {
58667
+ this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
58668
+ });
58669
+ });
58670
+ return rows.map((r) => fromDbRow2(r));
58671
+ }
58672
+ async findByWorkflow(creatorId, workflowName) {
58673
+ const knex = this.getKnex();
58674
+ const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
58675
+ const pattern = `%${escaped}%`;
58676
+ const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
58677
+ return rows.map((r) => fromDbRow2(r));
58678
+ }
58679
+ async getAll() {
58680
+ const knex = this.getKnex();
58681
+ const rows = await knex("schedules");
58682
+ return rows.map((r) => fromDbRow2(r));
58683
+ }
58684
+ async getStats() {
58685
+ const knex = this.getKnex();
58686
+ const boolTrue = this.driver === "mssql" ? "1" : "true";
58687
+ const boolFalse = this.driver === "mssql" ? "0" : "false";
58688
+ const result = await knex("schedules").select(
58689
+ knex.raw("COUNT(*) as total"),
58690
+ knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
58691
+ knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
58692
+ knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
58693
+ knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
58694
+ knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
58695
+ knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
58696
+ ).first();
58697
+ return {
58698
+ total: Number(result.total) || 0,
58699
+ active: Number(result.active) || 0,
58700
+ paused: Number(result.paused) || 0,
58701
+ completed: Number(result.completed) || 0,
58702
+ failed: Number(result.failed) || 0,
58703
+ recurring: Number(result.recurring) || 0,
58704
+ oneTime: Number(result.one_time) || 0
58705
+ };
58706
+ }
58707
+ async validateLimits(creatorId, isRecurring, limits) {
58708
+ const knex = this.getKnex();
58709
+ if (limits.maxGlobal) {
58710
+ const result = await knex("schedules").count("* as cnt").first();
58711
+ if (Number(result?.cnt) >= limits.maxGlobal) {
58712
+ throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
58713
+ }
58714
+ }
58715
+ if (limits.maxPerUser) {
58716
+ const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
58717
+ if (Number(result?.cnt) >= limits.maxPerUser) {
58718
+ throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
58719
+ }
58720
+ }
58721
+ if (isRecurring && limits.maxRecurringPerUser) {
58722
+ const bTrue = this.driver === "mssql" ? 1 : true;
58723
+ const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
58724
+ if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
58725
+ throw new Error(
58726
+ `You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
58727
+ );
58728
+ }
58729
+ }
58730
+ }
58731
+ // --- HA Distributed Locking (via scheduler_locks table) ---
58732
+ async tryAcquireLock(lockId, nodeId, ttlSeconds) {
58733
+ const knex = this.getKnex();
58734
+ const now = Date.now();
58735
+ const expiresAt = now + ttlSeconds * 1e3;
58736
+ const token = (0, import_uuid2.v4)();
58737
+ const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
58738
+ node_id: nodeId,
58739
+ lock_token: token,
58740
+ acquired_at: now,
58741
+ expires_at: expiresAt
58742
+ });
58743
+ if (updated > 0) return token;
58744
+ try {
58745
+ await knex("scheduler_locks").insert({
58746
+ lock_id: lockId,
58747
+ node_id: nodeId,
58748
+ lock_token: token,
58749
+ acquired_at: now,
58750
+ expires_at: expiresAt
58751
+ });
58752
+ return token;
58753
+ } catch {
58754
+ return null;
58755
+ }
58756
+ }
58757
+ async releaseLock(lockId, lockToken) {
58758
+ const knex = this.getKnex();
58759
+ await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
58760
+ }
58761
+ async renewLock(lockId, lockToken, ttlSeconds) {
58762
+ const knex = this.getKnex();
58763
+ const now = Date.now();
58764
+ const expiresAt = now + ttlSeconds * 1e3;
58765
+ const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
58766
+ return updated > 0;
58767
+ }
58768
+ async flush() {
58769
+ }
58770
+ // --- Message Trigger CRUD ---
58771
+ async createTrigger(trigger) {
58772
+ const knex = this.getKnex();
58773
+ const newTrigger = {
58774
+ ...trigger,
58775
+ id: (0, import_uuid2.v4)(),
58776
+ createdAt: Date.now()
58777
+ };
58778
+ await knex("message_triggers").insert(toTriggerInsertRow(newTrigger));
58779
+ logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
58780
+ return newTrigger;
58781
+ }
58782
+ async getTrigger(id) {
58783
+ const knex = this.getKnex();
58784
+ const row = await knex("message_triggers").where("id", id).first();
58785
+ return row ? fromTriggerRow2(row) : void 0;
58786
+ }
58787
+ async updateTrigger(id, patch) {
58788
+ const knex = this.getKnex();
58789
+ const existing = await knex("message_triggers").where("id", id).first();
58790
+ if (!existing) return void 0;
58791
+ const current = fromTriggerRow2(existing);
58792
+ const updated = {
58793
+ ...current,
58794
+ ...patch,
58795
+ id: current.id,
58796
+ createdAt: current.createdAt
58797
+ };
58798
+ const row = toTriggerInsertRow(updated);
58799
+ delete row.id;
58800
+ await knex("message_triggers").where("id", id).update(row);
58801
+ return updated;
58802
+ }
58803
+ async deleteTrigger(id) {
58804
+ const knex = this.getKnex();
58805
+ const deleted = await knex("message_triggers").where("id", id).del();
58806
+ if (deleted > 0) {
58807
+ logger.info(`[KnexStore] Deleted trigger ${id}`);
58808
+ return true;
58809
+ }
58810
+ return false;
58811
+ }
58812
+ async getTriggersByCreator(creatorId) {
58813
+ const knex = this.getKnex();
58814
+ const rows = await knex("message_triggers").where("creator_id", creatorId);
58815
+ return rows.map((r) => fromTriggerRow2(r));
58816
+ }
58817
+ async getActiveTriggers() {
58818
+ const knex = this.getKnex();
58819
+ const rows = await knex("message_triggers").where("status", "active").where("enabled", this.driver === "mssql" ? 1 : true);
58820
+ return rows.map((r) => fromTriggerRow2(r));
58821
+ }
58822
+ };
58823
+ }
58824
+ });
58825
+
58826
+ // src/enterprise/loader.ts
58827
+ var loader_exports = {};
58828
+ __export(loader_exports, {
58829
+ loadEnterprisePolicyEngine: () => loadEnterprisePolicyEngine,
58830
+ loadEnterpriseStoreBackend: () => loadEnterpriseStoreBackend
58831
+ });
58832
+ async function loadEnterprisePolicyEngine(config) {
58833
+ try {
58834
+ const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
58835
+ const validator = new LicenseValidator2();
58836
+ const license = await validator.loadAndValidate();
58837
+ if (!license || !validator.hasFeature("policy")) {
58838
+ return new DefaultPolicyEngine();
58839
+ }
58840
+ if (validator.isInGracePeriod()) {
58841
+ console.warn(
58842
+ "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
58843
+ );
58844
+ }
58845
+ const { OpaPolicyEngine: OpaPolicyEngine2 } = await Promise.resolve().then(() => (init_opa_policy_engine(), opa_policy_engine_exports));
58846
+ const engine = new OpaPolicyEngine2(config);
58847
+ await engine.initialize(config);
58848
+ return engine;
58849
+ } catch (err) {
58850
+ const msg = err instanceof Error ? err.message : String(err);
58851
+ try {
58852
+ const { logger: logger2 } = (init_logger(), __toCommonJS(logger_exports));
58853
+ logger2.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
58854
+ } catch {
58855
+ }
58856
+ return new DefaultPolicyEngine();
58857
+ }
58858
+ }
58859
+ async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
58860
+ const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
58861
+ const validator = new LicenseValidator2();
58862
+ const license = await validator.loadAndValidate();
58863
+ if (!license || !validator.hasFeature("scheduler-sql")) {
58864
+ throw new Error(
58865
+ `The ${driver} schedule storage driver requires a Visor Enterprise license with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`
58866
+ );
58867
+ }
58868
+ if (validator.isInGracePeriod()) {
58869
+ console.warn(
58870
+ "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
58871
+ );
58872
+ }
58873
+ const { KnexStoreBackend: KnexStoreBackend2 } = await Promise.resolve().then(() => (init_knex_store(), knex_store_exports));
58874
+ return new KnexStoreBackend2(driver, storageConfig, haConfig);
58875
+ }
58876
+ var init_loader = __esm({
58877
+ "src/enterprise/loader.ts"() {
58878
+ "use strict";
58879
+ init_default_engine();
58880
+ }
58881
+ });
58882
+
57452
58883
  // src/event-bus/event-bus.ts
57453
58884
  var event_bus_exports = {};
57454
58885
  __export(event_bus_exports, {
@@ -58151,8 +59582,8 @@ var init_github_comments = __esm({
58151
59582
  * Update existing comment or create new one with collision detection
58152
59583
  */
58153
59584
  async updateOrCreateComment(owner, repo, prNumber, content, options = {}) {
58154
- return new Promise((resolve15, reject) => {
58155
- this._writeQueue = this._writeQueue.then(() => this._doUpdateOrCreate(owner, repo, prNumber, content, options)).then(resolve15, reject);
59585
+ return new Promise((resolve20, reject) => {
59586
+ this._writeQueue = this._writeQueue.then(() => this._doUpdateOrCreate(owner, repo, prNumber, content, options)).then(resolve20, reject);
58156
59587
  });
58157
59588
  }
58158
59589
  async _doUpdateOrCreate(owner, repo, prNumber, content, options = {}) {
@@ -58363,8 +59794,8 @@ ${content}
58363
59794
  * Sleep utility
58364
59795
  */
58365
59796
  sleep(ms) {
58366
- return new Promise((resolve15) => {
58367
- const t = setTimeout(resolve15, ms);
59797
+ return new Promise((resolve20) => {
59798
+ const t = setTimeout(resolve20, ms);
58368
59799
  if (typeof t.unref === "function") {
58369
59800
  try {
58370
59801
  t.unref();
@@ -58649,8 +60080,8 @@ ${end}`);
58649
60080
  async updateGroupedComment(ctx, comments, group, changedIds) {
58650
60081
  const existingLock = this.updateLocks.get(group);
58651
60082
  let resolveLock;
58652
- const ourLock = new Promise((resolve15) => {
58653
- resolveLock = resolve15;
60083
+ const ourLock = new Promise((resolve20) => {
60084
+ resolveLock = resolve20;
58654
60085
  });
58655
60086
  this.updateLocks.set(group, ourLock);
58656
60087
  try {
@@ -58981,7 +60412,7 @@ ${blocks}
58981
60412
  * Sleep utility for enforcing delays
58982
60413
  */
58983
60414
  sleep(ms) {
58984
- return new Promise((resolve15) => setTimeout(resolve15, ms));
60415
+ return new Promise((resolve20) => setTimeout(resolve20, ms));
58985
60416
  }
58986
60417
  };
58987
60418
  }
@@ -59378,7 +60809,16 @@ var init_slack_frontend = __esm({
59378
60809
  this.subs.push(
59379
60810
  bus.on("StateTransition", async (env) => {
59380
60811
  const ev = env && env.payload || env;
59381
- if (ev && (ev.to === "Completed" || ev.to === "Error")) {
60812
+ if (ev && ev.to === "Completed") {
60813
+ await this.finalizeReactions(ctx).catch(() => {
60814
+ });
60815
+ } else if (ev && ev.to === "Error") {
60816
+ if (!this.errorNotified) {
60817
+ await this.maybePostError(ctx, "Run failed", "Workflow finished with errors").catch(
60818
+ () => {
60819
+ }
60820
+ );
60821
+ }
59382
60822
  await this.finalizeReactions(ctx).catch(() => {
59383
60823
  });
59384
60824
  }
@@ -59388,7 +60828,7 @@ var init_slack_frontend = __esm({
59388
60828
  bus.on("Shutdown", async (env) => {
59389
60829
  const ev = env && env.payload || env;
59390
60830
  const message = ev?.error?.message || "Fatal error";
59391
- await this.maybePostError(ctx, "Run failed", message).catch(() => {
60831
+ await this.forcePostError(ctx, "Run failed", message).catch(() => {
59392
60832
  });
59393
60833
  })
59394
60834
  );
@@ -59514,6 +60954,16 @@ var init_slack_frontend = __esm({
59514
60954
  }
59515
60955
  async maybePostError(ctx, title, message, checkId) {
59516
60956
  if (this.errorNotified) return;
60957
+ return this.postErrorToSlack(ctx, title, message, checkId);
60958
+ }
60959
+ /**
60960
+ * Post error to Slack regardless of errorNotified flag.
60961
+ * Used for fatal/shutdown errors that must always reach the user.
60962
+ */
60963
+ async forcePostError(ctx, title, message, checkId) {
60964
+ return this.postErrorToSlack(ctx, title, message, checkId);
60965
+ }
60966
+ async postErrorToSlack(ctx, title, message, checkId) {
59517
60967
  const slack = this.getSlack(ctx);
59518
60968
  if (!slack) return;
59519
60969
  const payload = this.getInboundSlackPayload(ctx);
@@ -60843,11 +62293,11 @@ var require_request3 = __commonJS({
60843
62293
  "use strict";
60844
62294
  var __awaiter = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) {
60845
62295
  function adopt(value) {
60846
- return value instanceof P ? value : new P(function(resolve15) {
60847
- resolve15(value);
62296
+ return value instanceof P ? value : new P(function(resolve20) {
62297
+ resolve20(value);
60848
62298
  });
60849
62299
  }
60850
- return new (P || (P = Promise))(function(resolve15, reject) {
62300
+ return new (P || (P = Promise))(function(resolve20, reject) {
60851
62301
  function fulfilled(value) {
60852
62302
  try {
60853
62303
  step(generator.next(value));
@@ -60863,7 +62313,7 @@ var require_request3 = __commonJS({
60863
62313
  }
60864
62314
  }
60865
62315
  function step(result) {
60866
- result.done ? resolve15(result.value) : adopt(result.value).then(fulfilled, rejected);
62316
+ result.done ? resolve20(result.value) : adopt(result.value).then(fulfilled, rejected);
60867
62317
  }
60868
62318
  step((generator = generator.apply(thisArg, _arguments || [])).next());
60869
62319
  });
@@ -60887,9 +62337,9 @@ var require_request3 = __commonJS({
60887
62337
  HttpMethod2["PATCH"] = "PATCH";
60888
62338
  })(HttpMethod = exports2.HttpMethod || (exports2.HttpMethod = {}));
60889
62339
  var SvixRequest = class {
60890
- constructor(method, path29) {
62340
+ constructor(method, path33) {
60891
62341
  this.method = method;
60892
- this.path = path29;
62342
+ this.path = path33;
60893
62343
  this.queryParams = {};
60894
62344
  this.headerParams = {};
60895
62345
  }
@@ -60992,7 +62442,7 @@ var require_request3 = __commonJS({
60992
62442
  }
60993
62443
  function sendWithRetry(url, init, retryScheduleInMs, nextInterval = 50, triesLeft = 2, fetchImpl = fetch, retryCount = 1) {
60994
62444
  return __awaiter(this, void 0, void 0, function* () {
60995
- const sleep = (interval) => new Promise((resolve15) => setTimeout(resolve15, interval));
62445
+ const sleep = (interval) => new Promise((resolve20) => setTimeout(resolve20, interval));
60996
62446
  try {
60997
62447
  const response = yield fetchImpl(url, init);
60998
62448
  if (triesLeft <= 0 || response.status < 500) {
@@ -70066,7 +71516,7 @@ ${message}`;
70066
71516
  });
70067
71517
 
70068
71518
  // src/agent-protocol/task-store.ts
70069
- function safeJsonParse2(value) {
71519
+ function safeJsonParse3(value) {
70070
71520
  if (!value) return void 0;
70071
71521
  try {
70072
71522
  return JSON.parse(value);
@@ -70083,12 +71533,12 @@ function taskRowToAgentTask(row) {
70083
71533
  context_id: row.context_id,
70084
71534
  status: {
70085
71535
  state: row.state,
70086
- message: safeJsonParse2(row.status_message),
71536
+ message: safeJsonParse3(row.status_message),
70087
71537
  timestamp: row.updated_at
70088
71538
  },
70089
- artifacts: safeJsonParse2(row.artifacts) ?? [],
70090
- history: safeJsonParse2(row.history) ?? [],
70091
- metadata: safeJsonParse2(row.request_metadata),
71539
+ artifacts: safeJsonParse3(row.artifacts) ?? [],
71540
+ history: safeJsonParse3(row.history) ?? [],
71541
+ metadata: safeJsonParse3(row.request_metadata),
70092
71542
  workflow_id: row.workflow_id ?? void 0
70093
71543
  };
70094
71544
  }
@@ -70325,7 +71775,7 @@ var init_task_store = __esm({
70325
71775
  const db = this.getDb();
70326
71776
  const row = db.prepare("SELECT artifacts FROM agent_tasks WHERE id = ?").get(taskId);
70327
71777
  if (!row) throw new TaskNotFoundError(taskId);
70328
- const artifacts = safeJsonParse2(row.artifacts) ?? [];
71778
+ const artifacts = safeJsonParse3(row.artifacts) ?? [];
70329
71779
  artifacts.push(artifact);
70330
71780
  db.prepare("UPDATE agent_tasks SET artifacts = ?, updated_at = ? WHERE id = ?").run(
70331
71781
  JSON.stringify(artifacts),
@@ -70337,7 +71787,7 @@ var init_task_store = __esm({
70337
71787
  const db = this.getDb();
70338
71788
  const row = db.prepare("SELECT history FROM agent_tasks WHERE id = ?").get(taskId);
70339
71789
  if (!row) throw new TaskNotFoundError(taskId);
70340
- const history = safeJsonParse2(row.history) ?? [];
71790
+ const history = safeJsonParse3(row.history) ?? [];
70341
71791
  history.push(message);
70342
71792
  db.prepare("UPDATE agent_tasks SET history = ?, updated_at = ? WHERE id = ?").run(
70343
71793
  JSON.stringify(history),
@@ -70849,13 +72299,13 @@ __export(a2a_frontend_exports, {
70849
72299
  resultToArtifacts: () => resultToArtifacts
70850
72300
  });
70851
72301
  function readJsonBody(req) {
70852
- return new Promise((resolve15, reject) => {
72302
+ return new Promise((resolve20, reject) => {
70853
72303
  const chunks = [];
70854
72304
  req.on("data", (chunk) => chunks.push(chunk));
70855
72305
  req.on("end", () => {
70856
72306
  try {
70857
72307
  const body = Buffer.concat(chunks).toString("utf8");
70858
- resolve15(body ? JSON.parse(body) : {});
72308
+ resolve20(body ? JSON.parse(body) : {});
70859
72309
  } catch {
70860
72310
  reject(new ParseError("Malformed JSON body"));
70861
72311
  }
@@ -71098,12 +72548,12 @@ var init_a2a_frontend = __esm({
71098
72548
  }
71099
72549
  const port = this.config.port ?? 9e3;
71100
72550
  const host = this.config.host ?? "0.0.0.0";
71101
- await new Promise((resolve15) => {
72551
+ await new Promise((resolve20) => {
71102
72552
  this.server.listen(port, host, () => {
71103
72553
  const addr = this.server.address();
71104
72554
  this._boundPort = typeof addr === "object" && addr ? addr.port : port;
71105
72555
  logger.info(`A2A server listening on ${host}:${this._boundPort}`);
71106
- resolve15();
72556
+ resolve20();
71107
72557
  });
71108
72558
  });
71109
72559
  if (this.agentCard) {
@@ -71127,8 +72577,8 @@ var init_a2a_frontend = __esm({
71127
72577
  }
71128
72578
  this.streamManager.shutdown();
71129
72579
  if (this.server) {
71130
- await new Promise((resolve15, reject) => {
71131
- this.server.close((err) => err ? reject(err) : resolve15());
72580
+ await new Promise((resolve20, reject) => {
72581
+ this.server.close((err) => err ? reject(err) : resolve20());
71132
72582
  });
71133
72583
  this.server = null;
71134
72584
  }
@@ -71845,15 +73295,15 @@ function serializeRunState(state) {
71845
73295
  ])
71846
73296
  };
71847
73297
  }
71848
- var path28, fs24, StateMachineExecutionEngine;
73298
+ var path32, fs28, StateMachineExecutionEngine;
71849
73299
  var init_state_machine_execution_engine = __esm({
71850
73300
  "src/state-machine-execution-engine.ts"() {
71851
73301
  "use strict";
71852
73302
  init_runner();
71853
73303
  init_logger();
71854
73304
  init_sandbox_manager();
71855
- path28 = __toESM(require("path"));
71856
- fs24 = __toESM(require("fs"));
73305
+ path32 = __toESM(require("path"));
73306
+ fs28 = __toESM(require("fs"));
71857
73307
  StateMachineExecutionEngine = class _StateMachineExecutionEngine {
71858
73308
  workingDirectory;
71859
73309
  executionContext;
@@ -72085,8 +73535,8 @@ var init_state_machine_execution_engine = __esm({
72085
73535
  logger.debug(
72086
73536
  `[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`
72087
73537
  );
72088
- const { loadEnterprisePolicyEngine } = await import("./enterprise/loader");
72089
- context2.policyEngine = await loadEnterprisePolicyEngine(configWithTagFilter.policy);
73538
+ const { loadEnterprisePolicyEngine: loadEnterprisePolicyEngine2 } = await Promise.resolve().then(() => (init_loader(), loader_exports));
73539
+ context2.policyEngine = await loadEnterprisePolicyEngine2(configWithTagFilter.policy);
72090
73540
  logger.debug(
72091
73541
  `[PolicyEngine] Initialized: ${context2.policyEngine?.constructor?.name || "unknown"}`
72092
73542
  );
@@ -72240,9 +73690,9 @@ var init_state_machine_execution_engine = __esm({
72240
73690
  }
72241
73691
  const checkId = String(ev?.checkId || "unknown");
72242
73692
  const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
72243
- const baseDir = process.env.VISOR_SNAPSHOT_DIR || path28.resolve(process.cwd(), ".visor", "snapshots");
72244
- fs24.mkdirSync(baseDir, { recursive: true });
72245
- const filePath = path28.join(baseDir, `${threadKey}-${checkId}.json`);
73693
+ const baseDir = process.env.VISOR_SNAPSHOT_DIR || path32.resolve(process.cwd(), ".visor", "snapshots");
73694
+ fs28.mkdirSync(baseDir, { recursive: true });
73695
+ const filePath = path32.join(baseDir, `${threadKey}-${checkId}.json`);
72246
73696
  await this.saveSnapshotToFile(filePath);
72247
73697
  logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
72248
73698
  try {
@@ -72383,7 +73833,7 @@ var init_state_machine_execution_engine = __esm({
72383
73833
  * Does not include secrets. Intended for debugging and future resume support.
72384
73834
  */
72385
73835
  async saveSnapshotToFile(filePath) {
72386
- const fs25 = await import("fs/promises");
73836
+ const fs29 = await import("fs/promises");
72387
73837
  const ctx = this._lastContext;
72388
73838
  const runner = this._lastRunner;
72389
73839
  if (!ctx || !runner) {
@@ -72403,14 +73853,14 @@ var init_state_machine_execution_engine = __esm({
72403
73853
  journal: entries,
72404
73854
  requestedChecks: ctx.requestedChecks || []
72405
73855
  };
72406
- await fs25.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
73856
+ await fs29.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
72407
73857
  }
72408
73858
  /**
72409
73859
  * Load a snapshot JSON from file and return it. Resume support can build on this.
72410
73860
  */
72411
73861
  async loadSnapshotFromFile(filePath) {
72412
- const fs25 = await import("fs/promises");
72413
- const raw = await fs25.readFile(filePath, "utf8");
73862
+ const fs29 = await import("fs/promises");
73863
+ const raw = await fs29.readFile(filePath, "utf8");
72414
73864
  return JSON.parse(raw);
72415
73865
  }
72416
73866
  /**