@probelabs/visor 0.1.176 → 0.1.177-ee
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/frontends/slack-frontend.d.ts.map +1 -1
- package/dist/index.js +1964 -44
- package/dist/sdk/{a2a-frontend-W54ZZ32L.mjs → a2a-frontend-BPWLYLCG.mjs} +2 -2
- package/dist/sdk/{check-provider-registry-MJYNLB37.mjs → check-provider-registry-G64PWDCZ.mjs} +5 -5
- package/dist/sdk/{check-provider-registry-7HSDAKHQ.mjs → check-provider-registry-HW4QPPSA.mjs} +2 -2
- package/dist/sdk/{chunk-Y2DYDGGY.mjs → chunk-GVTWESYN.mjs} +3 -3
- package/dist/sdk/chunk-GVTWESYN.mjs.map +1 -0
- package/dist/sdk/{chunk-4E34HRCW.mjs → chunk-IYXOLUDJ.mjs} +74 -29
- package/dist/sdk/chunk-IYXOLUDJ.mjs.map +1 -0
- package/dist/sdk/{chunk-66PTDQAO.mjs → chunk-OHOBWVPP.mjs} +3 -3
- package/dist/sdk/{chunk-SEA2FWEC.mjs → chunk-OPI632LK.mjs} +2 -2
- package/dist/sdk/{chunk-OK4MLC3R.mjs → chunk-Y6PVSFCS.mjs} +49 -9
- package/dist/sdk/chunk-Y6PVSFCS.mjs.map +1 -0
- package/dist/sdk/{failure-condition-evaluator-RTT5SLVL.mjs → failure-condition-evaluator-HL33X7MH.mjs} +3 -3
- package/dist/sdk/{github-frontend-C4GG62PI.mjs → github-frontend-U2U42CKV.mjs} +3 -3
- package/dist/sdk/{host-6GGO2BQE.mjs → host-HFOJQIOF.mjs} +4 -4
- package/dist/sdk/knex-store-QCEW4I4R.mjs +527 -0
- package/dist/sdk/knex-store-QCEW4I4R.mjs.map +1 -0
- package/dist/sdk/loader-Q7K76ZIY.mjs +89 -0
- package/dist/sdk/loader-Q7K76ZIY.mjs.map +1 -0
- package/dist/sdk/opa-policy-engine-QCSSIMUF.mjs +655 -0
- package/dist/sdk/opa-policy-engine-QCSSIMUF.mjs.map +1 -0
- package/dist/sdk/{routing-DXVYOXAS.mjs → routing-SFP4D6O3.mjs} +4 -4
- package/dist/sdk/{schedule-tool-R7NSHTPJ.mjs → schedule-tool-45NAALKS.mjs} +2 -2
- package/dist/sdk/{schedule-tool-LL7XDILD.mjs → schedule-tool-7O7SWSJ4.mjs} +5 -5
- package/dist/sdk/{schedule-tool-handler-5GTQ6SFI.mjs → schedule-tool-handler-6MPP5DXK.mjs} +2 -2
- package/dist/sdk/{schedule-tool-handler-O3L2R5OJ.mjs → schedule-tool-handler-KYDXJ2ZL.mjs} +5 -5
- package/dist/sdk/sdk.js +1729 -295
- package/dist/sdk/sdk.js.map +1 -1
- package/dist/sdk/sdk.mjs +4 -4
- package/dist/sdk/slack-frontend-XKSIOUXB.mjs +910 -0
- package/dist/sdk/slack-frontend-XKSIOUXB.mjs.map +1 -0
- package/dist/sdk/{trace-helpers-CECHXDLI.mjs → trace-helpers-L3EOYW5P.mjs} +2 -2
- package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
- package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
- package/dist/sdk/{workflow-check-provider-EY6VSMNG.mjs → workflow-check-provider-JIXZJNV5.mjs} +5 -5
- package/dist/sdk/{workflow-check-provider-AX7IRQEZ.mjs → workflow-check-provider-OA33MESM.mjs} +2 -2
- package/dist/utils/workspace-manager.d.ts +5 -1
- package/dist/utils/workspace-manager.d.ts.map +1 -1
- package/dist/utils/worktree-manager.d.ts +5 -1
- package/dist/utils/worktree-manager.d.ts.map +1 -1
- package/package.json +2 -2
- package/dist/output/traces/run-2026-03-10T15-37-04-236Z.ndjson +0 -138
- package/dist/output/traces/run-2026-03-10T15-37-44-748Z.ndjson +0 -2296
- package/dist/sdk/check-provider-registry-VE6LQPLY.mjs +0 -30
- package/dist/sdk/chunk-4E34HRCW.mjs.map +0 -1
- package/dist/sdk/chunk-OK4MLC3R.mjs.map +0 -1
- package/dist/sdk/chunk-R3FNZRE4.mjs +0 -45194
- package/dist/sdk/chunk-R3FNZRE4.mjs.map +0 -1
- package/dist/sdk/chunk-Y2DYDGGY.mjs.map +0 -1
- package/dist/sdk/schedule-tool-GKKVOQB7.mjs +0 -36
- package/dist/sdk/schedule-tool-handler-ZZGJ3UFR.mjs +0 -40
- package/dist/sdk/trace-helpers-CECHXDLI.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-AX7IRQEZ.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-EY6VSMNG.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-HZQGJFOU.mjs +0 -30
- package/dist/sdk/workflow-check-provider-HZQGJFOU.mjs.map +0 -1
- package/dist/traces/run-2026-03-10T15-37-04-236Z.ndjson +0 -138
- package/dist/traces/run-2026-03-10T15-37-44-748Z.ndjson +0 -2296
- /package/dist/sdk/{a2a-frontend-W54ZZ32L.mjs.map → a2a-frontend-BPWLYLCG.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-7HSDAKHQ.mjs.map → check-provider-registry-G64PWDCZ.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-MJYNLB37.mjs.map → check-provider-registry-HW4QPPSA.mjs.map} +0 -0
- /package/dist/sdk/{chunk-66PTDQAO.mjs.map → chunk-OHOBWVPP.mjs.map} +0 -0
- /package/dist/sdk/{chunk-SEA2FWEC.mjs.map → chunk-OPI632LK.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-VE6LQPLY.mjs.map → failure-condition-evaluator-HL33X7MH.mjs.map} +0 -0
- /package/dist/sdk/{github-frontend-C4GG62PI.mjs.map → github-frontend-U2U42CKV.mjs.map} +0 -0
- /package/dist/sdk/{host-6GGO2BQE.mjs.map → host-HFOJQIOF.mjs.map} +0 -0
- /package/dist/sdk/{failure-condition-evaluator-RTT5SLVL.mjs.map → routing-SFP4D6O3.mjs.map} +0 -0
- /package/dist/sdk/{routing-DXVYOXAS.mjs.map → schedule-tool-45NAALKS.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-GKKVOQB7.mjs.map → schedule-tool-7O7SWSJ4.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-LL7XDILD.mjs.map → schedule-tool-handler-6MPP5DXK.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-R7NSHTPJ.mjs.map → schedule-tool-handler-KYDXJ2ZL.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-5GTQ6SFI.mjs.map → trace-helpers-L3EOYW5P.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-O3L2R5OJ.mjs.map → workflow-check-provider-JIXZJNV5.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-ZZGJ3UFR.mjs.map → workflow-check-provider-OA33MESM.mjs.map} +0 -0
package/dist/sdk/sdk.js
CHANGED
|
@@ -704,7 +704,7 @@ var require_package = __commonJS({
|
|
|
704
704
|
"package.json"(exports2, module2) {
|
|
705
705
|
module2.exports = {
|
|
706
706
|
name: "@probelabs/visor",
|
|
707
|
-
version: "0.1.
|
|
707
|
+
version: "0.1.42",
|
|
708
708
|
main: "dist/index.js",
|
|
709
709
|
bin: {
|
|
710
710
|
visor: "./dist/index.js"
|
|
@@ -823,7 +823,7 @@ var require_package = __commonJS({
|
|
|
823
823
|
"@opentelemetry/sdk-node": "^0.203.0",
|
|
824
824
|
"@opentelemetry/sdk-trace-base": "^1.30.1",
|
|
825
825
|
"@opentelemetry/semantic-conventions": "^1.30.1",
|
|
826
|
-
"@probelabs/probe": "^0.6.0-
|
|
826
|
+
"@probelabs/probe": "^0.6.0-rc292",
|
|
827
827
|
"@types/commander": "^2.12.0",
|
|
828
828
|
"@types/uuid": "^10.0.0",
|
|
829
829
|
acorn: "^8.16.0",
|
|
@@ -1152,11 +1152,11 @@ function getTracer() {
|
|
|
1152
1152
|
}
|
|
1153
1153
|
async function withActiveSpan(name, attrs, fn) {
|
|
1154
1154
|
const tracer = getTracer();
|
|
1155
|
-
return await new Promise((
|
|
1155
|
+
return await new Promise((resolve19, reject) => {
|
|
1156
1156
|
const callback = async (span) => {
|
|
1157
1157
|
try {
|
|
1158
1158
|
const res = await fn(span);
|
|
1159
|
-
|
|
1159
|
+
resolve19(res);
|
|
1160
1160
|
} catch (err) {
|
|
1161
1161
|
try {
|
|
1162
1162
|
if (err instanceof Error) span.recordException(err);
|
|
@@ -1281,19 +1281,19 @@ function __getOrCreateNdjsonPath() {
|
|
|
1281
1281
|
try {
|
|
1282
1282
|
if (process.env.VISOR_TELEMETRY_SINK && process.env.VISOR_TELEMETRY_SINK !== "file")
|
|
1283
1283
|
return null;
|
|
1284
|
-
const
|
|
1285
|
-
const
|
|
1284
|
+
const path33 = require("path");
|
|
1285
|
+
const fs29 = require("fs");
|
|
1286
1286
|
if (process.env.VISOR_FALLBACK_TRACE_FILE) {
|
|
1287
1287
|
__ndjsonPath = process.env.VISOR_FALLBACK_TRACE_FILE;
|
|
1288
|
-
const dir =
|
|
1289
|
-
if (!
|
|
1288
|
+
const dir = path33.dirname(__ndjsonPath);
|
|
1289
|
+
if (!fs29.existsSync(dir)) fs29.mkdirSync(dir, { recursive: true });
|
|
1290
1290
|
return __ndjsonPath;
|
|
1291
1291
|
}
|
|
1292
|
-
const outDir = process.env.VISOR_TRACE_DIR ||
|
|
1293
|
-
if (!
|
|
1292
|
+
const outDir = process.env.VISOR_TRACE_DIR || path33.join(process.cwd(), "output", "traces");
|
|
1293
|
+
if (!fs29.existsSync(outDir)) fs29.mkdirSync(outDir, { recursive: true });
|
|
1294
1294
|
if (!__ndjsonPath) {
|
|
1295
1295
|
const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
1296
|
-
__ndjsonPath =
|
|
1296
|
+
__ndjsonPath = path33.join(outDir, `${ts}.ndjson`);
|
|
1297
1297
|
}
|
|
1298
1298
|
return __ndjsonPath;
|
|
1299
1299
|
} catch {
|
|
@@ -1302,11 +1302,11 @@ function __getOrCreateNdjsonPath() {
|
|
|
1302
1302
|
}
|
|
1303
1303
|
function _appendRunMarker() {
|
|
1304
1304
|
try {
|
|
1305
|
-
const
|
|
1305
|
+
const fs29 = require("fs");
|
|
1306
1306
|
const p = __getOrCreateNdjsonPath();
|
|
1307
1307
|
if (!p) return;
|
|
1308
1308
|
const line = { name: "visor.run", attributes: { started: true } };
|
|
1309
|
-
|
|
1309
|
+
fs29.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
|
|
1310
1310
|
} catch {
|
|
1311
1311
|
}
|
|
1312
1312
|
}
|
|
@@ -3393,7 +3393,7 @@ var init_failure_condition_evaluator = __esm({
|
|
|
3393
3393
|
*/
|
|
3394
3394
|
evaluateExpression(condition, context2) {
|
|
3395
3395
|
try {
|
|
3396
|
-
const
|
|
3396
|
+
const normalize8 = (expr) => {
|
|
3397
3397
|
const trimmed = expr.trim();
|
|
3398
3398
|
if (!/[\n;]/.test(trimmed)) return trimmed;
|
|
3399
3399
|
const parts = trimmed.split(/[\n;]+/).map((s) => s.trim()).filter((s) => s.length > 0 && !s.startsWith("//"));
|
|
@@ -3551,7 +3551,7 @@ var init_failure_condition_evaluator = __esm({
|
|
|
3551
3551
|
try {
|
|
3552
3552
|
exec2 = this.sandbox.compile(`return (${raw});`);
|
|
3553
3553
|
} catch {
|
|
3554
|
-
const normalizedExpr =
|
|
3554
|
+
const normalizedExpr = normalize8(condition);
|
|
3555
3555
|
exec2 = this.sandbox.compile(`return (${normalizedExpr});`);
|
|
3556
3556
|
}
|
|
3557
3557
|
const result = exec2(scope).run();
|
|
@@ -3934,9 +3934,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
3934
3934
|
});
|
|
3935
3935
|
liquid.registerFilter("get", (obj, pathExpr) => {
|
|
3936
3936
|
if (obj == null) return void 0;
|
|
3937
|
-
const
|
|
3938
|
-
if (!
|
|
3939
|
-
const parts =
|
|
3937
|
+
const path33 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
|
|
3938
|
+
if (!path33) return obj;
|
|
3939
|
+
const parts = path33.split(".");
|
|
3940
3940
|
let cur = obj;
|
|
3941
3941
|
for (const p of parts) {
|
|
3942
3942
|
if (cur == null) return void 0;
|
|
@@ -4055,9 +4055,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
4055
4055
|
}
|
|
4056
4056
|
}
|
|
4057
4057
|
const defaultRole = typeof rolesCfg.default === "string" && rolesCfg.default.trim() ? rolesCfg.default.trim() : void 0;
|
|
4058
|
-
const getNested = (obj,
|
|
4059
|
-
if (!obj || !
|
|
4060
|
-
const parts =
|
|
4058
|
+
const getNested = (obj, path33) => {
|
|
4059
|
+
if (!obj || !path33) return void 0;
|
|
4060
|
+
const parts = path33.split(".");
|
|
4061
4061
|
let cur = obj;
|
|
4062
4062
|
for (const p of parts) {
|
|
4063
4063
|
if (cur == null) return void 0;
|
|
@@ -6609,8 +6609,8 @@ var init_dependency_gating = __esm({
|
|
|
6609
6609
|
async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
6610
6610
|
try {
|
|
6611
6611
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
6612
|
-
const
|
|
6613
|
-
const
|
|
6612
|
+
const fs29 = await import("fs/promises");
|
|
6613
|
+
const path33 = await import("path");
|
|
6614
6614
|
const schemaRaw = checkConfig.schema || "plain";
|
|
6615
6615
|
const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
|
|
6616
6616
|
let templateContent;
|
|
@@ -6618,24 +6618,24 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
|
6618
6618
|
templateContent = String(checkConfig.template.content);
|
|
6619
6619
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
6620
6620
|
const file = String(checkConfig.template.file);
|
|
6621
|
-
const resolved =
|
|
6622
|
-
templateContent = await
|
|
6621
|
+
const resolved = path33.resolve(process.cwd(), file);
|
|
6622
|
+
templateContent = await fs29.readFile(resolved, "utf-8");
|
|
6623
6623
|
} else if (schema && schema !== "plain") {
|
|
6624
6624
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
6625
6625
|
if (sanitized) {
|
|
6626
6626
|
const candidatePaths = [
|
|
6627
|
-
|
|
6627
|
+
path33.join(__dirname, "output", sanitized, "template.liquid"),
|
|
6628
6628
|
// bundled: dist/output/
|
|
6629
|
-
|
|
6629
|
+
path33.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
6630
6630
|
// source: output/
|
|
6631
|
-
|
|
6631
|
+
path33.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
6632
6632
|
// fallback: cwd/output/
|
|
6633
|
-
|
|
6633
|
+
path33.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
6634
6634
|
// fallback: cwd/dist/output/
|
|
6635
6635
|
];
|
|
6636
6636
|
for (const p of candidatePaths) {
|
|
6637
6637
|
try {
|
|
6638
|
-
templateContent = await
|
|
6638
|
+
templateContent = await fs29.readFile(p, "utf-8");
|
|
6639
6639
|
if (templateContent) break;
|
|
6640
6640
|
} catch {
|
|
6641
6641
|
}
|
|
@@ -7040,7 +7040,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
7040
7040
|
}
|
|
7041
7041
|
try {
|
|
7042
7042
|
const originalProbePath = process.env.PROBE_PATH;
|
|
7043
|
-
const
|
|
7043
|
+
const fs29 = require("fs");
|
|
7044
7044
|
const possiblePaths = [
|
|
7045
7045
|
// Relative to current working directory (most common in production)
|
|
7046
7046
|
path6.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
|
|
@@ -7051,7 +7051,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
7051
7051
|
];
|
|
7052
7052
|
let probeBinaryPath;
|
|
7053
7053
|
for (const candidatePath of possiblePaths) {
|
|
7054
|
-
if (
|
|
7054
|
+
if (fs29.existsSync(candidatePath)) {
|
|
7055
7055
|
probeBinaryPath = candidatePath;
|
|
7056
7056
|
break;
|
|
7057
7057
|
}
|
|
@@ -7158,7 +7158,7 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
7158
7158
|
if (chromiumPath) {
|
|
7159
7159
|
env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;
|
|
7160
7160
|
}
|
|
7161
|
-
const result = await new Promise((
|
|
7161
|
+
const result = await new Promise((resolve19) => {
|
|
7162
7162
|
const proc = (0, import_child_process.spawn)(
|
|
7163
7163
|
"npx",
|
|
7164
7164
|
[
|
|
@@ -7188,13 +7188,13 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
7188
7188
|
});
|
|
7189
7189
|
proc.on("close", (code) => {
|
|
7190
7190
|
if (code === 0) {
|
|
7191
|
-
|
|
7191
|
+
resolve19({ success: true });
|
|
7192
7192
|
} else {
|
|
7193
|
-
|
|
7193
|
+
resolve19({ success: false, error: stderr || `Exit code ${code}` });
|
|
7194
7194
|
}
|
|
7195
7195
|
});
|
|
7196
7196
|
proc.on("error", (err) => {
|
|
7197
|
-
|
|
7197
|
+
resolve19({ success: false, error: err.message });
|
|
7198
7198
|
});
|
|
7199
7199
|
});
|
|
7200
7200
|
if (!result.success) {
|
|
@@ -8392,8 +8392,8 @@ ${schemaString}`);
|
|
|
8392
8392
|
}
|
|
8393
8393
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8394
8394
|
try {
|
|
8395
|
-
const
|
|
8396
|
-
const
|
|
8395
|
+
const fs29 = require("fs");
|
|
8396
|
+
const path33 = require("path");
|
|
8397
8397
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8398
8398
|
const provider = this.config.provider || "auto";
|
|
8399
8399
|
const model = this.config.model || "default";
|
|
@@ -8507,20 +8507,20 @@ ${"=".repeat(60)}
|
|
|
8507
8507
|
`;
|
|
8508
8508
|
readableVersion += `${"=".repeat(60)}
|
|
8509
8509
|
`;
|
|
8510
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8511
|
-
if (!
|
|
8512
|
-
|
|
8510
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
|
|
8511
|
+
if (!fs29.existsSync(debugArtifactsDir)) {
|
|
8512
|
+
fs29.mkdirSync(debugArtifactsDir, { recursive: true });
|
|
8513
8513
|
}
|
|
8514
|
-
const debugFile =
|
|
8514
|
+
const debugFile = path33.join(
|
|
8515
8515
|
debugArtifactsDir,
|
|
8516
8516
|
`prompt-${_checkName || "unknown"}-${timestamp}.json`
|
|
8517
8517
|
);
|
|
8518
|
-
|
|
8519
|
-
const readableFile =
|
|
8518
|
+
fs29.writeFileSync(debugFile, debugJson, "utf-8");
|
|
8519
|
+
const readableFile = path33.join(
|
|
8520
8520
|
debugArtifactsDir,
|
|
8521
8521
|
`prompt-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8522
8522
|
);
|
|
8523
|
-
|
|
8523
|
+
fs29.writeFileSync(readableFile, readableVersion, "utf-8");
|
|
8524
8524
|
log(`
|
|
8525
8525
|
\u{1F4BE} Full debug info saved to:`);
|
|
8526
8526
|
log(` JSON: ${debugFile}`);
|
|
@@ -8558,8 +8558,8 @@ ${"=".repeat(60)}
|
|
|
8558
8558
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
8559
8559
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8560
8560
|
try {
|
|
8561
|
-
const
|
|
8562
|
-
const
|
|
8561
|
+
const fs29 = require("fs");
|
|
8562
|
+
const path33 = require("path");
|
|
8563
8563
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8564
8564
|
const agentAny2 = agent;
|
|
8565
8565
|
let fullHistory = [];
|
|
@@ -8570,8 +8570,8 @@ ${"=".repeat(60)}
|
|
|
8570
8570
|
} else if (agentAny2._messages) {
|
|
8571
8571
|
fullHistory = agentAny2._messages;
|
|
8572
8572
|
}
|
|
8573
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8574
|
-
const sessionBase =
|
|
8573
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
|
|
8574
|
+
const sessionBase = path33.join(
|
|
8575
8575
|
debugArtifactsDir,
|
|
8576
8576
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
8577
8577
|
);
|
|
@@ -8583,7 +8583,7 @@ ${"=".repeat(60)}
|
|
|
8583
8583
|
schema: effectiveSchema,
|
|
8584
8584
|
totalMessages: fullHistory.length
|
|
8585
8585
|
};
|
|
8586
|
-
|
|
8586
|
+
fs29.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
8587
8587
|
let readable = `=============================================================
|
|
8588
8588
|
`;
|
|
8589
8589
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -8610,7 +8610,7 @@ ${"=".repeat(60)}
|
|
|
8610
8610
|
`;
|
|
8611
8611
|
readable += content + "\n";
|
|
8612
8612
|
});
|
|
8613
|
-
|
|
8613
|
+
fs29.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
8614
8614
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
8615
8615
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
8616
8616
|
} catch (error) {
|
|
@@ -8619,11 +8619,11 @@ ${"=".repeat(60)}
|
|
|
8619
8619
|
}
|
|
8620
8620
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8621
8621
|
try {
|
|
8622
|
-
const
|
|
8623
|
-
const
|
|
8622
|
+
const fs29 = require("fs");
|
|
8623
|
+
const path33 = require("path");
|
|
8624
8624
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8625
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8626
|
-
const responseFile =
|
|
8625
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
|
|
8626
|
+
const responseFile = path33.join(
|
|
8627
8627
|
debugArtifactsDir,
|
|
8628
8628
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8629
8629
|
);
|
|
@@ -8656,7 +8656,7 @@ ${"=".repeat(60)}
|
|
|
8656
8656
|
`;
|
|
8657
8657
|
responseContent += `${"=".repeat(60)}
|
|
8658
8658
|
`;
|
|
8659
|
-
|
|
8659
|
+
fs29.writeFileSync(responseFile, responseContent, "utf-8");
|
|
8660
8660
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
8661
8661
|
} catch (error) {
|
|
8662
8662
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -8672,9 +8672,9 @@ ${"=".repeat(60)}
|
|
|
8672
8672
|
await agentAny._telemetryConfig.shutdown();
|
|
8673
8673
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
|
|
8674
8674
|
if (process.env.GITHUB_ACTIONS) {
|
|
8675
|
-
const
|
|
8676
|
-
if (
|
|
8677
|
-
const stats =
|
|
8675
|
+
const fs29 = require("fs");
|
|
8676
|
+
if (fs29.existsSync(agentAny._traceFilePath)) {
|
|
8677
|
+
const stats = fs29.statSync(agentAny._traceFilePath);
|
|
8678
8678
|
console.log(
|
|
8679
8679
|
`::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
|
|
8680
8680
|
);
|
|
@@ -8887,9 +8887,9 @@ ${schemaString}`);
|
|
|
8887
8887
|
const model = this.config.model || "default";
|
|
8888
8888
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8889
8889
|
try {
|
|
8890
|
-
const
|
|
8891
|
-
const
|
|
8892
|
-
const
|
|
8890
|
+
const fs29 = require("fs");
|
|
8891
|
+
const path33 = require("path");
|
|
8892
|
+
const os3 = require("os");
|
|
8893
8893
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8894
8894
|
const debugData = {
|
|
8895
8895
|
timestamp,
|
|
@@ -8961,19 +8961,19 @@ ${"=".repeat(60)}
|
|
|
8961
8961
|
`;
|
|
8962
8962
|
readableVersion += `${"=".repeat(60)}
|
|
8963
8963
|
`;
|
|
8964
|
-
const tempDir =
|
|
8965
|
-
const promptFile =
|
|
8966
|
-
|
|
8964
|
+
const tempDir = os3.tmpdir();
|
|
8965
|
+
const promptFile = path33.join(tempDir, `visor-prompt-${timestamp}.txt`);
|
|
8966
|
+
fs29.writeFileSync(promptFile, prompt, "utf-8");
|
|
8967
8967
|
log(`
|
|
8968
8968
|
\u{1F4BE} Prompt saved to: ${promptFile}`);
|
|
8969
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8969
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
|
|
8970
8970
|
try {
|
|
8971
|
-
const base =
|
|
8971
|
+
const base = path33.join(
|
|
8972
8972
|
debugArtifactsDir,
|
|
8973
8973
|
`prompt-${_checkName || "unknown"}-${timestamp}`
|
|
8974
8974
|
);
|
|
8975
|
-
|
|
8976
|
-
|
|
8975
|
+
fs29.writeFileSync(base + ".json", debugJson, "utf-8");
|
|
8976
|
+
fs29.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
|
|
8977
8977
|
log(`
|
|
8978
8978
|
\u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
|
|
8979
8979
|
} catch {
|
|
@@ -9023,8 +9023,8 @@ $ ${cliCommand}
|
|
|
9023
9023
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
9024
9024
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
9025
9025
|
try {
|
|
9026
|
-
const
|
|
9027
|
-
const
|
|
9026
|
+
const fs29 = require("fs");
|
|
9027
|
+
const path33 = require("path");
|
|
9028
9028
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
9029
9029
|
const agentAny = agent;
|
|
9030
9030
|
let fullHistory = [];
|
|
@@ -9035,8 +9035,8 @@ $ ${cliCommand}
|
|
|
9035
9035
|
} else if (agentAny._messages) {
|
|
9036
9036
|
fullHistory = agentAny._messages;
|
|
9037
9037
|
}
|
|
9038
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
9039
|
-
const sessionBase =
|
|
9038
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
|
|
9039
|
+
const sessionBase = path33.join(
|
|
9040
9040
|
debugArtifactsDir,
|
|
9041
9041
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
9042
9042
|
);
|
|
@@ -9048,7 +9048,7 @@ $ ${cliCommand}
|
|
|
9048
9048
|
schema: effectiveSchema,
|
|
9049
9049
|
totalMessages: fullHistory.length
|
|
9050
9050
|
};
|
|
9051
|
-
|
|
9051
|
+
fs29.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
9052
9052
|
let readable = `=============================================================
|
|
9053
9053
|
`;
|
|
9054
9054
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -9075,7 +9075,7 @@ ${"=".repeat(60)}
|
|
|
9075
9075
|
`;
|
|
9076
9076
|
readable += content + "\n";
|
|
9077
9077
|
});
|
|
9078
|
-
|
|
9078
|
+
fs29.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
9079
9079
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
9080
9080
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
9081
9081
|
} catch (error) {
|
|
@@ -9084,11 +9084,11 @@ ${"=".repeat(60)}
|
|
|
9084
9084
|
}
|
|
9085
9085
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
9086
9086
|
try {
|
|
9087
|
-
const
|
|
9088
|
-
const
|
|
9087
|
+
const fs29 = require("fs");
|
|
9088
|
+
const path33 = require("path");
|
|
9089
9089
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
9090
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
9091
|
-
const responseFile =
|
|
9090
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path33.join(process.cwd(), "debug-artifacts");
|
|
9091
|
+
const responseFile = path33.join(
|
|
9092
9092
|
debugArtifactsDir,
|
|
9093
9093
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
9094
9094
|
);
|
|
@@ -9121,7 +9121,7 @@ ${"=".repeat(60)}
|
|
|
9121
9121
|
`;
|
|
9122
9122
|
responseContent += `${"=".repeat(60)}
|
|
9123
9123
|
`;
|
|
9124
|
-
|
|
9124
|
+
fs29.writeFileSync(responseFile, responseContent, "utf-8");
|
|
9125
9125
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
9126
9126
|
} catch (error) {
|
|
9127
9127
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -9139,9 +9139,9 @@ ${"=".repeat(60)}
|
|
|
9139
9139
|
await telemetry.shutdown();
|
|
9140
9140
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
|
|
9141
9141
|
if (process.env.GITHUB_ACTIONS) {
|
|
9142
|
-
const
|
|
9143
|
-
if (
|
|
9144
|
-
const stats =
|
|
9142
|
+
const fs29 = require("fs");
|
|
9143
|
+
if (fs29.existsSync(traceFilePath)) {
|
|
9144
|
+
const stats = fs29.statSync(traceFilePath);
|
|
9145
9145
|
console.log(
|
|
9146
9146
|
`::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
|
|
9147
9147
|
);
|
|
@@ -9179,8 +9179,8 @@ ${"=".repeat(60)}
|
|
|
9179
9179
|
* Load schema content from schema files or inline definitions
|
|
9180
9180
|
*/
|
|
9181
9181
|
async loadSchemaContent(schema) {
|
|
9182
|
-
const
|
|
9183
|
-
const
|
|
9182
|
+
const fs29 = require("fs").promises;
|
|
9183
|
+
const path33 = require("path");
|
|
9184
9184
|
if (typeof schema === "object" && schema !== null) {
|
|
9185
9185
|
log("\u{1F4CB} Using inline schema object from configuration");
|
|
9186
9186
|
return JSON.stringify(schema);
|
|
@@ -9193,14 +9193,14 @@ ${"=".repeat(60)}
|
|
|
9193
9193
|
}
|
|
9194
9194
|
} catch {
|
|
9195
9195
|
}
|
|
9196
|
-
if ((schema.startsWith("./") || schema.includes(".json")) && !
|
|
9196
|
+
if ((schema.startsWith("./") || schema.includes(".json")) && !path33.isAbsolute(schema)) {
|
|
9197
9197
|
if (schema.includes("..") || schema.includes("\0")) {
|
|
9198
9198
|
throw new Error("Invalid schema path: path traversal not allowed");
|
|
9199
9199
|
}
|
|
9200
9200
|
try {
|
|
9201
|
-
const schemaPath =
|
|
9201
|
+
const schemaPath = path33.resolve(process.cwd(), schema);
|
|
9202
9202
|
log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
|
|
9203
|
-
const schemaContent = await
|
|
9203
|
+
const schemaContent = await fs29.readFile(schemaPath, "utf-8");
|
|
9204
9204
|
return schemaContent.trim();
|
|
9205
9205
|
} catch (error) {
|
|
9206
9206
|
throw new Error(
|
|
@@ -9214,22 +9214,22 @@ ${"=".repeat(60)}
|
|
|
9214
9214
|
}
|
|
9215
9215
|
const candidatePaths = [
|
|
9216
9216
|
// GitHub Action bundle location
|
|
9217
|
-
|
|
9217
|
+
path33.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
|
|
9218
9218
|
// Historical fallback when src/output was inadvertently bundled as output1/
|
|
9219
|
-
|
|
9219
|
+
path33.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
|
|
9220
9220
|
// Local dev (repo root)
|
|
9221
|
-
|
|
9221
|
+
path33.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
|
|
9222
9222
|
];
|
|
9223
9223
|
for (const schemaPath of candidatePaths) {
|
|
9224
9224
|
try {
|
|
9225
|
-
const schemaContent = await
|
|
9225
|
+
const schemaContent = await fs29.readFile(schemaPath, "utf-8");
|
|
9226
9226
|
return schemaContent.trim();
|
|
9227
9227
|
} catch {
|
|
9228
9228
|
}
|
|
9229
9229
|
}
|
|
9230
|
-
const distPath =
|
|
9231
|
-
const distAltPath =
|
|
9232
|
-
const cwdPath =
|
|
9230
|
+
const distPath = path33.join(__dirname, "output", sanitizedSchemaName, "schema.json");
|
|
9231
|
+
const distAltPath = path33.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
|
|
9232
|
+
const cwdPath = path33.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
|
|
9233
9233
|
throw new Error(
|
|
9234
9234
|
`Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
|
|
9235
9235
|
);
|
|
@@ -9471,7 +9471,7 @@ ${"=".repeat(60)}
|
|
|
9471
9471
|
* Generate mock response for testing
|
|
9472
9472
|
*/
|
|
9473
9473
|
async generateMockResponse(_prompt, _checkName, _schema) {
|
|
9474
|
-
await new Promise((
|
|
9474
|
+
await new Promise((resolve19) => setTimeout(resolve19, 500));
|
|
9475
9475
|
const name = (_checkName || "").toLowerCase();
|
|
9476
9476
|
if (name.includes("extract-facts")) {
|
|
9477
9477
|
const arr = Array.from({ length: 6 }, (_, i) => ({
|
|
@@ -9832,7 +9832,7 @@ var init_command_executor = __esm({
|
|
|
9832
9832
|
* Execute command with stdin input
|
|
9833
9833
|
*/
|
|
9834
9834
|
executeWithStdin(command, options) {
|
|
9835
|
-
return new Promise((
|
|
9835
|
+
return new Promise((resolve19, reject) => {
|
|
9836
9836
|
const childProcess = (0, import_child_process2.exec)(
|
|
9837
9837
|
command,
|
|
9838
9838
|
{
|
|
@@ -9844,7 +9844,7 @@ var init_command_executor = __esm({
|
|
|
9844
9844
|
if (error && error.killed && (error.code === "ETIMEDOUT" || error.signal === "SIGTERM")) {
|
|
9845
9845
|
reject(new Error(`Command timed out after ${options.timeout || 3e4}ms`));
|
|
9846
9846
|
} else {
|
|
9847
|
-
|
|
9847
|
+
resolve19({
|
|
9848
9848
|
stdout: stdout || "",
|
|
9849
9849
|
stderr: stderr || "",
|
|
9850
9850
|
exitCode: error ? error.code || 1 : 0
|
|
@@ -18636,17 +18636,17 @@ var init_workflow_check_provider = __esm({
|
|
|
18636
18636
|
* so it can be executed by the state machine as a nested workflow.
|
|
18637
18637
|
*/
|
|
18638
18638
|
async loadWorkflowFromConfigPath(sourcePath, baseDir) {
|
|
18639
|
-
const
|
|
18640
|
-
const
|
|
18639
|
+
const path33 = require("path");
|
|
18640
|
+
const fs29 = require("fs");
|
|
18641
18641
|
const yaml5 = require("js-yaml");
|
|
18642
|
-
const resolved =
|
|
18643
|
-
if (!
|
|
18642
|
+
const resolved = path33.isAbsolute(sourcePath) ? sourcePath : path33.resolve(baseDir, sourcePath);
|
|
18643
|
+
if (!fs29.existsSync(resolved)) {
|
|
18644
18644
|
throw new Error(`Workflow config not found at: ${resolved}`);
|
|
18645
18645
|
}
|
|
18646
|
-
const rawContent =
|
|
18646
|
+
const rawContent = fs29.readFileSync(resolved, "utf8");
|
|
18647
18647
|
const rawData = yaml5.load(rawContent);
|
|
18648
18648
|
if (rawData.imports && Array.isArray(rawData.imports)) {
|
|
18649
|
-
const configDir =
|
|
18649
|
+
const configDir = path33.dirname(resolved);
|
|
18650
18650
|
for (const source of rawData.imports) {
|
|
18651
18651
|
const results = await this.registry.import(source, {
|
|
18652
18652
|
basePath: configDir,
|
|
@@ -18676,8 +18676,8 @@ ${errors}`);
|
|
|
18676
18676
|
if (!steps || Object.keys(steps).length === 0) {
|
|
18677
18677
|
throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
|
|
18678
18678
|
}
|
|
18679
|
-
const id =
|
|
18680
|
-
const name = loaded.name || `Workflow from ${
|
|
18679
|
+
const id = path33.basename(resolved).replace(/\.(ya?ml)$/i, "");
|
|
18680
|
+
const name = loaded.name || `Workflow from ${path33.basename(resolved)}`;
|
|
18681
18681
|
const workflowDef = {
|
|
18682
18682
|
id,
|
|
18683
18683
|
name,
|
|
@@ -19486,8 +19486,8 @@ async function createStoreBackend(storageConfig, haConfig) {
|
|
|
19486
19486
|
case "mssql": {
|
|
19487
19487
|
try {
|
|
19488
19488
|
const loaderPath = "../../enterprise/loader";
|
|
19489
|
-
const { loadEnterpriseStoreBackend } = await import(loaderPath);
|
|
19490
|
-
return await
|
|
19489
|
+
const { loadEnterpriseStoreBackend: loadEnterpriseStoreBackend2 } = await import(loaderPath);
|
|
19490
|
+
return await loadEnterpriseStoreBackend2(driver, storageConfig, haConfig);
|
|
19491
19491
|
} catch (err) {
|
|
19492
19492
|
const msg = err instanceof Error ? err.message : String(err);
|
|
19493
19493
|
logger.error(`[StoreFactory] Failed to load enterprise ${driver} backend: ${msg}`);
|
|
@@ -22181,7 +22181,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
22181
22181
|
* Returns the actual bound port number
|
|
22182
22182
|
*/
|
|
22183
22183
|
async start() {
|
|
22184
|
-
return new Promise((
|
|
22184
|
+
return new Promise((resolve19, reject) => {
|
|
22185
22185
|
try {
|
|
22186
22186
|
this.server = import_http.default.createServer((req, res) => {
|
|
22187
22187
|
this.handleRequest(req, res).catch((error) => {
|
|
@@ -22215,7 +22215,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
22215
22215
|
);
|
|
22216
22216
|
}
|
|
22217
22217
|
this.startKeepalive();
|
|
22218
|
-
|
|
22218
|
+
resolve19(this.port);
|
|
22219
22219
|
});
|
|
22220
22220
|
} catch (error) {
|
|
22221
22221
|
reject(error);
|
|
@@ -22278,7 +22278,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
22278
22278
|
logger.debug(
|
|
22279
22279
|
`[CustomToolsSSEServer:${this.sessionId}] Grace period before stop: ${waitMs}ms (activeToolCalls=${this.activeToolCalls})`
|
|
22280
22280
|
);
|
|
22281
|
-
await new Promise((
|
|
22281
|
+
await new Promise((resolve19) => setTimeout(resolve19, waitMs));
|
|
22282
22282
|
}
|
|
22283
22283
|
}
|
|
22284
22284
|
if (this.activeToolCalls > 0) {
|
|
@@ -22287,7 +22287,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
22287
22287
|
`[CustomToolsSSEServer:${this.sessionId}] Waiting for ${this.activeToolCalls} active tool call(s) before stop`
|
|
22288
22288
|
);
|
|
22289
22289
|
while (this.activeToolCalls > 0 && Date.now() - startedAt < effectiveDrainTimeoutMs) {
|
|
22290
|
-
await new Promise((
|
|
22290
|
+
await new Promise((resolve19) => setTimeout(resolve19, 250));
|
|
22291
22291
|
}
|
|
22292
22292
|
if (this.activeToolCalls > 0) {
|
|
22293
22293
|
logger.warn(
|
|
@@ -22312,21 +22312,21 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
22312
22312
|
}
|
|
22313
22313
|
this.connections.clear();
|
|
22314
22314
|
if (this.server) {
|
|
22315
|
-
await new Promise((
|
|
22315
|
+
await new Promise((resolve19, reject) => {
|
|
22316
22316
|
const timeout = setTimeout(() => {
|
|
22317
22317
|
if (this.debug) {
|
|
22318
22318
|
logger.debug(
|
|
22319
22319
|
`[CustomToolsSSEServer:${this.sessionId}] Force closing server after timeout`
|
|
22320
22320
|
);
|
|
22321
22321
|
}
|
|
22322
|
-
this.server?.close(() =>
|
|
22322
|
+
this.server?.close(() => resolve19());
|
|
22323
22323
|
}, 5e3);
|
|
22324
22324
|
this.server.close((error) => {
|
|
22325
22325
|
clearTimeout(timeout);
|
|
22326
22326
|
if (error) {
|
|
22327
22327
|
reject(error);
|
|
22328
22328
|
} else {
|
|
22329
|
-
|
|
22329
|
+
resolve19();
|
|
22330
22330
|
}
|
|
22331
22331
|
});
|
|
22332
22332
|
});
|
|
@@ -22783,7 +22783,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
22783
22783
|
logger.warn(
|
|
22784
22784
|
`[CustomToolsSSEServer:${this.sessionId}] Tool ${toolName} failed (attempt ${attempt + 1}/${retryCount + 1}): ${errorMsg}. Retrying in ${delay}ms`
|
|
22785
22785
|
);
|
|
22786
|
-
await new Promise((
|
|
22786
|
+
await new Promise((resolve19) => setTimeout(resolve19, delay));
|
|
22787
22787
|
attempt++;
|
|
22788
22788
|
}
|
|
22789
22789
|
}
|
|
@@ -23255,9 +23255,9 @@ var init_ai_check_provider = __esm({
|
|
|
23255
23255
|
} else {
|
|
23256
23256
|
resolvedPath = import_path8.default.resolve(process.cwd(), str);
|
|
23257
23257
|
}
|
|
23258
|
-
const
|
|
23258
|
+
const fs29 = require("fs").promises;
|
|
23259
23259
|
try {
|
|
23260
|
-
const stat2 = await
|
|
23260
|
+
const stat2 = await fs29.stat(resolvedPath);
|
|
23261
23261
|
return stat2.isFile();
|
|
23262
23262
|
} catch {
|
|
23263
23263
|
return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
|
|
@@ -29408,14 +29408,14 @@ var require_util = __commonJS({
|
|
|
29408
29408
|
}
|
|
29409
29409
|
const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
|
|
29410
29410
|
let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
|
|
29411
|
-
let
|
|
29411
|
+
let path33 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
|
|
29412
29412
|
if (origin.endsWith("/")) {
|
|
29413
29413
|
origin = origin.substring(0, origin.length - 1);
|
|
29414
29414
|
}
|
|
29415
|
-
if (
|
|
29416
|
-
|
|
29415
|
+
if (path33 && !path33.startsWith("/")) {
|
|
29416
|
+
path33 = `/${path33}`;
|
|
29417
29417
|
}
|
|
29418
|
-
url = new URL(origin +
|
|
29418
|
+
url = new URL(origin + path33);
|
|
29419
29419
|
}
|
|
29420
29420
|
return url;
|
|
29421
29421
|
}
|
|
@@ -31029,20 +31029,20 @@ var require_parseParams = __commonJS({
|
|
|
31029
31029
|
var require_basename = __commonJS({
|
|
31030
31030
|
"node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
|
|
31031
31031
|
"use strict";
|
|
31032
|
-
module2.exports = function basename4(
|
|
31033
|
-
if (typeof
|
|
31032
|
+
module2.exports = function basename4(path33) {
|
|
31033
|
+
if (typeof path33 !== "string") {
|
|
31034
31034
|
return "";
|
|
31035
31035
|
}
|
|
31036
|
-
for (var i =
|
|
31037
|
-
switch (
|
|
31036
|
+
for (var i = path33.length - 1; i >= 0; --i) {
|
|
31037
|
+
switch (path33.charCodeAt(i)) {
|
|
31038
31038
|
case 47:
|
|
31039
31039
|
// '/'
|
|
31040
31040
|
case 92:
|
|
31041
|
-
|
|
31042
|
-
return
|
|
31041
|
+
path33 = path33.slice(i + 1);
|
|
31042
|
+
return path33 === ".." || path33 === "." ? "" : path33;
|
|
31043
31043
|
}
|
|
31044
31044
|
}
|
|
31045
|
-
return
|
|
31045
|
+
return path33 === ".." || path33 === "." ? "" : path33;
|
|
31046
31046
|
};
|
|
31047
31047
|
}
|
|
31048
31048
|
});
|
|
@@ -32046,11 +32046,11 @@ var require_util2 = __commonJS({
|
|
|
32046
32046
|
var assert = require("assert");
|
|
32047
32047
|
var { isUint8Array } = require("util/types");
|
|
32048
32048
|
var supportedHashes = [];
|
|
32049
|
-
var
|
|
32049
|
+
var crypto9;
|
|
32050
32050
|
try {
|
|
32051
|
-
|
|
32051
|
+
crypto9 = require("crypto");
|
|
32052
32052
|
const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
|
|
32053
|
-
supportedHashes =
|
|
32053
|
+
supportedHashes = crypto9.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
|
|
32054
32054
|
} catch {
|
|
32055
32055
|
}
|
|
32056
32056
|
function responseURL(response) {
|
|
@@ -32327,7 +32327,7 @@ var require_util2 = __commonJS({
|
|
|
32327
32327
|
}
|
|
32328
32328
|
}
|
|
32329
32329
|
function bytesMatch(bytes, metadataList) {
|
|
32330
|
-
if (
|
|
32330
|
+
if (crypto9 === void 0) {
|
|
32331
32331
|
return true;
|
|
32332
32332
|
}
|
|
32333
32333
|
const parsedMetadata = parseMetadata(metadataList);
|
|
@@ -32342,7 +32342,7 @@ var require_util2 = __commonJS({
|
|
|
32342
32342
|
for (const item of metadata) {
|
|
32343
32343
|
const algorithm = item.algo;
|
|
32344
32344
|
const expectedValue = item.hash;
|
|
32345
|
-
let actualValue =
|
|
32345
|
+
let actualValue = crypto9.createHash(algorithm).update(bytes).digest("base64");
|
|
32346
32346
|
if (actualValue[actualValue.length - 1] === "=") {
|
|
32347
32347
|
if (actualValue[actualValue.length - 2] === "=") {
|
|
32348
32348
|
actualValue = actualValue.slice(0, -2);
|
|
@@ -32435,8 +32435,8 @@ var require_util2 = __commonJS({
|
|
|
32435
32435
|
function createDeferredPromise() {
|
|
32436
32436
|
let res;
|
|
32437
32437
|
let rej;
|
|
32438
|
-
const promise = new Promise((
|
|
32439
|
-
res =
|
|
32438
|
+
const promise = new Promise((resolve19, reject) => {
|
|
32439
|
+
res = resolve19;
|
|
32440
32440
|
rej = reject;
|
|
32441
32441
|
});
|
|
32442
32442
|
return { promise, resolve: res, reject: rej };
|
|
@@ -33689,8 +33689,8 @@ var require_body = __commonJS({
|
|
|
33689
33689
|
var { parseMIMEType, serializeAMimeType } = require_dataURL();
|
|
33690
33690
|
var random;
|
|
33691
33691
|
try {
|
|
33692
|
-
const
|
|
33693
|
-
random = (max) =>
|
|
33692
|
+
const crypto9 = require("crypto");
|
|
33693
|
+
random = (max) => crypto9.randomInt(0, max);
|
|
33694
33694
|
} catch {
|
|
33695
33695
|
random = (max) => Math.floor(Math.random(max));
|
|
33696
33696
|
}
|
|
@@ -33941,8 +33941,8 @@ Content-Type: ${value.type || "application/octet-stream"}\r
|
|
|
33941
33941
|
});
|
|
33942
33942
|
}
|
|
33943
33943
|
});
|
|
33944
|
-
const busboyResolve = new Promise((
|
|
33945
|
-
busboy.on("finish",
|
|
33944
|
+
const busboyResolve = new Promise((resolve19, reject) => {
|
|
33945
|
+
busboy.on("finish", resolve19);
|
|
33946
33946
|
busboy.on("error", (err) => reject(new TypeError(err)));
|
|
33947
33947
|
});
|
|
33948
33948
|
if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk);
|
|
@@ -34073,7 +34073,7 @@ var require_request = __commonJS({
|
|
|
34073
34073
|
}
|
|
34074
34074
|
var Request2 = class _Request {
|
|
34075
34075
|
constructor(origin, {
|
|
34076
|
-
path:
|
|
34076
|
+
path: path33,
|
|
34077
34077
|
method,
|
|
34078
34078
|
body,
|
|
34079
34079
|
headers,
|
|
@@ -34087,11 +34087,11 @@ var require_request = __commonJS({
|
|
|
34087
34087
|
throwOnError,
|
|
34088
34088
|
expectContinue
|
|
34089
34089
|
}, handler) {
|
|
34090
|
-
if (typeof
|
|
34090
|
+
if (typeof path33 !== "string") {
|
|
34091
34091
|
throw new InvalidArgumentError("path must be a string");
|
|
34092
|
-
} else if (
|
|
34092
|
+
} else if (path33[0] !== "/" && !(path33.startsWith("http://") || path33.startsWith("https://")) && method !== "CONNECT") {
|
|
34093
34093
|
throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
|
|
34094
|
-
} else if (invalidPathRegex.exec(
|
|
34094
|
+
} else if (invalidPathRegex.exec(path33) !== null) {
|
|
34095
34095
|
throw new InvalidArgumentError("invalid request path");
|
|
34096
34096
|
}
|
|
34097
34097
|
if (typeof method !== "string") {
|
|
@@ -34154,7 +34154,7 @@ var require_request = __commonJS({
|
|
|
34154
34154
|
this.completed = false;
|
|
34155
34155
|
this.aborted = false;
|
|
34156
34156
|
this.upgrade = upgrade || null;
|
|
34157
|
-
this.path = query ? util.buildURL(
|
|
34157
|
+
this.path = query ? util.buildURL(path33, query) : path33;
|
|
34158
34158
|
this.origin = origin;
|
|
34159
34159
|
this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
|
|
34160
34160
|
this.blocking = blocking == null ? false : blocking;
|
|
@@ -34476,9 +34476,9 @@ var require_dispatcher_base = __commonJS({
|
|
|
34476
34476
|
}
|
|
34477
34477
|
close(callback) {
|
|
34478
34478
|
if (callback === void 0) {
|
|
34479
|
-
return new Promise((
|
|
34479
|
+
return new Promise((resolve19, reject) => {
|
|
34480
34480
|
this.close((err, data) => {
|
|
34481
|
-
return err ? reject(err) :
|
|
34481
|
+
return err ? reject(err) : resolve19(data);
|
|
34482
34482
|
});
|
|
34483
34483
|
});
|
|
34484
34484
|
}
|
|
@@ -34516,12 +34516,12 @@ var require_dispatcher_base = __commonJS({
|
|
|
34516
34516
|
err = null;
|
|
34517
34517
|
}
|
|
34518
34518
|
if (callback === void 0) {
|
|
34519
|
-
return new Promise((
|
|
34519
|
+
return new Promise((resolve19, reject) => {
|
|
34520
34520
|
this.destroy(err, (err2, data) => {
|
|
34521
34521
|
return err2 ? (
|
|
34522
34522
|
/* istanbul ignore next: should never error */
|
|
34523
34523
|
reject(err2)
|
|
34524
|
-
) :
|
|
34524
|
+
) : resolve19(data);
|
|
34525
34525
|
});
|
|
34526
34526
|
});
|
|
34527
34527
|
}
|
|
@@ -35162,9 +35162,9 @@ var require_RedirectHandler = __commonJS({
|
|
|
35162
35162
|
return this.handler.onHeaders(statusCode, headers, resume, statusText);
|
|
35163
35163
|
}
|
|
35164
35164
|
const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
|
|
35165
|
-
const
|
|
35165
|
+
const path33 = search ? `${pathname}${search}` : pathname;
|
|
35166
35166
|
this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
|
|
35167
|
-
this.opts.path =
|
|
35167
|
+
this.opts.path = path33;
|
|
35168
35168
|
this.opts.origin = origin;
|
|
35169
35169
|
this.opts.maxRedirections = 0;
|
|
35170
35170
|
this.opts.query = null;
|
|
@@ -35583,16 +35583,16 @@ var require_client = __commonJS({
|
|
|
35583
35583
|
return this[kNeedDrain] < 2;
|
|
35584
35584
|
}
|
|
35585
35585
|
async [kClose]() {
|
|
35586
|
-
return new Promise((
|
|
35586
|
+
return new Promise((resolve19) => {
|
|
35587
35587
|
if (!this[kSize]) {
|
|
35588
|
-
|
|
35588
|
+
resolve19(null);
|
|
35589
35589
|
} else {
|
|
35590
|
-
this[kClosedResolve] =
|
|
35590
|
+
this[kClosedResolve] = resolve19;
|
|
35591
35591
|
}
|
|
35592
35592
|
});
|
|
35593
35593
|
}
|
|
35594
35594
|
async [kDestroy](err) {
|
|
35595
|
-
return new Promise((
|
|
35595
|
+
return new Promise((resolve19) => {
|
|
35596
35596
|
const requests = this[kQueue].splice(this[kPendingIdx]);
|
|
35597
35597
|
for (let i = 0; i < requests.length; i++) {
|
|
35598
35598
|
const request = requests[i];
|
|
@@ -35603,7 +35603,7 @@ var require_client = __commonJS({
|
|
|
35603
35603
|
this[kClosedResolve]();
|
|
35604
35604
|
this[kClosedResolve] = null;
|
|
35605
35605
|
}
|
|
35606
|
-
|
|
35606
|
+
resolve19();
|
|
35607
35607
|
};
|
|
35608
35608
|
if (this[kHTTP2Session] != null) {
|
|
35609
35609
|
util.destroy(this[kHTTP2Session], err);
|
|
@@ -36183,7 +36183,7 @@ var require_client = __commonJS({
|
|
|
36183
36183
|
});
|
|
36184
36184
|
}
|
|
36185
36185
|
try {
|
|
36186
|
-
const socket = await new Promise((
|
|
36186
|
+
const socket = await new Promise((resolve19, reject) => {
|
|
36187
36187
|
client[kConnector]({
|
|
36188
36188
|
host,
|
|
36189
36189
|
hostname,
|
|
@@ -36195,7 +36195,7 @@ var require_client = __commonJS({
|
|
|
36195
36195
|
if (err) {
|
|
36196
36196
|
reject(err);
|
|
36197
36197
|
} else {
|
|
36198
|
-
|
|
36198
|
+
resolve19(socket2);
|
|
36199
36199
|
}
|
|
36200
36200
|
});
|
|
36201
36201
|
});
|
|
@@ -36406,7 +36406,7 @@ var require_client = __commonJS({
|
|
|
36406
36406
|
writeH2(client, client[kHTTP2Session], request);
|
|
36407
36407
|
return;
|
|
36408
36408
|
}
|
|
36409
|
-
const { body, method, path:
|
|
36409
|
+
const { body, method, path: path33, host, upgrade, headers, blocking, reset } = request;
|
|
36410
36410
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
36411
36411
|
if (body && typeof body.read === "function") {
|
|
36412
36412
|
body.read(0);
|
|
@@ -36456,7 +36456,7 @@ var require_client = __commonJS({
|
|
|
36456
36456
|
if (blocking) {
|
|
36457
36457
|
socket[kBlocking] = true;
|
|
36458
36458
|
}
|
|
36459
|
-
let header = `${method} ${
|
|
36459
|
+
let header = `${method} ${path33} HTTP/1.1\r
|
|
36460
36460
|
`;
|
|
36461
36461
|
if (typeof host === "string") {
|
|
36462
36462
|
header += `host: ${host}\r
|
|
@@ -36519,7 +36519,7 @@ upgrade: ${upgrade}\r
|
|
|
36519
36519
|
return true;
|
|
36520
36520
|
}
|
|
36521
36521
|
function writeH2(client, session, request) {
|
|
36522
|
-
const { body, method, path:
|
|
36522
|
+
const { body, method, path: path33, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
|
|
36523
36523
|
let headers;
|
|
36524
36524
|
if (typeof reqHeaders === "string") headers = Request2[kHTTP2CopyHeaders](reqHeaders.trim());
|
|
36525
36525
|
else headers = reqHeaders;
|
|
@@ -36562,7 +36562,7 @@ upgrade: ${upgrade}\r
|
|
|
36562
36562
|
});
|
|
36563
36563
|
return true;
|
|
36564
36564
|
}
|
|
36565
|
-
headers[HTTP2_HEADER_PATH] =
|
|
36565
|
+
headers[HTTP2_HEADER_PATH] = path33;
|
|
36566
36566
|
headers[HTTP2_HEADER_SCHEME] = "https";
|
|
36567
36567
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
36568
36568
|
if (body && typeof body.read === "function") {
|
|
@@ -36819,12 +36819,12 @@ upgrade: ${upgrade}\r
|
|
|
36819
36819
|
cb();
|
|
36820
36820
|
}
|
|
36821
36821
|
}
|
|
36822
|
-
const waitForDrain = () => new Promise((
|
|
36822
|
+
const waitForDrain = () => new Promise((resolve19, reject) => {
|
|
36823
36823
|
assert(callback === null);
|
|
36824
36824
|
if (socket[kError]) {
|
|
36825
36825
|
reject(socket[kError]);
|
|
36826
36826
|
} else {
|
|
36827
|
-
callback =
|
|
36827
|
+
callback = resolve19;
|
|
36828
36828
|
}
|
|
36829
36829
|
});
|
|
36830
36830
|
if (client[kHTTPConnVersion] === "h2") {
|
|
@@ -37170,8 +37170,8 @@ var require_pool_base = __commonJS({
|
|
|
37170
37170
|
if (this[kQueue].isEmpty()) {
|
|
37171
37171
|
return Promise.all(this[kClients].map((c) => c.close()));
|
|
37172
37172
|
} else {
|
|
37173
|
-
return new Promise((
|
|
37174
|
-
this[kClosedResolve] =
|
|
37173
|
+
return new Promise((resolve19) => {
|
|
37174
|
+
this[kClosedResolve] = resolve19;
|
|
37175
37175
|
});
|
|
37176
37176
|
}
|
|
37177
37177
|
}
|
|
@@ -37749,7 +37749,7 @@ var require_readable = __commonJS({
|
|
|
37749
37749
|
if (this.closed) {
|
|
37750
37750
|
return Promise.resolve(null);
|
|
37751
37751
|
}
|
|
37752
|
-
return new Promise((
|
|
37752
|
+
return new Promise((resolve19, reject) => {
|
|
37753
37753
|
const signalListenerCleanup = signal ? util.addAbortListener(signal, () => {
|
|
37754
37754
|
this.destroy();
|
|
37755
37755
|
}) : noop;
|
|
@@ -37758,7 +37758,7 @@ var require_readable = __commonJS({
|
|
|
37758
37758
|
if (signal && signal.aborted) {
|
|
37759
37759
|
reject(signal.reason || Object.assign(new Error("The operation was aborted"), { name: "AbortError" }));
|
|
37760
37760
|
} else {
|
|
37761
|
-
|
|
37761
|
+
resolve19(null);
|
|
37762
37762
|
}
|
|
37763
37763
|
}).on("error", noop).on("data", function(chunk) {
|
|
37764
37764
|
limit -= chunk.length;
|
|
@@ -37780,11 +37780,11 @@ var require_readable = __commonJS({
|
|
|
37780
37780
|
throw new TypeError("unusable");
|
|
37781
37781
|
}
|
|
37782
37782
|
assert(!stream[kConsume]);
|
|
37783
|
-
return new Promise((
|
|
37783
|
+
return new Promise((resolve19, reject) => {
|
|
37784
37784
|
stream[kConsume] = {
|
|
37785
37785
|
type,
|
|
37786
37786
|
stream,
|
|
37787
|
-
resolve:
|
|
37787
|
+
resolve: resolve19,
|
|
37788
37788
|
reject,
|
|
37789
37789
|
length: 0,
|
|
37790
37790
|
body: []
|
|
@@ -37819,12 +37819,12 @@ var require_readable = __commonJS({
|
|
|
37819
37819
|
}
|
|
37820
37820
|
}
|
|
37821
37821
|
function consumeEnd(consume2) {
|
|
37822
|
-
const { type, body, resolve:
|
|
37822
|
+
const { type, body, resolve: resolve19, stream, length } = consume2;
|
|
37823
37823
|
try {
|
|
37824
37824
|
if (type === "text") {
|
|
37825
|
-
|
|
37825
|
+
resolve19(toUSVString(Buffer.concat(body)));
|
|
37826
37826
|
} else if (type === "json") {
|
|
37827
|
-
|
|
37827
|
+
resolve19(JSON.parse(Buffer.concat(body)));
|
|
37828
37828
|
} else if (type === "arrayBuffer") {
|
|
37829
37829
|
const dst = new Uint8Array(length);
|
|
37830
37830
|
let pos = 0;
|
|
@@ -37832,12 +37832,12 @@ var require_readable = __commonJS({
|
|
|
37832
37832
|
dst.set(buf, pos);
|
|
37833
37833
|
pos += buf.byteLength;
|
|
37834
37834
|
}
|
|
37835
|
-
|
|
37835
|
+
resolve19(dst.buffer);
|
|
37836
37836
|
} else if (type === "blob") {
|
|
37837
37837
|
if (!Blob2) {
|
|
37838
37838
|
Blob2 = require("buffer").Blob;
|
|
37839
37839
|
}
|
|
37840
|
-
|
|
37840
|
+
resolve19(new Blob2(body, { type: stream[kContentType] }));
|
|
37841
37841
|
}
|
|
37842
37842
|
consumeFinish(consume2);
|
|
37843
37843
|
} catch (err) {
|
|
@@ -38094,9 +38094,9 @@ var require_api_request = __commonJS({
|
|
|
38094
38094
|
};
|
|
38095
38095
|
function request(opts, callback) {
|
|
38096
38096
|
if (callback === void 0) {
|
|
38097
|
-
return new Promise((
|
|
38097
|
+
return new Promise((resolve19, reject) => {
|
|
38098
38098
|
request.call(this, opts, (err, data) => {
|
|
38099
|
-
return err ? reject(err) :
|
|
38099
|
+
return err ? reject(err) : resolve19(data);
|
|
38100
38100
|
});
|
|
38101
38101
|
});
|
|
38102
38102
|
}
|
|
@@ -38269,9 +38269,9 @@ var require_api_stream = __commonJS({
|
|
|
38269
38269
|
};
|
|
38270
38270
|
function stream(opts, factory, callback) {
|
|
38271
38271
|
if (callback === void 0) {
|
|
38272
|
-
return new Promise((
|
|
38272
|
+
return new Promise((resolve19, reject) => {
|
|
38273
38273
|
stream.call(this, opts, factory, (err, data) => {
|
|
38274
|
-
return err ? reject(err) :
|
|
38274
|
+
return err ? reject(err) : resolve19(data);
|
|
38275
38275
|
});
|
|
38276
38276
|
});
|
|
38277
38277
|
}
|
|
@@ -38552,9 +38552,9 @@ var require_api_upgrade = __commonJS({
|
|
|
38552
38552
|
};
|
|
38553
38553
|
function upgrade(opts, callback) {
|
|
38554
38554
|
if (callback === void 0) {
|
|
38555
|
-
return new Promise((
|
|
38555
|
+
return new Promise((resolve19, reject) => {
|
|
38556
38556
|
upgrade.call(this, opts, (err, data) => {
|
|
38557
|
-
return err ? reject(err) :
|
|
38557
|
+
return err ? reject(err) : resolve19(data);
|
|
38558
38558
|
});
|
|
38559
38559
|
});
|
|
38560
38560
|
}
|
|
@@ -38643,9 +38643,9 @@ var require_api_connect = __commonJS({
|
|
|
38643
38643
|
};
|
|
38644
38644
|
function connect(opts, callback) {
|
|
38645
38645
|
if (callback === void 0) {
|
|
38646
|
-
return new Promise((
|
|
38646
|
+
return new Promise((resolve19, reject) => {
|
|
38647
38647
|
connect.call(this, opts, (err, data) => {
|
|
38648
|
-
return err ? reject(err) :
|
|
38648
|
+
return err ? reject(err) : resolve19(data);
|
|
38649
38649
|
});
|
|
38650
38650
|
});
|
|
38651
38651
|
}
|
|
@@ -38805,20 +38805,20 @@ var require_mock_utils = __commonJS({
|
|
|
38805
38805
|
}
|
|
38806
38806
|
return true;
|
|
38807
38807
|
}
|
|
38808
|
-
function safeUrl(
|
|
38809
|
-
if (typeof
|
|
38810
|
-
return
|
|
38808
|
+
function safeUrl(path33) {
|
|
38809
|
+
if (typeof path33 !== "string") {
|
|
38810
|
+
return path33;
|
|
38811
38811
|
}
|
|
38812
|
-
const pathSegments =
|
|
38812
|
+
const pathSegments = path33.split("?");
|
|
38813
38813
|
if (pathSegments.length !== 2) {
|
|
38814
|
-
return
|
|
38814
|
+
return path33;
|
|
38815
38815
|
}
|
|
38816
38816
|
const qp = new URLSearchParams(pathSegments.pop());
|
|
38817
38817
|
qp.sort();
|
|
38818
38818
|
return [...pathSegments, qp.toString()].join("?");
|
|
38819
38819
|
}
|
|
38820
|
-
function matchKey(mockDispatch2, { path:
|
|
38821
|
-
const pathMatch = matchValue(mockDispatch2.path,
|
|
38820
|
+
function matchKey(mockDispatch2, { path: path33, method, body, headers }) {
|
|
38821
|
+
const pathMatch = matchValue(mockDispatch2.path, path33);
|
|
38822
38822
|
const methodMatch = matchValue(mockDispatch2.method, method);
|
|
38823
38823
|
const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
|
|
38824
38824
|
const headersMatch = matchHeaders(mockDispatch2, headers);
|
|
@@ -38836,7 +38836,7 @@ var require_mock_utils = __commonJS({
|
|
|
38836
38836
|
function getMockDispatch(mockDispatches, key) {
|
|
38837
38837
|
const basePath = key.query ? buildURL(key.path, key.query) : key.path;
|
|
38838
38838
|
const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
|
|
38839
|
-
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path:
|
|
38839
|
+
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path33 }) => matchValue(safeUrl(path33), resolvedPath));
|
|
38840
38840
|
if (matchedMockDispatches.length === 0) {
|
|
38841
38841
|
throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
|
|
38842
38842
|
}
|
|
@@ -38873,9 +38873,9 @@ var require_mock_utils = __commonJS({
|
|
|
38873
38873
|
}
|
|
38874
38874
|
}
|
|
38875
38875
|
function buildKey(opts) {
|
|
38876
|
-
const { path:
|
|
38876
|
+
const { path: path33, method, body, headers, query } = opts;
|
|
38877
38877
|
return {
|
|
38878
|
-
path:
|
|
38878
|
+
path: path33,
|
|
38879
38879
|
method,
|
|
38880
38880
|
body,
|
|
38881
38881
|
headers,
|
|
@@ -39324,10 +39324,10 @@ var require_pending_interceptors_formatter = __commonJS({
|
|
|
39324
39324
|
}
|
|
39325
39325
|
format(pendingInterceptors) {
|
|
39326
39326
|
const withPrettyHeaders = pendingInterceptors.map(
|
|
39327
|
-
({ method, path:
|
|
39327
|
+
({ method, path: path33, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
|
|
39328
39328
|
Method: method,
|
|
39329
39329
|
Origin: origin,
|
|
39330
|
-
Path:
|
|
39330
|
+
Path: path33,
|
|
39331
39331
|
"Status code": statusCode,
|
|
39332
39332
|
Persistent: persist ? "\u2705" : "\u274C",
|
|
39333
39333
|
Invocations: timesInvoked,
|
|
@@ -42268,7 +42268,7 @@ var require_fetch = __commonJS({
|
|
|
42268
42268
|
async function dispatch({ body }) {
|
|
42269
42269
|
const url = requestCurrentURL(request);
|
|
42270
42270
|
const agent = fetchParams.controller.dispatcher;
|
|
42271
|
-
return new Promise((
|
|
42271
|
+
return new Promise((resolve19, reject) => agent.dispatch(
|
|
42272
42272
|
{
|
|
42273
42273
|
path: url.pathname + url.search,
|
|
42274
42274
|
origin: url.origin,
|
|
@@ -42344,7 +42344,7 @@ var require_fetch = __commonJS({
|
|
|
42344
42344
|
}
|
|
42345
42345
|
}
|
|
42346
42346
|
}
|
|
42347
|
-
|
|
42347
|
+
resolve19({
|
|
42348
42348
|
status,
|
|
42349
42349
|
statusText,
|
|
42350
42350
|
headersList: headers[kHeadersList],
|
|
@@ -42387,7 +42387,7 @@ var require_fetch = __commonJS({
|
|
|
42387
42387
|
const val = headersList[n + 1].toString("latin1");
|
|
42388
42388
|
headers[kHeadersList].append(key, val);
|
|
42389
42389
|
}
|
|
42390
|
-
|
|
42390
|
+
resolve19({
|
|
42391
42391
|
status,
|
|
42392
42392
|
statusText: STATUS_CODES[status],
|
|
42393
42393
|
headersList: headers[kHeadersList],
|
|
@@ -43948,8 +43948,8 @@ var require_util6 = __commonJS({
|
|
|
43948
43948
|
}
|
|
43949
43949
|
}
|
|
43950
43950
|
}
|
|
43951
|
-
function validateCookiePath(
|
|
43952
|
-
for (const char of
|
|
43951
|
+
function validateCookiePath(path33) {
|
|
43952
|
+
for (const char of path33) {
|
|
43953
43953
|
const code = char.charCodeAt(0);
|
|
43954
43954
|
if (code < 33 || char === ";") {
|
|
43955
43955
|
throw new Error("Invalid cookie path");
|
|
@@ -44746,9 +44746,9 @@ var require_connection = __commonJS({
|
|
|
44746
44746
|
channels.open = diagnosticsChannel.channel("undici:websocket:open");
|
|
44747
44747
|
channels.close = diagnosticsChannel.channel("undici:websocket:close");
|
|
44748
44748
|
channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error");
|
|
44749
|
-
var
|
|
44749
|
+
var crypto9;
|
|
44750
44750
|
try {
|
|
44751
|
-
|
|
44751
|
+
crypto9 = require("crypto");
|
|
44752
44752
|
} catch {
|
|
44753
44753
|
}
|
|
44754
44754
|
function establishWebSocketConnection(url, protocols, ws, onEstablish, options) {
|
|
@@ -44767,7 +44767,7 @@ var require_connection = __commonJS({
|
|
|
44767
44767
|
const headersList = new Headers(options.headers)[kHeadersList];
|
|
44768
44768
|
request.headersList = headersList;
|
|
44769
44769
|
}
|
|
44770
|
-
const keyValue =
|
|
44770
|
+
const keyValue = crypto9.randomBytes(16).toString("base64");
|
|
44771
44771
|
request.headersList.append("sec-websocket-key", keyValue);
|
|
44772
44772
|
request.headersList.append("sec-websocket-version", "13");
|
|
44773
44773
|
for (const protocol of protocols) {
|
|
@@ -44796,7 +44796,7 @@ var require_connection = __commonJS({
|
|
|
44796
44796
|
return;
|
|
44797
44797
|
}
|
|
44798
44798
|
const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
|
|
44799
|
-
const digest =
|
|
44799
|
+
const digest = crypto9.createHash("sha1").update(keyValue + uid).digest("base64");
|
|
44800
44800
|
if (secWSAccept !== digest) {
|
|
44801
44801
|
failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
|
|
44802
44802
|
return;
|
|
@@ -44876,9 +44876,9 @@ var require_frame = __commonJS({
|
|
|
44876
44876
|
"node_modules/undici/lib/websocket/frame.js"(exports2, module2) {
|
|
44877
44877
|
"use strict";
|
|
44878
44878
|
var { maxUnsigned16Bit } = require_constants5();
|
|
44879
|
-
var
|
|
44879
|
+
var crypto9;
|
|
44880
44880
|
try {
|
|
44881
|
-
|
|
44881
|
+
crypto9 = require("crypto");
|
|
44882
44882
|
} catch {
|
|
44883
44883
|
}
|
|
44884
44884
|
var WebsocketFrameSend = class {
|
|
@@ -44887,7 +44887,7 @@ var require_frame = __commonJS({
|
|
|
44887
44887
|
*/
|
|
44888
44888
|
constructor(data) {
|
|
44889
44889
|
this.frameData = data;
|
|
44890
|
-
this.maskKey =
|
|
44890
|
+
this.maskKey = crypto9.randomBytes(4);
|
|
44891
44891
|
}
|
|
44892
44892
|
createFrame(opcode) {
|
|
44893
44893
|
const bodyLength = this.frameData?.byteLength ?? 0;
|
|
@@ -45629,11 +45629,11 @@ var require_undici = __commonJS({
|
|
|
45629
45629
|
if (typeof opts.path !== "string") {
|
|
45630
45630
|
throw new InvalidArgumentError("invalid opts.path");
|
|
45631
45631
|
}
|
|
45632
|
-
let
|
|
45632
|
+
let path33 = opts.path;
|
|
45633
45633
|
if (!opts.path.startsWith("/")) {
|
|
45634
|
-
|
|
45634
|
+
path33 = `/${path33}`;
|
|
45635
45635
|
}
|
|
45636
|
-
url = new URL(util.parseOrigin(url).origin +
|
|
45636
|
+
url = new URL(util.parseOrigin(url).origin + path33);
|
|
45637
45637
|
} else {
|
|
45638
45638
|
if (!opts) {
|
|
45639
45639
|
opts = typeof url === "object" ? url : {};
|
|
@@ -46202,7 +46202,7 @@ var init_mcp_check_provider = __esm({
|
|
|
46202
46202
|
logger.warn(
|
|
46203
46203
|
`MCP ${transportName} failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${error instanceof Error ? error.message : String(error)}`
|
|
46204
46204
|
);
|
|
46205
|
-
await new Promise((
|
|
46205
|
+
await new Promise((resolve19) => setTimeout(resolve19, delay));
|
|
46206
46206
|
attempt += 1;
|
|
46207
46207
|
} finally {
|
|
46208
46208
|
try {
|
|
@@ -46495,7 +46495,7 @@ async function acquirePromptLock() {
|
|
|
46495
46495
|
);
|
|
46496
46496
|
}, 1e4);
|
|
46497
46497
|
try {
|
|
46498
|
-
await new Promise((
|
|
46498
|
+
await new Promise((resolve19) => waiters.push(resolve19));
|
|
46499
46499
|
} finally {
|
|
46500
46500
|
clearInterval(reminder);
|
|
46501
46501
|
const waitedMs = Date.now() - queuedAt;
|
|
@@ -46514,7 +46514,7 @@ function releasePromptLock() {
|
|
|
46514
46514
|
}
|
|
46515
46515
|
async function interactivePrompt(options) {
|
|
46516
46516
|
await acquirePromptLock();
|
|
46517
|
-
return new Promise((
|
|
46517
|
+
return new Promise((resolve19, reject) => {
|
|
46518
46518
|
const dbg = process.env.VISOR_DEBUG === "true";
|
|
46519
46519
|
try {
|
|
46520
46520
|
if (dbg) {
|
|
@@ -46601,12 +46601,12 @@ async function interactivePrompt(options) {
|
|
|
46601
46601
|
};
|
|
46602
46602
|
const finish = (value) => {
|
|
46603
46603
|
cleanup();
|
|
46604
|
-
|
|
46604
|
+
resolve19(value);
|
|
46605
46605
|
};
|
|
46606
46606
|
if (options.timeout && options.timeout > 0) {
|
|
46607
46607
|
timeoutId = setTimeout(() => {
|
|
46608
46608
|
cleanup();
|
|
46609
|
-
if (defaultValue !== void 0) return
|
|
46609
|
+
if (defaultValue !== void 0) return resolve19(defaultValue);
|
|
46610
46610
|
return reject(new Error("Input timeout"));
|
|
46611
46611
|
}, options.timeout);
|
|
46612
46612
|
}
|
|
@@ -46738,7 +46738,7 @@ async function interactivePrompt(options) {
|
|
|
46738
46738
|
});
|
|
46739
46739
|
}
|
|
46740
46740
|
async function simplePrompt(prompt) {
|
|
46741
|
-
return new Promise((
|
|
46741
|
+
return new Promise((resolve19) => {
|
|
46742
46742
|
const rl = readline.createInterface({
|
|
46743
46743
|
input: process.stdin,
|
|
46744
46744
|
output: process.stdout
|
|
@@ -46754,7 +46754,7 @@ async function simplePrompt(prompt) {
|
|
|
46754
46754
|
rl.question(`${prompt}
|
|
46755
46755
|
> `, (answer) => {
|
|
46756
46756
|
rl.close();
|
|
46757
|
-
|
|
46757
|
+
resolve19(answer.trim());
|
|
46758
46758
|
});
|
|
46759
46759
|
});
|
|
46760
46760
|
}
|
|
@@ -46922,7 +46922,7 @@ function isStdinAvailable() {
|
|
|
46922
46922
|
return !process.stdin.isTTY;
|
|
46923
46923
|
}
|
|
46924
46924
|
async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
46925
|
-
return new Promise((
|
|
46925
|
+
return new Promise((resolve19, reject) => {
|
|
46926
46926
|
let data = "";
|
|
46927
46927
|
let timeoutId;
|
|
46928
46928
|
if (timeout) {
|
|
@@ -46949,7 +46949,7 @@ async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
|
46949
46949
|
};
|
|
46950
46950
|
const onEnd = () => {
|
|
46951
46951
|
cleanup();
|
|
46952
|
-
|
|
46952
|
+
resolve19(data.trim());
|
|
46953
46953
|
};
|
|
46954
46954
|
const onError = (err) => {
|
|
46955
46955
|
cleanup();
|
|
@@ -48791,11 +48791,26 @@ var init_worktree_manager = __esm({
|
|
|
48791
48791
|
await this.deleteLocalBranches(worktreePath);
|
|
48792
48792
|
}
|
|
48793
48793
|
/**
|
|
48794
|
-
* Delete
|
|
48794
|
+
* Delete local branches in a worktree that are safe to remove.
|
|
48795
48795
|
* Worktrees are always used in detached HEAD state, so any local branches
|
|
48796
48796
|
* were unintentionally created and should be cleaned up.
|
|
48797
|
+
* IMPORTANT: Git worktrees share the branch namespace with the main repo
|
|
48798
|
+
* and all other worktrees. We must NOT delete branches that are checked out
|
|
48799
|
+
* in the main working tree or any other worktree — doing so would destroy
|
|
48800
|
+
* the user's work.
|
|
48797
48801
|
*/
|
|
48798
48802
|
async deleteLocalBranches(worktreePath) {
|
|
48803
|
+
const worktreeListCmd = `git -C ${this.escapeShellArg(worktreePath)} worktree list --porcelain`;
|
|
48804
|
+
const worktreeListResult = await this.executeGitCommand(worktreeListCmd, { timeout: 1e4 });
|
|
48805
|
+
const protectedBranches = /* @__PURE__ */ new Set();
|
|
48806
|
+
if (worktreeListResult.exitCode === 0) {
|
|
48807
|
+
for (const line of worktreeListResult.stdout.split("\n")) {
|
|
48808
|
+
const match = line.match(/^branch refs\/heads\/(.+)$/);
|
|
48809
|
+
if (match) {
|
|
48810
|
+
protectedBranches.add(match[1]);
|
|
48811
|
+
}
|
|
48812
|
+
}
|
|
48813
|
+
}
|
|
48799
48814
|
const listCmd = `git -C ${this.escapeShellArg(worktreePath)} branch --list --format='%(refname:short)'`;
|
|
48800
48815
|
const listResult = await this.executeGitCommand(listCmd, { timeout: 1e4 });
|
|
48801
48816
|
if (listResult.exitCode !== 0 || !listResult.stdout.trim()) {
|
|
@@ -48803,6 +48818,10 @@ var init_worktree_manager = __esm({
|
|
|
48803
48818
|
}
|
|
48804
48819
|
const branches = listResult.stdout.trim().split("\n").map((b) => b.trim()).filter((b) => b.length > 0);
|
|
48805
48820
|
for (const branch of branches) {
|
|
48821
|
+
if (protectedBranches.has(branch)) {
|
|
48822
|
+
logger.debug(`Skipping branch '${branch}' \u2014 checked out in another worktree`);
|
|
48823
|
+
continue;
|
|
48824
|
+
}
|
|
48806
48825
|
const deleteCmd = `git -C ${this.escapeShellArg(worktreePath)} branch -D ${this.escapeShellArg(branch)}`;
|
|
48807
48826
|
const deleteResult = await this.executeGitCommand(deleteCmd, { timeout: 1e4 });
|
|
48808
48827
|
if (deleteResult.exitCode === 0) {
|
|
@@ -50307,7 +50326,7 @@ async function executeCheckWithForEachItems(checkId, forEachParent, forEachItems
|
|
|
50307
50326
|
workflowInputs,
|
|
50308
50327
|
ai: {
|
|
50309
50328
|
...checkConfig.ai || {},
|
|
50310
|
-
timeout: checkConfig.ai?.timeout || 18e5,
|
|
50329
|
+
timeout: checkConfig.timeout || checkConfig.ai?.timeout || 18e5,
|
|
50311
50330
|
debug: !!context2.debug
|
|
50312
50331
|
}
|
|
50313
50332
|
};
|
|
@@ -50394,7 +50413,7 @@ async function executeCheckWithForEachItems(checkId, forEachParent, forEachItems
|
|
|
50394
50413
|
context2,
|
|
50395
50414
|
prInfo,
|
|
50396
50415
|
dependencyResults,
|
|
50397
|
-
checkConfig.ai?.timeout || 18e5,
|
|
50416
|
+
checkConfig.timeout || checkConfig.ai?.timeout || 18e5,
|
|
50398
50417
|
() => provider.execute(prInfo, providerConfig, dependencyResults, executionContext)
|
|
50399
50418
|
);
|
|
50400
50419
|
try {
|
|
@@ -50810,7 +50829,7 @@ async function executeInvocation(item, context2, scope, prInfo, dependencyResult
|
|
|
50810
50829
|
__outputHistory: outputHistory,
|
|
50811
50830
|
ai: {
|
|
50812
50831
|
...stepConfig.ai || {},
|
|
50813
|
-
timeout: stepConfig.ai?.timeout || 18e5,
|
|
50832
|
+
timeout: stepConfig.timeout || stepConfig.ai?.timeout || 18e5,
|
|
50814
50833
|
debug: !!context2.debug
|
|
50815
50834
|
}
|
|
50816
50835
|
};
|
|
@@ -51334,7 +51353,7 @@ async function executeSingleCheck(checkId, context2, state, emitEvent, transitio
|
|
|
51334
51353
|
workflowInputs,
|
|
51335
51354
|
ai: {
|
|
51336
51355
|
...checkConfig.ai || {},
|
|
51337
|
-
timeout: checkConfig.ai?.timeout || 18e5,
|
|
51356
|
+
timeout: checkConfig.timeout || checkConfig.ai?.timeout || 18e5,
|
|
51338
51357
|
debug: !!context2.debug
|
|
51339
51358
|
}
|
|
51340
51359
|
};
|
|
@@ -51435,7 +51454,7 @@ async function executeSingleCheck(checkId, context2, state, emitEvent, transitio
|
|
|
51435
51454
|
context2,
|
|
51436
51455
|
prInfo,
|
|
51437
51456
|
dependencyResults,
|
|
51438
|
-
checkConfig.ai?.timeout || 18e5,
|
|
51457
|
+
checkConfig.timeout || checkConfig.ai?.timeout || 18e5,
|
|
51439
51458
|
() => provider.execute(prInfo, providerConfig, dependencyResults, executionContext)
|
|
51440
51459
|
);
|
|
51441
51460
|
try {
|
|
@@ -51649,23 +51668,23 @@ __export(renderer_schema_exports, {
|
|
|
51649
51668
|
});
|
|
51650
51669
|
async function loadRendererSchema(name) {
|
|
51651
51670
|
try {
|
|
51652
|
-
const
|
|
51653
|
-
const
|
|
51671
|
+
const fs29 = await import("fs/promises");
|
|
51672
|
+
const path33 = await import("path");
|
|
51654
51673
|
const sanitized = String(name).replace(/[^a-zA-Z0-9-]/g, "");
|
|
51655
51674
|
if (!sanitized) return void 0;
|
|
51656
51675
|
const candidates = [
|
|
51657
51676
|
// When bundled with ncc, __dirname is dist/ and output/ is at dist/output/
|
|
51658
|
-
|
|
51677
|
+
path33.join(__dirname, "output", sanitized, "schema.json"),
|
|
51659
51678
|
// When running from source, __dirname is src/state-machine/dispatch/ and output/ is at output/
|
|
51660
|
-
|
|
51679
|
+
path33.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
|
|
51661
51680
|
// When running from a checkout with output/ folder copied to CWD
|
|
51662
|
-
|
|
51681
|
+
path33.join(process.cwd(), "output", sanitized, "schema.json"),
|
|
51663
51682
|
// Fallback: cwd/dist/output/
|
|
51664
|
-
|
|
51683
|
+
path33.join(process.cwd(), "dist", "output", sanitized, "schema.json")
|
|
51665
51684
|
];
|
|
51666
51685
|
for (const p of candidates) {
|
|
51667
51686
|
try {
|
|
51668
|
-
const raw = await
|
|
51687
|
+
const raw = await fs29.readFile(p, "utf-8");
|
|
51669
51688
|
return JSON.parse(raw);
|
|
51670
51689
|
} catch {
|
|
51671
51690
|
}
|
|
@@ -52207,7 +52226,7 @@ async function executeCheckWithForEachItems2(checkId, forEachParent, forEachItem
|
|
|
52207
52226
|
workflowInputs,
|
|
52208
52227
|
ai: {
|
|
52209
52228
|
...checkConfig.ai || {},
|
|
52210
|
-
timeout: checkConfig.ai?.timeout || 18e5,
|
|
52229
|
+
timeout: checkConfig.timeout || checkConfig.ai?.timeout || 18e5,
|
|
52211
52230
|
debug: !!context2.debug
|
|
52212
52231
|
}
|
|
52213
52232
|
};
|
|
@@ -52242,7 +52261,10 @@ async function executeCheckWithForEachItems2(checkId, forEachParent, forEachItem
|
|
|
52242
52261
|
`[LevelDispatch] Conversation extracted (${conv?.transport || "unknown"}): ${messageCount} messages`
|
|
52243
52262
|
);
|
|
52244
52263
|
}
|
|
52245
|
-
const transportCtx = slackConv ? { slack: { event: event || {}, conversation: slackConv } } : {
|
|
52264
|
+
const transportCtx = slackConv ? { slack: { event: event || {}, conversation: slackConv } } : {
|
|
52265
|
+
telegram: { event: event || {}, conversation: telegramConv },
|
|
52266
|
+
webhook: payload
|
|
52267
|
+
};
|
|
52246
52268
|
providerConfig.eventContext = {
|
|
52247
52269
|
...providerConfig.eventContext,
|
|
52248
52270
|
...transportCtx,
|
|
@@ -52406,7 +52428,7 @@ async function executeCheckWithForEachItems2(checkId, forEachParent, forEachItem
|
|
|
52406
52428
|
context2,
|
|
52407
52429
|
prInfo,
|
|
52408
52430
|
dependencyResults,
|
|
52409
|
-
checkConfig.ai?.timeout || 18e5,
|
|
52431
|
+
checkConfig.timeout || checkConfig.ai?.timeout || 18e5,
|
|
52410
52432
|
() => provider.execute(prInfo, providerConfig, dependencyResults, executionContext)
|
|
52411
52433
|
);
|
|
52412
52434
|
try {
|
|
@@ -53335,7 +53357,7 @@ async function executeSingleCheck2(checkId, context2, state, emitEvent, transiti
|
|
|
53335
53357
|
workflowInputs,
|
|
53336
53358
|
ai: {
|
|
53337
53359
|
...checkConfig2.ai || {},
|
|
53338
|
-
timeout: checkConfig2.ai?.timeout || 18e5,
|
|
53360
|
+
timeout: checkConfig2.timeout || checkConfig2.ai?.timeout || 18e5,
|
|
53339
53361
|
debug: !!context2.debug
|
|
53340
53362
|
}
|
|
53341
53363
|
};
|
|
@@ -53366,7 +53388,9 @@ async function executeSingleCheck2(checkId, context2, state, emitEvent, transiti
|
|
|
53366
53388
|
const event = payload?.event;
|
|
53367
53389
|
const messageCount = Array.isArray(conv?.messages) ? conv.messages.length : 0;
|
|
53368
53390
|
if (context2.debug) {
|
|
53369
|
-
logger.info(
|
|
53391
|
+
logger.info(
|
|
53392
|
+
`[LevelDispatch] Conversation extracted (${conv?.transport || "unknown"}): ${messageCount} messages`
|
|
53393
|
+
);
|
|
53370
53394
|
}
|
|
53371
53395
|
const transportCtx = slackConv ? { slack: { event: event || {}, conversation: slackConv } } : { telegram: { event: event || {}, conversation: telegramConv }, webhook: payload };
|
|
53372
53396
|
providerConfig.eventContext = {
|
|
@@ -53504,7 +53528,7 @@ async function executeSingleCheck2(checkId, context2, state, emitEvent, transiti
|
|
|
53504
53528
|
context2,
|
|
53505
53529
|
prInfo,
|
|
53506
53530
|
dependencyResults,
|
|
53507
|
-
checkConfig2.ai?.timeout || 18e5,
|
|
53531
|
+
checkConfig2.timeout || checkConfig2.ai?.timeout || 18e5,
|
|
53508
53532
|
() => provider.execute(prInfo, providerConfig, dependencyResults, executionContext)
|
|
53509
53533
|
);
|
|
53510
53534
|
try {
|
|
@@ -54110,8 +54134,8 @@ function updateStats2(results, state, isForEachIteration = false) {
|
|
|
54110
54134
|
async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
54111
54135
|
try {
|
|
54112
54136
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
54113
|
-
const
|
|
54114
|
-
const
|
|
54137
|
+
const fs29 = await import("fs/promises");
|
|
54138
|
+
const path33 = await import("path");
|
|
54115
54139
|
const schemaRaw = checkConfig.schema || "plain";
|
|
54116
54140
|
const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
|
|
54117
54141
|
let templateContent;
|
|
@@ -54120,27 +54144,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
|
54120
54144
|
logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
|
|
54121
54145
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
54122
54146
|
const file = String(checkConfig.template.file);
|
|
54123
|
-
const resolved =
|
|
54124
|
-
templateContent = await
|
|
54147
|
+
const resolved = path33.resolve(process.cwd(), file);
|
|
54148
|
+
templateContent = await fs29.readFile(resolved, "utf-8");
|
|
54125
54149
|
logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
|
|
54126
54150
|
} else if (schema && schema !== "plain") {
|
|
54127
54151
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
54128
54152
|
if (sanitized) {
|
|
54129
54153
|
const candidatePaths = [
|
|
54130
|
-
|
|
54154
|
+
path33.join(__dirname, "output", sanitized, "template.liquid"),
|
|
54131
54155
|
// bundled: dist/output/
|
|
54132
|
-
|
|
54156
|
+
path33.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
54133
54157
|
// source (from state-machine/states)
|
|
54134
|
-
|
|
54158
|
+
path33.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
|
|
54135
54159
|
// source (alternate)
|
|
54136
|
-
|
|
54160
|
+
path33.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
54137
54161
|
// fallback: cwd/output/
|
|
54138
|
-
|
|
54162
|
+
path33.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
54139
54163
|
// fallback: cwd/dist/output/
|
|
54140
54164
|
];
|
|
54141
54165
|
for (const p of candidatePaths) {
|
|
54142
54166
|
try {
|
|
54143
|
-
templateContent = await
|
|
54167
|
+
templateContent = await fs29.readFile(p, "utf-8");
|
|
54144
54168
|
if (templateContent) {
|
|
54145
54169
|
logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
|
|
54146
54170
|
break;
|
|
@@ -56280,8 +56304,8 @@ var init_workspace_manager = __esm({
|
|
|
56280
56304
|
);
|
|
56281
56305
|
if (this.cleanupRequested && this.activeOperations === 0) {
|
|
56282
56306
|
logger.debug(`[Workspace] All references released, proceeding with deferred cleanup`);
|
|
56283
|
-
for (const
|
|
56284
|
-
|
|
56307
|
+
for (const resolve19 of this.cleanupResolvers) {
|
|
56308
|
+
resolve19();
|
|
56285
56309
|
}
|
|
56286
56310
|
this.cleanupResolvers = [];
|
|
56287
56311
|
}
|
|
@@ -56460,19 +56484,19 @@ var init_workspace_manager = __esm({
|
|
|
56460
56484
|
);
|
|
56461
56485
|
this.cleanupRequested = true;
|
|
56462
56486
|
await Promise.race([
|
|
56463
|
-
new Promise((
|
|
56487
|
+
new Promise((resolve19) => {
|
|
56464
56488
|
if (this.activeOperations === 0) {
|
|
56465
|
-
|
|
56489
|
+
resolve19();
|
|
56466
56490
|
} else {
|
|
56467
|
-
this.cleanupResolvers.push(
|
|
56491
|
+
this.cleanupResolvers.push(resolve19);
|
|
56468
56492
|
}
|
|
56469
56493
|
}),
|
|
56470
|
-
new Promise((
|
|
56494
|
+
new Promise((resolve19) => {
|
|
56471
56495
|
setTimeout(() => {
|
|
56472
56496
|
logger.warn(
|
|
56473
56497
|
`[Workspace] Cleanup timeout after ${timeout}ms, proceeding anyway (${this.activeOperations} operations still active)`
|
|
56474
56498
|
);
|
|
56475
|
-
|
|
56499
|
+
resolve19();
|
|
56476
56500
|
}, timeout);
|
|
56477
56501
|
})
|
|
56478
56502
|
]);
|
|
@@ -56663,10 +56687,27 @@ var init_workspace_manager = __esm({
|
|
|
56663
56687
|
await this.deleteLocalBranches(worktreePath);
|
|
56664
56688
|
}
|
|
56665
56689
|
/**
|
|
56666
|
-
* Delete
|
|
56690
|
+
* Delete local branches in a worktree that are safe to remove.
|
|
56691
|
+
* IMPORTANT: Git worktrees share the branch namespace with the main repo
|
|
56692
|
+
* and all other worktrees. We must NOT delete branches that are checked out
|
|
56693
|
+
* in the main working tree or any other worktree — doing so would destroy
|
|
56694
|
+
* the user's work.
|
|
56667
56695
|
*/
|
|
56668
56696
|
async deleteLocalBranches(worktreePath) {
|
|
56669
56697
|
const escapedPath = shellEscape(worktreePath);
|
|
56698
|
+
const worktreeListResult = await commandExecutor.execute(
|
|
56699
|
+
`git -C ${escapedPath} worktree list --porcelain`,
|
|
56700
|
+
{ timeout: 1e4 }
|
|
56701
|
+
);
|
|
56702
|
+
const protectedBranches = /* @__PURE__ */ new Set();
|
|
56703
|
+
if (worktreeListResult.exitCode === 0) {
|
|
56704
|
+
for (const line of worktreeListResult.stdout.split("\n")) {
|
|
56705
|
+
const match = line.match(/^branch refs\/heads\/(.+)$/);
|
|
56706
|
+
if (match) {
|
|
56707
|
+
protectedBranches.add(match[1]);
|
|
56708
|
+
}
|
|
56709
|
+
}
|
|
56710
|
+
}
|
|
56670
56711
|
const listResult = await commandExecutor.execute(
|
|
56671
56712
|
`git -C ${escapedPath} branch --list --format='%(refname:short)'`,
|
|
56672
56713
|
{ timeout: 1e4 }
|
|
@@ -56676,6 +56717,10 @@ var init_workspace_manager = __esm({
|
|
|
56676
56717
|
}
|
|
56677
56718
|
const branches = listResult.stdout.trim().split("\n").map((b) => b.trim()).filter((b) => b.length > 0);
|
|
56678
56719
|
for (const branch of branches) {
|
|
56720
|
+
if (protectedBranches.has(branch)) {
|
|
56721
|
+
logger.debug(`[Workspace] Skipping branch '${branch}' \u2014 checked out in another worktree`);
|
|
56722
|
+
continue;
|
|
56723
|
+
}
|
|
56679
56724
|
const deleteResult = await commandExecutor.execute(
|
|
56680
56725
|
`git -C ${escapedPath} branch -D ${shellEscape(branch)}`,
|
|
56681
56726
|
{ timeout: 1e4 }
|
|
@@ -56866,8 +56911,8 @@ var init_fair_concurrency_limiter = __esm({
|
|
|
56866
56911
|
);
|
|
56867
56912
|
const queuedAt = Date.now();
|
|
56868
56913
|
const effectiveTimeout = queueTimeout ?? 12e4;
|
|
56869
|
-
return new Promise((
|
|
56870
|
-
const entry = { resolve:
|
|
56914
|
+
return new Promise((resolve19, reject) => {
|
|
56915
|
+
const entry = { resolve: resolve19, reject, queuedAt };
|
|
56871
56916
|
entry.reminder = setInterval(() => {
|
|
56872
56917
|
const waited = Math.round((Date.now() - queuedAt) / 1e3);
|
|
56873
56918
|
const curQueued = this._totalQueued();
|
|
@@ -57174,6 +57219,1380 @@ var init_build_engine_context = __esm({
|
|
|
57174
57219
|
}
|
|
57175
57220
|
});
|
|
57176
57221
|
|
|
57222
|
+
// src/policy/default-engine.ts
|
|
57223
|
+
var DefaultPolicyEngine;
|
|
57224
|
+
var init_default_engine = __esm({
|
|
57225
|
+
"src/policy/default-engine.ts"() {
|
|
57226
|
+
"use strict";
|
|
57227
|
+
DefaultPolicyEngine = class {
|
|
57228
|
+
async initialize(_config) {
|
|
57229
|
+
}
|
|
57230
|
+
async evaluateCheckExecution(_checkId, _checkConfig) {
|
|
57231
|
+
return { allowed: true };
|
|
57232
|
+
}
|
|
57233
|
+
async evaluateToolInvocation(_serverName, _methodName, _transport) {
|
|
57234
|
+
return { allowed: true };
|
|
57235
|
+
}
|
|
57236
|
+
async evaluateCapabilities(_checkId, _capabilities) {
|
|
57237
|
+
return { allowed: true };
|
|
57238
|
+
}
|
|
57239
|
+
async shutdown() {
|
|
57240
|
+
}
|
|
57241
|
+
};
|
|
57242
|
+
}
|
|
57243
|
+
});
|
|
57244
|
+
|
|
57245
|
+
// src/enterprise/license/validator.ts
|
|
57246
|
+
var validator_exports = {};
|
|
57247
|
+
__export(validator_exports, {
|
|
57248
|
+
LicenseValidator: () => LicenseValidator
|
|
57249
|
+
});
|
|
57250
|
+
var crypto3, fs21, path26, LicenseValidator;
|
|
57251
|
+
var init_validator = __esm({
|
|
57252
|
+
"src/enterprise/license/validator.ts"() {
|
|
57253
|
+
"use strict";
|
|
57254
|
+
crypto3 = __toESM(require("crypto"));
|
|
57255
|
+
fs21 = __toESM(require("fs"));
|
|
57256
|
+
path26 = __toESM(require("path"));
|
|
57257
|
+
LicenseValidator = class _LicenseValidator {
|
|
57258
|
+
/** Ed25519 public key for license verification (PEM format). */
|
|
57259
|
+
static PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n-----END PUBLIC KEY-----\n";
|
|
57260
|
+
cache = null;
|
|
57261
|
+
static CACHE_TTL = 5 * 60 * 1e3;
|
|
57262
|
+
// 5 minutes
|
|
57263
|
+
static GRACE_PERIOD = 72 * 3600 * 1e3;
|
|
57264
|
+
// 72 hours after expiry
|
|
57265
|
+
/**
|
|
57266
|
+
* Load and validate license from environment or file.
|
|
57267
|
+
*
|
|
57268
|
+
* Resolution order:
|
|
57269
|
+
* 1. VISOR_LICENSE env var (JWT string)
|
|
57270
|
+
* 2. VISOR_LICENSE_FILE env var (path to file)
|
|
57271
|
+
* 3. .visor-license in project root (cwd)
|
|
57272
|
+
* 4. .visor-license in ~/.config/visor/
|
|
57273
|
+
*/
|
|
57274
|
+
async loadAndValidate() {
|
|
57275
|
+
if (this.cache && Date.now() - this.cache.validatedAt < _LicenseValidator.CACHE_TTL) {
|
|
57276
|
+
return this.cache.payload;
|
|
57277
|
+
}
|
|
57278
|
+
const token = this.resolveToken();
|
|
57279
|
+
if (!token) return null;
|
|
57280
|
+
const payload = this.verifyAndDecode(token);
|
|
57281
|
+
if (!payload) return null;
|
|
57282
|
+
this.cache = { payload, validatedAt: Date.now() };
|
|
57283
|
+
return payload;
|
|
57284
|
+
}
|
|
57285
|
+
/** Check if a specific feature is licensed */
|
|
57286
|
+
hasFeature(feature) {
|
|
57287
|
+
if (!this.cache) return false;
|
|
57288
|
+
return this.cache.payload.features.includes(feature);
|
|
57289
|
+
}
|
|
57290
|
+
/** Check if license is valid (with grace period) */
|
|
57291
|
+
isValid() {
|
|
57292
|
+
if (!this.cache) return false;
|
|
57293
|
+
const now = Date.now();
|
|
57294
|
+
const expiryMs = this.cache.payload.exp * 1e3;
|
|
57295
|
+
return now < expiryMs + _LicenseValidator.GRACE_PERIOD;
|
|
57296
|
+
}
|
|
57297
|
+
/** Check if the license is within its grace period (expired but still valid) */
|
|
57298
|
+
isInGracePeriod() {
|
|
57299
|
+
if (!this.cache) return false;
|
|
57300
|
+
const now = Date.now();
|
|
57301
|
+
const expiryMs = this.cache.payload.exp * 1e3;
|
|
57302
|
+
return now >= expiryMs && now < expiryMs + _LicenseValidator.GRACE_PERIOD;
|
|
57303
|
+
}
|
|
57304
|
+
resolveToken() {
|
|
57305
|
+
if (process.env.VISOR_LICENSE) {
|
|
57306
|
+
return process.env.VISOR_LICENSE.trim();
|
|
57307
|
+
}
|
|
57308
|
+
if (process.env.VISOR_LICENSE_FILE) {
|
|
57309
|
+
const resolved = path26.resolve(process.env.VISOR_LICENSE_FILE);
|
|
57310
|
+
const home2 = process.env.HOME || process.env.USERPROFILE || "";
|
|
57311
|
+
const allowedPrefixes = [path26.normalize(process.cwd())];
|
|
57312
|
+
if (home2) allowedPrefixes.push(path26.normalize(path26.join(home2, ".config", "visor")));
|
|
57313
|
+
let realPath;
|
|
57314
|
+
try {
|
|
57315
|
+
realPath = fs21.realpathSync(resolved);
|
|
57316
|
+
} catch {
|
|
57317
|
+
return null;
|
|
57318
|
+
}
|
|
57319
|
+
const isSafe = allowedPrefixes.some(
|
|
57320
|
+
(prefix) => realPath === prefix || realPath.startsWith(prefix + path26.sep)
|
|
57321
|
+
);
|
|
57322
|
+
if (!isSafe) return null;
|
|
57323
|
+
return this.readFile(realPath);
|
|
57324
|
+
}
|
|
57325
|
+
const cwdPath = path26.join(process.cwd(), ".visor-license");
|
|
57326
|
+
const cwdToken = this.readFile(cwdPath);
|
|
57327
|
+
if (cwdToken) return cwdToken;
|
|
57328
|
+
const home = process.env.HOME || process.env.USERPROFILE || "";
|
|
57329
|
+
if (home) {
|
|
57330
|
+
const configPath = path26.join(home, ".config", "visor", ".visor-license");
|
|
57331
|
+
const configToken = this.readFile(configPath);
|
|
57332
|
+
if (configToken) return configToken;
|
|
57333
|
+
}
|
|
57334
|
+
return null;
|
|
57335
|
+
}
|
|
57336
|
+
readFile(filePath) {
|
|
57337
|
+
try {
|
|
57338
|
+
return fs21.readFileSync(filePath, "utf-8").trim();
|
|
57339
|
+
} catch {
|
|
57340
|
+
return null;
|
|
57341
|
+
}
|
|
57342
|
+
}
|
|
57343
|
+
verifyAndDecode(token) {
|
|
57344
|
+
try {
|
|
57345
|
+
const parts = token.split(".");
|
|
57346
|
+
if (parts.length !== 3) return null;
|
|
57347
|
+
const [headerB64, payloadB64, signatureB64] = parts;
|
|
57348
|
+
const header = JSON.parse(Buffer.from(headerB64, "base64url").toString());
|
|
57349
|
+
if (header.alg !== "EdDSA") return null;
|
|
57350
|
+
const data = `${headerB64}.${payloadB64}`;
|
|
57351
|
+
const signature = Buffer.from(signatureB64, "base64url");
|
|
57352
|
+
const publicKey = crypto3.createPublicKey(_LicenseValidator.PUBLIC_KEY);
|
|
57353
|
+
if (publicKey.asymmetricKeyType !== "ed25519") {
|
|
57354
|
+
return null;
|
|
57355
|
+
}
|
|
57356
|
+
const isValid = crypto3.verify(null, Buffer.from(data), publicKey, signature);
|
|
57357
|
+
if (!isValid) return null;
|
|
57358
|
+
const payload = JSON.parse(Buffer.from(payloadB64, "base64url").toString());
|
|
57359
|
+
if (!payload.org || !Array.isArray(payload.features) || typeof payload.exp !== "number" || typeof payload.iat !== "number" || !payload.sub) {
|
|
57360
|
+
return null;
|
|
57361
|
+
}
|
|
57362
|
+
const now = Date.now();
|
|
57363
|
+
const expiryMs = payload.exp * 1e3;
|
|
57364
|
+
if (now >= expiryMs + _LicenseValidator.GRACE_PERIOD) {
|
|
57365
|
+
return null;
|
|
57366
|
+
}
|
|
57367
|
+
return payload;
|
|
57368
|
+
} catch {
|
|
57369
|
+
return null;
|
|
57370
|
+
}
|
|
57371
|
+
}
|
|
57372
|
+
};
|
|
57373
|
+
}
|
|
57374
|
+
});
|
|
57375
|
+
|
|
57376
|
+
// src/enterprise/policy/opa-compiler.ts
|
|
57377
|
+
var fs22, path27, os2, crypto4, import_child_process8, OpaCompiler;
|
|
57378
|
+
var init_opa_compiler = __esm({
|
|
57379
|
+
"src/enterprise/policy/opa-compiler.ts"() {
|
|
57380
|
+
"use strict";
|
|
57381
|
+
fs22 = __toESM(require("fs"));
|
|
57382
|
+
path27 = __toESM(require("path"));
|
|
57383
|
+
os2 = __toESM(require("os"));
|
|
57384
|
+
crypto4 = __toESM(require("crypto"));
|
|
57385
|
+
import_child_process8 = require("child_process");
|
|
57386
|
+
OpaCompiler = class _OpaCompiler {
|
|
57387
|
+
static CACHE_DIR = path27.join(os2.tmpdir(), "visor-opa-cache");
|
|
57388
|
+
/**
|
|
57389
|
+
* Resolve the input paths to WASM bytes.
|
|
57390
|
+
*
|
|
57391
|
+
* Strategy:
|
|
57392
|
+
* 1. If any path is a .wasm file, read it directly
|
|
57393
|
+
* 2. If a directory contains policy.wasm, read it
|
|
57394
|
+
* 3. Otherwise, collect all .rego files and auto-compile via `opa build`
|
|
57395
|
+
*/
|
|
57396
|
+
async resolveWasmBytes(paths) {
|
|
57397
|
+
const regoFiles = [];
|
|
57398
|
+
for (const p of paths) {
|
|
57399
|
+
const resolved = path27.resolve(p);
|
|
57400
|
+
if (path27.normalize(resolved).includes("..")) {
|
|
57401
|
+
throw new Error(`Policy path contains traversal sequences: ${p}`);
|
|
57402
|
+
}
|
|
57403
|
+
if (resolved.endsWith(".wasm") && fs22.existsSync(resolved)) {
|
|
57404
|
+
return fs22.readFileSync(resolved);
|
|
57405
|
+
}
|
|
57406
|
+
if (!fs22.existsSync(resolved)) continue;
|
|
57407
|
+
const stat2 = fs22.statSync(resolved);
|
|
57408
|
+
if (stat2.isDirectory()) {
|
|
57409
|
+
const wasmCandidate = path27.join(resolved, "policy.wasm");
|
|
57410
|
+
if (fs22.existsSync(wasmCandidate)) {
|
|
57411
|
+
return fs22.readFileSync(wasmCandidate);
|
|
57412
|
+
}
|
|
57413
|
+
const files = fs22.readdirSync(resolved);
|
|
57414
|
+
for (const f of files) {
|
|
57415
|
+
if (f.endsWith(".rego")) {
|
|
57416
|
+
regoFiles.push(path27.join(resolved, f));
|
|
57417
|
+
}
|
|
57418
|
+
}
|
|
57419
|
+
} else if (resolved.endsWith(".rego")) {
|
|
57420
|
+
regoFiles.push(resolved);
|
|
57421
|
+
}
|
|
57422
|
+
}
|
|
57423
|
+
if (regoFiles.length === 0) {
|
|
57424
|
+
throw new Error(
|
|
57425
|
+
`OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(", ")}`
|
|
57426
|
+
);
|
|
57427
|
+
}
|
|
57428
|
+
return this.compileRego(regoFiles);
|
|
57429
|
+
}
|
|
57430
|
+
/**
|
|
57431
|
+
* Auto-compile .rego files to a WASM bundle using the `opa` CLI.
|
|
57432
|
+
*
|
|
57433
|
+
* Caches the compiled bundle based on a content hash of all input .rego files
|
|
57434
|
+
* so subsequent runs skip compilation if policies haven't changed.
|
|
57435
|
+
*/
|
|
57436
|
+
compileRego(regoFiles) {
|
|
57437
|
+
try {
|
|
57438
|
+
(0, import_child_process8.execFileSync)("opa", ["version"], { stdio: "pipe" });
|
|
57439
|
+
} catch {
|
|
57440
|
+
throw new Error(
|
|
57441
|
+
"OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\nOr pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz " + regoFiles.join(" ")
|
|
57442
|
+
);
|
|
57443
|
+
}
|
|
57444
|
+
const hash = crypto4.createHash("sha256");
|
|
57445
|
+
for (const f of regoFiles.sort()) {
|
|
57446
|
+
hash.update(fs22.readFileSync(f));
|
|
57447
|
+
hash.update(f);
|
|
57448
|
+
}
|
|
57449
|
+
const cacheKey = hash.digest("hex").slice(0, 16);
|
|
57450
|
+
const cacheDir = _OpaCompiler.CACHE_DIR;
|
|
57451
|
+
const cachedWasm = path27.join(cacheDir, `${cacheKey}.wasm`);
|
|
57452
|
+
if (fs22.existsSync(cachedWasm)) {
|
|
57453
|
+
return fs22.readFileSync(cachedWasm);
|
|
57454
|
+
}
|
|
57455
|
+
fs22.mkdirSync(cacheDir, { recursive: true });
|
|
57456
|
+
const bundleTar = path27.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
|
|
57457
|
+
try {
|
|
57458
|
+
const args = [
|
|
57459
|
+
"build",
|
|
57460
|
+
"-t",
|
|
57461
|
+
"wasm",
|
|
57462
|
+
"-e",
|
|
57463
|
+
"visor",
|
|
57464
|
+
// entrypoint: the visor package tree
|
|
57465
|
+
"-o",
|
|
57466
|
+
bundleTar,
|
|
57467
|
+
...regoFiles
|
|
57468
|
+
];
|
|
57469
|
+
(0, import_child_process8.execFileSync)("opa", args, {
|
|
57470
|
+
stdio: "pipe",
|
|
57471
|
+
timeout: 3e4
|
|
57472
|
+
});
|
|
57473
|
+
} catch (err) {
|
|
57474
|
+
const stderr = err?.stderr?.toString() || "";
|
|
57475
|
+
throw new Error(
|
|
57476
|
+
`Failed to compile .rego files to WASM:
|
|
57477
|
+
${stderr}
|
|
57478
|
+
Ensure your .rego files are valid and the \`opa\` CLI is installed.`
|
|
57479
|
+
);
|
|
57480
|
+
}
|
|
57481
|
+
try {
|
|
57482
|
+
(0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "/policy.wasm"], {
|
|
57483
|
+
stdio: "pipe"
|
|
57484
|
+
});
|
|
57485
|
+
const extractedWasm = path27.join(cacheDir, "policy.wasm");
|
|
57486
|
+
if (fs22.existsSync(extractedWasm)) {
|
|
57487
|
+
fs22.renameSync(extractedWasm, cachedWasm);
|
|
57488
|
+
}
|
|
57489
|
+
} catch {
|
|
57490
|
+
try {
|
|
57491
|
+
(0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "policy.wasm"], {
|
|
57492
|
+
stdio: "pipe"
|
|
57493
|
+
});
|
|
57494
|
+
const extractedWasm = path27.join(cacheDir, "policy.wasm");
|
|
57495
|
+
if (fs22.existsSync(extractedWasm)) {
|
|
57496
|
+
fs22.renameSync(extractedWasm, cachedWasm);
|
|
57497
|
+
}
|
|
57498
|
+
} catch (err2) {
|
|
57499
|
+
throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
|
|
57500
|
+
}
|
|
57501
|
+
}
|
|
57502
|
+
try {
|
|
57503
|
+
fs22.unlinkSync(bundleTar);
|
|
57504
|
+
} catch {
|
|
57505
|
+
}
|
|
57506
|
+
if (!fs22.existsSync(cachedWasm)) {
|
|
57507
|
+
throw new Error("OPA build succeeded but policy.wasm was not found in the bundle");
|
|
57508
|
+
}
|
|
57509
|
+
return fs22.readFileSync(cachedWasm);
|
|
57510
|
+
}
|
|
57511
|
+
};
|
|
57512
|
+
}
|
|
57513
|
+
});
|
|
57514
|
+
|
|
57515
|
+
// src/enterprise/policy/opa-wasm-evaluator.ts
// esbuild lazy-module wrapper: init_opa_wasm_evaluator() populates the
// module-level bindings below on first use.
var fs23, path28, OpaWasmEvaluator;
var init_opa_wasm_evaluator = __esm({
  "src/enterprise/policy/opa-wasm-evaluator.ts"() {
    "use strict";
    fs23 = __toESM(require("fs"));
    path28 = __toESM(require("path"));
    init_opa_compiler();
    // Evaluates OPA policies compiled to WebAssembly, fully in-process.
    // Lifecycle: initialize() -> [loadData()] -> evaluate()* -> shutdown().
    OpaWasmEvaluator = class {
      // Loaded opa-wasm policy instance; null until initialize() succeeds
      // and again after shutdown().
      policy = null;
      // External data document handed to policy.setData() before each evaluate().
      dataDocument = {};
      compiler = new OpaCompiler();
      /**
       * Resolve the given .rego/.wasm path(s) to WASM bytes (via OpaCompiler)
       * and load them with @open-policy-agent/opa-wasm.
       * Throws an actionable install hint if that optional package is missing;
       * all other load errors are rethrown unchanged.
       */
      async initialize(rulesPath) {
        const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
        const wasmBytes = await this.compiler.resolveWasmBytes(paths);
        try {
          // createRequire(__filename) resolves the package at runtime relative
          // to this bundle instead of being rewritten by the bundler.
          const { createRequire } = require("module");
          const runtimeRequire = createRequire(__filename);
          const opaWasm = runtimeRequire("@open-policy-agent/opa-wasm");
          // Support both CJS and ESM-interop shapes of the package export.
          const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
          if (!loadPolicy) {
            throw new Error("loadPolicy not found in @open-policy-agent/opa-wasm");
          }
          this.policy = await loadPolicy(wasmBytes);
        } catch (err) {
          // Missing optional dependency -> give the user the install command.
          if (err?.code === "MODULE_NOT_FOUND" || err?.code === "ERR_MODULE_NOT_FOUND") {
            throw new Error(
              "OPA WASM evaluator requires @open-policy-agent/opa-wasm. Install it with: npm install @open-policy-agent/opa-wasm"
            );
          }
          throw err;
        }
      }
      /**
       * Load external data from a JSON file to use as the OPA data document.
       * The loaded data will be passed to `policy.setData()` during evaluation,
       * making it available in Rego via `data.<key>`.
       *
       * Validates: path traversal, existence, a 10MB size cap, and that the
       * parsed value is a plain JSON object (not array/primitive).
       */
      loadData(dataPath) {
        const resolved = path28.resolve(dataPath);
        // NOTE(review): path.resolve() already collapses ".." segments, so this
        // normalize/includes guard looks like it can never fire — confirm intent.
        if (path28.normalize(resolved).includes("..")) {
          throw new Error(`Data path contains traversal sequences: ${dataPath}`);
        }
        if (!fs23.existsSync(resolved)) {
          throw new Error(`OPA data file not found: ${resolved}`);
        }
        const stat2 = fs23.statSync(resolved);
        // Size cap guards against accidentally loading huge files into memory.
        if (stat2.size > 10 * 1024 * 1024) {
          throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat2.size} bytes)`);
        }
        const raw = fs23.readFileSync(resolved, "utf-8");
        try {
          const parsed = JSON.parse(raw);
          if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
            throw new Error("OPA data file must contain a JSON object (not an array or primitive)");
          }
          this.dataDocument = parsed;
        } catch (err) {
          // Re-throw our own shape-validation error verbatim; anything else is
          // treated as a JSON parse failure and wrapped with file context.
          if (err.message.startsWith("OPA data file must")) {
            throw err;
          }
          throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
        }
      }
      /**
       * Evaluate the loaded policy against `input`.
       * Returns the first result from the opa-wasm result set
       * (shape: [{ result }]), or undefined when the set is empty.
       */
      async evaluate(input) {
        if (!this.policy) {
          throw new Error("OPA WASM evaluator not initialized");
        }
        // Data is (re)applied on every call so loadData() after initialize()
        // still takes effect.
        this.policy.setData(this.dataDocument);
        const resultSet = this.policy.evaluate(input);
        if (Array.isArray(resultSet) && resultSet.length > 0) {
          return resultSet[0].result;
        }
        return void 0;
      }
      /**
       * Best-effort release of the WASM policy instance. Different opa-wasm
       * versions expose close() or free(); failures during release are ignored.
       */
      async shutdown() {
        if (this.policy) {
          if (typeof this.policy.close === "function") {
            try {
              this.policy.close();
            } catch {
            }
          } else if (typeof this.policy.free === "function") {
            try {
              this.policy.free();
            } catch {
            }
          }
        }
        this.policy = null;
      }
    };
  }
});
|
|
57609
|
+
|
|
57610
|
+
// src/enterprise/policy/opa-http-evaluator.ts
// esbuild lazy-module wrapper: evaluates policies against a remote OPA server
// via its REST Data API.
var OpaHttpEvaluator;
var init_opa_http_evaluator = __esm({
  "src/enterprise/policy/opa-http-evaluator.ts"() {
    "use strict";
    OpaHttpEvaluator = class {
      baseUrl;
      // Per-request timeout in milliseconds (enforced with AbortController).
      timeout;
      /**
       * @param baseUrl - OPA server base URL; must be http(s) and must not
       *   target loopback/private/metadata hosts (see isBlockedHostname).
       * @param timeout - request timeout in ms (default 5000).
       * @throws Error on invalid URL, disallowed protocol, or blocked host.
       */
      constructor(baseUrl, timeout = 5e3) {
        let parsed;
        try {
          parsed = new URL(baseUrl);
        } catch {
          throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
        }
        if (!["http:", "https:"].includes(parsed.protocol)) {
          throw new Error(
            `OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`
          );
        }
        const hostname = parsed.hostname;
        if (this.isBlockedHostname(hostname)) {
          throw new Error(
            `OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`
          );
        }
        // Strip trailing slashes so path concatenation below is predictable.
        this.baseUrl = baseUrl.replace(/\/+$/, "");
        this.timeout = timeout;
      }
      /**
       * Check if a hostname is blocked due to SSRF concerns.
       *
       * Blocks:
       * - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
       * - Link-local addresses (169.254.x.x)
       * - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
       * - IPv6 unique local addresses (fd00::/8)
       * - Cloud metadata services (*.internal)
       *
       * NOTE(review): the IPv4 branch only matches literal dotted-quad form;
       * single-number decimal/hex representations (e.g. "2130706433",
       * "0x7f000001") and DNS names that resolve to private ranges are not
       * caught here — confirm this is acceptable for the threat model.
       */
      isBlockedHostname(hostname) {
        if (!hostname) return true;
        // Lowercase and strip IPv6 URL brackets ("[::1]" -> "::1").
        const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, "");
        if (normalized === "metadata.google.internal" || normalized.endsWith(".internal")) {
          return true;
        }
        if (normalized === "localhost" || normalized === "localhost.localdomain") {
          return true;
        }
        if (normalized === "::1" || normalized === "0:0:0:0:0:0:0:1") {
          return true;
        }
        const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
        const ipv4Match = normalized.match(ipv4Pattern);
        if (ipv4Match) {
          const octets = ipv4Match.slice(1, 5).map(Number);
          // Octet > 255 -> not a real IPv4 literal; treat as ordinary hostname.
          if (octets.some((octet) => octet > 255)) {
            return false;
          }
          const [a, b] = octets;
          if (a === 127) {
            return true;
          }
          if (a === 0) {
            return true;
          }
          if (a === 169 && b === 254) {
            return true;
          }
          if (a === 10) {
            return true;
          }
          if (a === 172 && b >= 16 && b <= 31) {
            return true;
          }
          if (a === 192 && b === 168) {
            return true;
          }
        }
        // IPv6 unique-local (fc00::/7) and link-local (fe80::/10) prefixes.
        if (normalized.startsWith("fd") || normalized.startsWith("fc")) {
          return true;
        }
        if (normalized.startsWith("fe80:")) {
          return true;
        }
        return false;
      }
      /**
       * Evaluate a policy rule against an input document via OPA REST API.
       *
       * POSTs {input} to `<baseUrl>/v1/data/<rulePath>` with each path segment
       * URL-encoded; aborts the request after `this.timeout` ms.
       *
       * @param input - The input document to evaluate
       * @param rulePath - OPA rule path (e.g., 'visor/check/execute')
       * @returns The result object from OPA, or undefined on error
       */
      async evaluate(input, rulePath) {
        const encodedPath = rulePath.split("/").map((s) => encodeURIComponent(s)).join("/");
        const url = `${this.baseUrl}/v1/data/${encodedPath}`;
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), this.timeout);
        try {
          const response = await fetch(url, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ input }),
            signal: controller.signal
          });
          if (!response.ok) {
            throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
          }
          let body;
          try {
            body = await response.json();
          } catch (jsonErr) {
            throw new Error(
              `OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`
            );
          }
          // OPA wraps the rule's value in a top-level "result" key.
          return body?.result;
        } finally {
          clearTimeout(timer);
        }
      }
      // Stateless HTTP client: nothing to release.
      async shutdown() {
      }
    };
  }
});
|
|
57736
|
+
|
|
57737
|
+
// src/enterprise/policy/policy-input-builder.ts
// esbuild lazy-module wrapper: builds the OPA `input` documents for each
// policy scope (check.execute, tool.invoke, capability.resolve).
var PolicyInputBuilder;
var init_policy_input_builder = __esm({
  "src/enterprise/policy/policy-input-builder.ts"() {
    "use strict";
    PolicyInputBuilder = class {
      // Role definitions from the policy config (name -> matching criteria).
      roles;
      // Identity of the current actor (GitHub and/or Slack attributes).
      actor;
      repository;
      pullRequest;
      constructor(policyConfig, actor, repository, pullRequest) {
        this.roles = policyConfig.roles || {};
        this.actor = actor;
        this.repository = repository;
        this.pullRequest = pullRequest;
      }
      /**
       * Resolve which roles apply to the current actor.
       *
       * Identity criteria (author_association, users, slack_users, emails)
       * are OR-ed: any one match qualifies. A slack_channels list, when
       * present, is an additional AND filter the actor's channel must pass.
       */
      resolveRoles() {
        const matched = [];
        for (const [roleName, roleConfig] of Object.entries(this.roles)) {
          let identityMatch = false;
          if (roleConfig.author_association && this.actor.authorAssociation && roleConfig.author_association.includes(this.actor.authorAssociation)) {
            identityMatch = true;
          }
          if (!identityMatch && roleConfig.users && this.actor.login && roleConfig.users.includes(this.actor.login)) {
            identityMatch = true;
          }
          if (!identityMatch && roleConfig.slack_users && this.actor.slack?.userId && roleConfig.slack_users.includes(this.actor.slack.userId)) {
            identityMatch = true;
          }
          if (!identityMatch && roleConfig.emails && this.actor.slack?.email) {
            // Emails compare case-insensitively.
            const actorEmail = this.actor.slack.email.toLowerCase();
            if (roleConfig.emails.some((e) => e.toLowerCase() === actorEmail)) {
              identityMatch = true;
            }
          }
          if (!identityMatch) continue;
          // Channel restriction: role only applies inside the listed channels.
          if (roleConfig.slack_channels && roleConfig.slack_channels.length > 0) {
            if (!this.actor.slack?.channelId || !roleConfig.slack_channels.includes(this.actor.slack.channelId)) {
              continue;
            }
          }
          matched.push(roleName);
        }
        return matched;
      }
      // Assemble the `actor` section of a policy input; the slack sub-object
      // is only included when present.
      buildActor() {
        return {
          authorAssociation: this.actor.authorAssociation,
          login: this.actor.login,
          roles: this.resolveRoles(),
          isLocalMode: this.actor.isLocalMode,
          ...this.actor.slack && { slack: this.actor.slack }
        };
      }
      // Build the input document for the "check.execute" policy scope.
      forCheckExecution(check) {
        return {
          scope: "check.execute",
          check: {
            id: check.id,
            type: check.type,
            group: check.group,
            tags: check.tags,
            criticality: check.criticality,
            sandbox: check.sandbox,
            policy: check.policy
          },
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      // Build the input document for the "tool.invoke" policy scope.
      forToolInvocation(serverName, methodName, transport) {
        return {
          scope: "tool.invoke",
          tool: { serverName, methodName, transport },
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      // Build the input document for the "capability.resolve" policy scope.
      // The check type is fixed to "ai" here (capabilities apply to AI checks).
      forCapabilityResolve(checkId, capabilities) {
        return {
          scope: "capability.resolve",
          check: { id: checkId, type: "ai" },
          capability: capabilities,
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
    };
  }
});
|
|
57831
|
+
|
|
57832
|
+
// src/enterprise/policy/opa-policy-engine.ts
// esbuild lazy-module wrapper: orchestrates policy evaluation over either the
// in-process WASM evaluator ("local") or a remote OPA server ("remote").
var opa_policy_engine_exports = {};
__export(opa_policy_engine_exports, {
  OpaPolicyEngine: () => OpaPolicyEngine
});
var OpaPolicyEngine;
var init_opa_policy_engine = __esm({
  "src/enterprise/policy/opa-policy-engine.ts"() {
    "use strict";
    init_opa_wasm_evaluator();
    init_opa_http_evaluator();
    init_policy_input_builder();
    OpaPolicyEngine = class {
      // Active evaluator (OpaWasmEvaluator | OpaHttpEvaluator | null).
      // When null, all evaluate* methods short-circuit to { allowed: true }.
      evaluator = null;
      // Behavior when evaluation fails or yields no result:
      //   "deny"  -> disallow (default)
      //   "allow" -> allow silently
      //   "warn"  -> allow but flag the decision with warn=true
      fallback;
      // Evaluation timeout in ms (also passed to the HTTP evaluator).
      timeout;
      config;
      inputBuilder = null;
      logger = null;
      constructor(config) {
        this.config = config;
        this.fallback = config.fallback || "deny";
        this.timeout = config.timeout || 5e3;
      }
      /**
       * Build actor/repo/PR context from environment variables and construct
       * the evaluator selected by config.engine ("local" | "remote" | other).
       */
      async initialize(config) {
        try {
          // Comma-operator lazy import of the bundled logger; logging is
          // optional, so failures here are deliberately ignored.
          this.logger = (init_logger(), __toCommonJS(logger_exports)).logger;
        } catch {
        }
        const actor = {
          authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
          login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
          // Not running under GitHub Actions -> treat as local mode.
          isLocalMode: !process.env.GITHUB_ACTIONS
        };
        const repo = {
          owner: process.env.GITHUB_REPOSITORY_OWNER,
          name: process.env.GITHUB_REPOSITORY?.split("/")[1],
          branch: process.env.GITHUB_HEAD_REF,
          baseBranch: process.env.GITHUB_BASE_REF,
          event: process.env.GITHUB_EVENT_NAME
        };
        const prNum = process.env.GITHUB_PR_NUMBER ? parseInt(process.env.GITHUB_PR_NUMBER, 10) : void 0;
        const pullRequest = {
          // Guard against non-numeric env values (parseInt -> NaN).
          number: prNum !== void 0 && Number.isFinite(prNum) ? prNum : void 0
        };
        this.inputBuilder = new PolicyInputBuilder(config, actor, repo, pullRequest);
        if (config.engine === "local") {
          if (!config.rules) {
            throw new Error("OPA local mode requires `policy.rules` path to .wasm or .rego files");
          }
          const wasm = new OpaWasmEvaluator();
          await wasm.initialize(config.rules);
          if (config.data) {
            wasm.loadData(config.data);
          }
          this.evaluator = wasm;
        } else if (config.engine === "remote") {
          if (!config.url) {
            throw new Error("OPA remote mode requires `policy.url` pointing to OPA server");
          }
          this.evaluator = new OpaHttpEvaluator(config.url, this.timeout);
        } else {
          // Unknown/absent engine: policy enforcement is disabled.
          this.evaluator = null;
        }
      }
      /**
       * Update actor/repo/PR context (e.g., after PR info becomes available).
       * Called by the enterprise loader when engine context is enriched.
       */
      setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new PolicyInputBuilder(this.config, actor, repo, pullRequest);
      }
      /**
       * Decide whether a check may run. A per-check `policy.rule` override may
       * redirect the evaluation to a custom rule path.
       */
      async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const cfg = checkConfig && typeof checkConfig === "object" ? checkConfig : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
          id: checkId,
          type: cfg.type || "ai",
          group: cfg.group,
          tags: cfg.tags,
          criticality: cfg.criticality,
          sandbox: cfg.sandbox,
          policy: policyOverride
        });
        return this.doEvaluate(input, this.resolveRulePath("check.execute", policyOverride?.rule));
      }
      // Decide whether an MCP tool invocation is permitted.
      async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, "visor/tool/invoke");
      }
      // Decide whether a check may resolve the requested capabilities.
      async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, "visor/capability/resolve");
      }
      async shutdown() {
        if (this.evaluator && "shutdown" in this.evaluator) {
          await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
      }
      // Map a scope like "check.execute" to its rule path "visor/check/execute";
      // an override is used as-is, with "visor/" prepended when missing.
      resolveRulePath(defaultScope, override) {
        if (override) {
          return override.startsWith("visor/") ? override : `visor/${override}`;
        }
        return `visor/${defaultScope.replace(/\./g, "/")}`;
      }
      /**
       * Core evaluation: race the evaluator against a timeout, parse the raw
       * result into a decision, and apply fallback semantics.
       * fallback === "warn" downgrades a denial to an allowed-but-flagged
       * decision; any thrown error resolves per the fallback mode.
       */
      async doEvaluate(input, rulePath) {
        try {
          this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
          let timer;
          const timeoutPromise = new Promise((_resolve, reject) => {
            timer = setTimeout(() => reject(new Error("policy evaluation timeout")), this.timeout);
          });
          try {
            const result = await Promise.race([this.rawEvaluate(input, rulePath), timeoutPromise]);
            const decision = this.parseDecision(result);
            if (!decision.allowed && this.fallback === "warn") {
              decision.allowed = true;
              decision.warn = true;
              decision.reason = `audit: ${decision.reason || "policy denied"}`;
            }
            this.logger?.debug(
              `[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || "none"}`
            );
            return decision;
          } finally {
            // Always clear the pending timer so the process can exit cleanly.
            if (timer) clearTimeout(timer);
          }
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: `policy evaluation failed, fallback=${this.fallback}`
          };
        }
      }
      // WASM results are a nested package tree and must be navigated to the
      // rule; the HTTP evaluator addresses the rule directly in the URL.
      async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof OpaWasmEvaluator) {
          const result = await this.evaluator.evaluate(input);
          return this.navigateWasmResult(result, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
      }
      /**
       * Navigate nested OPA WASM result tree to reach the specific rule's output.
       * The WASM entrypoint `-e visor` means the result root IS the visor package,
       * so we strip the `visor/` prefix and walk the remaining segments.
       * Returns undefined when any segment is missing.
       */
      navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== "object") return result;
        const segments = rulePath.replace(/^visor\//, "").split("/");
        let current = result;
        for (const seg of segments) {
          if (current && typeof current === "object" && seg in current) {
            current = current[seg];
          } else {
            return void 0;
          }
        }
        return current;
      }
      /**
       * Convert a raw rule result into a decision object.
       * A missing result resolves per the fallback mode; otherwise anything
       * but an explicit `allowed: false` counts as allowed, and an optional
       * `capabilities` payload is passed through.
       */
      parseDecision(result) {
        if (result === void 0 || result === null) {
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: this.fallback === "warn" ? "audit: no policy result" : "no policy result"
          };
        }
        const allowed = result.allowed !== false;
        const decision = {
          allowed,
          reason: result.reason
        };
        if (result.capabilities) {
          decision.capabilities = result.capabilities;
        }
        return decision;
      }
    };
  }
});
|
|
58020
|
+
|
|
58021
|
+
// src/enterprise/scheduler/knex-store.ts
// esbuild export shim: exposes KnexStoreBackend lazily so the optional knex
// dependency is only loaded when the scheduler store is actually used.
var knex_store_exports = {};
__export(knex_store_exports, {
  KnexStoreBackend: () => KnexStoreBackend
});
|
|
58026
|
+
// Normalize a DB-returned numeric column to a JS number.
// Drivers may hand back bigint columns as strings; nullish values map to
// undefined so optional fields stay absent.
function toNum(val) {
  if (val == null) {
    return undefined;
  }
  if (typeof val === "string") {
    return parseInt(val, 10);
  }
  return val;
}
|
|
58030
|
+
// Lenient JSON parse for nullable DB text columns: empty/nullish input or
// malformed JSON yields undefined instead of throwing.
function safeJsonParse2(value) {
  if (!value) {
    return undefined;
  }
  let parsed;
  try {
    parsed = JSON.parse(value);
  } catch {
    return undefined;
  }
  return parsed;
}
|
|
58038
|
+
// Hydrate a message-trigger record (camelCase) from its snake_case DB row.
// JSON text columns are parsed leniently; boolean columns tolerate both the
// native boolean and the 0/1 integer form used by some drivers; nullable
// text columns collapse to undefined.
function fromTriggerRow2(row) {
  const channels = safeJsonParse2(row.channels);
  const fromUsers = safeJsonParse2(row.from_users);
  const contains = safeJsonParse2(row.contains);
  const inputs = safeJsonParse2(row.inputs);
  const outputContext = safeJsonParse2(row.output_context);
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: row.creator_context ?? undefined,
    creatorName: row.creator_name ?? undefined,
    description: row.description ?? undefined,
    channels,
    fromUsers,
    fromBots: row.from_bots === true || row.from_bots === 1,
    contains,
    matchPattern: row.match_pattern ?? undefined,
    threads: row.threads,
    workflow: row.workflow,
    inputs,
    outputContext,
    status: row.status,
    enabled: row.enabled === true || row.enabled === 1,
    createdAt: toNum(row.created_at)
  };
}
|
|
58059
|
+
// Flatten a message-trigger record into a snake_case row for insertion.
// Structured fields are serialized to JSON text; absent optionals become
// SQL NULL (truthy check: empty containers still serialize when present).
function toTriggerInsertRow(trigger) {
  const json = (v) => (v ? JSON.stringify(v) : null);
  const orNull = (v) => v ?? null;
  return {
    id: trigger.id,
    creator_id: trigger.creatorId,
    creator_context: orNull(trigger.creatorContext),
    creator_name: orNull(trigger.creatorName),
    description: orNull(trigger.description),
    channels: json(trigger.channels),
    from_users: json(trigger.fromUsers),
    from_bots: trigger.fromBots,
    contains: json(trigger.contains),
    match_pattern: orNull(trigger.matchPattern),
    threads: trigger.threads,
    workflow: trigger.workflow,
    inputs: json(trigger.inputs),
    output_context: json(trigger.outputContext),
    status: trigger.status,
    enabled: trigger.enabled,
    created_at: trigger.createdAt
  };
}
|
|
58080
|
+
// Hydrate a schedule record (camelCase) from its snake_case DB row.
// Timestamp columns may arrive as strings (bigint drivers) and are normalized
// via toNum; JSON text columns are parsed leniently via safeJsonParse2;
// nullable text columns collapse to undefined.
function fromDbRow2(row) {
  const workflowInputs = safeJsonParse2(row.workflow_inputs);
  const outputContext = safeJsonParse2(row.output_context);
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: row.creator_context ?? undefined,
    creatorName: row.creator_name ?? undefined,
    timezone: row.timezone,
    schedule: row.schedule_expr,
    runAt: toNum(row.run_at),
    isRecurring: row.is_recurring === true || row.is_recurring === 1,
    originalExpression: row.original_expression,
    workflow: row.workflow ?? undefined,
    workflowInputs,
    outputContext,
    status: row.status,
    createdAt: toNum(row.created_at),
    lastRunAt: toNum(row.last_run_at),
    nextRunAt: toNum(row.next_run_at),
    runCount: row.run_count,
    failureCount: row.failure_count,
    lastError: row.last_error ?? undefined,
    previousResponse: row.previous_response ?? undefined
  };
}
|
|
58104
|
+
// Flatten a schedule record into a snake_case row for insertion.
// Structured fields are serialized to JSON text; absent optionals become
// SQL NULL.
function toInsertRow(schedule) {
  const json = (v) => (v ? JSON.stringify(v) : null);
  const orNull = (v) => v ?? null;
  return {
    id: schedule.id,
    creator_id: schedule.creatorId,
    creator_context: orNull(schedule.creatorContext),
    creator_name: orNull(schedule.creatorName),
    timezone: schedule.timezone,
    schedule_expr: schedule.schedule,
    run_at: orNull(schedule.runAt),
    is_recurring: schedule.isRecurring,
    original_expression: schedule.originalExpression,
    workflow: orNull(schedule.workflow),
    workflow_inputs: json(schedule.workflowInputs),
    output_context: json(schedule.outputContext),
    status: schedule.status,
    created_at: schedule.createdAt,
    last_run_at: orNull(schedule.lastRunAt),
    next_run_at: orNull(schedule.nextRunAt),
    run_count: schedule.runCount,
    failure_count: schedule.failureCount,
    last_error: orNull(schedule.lastError),
    previous_response: orNull(schedule.previousResponse)
  };
}
|
|
58128
|
+
var fs24, path29, import_uuid2, KnexStoreBackend;
|
|
58129
|
+
var init_knex_store = __esm({
|
|
58130
|
+
"src/enterprise/scheduler/knex-store.ts"() {
|
|
58131
|
+
"use strict";
|
|
58132
|
+
fs24 = __toESM(require("fs"));
|
|
58133
|
+
path29 = __toESM(require("path"));
|
|
58134
|
+
import_uuid2 = require("uuid");
|
|
58135
|
+
init_logger();
|
|
58136
|
+
KnexStoreBackend = class {
|
|
58137
|
+
knex = null;
|
|
58138
|
+
driver;
|
|
58139
|
+
connection;
|
|
58140
|
+
constructor(driver, storageConfig, _haConfig) {
|
|
58141
|
+
this.driver = driver;
|
|
58142
|
+
this.connection = storageConfig.connection || {};
|
|
58143
|
+
}
|
|
58144
|
+
async initialize() {
|
|
58145
|
+
const { createRequire } = require("module");
|
|
58146
|
+
const runtimeRequire = createRequire(__filename);
|
|
58147
|
+
let knexFactory;
|
|
58148
|
+
try {
|
|
58149
|
+
knexFactory = runtimeRequire("knex");
|
|
58150
|
+
} catch (err) {
|
|
58151
|
+
const code = err?.code;
|
|
58152
|
+
if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
|
|
58153
|
+
throw new Error(
|
|
58154
|
+
"knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
|
|
58155
|
+
);
|
|
58156
|
+
}
|
|
58157
|
+
throw err;
|
|
58158
|
+
}
|
|
58159
|
+
const clientMap = {
|
|
58160
|
+
postgresql: "pg",
|
|
58161
|
+
mysql: "mysql2",
|
|
58162
|
+
mssql: "tedious"
|
|
58163
|
+
};
|
|
58164
|
+
const client = clientMap[this.driver];
|
|
58165
|
+
let connection;
|
|
58166
|
+
if (this.connection.connection_string) {
|
|
58167
|
+
connection = this.connection.connection_string;
|
|
58168
|
+
} else if (this.driver === "mssql") {
|
|
58169
|
+
connection = this.buildMssqlConnection();
|
|
58170
|
+
} else {
|
|
58171
|
+
connection = this.buildStandardConnection();
|
|
58172
|
+
}
|
|
58173
|
+
this.knex = knexFactory({
|
|
58174
|
+
client,
|
|
58175
|
+
connection,
|
|
58176
|
+
pool: {
|
|
58177
|
+
min: this.connection.pool?.min ?? 0,
|
|
58178
|
+
max: this.connection.pool?.max ?? 10
|
|
58179
|
+
}
|
|
58180
|
+
});
|
|
58181
|
+
await this.migrateSchema();
|
|
58182
|
+
logger.info(`[KnexStore] Initialized (${this.driver})`);
|
|
58183
|
+
}
|
|
58184
|
+
buildStandardConnection() {
|
|
58185
|
+
return {
|
|
58186
|
+
host: this.connection.host || "localhost",
|
|
58187
|
+
port: this.connection.port,
|
|
58188
|
+
database: this.connection.database || "visor",
|
|
58189
|
+
user: this.connection.user,
|
|
58190
|
+
password: this.connection.password,
|
|
58191
|
+
ssl: this.resolveSslConfig()
|
|
58192
|
+
};
|
|
58193
|
+
}
|
|
58194
|
+
buildMssqlConnection() {
|
|
58195
|
+
const ssl = this.connection.ssl;
|
|
58196
|
+
const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
|
|
58197
|
+
return {
|
|
58198
|
+
server: this.connection.host || "localhost",
|
|
58199
|
+
port: this.connection.port,
|
|
58200
|
+
database: this.connection.database || "visor",
|
|
58201
|
+
user: this.connection.user,
|
|
58202
|
+
password: this.connection.password,
|
|
58203
|
+
options: {
|
|
58204
|
+
encrypt: sslEnabled,
|
|
58205
|
+
trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
|
|
58206
|
+
}
|
|
58207
|
+
};
|
|
58208
|
+
}
|
|
58209
|
+
resolveSslConfig() {
|
|
58210
|
+
const ssl = this.connection.ssl;
|
|
58211
|
+
if (ssl === false || ssl === void 0) return false;
|
|
58212
|
+
if (ssl === true) return { rejectUnauthorized: true };
|
|
58213
|
+
if (ssl.enabled === false) return false;
|
|
58214
|
+
const result = {
|
|
58215
|
+
rejectUnauthorized: ssl.reject_unauthorized !== false
|
|
58216
|
+
};
|
|
58217
|
+
if (ssl.ca) {
|
|
58218
|
+
const caPath = this.validateSslPath(ssl.ca, "CA certificate");
|
|
58219
|
+
result.ca = fs24.readFileSync(caPath, "utf8");
|
|
58220
|
+
}
|
|
58221
|
+
if (ssl.cert) {
|
|
58222
|
+
const certPath = this.validateSslPath(ssl.cert, "client certificate");
|
|
58223
|
+
result.cert = fs24.readFileSync(certPath, "utf8");
|
|
58224
|
+
}
|
|
58225
|
+
if (ssl.key) {
|
|
58226
|
+
const keyPath = this.validateSslPath(ssl.key, "client key");
|
|
58227
|
+
result.key = fs24.readFileSync(keyPath, "utf8");
|
|
58228
|
+
}
|
|
58229
|
+
return result;
|
|
58230
|
+
}
|
|
58231
|
+
validateSslPath(filePath, label) {
|
|
58232
|
+
const resolved = path29.resolve(filePath);
|
|
58233
|
+
if (resolved !== path29.normalize(resolved)) {
|
|
58234
|
+
throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
|
|
58235
|
+
}
|
|
58236
|
+
if (!fs24.existsSync(resolved)) {
|
|
58237
|
+
throw new Error(`SSL ${label} not found: ${filePath}`);
|
|
58238
|
+
}
|
|
58239
|
+
return resolved;
|
|
58240
|
+
}
|
|
58241
|
+
async shutdown() {
|
|
58242
|
+
if (this.knex) {
|
|
58243
|
+
await this.knex.destroy();
|
|
58244
|
+
this.knex = null;
|
|
58245
|
+
}
|
|
58246
|
+
}
|
|
58247
|
+
async migrateSchema() {
|
|
58248
|
+
const knex = this.getKnex();
|
|
58249
|
+
const exists = await knex.schema.hasTable("schedules");
|
|
58250
|
+
if (!exists) {
|
|
58251
|
+
await knex.schema.createTable("schedules", (table) => {
|
|
58252
|
+
table.string("id", 36).primary();
|
|
58253
|
+
table.string("creator_id", 255).notNullable().index();
|
|
58254
|
+
table.string("creator_context", 255);
|
|
58255
|
+
table.string("creator_name", 255);
|
|
58256
|
+
table.string("timezone", 64).notNullable().defaultTo("UTC");
|
|
58257
|
+
table.string("schedule_expr", 255);
|
|
58258
|
+
table.bigInteger("run_at");
|
|
58259
|
+
table.boolean("is_recurring").notNullable();
|
|
58260
|
+
table.text("original_expression");
|
|
58261
|
+
table.string("workflow", 255);
|
|
58262
|
+
table.text("workflow_inputs");
|
|
58263
|
+
table.text("output_context");
|
|
58264
|
+
table.string("status", 20).notNullable().index();
|
|
58265
|
+
table.bigInteger("created_at").notNullable();
|
|
58266
|
+
table.bigInteger("last_run_at");
|
|
58267
|
+
table.bigInteger("next_run_at");
|
|
58268
|
+
table.integer("run_count").notNullable().defaultTo(0);
|
|
58269
|
+
table.integer("failure_count").notNullable().defaultTo(0);
|
|
58270
|
+
table.text("last_error");
|
|
58271
|
+
table.text("previous_response");
|
|
58272
|
+
table.index(["status", "next_run_at"]);
|
|
58273
|
+
});
|
|
58274
|
+
}
|
|
58275
|
+
const triggersExist = await knex.schema.hasTable("message_triggers");
|
|
58276
|
+
if (!triggersExist) {
|
|
58277
|
+
await knex.schema.createTable("message_triggers", (table) => {
|
|
58278
|
+
table.string("id", 36).primary();
|
|
58279
|
+
table.string("creator_id", 255).notNullable().index();
|
|
58280
|
+
table.string("creator_context", 255);
|
|
58281
|
+
table.string("creator_name", 255);
|
|
58282
|
+
table.text("description");
|
|
58283
|
+
table.text("channels");
|
|
58284
|
+
table.text("from_users");
|
|
58285
|
+
table.boolean("from_bots").notNullable().defaultTo(false);
|
|
58286
|
+
table.text("contains");
|
|
58287
|
+
table.text("match_pattern");
|
|
58288
|
+
table.string("threads", 20).notNullable().defaultTo("any");
|
|
58289
|
+
table.string("workflow", 255).notNullable();
|
|
58290
|
+
table.text("inputs");
|
|
58291
|
+
table.text("output_context");
|
|
58292
|
+
table.string("status", 20).notNullable().defaultTo("active").index();
|
|
58293
|
+
table.boolean("enabled").notNullable().defaultTo(true);
|
|
58294
|
+
table.bigInteger("created_at").notNullable();
|
|
58295
|
+
});
|
|
58296
|
+
}
|
|
58297
|
+
const locksExist = await knex.schema.hasTable("scheduler_locks");
|
|
58298
|
+
if (!locksExist) {
|
|
58299
|
+
await knex.schema.createTable("scheduler_locks", (table) => {
|
|
58300
|
+
table.string("lock_id", 255).primary();
|
|
58301
|
+
table.string("node_id", 255).notNullable();
|
|
58302
|
+
table.string("lock_token", 36).notNullable();
|
|
58303
|
+
table.bigInteger("acquired_at").notNullable();
|
|
58304
|
+
table.bigInteger("expires_at").notNullable();
|
|
58305
|
+
});
|
|
58306
|
+
}
|
|
58307
|
+
}
|
|
58308
|
+
getKnex() {
|
|
58309
|
+
if (!this.knex) {
|
|
58310
|
+
throw new Error("[KnexStore] Not initialized. Call initialize() first.");
|
|
58311
|
+
}
|
|
58312
|
+
return this.knex;
|
|
58313
|
+
}
|
|
58314
|
+
// --- CRUD ---
|
|
58315
|
+
async create(schedule) {
|
|
58316
|
+
const knex = this.getKnex();
|
|
58317
|
+
const newSchedule = {
|
|
58318
|
+
...schedule,
|
|
58319
|
+
id: (0, import_uuid2.v4)(),
|
|
58320
|
+
createdAt: Date.now(),
|
|
58321
|
+
runCount: 0,
|
|
58322
|
+
failureCount: 0,
|
|
58323
|
+
status: "active"
|
|
58324
|
+
};
|
|
58325
|
+
await knex("schedules").insert(toInsertRow(newSchedule));
|
|
58326
|
+
logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
|
|
58327
|
+
return newSchedule;
|
|
58328
|
+
}
|
|
58329
|
+
async importSchedule(schedule) {
|
|
58330
|
+
const knex = this.getKnex();
|
|
58331
|
+
const existing = await knex("schedules").where("id", schedule.id).first();
|
|
58332
|
+
if (existing) return;
|
|
58333
|
+
await knex("schedules").insert(toInsertRow(schedule));
|
|
58334
|
+
}
|
|
58335
|
+
async get(id) {
|
|
58336
|
+
const knex = this.getKnex();
|
|
58337
|
+
const row = await knex("schedules").where("id", id).first();
|
|
58338
|
+
return row ? fromDbRow2(row) : void 0;
|
|
58339
|
+
}
|
|
58340
|
+
async update(id, patch) {
|
|
58341
|
+
const knex = this.getKnex();
|
|
58342
|
+
const existing = await knex("schedules").where("id", id).first();
|
|
58343
|
+
if (!existing) return void 0;
|
|
58344
|
+
const current = fromDbRow2(existing);
|
|
58345
|
+
const updated = { ...current, ...patch, id: current.id };
|
|
58346
|
+
const row = toInsertRow(updated);
|
|
58347
|
+
delete row.id;
|
|
58348
|
+
await knex("schedules").where("id", id).update(row);
|
|
58349
|
+
return updated;
|
|
58350
|
+
}
|
|
58351
|
+
async delete(id) {
|
|
58352
|
+
const knex = this.getKnex();
|
|
58353
|
+
const deleted = await knex("schedules").where("id", id).del();
|
|
58354
|
+
if (deleted > 0) {
|
|
58355
|
+
logger.info(`[KnexStore] Deleted schedule ${id}`);
|
|
58356
|
+
return true;
|
|
58357
|
+
}
|
|
58358
|
+
return false;
|
|
58359
|
+
}
|
|
58360
|
+
// --- Queries ---
|
|
58361
|
+
async getByCreator(creatorId) {
|
|
58362
|
+
const knex = this.getKnex();
|
|
58363
|
+
const rows = await knex("schedules").where("creator_id", creatorId);
|
|
58364
|
+
return rows.map((r) => fromDbRow2(r));
|
|
58365
|
+
}
|
|
58366
|
+
async getActiveSchedules() {
|
|
58367
|
+
const knex = this.getKnex();
|
|
58368
|
+
const rows = await knex("schedules").where("status", "active");
|
|
58369
|
+
return rows.map((r) => fromDbRow2(r));
|
|
58370
|
+
}
|
|
58371
|
+
async getDueSchedules(now) {
|
|
58372
|
+
const ts = now ?? Date.now();
|
|
58373
|
+
const knex = this.getKnex();
|
|
58374
|
+
const bFalse = this.driver === "mssql" ? 0 : false;
|
|
58375
|
+
const bTrue = this.driver === "mssql" ? 1 : true;
|
|
58376
|
+
const rows = await knex("schedules").where("status", "active").andWhere(function() {
|
|
58377
|
+
this.where(function() {
|
|
58378
|
+
this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
|
|
58379
|
+
}).orWhere(function() {
|
|
58380
|
+
this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
|
|
58381
|
+
});
|
|
58382
|
+
});
|
|
58383
|
+
return rows.map((r) => fromDbRow2(r));
|
|
58384
|
+
}
|
|
58385
|
+
async findByWorkflow(creatorId, workflowName) {
|
|
58386
|
+
const knex = this.getKnex();
|
|
58387
|
+
const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
|
|
58388
|
+
const pattern = `%${escaped}%`;
|
|
58389
|
+
const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
|
|
58390
|
+
return rows.map((r) => fromDbRow2(r));
|
|
58391
|
+
}
|
|
58392
|
+
async getAll() {
|
|
58393
|
+
const knex = this.getKnex();
|
|
58394
|
+
const rows = await knex("schedules");
|
|
58395
|
+
return rows.map((r) => fromDbRow2(r));
|
|
58396
|
+
}
|
|
58397
|
+
async getStats() {
|
|
58398
|
+
const knex = this.getKnex();
|
|
58399
|
+
const boolTrue = this.driver === "mssql" ? "1" : "true";
|
|
58400
|
+
const boolFalse = this.driver === "mssql" ? "0" : "false";
|
|
58401
|
+
const result = await knex("schedules").select(
|
|
58402
|
+
knex.raw("COUNT(*) as total"),
|
|
58403
|
+
knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
|
|
58404
|
+
knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
|
|
58405
|
+
knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
|
|
58406
|
+
knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
|
|
58407
|
+
knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
|
|
58408
|
+
knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
|
|
58409
|
+
).first();
|
|
58410
|
+
return {
|
|
58411
|
+
total: Number(result.total) || 0,
|
|
58412
|
+
active: Number(result.active) || 0,
|
|
58413
|
+
paused: Number(result.paused) || 0,
|
|
58414
|
+
completed: Number(result.completed) || 0,
|
|
58415
|
+
failed: Number(result.failed) || 0,
|
|
58416
|
+
recurring: Number(result.recurring) || 0,
|
|
58417
|
+
oneTime: Number(result.one_time) || 0
|
|
58418
|
+
};
|
|
58419
|
+
}
|
|
58420
|
+
async validateLimits(creatorId, isRecurring, limits) {
|
|
58421
|
+
const knex = this.getKnex();
|
|
58422
|
+
if (limits.maxGlobal) {
|
|
58423
|
+
const result = await knex("schedules").count("* as cnt").first();
|
|
58424
|
+
if (Number(result?.cnt) >= limits.maxGlobal) {
|
|
58425
|
+
throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
|
|
58426
|
+
}
|
|
58427
|
+
}
|
|
58428
|
+
if (limits.maxPerUser) {
|
|
58429
|
+
const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
|
|
58430
|
+
if (Number(result?.cnt) >= limits.maxPerUser) {
|
|
58431
|
+
throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
|
|
58432
|
+
}
|
|
58433
|
+
}
|
|
58434
|
+
if (isRecurring && limits.maxRecurringPerUser) {
|
|
58435
|
+
const bTrue = this.driver === "mssql" ? 1 : true;
|
|
58436
|
+
const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
|
|
58437
|
+
if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
|
|
58438
|
+
throw new Error(
|
|
58439
|
+
`You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
|
|
58440
|
+
);
|
|
58441
|
+
}
|
|
58442
|
+
}
|
|
58443
|
+
}
|
|
58444
|
+
// --- HA Distributed Locking (via scheduler_locks table) ---
|
|
58445
|
+
async tryAcquireLock(lockId, nodeId, ttlSeconds) {
|
|
58446
|
+
const knex = this.getKnex();
|
|
58447
|
+
const now = Date.now();
|
|
58448
|
+
const expiresAt = now + ttlSeconds * 1e3;
|
|
58449
|
+
const token = (0, import_uuid2.v4)();
|
|
58450
|
+
const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
|
|
58451
|
+
node_id: nodeId,
|
|
58452
|
+
lock_token: token,
|
|
58453
|
+
acquired_at: now,
|
|
58454
|
+
expires_at: expiresAt
|
|
58455
|
+
});
|
|
58456
|
+
if (updated > 0) return token;
|
|
58457
|
+
try {
|
|
58458
|
+
await knex("scheduler_locks").insert({
|
|
58459
|
+
lock_id: lockId,
|
|
58460
|
+
node_id: nodeId,
|
|
58461
|
+
lock_token: token,
|
|
58462
|
+
acquired_at: now,
|
|
58463
|
+
expires_at: expiresAt
|
|
58464
|
+
});
|
|
58465
|
+
return token;
|
|
58466
|
+
} catch {
|
|
58467
|
+
return null;
|
|
58468
|
+
}
|
|
58469
|
+
}
|
|
58470
|
+
async releaseLock(lockId, lockToken) {
|
|
58471
|
+
const knex = this.getKnex();
|
|
58472
|
+
await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
|
|
58473
|
+
}
|
|
58474
|
+
async renewLock(lockId, lockToken, ttlSeconds) {
|
|
58475
|
+
const knex = this.getKnex();
|
|
58476
|
+
const now = Date.now();
|
|
58477
|
+
const expiresAt = now + ttlSeconds * 1e3;
|
|
58478
|
+
const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
|
|
58479
|
+
return updated > 0;
|
|
58480
|
+
}
|
|
58481
|
+
async flush() {
|
|
58482
|
+
}
|
|
58483
|
+
// --- Message Trigger CRUD ---
|
|
58484
|
+
async createTrigger(trigger) {
|
|
58485
|
+
const knex = this.getKnex();
|
|
58486
|
+
const newTrigger = {
|
|
58487
|
+
...trigger,
|
|
58488
|
+
id: (0, import_uuid2.v4)(),
|
|
58489
|
+
createdAt: Date.now()
|
|
58490
|
+
};
|
|
58491
|
+
await knex("message_triggers").insert(toTriggerInsertRow(newTrigger));
|
|
58492
|
+
logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
|
|
58493
|
+
return newTrigger;
|
|
58494
|
+
}
|
|
58495
|
+
async getTrigger(id) {
|
|
58496
|
+
const knex = this.getKnex();
|
|
58497
|
+
const row = await knex("message_triggers").where("id", id).first();
|
|
58498
|
+
return row ? fromTriggerRow2(row) : void 0;
|
|
58499
|
+
}
|
|
58500
|
+
async updateTrigger(id, patch) {
|
|
58501
|
+
const knex = this.getKnex();
|
|
58502
|
+
const existing = await knex("message_triggers").where("id", id).first();
|
|
58503
|
+
if (!existing) return void 0;
|
|
58504
|
+
const current = fromTriggerRow2(existing);
|
|
58505
|
+
const updated = {
|
|
58506
|
+
...current,
|
|
58507
|
+
...patch,
|
|
58508
|
+
id: current.id,
|
|
58509
|
+
createdAt: current.createdAt
|
|
58510
|
+
};
|
|
58511
|
+
const row = toTriggerInsertRow(updated);
|
|
58512
|
+
delete row.id;
|
|
58513
|
+
await knex("message_triggers").where("id", id).update(row);
|
|
58514
|
+
return updated;
|
|
58515
|
+
}
|
|
58516
|
+
async deleteTrigger(id) {
|
|
58517
|
+
const knex = this.getKnex();
|
|
58518
|
+
const deleted = await knex("message_triggers").where("id", id).del();
|
|
58519
|
+
if (deleted > 0) {
|
|
58520
|
+
logger.info(`[KnexStore] Deleted trigger ${id}`);
|
|
58521
|
+
return true;
|
|
58522
|
+
}
|
|
58523
|
+
return false;
|
|
58524
|
+
}
|
|
58525
|
+
async getTriggersByCreator(creatorId) {
|
|
58526
|
+
const knex = this.getKnex();
|
|
58527
|
+
const rows = await knex("message_triggers").where("creator_id", creatorId);
|
|
58528
|
+
return rows.map((r) => fromTriggerRow2(r));
|
|
58529
|
+
}
|
|
58530
|
+
async getActiveTriggers() {
|
|
58531
|
+
const knex = this.getKnex();
|
|
58532
|
+
const rows = await knex("message_triggers").where("status", "active").where("enabled", this.driver === "mssql" ? 1 : true);
|
|
58533
|
+
return rows.map((r) => fromTriggerRow2(r));
|
|
58534
|
+
}
|
|
58535
|
+
};
|
|
58536
|
+
}
|
|
58537
|
+
});
|
|
58538
|
+
|
|
58539
|
+
// src/enterprise/loader.ts
|
|
58540
|
+
var loader_exports = {};
|
|
58541
|
+
__export(loader_exports, {
|
|
58542
|
+
loadEnterprisePolicyEngine: () => loadEnterprisePolicyEngine,
|
|
58543
|
+
loadEnterpriseStoreBackend: () => loadEnterpriseStoreBackend
|
|
58544
|
+
});
|
|
58545
|
+
async function loadEnterprisePolicyEngine(config) {
|
|
58546
|
+
try {
|
|
58547
|
+
const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
|
|
58548
|
+
const validator = new LicenseValidator2();
|
|
58549
|
+
const license = await validator.loadAndValidate();
|
|
58550
|
+
if (!license || !validator.hasFeature("policy")) {
|
|
58551
|
+
return new DefaultPolicyEngine();
|
|
58552
|
+
}
|
|
58553
|
+
if (validator.isInGracePeriod()) {
|
|
58554
|
+
console.warn(
|
|
58555
|
+
"[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
|
|
58556
|
+
);
|
|
58557
|
+
}
|
|
58558
|
+
const { OpaPolicyEngine: OpaPolicyEngine2 } = await Promise.resolve().then(() => (init_opa_policy_engine(), opa_policy_engine_exports));
|
|
58559
|
+
const engine = new OpaPolicyEngine2(config);
|
|
58560
|
+
await engine.initialize(config);
|
|
58561
|
+
return engine;
|
|
58562
|
+
} catch (err) {
|
|
58563
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
58564
|
+
try {
|
|
58565
|
+
const { logger: logger2 } = (init_logger(), __toCommonJS(logger_exports));
|
|
58566
|
+
logger2.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
|
|
58567
|
+
} catch {
|
|
58568
|
+
}
|
|
58569
|
+
return new DefaultPolicyEngine();
|
|
58570
|
+
}
|
|
58571
|
+
}
|
|
58572
|
+
async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
|
|
58573
|
+
const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
|
|
58574
|
+
const validator = new LicenseValidator2();
|
|
58575
|
+
const license = await validator.loadAndValidate();
|
|
58576
|
+
if (!license || !validator.hasFeature("scheduler-sql")) {
|
|
58577
|
+
throw new Error(
|
|
58578
|
+
`The ${driver} schedule storage driver requires a Visor Enterprise license with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`
|
|
58579
|
+
);
|
|
58580
|
+
}
|
|
58581
|
+
if (validator.isInGracePeriod()) {
|
|
58582
|
+
console.warn(
|
|
58583
|
+
"[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
|
|
58584
|
+
);
|
|
58585
|
+
}
|
|
58586
|
+
const { KnexStoreBackend: KnexStoreBackend2 } = await Promise.resolve().then(() => (init_knex_store(), knex_store_exports));
|
|
58587
|
+
return new KnexStoreBackend2(driver, storageConfig, haConfig);
|
|
58588
|
+
}
|
|
58589
|
+
var init_loader = __esm({
|
|
58590
|
+
"src/enterprise/loader.ts"() {
|
|
58591
|
+
"use strict";
|
|
58592
|
+
init_default_engine();
|
|
58593
|
+
}
|
|
58594
|
+
});
|
|
58595
|
+
|
|
57177
58596
|
// src/event-bus/event-bus.ts
|
|
57178
58597
|
var event_bus_exports = {};
|
|
57179
58598
|
__export(event_bus_exports, {
|
|
@@ -58080,8 +59499,8 @@ ${content}
|
|
|
58080
59499
|
* Sleep utility
|
|
58081
59500
|
*/
|
|
58082
59501
|
sleep(ms) {
|
|
58083
|
-
return new Promise((
|
|
58084
|
-
const t = setTimeout(
|
|
59502
|
+
return new Promise((resolve19) => {
|
|
59503
|
+
const t = setTimeout(resolve19, ms);
|
|
58085
59504
|
if (typeof t.unref === "function") {
|
|
58086
59505
|
try {
|
|
58087
59506
|
t.unref();
|
|
@@ -58366,8 +59785,8 @@ ${end}`);
|
|
|
58366
59785
|
async updateGroupedComment(ctx, comments, group, changedIds) {
|
|
58367
59786
|
const existingLock = this.updateLocks.get(group);
|
|
58368
59787
|
let resolveLock;
|
|
58369
|
-
const ourLock = new Promise((
|
|
58370
|
-
resolveLock =
|
|
59788
|
+
const ourLock = new Promise((resolve19) => {
|
|
59789
|
+
resolveLock = resolve19;
|
|
58371
59790
|
});
|
|
58372
59791
|
this.updateLocks.set(group, ourLock);
|
|
58373
59792
|
try {
|
|
@@ -58698,7 +60117,7 @@ ${blocks}
|
|
|
58698
60117
|
* Sleep utility for enforcing delays
|
|
58699
60118
|
*/
|
|
58700
60119
|
sleep(ms) {
|
|
58701
|
-
return new Promise((
|
|
60120
|
+
return new Promise((resolve19) => setTimeout(resolve19, ms));
|
|
58702
60121
|
}
|
|
58703
60122
|
};
|
|
58704
60123
|
}
|
|
@@ -59435,6 +60854,21 @@ ${message}`;
|
|
|
59435
60854
|
if (out && typeof out._rawOutput === "string" && out._rawOutput.trim().length > 0) {
|
|
59436
60855
|
text = (text || "") + "\n\n" + out._rawOutput.trim();
|
|
59437
60856
|
}
|
|
60857
|
+
if (!text) {
|
|
60858
|
+
const issues = result?.issues || [];
|
|
60859
|
+
const errorIssues = issues.filter(
|
|
60860
|
+
(i) => i.severity === "error" && (i.ruleId?.startsWith("system/") || i.ruleId?.endsWith("/error"))
|
|
60861
|
+
);
|
|
60862
|
+
if (errorIssues.length > 0) {
|
|
60863
|
+
const errorMessages = errorIssues.map((i) => i.message).join("\n");
|
|
60864
|
+
text = `:warning: Something went wrong while processing your request:
|
|
60865
|
+
${errorMessages}`;
|
|
60866
|
+
this.errorNotified = true;
|
|
60867
|
+
ctx.logger.warn(
|
|
60868
|
+
`[slack-frontend] posting error fallback for ${checkId}: ${errorIssues.length} system error(s)`
|
|
60869
|
+
);
|
|
60870
|
+
}
|
|
60871
|
+
}
|
|
59438
60872
|
if (!text) {
|
|
59439
60873
|
ctx.logger.info(
|
|
59440
60874
|
`[slack-frontend] skip posting AI reply for ${checkId}: no renderable text in check output`
|
|
@@ -60545,11 +61979,11 @@ var require_request3 = __commonJS({
|
|
|
60545
61979
|
"use strict";
|
|
60546
61980
|
var __awaiter = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) {
|
|
60547
61981
|
function adopt(value) {
|
|
60548
|
-
return value instanceof P ? value : new P(function(
|
|
60549
|
-
|
|
61982
|
+
return value instanceof P ? value : new P(function(resolve19) {
|
|
61983
|
+
resolve19(value);
|
|
60550
61984
|
});
|
|
60551
61985
|
}
|
|
60552
|
-
return new (P || (P = Promise))(function(
|
|
61986
|
+
return new (P || (P = Promise))(function(resolve19, reject) {
|
|
60553
61987
|
function fulfilled(value) {
|
|
60554
61988
|
try {
|
|
60555
61989
|
step(generator.next(value));
|
|
@@ -60565,7 +61999,7 @@ var require_request3 = __commonJS({
|
|
|
60565
61999
|
}
|
|
60566
62000
|
}
|
|
60567
62001
|
function step(result) {
|
|
60568
|
-
result.done ?
|
|
62002
|
+
result.done ? resolve19(result.value) : adopt(result.value).then(fulfilled, rejected);
|
|
60569
62003
|
}
|
|
60570
62004
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
60571
62005
|
});
|
|
@@ -60589,9 +62023,9 @@ var require_request3 = __commonJS({
|
|
|
60589
62023
|
HttpMethod2["PATCH"] = "PATCH";
|
|
60590
62024
|
})(HttpMethod = exports2.HttpMethod || (exports2.HttpMethod = {}));
|
|
60591
62025
|
var SvixRequest = class {
|
|
60592
|
-
constructor(method,
|
|
62026
|
+
constructor(method, path33) {
|
|
60593
62027
|
this.method = method;
|
|
60594
|
-
this.path =
|
|
62028
|
+
this.path = path33;
|
|
60595
62029
|
this.queryParams = {};
|
|
60596
62030
|
this.headerParams = {};
|
|
60597
62031
|
}
|
|
@@ -60694,7 +62128,7 @@ var require_request3 = __commonJS({
|
|
|
60694
62128
|
}
|
|
60695
62129
|
function sendWithRetry(url, init, retryScheduleInMs, nextInterval = 50, triesLeft = 2, fetchImpl = fetch, retryCount = 1) {
|
|
60696
62130
|
return __awaiter(this, void 0, void 0, function* () {
|
|
60697
|
-
const sleep = (interval) => new Promise((
|
|
62131
|
+
const sleep = (interval) => new Promise((resolve19) => setTimeout(resolve19, interval));
|
|
60698
62132
|
try {
|
|
60699
62133
|
const response = yield fetchImpl(url, init);
|
|
60700
62134
|
if (triesLeft <= 0 || response.status < 500) {
|
|
@@ -69768,7 +71202,7 @@ ${message}`;
|
|
|
69768
71202
|
});
|
|
69769
71203
|
|
|
69770
71204
|
// src/agent-protocol/task-store.ts
|
|
69771
|
-
function
|
|
71205
|
+
function safeJsonParse3(value) {
|
|
69772
71206
|
if (!value) return void 0;
|
|
69773
71207
|
try {
|
|
69774
71208
|
return JSON.parse(value);
|
|
@@ -69785,12 +71219,12 @@ function taskRowToAgentTask(row) {
|
|
|
69785
71219
|
context_id: row.context_id,
|
|
69786
71220
|
status: {
|
|
69787
71221
|
state: row.state,
|
|
69788
|
-
message:
|
|
71222
|
+
message: safeJsonParse3(row.status_message),
|
|
69789
71223
|
timestamp: row.updated_at
|
|
69790
71224
|
},
|
|
69791
|
-
artifacts:
|
|
69792
|
-
history:
|
|
69793
|
-
metadata:
|
|
71225
|
+
artifacts: safeJsonParse3(row.artifacts) ?? [],
|
|
71226
|
+
history: safeJsonParse3(row.history) ?? [],
|
|
71227
|
+
metadata: safeJsonParse3(row.request_metadata),
|
|
69794
71228
|
workflow_id: row.workflow_id ?? void 0
|
|
69795
71229
|
};
|
|
69796
71230
|
}
|
|
@@ -70027,7 +71461,7 @@ var init_task_store = __esm({
|
|
|
70027
71461
|
const db = this.getDb();
|
|
70028
71462
|
const row = db.prepare("SELECT artifacts FROM agent_tasks WHERE id = ?").get(taskId);
|
|
70029
71463
|
if (!row) throw new TaskNotFoundError(taskId);
|
|
70030
|
-
const artifacts =
|
|
71464
|
+
const artifacts = safeJsonParse3(row.artifacts) ?? [];
|
|
70031
71465
|
artifacts.push(artifact);
|
|
70032
71466
|
db.prepare("UPDATE agent_tasks SET artifacts = ?, updated_at = ? WHERE id = ?").run(
|
|
70033
71467
|
JSON.stringify(artifacts),
|
|
@@ -70039,7 +71473,7 @@ var init_task_store = __esm({
|
|
|
70039
71473
|
const db = this.getDb();
|
|
70040
71474
|
const row = db.prepare("SELECT history FROM agent_tasks WHERE id = ?").get(taskId);
|
|
70041
71475
|
if (!row) throw new TaskNotFoundError(taskId);
|
|
70042
|
-
const history =
|
|
71476
|
+
const history = safeJsonParse3(row.history) ?? [];
|
|
70043
71477
|
history.push(message);
|
|
70044
71478
|
db.prepare("UPDATE agent_tasks SET history = ?, updated_at = ? WHERE id = ?").run(
|
|
70045
71479
|
JSON.stringify(history),
|
|
@@ -70551,13 +71985,13 @@ __export(a2a_frontend_exports, {
|
|
|
70551
71985
|
resultToArtifacts: () => resultToArtifacts
|
|
70552
71986
|
});
|
|
70553
71987
|
function readJsonBody(req) {
|
|
70554
|
-
return new Promise((
|
|
71988
|
+
return new Promise((resolve19, reject) => {
|
|
70555
71989
|
const chunks = [];
|
|
70556
71990
|
req.on("data", (chunk) => chunks.push(chunk));
|
|
70557
71991
|
req.on("end", () => {
|
|
70558
71992
|
try {
|
|
70559
71993
|
const body = Buffer.concat(chunks).toString("utf8");
|
|
70560
|
-
|
|
71994
|
+
resolve19(body ? JSON.parse(body) : {});
|
|
70561
71995
|
} catch {
|
|
70562
71996
|
reject(new ParseError("Malformed JSON body"));
|
|
70563
71997
|
}
|
|
@@ -70800,12 +72234,12 @@ var init_a2a_frontend = __esm({
|
|
|
70800
72234
|
}
|
|
70801
72235
|
const port = this.config.port ?? 9e3;
|
|
70802
72236
|
const host = this.config.host ?? "0.0.0.0";
|
|
70803
|
-
await new Promise((
|
|
72237
|
+
await new Promise((resolve19) => {
|
|
70804
72238
|
this.server.listen(port, host, () => {
|
|
70805
72239
|
const addr = this.server.address();
|
|
70806
72240
|
this._boundPort = typeof addr === "object" && addr ? addr.port : port;
|
|
70807
72241
|
logger.info(`A2A server listening on ${host}:${this._boundPort}`);
|
|
70808
|
-
|
|
72242
|
+
resolve19();
|
|
70809
72243
|
});
|
|
70810
72244
|
});
|
|
70811
72245
|
if (this.agentCard) {
|
|
@@ -70829,8 +72263,8 @@ var init_a2a_frontend = __esm({
|
|
|
70829
72263
|
}
|
|
70830
72264
|
this.streamManager.shutdown();
|
|
70831
72265
|
if (this.server) {
|
|
70832
|
-
await new Promise((
|
|
70833
|
-
this.server.close((err) => err ? reject(err) :
|
|
72266
|
+
await new Promise((resolve19, reject) => {
|
|
72267
|
+
this.server.close((err) => err ? reject(err) : resolve19());
|
|
70834
72268
|
});
|
|
70835
72269
|
this.server = null;
|
|
70836
72270
|
}
|
|
@@ -71547,15 +72981,15 @@ function serializeRunState(state) {
|
|
|
71547
72981
|
])
|
|
71548
72982
|
};
|
|
71549
72983
|
}
|
|
71550
|
-
var
|
|
72984
|
+
var path32, fs28, StateMachineExecutionEngine;
|
|
71551
72985
|
var init_state_machine_execution_engine = __esm({
|
|
71552
72986
|
"src/state-machine-execution-engine.ts"() {
|
|
71553
72987
|
"use strict";
|
|
71554
72988
|
init_runner();
|
|
71555
72989
|
init_logger();
|
|
71556
72990
|
init_sandbox_manager();
|
|
71557
|
-
|
|
71558
|
-
|
|
72991
|
+
path32 = __toESM(require("path"));
|
|
72992
|
+
fs28 = __toESM(require("fs"));
|
|
71559
72993
|
StateMachineExecutionEngine = class _StateMachineExecutionEngine {
|
|
71560
72994
|
workingDirectory;
|
|
71561
72995
|
executionContext;
|
|
@@ -71787,8 +73221,8 @@ var init_state_machine_execution_engine = __esm({
|
|
|
71787
73221
|
logger.debug(
|
|
71788
73222
|
`[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`
|
|
71789
73223
|
);
|
|
71790
|
-
const { loadEnterprisePolicyEngine } = await
|
|
71791
|
-
context2.policyEngine = await
|
|
73224
|
+
const { loadEnterprisePolicyEngine: loadEnterprisePolicyEngine2 } = await Promise.resolve().then(() => (init_loader(), loader_exports));
|
|
73225
|
+
context2.policyEngine = await loadEnterprisePolicyEngine2(configWithTagFilter.policy);
|
|
71792
73226
|
logger.debug(
|
|
71793
73227
|
`[PolicyEngine] Initialized: ${context2.policyEngine?.constructor?.name || "unknown"}`
|
|
71794
73228
|
);
|
|
@@ -71942,9 +73376,9 @@ var init_state_machine_execution_engine = __esm({
|
|
|
71942
73376
|
}
|
|
71943
73377
|
const checkId = String(ev?.checkId || "unknown");
|
|
71944
73378
|
const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
|
|
71945
|
-
const baseDir = process.env.VISOR_SNAPSHOT_DIR ||
|
|
71946
|
-
|
|
71947
|
-
const filePath =
|
|
73379
|
+
const baseDir = process.env.VISOR_SNAPSHOT_DIR || path32.resolve(process.cwd(), ".visor", "snapshots");
|
|
73380
|
+
fs28.mkdirSync(baseDir, { recursive: true });
|
|
73381
|
+
const filePath = path32.join(baseDir, `${threadKey}-${checkId}.json`);
|
|
71948
73382
|
await this.saveSnapshotToFile(filePath);
|
|
71949
73383
|
logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
|
|
71950
73384
|
try {
|
|
@@ -72085,7 +73519,7 @@ var init_state_machine_execution_engine = __esm({
|
|
|
72085
73519
|
* Does not include secrets. Intended for debugging and future resume support.
|
|
72086
73520
|
*/
|
|
72087
73521
|
async saveSnapshotToFile(filePath) {
|
|
72088
|
-
const
|
|
73522
|
+
const fs29 = await import("fs/promises");
|
|
72089
73523
|
const ctx = this._lastContext;
|
|
72090
73524
|
const runner = this._lastRunner;
|
|
72091
73525
|
if (!ctx || !runner) {
|
|
@@ -72105,14 +73539,14 @@ var init_state_machine_execution_engine = __esm({
|
|
|
72105
73539
|
journal: entries,
|
|
72106
73540
|
requestedChecks: ctx.requestedChecks || []
|
|
72107
73541
|
};
|
|
72108
|
-
await
|
|
73542
|
+
await fs29.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
|
|
72109
73543
|
}
|
|
72110
73544
|
/**
|
|
72111
73545
|
* Load a snapshot JSON from file and return it. Resume support can build on this.
|
|
72112
73546
|
*/
|
|
72113
73547
|
async loadSnapshotFromFile(filePath) {
|
|
72114
|
-
const
|
|
72115
|
-
const raw = await
|
|
73548
|
+
const fs29 = await import("fs/promises");
|
|
73549
|
+
const raw = await fs29.readFile(filePath, "utf8");
|
|
72116
73550
|
return JSON.parse(raw);
|
|
72117
73551
|
}
|
|
72118
73552
|
/**
|