@probelabs/visor 0.1.182-ee → 0.1.182
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/generated/config-schema.json +43 -6
- package/dist/index.js +23 -1840
- package/dist/output/traces/run-2026-03-18T19-02-50-465Z.ndjson +157 -0
- package/dist/output/traces/run-2026-03-18T19-03-30-428Z.ndjson +2333 -0
- package/dist/sdk/a2a-frontend-5J3UNFY4.mjs +1718 -0
- package/dist/sdk/a2a-frontend-5J3UNFY4.mjs.map +1 -0
- package/dist/sdk/check-provider-registry-MHXQGUNN.mjs +31 -0
- package/dist/sdk/{check-provider-registry-I4BCWKRU.mjs → check-provider-registry-Y33CRFVD.mjs} +2 -2
- package/dist/sdk/chunk-4AXAVXG5.mjs +390 -0
- package/dist/sdk/chunk-4AXAVXG5.mjs.map +1 -0
- package/dist/sdk/chunk-5J3DNRF7.mjs +1502 -0
- package/dist/sdk/chunk-5J3DNRF7.mjs.map +1 -0
- package/dist/sdk/chunk-7Z2WHX2J.mjs +46194 -0
- package/dist/sdk/{chunk-TQQNSHQV.mjs.map → chunk-7Z2WHX2J.mjs.map} +1 -1
- package/dist/sdk/{chunk-TQQNSHQV.mjs → chunk-JCOSKBMP.mjs} +9 -9
- package/dist/sdk/chunk-JCOSKBMP.mjs.map +1 -0
- package/dist/sdk/chunk-MK7ONH47.mjs +739 -0
- package/dist/sdk/chunk-MK7ONH47.mjs.map +1 -0
- package/dist/sdk/chunk-V75NEIXL.mjs +296 -0
- package/dist/sdk/chunk-V75NEIXL.mjs.map +1 -0
- package/dist/sdk/failure-condition-evaluator-R6DCDJAV.mjs +18 -0
- package/dist/sdk/github-frontend-3PSCKPAJ.mjs +1394 -0
- package/dist/sdk/github-frontend-3PSCKPAJ.mjs.map +1 -0
- package/dist/sdk/host-54CHV2LW.mjs +87 -0
- package/dist/sdk/{host-QBJ7TOWG.mjs → host-WAU6CT42.mjs} +1 -1
- package/dist/sdk/host-WAU6CT42.mjs.map +1 -0
- package/dist/sdk/routing-EBAE5SSO.mjs +26 -0
- package/dist/sdk/{schedule-tool-AECLFHSY.mjs → schedule-tool-POY3CDZL.mjs} +2 -2
- package/dist/sdk/schedule-tool-POY3CDZL.mjs.map +1 -0
- package/dist/sdk/schedule-tool-R2OAATUS.mjs +37 -0
- package/dist/sdk/schedule-tool-R2OAATUS.mjs.map +1 -0
- package/dist/sdk/{schedule-tool-handler-6QLZRTQA.mjs → schedule-tool-handler-JMAKHPI7.mjs} +2 -2
- package/dist/sdk/schedule-tool-handler-JMAKHPI7.mjs.map +1 -0
- package/dist/sdk/schedule-tool-handler-MWFUIQKR.mjs +41 -0
- package/dist/sdk/schedule-tool-handler-MWFUIQKR.mjs.map +1 -0
- package/dist/sdk/sdk.js +294 -1668
- package/dist/sdk/sdk.js.map +1 -1
- package/dist/sdk/sdk.mjs +5 -5
- package/dist/sdk/trace-helpers-HL5FBX65.mjs +29 -0
- package/dist/sdk/trace-helpers-HL5FBX65.mjs.map +1 -0
- package/dist/sdk/track-execution-YUXQ6WQH.mjs +136 -0
- package/dist/sdk/track-execution-YUXQ6WQH.mjs.map +1 -0
- package/dist/sdk/{workflow-check-provider-EXMC6JIS.mjs → workflow-check-provider-SE5I7EMA.mjs} +2 -2
- package/dist/sdk/workflow-check-provider-SE5I7EMA.mjs.map +1 -0
- package/dist/sdk/workflow-check-provider-YDGZRI3Z.mjs +31 -0
- package/dist/sdk/workflow-check-provider-YDGZRI3Z.mjs.map +1 -0
- package/dist/traces/run-2026-03-18T19-02-50-465Z.ndjson +157 -0
- package/dist/traces/run-2026-03-18T19-03-30-428Z.ndjson +2333 -0
- package/package.json +1 -1
- package/dist/sdk/knex-store-QCEW4I4R.mjs +0 -527
- package/dist/sdk/knex-store-QCEW4I4R.mjs.map +0 -1
- package/dist/sdk/loader-ZNKKJEZ3.mjs +0 -89
- package/dist/sdk/loader-ZNKKJEZ3.mjs.map +0 -1
- package/dist/sdk/opa-policy-engine-QCSSIMUF.mjs +0 -655
- package/dist/sdk/opa-policy-engine-QCSSIMUF.mjs.map +0 -1
- package/dist/sdk/validator-XTZJZZJH.mjs +0 -134
- package/dist/sdk/validator-XTZJZZJH.mjs.map +0 -1
- /package/dist/sdk/{check-provider-registry-I4BCWKRU.mjs.map → check-provider-registry-MHXQGUNN.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-AECLFHSY.mjs.map → check-provider-registry-Y33CRFVD.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-6QLZRTQA.mjs.map → failure-condition-evaluator-R6DCDJAV.mjs.map} +0 -0
- /package/dist/sdk/{host-QBJ7TOWG.mjs.map → host-54CHV2LW.mjs.map} +0 -0
- /package/dist/sdk/{workflow-check-provider-EXMC6JIS.mjs.map → routing-EBAE5SSO.mjs.map} +0 -0
package/dist/sdk/sdk.js
CHANGED
|
@@ -704,7 +704,7 @@ var require_package = __commonJS({
|
|
|
704
704
|
"package.json"(exports2, module2) {
|
|
705
705
|
module2.exports = {
|
|
706
706
|
name: "@probelabs/visor",
|
|
707
|
-
version: "0.1.
|
|
707
|
+
version: "0.1.182",
|
|
708
708
|
main: "dist/index.js",
|
|
709
709
|
bin: {
|
|
710
710
|
visor: "./dist/index.js"
|
|
@@ -1156,11 +1156,11 @@ function getTracer() {
|
|
|
1156
1156
|
}
|
|
1157
1157
|
async function withActiveSpan(name, attrs, fn) {
|
|
1158
1158
|
const tracer = getTracer();
|
|
1159
|
-
return await new Promise((
|
|
1159
|
+
return await new Promise((resolve17, reject) => {
|
|
1160
1160
|
const callback = async (span) => {
|
|
1161
1161
|
try {
|
|
1162
1162
|
const res = await fn(span);
|
|
1163
|
-
|
|
1163
|
+
resolve17(res);
|
|
1164
1164
|
} catch (err) {
|
|
1165
1165
|
try {
|
|
1166
1166
|
if (err instanceof Error) span.recordException(err);
|
|
@@ -1285,19 +1285,19 @@ function __getOrCreateNdjsonPath() {
|
|
|
1285
1285
|
try {
|
|
1286
1286
|
if (process.env.VISOR_TELEMETRY_SINK && process.env.VISOR_TELEMETRY_SINK !== "file")
|
|
1287
1287
|
return null;
|
|
1288
|
-
const
|
|
1289
|
-
const
|
|
1288
|
+
const path31 = require("path");
|
|
1289
|
+
const fs29 = require("fs");
|
|
1290
1290
|
if (process.env.VISOR_FALLBACK_TRACE_FILE) {
|
|
1291
1291
|
__ndjsonPath = process.env.VISOR_FALLBACK_TRACE_FILE;
|
|
1292
|
-
const dir =
|
|
1293
|
-
if (!
|
|
1292
|
+
const dir = path31.dirname(__ndjsonPath);
|
|
1293
|
+
if (!fs29.existsSync(dir)) fs29.mkdirSync(dir, { recursive: true });
|
|
1294
1294
|
return __ndjsonPath;
|
|
1295
1295
|
}
|
|
1296
|
-
const outDir = process.env.VISOR_TRACE_DIR ||
|
|
1297
|
-
if (!
|
|
1296
|
+
const outDir = process.env.VISOR_TRACE_DIR || path31.join(process.cwd(), "output", "traces");
|
|
1297
|
+
if (!fs29.existsSync(outDir)) fs29.mkdirSync(outDir, { recursive: true });
|
|
1298
1298
|
if (!__ndjsonPath) {
|
|
1299
1299
|
const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
1300
|
-
__ndjsonPath =
|
|
1300
|
+
__ndjsonPath = path31.join(outDir, `${ts}.ndjson`);
|
|
1301
1301
|
}
|
|
1302
1302
|
return __ndjsonPath;
|
|
1303
1303
|
} catch {
|
|
@@ -1306,11 +1306,11 @@ function __getOrCreateNdjsonPath() {
|
|
|
1306
1306
|
}
|
|
1307
1307
|
function _appendRunMarker() {
|
|
1308
1308
|
try {
|
|
1309
|
-
const
|
|
1309
|
+
const fs29 = require("fs");
|
|
1310
1310
|
const p = __getOrCreateNdjsonPath();
|
|
1311
1311
|
if (!p) return;
|
|
1312
1312
|
const line = { name: "visor.run", attributes: { started: true } };
|
|
1313
|
-
|
|
1313
|
+
fs29.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
|
|
1314
1314
|
} catch {
|
|
1315
1315
|
}
|
|
1316
1316
|
}
|
|
@@ -3397,7 +3397,7 @@ var init_failure_condition_evaluator = __esm({
|
|
|
3397
3397
|
*/
|
|
3398
3398
|
evaluateExpression(condition, context2) {
|
|
3399
3399
|
try {
|
|
3400
|
-
const
|
|
3400
|
+
const normalize5 = (expr) => {
|
|
3401
3401
|
const trimmed = expr.trim();
|
|
3402
3402
|
if (!/[\n;]/.test(trimmed)) return trimmed;
|
|
3403
3403
|
const parts = trimmed.split(/[\n;]+/).map((s) => s.trim()).filter((s) => s.length > 0 && !s.startsWith("//"));
|
|
@@ -3555,7 +3555,7 @@ var init_failure_condition_evaluator = __esm({
|
|
|
3555
3555
|
try {
|
|
3556
3556
|
exec2 = this.sandbox.compile(`return (${raw});`);
|
|
3557
3557
|
} catch {
|
|
3558
|
-
const normalizedExpr =
|
|
3558
|
+
const normalizedExpr = normalize5(condition);
|
|
3559
3559
|
exec2 = this.sandbox.compile(`return (${normalizedExpr});`);
|
|
3560
3560
|
}
|
|
3561
3561
|
const result = exec2(scope).run();
|
|
@@ -3938,9 +3938,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
3938
3938
|
});
|
|
3939
3939
|
liquid.registerFilter("get", (obj, pathExpr) => {
|
|
3940
3940
|
if (obj == null) return void 0;
|
|
3941
|
-
const
|
|
3942
|
-
if (!
|
|
3943
|
-
const parts =
|
|
3941
|
+
const path31 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
|
|
3942
|
+
if (!path31) return obj;
|
|
3943
|
+
const parts = path31.split(".");
|
|
3944
3944
|
let cur = obj;
|
|
3945
3945
|
for (const p of parts) {
|
|
3946
3946
|
if (cur == null) return void 0;
|
|
@@ -4059,9 +4059,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
4059
4059
|
}
|
|
4060
4060
|
}
|
|
4061
4061
|
const defaultRole = typeof rolesCfg.default === "string" && rolesCfg.default.trim() ? rolesCfg.default.trim() : void 0;
|
|
4062
|
-
const getNested = (obj,
|
|
4063
|
-
if (!obj || !
|
|
4064
|
-
const parts =
|
|
4062
|
+
const getNested = (obj, path31) => {
|
|
4063
|
+
if (!obj || !path31) return void 0;
|
|
4064
|
+
const parts = path31.split(".");
|
|
4065
4065
|
let cur = obj;
|
|
4066
4066
|
for (const p of parts) {
|
|
4067
4067
|
if (cur == null) return void 0;
|
|
@@ -7834,8 +7834,8 @@ var init_dependency_gating = __esm({
|
|
|
7834
7834
|
async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
7835
7835
|
try {
|
|
7836
7836
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
7837
|
-
const
|
|
7838
|
-
const
|
|
7837
|
+
const fs29 = await import("fs/promises");
|
|
7838
|
+
const path31 = await import("path");
|
|
7839
7839
|
const schemaRaw = checkConfig.schema || "plain";
|
|
7840
7840
|
const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
|
|
7841
7841
|
let templateContent;
|
|
@@ -7844,25 +7844,25 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
|
7844
7844
|
logger.debug(`[TemplateRenderer] Using inline template for ${checkId}`);
|
|
7845
7845
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
7846
7846
|
const file = String(checkConfig.template.file);
|
|
7847
|
-
const resolved =
|
|
7848
|
-
templateContent = await
|
|
7847
|
+
const resolved = path31.resolve(process.cwd(), file);
|
|
7848
|
+
templateContent = await fs29.readFile(resolved, "utf-8");
|
|
7849
7849
|
logger.debug(`[TemplateRenderer] Using template file for ${checkId}: ${resolved}`);
|
|
7850
7850
|
} else if (schema && schema !== "plain") {
|
|
7851
7851
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
7852
7852
|
if (sanitized) {
|
|
7853
7853
|
const candidatePaths = [
|
|
7854
|
-
|
|
7854
|
+
path31.join(__dirname, "output", sanitized, "template.liquid"),
|
|
7855
7855
|
// bundled: dist/output/
|
|
7856
|
-
|
|
7856
|
+
path31.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
7857
7857
|
// source: output/
|
|
7858
|
-
|
|
7858
|
+
path31.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
7859
7859
|
// fallback: cwd/output/
|
|
7860
|
-
|
|
7860
|
+
path31.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
7861
7861
|
// fallback: cwd/dist/output/
|
|
7862
7862
|
];
|
|
7863
7863
|
for (const p of candidatePaths) {
|
|
7864
7864
|
try {
|
|
7865
|
-
templateContent = await
|
|
7865
|
+
templateContent = await fs29.readFile(p, "utf-8");
|
|
7866
7866
|
if (templateContent) {
|
|
7867
7867
|
logger.debug(`[TemplateRenderer] Using schema template for ${checkId}: ${p}`);
|
|
7868
7868
|
break;
|
|
@@ -8284,7 +8284,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
8284
8284
|
}
|
|
8285
8285
|
try {
|
|
8286
8286
|
const originalProbePath = process.env.PROBE_PATH;
|
|
8287
|
-
const
|
|
8287
|
+
const fs29 = require("fs");
|
|
8288
8288
|
const possiblePaths = [
|
|
8289
8289
|
// Relative to current working directory (most common in production)
|
|
8290
8290
|
path6.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
|
|
@@ -8295,7 +8295,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
8295
8295
|
];
|
|
8296
8296
|
let probeBinaryPath;
|
|
8297
8297
|
for (const candidatePath of possiblePaths) {
|
|
8298
|
-
if (
|
|
8298
|
+
if (fs29.existsSync(candidatePath)) {
|
|
8299
8299
|
probeBinaryPath = candidatePath;
|
|
8300
8300
|
break;
|
|
8301
8301
|
}
|
|
@@ -8402,7 +8402,7 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
8402
8402
|
if (chromiumPath) {
|
|
8403
8403
|
env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;
|
|
8404
8404
|
}
|
|
8405
|
-
const result = await new Promise((
|
|
8405
|
+
const result = await new Promise((resolve17) => {
|
|
8406
8406
|
const proc = (0, import_child_process6.spawn)(
|
|
8407
8407
|
"npx",
|
|
8408
8408
|
[
|
|
@@ -8432,13 +8432,13 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
8432
8432
|
});
|
|
8433
8433
|
proc.on("close", (code) => {
|
|
8434
8434
|
if (code === 0) {
|
|
8435
|
-
|
|
8435
|
+
resolve17({ success: true });
|
|
8436
8436
|
} else {
|
|
8437
|
-
|
|
8437
|
+
resolve17({ success: false, error: stderr || `Exit code ${code}` });
|
|
8438
8438
|
}
|
|
8439
8439
|
});
|
|
8440
8440
|
proc.on("error", (err) => {
|
|
8441
|
-
|
|
8441
|
+
resolve17({ success: false, error: err.message });
|
|
8442
8442
|
});
|
|
8443
8443
|
});
|
|
8444
8444
|
if (!result.success) {
|
|
@@ -9810,8 +9810,8 @@ ${schemaString}`);
|
|
|
9810
9810
|
}
|
|
9811
9811
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
9812
9812
|
try {
|
|
9813
|
-
const
|
|
9814
|
-
const
|
|
9813
|
+
const fs29 = require("fs");
|
|
9814
|
+
const path31 = require("path");
|
|
9815
9815
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
9816
9816
|
const provider = this.config.provider || "auto";
|
|
9817
9817
|
const model = this.config.model || "default";
|
|
@@ -9925,20 +9925,20 @@ ${"=".repeat(60)}
|
|
|
9925
9925
|
`;
|
|
9926
9926
|
readableVersion += `${"=".repeat(60)}
|
|
9927
9927
|
`;
|
|
9928
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
9929
|
-
if (!
|
|
9930
|
-
|
|
9928
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
9929
|
+
if (!fs29.existsSync(debugArtifactsDir)) {
|
|
9930
|
+
fs29.mkdirSync(debugArtifactsDir, { recursive: true });
|
|
9931
9931
|
}
|
|
9932
|
-
const debugFile =
|
|
9932
|
+
const debugFile = path31.join(
|
|
9933
9933
|
debugArtifactsDir,
|
|
9934
9934
|
`prompt-${_checkName || "unknown"}-${timestamp}.json`
|
|
9935
9935
|
);
|
|
9936
|
-
|
|
9937
|
-
const readableFile =
|
|
9936
|
+
fs29.writeFileSync(debugFile, debugJson, "utf-8");
|
|
9937
|
+
const readableFile = path31.join(
|
|
9938
9938
|
debugArtifactsDir,
|
|
9939
9939
|
`prompt-${_checkName || "unknown"}-${timestamp}.txt`
|
|
9940
9940
|
);
|
|
9941
|
-
|
|
9941
|
+
fs29.writeFileSync(readableFile, readableVersion, "utf-8");
|
|
9942
9942
|
log(`
|
|
9943
9943
|
\u{1F4BE} Full debug info saved to:`);
|
|
9944
9944
|
log(` JSON: ${debugFile}`);
|
|
@@ -9976,8 +9976,8 @@ ${"=".repeat(60)}
|
|
|
9976
9976
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
9977
9977
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
9978
9978
|
try {
|
|
9979
|
-
const
|
|
9980
|
-
const
|
|
9979
|
+
const fs29 = require("fs");
|
|
9980
|
+
const path31 = require("path");
|
|
9981
9981
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
9982
9982
|
const agentAny2 = agent;
|
|
9983
9983
|
let fullHistory = [];
|
|
@@ -9988,8 +9988,8 @@ ${"=".repeat(60)}
|
|
|
9988
9988
|
} else if (agentAny2._messages) {
|
|
9989
9989
|
fullHistory = agentAny2._messages;
|
|
9990
9990
|
}
|
|
9991
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
9992
|
-
const sessionBase =
|
|
9991
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
9992
|
+
const sessionBase = path31.join(
|
|
9993
9993
|
debugArtifactsDir,
|
|
9994
9994
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
9995
9995
|
);
|
|
@@ -10001,7 +10001,7 @@ ${"=".repeat(60)}
|
|
|
10001
10001
|
schema: effectiveSchema,
|
|
10002
10002
|
totalMessages: fullHistory.length
|
|
10003
10003
|
};
|
|
10004
|
-
|
|
10004
|
+
fs29.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
10005
10005
|
let readable = `=============================================================
|
|
10006
10006
|
`;
|
|
10007
10007
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -10028,7 +10028,7 @@ ${"=".repeat(60)}
|
|
|
10028
10028
|
`;
|
|
10029
10029
|
readable += content + "\n";
|
|
10030
10030
|
});
|
|
10031
|
-
|
|
10031
|
+
fs29.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
10032
10032
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
10033
10033
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
10034
10034
|
} catch (error) {
|
|
@@ -10037,11 +10037,11 @@ ${"=".repeat(60)}
|
|
|
10037
10037
|
}
|
|
10038
10038
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
10039
10039
|
try {
|
|
10040
|
-
const
|
|
10041
|
-
const
|
|
10040
|
+
const fs29 = require("fs");
|
|
10041
|
+
const path31 = require("path");
|
|
10042
10042
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
10043
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
10044
|
-
const responseFile =
|
|
10043
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
10044
|
+
const responseFile = path31.join(
|
|
10045
10045
|
debugArtifactsDir,
|
|
10046
10046
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
10047
10047
|
);
|
|
@@ -10074,7 +10074,7 @@ ${"=".repeat(60)}
|
|
|
10074
10074
|
`;
|
|
10075
10075
|
responseContent += `${"=".repeat(60)}
|
|
10076
10076
|
`;
|
|
10077
|
-
|
|
10077
|
+
fs29.writeFileSync(responseFile, responseContent, "utf-8");
|
|
10078
10078
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
10079
10079
|
} catch (error) {
|
|
10080
10080
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -10090,9 +10090,9 @@ ${"=".repeat(60)}
|
|
|
10090
10090
|
await agentAny._telemetryConfig.shutdown();
|
|
10091
10091
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
|
|
10092
10092
|
if (process.env.GITHUB_ACTIONS) {
|
|
10093
|
-
const
|
|
10094
|
-
if (
|
|
10095
|
-
const stats =
|
|
10093
|
+
const fs29 = require("fs");
|
|
10094
|
+
if (fs29.existsSync(agentAny._traceFilePath)) {
|
|
10095
|
+
const stats = fs29.statSync(agentAny._traceFilePath);
|
|
10096
10096
|
console.log(
|
|
10097
10097
|
`::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
|
|
10098
10098
|
);
|
|
@@ -10353,9 +10353,9 @@ ${schemaString}`);
|
|
|
10353
10353
|
const model = this.config.model || "default";
|
|
10354
10354
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
10355
10355
|
try {
|
|
10356
|
-
const
|
|
10357
|
-
const
|
|
10358
|
-
const
|
|
10356
|
+
const fs29 = require("fs");
|
|
10357
|
+
const path31 = require("path");
|
|
10358
|
+
const os2 = require("os");
|
|
10359
10359
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
10360
10360
|
const debugData = {
|
|
10361
10361
|
timestamp,
|
|
@@ -10427,19 +10427,19 @@ ${"=".repeat(60)}
|
|
|
10427
10427
|
`;
|
|
10428
10428
|
readableVersion += `${"=".repeat(60)}
|
|
10429
10429
|
`;
|
|
10430
|
-
const tempDir =
|
|
10431
|
-
const promptFile =
|
|
10432
|
-
|
|
10430
|
+
const tempDir = os2.tmpdir();
|
|
10431
|
+
const promptFile = path31.join(tempDir, `visor-prompt-${timestamp}.txt`);
|
|
10432
|
+
fs29.writeFileSync(promptFile, prompt, "utf-8");
|
|
10433
10433
|
log(`
|
|
10434
10434
|
\u{1F4BE} Prompt saved to: ${promptFile}`);
|
|
10435
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
10435
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
10436
10436
|
try {
|
|
10437
|
-
const base =
|
|
10437
|
+
const base = path31.join(
|
|
10438
10438
|
debugArtifactsDir,
|
|
10439
10439
|
`prompt-${_checkName || "unknown"}-${timestamp}`
|
|
10440
10440
|
);
|
|
10441
|
-
|
|
10442
|
-
|
|
10441
|
+
fs29.writeFileSync(base + ".json", debugJson, "utf-8");
|
|
10442
|
+
fs29.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
|
|
10443
10443
|
log(`
|
|
10444
10444
|
\u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
|
|
10445
10445
|
} catch {
|
|
@@ -10489,8 +10489,8 @@ $ ${cliCommand}
|
|
|
10489
10489
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
10490
10490
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
10491
10491
|
try {
|
|
10492
|
-
const
|
|
10493
|
-
const
|
|
10492
|
+
const fs29 = require("fs");
|
|
10493
|
+
const path31 = require("path");
|
|
10494
10494
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
10495
10495
|
const agentAny = agent;
|
|
10496
10496
|
let fullHistory = [];
|
|
@@ -10501,8 +10501,8 @@ $ ${cliCommand}
|
|
|
10501
10501
|
} else if (agentAny._messages) {
|
|
10502
10502
|
fullHistory = agentAny._messages;
|
|
10503
10503
|
}
|
|
10504
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
10505
|
-
const sessionBase =
|
|
10504
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
10505
|
+
const sessionBase = path31.join(
|
|
10506
10506
|
debugArtifactsDir,
|
|
10507
10507
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
10508
10508
|
);
|
|
@@ -10514,7 +10514,7 @@ $ ${cliCommand}
|
|
|
10514
10514
|
schema: effectiveSchema,
|
|
10515
10515
|
totalMessages: fullHistory.length
|
|
10516
10516
|
};
|
|
10517
|
-
|
|
10517
|
+
fs29.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
10518
10518
|
let readable = `=============================================================
|
|
10519
10519
|
`;
|
|
10520
10520
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -10541,7 +10541,7 @@ ${"=".repeat(60)}
|
|
|
10541
10541
|
`;
|
|
10542
10542
|
readable += content + "\n";
|
|
10543
10543
|
});
|
|
10544
|
-
|
|
10544
|
+
fs29.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
10545
10545
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
10546
10546
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
10547
10547
|
} catch (error) {
|
|
@@ -10550,11 +10550,11 @@ ${"=".repeat(60)}
|
|
|
10550
10550
|
}
|
|
10551
10551
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
10552
10552
|
try {
|
|
10553
|
-
const
|
|
10554
|
-
const
|
|
10553
|
+
const fs29 = require("fs");
|
|
10554
|
+
const path31 = require("path");
|
|
10555
10555
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
10556
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
10557
|
-
const responseFile =
|
|
10556
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
10557
|
+
const responseFile = path31.join(
|
|
10558
10558
|
debugArtifactsDir,
|
|
10559
10559
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
10560
10560
|
);
|
|
@@ -10587,7 +10587,7 @@ ${"=".repeat(60)}
|
|
|
10587
10587
|
`;
|
|
10588
10588
|
responseContent += `${"=".repeat(60)}
|
|
10589
10589
|
`;
|
|
10590
|
-
|
|
10590
|
+
fs29.writeFileSync(responseFile, responseContent, "utf-8");
|
|
10591
10591
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
10592
10592
|
} catch (error) {
|
|
10593
10593
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -10605,9 +10605,9 @@ ${"=".repeat(60)}
|
|
|
10605
10605
|
await telemetry.shutdown();
|
|
10606
10606
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
|
|
10607
10607
|
if (process.env.GITHUB_ACTIONS) {
|
|
10608
|
-
const
|
|
10609
|
-
if (
|
|
10610
|
-
const stats =
|
|
10608
|
+
const fs29 = require("fs");
|
|
10609
|
+
if (fs29.existsSync(traceFilePath)) {
|
|
10610
|
+
const stats = fs29.statSync(traceFilePath);
|
|
10611
10611
|
console.log(
|
|
10612
10612
|
`::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
|
|
10613
10613
|
);
|
|
@@ -10645,8 +10645,8 @@ ${"=".repeat(60)}
|
|
|
10645
10645
|
* Load schema content from schema files or inline definitions
|
|
10646
10646
|
*/
|
|
10647
10647
|
async loadSchemaContent(schema) {
|
|
10648
|
-
const
|
|
10649
|
-
const
|
|
10648
|
+
const fs29 = require("fs").promises;
|
|
10649
|
+
const path31 = require("path");
|
|
10650
10650
|
if (typeof schema === "object" && schema !== null) {
|
|
10651
10651
|
log("\u{1F4CB} Using inline schema object from configuration");
|
|
10652
10652
|
return JSON.stringify(schema);
|
|
@@ -10659,14 +10659,14 @@ ${"=".repeat(60)}
|
|
|
10659
10659
|
}
|
|
10660
10660
|
} catch {
|
|
10661
10661
|
}
|
|
10662
|
-
if ((schema.startsWith("./") || schema.includes(".json")) && !
|
|
10662
|
+
if ((schema.startsWith("./") || schema.includes(".json")) && !path31.isAbsolute(schema)) {
|
|
10663
10663
|
if (schema.includes("..") || schema.includes("\0")) {
|
|
10664
10664
|
throw new Error("Invalid schema path: path traversal not allowed");
|
|
10665
10665
|
}
|
|
10666
10666
|
try {
|
|
10667
|
-
const schemaPath =
|
|
10667
|
+
const schemaPath = path31.resolve(process.cwd(), schema);
|
|
10668
10668
|
log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
|
|
10669
|
-
const schemaContent = await
|
|
10669
|
+
const schemaContent = await fs29.readFile(schemaPath, "utf-8");
|
|
10670
10670
|
return schemaContent.trim();
|
|
10671
10671
|
} catch (error) {
|
|
10672
10672
|
throw new Error(
|
|
@@ -10680,22 +10680,22 @@ ${"=".repeat(60)}
|
|
|
10680
10680
|
}
|
|
10681
10681
|
const candidatePaths = [
|
|
10682
10682
|
// GitHub Action bundle location
|
|
10683
|
-
|
|
10683
|
+
path31.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
|
|
10684
10684
|
// Historical fallback when src/output was inadvertently bundled as output1/
|
|
10685
|
-
|
|
10685
|
+
path31.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
|
|
10686
10686
|
// Local dev (repo root)
|
|
10687
|
-
|
|
10687
|
+
path31.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
|
|
10688
10688
|
];
|
|
10689
10689
|
for (const schemaPath of candidatePaths) {
|
|
10690
10690
|
try {
|
|
10691
|
-
const schemaContent = await
|
|
10691
|
+
const schemaContent = await fs29.readFile(schemaPath, "utf-8");
|
|
10692
10692
|
return schemaContent.trim();
|
|
10693
10693
|
} catch {
|
|
10694
10694
|
}
|
|
10695
10695
|
}
|
|
10696
|
-
const distPath =
|
|
10697
|
-
const distAltPath =
|
|
10698
|
-
const cwdPath =
|
|
10696
|
+
const distPath = path31.join(__dirname, "output", sanitizedSchemaName, "schema.json");
|
|
10697
|
+
const distAltPath = path31.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
|
|
10698
|
+
const cwdPath = path31.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
|
|
10699
10699
|
throw new Error(
|
|
10700
10700
|
`Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
|
|
10701
10701
|
);
|
|
@@ -10937,7 +10937,7 @@ ${"=".repeat(60)}
|
|
|
10937
10937
|
* Generate mock response for testing
|
|
10938
10938
|
*/
|
|
10939
10939
|
async generateMockResponse(_prompt, _checkName, _schema) {
|
|
10940
|
-
await new Promise((
|
|
10940
|
+
await new Promise((resolve17) => setTimeout(resolve17, 500));
|
|
10941
10941
|
const name = (_checkName || "").toLowerCase();
|
|
10942
10942
|
if (name.includes("extract-facts")) {
|
|
10943
10943
|
const arr = Array.from({ length: 6 }, (_, i) => ({
|
|
@@ -11298,7 +11298,7 @@ var init_command_executor = __esm({
|
|
|
11298
11298
|
* Execute command with stdin input
|
|
11299
11299
|
*/
|
|
11300
11300
|
executeWithStdin(command, options) {
|
|
11301
|
-
return new Promise((
|
|
11301
|
+
return new Promise((resolve17, reject) => {
|
|
11302
11302
|
const childProcess = (0, import_child_process7.exec)(
|
|
11303
11303
|
command,
|
|
11304
11304
|
{
|
|
@@ -11310,7 +11310,7 @@ var init_command_executor = __esm({
|
|
|
11310
11310
|
if (error && error.killed && (error.code === "ETIMEDOUT" || error.signal === "SIGTERM")) {
|
|
11311
11311
|
reject(new Error(`Command timed out after ${options.timeout || 3e4}ms`));
|
|
11312
11312
|
} else {
|
|
11313
|
-
|
|
11313
|
+
resolve17({
|
|
11314
11314
|
stdout: stdout || "",
|
|
11315
11315
|
stderr: stderr || "",
|
|
11316
11316
|
exitCode: error ? error.code || 1 : 0
|
|
@@ -11439,7 +11439,7 @@ async function rateLimitedFetch(url, options, rateLimitConfig) {
|
|
|
11439
11439
|
logger.verbose(
|
|
11440
11440
|
`[rate-limiter] 429 on ${url} (bucket: ${key}), retry ${attempt + 1}/${maxRetries} in ${delayMs}ms`
|
|
11441
11441
|
);
|
|
11442
|
-
await new Promise((
|
|
11442
|
+
await new Promise((resolve17) => setTimeout(resolve17, delayMs));
|
|
11443
11443
|
}
|
|
11444
11444
|
return fetch(url, options);
|
|
11445
11445
|
}
|
|
@@ -11488,8 +11488,8 @@ var init_rate_limiter = __esm({
|
|
|
11488
11488
|
return;
|
|
11489
11489
|
}
|
|
11490
11490
|
const waitMs = Math.ceil((1 - this.tokens) / this.refillRate);
|
|
11491
|
-
return new Promise((
|
|
11492
|
-
const entry = { resolve:
|
|
11491
|
+
return new Promise((resolve17) => {
|
|
11492
|
+
const entry = { resolve: resolve17 };
|
|
11493
11493
|
this.waitQueue.push(entry);
|
|
11494
11494
|
setTimeout(() => {
|
|
11495
11495
|
const idx = this.waitQueue.indexOf(entry);
|
|
@@ -11500,7 +11500,7 @@ var init_rate_limiter = __esm({
|
|
|
11500
11500
|
if (this.tokens >= 1) {
|
|
11501
11501
|
this.tokens -= 1;
|
|
11502
11502
|
}
|
|
11503
|
-
|
|
11503
|
+
resolve17();
|
|
11504
11504
|
}, waitMs);
|
|
11505
11505
|
});
|
|
11506
11506
|
}
|
|
@@ -20533,17 +20533,17 @@ var init_workflow_check_provider = __esm({
|
|
|
20533
20533
|
* so it can be executed by the state machine as a nested workflow.
|
|
20534
20534
|
*/
|
|
20535
20535
|
async loadWorkflowFromConfigPath(sourcePath, baseDir) {
|
|
20536
|
-
const
|
|
20537
|
-
const
|
|
20536
|
+
const path31 = require("path");
|
|
20537
|
+
const fs29 = require("fs");
|
|
20538
20538
|
const yaml6 = require("js-yaml");
|
|
20539
|
-
const resolved =
|
|
20540
|
-
if (!
|
|
20539
|
+
const resolved = path31.isAbsolute(sourcePath) ? sourcePath : path31.resolve(baseDir, sourcePath);
|
|
20540
|
+
if (!fs29.existsSync(resolved)) {
|
|
20541
20541
|
throw new Error(`Workflow config not found at: ${resolved}`);
|
|
20542
20542
|
}
|
|
20543
|
-
const rawContent =
|
|
20543
|
+
const rawContent = fs29.readFileSync(resolved, "utf8");
|
|
20544
20544
|
const rawData = yaml6.load(rawContent);
|
|
20545
20545
|
if (rawData.imports && Array.isArray(rawData.imports)) {
|
|
20546
|
-
const configDir =
|
|
20546
|
+
const configDir = path31.dirname(resolved);
|
|
20547
20547
|
for (const source of rawData.imports) {
|
|
20548
20548
|
const results = await this.registry.import(source, {
|
|
20549
20549
|
basePath: configDir,
|
|
@@ -20573,8 +20573,8 @@ ${errors}`);
|
|
|
20573
20573
|
if (!steps || Object.keys(steps).length === 0) {
|
|
20574
20574
|
throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
|
|
20575
20575
|
}
|
|
20576
|
-
const id =
|
|
20577
|
-
const name = loaded.name || `Workflow from ${
|
|
20576
|
+
const id = path31.basename(resolved).replace(/\.(ya?ml)$/i, "");
|
|
20577
|
+
const name = loaded.name || `Workflow from ${path31.basename(resolved)}`;
|
|
20578
20578
|
const workflowDef = {
|
|
20579
20579
|
id,
|
|
20580
20580
|
name,
|
|
@@ -21383,8 +21383,8 @@ async function createStoreBackend(storageConfig, haConfig) {
|
|
|
21383
21383
|
case "mssql": {
|
|
21384
21384
|
try {
|
|
21385
21385
|
const loaderPath = "../../enterprise/loader";
|
|
21386
|
-
const { loadEnterpriseStoreBackend
|
|
21387
|
-
return await
|
|
21386
|
+
const { loadEnterpriseStoreBackend } = await import(loaderPath);
|
|
21387
|
+
return await loadEnterpriseStoreBackend(driver, storageConfig, haConfig);
|
|
21388
21388
|
} catch (err) {
|
|
21389
21389
|
const msg = err instanceof Error ? err.message : String(err);
|
|
21390
21390
|
logger.error(`[StoreFactory] Failed to load enterprise ${driver} backend: ${msg}`);
|
|
@@ -22521,7 +22521,7 @@ async function findTraceFile(traceId, traceDir) {
|
|
|
22521
22521
|
return null;
|
|
22522
22522
|
}
|
|
22523
22523
|
async function readFirstLine(filePath) {
|
|
22524
|
-
return new Promise((
|
|
22524
|
+
return new Promise((resolve17, reject) => {
|
|
22525
22525
|
const stream = fs15.createReadStream(filePath, { encoding: "utf-8" });
|
|
22526
22526
|
const rl = readline2.createInterface({ input: stream, crlfDelay: Infinity });
|
|
22527
22527
|
let resolved = false;
|
|
@@ -22530,11 +22530,11 @@ async function readFirstLine(filePath) {
|
|
|
22530
22530
|
resolved = true;
|
|
22531
22531
|
rl.close();
|
|
22532
22532
|
stream.destroy();
|
|
22533
|
-
|
|
22533
|
+
resolve17(line.trim() || null);
|
|
22534
22534
|
}
|
|
22535
22535
|
});
|
|
22536
22536
|
rl.on("close", () => {
|
|
22537
|
-
if (!resolved)
|
|
22537
|
+
if (!resolved) resolve17(null);
|
|
22538
22538
|
});
|
|
22539
22539
|
rl.on("error", reject);
|
|
22540
22540
|
});
|
|
@@ -26320,7 +26320,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
26320
26320
|
* Returns the actual bound port number
|
|
26321
26321
|
*/
|
|
26322
26322
|
async start() {
|
|
26323
|
-
return new Promise((
|
|
26323
|
+
return new Promise((resolve17, reject) => {
|
|
26324
26324
|
try {
|
|
26325
26325
|
this.server = import_http.default.createServer((req, res) => {
|
|
26326
26326
|
this.handleRequest(req, res).catch((error) => {
|
|
@@ -26354,7 +26354,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
26354
26354
|
);
|
|
26355
26355
|
}
|
|
26356
26356
|
this.startKeepalive();
|
|
26357
|
-
|
|
26357
|
+
resolve17(this.port);
|
|
26358
26358
|
});
|
|
26359
26359
|
} catch (error) {
|
|
26360
26360
|
reject(error);
|
|
@@ -26464,7 +26464,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
26464
26464
|
logger.debug(
|
|
26465
26465
|
`[CustomToolsSSEServer:${this.sessionId}] Grace period before stop: ${waitMs}ms (activeToolCalls=${this.activeToolCalls})`
|
|
26466
26466
|
);
|
|
26467
|
-
await new Promise((
|
|
26467
|
+
await new Promise((resolve17) => setTimeout(resolve17, waitMs));
|
|
26468
26468
|
}
|
|
26469
26469
|
}
|
|
26470
26470
|
if (this.activeToolCalls > 0) {
|
|
@@ -26473,7 +26473,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
26473
26473
|
`[CustomToolsSSEServer:${this.sessionId}] Waiting for ${this.activeToolCalls} active tool call(s) before stop`
|
|
26474
26474
|
);
|
|
26475
26475
|
while (this.activeToolCalls > 0 && Date.now() - startedAt < effectiveDrainTimeoutMs) {
|
|
26476
|
-
await new Promise((
|
|
26476
|
+
await new Promise((resolve17) => setTimeout(resolve17, 250));
|
|
26477
26477
|
}
|
|
26478
26478
|
if (this.activeToolCalls > 0) {
|
|
26479
26479
|
logger.warn(
|
|
@@ -26499,21 +26499,21 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
26499
26499
|
}
|
|
26500
26500
|
this.connections.clear();
|
|
26501
26501
|
if (this.server) {
|
|
26502
|
-
await new Promise((
|
|
26502
|
+
await new Promise((resolve17, reject) => {
|
|
26503
26503
|
const timeout = setTimeout(() => {
|
|
26504
26504
|
if (this.debug) {
|
|
26505
26505
|
logger.debug(
|
|
26506
26506
|
`[CustomToolsSSEServer:${this.sessionId}] Force closing server after timeout`
|
|
26507
26507
|
);
|
|
26508
26508
|
}
|
|
26509
|
-
this.server?.close(() =>
|
|
26509
|
+
this.server?.close(() => resolve17());
|
|
26510
26510
|
}, 5e3);
|
|
26511
26511
|
this.server.close((error) => {
|
|
26512
26512
|
clearTimeout(timeout);
|
|
26513
26513
|
if (error) {
|
|
26514
26514
|
reject(error);
|
|
26515
26515
|
} else {
|
|
26516
|
-
|
|
26516
|
+
resolve17();
|
|
26517
26517
|
}
|
|
26518
26518
|
});
|
|
26519
26519
|
});
|
|
@@ -27063,7 +27063,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
27063
27063
|
logger.warn(
|
|
27064
27064
|
`[CustomToolsSSEServer:${this.sessionId}] Tool ${toolName} failed (attempt ${attempt + 1}/${retryCount + 1}): ${errorMsg}. Retrying in ${delay}ms`
|
|
27065
27065
|
);
|
|
27066
|
-
await new Promise((
|
|
27066
|
+
await new Promise((resolve17) => setTimeout(resolve17, delay));
|
|
27067
27067
|
attempt++;
|
|
27068
27068
|
}
|
|
27069
27069
|
}
|
|
@@ -27600,9 +27600,9 @@ var init_ai_check_provider = __esm({
|
|
|
27600
27600
|
} else {
|
|
27601
27601
|
resolvedPath = import_path13.default.resolve(process.cwd(), str);
|
|
27602
27602
|
}
|
|
27603
|
-
const
|
|
27603
|
+
const fs29 = require("fs").promises;
|
|
27604
27604
|
try {
|
|
27605
|
-
const stat2 = await
|
|
27605
|
+
const stat2 = await fs29.stat(resolvedPath);
|
|
27606
27606
|
return stat2.isFile();
|
|
27607
27607
|
} catch {
|
|
27608
27608
|
return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
|
|
@@ -33874,14 +33874,14 @@ var require_util = __commonJS({
|
|
|
33874
33874
|
}
|
|
33875
33875
|
const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
|
|
33876
33876
|
let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
|
|
33877
|
-
let
|
|
33877
|
+
let path31 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
|
|
33878
33878
|
if (origin.endsWith("/")) {
|
|
33879
33879
|
origin = origin.substring(0, origin.length - 1);
|
|
33880
33880
|
}
|
|
33881
|
-
if (
|
|
33882
|
-
|
|
33881
|
+
if (path31 && !path31.startsWith("/")) {
|
|
33882
|
+
path31 = `/${path31}`;
|
|
33883
33883
|
}
|
|
33884
|
-
url = new URL(origin +
|
|
33884
|
+
url = new URL(origin + path31);
|
|
33885
33885
|
}
|
|
33886
33886
|
return url;
|
|
33887
33887
|
}
|
|
@@ -35495,20 +35495,20 @@ var require_parseParams = __commonJS({
|
|
|
35495
35495
|
var require_basename = __commonJS({
|
|
35496
35496
|
"node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
|
|
35497
35497
|
"use strict";
|
|
35498
|
-
module2.exports = function basename5(
|
|
35499
|
-
if (typeof
|
|
35498
|
+
module2.exports = function basename5(path31) {
|
|
35499
|
+
if (typeof path31 !== "string") {
|
|
35500
35500
|
return "";
|
|
35501
35501
|
}
|
|
35502
|
-
for (var i =
|
|
35503
|
-
switch (
|
|
35502
|
+
for (var i = path31.length - 1; i >= 0; --i) {
|
|
35503
|
+
switch (path31.charCodeAt(i)) {
|
|
35504
35504
|
case 47:
|
|
35505
35505
|
// '/'
|
|
35506
35506
|
case 92:
|
|
35507
|
-
|
|
35508
|
-
return
|
|
35507
|
+
path31 = path31.slice(i + 1);
|
|
35508
|
+
return path31 === ".." || path31 === "." ? "" : path31;
|
|
35509
35509
|
}
|
|
35510
35510
|
}
|
|
35511
|
-
return
|
|
35511
|
+
return path31 === ".." || path31 === "." ? "" : path31;
|
|
35512
35512
|
};
|
|
35513
35513
|
}
|
|
35514
35514
|
});
|
|
@@ -36512,11 +36512,11 @@ var require_util2 = __commonJS({
|
|
|
36512
36512
|
var assert = require("assert");
|
|
36513
36513
|
var { isUint8Array } = require("util/types");
|
|
36514
36514
|
var supportedHashes = [];
|
|
36515
|
-
var
|
|
36515
|
+
var crypto8;
|
|
36516
36516
|
try {
|
|
36517
|
-
|
|
36517
|
+
crypto8 = require("crypto");
|
|
36518
36518
|
const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
|
|
36519
|
-
supportedHashes =
|
|
36519
|
+
supportedHashes = crypto8.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
|
|
36520
36520
|
} catch {
|
|
36521
36521
|
}
|
|
36522
36522
|
function responseURL(response) {
|
|
@@ -36793,7 +36793,7 @@ var require_util2 = __commonJS({
|
|
|
36793
36793
|
}
|
|
36794
36794
|
}
|
|
36795
36795
|
function bytesMatch(bytes, metadataList) {
|
|
36796
|
-
if (
|
|
36796
|
+
if (crypto8 === void 0) {
|
|
36797
36797
|
return true;
|
|
36798
36798
|
}
|
|
36799
36799
|
const parsedMetadata = parseMetadata(metadataList);
|
|
@@ -36808,7 +36808,7 @@ var require_util2 = __commonJS({
|
|
|
36808
36808
|
for (const item of metadata) {
|
|
36809
36809
|
const algorithm = item.algo;
|
|
36810
36810
|
const expectedValue = item.hash;
|
|
36811
|
-
let actualValue =
|
|
36811
|
+
let actualValue = crypto8.createHash(algorithm).update(bytes).digest("base64");
|
|
36812
36812
|
if (actualValue[actualValue.length - 1] === "=") {
|
|
36813
36813
|
if (actualValue[actualValue.length - 2] === "=") {
|
|
36814
36814
|
actualValue = actualValue.slice(0, -2);
|
|
@@ -36901,8 +36901,8 @@ var require_util2 = __commonJS({
|
|
|
36901
36901
|
function createDeferredPromise() {
|
|
36902
36902
|
let res;
|
|
36903
36903
|
let rej;
|
|
36904
|
-
const promise = new Promise((
|
|
36905
|
-
res =
|
|
36904
|
+
const promise = new Promise((resolve17, reject) => {
|
|
36905
|
+
res = resolve17;
|
|
36906
36906
|
rej = reject;
|
|
36907
36907
|
});
|
|
36908
36908
|
return { promise, resolve: res, reject: rej };
|
|
@@ -38155,8 +38155,8 @@ var require_body = __commonJS({
|
|
|
38155
38155
|
var { parseMIMEType, serializeAMimeType } = require_dataURL();
|
|
38156
38156
|
var random;
|
|
38157
38157
|
try {
|
|
38158
|
-
const
|
|
38159
|
-
random = (max) =>
|
|
38158
|
+
const crypto8 = require("crypto");
|
|
38159
|
+
random = (max) => crypto8.randomInt(0, max);
|
|
38160
38160
|
} catch {
|
|
38161
38161
|
random = (max) => Math.floor(Math.random(max));
|
|
38162
38162
|
}
|
|
@@ -38407,8 +38407,8 @@ Content-Type: ${value.type || "application/octet-stream"}\r
|
|
|
38407
38407
|
});
|
|
38408
38408
|
}
|
|
38409
38409
|
});
|
|
38410
|
-
const busboyResolve = new Promise((
|
|
38411
|
-
busboy.on("finish",
|
|
38410
|
+
const busboyResolve = new Promise((resolve17, reject) => {
|
|
38411
|
+
busboy.on("finish", resolve17);
|
|
38412
38412
|
busboy.on("error", (err) => reject(new TypeError(err)));
|
|
38413
38413
|
});
|
|
38414
38414
|
if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk);
|
|
@@ -38539,7 +38539,7 @@ var require_request = __commonJS({
|
|
|
38539
38539
|
}
|
|
38540
38540
|
var Request2 = class _Request {
|
|
38541
38541
|
constructor(origin, {
|
|
38542
|
-
path:
|
|
38542
|
+
path: path31,
|
|
38543
38543
|
method,
|
|
38544
38544
|
body,
|
|
38545
38545
|
headers,
|
|
@@ -38553,11 +38553,11 @@ var require_request = __commonJS({
|
|
|
38553
38553
|
throwOnError,
|
|
38554
38554
|
expectContinue
|
|
38555
38555
|
}, handler) {
|
|
38556
|
-
if (typeof
|
|
38556
|
+
if (typeof path31 !== "string") {
|
|
38557
38557
|
throw new InvalidArgumentError("path must be a string");
|
|
38558
|
-
} else if (
|
|
38558
|
+
} else if (path31[0] !== "/" && !(path31.startsWith("http://") || path31.startsWith("https://")) && method !== "CONNECT") {
|
|
38559
38559
|
throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
|
|
38560
|
-
} else if (invalidPathRegex.exec(
|
|
38560
|
+
} else if (invalidPathRegex.exec(path31) !== null) {
|
|
38561
38561
|
throw new InvalidArgumentError("invalid request path");
|
|
38562
38562
|
}
|
|
38563
38563
|
if (typeof method !== "string") {
|
|
@@ -38620,7 +38620,7 @@ var require_request = __commonJS({
|
|
|
38620
38620
|
this.completed = false;
|
|
38621
38621
|
this.aborted = false;
|
|
38622
38622
|
this.upgrade = upgrade || null;
|
|
38623
|
-
this.path = query ? util.buildURL(
|
|
38623
|
+
this.path = query ? util.buildURL(path31, query) : path31;
|
|
38624
38624
|
this.origin = origin;
|
|
38625
38625
|
this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
|
|
38626
38626
|
this.blocking = blocking == null ? false : blocking;
|
|
@@ -38942,9 +38942,9 @@ var require_dispatcher_base = __commonJS({
|
|
|
38942
38942
|
}
|
|
38943
38943
|
close(callback) {
|
|
38944
38944
|
if (callback === void 0) {
|
|
38945
|
-
return new Promise((
|
|
38945
|
+
return new Promise((resolve17, reject) => {
|
|
38946
38946
|
this.close((err, data) => {
|
|
38947
|
-
return err ? reject(err) :
|
|
38947
|
+
return err ? reject(err) : resolve17(data);
|
|
38948
38948
|
});
|
|
38949
38949
|
});
|
|
38950
38950
|
}
|
|
@@ -38982,12 +38982,12 @@ var require_dispatcher_base = __commonJS({
|
|
|
38982
38982
|
err = null;
|
|
38983
38983
|
}
|
|
38984
38984
|
if (callback === void 0) {
|
|
38985
|
-
return new Promise((
|
|
38985
|
+
return new Promise((resolve17, reject) => {
|
|
38986
38986
|
this.destroy(err, (err2, data) => {
|
|
38987
38987
|
return err2 ? (
|
|
38988
38988
|
/* istanbul ignore next: should never error */
|
|
38989
38989
|
reject(err2)
|
|
38990
|
-
) :
|
|
38990
|
+
) : resolve17(data);
|
|
38991
38991
|
});
|
|
38992
38992
|
});
|
|
38993
38993
|
}
|
|
@@ -39628,9 +39628,9 @@ var require_RedirectHandler = __commonJS({
|
|
|
39628
39628
|
return this.handler.onHeaders(statusCode, headers, resume, statusText);
|
|
39629
39629
|
}
|
|
39630
39630
|
const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
|
|
39631
|
-
const
|
|
39631
|
+
const path31 = search ? `${pathname}${search}` : pathname;
|
|
39632
39632
|
this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
|
|
39633
|
-
this.opts.path =
|
|
39633
|
+
this.opts.path = path31;
|
|
39634
39634
|
this.opts.origin = origin;
|
|
39635
39635
|
this.opts.maxRedirections = 0;
|
|
39636
39636
|
this.opts.query = null;
|
|
@@ -40049,16 +40049,16 @@ var require_client = __commonJS({
|
|
|
40049
40049
|
return this[kNeedDrain] < 2;
|
|
40050
40050
|
}
|
|
40051
40051
|
async [kClose]() {
|
|
40052
|
-
return new Promise((
|
|
40052
|
+
return new Promise((resolve17) => {
|
|
40053
40053
|
if (!this[kSize]) {
|
|
40054
|
-
|
|
40054
|
+
resolve17(null);
|
|
40055
40055
|
} else {
|
|
40056
|
-
this[kClosedResolve] =
|
|
40056
|
+
this[kClosedResolve] = resolve17;
|
|
40057
40057
|
}
|
|
40058
40058
|
});
|
|
40059
40059
|
}
|
|
40060
40060
|
async [kDestroy](err) {
|
|
40061
|
-
return new Promise((
|
|
40061
|
+
return new Promise((resolve17) => {
|
|
40062
40062
|
const requests = this[kQueue].splice(this[kPendingIdx]);
|
|
40063
40063
|
for (let i = 0; i < requests.length; i++) {
|
|
40064
40064
|
const request = requests[i];
|
|
@@ -40069,7 +40069,7 @@ var require_client = __commonJS({
|
|
|
40069
40069
|
this[kClosedResolve]();
|
|
40070
40070
|
this[kClosedResolve] = null;
|
|
40071
40071
|
}
|
|
40072
|
-
|
|
40072
|
+
resolve17();
|
|
40073
40073
|
};
|
|
40074
40074
|
if (this[kHTTP2Session] != null) {
|
|
40075
40075
|
util.destroy(this[kHTTP2Session], err);
|
|
@@ -40649,7 +40649,7 @@ var require_client = __commonJS({
|
|
|
40649
40649
|
});
|
|
40650
40650
|
}
|
|
40651
40651
|
try {
|
|
40652
|
-
const socket = await new Promise((
|
|
40652
|
+
const socket = await new Promise((resolve17, reject) => {
|
|
40653
40653
|
client[kConnector]({
|
|
40654
40654
|
host,
|
|
40655
40655
|
hostname,
|
|
@@ -40661,7 +40661,7 @@ var require_client = __commonJS({
|
|
|
40661
40661
|
if (err) {
|
|
40662
40662
|
reject(err);
|
|
40663
40663
|
} else {
|
|
40664
|
-
|
|
40664
|
+
resolve17(socket2);
|
|
40665
40665
|
}
|
|
40666
40666
|
});
|
|
40667
40667
|
});
|
|
@@ -40872,7 +40872,7 @@ var require_client = __commonJS({
|
|
|
40872
40872
|
writeH2(client, client[kHTTP2Session], request);
|
|
40873
40873
|
return;
|
|
40874
40874
|
}
|
|
40875
|
-
const { body, method, path:
|
|
40875
|
+
const { body, method, path: path31, host, upgrade, headers, blocking, reset } = request;
|
|
40876
40876
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
40877
40877
|
if (body && typeof body.read === "function") {
|
|
40878
40878
|
body.read(0);
|
|
@@ -40922,7 +40922,7 @@ var require_client = __commonJS({
|
|
|
40922
40922
|
if (blocking) {
|
|
40923
40923
|
socket[kBlocking] = true;
|
|
40924
40924
|
}
|
|
40925
|
-
let header = `${method} ${
|
|
40925
|
+
let header = `${method} ${path31} HTTP/1.1\r
|
|
40926
40926
|
`;
|
|
40927
40927
|
if (typeof host === "string") {
|
|
40928
40928
|
header += `host: ${host}\r
|
|
@@ -40985,7 +40985,7 @@ upgrade: ${upgrade}\r
|
|
|
40985
40985
|
return true;
|
|
40986
40986
|
}
|
|
40987
40987
|
function writeH2(client, session, request) {
|
|
40988
|
-
const { body, method, path:
|
|
40988
|
+
const { body, method, path: path31, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
|
|
40989
40989
|
let headers;
|
|
40990
40990
|
if (typeof reqHeaders === "string") headers = Request2[kHTTP2CopyHeaders](reqHeaders.trim());
|
|
40991
40991
|
else headers = reqHeaders;
|
|
@@ -41028,7 +41028,7 @@ upgrade: ${upgrade}\r
|
|
|
41028
41028
|
});
|
|
41029
41029
|
return true;
|
|
41030
41030
|
}
|
|
41031
|
-
headers[HTTP2_HEADER_PATH] =
|
|
41031
|
+
headers[HTTP2_HEADER_PATH] = path31;
|
|
41032
41032
|
headers[HTTP2_HEADER_SCHEME] = "https";
|
|
41033
41033
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
41034
41034
|
if (body && typeof body.read === "function") {
|
|
@@ -41285,12 +41285,12 @@ upgrade: ${upgrade}\r
|
|
|
41285
41285
|
cb();
|
|
41286
41286
|
}
|
|
41287
41287
|
}
|
|
41288
|
-
const waitForDrain = () => new Promise((
|
|
41288
|
+
const waitForDrain = () => new Promise((resolve17, reject) => {
|
|
41289
41289
|
assert(callback === null);
|
|
41290
41290
|
if (socket[kError]) {
|
|
41291
41291
|
reject(socket[kError]);
|
|
41292
41292
|
} else {
|
|
41293
|
-
callback =
|
|
41293
|
+
callback = resolve17;
|
|
41294
41294
|
}
|
|
41295
41295
|
});
|
|
41296
41296
|
if (client[kHTTPConnVersion] === "h2") {
|
|
@@ -41636,8 +41636,8 @@ var require_pool_base = __commonJS({
|
|
|
41636
41636
|
if (this[kQueue].isEmpty()) {
|
|
41637
41637
|
return Promise.all(this[kClients].map((c) => c.close()));
|
|
41638
41638
|
} else {
|
|
41639
|
-
return new Promise((
|
|
41640
|
-
this[kClosedResolve] =
|
|
41639
|
+
return new Promise((resolve17) => {
|
|
41640
|
+
this[kClosedResolve] = resolve17;
|
|
41641
41641
|
});
|
|
41642
41642
|
}
|
|
41643
41643
|
}
|
|
@@ -42215,7 +42215,7 @@ var require_readable = __commonJS({
|
|
|
42215
42215
|
if (this.closed) {
|
|
42216
42216
|
return Promise.resolve(null);
|
|
42217
42217
|
}
|
|
42218
|
-
return new Promise((
|
|
42218
|
+
return new Promise((resolve17, reject) => {
|
|
42219
42219
|
const signalListenerCleanup = signal ? util.addAbortListener(signal, () => {
|
|
42220
42220
|
this.destroy();
|
|
42221
42221
|
}) : noop;
|
|
@@ -42224,7 +42224,7 @@ var require_readable = __commonJS({
|
|
|
42224
42224
|
if (signal && signal.aborted) {
|
|
42225
42225
|
reject(signal.reason || Object.assign(new Error("The operation was aborted"), { name: "AbortError" }));
|
|
42226
42226
|
} else {
|
|
42227
|
-
|
|
42227
|
+
resolve17(null);
|
|
42228
42228
|
}
|
|
42229
42229
|
}).on("error", noop).on("data", function(chunk) {
|
|
42230
42230
|
limit -= chunk.length;
|
|
@@ -42246,11 +42246,11 @@ var require_readable = __commonJS({
|
|
|
42246
42246
|
throw new TypeError("unusable");
|
|
42247
42247
|
}
|
|
42248
42248
|
assert(!stream[kConsume]);
|
|
42249
|
-
return new Promise((
|
|
42249
|
+
return new Promise((resolve17, reject) => {
|
|
42250
42250
|
stream[kConsume] = {
|
|
42251
42251
|
type,
|
|
42252
42252
|
stream,
|
|
42253
|
-
resolve:
|
|
42253
|
+
resolve: resolve17,
|
|
42254
42254
|
reject,
|
|
42255
42255
|
length: 0,
|
|
42256
42256
|
body: []
|
|
@@ -42285,12 +42285,12 @@ var require_readable = __commonJS({
|
|
|
42285
42285
|
}
|
|
42286
42286
|
}
|
|
42287
42287
|
function consumeEnd(consume2) {
|
|
42288
|
-
const { type, body, resolve:
|
|
42288
|
+
const { type, body, resolve: resolve17, stream, length } = consume2;
|
|
42289
42289
|
try {
|
|
42290
42290
|
if (type === "text") {
|
|
42291
|
-
|
|
42291
|
+
resolve17(toUSVString(Buffer.concat(body)));
|
|
42292
42292
|
} else if (type === "json") {
|
|
42293
|
-
|
|
42293
|
+
resolve17(JSON.parse(Buffer.concat(body)));
|
|
42294
42294
|
} else if (type === "arrayBuffer") {
|
|
42295
42295
|
const dst = new Uint8Array(length);
|
|
42296
42296
|
let pos = 0;
|
|
@@ -42298,12 +42298,12 @@ var require_readable = __commonJS({
|
|
|
42298
42298
|
dst.set(buf, pos);
|
|
42299
42299
|
pos += buf.byteLength;
|
|
42300
42300
|
}
|
|
42301
|
-
|
|
42301
|
+
resolve17(dst.buffer);
|
|
42302
42302
|
} else if (type === "blob") {
|
|
42303
42303
|
if (!Blob2) {
|
|
42304
42304
|
Blob2 = require("buffer").Blob;
|
|
42305
42305
|
}
|
|
42306
|
-
|
|
42306
|
+
resolve17(new Blob2(body, { type: stream[kContentType] }));
|
|
42307
42307
|
}
|
|
42308
42308
|
consumeFinish(consume2);
|
|
42309
42309
|
} catch (err) {
|
|
@@ -42560,9 +42560,9 @@ var require_api_request = __commonJS({
|
|
|
42560
42560
|
};
|
|
42561
42561
|
function request(opts, callback) {
|
|
42562
42562
|
if (callback === void 0) {
|
|
42563
|
-
return new Promise((
|
|
42563
|
+
return new Promise((resolve17, reject) => {
|
|
42564
42564
|
request.call(this, opts, (err, data) => {
|
|
42565
|
-
return err ? reject(err) :
|
|
42565
|
+
return err ? reject(err) : resolve17(data);
|
|
42566
42566
|
});
|
|
42567
42567
|
});
|
|
42568
42568
|
}
|
|
@@ -42735,9 +42735,9 @@ var require_api_stream = __commonJS({
|
|
|
42735
42735
|
};
|
|
42736
42736
|
function stream(opts, factory, callback) {
|
|
42737
42737
|
if (callback === void 0) {
|
|
42738
|
-
return new Promise((
|
|
42738
|
+
return new Promise((resolve17, reject) => {
|
|
42739
42739
|
stream.call(this, opts, factory, (err, data) => {
|
|
42740
|
-
return err ? reject(err) :
|
|
42740
|
+
return err ? reject(err) : resolve17(data);
|
|
42741
42741
|
});
|
|
42742
42742
|
});
|
|
42743
42743
|
}
|
|
@@ -43018,9 +43018,9 @@ var require_api_upgrade = __commonJS({
|
|
|
43018
43018
|
};
|
|
43019
43019
|
function upgrade(opts, callback) {
|
|
43020
43020
|
if (callback === void 0) {
|
|
43021
|
-
return new Promise((
|
|
43021
|
+
return new Promise((resolve17, reject) => {
|
|
43022
43022
|
upgrade.call(this, opts, (err, data) => {
|
|
43023
|
-
return err ? reject(err) :
|
|
43023
|
+
return err ? reject(err) : resolve17(data);
|
|
43024
43024
|
});
|
|
43025
43025
|
});
|
|
43026
43026
|
}
|
|
@@ -43109,9 +43109,9 @@ var require_api_connect = __commonJS({
|
|
|
43109
43109
|
};
|
|
43110
43110
|
function connect(opts, callback) {
|
|
43111
43111
|
if (callback === void 0) {
|
|
43112
|
-
return new Promise((
|
|
43112
|
+
return new Promise((resolve17, reject) => {
|
|
43113
43113
|
connect.call(this, opts, (err, data) => {
|
|
43114
|
-
return err ? reject(err) :
|
|
43114
|
+
return err ? reject(err) : resolve17(data);
|
|
43115
43115
|
});
|
|
43116
43116
|
});
|
|
43117
43117
|
}
|
|
@@ -43271,20 +43271,20 @@ var require_mock_utils = __commonJS({
|
|
|
43271
43271
|
}
|
|
43272
43272
|
return true;
|
|
43273
43273
|
}
|
|
43274
|
-
function safeUrl(
|
|
43275
|
-
if (typeof
|
|
43276
|
-
return
|
|
43274
|
+
function safeUrl(path31) {
|
|
43275
|
+
if (typeof path31 !== "string") {
|
|
43276
|
+
return path31;
|
|
43277
43277
|
}
|
|
43278
|
-
const pathSegments =
|
|
43278
|
+
const pathSegments = path31.split("?");
|
|
43279
43279
|
if (pathSegments.length !== 2) {
|
|
43280
|
-
return
|
|
43280
|
+
return path31;
|
|
43281
43281
|
}
|
|
43282
43282
|
const qp = new URLSearchParams(pathSegments.pop());
|
|
43283
43283
|
qp.sort();
|
|
43284
43284
|
return [...pathSegments, qp.toString()].join("?");
|
|
43285
43285
|
}
|
|
43286
|
-
function matchKey(mockDispatch2, { path:
|
|
43287
|
-
const pathMatch = matchValue(mockDispatch2.path,
|
|
43286
|
+
function matchKey(mockDispatch2, { path: path31, method, body, headers }) {
|
|
43287
|
+
const pathMatch = matchValue(mockDispatch2.path, path31);
|
|
43288
43288
|
const methodMatch = matchValue(mockDispatch2.method, method);
|
|
43289
43289
|
const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
|
|
43290
43290
|
const headersMatch = matchHeaders(mockDispatch2, headers);
|
|
@@ -43302,7 +43302,7 @@ var require_mock_utils = __commonJS({
|
|
|
43302
43302
|
function getMockDispatch(mockDispatches, key) {
|
|
43303
43303
|
const basePath = key.query ? buildURL(key.path, key.query) : key.path;
|
|
43304
43304
|
const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
|
|
43305
|
-
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path:
|
|
43305
|
+
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path31 }) => matchValue(safeUrl(path31), resolvedPath));
|
|
43306
43306
|
if (matchedMockDispatches.length === 0) {
|
|
43307
43307
|
throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
|
|
43308
43308
|
}
|
|
@@ -43339,9 +43339,9 @@ var require_mock_utils = __commonJS({
|
|
|
43339
43339
|
}
|
|
43340
43340
|
}
|
|
43341
43341
|
function buildKey(opts) {
|
|
43342
|
-
const { path:
|
|
43342
|
+
const { path: path31, method, body, headers, query } = opts;
|
|
43343
43343
|
return {
|
|
43344
|
-
path:
|
|
43344
|
+
path: path31,
|
|
43345
43345
|
method,
|
|
43346
43346
|
body,
|
|
43347
43347
|
headers,
|
|
@@ -43790,10 +43790,10 @@ var require_pending_interceptors_formatter = __commonJS({
|
|
|
43790
43790
|
}
|
|
43791
43791
|
format(pendingInterceptors) {
|
|
43792
43792
|
const withPrettyHeaders = pendingInterceptors.map(
|
|
43793
|
-
({ method, path:
|
|
43793
|
+
({ method, path: path31, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
|
|
43794
43794
|
Method: method,
|
|
43795
43795
|
Origin: origin,
|
|
43796
|
-
Path:
|
|
43796
|
+
Path: path31,
|
|
43797
43797
|
"Status code": statusCode,
|
|
43798
43798
|
Persistent: persist ? "\u2705" : "\u274C",
|
|
43799
43799
|
Invocations: timesInvoked,
|
|
@@ -46734,7 +46734,7 @@ var require_fetch = __commonJS({
|
|
|
46734
46734
|
async function dispatch({ body }) {
|
|
46735
46735
|
const url = requestCurrentURL(request);
|
|
46736
46736
|
const agent = fetchParams.controller.dispatcher;
|
|
46737
|
-
return new Promise((
|
|
46737
|
+
return new Promise((resolve17, reject) => agent.dispatch(
|
|
46738
46738
|
{
|
|
46739
46739
|
path: url.pathname + url.search,
|
|
46740
46740
|
origin: url.origin,
|
|
@@ -46810,7 +46810,7 @@ var require_fetch = __commonJS({
|
|
|
46810
46810
|
}
|
|
46811
46811
|
}
|
|
46812
46812
|
}
|
|
46813
|
-
|
|
46813
|
+
resolve17({
|
|
46814
46814
|
status,
|
|
46815
46815
|
statusText,
|
|
46816
46816
|
headersList: headers[kHeadersList],
|
|
@@ -46853,7 +46853,7 @@ var require_fetch = __commonJS({
|
|
|
46853
46853
|
const val = headersList[n + 1].toString("latin1");
|
|
46854
46854
|
headers[kHeadersList].append(key, val);
|
|
46855
46855
|
}
|
|
46856
|
-
|
|
46856
|
+
resolve17({
|
|
46857
46857
|
status,
|
|
46858
46858
|
statusText: STATUS_CODES[status],
|
|
46859
46859
|
headersList: headers[kHeadersList],
|
|
@@ -48414,8 +48414,8 @@ var require_util6 = __commonJS({
|
|
|
48414
48414
|
}
|
|
48415
48415
|
}
|
|
48416
48416
|
}
|
|
48417
|
-
function validateCookiePath(
|
|
48418
|
-
for (const char of
|
|
48417
|
+
function validateCookiePath(path31) {
|
|
48418
|
+
for (const char of path31) {
|
|
48419
48419
|
const code = char.charCodeAt(0);
|
|
48420
48420
|
if (code < 33 || char === ";") {
|
|
48421
48421
|
throw new Error("Invalid cookie path");
|
|
@@ -49212,9 +49212,9 @@ var require_connection = __commonJS({
|
|
|
49212
49212
|
channels.open = diagnosticsChannel.channel("undici:websocket:open");
|
|
49213
49213
|
channels.close = diagnosticsChannel.channel("undici:websocket:close");
|
|
49214
49214
|
channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error");
|
|
49215
|
-
var
|
|
49215
|
+
var crypto8;
|
|
49216
49216
|
try {
|
|
49217
|
-
|
|
49217
|
+
crypto8 = require("crypto");
|
|
49218
49218
|
} catch {
|
|
49219
49219
|
}
|
|
49220
49220
|
function establishWebSocketConnection(url, protocols, ws, onEstablish, options) {
|
|
@@ -49233,7 +49233,7 @@ var require_connection = __commonJS({
|
|
|
49233
49233
|
const headersList = new Headers(options.headers)[kHeadersList];
|
|
49234
49234
|
request.headersList = headersList;
|
|
49235
49235
|
}
|
|
49236
|
-
const keyValue =
|
|
49236
|
+
const keyValue = crypto8.randomBytes(16).toString("base64");
|
|
49237
49237
|
request.headersList.append("sec-websocket-key", keyValue);
|
|
49238
49238
|
request.headersList.append("sec-websocket-version", "13");
|
|
49239
49239
|
for (const protocol of protocols) {
|
|
@@ -49262,7 +49262,7 @@ var require_connection = __commonJS({
|
|
|
49262
49262
|
return;
|
|
49263
49263
|
}
|
|
49264
49264
|
const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
|
|
49265
|
-
const digest =
|
|
49265
|
+
const digest = crypto8.createHash("sha1").update(keyValue + uid).digest("base64");
|
|
49266
49266
|
if (secWSAccept !== digest) {
|
|
49267
49267
|
failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
|
|
49268
49268
|
return;
|
|
@@ -49342,9 +49342,9 @@ var require_frame = __commonJS({
|
|
|
49342
49342
|
"node_modules/undici/lib/websocket/frame.js"(exports2, module2) {
|
|
49343
49343
|
"use strict";
|
|
49344
49344
|
var { maxUnsigned16Bit } = require_constants5();
|
|
49345
|
-
var
|
|
49345
|
+
var crypto8;
|
|
49346
49346
|
try {
|
|
49347
|
-
|
|
49347
|
+
crypto8 = require("crypto");
|
|
49348
49348
|
} catch {
|
|
49349
49349
|
}
|
|
49350
49350
|
var WebsocketFrameSend = class {
|
|
@@ -49353,7 +49353,7 @@ var require_frame = __commonJS({
|
|
|
49353
49353
|
*/
|
|
49354
49354
|
constructor(data) {
|
|
49355
49355
|
this.frameData = data;
|
|
49356
|
-
this.maskKey =
|
|
49356
|
+
this.maskKey = crypto8.randomBytes(4);
|
|
49357
49357
|
}
|
|
49358
49358
|
createFrame(opcode) {
|
|
49359
49359
|
const bodyLength = this.frameData?.byteLength ?? 0;
|
|
@@ -50095,11 +50095,11 @@ var require_undici = __commonJS({
|
|
|
50095
50095
|
if (typeof opts.path !== "string") {
|
|
50096
50096
|
throw new InvalidArgumentError("invalid opts.path");
|
|
50097
50097
|
}
|
|
50098
|
-
let
|
|
50098
|
+
let path31 = opts.path;
|
|
50099
50099
|
if (!opts.path.startsWith("/")) {
|
|
50100
|
-
|
|
50100
|
+
path31 = `/${path31}`;
|
|
50101
50101
|
}
|
|
50102
|
-
url = new URL(util.parseOrigin(url).origin +
|
|
50102
|
+
url = new URL(util.parseOrigin(url).origin + path31);
|
|
50103
50103
|
} else {
|
|
50104
50104
|
if (!opts) {
|
|
50105
50105
|
opts = typeof url === "object" ? url : {};
|
|
@@ -50669,7 +50669,7 @@ var init_mcp_check_provider = __esm({
|
|
|
50669
50669
|
logger.warn(
|
|
50670
50670
|
`MCP ${transportName} failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${error instanceof Error ? error.message : String(error)}`
|
|
50671
50671
|
);
|
|
50672
|
-
await new Promise((
|
|
50672
|
+
await new Promise((resolve17) => setTimeout(resolve17, delay));
|
|
50673
50673
|
attempt += 1;
|
|
50674
50674
|
} finally {
|
|
50675
50675
|
try {
|
|
@@ -50843,7 +50843,7 @@ async function acquirePromptLock() {
|
|
|
50843
50843
|
);
|
|
50844
50844
|
}, 1e4);
|
|
50845
50845
|
try {
|
|
50846
|
-
await new Promise((
|
|
50846
|
+
await new Promise((resolve17) => waiters.push(resolve17));
|
|
50847
50847
|
} finally {
|
|
50848
50848
|
clearInterval(reminder);
|
|
50849
50849
|
const waitedMs = Date.now() - queuedAt;
|
|
@@ -50862,7 +50862,7 @@ function releasePromptLock() {
|
|
|
50862
50862
|
}
|
|
50863
50863
|
async function interactivePrompt(options) {
|
|
50864
50864
|
await acquirePromptLock();
|
|
50865
|
-
return new Promise((
|
|
50865
|
+
return new Promise((resolve17, reject) => {
|
|
50866
50866
|
const dbg = process.env.VISOR_DEBUG === "true";
|
|
50867
50867
|
try {
|
|
50868
50868
|
if (dbg) {
|
|
@@ -50949,12 +50949,12 @@ async function interactivePrompt(options) {
|
|
|
50949
50949
|
};
|
|
50950
50950
|
const finish = (value) => {
|
|
50951
50951
|
cleanup();
|
|
50952
|
-
|
|
50952
|
+
resolve17(value);
|
|
50953
50953
|
};
|
|
50954
50954
|
if (options.timeout && options.timeout > 0) {
|
|
50955
50955
|
timeoutId = setTimeout(() => {
|
|
50956
50956
|
cleanup();
|
|
50957
|
-
if (defaultValue !== void 0) return
|
|
50957
|
+
if (defaultValue !== void 0) return resolve17(defaultValue);
|
|
50958
50958
|
return reject(new Error("Input timeout"));
|
|
50959
50959
|
}, options.timeout);
|
|
50960
50960
|
}
|
|
@@ -51086,7 +51086,7 @@ async function interactivePrompt(options) {
|
|
|
51086
51086
|
});
|
|
51087
51087
|
}
|
|
51088
51088
|
async function simplePrompt(prompt) {
|
|
51089
|
-
return new Promise((
|
|
51089
|
+
return new Promise((resolve17) => {
|
|
51090
51090
|
const rl = readline3.createInterface({
|
|
51091
51091
|
input: process.stdin,
|
|
51092
51092
|
output: process.stdout
|
|
@@ -51102,7 +51102,7 @@ async function simplePrompt(prompt) {
|
|
|
51102
51102
|
rl.question(`${prompt}
|
|
51103
51103
|
> `, (answer) => {
|
|
51104
51104
|
rl.close();
|
|
51105
|
-
|
|
51105
|
+
resolve17(answer.trim());
|
|
51106
51106
|
});
|
|
51107
51107
|
});
|
|
51108
51108
|
}
|
|
@@ -51270,7 +51270,7 @@ function isStdinAvailable() {
|
|
|
51270
51270
|
return !process.stdin.isTTY;
|
|
51271
51271
|
}
|
|
51272
51272
|
async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
51273
|
-
return new Promise((
|
|
51273
|
+
return new Promise((resolve17, reject) => {
|
|
51274
51274
|
let data = "";
|
|
51275
51275
|
let timeoutId;
|
|
51276
51276
|
if (timeout) {
|
|
@@ -51297,7 +51297,7 @@ async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
|
51297
51297
|
};
|
|
51298
51298
|
const onEnd = () => {
|
|
51299
51299
|
cleanup();
|
|
51300
|
-
|
|
51300
|
+
resolve17(data.trim());
|
|
51301
51301
|
};
|
|
51302
51302
|
const onError = (err) => {
|
|
51303
51303
|
cleanup();
|
|
@@ -52851,7 +52851,7 @@ var init_worktree_manager = __esm({
|
|
|
52851
52851
|
logger.warn(`Bare repo update attempt ${attempt}/2 error: ${errorMessage}`);
|
|
52852
52852
|
}
|
|
52853
52853
|
if (attempt < 2) {
|
|
52854
|
-
await new Promise((
|
|
52854
|
+
await new Promise((resolve17) => setTimeout(resolve17, 2e3));
|
|
52855
52855
|
}
|
|
52856
52856
|
}
|
|
52857
52857
|
logger.warn(`Failed to update bare repository after 2 attempts (will rely on per-ref fetch)`);
|
|
@@ -53162,7 +53162,7 @@ var init_worktree_manager = __esm({
|
|
|
53162
53162
|
`Failed to fetch ref ${ref} (attempt ${attempt}/2): ${result.stderr || result.stdout}`
|
|
53163
53163
|
);
|
|
53164
53164
|
if (attempt < 2) {
|
|
53165
|
-
await new Promise((
|
|
53165
|
+
await new Promise((resolve17) => setTimeout(resolve17, 1e3));
|
|
53166
53166
|
}
|
|
53167
53167
|
}
|
|
53168
53168
|
return false;
|
|
@@ -56115,23 +56115,23 @@ __export(renderer_schema_exports, {
|
|
|
56115
56115
|
});
|
|
56116
56116
|
async function loadRendererSchema(name) {
|
|
56117
56117
|
try {
|
|
56118
|
-
const
|
|
56119
|
-
const
|
|
56118
|
+
const fs29 = await import("fs/promises");
|
|
56119
|
+
const path31 = await import("path");
|
|
56120
56120
|
const sanitized = String(name).replace(/[^a-zA-Z0-9-]/g, "");
|
|
56121
56121
|
if (!sanitized) return void 0;
|
|
56122
56122
|
const candidates = [
|
|
56123
56123
|
// When bundled with ncc, __dirname is dist/ and output/ is at dist/output/
|
|
56124
|
-
|
|
56124
|
+
path31.join(__dirname, "output", sanitized, "schema.json"),
|
|
56125
56125
|
// When running from source, __dirname is src/state-machine/dispatch/ and output/ is at output/
|
|
56126
|
-
|
|
56126
|
+
path31.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
|
|
56127
56127
|
// When running from a checkout with output/ folder copied to CWD
|
|
56128
|
-
|
|
56128
|
+
path31.join(process.cwd(), "output", sanitized, "schema.json"),
|
|
56129
56129
|
// Fallback: cwd/dist/output/
|
|
56130
|
-
|
|
56130
|
+
path31.join(process.cwd(), "dist", "output", sanitized, "schema.json")
|
|
56131
56131
|
];
|
|
56132
56132
|
for (const p of candidates) {
|
|
56133
56133
|
try {
|
|
56134
|
-
const raw = await
|
|
56134
|
+
const raw = await fs29.readFile(p, "utf-8");
|
|
56135
56135
|
return JSON.parse(raw);
|
|
56136
56136
|
} catch {
|
|
56137
56137
|
}
|
|
@@ -58585,8 +58585,8 @@ function updateStats2(results, state, isForEachIteration = false) {
|
|
|
58585
58585
|
async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
58586
58586
|
try {
|
|
58587
58587
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
58588
|
-
const
|
|
58589
|
-
const
|
|
58588
|
+
const fs29 = await import("fs/promises");
|
|
58589
|
+
const path31 = await import("path");
|
|
58590
58590
|
const schemaRaw = checkConfig.schema || "plain";
|
|
58591
58591
|
const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
|
|
58592
58592
|
let templateContent;
|
|
@@ -58595,27 +58595,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
|
58595
58595
|
logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
|
|
58596
58596
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
58597
58597
|
const file = String(checkConfig.template.file);
|
|
58598
|
-
const resolved =
|
|
58599
|
-
templateContent = await
|
|
58598
|
+
const resolved = path31.resolve(process.cwd(), file);
|
|
58599
|
+
templateContent = await fs29.readFile(resolved, "utf-8");
|
|
58600
58600
|
logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
|
|
58601
58601
|
} else if (schema && schema !== "plain") {
|
|
58602
58602
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
58603
58603
|
if (sanitized) {
|
|
58604
58604
|
const candidatePaths = [
|
|
58605
|
-
|
|
58605
|
+
path31.join(__dirname, "output", sanitized, "template.liquid"),
|
|
58606
58606
|
// bundled: dist/output/
|
|
58607
|
-
|
|
58607
|
+
path31.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
58608
58608
|
// source (from state-machine/states)
|
|
58609
|
-
|
|
58609
|
+
path31.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
|
|
58610
58610
|
// source (alternate)
|
|
58611
|
-
|
|
58611
|
+
path31.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
58612
58612
|
// fallback: cwd/output/
|
|
58613
|
-
|
|
58613
|
+
path31.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
58614
58614
|
// fallback: cwd/dist/output/
|
|
58615
58615
|
];
|
|
58616
58616
|
for (const p of candidatePaths) {
|
|
58617
58617
|
try {
|
|
58618
|
-
templateContent = await
|
|
58618
|
+
templateContent = await fs29.readFile(p, "utf-8");
|
|
58619
58619
|
if (templateContent) {
|
|
58620
58620
|
logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
|
|
58621
58621
|
break;
|
|
@@ -59821,8 +59821,8 @@ var init_workspace_manager = __esm({
|
|
|
59821
59821
|
);
|
|
59822
59822
|
if (this.cleanupRequested && this.activeOperations === 0) {
|
|
59823
59823
|
logger.debug(`[Workspace] All references released, proceeding with deferred cleanup`);
|
|
59824
|
-
for (const
|
|
59825
|
-
|
|
59824
|
+
for (const resolve17 of this.cleanupResolvers) {
|
|
59825
|
+
resolve17();
|
|
59826
59826
|
}
|
|
59827
59827
|
this.cleanupResolvers = [];
|
|
59828
59828
|
}
|
|
@@ -60038,19 +60038,19 @@ var init_workspace_manager = __esm({
|
|
|
60038
60038
|
);
|
|
60039
60039
|
this.cleanupRequested = true;
|
|
60040
60040
|
await Promise.race([
|
|
60041
|
-
new Promise((
|
|
60041
|
+
new Promise((resolve17) => {
|
|
60042
60042
|
if (this.activeOperations === 0) {
|
|
60043
|
-
|
|
60043
|
+
resolve17();
|
|
60044
60044
|
} else {
|
|
60045
|
-
this.cleanupResolvers.push(
|
|
60045
|
+
this.cleanupResolvers.push(resolve17);
|
|
60046
60046
|
}
|
|
60047
60047
|
}),
|
|
60048
|
-
new Promise((
|
|
60048
|
+
new Promise((resolve17) => {
|
|
60049
60049
|
setTimeout(() => {
|
|
60050
60050
|
logger.warn(
|
|
60051
60051
|
`[Workspace] Cleanup timeout after ${timeout}ms, proceeding anyway (${this.activeOperations} operations still active)`
|
|
60052
60052
|
);
|
|
60053
|
-
|
|
60053
|
+
resolve17();
|
|
60054
60054
|
}, timeout);
|
|
60055
60055
|
})
|
|
60056
60056
|
]);
|
|
@@ -60465,8 +60465,8 @@ var init_fair_concurrency_limiter = __esm({
|
|
|
60465
60465
|
);
|
|
60466
60466
|
const queuedAt = Date.now();
|
|
60467
60467
|
const effectiveTimeout = queueTimeout ?? 12e4;
|
|
60468
|
-
return new Promise((
|
|
60469
|
-
const entry = { resolve:
|
|
60468
|
+
return new Promise((resolve17, reject) => {
|
|
60469
|
+
const entry = { resolve: resolve17, reject, queuedAt };
|
|
60470
60470
|
entry.reminder = setInterval(() => {
|
|
60471
60471
|
const waited = Math.round((Date.now() - queuedAt) / 1e3);
|
|
60472
60472
|
const curQueued = this._totalQueued();
|
|
@@ -60774,1380 +60774,6 @@ var init_build_engine_context = __esm({
|
|
|
60774
60774
|
}
|
|
60775
60775
|
});
|
|
60776
60776
|
|
|
60777
|
-
// src/policy/default-engine.ts
|
|
60778
|
-
var DefaultPolicyEngine;
|
|
60779
|
-
var init_default_engine = __esm({
|
|
60780
|
-
"src/policy/default-engine.ts"() {
|
|
60781
|
-
"use strict";
|
|
60782
|
-
DefaultPolicyEngine = class {
|
|
60783
|
-
async initialize(_config) {
|
|
60784
|
-
}
|
|
60785
|
-
async evaluateCheckExecution(_checkId, _checkConfig) {
|
|
60786
|
-
return { allowed: true };
|
|
60787
|
-
}
|
|
60788
|
-
async evaluateToolInvocation(_serverName, _methodName, _transport) {
|
|
60789
|
-
return { allowed: true };
|
|
60790
|
-
}
|
|
60791
|
-
async evaluateCapabilities(_checkId, _capabilities) {
|
|
60792
|
-
return { allowed: true };
|
|
60793
|
-
}
|
|
60794
|
-
async shutdown() {
|
|
60795
|
-
}
|
|
60796
|
-
};
|
|
60797
|
-
}
|
|
60798
|
-
});
|
|
60799
|
-
|
|
60800
|
-
// src/enterprise/license/validator.ts
|
|
60801
|
-
var validator_exports = {};
|
|
60802
|
-
__export(validator_exports, {
|
|
60803
|
-
LicenseValidator: () => LicenseValidator
|
|
60804
|
-
});
|
|
60805
|
-
var crypto4, fs25, path28, LicenseValidator;
|
|
60806
|
-
var init_validator = __esm({
|
|
60807
|
-
"src/enterprise/license/validator.ts"() {
|
|
60808
|
-
"use strict";
|
|
60809
|
-
crypto4 = __toESM(require("crypto"));
|
|
60810
|
-
fs25 = __toESM(require("fs"));
|
|
60811
|
-
path28 = __toESM(require("path"));
|
|
60812
|
-
LicenseValidator = class _LicenseValidator {
|
|
60813
|
-
/** Ed25519 public key for license verification (PEM format). */
|
|
60814
|
-
static PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n-----END PUBLIC KEY-----\n";
|
|
60815
|
-
cache = null;
|
|
60816
|
-
static CACHE_TTL = 5 * 60 * 1e3;
|
|
60817
|
-
// 5 minutes
|
|
60818
|
-
static GRACE_PERIOD = 72 * 3600 * 1e3;
|
|
60819
|
-
// 72 hours after expiry
|
|
60820
|
-
/**
|
|
60821
|
-
* Load and validate license from environment or file.
|
|
60822
|
-
*
|
|
60823
|
-
* Resolution order:
|
|
60824
|
-
* 1. VISOR_LICENSE env var (JWT string)
|
|
60825
|
-
* 2. VISOR_LICENSE_FILE env var (path to file)
|
|
60826
|
-
* 3. .visor-license in project root (cwd)
|
|
60827
|
-
* 4. .visor-license in ~/.config/visor/
|
|
60828
|
-
*/
|
|
60829
|
-
async loadAndValidate() {
|
|
60830
|
-
if (this.cache && Date.now() - this.cache.validatedAt < _LicenseValidator.CACHE_TTL) {
|
|
60831
|
-
return this.cache.payload;
|
|
60832
|
-
}
|
|
60833
|
-
const token = this.resolveToken();
|
|
60834
|
-
if (!token) return null;
|
|
60835
|
-
const payload = this.verifyAndDecode(token);
|
|
60836
|
-
if (!payload) return null;
|
|
60837
|
-
this.cache = { payload, validatedAt: Date.now() };
|
|
60838
|
-
return payload;
|
|
60839
|
-
}
|
|
60840
|
-
/** Check if a specific feature is licensed */
|
|
60841
|
-
hasFeature(feature) {
|
|
60842
|
-
if (!this.cache) return false;
|
|
60843
|
-
return this.cache.payload.features.includes(feature);
|
|
60844
|
-
}
|
|
60845
|
-
/** Check if license is valid (with grace period) */
|
|
60846
|
-
isValid() {
|
|
60847
|
-
if (!this.cache) return false;
|
|
60848
|
-
const now = Date.now();
|
|
60849
|
-
const expiryMs = this.cache.payload.exp * 1e3;
|
|
60850
|
-
return now < expiryMs + _LicenseValidator.GRACE_PERIOD;
|
|
60851
|
-
}
|
|
60852
|
-
/** Check if the license is within its grace period (expired but still valid) */
|
|
60853
|
-
isInGracePeriod() {
|
|
60854
|
-
if (!this.cache) return false;
|
|
60855
|
-
const now = Date.now();
|
|
60856
|
-
const expiryMs = this.cache.payload.exp * 1e3;
|
|
60857
|
-
return now >= expiryMs && now < expiryMs + _LicenseValidator.GRACE_PERIOD;
|
|
60858
|
-
}
|
|
60859
|
-
resolveToken() {
|
|
60860
|
-
if (process.env.VISOR_LICENSE) {
|
|
60861
|
-
return process.env.VISOR_LICENSE.trim();
|
|
60862
|
-
}
|
|
60863
|
-
if (process.env.VISOR_LICENSE_FILE) {
|
|
60864
|
-
const resolved = path28.resolve(process.env.VISOR_LICENSE_FILE);
|
|
60865
|
-
const home2 = process.env.HOME || process.env.USERPROFILE || "";
|
|
60866
|
-
const allowedPrefixes = [path28.normalize(process.cwd())];
|
|
60867
|
-
if (home2) allowedPrefixes.push(path28.normalize(path28.join(home2, ".config", "visor")));
|
|
60868
|
-
let realPath;
|
|
60869
|
-
try {
|
|
60870
|
-
realPath = fs25.realpathSync(resolved);
|
|
60871
|
-
} catch {
|
|
60872
|
-
return null;
|
|
60873
|
-
}
|
|
60874
|
-
const isSafe = allowedPrefixes.some(
|
|
60875
|
-
(prefix) => realPath === prefix || realPath.startsWith(prefix + path28.sep)
|
|
60876
|
-
);
|
|
60877
|
-
if (!isSafe) return null;
|
|
60878
|
-
return this.readFile(realPath);
|
|
60879
|
-
}
|
|
60880
|
-
const cwdPath = path28.join(process.cwd(), ".visor-license");
|
|
60881
|
-
const cwdToken = this.readFile(cwdPath);
|
|
60882
|
-
if (cwdToken) return cwdToken;
|
|
60883
|
-
const home = process.env.HOME || process.env.USERPROFILE || "";
|
|
60884
|
-
if (home) {
|
|
60885
|
-
const configPath = path28.join(home, ".config", "visor", ".visor-license");
|
|
60886
|
-
const configToken = this.readFile(configPath);
|
|
60887
|
-
if (configToken) return configToken;
|
|
60888
|
-
}
|
|
60889
|
-
return null;
|
|
60890
|
-
}
|
|
60891
|
-
readFile(filePath) {
|
|
60892
|
-
try {
|
|
60893
|
-
return fs25.readFileSync(filePath, "utf-8").trim();
|
|
60894
|
-
} catch {
|
|
60895
|
-
return null;
|
|
60896
|
-
}
|
|
60897
|
-
}
|
|
60898
|
-
verifyAndDecode(token) {
|
|
60899
|
-
try {
|
|
60900
|
-
const parts = token.split(".");
|
|
60901
|
-
if (parts.length !== 3) return null;
|
|
60902
|
-
const [headerB64, payloadB64, signatureB64] = parts;
|
|
60903
|
-
const header = JSON.parse(Buffer.from(headerB64, "base64url").toString());
|
|
60904
|
-
if (header.alg !== "EdDSA") return null;
|
|
60905
|
-
const data = `${headerB64}.${payloadB64}`;
|
|
60906
|
-
const signature = Buffer.from(signatureB64, "base64url");
|
|
60907
|
-
const publicKey = crypto4.createPublicKey(_LicenseValidator.PUBLIC_KEY);
|
|
60908
|
-
if (publicKey.asymmetricKeyType !== "ed25519") {
|
|
60909
|
-
return null;
|
|
60910
|
-
}
|
|
60911
|
-
const isValid = crypto4.verify(null, Buffer.from(data), publicKey, signature);
|
|
60912
|
-
if (!isValid) return null;
|
|
60913
|
-
const payload = JSON.parse(Buffer.from(payloadB64, "base64url").toString());
|
|
60914
|
-
if (!payload.org || !Array.isArray(payload.features) || typeof payload.exp !== "number" || typeof payload.iat !== "number" || !payload.sub) {
|
|
60915
|
-
return null;
|
|
60916
|
-
}
|
|
60917
|
-
const now = Date.now();
|
|
60918
|
-
const expiryMs = payload.exp * 1e3;
|
|
60919
|
-
if (now >= expiryMs + _LicenseValidator.GRACE_PERIOD) {
|
|
60920
|
-
return null;
|
|
60921
|
-
}
|
|
60922
|
-
return payload;
|
|
60923
|
-
} catch {
|
|
60924
|
-
return null;
|
|
60925
|
-
}
|
|
60926
|
-
}
|
|
60927
|
-
};
|
|
60928
|
-
}
|
|
60929
|
-
});
|
|
60930
|
-
|
|
60931
|
-
// src/enterprise/policy/opa-compiler.ts
|
|
60932
|
-
var fs26, path29, os2, crypto5, import_child_process8, OpaCompiler;
|
|
60933
|
-
var init_opa_compiler = __esm({
|
|
60934
|
-
"src/enterprise/policy/opa-compiler.ts"() {
|
|
60935
|
-
"use strict";
|
|
60936
|
-
fs26 = __toESM(require("fs"));
|
|
60937
|
-
path29 = __toESM(require("path"));
|
|
60938
|
-
os2 = __toESM(require("os"));
|
|
60939
|
-
crypto5 = __toESM(require("crypto"));
|
|
60940
|
-
import_child_process8 = require("child_process");
|
|
60941
|
-
OpaCompiler = class _OpaCompiler {
|
|
60942
|
-
static CACHE_DIR = path29.join(os2.tmpdir(), "visor-opa-cache");
|
|
60943
|
-
/**
|
|
60944
|
-
* Resolve the input paths to WASM bytes.
|
|
60945
|
-
*
|
|
60946
|
-
* Strategy:
|
|
60947
|
-
* 1. If any path is a .wasm file, read it directly
|
|
60948
|
-
* 2. If a directory contains policy.wasm, read it
|
|
60949
|
-
* 3. Otherwise, collect all .rego files and auto-compile via `opa build`
|
|
60950
|
-
*/
|
|
60951
|
-
async resolveWasmBytes(paths) {
|
|
60952
|
-
const regoFiles = [];
|
|
60953
|
-
for (const p of paths) {
|
|
60954
|
-
const resolved = path29.resolve(p);
|
|
60955
|
-
if (path29.normalize(resolved).includes("..")) {
|
|
60956
|
-
throw new Error(`Policy path contains traversal sequences: ${p}`);
|
|
60957
|
-
}
|
|
60958
|
-
if (resolved.endsWith(".wasm") && fs26.existsSync(resolved)) {
|
|
60959
|
-
return fs26.readFileSync(resolved);
|
|
60960
|
-
}
|
|
60961
|
-
if (!fs26.existsSync(resolved)) continue;
|
|
60962
|
-
const stat2 = fs26.statSync(resolved);
|
|
60963
|
-
if (stat2.isDirectory()) {
|
|
60964
|
-
const wasmCandidate = path29.join(resolved, "policy.wasm");
|
|
60965
|
-
if (fs26.existsSync(wasmCandidate)) {
|
|
60966
|
-
return fs26.readFileSync(wasmCandidate);
|
|
60967
|
-
}
|
|
60968
|
-
const files = fs26.readdirSync(resolved);
|
|
60969
|
-
for (const f of files) {
|
|
60970
|
-
if (f.endsWith(".rego")) {
|
|
60971
|
-
regoFiles.push(path29.join(resolved, f));
|
|
60972
|
-
}
|
|
60973
|
-
}
|
|
60974
|
-
} else if (resolved.endsWith(".rego")) {
|
|
60975
|
-
regoFiles.push(resolved);
|
|
60976
|
-
}
|
|
60977
|
-
}
|
|
60978
|
-
if (regoFiles.length === 0) {
|
|
60979
|
-
throw new Error(
|
|
60980
|
-
`OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(", ")}`
|
|
60981
|
-
);
|
|
60982
|
-
}
|
|
60983
|
-
return this.compileRego(regoFiles);
|
|
60984
|
-
}
|
|
60985
|
-
/**
|
|
60986
|
-
* Auto-compile .rego files to a WASM bundle using the `opa` CLI.
|
|
60987
|
-
*
|
|
60988
|
-
* Caches the compiled bundle based on a content hash of all input .rego files
|
|
60989
|
-
* so subsequent runs skip compilation if policies haven't changed.
|
|
60990
|
-
*/
|
|
60991
|
-
compileRego(regoFiles) {
|
|
60992
|
-
try {
|
|
60993
|
-
(0, import_child_process8.execFileSync)("opa", ["version"], { stdio: "pipe" });
|
|
60994
|
-
} catch {
|
|
60995
|
-
throw new Error(
|
|
60996
|
-
"OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\nOr pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz " + regoFiles.join(" ")
|
|
60997
|
-
);
|
|
60998
|
-
}
|
|
60999
|
-
const hash = crypto5.createHash("sha256");
|
|
61000
|
-
for (const f of regoFiles.sort()) {
|
|
61001
|
-
hash.update(fs26.readFileSync(f));
|
|
61002
|
-
hash.update(f);
|
|
61003
|
-
}
|
|
61004
|
-
const cacheKey = hash.digest("hex").slice(0, 16);
|
|
61005
|
-
const cacheDir = _OpaCompiler.CACHE_DIR;
|
|
61006
|
-
const cachedWasm = path29.join(cacheDir, `${cacheKey}.wasm`);
|
|
61007
|
-
if (fs26.existsSync(cachedWasm)) {
|
|
61008
|
-
return fs26.readFileSync(cachedWasm);
|
|
61009
|
-
}
|
|
61010
|
-
fs26.mkdirSync(cacheDir, { recursive: true });
|
|
61011
|
-
const bundleTar = path29.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
|
|
61012
|
-
try {
|
|
61013
|
-
const args = [
|
|
61014
|
-
"build",
|
|
61015
|
-
"-t",
|
|
61016
|
-
"wasm",
|
|
61017
|
-
"-e",
|
|
61018
|
-
"visor",
|
|
61019
|
-
// entrypoint: the visor package tree
|
|
61020
|
-
"-o",
|
|
61021
|
-
bundleTar,
|
|
61022
|
-
...regoFiles
|
|
61023
|
-
];
|
|
61024
|
-
(0, import_child_process8.execFileSync)("opa", args, {
|
|
61025
|
-
stdio: "pipe",
|
|
61026
|
-
timeout: 3e4
|
|
61027
|
-
});
|
|
61028
|
-
} catch (err) {
|
|
61029
|
-
const stderr = err?.stderr?.toString() || "";
|
|
61030
|
-
throw new Error(
|
|
61031
|
-
`Failed to compile .rego files to WASM:
|
|
61032
|
-
${stderr}
|
|
61033
|
-
Ensure your .rego files are valid and the \`opa\` CLI is installed.`
|
|
61034
|
-
);
|
|
61035
|
-
}
|
|
61036
|
-
try {
|
|
61037
|
-
(0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "/policy.wasm"], {
|
|
61038
|
-
stdio: "pipe"
|
|
61039
|
-
});
|
|
61040
|
-
const extractedWasm = path29.join(cacheDir, "policy.wasm");
|
|
61041
|
-
if (fs26.existsSync(extractedWasm)) {
|
|
61042
|
-
fs26.renameSync(extractedWasm, cachedWasm);
|
|
61043
|
-
}
|
|
61044
|
-
} catch {
|
|
61045
|
-
try {
|
|
61046
|
-
(0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "policy.wasm"], {
|
|
61047
|
-
stdio: "pipe"
|
|
61048
|
-
});
|
|
61049
|
-
const extractedWasm = path29.join(cacheDir, "policy.wasm");
|
|
61050
|
-
if (fs26.existsSync(extractedWasm)) {
|
|
61051
|
-
fs26.renameSync(extractedWasm, cachedWasm);
|
|
61052
|
-
}
|
|
61053
|
-
} catch (err2) {
|
|
61054
|
-
throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
|
|
61055
|
-
}
|
|
61056
|
-
}
|
|
61057
|
-
try {
|
|
61058
|
-
fs26.unlinkSync(bundleTar);
|
|
61059
|
-
} catch {
|
|
61060
|
-
}
|
|
61061
|
-
if (!fs26.existsSync(cachedWasm)) {
|
|
61062
|
-
throw new Error("OPA build succeeded but policy.wasm was not found in the bundle");
|
|
61063
|
-
}
|
|
61064
|
-
return fs26.readFileSync(cachedWasm);
|
|
61065
|
-
}
|
|
61066
|
-
};
|
|
61067
|
-
}
|
|
61068
|
-
});
|
|
61069
|
-
|
|
61070
|
-
// src/enterprise/policy/opa-wasm-evaluator.ts
|
|
61071
|
-
var fs27, path30, OpaWasmEvaluator;
|
|
61072
|
-
var init_opa_wasm_evaluator = __esm({
|
|
61073
|
-
"src/enterprise/policy/opa-wasm-evaluator.ts"() {
|
|
61074
|
-
"use strict";
|
|
61075
|
-
fs27 = __toESM(require("fs"));
|
|
61076
|
-
path30 = __toESM(require("path"));
|
|
61077
|
-
init_opa_compiler();
|
|
61078
|
-
OpaWasmEvaluator = class {
|
|
61079
|
-
policy = null;
|
|
61080
|
-
dataDocument = {};
|
|
61081
|
-
compiler = new OpaCompiler();
|
|
61082
|
-
async initialize(rulesPath) {
|
|
61083
|
-
const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
|
|
61084
|
-
const wasmBytes = await this.compiler.resolveWasmBytes(paths);
|
|
61085
|
-
try {
|
|
61086
|
-
const { createRequire } = require("module");
|
|
61087
|
-
const runtimeRequire = createRequire(__filename);
|
|
61088
|
-
const opaWasm = runtimeRequire("@open-policy-agent/opa-wasm");
|
|
61089
|
-
const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
|
|
61090
|
-
if (!loadPolicy) {
|
|
61091
|
-
throw new Error("loadPolicy not found in @open-policy-agent/opa-wasm");
|
|
61092
|
-
}
|
|
61093
|
-
this.policy = await loadPolicy(wasmBytes);
|
|
61094
|
-
} catch (err) {
|
|
61095
|
-
if (err?.code === "MODULE_NOT_FOUND" || err?.code === "ERR_MODULE_NOT_FOUND") {
|
|
61096
|
-
throw new Error(
|
|
61097
|
-
"OPA WASM evaluator requires @open-policy-agent/opa-wasm. Install it with: npm install @open-policy-agent/opa-wasm"
|
|
61098
|
-
);
|
|
61099
|
-
}
|
|
61100
|
-
throw err;
|
|
61101
|
-
}
|
|
61102
|
-
}
|
|
61103
|
-
/**
|
|
61104
|
-
* Load external data from a JSON file to use as the OPA data document.
|
|
61105
|
-
* The loaded data will be passed to `policy.setData()` during evaluation,
|
|
61106
|
-
* making it available in Rego via `data.<key>`.
|
|
61107
|
-
*/
|
|
61108
|
-
loadData(dataPath) {
|
|
61109
|
-
const resolved = path30.resolve(dataPath);
|
|
61110
|
-
if (path30.normalize(resolved).includes("..")) {
|
|
61111
|
-
throw new Error(`Data path contains traversal sequences: ${dataPath}`);
|
|
61112
|
-
}
|
|
61113
|
-
if (!fs27.existsSync(resolved)) {
|
|
61114
|
-
throw new Error(`OPA data file not found: ${resolved}`);
|
|
61115
|
-
}
|
|
61116
|
-
const stat2 = fs27.statSync(resolved);
|
|
61117
|
-
if (stat2.size > 10 * 1024 * 1024) {
|
|
61118
|
-
throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat2.size} bytes)`);
|
|
61119
|
-
}
|
|
61120
|
-
const raw = fs27.readFileSync(resolved, "utf-8");
|
|
61121
|
-
try {
|
|
61122
|
-
const parsed = JSON.parse(raw);
|
|
61123
|
-
if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
|
|
61124
|
-
throw new Error("OPA data file must contain a JSON object (not an array or primitive)");
|
|
61125
|
-
}
|
|
61126
|
-
this.dataDocument = parsed;
|
|
61127
|
-
} catch (err) {
|
|
61128
|
-
if (err.message.startsWith("OPA data file must")) {
|
|
61129
|
-
throw err;
|
|
61130
|
-
}
|
|
61131
|
-
throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
|
|
61132
|
-
}
|
|
61133
|
-
}
|
|
61134
|
-
async evaluate(input) {
|
|
61135
|
-
if (!this.policy) {
|
|
61136
|
-
throw new Error("OPA WASM evaluator not initialized");
|
|
61137
|
-
}
|
|
61138
|
-
this.policy.setData(this.dataDocument);
|
|
61139
|
-
const resultSet = this.policy.evaluate(input);
|
|
61140
|
-
if (Array.isArray(resultSet) && resultSet.length > 0) {
|
|
61141
|
-
return resultSet[0].result;
|
|
61142
|
-
}
|
|
61143
|
-
return void 0;
|
|
61144
|
-
}
|
|
61145
|
-
async shutdown() {
|
|
61146
|
-
if (this.policy) {
|
|
61147
|
-
if (typeof this.policy.close === "function") {
|
|
61148
|
-
try {
|
|
61149
|
-
this.policy.close();
|
|
61150
|
-
} catch {
|
|
61151
|
-
}
|
|
61152
|
-
} else if (typeof this.policy.free === "function") {
|
|
61153
|
-
try {
|
|
61154
|
-
this.policy.free();
|
|
61155
|
-
} catch {
|
|
61156
|
-
}
|
|
61157
|
-
}
|
|
61158
|
-
}
|
|
61159
|
-
this.policy = null;
|
|
61160
|
-
}
|
|
61161
|
-
};
|
|
61162
|
-
}
|
|
61163
|
-
});
|
|
61164
|
-
|
|
61165
|
-
// src/enterprise/policy/opa-http-evaluator.ts
|
|
61166
|
-
var OpaHttpEvaluator;
|
|
61167
|
-
var init_opa_http_evaluator = __esm({
|
|
61168
|
-
"src/enterprise/policy/opa-http-evaluator.ts"() {
|
|
61169
|
-
"use strict";
|
|
61170
|
-
OpaHttpEvaluator = class {
|
|
61171
|
-
baseUrl;
|
|
61172
|
-
timeout;
|
|
61173
|
-
constructor(baseUrl, timeout = 5e3) {
|
|
61174
|
-
let parsed;
|
|
61175
|
-
try {
|
|
61176
|
-
parsed = new URL(baseUrl);
|
|
61177
|
-
} catch {
|
|
61178
|
-
throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
|
|
61179
|
-
}
|
|
61180
|
-
if (!["http:", "https:"].includes(parsed.protocol)) {
|
|
61181
|
-
throw new Error(
|
|
61182
|
-
`OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`
|
|
61183
|
-
);
|
|
61184
|
-
}
|
|
61185
|
-
const hostname = parsed.hostname;
|
|
61186
|
-
if (this.isBlockedHostname(hostname)) {
|
|
61187
|
-
throw new Error(
|
|
61188
|
-
`OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`
|
|
61189
|
-
);
|
|
61190
|
-
}
|
|
61191
|
-
this.baseUrl = baseUrl.replace(/\/+$/, "");
|
|
61192
|
-
this.timeout = timeout;
|
|
61193
|
-
}
|
|
61194
|
-
/**
|
|
61195
|
-
* Check if a hostname is blocked due to SSRF concerns.
|
|
61196
|
-
*
|
|
61197
|
-
* Blocks:
|
|
61198
|
-
* - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
|
|
61199
|
-
* - Link-local addresses (169.254.x.x)
|
|
61200
|
-
* - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
|
|
61201
|
-
* - IPv6 unique local addresses (fd00::/8)
|
|
61202
|
-
* - Cloud metadata services (*.internal)
|
|
61203
|
-
*/
|
|
61204
|
-
isBlockedHostname(hostname) {
|
|
61205
|
-
if (!hostname) return true;
|
|
61206
|
-
const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, "");
|
|
61207
|
-
if (normalized === "metadata.google.internal" || normalized.endsWith(".internal")) {
|
|
61208
|
-
return true;
|
|
61209
|
-
}
|
|
61210
|
-
if (normalized === "localhost" || normalized === "localhost.localdomain") {
|
|
61211
|
-
return true;
|
|
61212
|
-
}
|
|
61213
|
-
if (normalized === "::1" || normalized === "0:0:0:0:0:0:0:1") {
|
|
61214
|
-
return true;
|
|
61215
|
-
}
|
|
61216
|
-
const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
|
|
61217
|
-
const ipv4Match = normalized.match(ipv4Pattern);
|
|
61218
|
-
if (ipv4Match) {
|
|
61219
|
-
const octets = ipv4Match.slice(1, 5).map(Number);
|
|
61220
|
-
if (octets.some((octet) => octet > 255)) {
|
|
61221
|
-
return false;
|
|
61222
|
-
}
|
|
61223
|
-
const [a, b] = octets;
|
|
61224
|
-
if (a === 127) {
|
|
61225
|
-
return true;
|
|
61226
|
-
}
|
|
61227
|
-
if (a === 0) {
|
|
61228
|
-
return true;
|
|
61229
|
-
}
|
|
61230
|
-
if (a === 169 && b === 254) {
|
|
61231
|
-
return true;
|
|
61232
|
-
}
|
|
61233
|
-
if (a === 10) {
|
|
61234
|
-
return true;
|
|
61235
|
-
}
|
|
61236
|
-
if (a === 172 && b >= 16 && b <= 31) {
|
|
61237
|
-
return true;
|
|
61238
|
-
}
|
|
61239
|
-
if (a === 192 && b === 168) {
|
|
61240
|
-
return true;
|
|
61241
|
-
}
|
|
61242
|
-
}
|
|
61243
|
-
if (normalized.startsWith("fd") || normalized.startsWith("fc")) {
|
|
61244
|
-
return true;
|
|
61245
|
-
}
|
|
61246
|
-
if (normalized.startsWith("fe80:")) {
|
|
61247
|
-
return true;
|
|
61248
|
-
}
|
|
61249
|
-
return false;
|
|
61250
|
-
}
|
|
61251
|
-
/**
|
|
61252
|
-
* Evaluate a policy rule against an input document via OPA REST API.
|
|
61253
|
-
*
|
|
61254
|
-
* @param input - The input document to evaluate
|
|
61255
|
-
* @param rulePath - OPA rule path (e.g., 'visor/check/execute')
|
|
61256
|
-
* @returns The result object from OPA, or undefined on error
|
|
61257
|
-
*/
|
|
61258
|
-
async evaluate(input, rulePath) {
|
|
61259
|
-
const encodedPath = rulePath.split("/").map((s) => encodeURIComponent(s)).join("/");
|
|
61260
|
-
const url = `${this.baseUrl}/v1/data/${encodedPath}`;
|
|
61261
|
-
const controller = new AbortController();
|
|
61262
|
-
const timer = setTimeout(() => controller.abort(), this.timeout);
|
|
61263
|
-
try {
|
|
61264
|
-
const response = await fetch(url, {
|
|
61265
|
-
method: "POST",
|
|
61266
|
-
headers: { "Content-Type": "application/json" },
|
|
61267
|
-
body: JSON.stringify({ input }),
|
|
61268
|
-
signal: controller.signal
|
|
61269
|
-
});
|
|
61270
|
-
if (!response.ok) {
|
|
61271
|
-
throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
|
|
61272
|
-
}
|
|
61273
|
-
let body;
|
|
61274
|
-
try {
|
|
61275
|
-
body = await response.json();
|
|
61276
|
-
} catch (jsonErr) {
|
|
61277
|
-
throw new Error(
|
|
61278
|
-
`OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`
|
|
61279
|
-
);
|
|
61280
|
-
}
|
|
61281
|
-
return body?.result;
|
|
61282
|
-
} finally {
|
|
61283
|
-
clearTimeout(timer);
|
|
61284
|
-
}
|
|
61285
|
-
}
|
|
61286
|
-
async shutdown() {
|
|
61287
|
-
}
|
|
61288
|
-
};
|
|
61289
|
-
}
|
|
61290
|
-
});
|
|
61291
|
-
|
|
61292
|
-
// src/enterprise/policy/policy-input-builder.ts
var PolicyInputBuilder;
var init_policy_input_builder = __esm({
  "src/enterprise/policy/policy-input-builder.ts"() {
    "use strict";
    /**
     * Builds OPA policy input documents (check execution, tool invocation,
     * capability resolution) from the configured roles and the current
     * actor / repository / pull-request context.
     */
    PolicyInputBuilder = class {
      roles;
      actor;
      repository;
      pullRequest;
      constructor(policyConfig, actor, repository, pullRequest) {
        this.roles = policyConfig.roles || {};
        this.actor = actor;
        this.repository = repository;
        this.pullRequest = pullRequest;
      }
      /** Resolve which roles apply to the current actor. */
      resolveRoles() {
        const names = [];
        for (const [name, role] of Object.entries(this.roles)) {
          if (this.#identityMatches(role) && this.#channelAllowed(role)) {
            names.push(name);
          }
        }
        return names;
      }
      /** True when the actor satisfies any identity criterion of the role. */
      #identityMatches(role) {
        const { authorAssociation, login, slack } = this.actor;
        if (role.author_association && authorAssociation && role.author_association.includes(authorAssociation)) {
          return true;
        }
        if (role.users && login && role.users.includes(login)) {
          return true;
        }
        if (role.slack_users && slack?.userId && role.slack_users.includes(slack.userId)) {
          return true;
        }
        if (role.emails && slack?.email) {
          // Email comparison is case-insensitive on both sides.
          const needle = slack.email.toLowerCase();
          return role.emails.some((candidate) => candidate.toLowerCase() === needle);
        }
        return false;
      }
      /** Role-level Slack channel restriction; an empty list means "any channel". */
      #channelAllowed(role) {
        if (!role.slack_channels || role.slack_channels.length === 0) {
          return true;
        }
        const channelId = this.actor.slack?.channelId;
        if (!channelId) {
          return false;
        }
        return role.slack_channels.includes(channelId);
      }
      /** Actor document shared by every policy input; `slack` only when present. */
      buildActor() {
        const actorDoc = {
          authorAssociation: this.actor.authorAssociation,
          login: this.actor.login,
          roles: this.resolveRoles(),
          isLocalMode: this.actor.isLocalMode
        };
        if (this.actor.slack) {
          actorDoc.slack = this.actor.slack;
        }
        return actorDoc;
      }
      /** Common actor/repository/pullRequest tail of every input document. */
      #context() {
        return {
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      forCheckExecution(check) {
        return {
          scope: "check.execute",
          check: {
            id: check.id,
            type: check.type,
            group: check.group,
            tags: check.tags,
            criticality: check.criticality,
            sandbox: check.sandbox,
            policy: check.policy
          },
          ...this.#context()
        };
      }
      forToolInvocation(serverName, methodName, transport) {
        return {
          scope: "tool.invoke",
          tool: { serverName, methodName, transport },
          ...this.#context()
        };
      }
      forCapabilityResolve(checkId, capabilities) {
        return {
          scope: "capability.resolve",
          check: { id: checkId, type: "ai" },
          capability: capabilities,
          ...this.#context()
        };
      }
    };
  }
});
|
|
61386
|
-
|
|
61387
|
-
// src/enterprise/policy/opa-policy-engine.ts
var opa_policy_engine_exports = {};
__export(opa_policy_engine_exports, {
  OpaPolicyEngine: () => OpaPolicyEngine
});
var OpaPolicyEngine;
var init_opa_policy_engine = __esm({
  "src/enterprise/policy/opa-policy-engine.ts"() {
    "use strict";
    init_opa_wasm_evaluator();
    init_opa_http_evaluator();
    init_policy_input_builder();
    /**
     * OPA-backed policy engine. Evaluates check execution, tool invocation,
     * and capability-resolution requests against either a local WASM bundle
     * ("local" engine) or a remote OPA server ("remote" engine). When no
     * engine is configured, every request is allowed.
     */
    OpaPolicyEngine = class {
      // Active evaluator: OpaWasmEvaluator, OpaHttpEvaluator, or null (allow-all).
      evaluator = null;
      // Behavior when a policy denies or evaluation fails: "deny" | "allow" | "warn".
      fallback;
      // Per-evaluation timeout in milliseconds.
      timeout;
      config;
      inputBuilder = null;
      logger = null;
      constructor(config) {
        this.config = config;
        this.fallback = config.fallback || "deny";
        this.timeout = config.timeout || 5e3;
      }
      /**
       * Build the actor/repo/PR context from environment variables and set up
       * the evaluator for the configured engine. Throws when required config
       * (rules path / server URL) is missing for the selected engine.
       */
      async initialize(config) {
        try {
          // Lazily load the bundled logger via the esbuild CJS shim; best-effort.
          this.logger = (init_logger(), __toCommonJS(logger_exports)).logger;
        } catch {
        }
        const actor = {
          authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
          login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
          // Local mode = not running inside GitHub Actions.
          isLocalMode: !process.env.GITHUB_ACTIONS
        };
        const repo = {
          owner: process.env.GITHUB_REPOSITORY_OWNER,
          name: process.env.GITHUB_REPOSITORY?.split("/")[1],
          branch: process.env.GITHUB_HEAD_REF,
          baseBranch: process.env.GITHUB_BASE_REF,
          event: process.env.GITHUB_EVENT_NAME
        };
        const prNum = process.env.GITHUB_PR_NUMBER ? parseInt(process.env.GITHUB_PR_NUMBER, 10) : void 0;
        const pullRequest = {
          // Guard against an unparsable GITHUB_PR_NUMBER (NaN from parseInt).
          number: prNum !== void 0 && Number.isFinite(prNum) ? prNum : void 0
        };
        this.inputBuilder = new PolicyInputBuilder(config, actor, repo, pullRequest);
        if (config.engine === "local") {
          if (!config.rules) {
            throw new Error("OPA local mode requires `policy.rules` path to .wasm or .rego files");
          }
          const wasm = new OpaWasmEvaluator();
          await wasm.initialize(config.rules);
          if (config.data) {
            wasm.loadData(config.data);
          }
          this.evaluator = wasm;
        } else if (config.engine === "remote") {
          if (!config.url) {
            throw new Error("OPA remote mode requires `policy.url` pointing to OPA server");
          }
          this.evaluator = new OpaHttpEvaluator(config.url, this.timeout);
        } else {
          // No engine configured: evaluate* methods short-circuit to allow.
          this.evaluator = null;
        }
      }
      /**
       * Update actor/repo/PR context (e.g., after PR info becomes available).
       * Called by the enterprise loader when engine context is enriched.
       */
      setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new PolicyInputBuilder(this.config, actor, repo, pullRequest);
      }
      // Decide whether a configured check may run; allow-all when no engine is set up.
      async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const cfg = checkConfig && typeof checkConfig === "object" ? checkConfig : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
          id: checkId,
          type: cfg.type || "ai",
          group: cfg.group,
          tags: cfg.tags,
          criticality: cfg.criticality,
          sandbox: cfg.sandbox,
          policy: policyOverride
        });
        // A per-check `policy.rule` override replaces the default rule path.
        return this.doEvaluate(input, this.resolveRulePath("check.execute", policyOverride?.rule));
      }
      // Decide whether an MCP tool call may proceed.
      async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, "visor/tool/invoke");
      }
      // Decide whether the requested capabilities may be granted to a check.
      async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, "visor/capability/resolve");
      }
      // Tear down the evaluator (if it supports shutdown) and drop all state.
      async shutdown() {
        if (this.evaluator && "shutdown" in this.evaluator) {
          await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
      }
      // Normalize a rule path: overrides are rooted under "visor/", and the
      // default scope is mapped dot-to-slash (e.g. "check.execute" -> "visor/check/execute").
      resolveRulePath(defaultScope, override) {
        if (override) {
          return override.startsWith("visor/") ? override : `visor/${override}`;
        }
        return `visor/${defaultScope.replace(/\./g, "/")}`;
      }
      /**
       * Evaluate `input` against `rulePath` with a timeout, applying the
       * configured fallback: "warn" downgrades denials and failures to
       * allowed-with-warning; "allow" permits on failure; "deny" blocks.
       * NOTE(review): if the timeout wins the race, the still-pending
       * evaluator promise may later reject unobserved — confirm acceptable.
       */
      async doEvaluate(input, rulePath) {
        try {
          this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
          let timer;
          const timeoutPromise = new Promise((_resolve, reject) => {
            timer = setTimeout(() => reject(new Error("policy evaluation timeout")), this.timeout);
          });
          try {
            const result = await Promise.race([this.rawEvaluate(input, rulePath), timeoutPromise]);
            const decision = this.parseDecision(result);
            if (!decision.allowed && this.fallback === "warn") {
              // Audit mode: record the denial but let the action proceed.
              decision.allowed = true;
              decision.warn = true;
              decision.reason = `audit: ${decision.reason || "policy denied"}`;
            }
            this.logger?.debug(
              `[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || "none"}`
            );
            return decision;
          } finally {
            // Always cancel the timer so a fast result doesn't leave it pending.
            if (timer) clearTimeout(timer);
          }
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
          // Evaluation error (including timeout): decision comes from the fallback.
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: `policy evaluation failed, fallback=${this.fallback}`
          };
        }
      }
      // Dispatch to the active evaluator. WASM returns the whole package tree,
      // so its result must be navigated down to the requested rule.
      async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof OpaWasmEvaluator) {
          const result = await this.evaluator.evaluate(input);
          return this.navigateWasmResult(result, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
      }
      /**
       * Navigate nested OPA WASM result tree to reach the specific rule's output.
       * The WASM entrypoint `-e visor` means the result root IS the visor package,
       * so we strip the `visor/` prefix and walk the remaining segments.
       */
      navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== "object") return result;
        const segments = rulePath.replace(/^visor\//, "").split("/");
        let current = result;
        for (const seg of segments) {
          if (current && typeof current === "object" && seg in current) {
            current = current[seg];
          } else {
            // Rule not present in the tree: treated as "no policy result".
            return void 0;
          }
        }
        return current;
      }
      // Translate a raw rule result into { allowed, warn?, reason?, capabilities? }.
      parseDecision(result) {
        if (result === void 0 || result === null) {
          // Missing result is governed by the fallback, same as a failure.
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: this.fallback === "warn" ? "audit: no policy result" : "no policy result"
          };
        }
        // Allowed unless the policy explicitly sets `allowed: false`.
        const allowed = result.allowed !== false;
        const decision = {
          allowed,
          reason: result.reason
        };
        if (result.capabilities) {
          decision.capabilities = result.capabilities;
        }
        return decision;
      }
    };
  }
});
|
|
61575
|
-
|
|
61576
|
-
// src/enterprise/scheduler/knex-store.ts
|
|
61577
|
-
// Lazy-export registration: exposes KnexStoreBackend once init_knex_store has run.
var knex_store_exports = {};
__export(knex_store_exports, {
  KnexStoreBackend: () => KnexStoreBackend
});
|
|
61581
|
-
/**
 * Normalize a DB numeric column to a JS number.
 * Drivers may return bigints/counters as strings; null/undefined become undefined.
 */
function toNum(val) {
  if (val == null) {
    return undefined;
  }
  if (typeof val === "string") {
    return parseInt(val, 10);
  }
  return val;
}
|
|
61585
|
-
/**
 * Parse a JSON column value, returning undefined for empty/null input
 * or malformed JSON instead of throwing.
 */
function safeJsonParse2(value) {
  if (!value) {
    return undefined;
  }
  let parsed;
  try {
    parsed = JSON.parse(value);
  } catch {
    // Corrupt or hand-edited column data: treat as absent.
    parsed = undefined;
  }
  return parsed;
}
|
|
61593
|
-
/**
 * Map a message_triggers DB row (snake_case columns, JSON stored as text,
 * booleans possibly stored as 0/1) onto a MessageTrigger domain object.
 */
function fromTriggerRow2(row) {
  // MySQL/MSSQL return booleans as 1/0; Postgres returns real booleans.
  const truthyFlag = (flag) => flag === true || flag === 1;
  const {
    id,
    creator_id,
    creator_context,
    creator_name,
    description,
    channels,
    from_users,
    from_bots,
    contains,
    match_pattern,
    threads,
    workflow,
    inputs,
    output_context,
    status,
    enabled,
    created_at
  } = row;
  return {
    id,
    creatorId: creator_id,
    creatorContext: creator_context ?? undefined,
    creatorName: creator_name ?? undefined,
    description: description ?? undefined,
    channels: safeJsonParse2(channels),
    fromUsers: safeJsonParse2(from_users),
    fromBots: truthyFlag(from_bots),
    contains: safeJsonParse2(contains),
    matchPattern: match_pattern ?? undefined,
    threads,
    workflow,
    inputs: safeJsonParse2(inputs),
    outputContext: safeJsonParse2(output_context),
    status,
    enabled: truthyFlag(enabled),
    createdAt: toNum(created_at)
  };
}
|
|
61614
|
-
/**
 * Map a MessageTrigger domain object onto a message_triggers insert row:
 * snake_case columns, structured fields serialized to JSON text,
 * absent optionals stored as SQL NULL.
 */
function toTriggerInsertRow(trigger) {
  const asJson = (value) => (value ? JSON.stringify(value) : null);
  return {
    id: trigger.id,
    creator_id: trigger.creatorId,
    creator_context: trigger.creatorContext ?? null,
    creator_name: trigger.creatorName ?? null,
    description: trigger.description ?? null,
    channels: asJson(trigger.channels),
    from_users: asJson(trigger.fromUsers),
    from_bots: trigger.fromBots,
    contains: asJson(trigger.contains),
    match_pattern: trigger.matchPattern ?? null,
    threads: trigger.threads,
    workflow: trigger.workflow,
    inputs: asJson(trigger.inputs),
    output_context: asJson(trigger.outputContext),
    status: trigger.status,
    enabled: trigger.enabled,
    created_at: trigger.createdAt
  };
}
|
|
61635
|
-
/**
 * Map a schedules DB row (snake_case columns, JSON stored as text,
 * booleans possibly stored as 0/1) onto a Schedule domain object.
 */
function fromDbRow2(row) {
  const {
    id,
    creator_id,
    creator_context,
    creator_name,
    timezone,
    schedule_expr,
    run_at,
    is_recurring,
    original_expression,
    workflow,
    workflow_inputs,
    output_context,
    status,
    created_at,
    last_run_at,
    next_run_at,
    run_count,
    failure_count,
    last_error,
    previous_response
  } = row;
  return {
    id,
    creatorId: creator_id,
    creatorContext: creator_context ?? undefined,
    creatorName: creator_name ?? undefined,
    timezone,
    schedule: schedule_expr,
    runAt: toNum(run_at),
    // MySQL/MSSQL return booleans as 1/0; Postgres returns real booleans.
    isRecurring: is_recurring === true || is_recurring === 1,
    originalExpression: original_expression,
    workflow: workflow ?? undefined,
    workflowInputs: safeJsonParse2(workflow_inputs),
    outputContext: safeJsonParse2(output_context),
    status,
    createdAt: toNum(created_at),
    lastRunAt: toNum(last_run_at),
    nextRunAt: toNum(next_run_at),
    runCount: run_count,
    failureCount: failure_count,
    lastError: last_error ?? undefined,
    previousResponse: previous_response ?? undefined
  };
}
|
|
61659
|
-
/**
 * Map a Schedule domain object onto a schedules insert row:
 * snake_case columns, structured fields serialized to JSON text,
 * absent optionals stored as SQL NULL.
 */
function toInsertRow(schedule) {
  const asJson = (value) => (value ? JSON.stringify(value) : null);
  return {
    id: schedule.id,
    creator_id: schedule.creatorId,
    creator_context: schedule.creatorContext ?? null,
    creator_name: schedule.creatorName ?? null,
    timezone: schedule.timezone,
    schedule_expr: schedule.schedule,
    run_at: schedule.runAt ?? null,
    is_recurring: schedule.isRecurring,
    original_expression: schedule.originalExpression,
    workflow: schedule.workflow ?? null,
    workflow_inputs: asJson(schedule.workflowInputs),
    output_context: asJson(schedule.outputContext),
    status: schedule.status,
    created_at: schedule.createdAt,
    last_run_at: schedule.lastRunAt ?? null,
    next_run_at: schedule.nextRunAt ?? null,
    run_count: schedule.runCount,
    failure_count: schedule.failureCount,
    last_error: schedule.lastError ?? null,
    previous_response: schedule.previousResponse ?? null
  };
}
|
|
61683
|
-
var fs28, path31, import_uuid2, KnexStoreBackend;
|
|
61684
|
-
var init_knex_store = __esm({
|
|
61685
|
-
"src/enterprise/scheduler/knex-store.ts"() {
|
|
61686
|
-
"use strict";
|
|
61687
|
-
fs28 = __toESM(require("fs"));
|
|
61688
|
-
path31 = __toESM(require("path"));
|
|
61689
|
-
import_uuid2 = require("uuid");
|
|
61690
|
-
init_logger();
|
|
61691
|
-
KnexStoreBackend = class {
|
|
61692
|
-
      // Knex instance; null until initialize() succeeds and after shutdown().
      knex = null;
      // Driver key: "postgresql" | "mysql" | "mssql" (see clientMap in initialize()).
      driver;
      // Raw connection settings from the storage config ({} when absent).
      connection;
      constructor(driver, storageConfig, _haConfig) {
        this.driver = driver;
        this.connection = storageConfig.connection || {};
      }
|
|
61699
|
-
      /**
       * Connect to the database and create any missing tables.
       * knex is loaded lazily via createRequire so it stays an optional
       * dependency: a clear install hint is raised when it is absent.
       */
      async initialize() {
        const { createRequire } = require("module");
        const runtimeRequire = createRequire(__filename);
        let knexFactory;
        try {
          knexFactory = runtimeRequire("knex");
        } catch (err) {
          const code = err?.code;
          if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
            // Optional dependency missing: surface an actionable message.
            throw new Error(
              "knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
            );
          }
          // Any other load failure is unexpected; propagate as-is.
          throw err;
        }
        // Map our driver names onto knex client adapters.
        const clientMap = {
          postgresql: "pg",
          mysql: "mysql2",
          mssql: "tedious"
        };
        const client = clientMap[this.driver];
        let connection;
        if (this.connection.connection_string) {
          // A full connection string overrides individual host/port/etc. fields.
          connection = this.connection.connection_string;
        } else if (this.driver === "mssql") {
          connection = this.buildMssqlConnection();
        } else {
          connection = this.buildStandardConnection();
        }
        this.knex = knexFactory({
          client,
          connection,
          pool: {
            min: this.connection.pool?.min ?? 0,
            max: this.connection.pool?.max ?? 10
          }
        });
        await this.migrateSchema();
        logger.info(`[KnexStore] Initialized (${this.driver})`);
      }
|
|
61739
|
-
      /** Connection object for pg/mysql2 (host, port, database, user, password, ssl). */
      buildStandardConnection() {
        return {
          host: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          ssl: this.resolveSslConfig()
        };
      }
      /** MSSQL (tedious) variant: uses `server` plus an `options` block instead of `ssl`. */
      buildMssqlConnection() {
        const ssl = this.connection.ssl;
        // ssl may be a boolean or an object with `enabled`/`reject_unauthorized`.
        const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
        return {
          server: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          options: {
            encrypt: sslEnabled,
            // Trust the server cert when verification was explicitly disabled,
            // or when encryption is off entirely.
            trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
          }
        };
      }
      /**
       * Translate the config `ssl` value (boolean or object) into driver TLS
       * options, loading CA/cert/key material from validated file paths.
       * Returns false when SSL is disabled or not configured.
       */
      resolveSslConfig() {
        const ssl = this.connection.ssl;
        if (ssl === false || ssl === void 0) return false;
        if (ssl === true) return { rejectUnauthorized: true };
        if (ssl.enabled === false) return false;
        const result = {
          // Verification is on unless reject_unauthorized is explicitly false.
          rejectUnauthorized: ssl.reject_unauthorized !== false
        };
        if (ssl.ca) {
          const caPath = this.validateSslPath(ssl.ca, "CA certificate");
          result.ca = fs28.readFileSync(caPath, "utf8");
        }
        if (ssl.cert) {
          const certPath = this.validateSslPath(ssl.cert, "client certificate");
          result.cert = fs28.readFileSync(certPath, "utf8");
        }
        if (ssl.key) {
          const keyPath = this.validateSslPath(ssl.key, "client key");
          result.key = fs28.readFileSync(keyPath, "utf8");
        }
        return result;
      }
      /** Resolve an SSL file path and reject non-normalized or missing paths. */
      validateSslPath(filePath, label) {
        const resolved = path31.resolve(filePath);
        if (resolved !== path31.normalize(resolved)) {
          throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
        }
        if (!fs28.existsSync(resolved)) {
          throw new Error(`SSL ${label} not found: ${filePath}`);
        }
        return resolved;
      }
|
|
61796
|
-
      /** Close the connection pool; safe to call when never initialized. */
      async shutdown() {
        if (this.knex) {
          await this.knex.destroy();
          this.knex = null;
        }
      }
      /**
       * Create the schedules, message_triggers, and scheduler_locks tables
       * when they do not exist yet. Timestamps are stored as epoch-millis
       * bigints; JSON payloads as text columns.
       */
      async migrateSchema() {
        const knex = this.getKnex();
        const exists = await knex.schema.hasTable("schedules");
        if (!exists) {
          await knex.schema.createTable("schedules", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.string("timezone", 64).notNullable().defaultTo("UTC");
            table.string("schedule_expr", 255);
            table.bigInteger("run_at");
            table.boolean("is_recurring").notNullable();
            table.text("original_expression");
            table.string("workflow", 255);
            table.text("workflow_inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().index();
            table.bigInteger("created_at").notNullable();
            table.bigInteger("last_run_at");
            table.bigInteger("next_run_at");
            table.integer("run_count").notNullable().defaultTo(0);
            table.integer("failure_count").notNullable().defaultTo(0);
            table.text("last_error");
            table.text("previous_response");
            // Composite index backing the getDueSchedules() query.
            table.index(["status", "next_run_at"]);
          });
        }
        const triggersExist = await knex.schema.hasTable("message_triggers");
        if (!triggersExist) {
          await knex.schema.createTable("message_triggers", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.text("description");
            table.text("channels");
            table.text("from_users");
            table.boolean("from_bots").notNullable().defaultTo(false);
            table.text("contains");
            table.text("match_pattern");
            table.string("threads", 20).notNullable().defaultTo("any");
            table.string("workflow", 255).notNullable();
            table.text("inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().defaultTo("active").index();
            table.boolean("enabled").notNullable().defaultTo(true);
            table.bigInteger("created_at").notNullable();
          });
        }
        const locksExist = await knex.schema.hasTable("scheduler_locks");
        if (!locksExist) {
          await knex.schema.createTable("scheduler_locks", (table) => {
            table.string("lock_id", 255).primary();
            table.string("node_id", 255).notNullable();
            table.string("lock_token", 36).notNullable();
            table.bigInteger("acquired_at").notNullable();
            table.bigInteger("expires_at").notNullable();
          });
        }
      }
      /** Guarded accessor; throws when initialize() has not run (or after shutdown). */
      getKnex() {
        if (!this.knex) {
          throw new Error("[KnexStore] Not initialized. Call initialize() first.");
        }
        return this.knex;
      }
|
|
61869
|
-
      // --- CRUD ---
      /** Insert a new schedule with a fresh UUID and zeroed counters; returns the stored record. */
      async create(schedule) {
        const knex = this.getKnex();
        const newSchedule = {
          ...schedule,
          id: (0, import_uuid2.v4)(),
          createdAt: Date.now(),
          runCount: 0,
          failureCount: 0,
          status: "active"
        };
        await knex("schedules").insert(toInsertRow(newSchedule));
        logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
        return newSchedule;
      }
      /** Idempotent import: keeps the schedule's existing id, silently skips duplicates. */
      async importSchedule(schedule) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", schedule.id).first();
        if (existing) return;
        await knex("schedules").insert(toInsertRow(schedule));
      }
      /** Fetch one schedule by id; undefined when not found. */
      async get(id) {
        const knex = this.getKnex();
        const row = await knex("schedules").where("id", id).first();
        return row ? fromDbRow2(row) : void 0;
      }
      /** Shallow-merge `patch` into the stored schedule; the id is immutable. */
      async update(id, patch) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", id).first();
        if (!existing) return void 0;
        const current = fromDbRow2(existing);
        const updated = { ...current, ...patch, id: current.id };
        const row = toInsertRow(updated);
        // Never rewrite the primary key column.
        delete row.id;
        await knex("schedules").where("id", id).update(row);
        return updated;
      }
      /** Delete by id; true when a row was actually removed. */
      async delete(id) {
        const knex = this.getKnex();
        const deleted = await knex("schedules").where("id", id).del();
        if (deleted > 0) {
          logger.info(`[KnexStore] Deleted schedule ${id}`);
          return true;
        }
        return false;
      }
|
|
61915
|
-
      // --- Queries ---
      /** All schedules owned by a user, regardless of status. */
      async getByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("creator_id", creatorId);
        return rows.map((r) => fromDbRow2(r));
      }
      /** All schedules currently in "active" status. */
      async getActiveSchedules() {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("status", "active");
        return rows.map((r) => fromDbRow2(r));
      }
      /**
       * Active schedules that are due at `now` (default: current time):
       * one-shot rows by run_at, recurring rows by next_run_at.
       */
      async getDueSchedules(now) {
        const ts = now ?? Date.now();
        const knex = this.getKnex();
        // MSSQL stores booleans as bit 0/1; other drivers take true/false.
        const bFalse = this.driver === "mssql" ? 0 : false;
        const bTrue = this.driver === "mssql" ? 1 : true;
        const rows = await knex("schedules").where("status", "active").andWhere(function() {
          this.where(function() {
            this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
          }).orWhere(function() {
            this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
          });
        });
        return rows.map((r) => fromDbRow2(r));
      }
      /** Active schedules of a user whose workflow name contains `workflowName` (case-insensitive). */
      async findByWorkflow(creatorId, workflowName) {
        const knex = this.getKnex();
        // Escape LIKE metacharacters so the user's text matches literally.
        const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
        const pattern = `%${escaped}%`;
        const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
        return rows.map((r) => fromDbRow2(r));
      }
      /** Every schedule in the table. */
      async getAll() {
        const knex = this.getKnex();
        const rows = await knex("schedules");
        return rows.map((r) => fromDbRow2(r));
      }
      /** Aggregate counts by status and recurrence, computed in a single query. */
      async getStats() {
        const knex = this.getKnex();
        // Boolean literals differ per dialect (MSSQL bit vs. true/false keywords).
        const boolTrue = this.driver === "mssql" ? "1" : "true";
        const boolFalse = this.driver === "mssql" ? "0" : "false";
        const result = await knex("schedules").select(
          knex.raw("COUNT(*) as total"),
          knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
          knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
          knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
          knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
        ).first();
        // Drivers may return aggregates as strings; Number(...) normalizes them.
        return {
          total: Number(result.total) || 0,
          active: Number(result.active) || 0,
          paused: Number(result.paused) || 0,
          completed: Number(result.completed) || 0,
          failed: Number(result.failed) || 0,
          recurring: Number(result.recurring) || 0,
          oneTime: Number(result.one_time) || 0
        };
      }
|
|
61975
|
-
      /**
       * Enforce schedule quotas before creation. Throws a user-facing Error
       * when the global, per-user, or per-user-recurring limit is reached;
       * limits that are unset (falsy) are not enforced.
       */
      async validateLimits(creatorId, isRecurring, limits) {
        const knex = this.getKnex();
        if (limits.maxGlobal) {
          const result = await knex("schedules").count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxGlobal) {
            throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
          }
        }
        if (limits.maxPerUser) {
          const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxPerUser) {
            throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
          }
        }
        if (isRecurring && limits.maxRecurringPerUser) {
          // MSSQL stores booleans as bit 0/1; other drivers take true/false.
          const bTrue = this.driver === "mssql" ? 1 : true;
          const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
            throw new Error(
              `You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
            );
          }
        }
      }
|
|
61999
|
-
      // --- HA Distributed Locking (via scheduler_locks table) ---
      /**
       * Try to take (or take over an expired) lock. Returns the lock token on
       * success, null when another node currently holds a live lock.
       * Strategy: first attempt an UPDATE of an expired row, then fall back
       * to INSERT; a unique-key violation on insert means we lost the race.
       */
      async tryAcquireLock(lockId, nodeId, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const token = (0, import_uuid2.v4)();
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
          node_id: nodeId,
          lock_token: token,
          acquired_at: now,
          expires_at: expiresAt
        });
        if (updated > 0) return token;
        try {
          await knex("scheduler_locks").insert({
            lock_id: lockId,
            node_id: nodeId,
            lock_token: token,
            acquired_at: now,
            expires_at: expiresAt
          });
          return token;
        } catch {
          // Primary-key conflict: another node holds the lock.
          return null;
        }
      }
      /** Release the lock only when the token still matches (no-op otherwise). */
      async releaseLock(lockId, lockToken) {
        const knex = this.getKnex();
        await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
      }
      /** Extend a held lock's TTL; false when the token no longer owns it. */
      async renewLock(lockId, lockToken, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
        return updated > 0;
      }
      /** No-op: writes go straight to the database, nothing is buffered. */
      async flush() {
      }
|
|
62038
|
-
// --- Message Trigger CRUD ---
|
|
62039
|
-
async createTrigger(trigger) {
|
|
62040
|
-
const knex = this.getKnex();
|
|
62041
|
-
const newTrigger = {
|
|
62042
|
-
...trigger,
|
|
62043
|
-
id: (0, import_uuid2.v4)(),
|
|
62044
|
-
createdAt: Date.now()
|
|
62045
|
-
};
|
|
62046
|
-
await knex("message_triggers").insert(toTriggerInsertRow(newTrigger));
|
|
62047
|
-
logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
|
|
62048
|
-
return newTrigger;
|
|
62049
|
-
}
|
|
62050
|
-
async getTrigger(id) {
|
|
62051
|
-
const knex = this.getKnex();
|
|
62052
|
-
const row = await knex("message_triggers").where("id", id).first();
|
|
62053
|
-
return row ? fromTriggerRow2(row) : void 0;
|
|
62054
|
-
}
|
|
62055
|
-
async updateTrigger(id, patch) {
|
|
62056
|
-
const knex = this.getKnex();
|
|
62057
|
-
const existing = await knex("message_triggers").where("id", id).first();
|
|
62058
|
-
if (!existing) return void 0;
|
|
62059
|
-
const current = fromTriggerRow2(existing);
|
|
62060
|
-
const updated = {
|
|
62061
|
-
...current,
|
|
62062
|
-
...patch,
|
|
62063
|
-
id: current.id,
|
|
62064
|
-
createdAt: current.createdAt
|
|
62065
|
-
};
|
|
62066
|
-
const row = toTriggerInsertRow(updated);
|
|
62067
|
-
delete row.id;
|
|
62068
|
-
await knex("message_triggers").where("id", id).update(row);
|
|
62069
|
-
return updated;
|
|
62070
|
-
}
|
|
62071
|
-
async deleteTrigger(id) {
|
|
62072
|
-
const knex = this.getKnex();
|
|
62073
|
-
const deleted = await knex("message_triggers").where("id", id).del();
|
|
62074
|
-
if (deleted > 0) {
|
|
62075
|
-
logger.info(`[KnexStore] Deleted trigger ${id}`);
|
|
62076
|
-
return true;
|
|
62077
|
-
}
|
|
62078
|
-
return false;
|
|
62079
|
-
}
|
|
62080
|
-
async getTriggersByCreator(creatorId) {
|
|
62081
|
-
const knex = this.getKnex();
|
|
62082
|
-
const rows = await knex("message_triggers").where("creator_id", creatorId);
|
|
62083
|
-
return rows.map((r) => fromTriggerRow2(r));
|
|
62084
|
-
}
|
|
62085
|
-
async getActiveTriggers() {
|
|
62086
|
-
const knex = this.getKnex();
|
|
62087
|
-
const rows = await knex("message_triggers").where("status", "active").where("enabled", this.driver === "mssql" ? 1 : true);
|
|
62088
|
-
return rows.map((r) => fromTriggerRow2(r));
|
|
62089
|
-
}
|
|
62090
|
-
};
|
|
62091
|
-
}
|
|
62092
|
-
});
|
|
62093
|
-
|
|
62094
|
-
// src/enterprise/loader.ts
|
|
62095
|
-
var loader_exports = {};
|
|
62096
|
-
__export(loader_exports, {
|
|
62097
|
-
loadEnterprisePolicyEngine: () => loadEnterprisePolicyEngine,
|
|
62098
|
-
loadEnterpriseStoreBackend: () => loadEnterpriseStoreBackend
|
|
62099
|
-
});
|
|
62100
|
-
async function loadEnterprisePolicyEngine(config) {
|
|
62101
|
-
try {
|
|
62102
|
-
const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
|
|
62103
|
-
const validator = new LicenseValidator2();
|
|
62104
|
-
const license = await validator.loadAndValidate();
|
|
62105
|
-
if (!license || !validator.hasFeature("policy")) {
|
|
62106
|
-
return new DefaultPolicyEngine();
|
|
62107
|
-
}
|
|
62108
|
-
if (validator.isInGracePeriod()) {
|
|
62109
|
-
console.warn(
|
|
62110
|
-
"[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
|
|
62111
|
-
);
|
|
62112
|
-
}
|
|
62113
|
-
const { OpaPolicyEngine: OpaPolicyEngine2 } = await Promise.resolve().then(() => (init_opa_policy_engine(), opa_policy_engine_exports));
|
|
62114
|
-
const engine = new OpaPolicyEngine2(config);
|
|
62115
|
-
await engine.initialize(config);
|
|
62116
|
-
return engine;
|
|
62117
|
-
} catch (err) {
|
|
62118
|
-
const msg = err instanceof Error ? err.message : String(err);
|
|
62119
|
-
try {
|
|
62120
|
-
const { logger: logger2 } = (init_logger(), __toCommonJS(logger_exports));
|
|
62121
|
-
logger2.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
|
|
62122
|
-
} catch {
|
|
62123
|
-
}
|
|
62124
|
-
return new DefaultPolicyEngine();
|
|
62125
|
-
}
|
|
62126
|
-
}
|
|
62127
|
-
async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
|
|
62128
|
-
const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
|
|
62129
|
-
const validator = new LicenseValidator2();
|
|
62130
|
-
const license = await validator.loadAndValidate();
|
|
62131
|
-
if (!license || !validator.hasFeature("scheduler-sql")) {
|
|
62132
|
-
throw new Error(
|
|
62133
|
-
`The ${driver} schedule storage driver requires a Visor Enterprise license with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`
|
|
62134
|
-
);
|
|
62135
|
-
}
|
|
62136
|
-
if (validator.isInGracePeriod()) {
|
|
62137
|
-
console.warn(
|
|
62138
|
-
"[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
|
|
62139
|
-
);
|
|
62140
|
-
}
|
|
62141
|
-
const { KnexStoreBackend: KnexStoreBackend2 } = await Promise.resolve().then(() => (init_knex_store(), knex_store_exports));
|
|
62142
|
-
return new KnexStoreBackend2(driver, storageConfig, haConfig);
|
|
62143
|
-
}
|
|
62144
|
-
var init_loader = __esm({
|
|
62145
|
-
"src/enterprise/loader.ts"() {
|
|
62146
|
-
"use strict";
|
|
62147
|
-
init_default_engine();
|
|
62148
|
-
}
|
|
62149
|
-
});
|
|
62150
|
-
|
|
62151
60777
|
// src/event-bus/event-bus.ts
|
|
62152
60778
|
var event_bus_exports = {};
|
|
62153
60779
|
__export(event_bus_exports, {
|
|
@@ -62850,8 +61476,8 @@ var init_github_comments = __esm({
|
|
|
62850
61476
|
* Update existing comment or create new one with collision detection
|
|
62851
61477
|
*/
|
|
62852
61478
|
async updateOrCreateComment(owner, repo, prNumber, content, options = {}) {
|
|
62853
|
-
return new Promise((
|
|
62854
|
-
this._writeQueue = this._writeQueue.then(() => this._doUpdateOrCreate(owner, repo, prNumber, content, options)).then(
|
|
61479
|
+
return new Promise((resolve17, reject) => {
|
|
61480
|
+
this._writeQueue = this._writeQueue.then(() => this._doUpdateOrCreate(owner, repo, prNumber, content, options)).then(resolve17, reject);
|
|
62855
61481
|
});
|
|
62856
61482
|
}
|
|
62857
61483
|
async _doUpdateOrCreate(owner, repo, prNumber, content, options = {}) {
|
|
@@ -63062,8 +61688,8 @@ ${content}
|
|
|
63062
61688
|
* Sleep utility
|
|
63063
61689
|
*/
|
|
63064
61690
|
sleep(ms) {
|
|
63065
|
-
return new Promise((
|
|
63066
|
-
const t = setTimeout(
|
|
61691
|
+
return new Promise((resolve17) => {
|
|
61692
|
+
const t = setTimeout(resolve17, ms);
|
|
63067
61693
|
if (typeof t.unref === "function") {
|
|
63068
61694
|
try {
|
|
63069
61695
|
t.unref();
|
|
@@ -63348,8 +61974,8 @@ ${end}`);
|
|
|
63348
61974
|
async updateGroupedComment(ctx, comments, group, changedIds) {
|
|
63349
61975
|
const existingLock = this.updateLocks.get(group);
|
|
63350
61976
|
let resolveLock;
|
|
63351
|
-
const ourLock = new Promise((
|
|
63352
|
-
resolveLock =
|
|
61977
|
+
const ourLock = new Promise((resolve17) => {
|
|
61978
|
+
resolveLock = resolve17;
|
|
63353
61979
|
});
|
|
63354
61980
|
this.updateLocks.set(group, ourLock);
|
|
63355
61981
|
try {
|
|
@@ -63680,7 +62306,7 @@ ${blocks}
|
|
|
63680
62306
|
* Sleep utility for enforcing delays
|
|
63681
62307
|
*/
|
|
63682
62308
|
sleep(ms) {
|
|
63683
|
-
return new Promise((
|
|
62309
|
+
return new Promise((resolve17) => setTimeout(resolve17, ms));
|
|
63684
62310
|
}
|
|
63685
62311
|
};
|
|
63686
62312
|
}
|
|
@@ -65561,11 +64187,11 @@ var require_request3 = __commonJS({
|
|
|
65561
64187
|
"use strict";
|
|
65562
64188
|
var __awaiter = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) {
|
|
65563
64189
|
function adopt(value) {
|
|
65564
|
-
return value instanceof P ? value : new P(function(
|
|
65565
|
-
|
|
64190
|
+
return value instanceof P ? value : new P(function(resolve17) {
|
|
64191
|
+
resolve17(value);
|
|
65566
64192
|
});
|
|
65567
64193
|
}
|
|
65568
|
-
return new (P || (P = Promise))(function(
|
|
64194
|
+
return new (P || (P = Promise))(function(resolve17, reject) {
|
|
65569
64195
|
function fulfilled(value) {
|
|
65570
64196
|
try {
|
|
65571
64197
|
step(generator.next(value));
|
|
@@ -65581,7 +64207,7 @@ var require_request3 = __commonJS({
|
|
|
65581
64207
|
}
|
|
65582
64208
|
}
|
|
65583
64209
|
function step(result) {
|
|
65584
|
-
result.done ?
|
|
64210
|
+
result.done ? resolve17(result.value) : adopt(result.value).then(fulfilled, rejected);
|
|
65585
64211
|
}
|
|
65586
64212
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
65587
64213
|
});
|
|
@@ -65605,9 +64231,9 @@ var require_request3 = __commonJS({
|
|
|
65605
64231
|
HttpMethod2["PATCH"] = "PATCH";
|
|
65606
64232
|
})(HttpMethod = exports2.HttpMethod || (exports2.HttpMethod = {}));
|
|
65607
64233
|
var SvixRequest = class {
|
|
65608
|
-
constructor(method,
|
|
64234
|
+
constructor(method, path31) {
|
|
65609
64235
|
this.method = method;
|
|
65610
|
-
this.path =
|
|
64236
|
+
this.path = path31;
|
|
65611
64237
|
this.queryParams = {};
|
|
65612
64238
|
this.headerParams = {};
|
|
65613
64239
|
}
|
|
@@ -65710,7 +64336,7 @@ var require_request3 = __commonJS({
|
|
|
65710
64336
|
}
|
|
65711
64337
|
function sendWithRetry(url, init, retryScheduleInMs, nextInterval = 50, triesLeft = 2, fetchImpl = fetch, retryCount = 1) {
|
|
65712
64338
|
return __awaiter(this, void 0, void 0, function* () {
|
|
65713
|
-
const sleep = (interval) => new Promise((
|
|
64339
|
+
const sleep = (interval) => new Promise((resolve17) => setTimeout(resolve17, interval));
|
|
65714
64340
|
try {
|
|
65715
64341
|
const response = yield fetchImpl(url, init);
|
|
65716
64342
|
if (triesLeft <= 0 || response.status < 500) {
|
|
@@ -74784,7 +73410,7 @@ ${message}`;
|
|
|
74784
73410
|
});
|
|
74785
73411
|
|
|
74786
73412
|
// src/agent-protocol/task-store.ts
|
|
74787
|
-
function
|
|
73413
|
+
function safeJsonParse2(value) {
|
|
74788
73414
|
if (!value) return void 0;
|
|
74789
73415
|
try {
|
|
74790
73416
|
return JSON.parse(value);
|
|
@@ -74801,12 +73427,12 @@ function taskRowToAgentTask(row) {
|
|
|
74801
73427
|
context_id: row.context_id,
|
|
74802
73428
|
status: {
|
|
74803
73429
|
state: row.state,
|
|
74804
|
-
message:
|
|
73430
|
+
message: safeJsonParse2(row.status_message),
|
|
74805
73431
|
timestamp: row.updated_at
|
|
74806
73432
|
},
|
|
74807
|
-
artifacts:
|
|
74808
|
-
history:
|
|
74809
|
-
metadata:
|
|
73433
|
+
artifacts: safeJsonParse2(row.artifacts) ?? [],
|
|
73434
|
+
history: safeJsonParse2(row.history) ?? [],
|
|
73435
|
+
metadata: safeJsonParse2(row.request_metadata),
|
|
74810
73436
|
workflow_id: row.workflow_id ?? void 0
|
|
74811
73437
|
};
|
|
74812
73438
|
}
|
|
@@ -75043,7 +73669,7 @@ var init_task_store = __esm({
|
|
|
75043
73669
|
const db = this.getDb();
|
|
75044
73670
|
const row = db.prepare("SELECT artifacts FROM agent_tasks WHERE id = ?").get(taskId);
|
|
75045
73671
|
if (!row) throw new TaskNotFoundError(taskId);
|
|
75046
|
-
const artifacts =
|
|
73672
|
+
const artifacts = safeJsonParse2(row.artifacts) ?? [];
|
|
75047
73673
|
artifacts.push(artifact);
|
|
75048
73674
|
db.prepare("UPDATE agent_tasks SET artifacts = ?, updated_at = ? WHERE id = ?").run(
|
|
75049
73675
|
JSON.stringify(artifacts),
|
|
@@ -75055,7 +73681,7 @@ var init_task_store = __esm({
|
|
|
75055
73681
|
const db = this.getDb();
|
|
75056
73682
|
const row = db.prepare("SELECT history FROM agent_tasks WHERE id = ?").get(taskId);
|
|
75057
73683
|
if (!row) throw new TaskNotFoundError(taskId);
|
|
75058
|
-
const history =
|
|
73684
|
+
const history = safeJsonParse2(row.history) ?? [];
|
|
75059
73685
|
history.push(message);
|
|
75060
73686
|
db.prepare("UPDATE agent_tasks SET history = ?, updated_at = ? WHERE id = ?").run(
|
|
75061
73687
|
JSON.stringify(history),
|
|
@@ -75593,13 +74219,13 @@ __export(a2a_frontend_exports, {
|
|
|
75593
74219
|
resultToArtifacts: () => resultToArtifacts
|
|
75594
74220
|
});
|
|
75595
74221
|
function readJsonBody(req) {
|
|
75596
|
-
return new Promise((
|
|
74222
|
+
return new Promise((resolve17, reject) => {
|
|
75597
74223
|
const chunks = [];
|
|
75598
74224
|
req.on("data", (chunk) => chunks.push(chunk));
|
|
75599
74225
|
req.on("end", () => {
|
|
75600
74226
|
try {
|
|
75601
74227
|
const body = Buffer.concat(chunks).toString("utf8");
|
|
75602
|
-
|
|
74228
|
+
resolve17(body ? JSON.parse(body) : {});
|
|
75603
74229
|
} catch {
|
|
75604
74230
|
reject(new ParseError("Malformed JSON body"));
|
|
75605
74231
|
}
|
|
@@ -75842,12 +74468,12 @@ var init_a2a_frontend = __esm({
|
|
|
75842
74468
|
}
|
|
75843
74469
|
const port = this.config.port ?? 9e3;
|
|
75844
74470
|
const host = this.config.host ?? "0.0.0.0";
|
|
75845
|
-
await new Promise((
|
|
74471
|
+
await new Promise((resolve17) => {
|
|
75846
74472
|
this.server.listen(port, host, () => {
|
|
75847
74473
|
const addr = this.server.address();
|
|
75848
74474
|
this._boundPort = typeof addr === "object" && addr ? addr.port : port;
|
|
75849
74475
|
logger.info(`A2A server listening on ${host}:${this._boundPort}`);
|
|
75850
|
-
|
|
74476
|
+
resolve17();
|
|
75851
74477
|
});
|
|
75852
74478
|
});
|
|
75853
74479
|
if (this.agentCard) {
|
|
@@ -75873,7 +74499,7 @@ var init_a2a_frontend = __esm({
|
|
|
75873
74499
|
if (typeof srv.closeAllConnections === "function") {
|
|
75874
74500
|
srv.closeAllConnections();
|
|
75875
74501
|
}
|
|
75876
|
-
await new Promise((
|
|
74502
|
+
await new Promise((resolve17) => srv.close(() => resolve17()));
|
|
75877
74503
|
this.server = null;
|
|
75878
74504
|
}
|
|
75879
74505
|
}
|
|
@@ -75890,7 +74516,7 @@ var init_a2a_frontend = __esm({
|
|
|
75890
74516
|
if (timeoutMs > 0 && Date.now() - startedAt >= timeoutMs) {
|
|
75891
74517
|
break;
|
|
75892
74518
|
}
|
|
75893
|
-
await new Promise((
|
|
74519
|
+
await new Promise((resolve17) => setTimeout(resolve17, 500));
|
|
75894
74520
|
}
|
|
75895
74521
|
this.taskQueue = null;
|
|
75896
74522
|
}
|
|
@@ -75905,8 +74531,8 @@ var init_a2a_frontend = __esm({
|
|
|
75905
74531
|
}
|
|
75906
74532
|
this.streamManager.shutdown();
|
|
75907
74533
|
if (this.server) {
|
|
75908
|
-
await new Promise((
|
|
75909
|
-
this.server.close((err) => err ? reject(err) :
|
|
74534
|
+
await new Promise((resolve17, reject) => {
|
|
74535
|
+
this.server.close((err) => err ? reject(err) : resolve17());
|
|
75910
74536
|
});
|
|
75911
74537
|
this.server = null;
|
|
75912
74538
|
}
|
|
@@ -76623,15 +75249,15 @@ function serializeRunState(state) {
|
|
|
76623
75249
|
])
|
|
76624
75250
|
};
|
|
76625
75251
|
}
|
|
76626
|
-
var
|
|
75252
|
+
var path30, fs28, StateMachineExecutionEngine;
|
|
76627
75253
|
var init_state_machine_execution_engine = __esm({
|
|
76628
75254
|
"src/state-machine-execution-engine.ts"() {
|
|
76629
75255
|
"use strict";
|
|
76630
75256
|
init_runner();
|
|
76631
75257
|
init_logger();
|
|
76632
75258
|
init_sandbox_manager();
|
|
76633
|
-
|
|
76634
|
-
|
|
75259
|
+
path30 = __toESM(require("path"));
|
|
75260
|
+
fs28 = __toESM(require("fs"));
|
|
76635
75261
|
StateMachineExecutionEngine = class _StateMachineExecutionEngine {
|
|
76636
75262
|
workingDirectory;
|
|
76637
75263
|
executionContext;
|
|
@@ -76864,8 +75490,8 @@ var init_state_machine_execution_engine = __esm({
|
|
|
76864
75490
|
logger.debug(
|
|
76865
75491
|
`[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`
|
|
76866
75492
|
);
|
|
76867
|
-
const { loadEnterprisePolicyEngine
|
|
76868
|
-
context2.policyEngine = await
|
|
75493
|
+
const { loadEnterprisePolicyEngine } = await import("./enterprise/loader");
|
|
75494
|
+
context2.policyEngine = await loadEnterprisePolicyEngine(configWithTagFilter.policy);
|
|
76869
75495
|
logger.debug(
|
|
76870
75496
|
`[PolicyEngine] Initialized: ${context2.policyEngine?.constructor?.name || "unknown"}`
|
|
76871
75497
|
);
|
|
@@ -77019,9 +75645,9 @@ var init_state_machine_execution_engine = __esm({
|
|
|
77019
75645
|
}
|
|
77020
75646
|
const checkId = String(ev?.checkId || "unknown");
|
|
77021
75647
|
const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
|
|
77022
|
-
const baseDir = process.env.VISOR_SNAPSHOT_DIR ||
|
|
77023
|
-
|
|
77024
|
-
const filePath =
|
|
75648
|
+
const baseDir = process.env.VISOR_SNAPSHOT_DIR || path30.resolve(process.cwd(), ".visor", "snapshots");
|
|
75649
|
+
fs28.mkdirSync(baseDir, { recursive: true });
|
|
75650
|
+
const filePath = path30.join(baseDir, `${threadKey}-${checkId}.json`);
|
|
77025
75651
|
await this.saveSnapshotToFile(filePath);
|
|
77026
75652
|
logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
|
|
77027
75653
|
try {
|
|
@@ -77162,7 +75788,7 @@ var init_state_machine_execution_engine = __esm({
|
|
|
77162
75788
|
* Does not include secrets. Intended for debugging and future resume support.
|
|
77163
75789
|
*/
|
|
77164
75790
|
async saveSnapshotToFile(filePath) {
|
|
77165
|
-
const
|
|
75791
|
+
const fs29 = await import("fs/promises");
|
|
77166
75792
|
const ctx = this._lastContext;
|
|
77167
75793
|
const runner = this._lastRunner;
|
|
77168
75794
|
if (!ctx || !runner) {
|
|
@@ -77182,14 +75808,14 @@ var init_state_machine_execution_engine = __esm({
|
|
|
77182
75808
|
journal: entries,
|
|
77183
75809
|
requestedChecks: ctx.requestedChecks || []
|
|
77184
75810
|
};
|
|
77185
|
-
await
|
|
75811
|
+
await fs29.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
|
|
77186
75812
|
}
|
|
77187
75813
|
/**
|
|
77188
75814
|
* Load a snapshot JSON from file and return it. Resume support can build on this.
|
|
77189
75815
|
*/
|
|
77190
75816
|
async loadSnapshotFromFile(filePath) {
|
|
77191
|
-
const
|
|
77192
|
-
const raw = await
|
|
75817
|
+
const fs29 = await import("fs/promises");
|
|
75818
|
+
const raw = await fs29.readFile(filePath, "utf8");
|
|
77193
75819
|
return JSON.parse(raw);
|
|
77194
75820
|
}
|
|
77195
75821
|
/**
|