@probelabs/visor 0.1.146 → 0.1.147-ee
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai-review-service.d.ts.map +1 -1
- package/dist/cli-main.d.ts.map +1 -1
- package/dist/config.d.ts.map +1 -1
- package/dist/github-auth.d.ts +40 -0
- package/dist/github-auth.d.ts.map +1 -1
- package/dist/index.js +2437 -350
- package/dist/sandbox/bubblewrap-sandbox.d.ts +2 -1
- package/dist/sandbox/bubblewrap-sandbox.d.ts.map +1 -1
- package/dist/sandbox/seatbelt-sandbox.d.ts +2 -1
- package/dist/sandbox/seatbelt-sandbox.d.ts.map +1 -1
- package/dist/scheduler/scheduler.d.ts.map +1 -1
- package/dist/sdk/{check-provider-registry-TSAMMJ7Q.mjs → check-provider-registry-LBYIKFYM.mjs} +7 -7
- package/dist/sdk/{check-provider-registry-WSFL2SVQ.mjs → check-provider-registry-SCPM6DIT.mjs} +7 -7
- package/dist/sdk/{chunk-OM3WYVFI.mjs → chunk-4F5UVWAN.mjs} +2 -2
- package/dist/sdk/{chunk-OM3WYVFI.mjs.map → chunk-4F5UVWAN.mjs.map} +1 -1
- package/dist/sdk/{chunk-74YJMONB.mjs → chunk-EWGX7LI7.mjs} +69 -27
- package/dist/sdk/chunk-EWGX7LI7.mjs.map +1 -0
- package/dist/sdk/{chunk-LVOWWALU.mjs → chunk-FBJ7MC7R.mjs} +3 -3
- package/dist/sdk/{chunk-4SYQL5UQ.mjs → chunk-PNZH3JSI.mjs} +70 -28
- package/dist/sdk/chunk-PNZH3JSI.mjs.map +1 -0
- package/dist/sdk/{chunk-LSCWRTSY.mjs → chunk-V2QW6ECX.mjs} +2 -2
- package/dist/sdk/{chunk-YOKAA4IU.mjs → chunk-XNTBSV6M.mjs} +2 -7
- package/dist/sdk/{chunk-YOKAA4IU.mjs.map → chunk-XNTBSV6M.mjs.map} +1 -1
- package/dist/sdk/{config-AAB2FL22.mjs → config-G5UU4WXT.mjs} +2 -2
- package/dist/sdk/{failure-condition-evaluator-O464EJMD.mjs → failure-condition-evaluator-FHNZL2US.mjs} +3 -3
- package/dist/sdk/github-auth-UPBBBOME.mjs +196 -0
- package/dist/sdk/github-auth-UPBBBOME.mjs.map +1 -0
- package/dist/sdk/{github-frontend-4L5YDHM4.mjs → github-frontend-47EU2HBY.mjs} +3 -3
- package/dist/sdk/{host-5BJ25CUZ.mjs → host-GVR4UGZ3.mjs} +2 -2
- package/dist/sdk/{host-GYZ7XCLI.mjs → host-KGN5OIAM.mjs} +2 -2
- package/dist/sdk/knex-store-HPXJILBL.mjs +411 -0
- package/dist/sdk/knex-store-HPXJILBL.mjs.map +1 -0
- package/dist/sdk/loader-YSRMVXC3.mjs +89 -0
- package/dist/sdk/loader-YSRMVXC3.mjs.map +1 -0
- package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs +655 -0
- package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs.map +1 -0
- package/dist/sdk/{routing-CQDKRPTO.mjs → routing-CZ36LVVS.mjs} +4 -4
- package/dist/sdk/{schedule-tool-handler-4YUM6Z5F.mjs → schedule-tool-handler-E7XHMU5G.mjs} +7 -7
- package/dist/sdk/{schedule-tool-handler-62K3NGH6.mjs → schedule-tool-handler-KFYNV7HL.mjs} +7 -7
- package/dist/sdk/sdk.js +1568 -273
- package/dist/sdk/sdk.js.map +1 -1
- package/dist/sdk/sdk.mjs +6 -6
- package/dist/sdk/{trace-helpers-QQSTZGDT.mjs → trace-helpers-EHDZ42HH.mjs} +2 -2
- package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
- package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
- package/dist/sdk/{workflow-check-provider-OM62QYHF.mjs → workflow-check-provider-5453TW65.mjs} +7 -7
- package/dist/sdk/{workflow-check-provider-XC7E5OFH.mjs → workflow-check-provider-BSUSPFOF.mjs} +7 -7
- package/dist/slack/socket-runner.d.ts.map +1 -1
- package/dist/test-runner/core/flow-stage.d.ts +2 -1
- package/dist/test-runner/core/flow-stage.d.ts.map +1 -1
- package/dist/test-runner/index.d.ts.map +1 -1
- package/dist/test-runner/validator.d.ts.map +1 -1
- package/dist/utils/worktree-manager.d.ts +11 -1
- package/dist/utils/worktree-manager.d.ts.map +1 -1
- package/package.json +2 -2
- package/dist/output/traces/run-2026-02-26T07-50-40-741Z.ndjson +0 -138
- package/dist/output/traces/run-2026-02-26T07-51-32-586Z.ndjson +0 -1442
- package/dist/sdk/check-provider-registry-HK6M4PDQ.mjs +0 -30
- package/dist/sdk/chunk-4SYQL5UQ.mjs.map +0 -1
- package/dist/sdk/chunk-74YJMONB.mjs.map +0 -1
- package/dist/sdk/chunk-E6SMU2Z4.mjs +0 -40651
- package/dist/sdk/chunk-E6SMU2Z4.mjs.map +0 -1
- package/dist/sdk/chunk-I42ZCVA5.mjs +0 -1502
- package/dist/sdk/chunk-L3XPYQ6I.mjs +0 -739
- package/dist/sdk/chunk-LSCWRTSY.mjs.map +0 -1
- package/dist/sdk/chunk-LVOWWALU.mjs.map +0 -1
- package/dist/sdk/chunk-Y4DBNDLQ.mjs +0 -443
- package/dist/sdk/chunk-Y4DBNDLQ.mjs.map +0 -1
- package/dist/sdk/failure-condition-evaluator-SMOVMMES.mjs +0 -17
- package/dist/sdk/github-frontend-MSX6Q2WL.mjs +0 -1356
- package/dist/sdk/github-frontend-MSX6Q2WL.mjs.map +0 -1
- package/dist/sdk/routing-RIHVCEIU.mjs +0 -25
- package/dist/sdk/schedule-tool-handler-4O2VKNG2.mjs +0 -40
- package/dist/sdk/schedule-tool-handler-62K3NGH6.mjs.map +0 -1
- package/dist/sdk/trace-helpers-AWCFW5KG.mjs +0 -25
- package/dist/sdk/trace-helpers-AWCFW5KG.mjs.map +0 -1
- package/dist/sdk/trace-helpers-QQSTZGDT.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-3N7HDIN6.mjs +0 -30
- package/dist/sdk/workflow-check-provider-3N7HDIN6.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-OM62QYHF.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-XC7E5OFH.mjs.map +0 -1
- package/dist/traces/run-2026-02-26T07-50-40-741Z.ndjson +0 -138
- package/dist/traces/run-2026-02-26T07-51-32-586Z.ndjson +0 -1442
- /package/dist/sdk/{check-provider-registry-HK6M4PDQ.mjs.map → check-provider-registry-LBYIKFYM.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-TSAMMJ7Q.mjs.map → check-provider-registry-SCPM6DIT.mjs.map} +0 -0
- /package/dist/sdk/{chunk-I42ZCVA5.mjs.map → chunk-FBJ7MC7R.mjs.map} +0 -0
- /package/dist/sdk/{chunk-L3XPYQ6I.mjs.map → chunk-V2QW6ECX.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-WSFL2SVQ.mjs.map → config-G5UU4WXT.mjs.map} +0 -0
- /package/dist/sdk/{config-AAB2FL22.mjs.map → failure-condition-evaluator-FHNZL2US.mjs.map} +0 -0
- /package/dist/sdk/{github-frontend-4L5YDHM4.mjs.map → github-frontend-47EU2HBY.mjs.map} +0 -0
- /package/dist/sdk/{host-5BJ25CUZ.mjs.map → host-GVR4UGZ3.mjs.map} +0 -0
- /package/dist/sdk/{host-GYZ7XCLI.mjs.map → host-KGN5OIAM.mjs.map} +0 -0
- /package/dist/sdk/{failure-condition-evaluator-O464EJMD.mjs.map → routing-CZ36LVVS.mjs.map} +0 -0
- /package/dist/sdk/{failure-condition-evaluator-SMOVMMES.mjs.map → schedule-tool-handler-E7XHMU5G.mjs.map} +0 -0
- /package/dist/sdk/{routing-CQDKRPTO.mjs.map → schedule-tool-handler-KFYNV7HL.mjs.map} +0 -0
- /package/dist/sdk/{routing-RIHVCEIU.mjs.map → trace-helpers-EHDZ42HH.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-4O2VKNG2.mjs.map → workflow-check-provider-5453TW65.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-4YUM6Z5F.mjs.map → workflow-check-provider-BSUSPFOF.mjs.map} +0 -0
package/dist/sdk/sdk.js
CHANGED
|
@@ -646,7 +646,7 @@ var require_package = __commonJS({
|
|
|
646
646
|
"package.json"(exports2, module2) {
|
|
647
647
|
module2.exports = {
|
|
648
648
|
name: "@probelabs/visor",
|
|
649
|
-
version: "0.1.
|
|
649
|
+
version: "0.1.42",
|
|
650
650
|
main: "dist/index.js",
|
|
651
651
|
bin: {
|
|
652
652
|
visor: "./dist/index.js"
|
|
@@ -760,7 +760,7 @@ var require_package = __commonJS({
|
|
|
760
760
|
"@opentelemetry/sdk-node": "^0.203.0",
|
|
761
761
|
"@opentelemetry/sdk-trace-base": "^1.30.1",
|
|
762
762
|
"@opentelemetry/semantic-conventions": "^1.30.1",
|
|
763
|
-
"@probelabs/probe": "^0.6.0-
|
|
763
|
+
"@probelabs/probe": "^0.6.0-rc262",
|
|
764
764
|
"@types/commander": "^2.12.0",
|
|
765
765
|
"@types/uuid": "^10.0.0",
|
|
766
766
|
acorn: "^8.16.0",
|
|
@@ -864,11 +864,11 @@ function getTracer() {
|
|
|
864
864
|
}
|
|
865
865
|
async function withActiveSpan(name, attrs, fn) {
|
|
866
866
|
const tracer = getTracer();
|
|
867
|
-
return await new Promise((
|
|
867
|
+
return await new Promise((resolve17, reject) => {
|
|
868
868
|
const callback = async (span) => {
|
|
869
869
|
try {
|
|
870
870
|
const res = await fn(span);
|
|
871
|
-
|
|
871
|
+
resolve17(res);
|
|
872
872
|
} catch (err) {
|
|
873
873
|
try {
|
|
874
874
|
if (err instanceof Error) span.recordException(err);
|
|
@@ -945,19 +945,19 @@ function __getOrCreateNdjsonPath() {
|
|
|
945
945
|
try {
|
|
946
946
|
if (process.env.VISOR_TELEMETRY_SINK && process.env.VISOR_TELEMETRY_SINK !== "file")
|
|
947
947
|
return null;
|
|
948
|
-
const
|
|
949
|
-
const
|
|
948
|
+
const path30 = require("path");
|
|
949
|
+
const fs26 = require("fs");
|
|
950
950
|
if (process.env.VISOR_FALLBACK_TRACE_FILE) {
|
|
951
951
|
__ndjsonPath = process.env.VISOR_FALLBACK_TRACE_FILE;
|
|
952
|
-
const dir =
|
|
953
|
-
if (!
|
|
952
|
+
const dir = path30.dirname(__ndjsonPath);
|
|
953
|
+
if (!fs26.existsSync(dir)) fs26.mkdirSync(dir, { recursive: true });
|
|
954
954
|
return __ndjsonPath;
|
|
955
955
|
}
|
|
956
|
-
const outDir = process.env.VISOR_TRACE_DIR ||
|
|
957
|
-
if (!
|
|
956
|
+
const outDir = process.env.VISOR_TRACE_DIR || path30.join(process.cwd(), "output", "traces");
|
|
957
|
+
if (!fs26.existsSync(outDir)) fs26.mkdirSync(outDir, { recursive: true });
|
|
958
958
|
if (!__ndjsonPath) {
|
|
959
959
|
const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
960
|
-
__ndjsonPath =
|
|
960
|
+
__ndjsonPath = path30.join(outDir, `${ts}.ndjson`);
|
|
961
961
|
}
|
|
962
962
|
return __ndjsonPath;
|
|
963
963
|
} catch {
|
|
@@ -966,11 +966,11 @@ function __getOrCreateNdjsonPath() {
|
|
|
966
966
|
}
|
|
967
967
|
function _appendRunMarker() {
|
|
968
968
|
try {
|
|
969
|
-
const
|
|
969
|
+
const fs26 = require("fs");
|
|
970
970
|
const p = __getOrCreateNdjsonPath();
|
|
971
971
|
if (!p) return;
|
|
972
972
|
const line = { name: "visor.run", attributes: { started: true } };
|
|
973
|
-
|
|
973
|
+
fs26.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
|
|
974
974
|
} catch {
|
|
975
975
|
}
|
|
976
976
|
}
|
|
@@ -3193,7 +3193,7 @@ var init_failure_condition_evaluator = __esm({
|
|
|
3193
3193
|
*/
|
|
3194
3194
|
evaluateExpression(condition, context2) {
|
|
3195
3195
|
try {
|
|
3196
|
-
const
|
|
3196
|
+
const normalize8 = (expr) => {
|
|
3197
3197
|
const trimmed = expr.trim();
|
|
3198
3198
|
if (!/[\n;]/.test(trimmed)) return trimmed;
|
|
3199
3199
|
const parts = trimmed.split(/[\n;]+/).map((s) => s.trim()).filter((s) => s.length > 0 && !s.startsWith("//"));
|
|
@@ -3351,7 +3351,7 @@ var init_failure_condition_evaluator = __esm({
|
|
|
3351
3351
|
try {
|
|
3352
3352
|
exec2 = this.sandbox.compile(`return (${raw});`);
|
|
3353
3353
|
} catch {
|
|
3354
|
-
const normalizedExpr =
|
|
3354
|
+
const normalizedExpr = normalize8(condition);
|
|
3355
3355
|
exec2 = this.sandbox.compile(`return (${normalizedExpr});`);
|
|
3356
3356
|
}
|
|
3357
3357
|
const result = exec2(scope).run();
|
|
@@ -3734,9 +3734,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
3734
3734
|
});
|
|
3735
3735
|
liquid.registerFilter("get", (obj, pathExpr) => {
|
|
3736
3736
|
if (obj == null) return void 0;
|
|
3737
|
-
const
|
|
3738
|
-
if (!
|
|
3739
|
-
const parts =
|
|
3737
|
+
const path30 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
|
|
3738
|
+
if (!path30) return obj;
|
|
3739
|
+
const parts = path30.split(".");
|
|
3740
3740
|
let cur = obj;
|
|
3741
3741
|
for (const p of parts) {
|
|
3742
3742
|
if (cur == null) return void 0;
|
|
@@ -3855,9 +3855,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
3855
3855
|
}
|
|
3856
3856
|
}
|
|
3857
3857
|
const defaultRole = typeof rolesCfg.default === "string" && rolesCfg.default.trim() ? rolesCfg.default.trim() : void 0;
|
|
3858
|
-
const getNested = (obj,
|
|
3859
|
-
if (!obj || !
|
|
3860
|
-
const parts =
|
|
3858
|
+
const getNested = (obj, path30) => {
|
|
3859
|
+
if (!obj || !path30) return void 0;
|
|
3860
|
+
const parts = path30.split(".");
|
|
3861
3861
|
let cur = obj;
|
|
3862
3862
|
for (const p of parts) {
|
|
3863
3863
|
if (cur == null) return void 0;
|
|
@@ -6409,8 +6409,8 @@ var init_dependency_gating = __esm({
|
|
|
6409
6409
|
async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
6410
6410
|
try {
|
|
6411
6411
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
6412
|
-
const
|
|
6413
|
-
const
|
|
6412
|
+
const fs26 = await import("fs/promises");
|
|
6413
|
+
const path30 = await import("path");
|
|
6414
6414
|
const schemaRaw = checkConfig.schema || "plain";
|
|
6415
6415
|
const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
|
|
6416
6416
|
let templateContent;
|
|
@@ -6418,24 +6418,24 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
|
6418
6418
|
templateContent = String(checkConfig.template.content);
|
|
6419
6419
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
6420
6420
|
const file = String(checkConfig.template.file);
|
|
6421
|
-
const resolved =
|
|
6422
|
-
templateContent = await
|
|
6421
|
+
const resolved = path30.resolve(process.cwd(), file);
|
|
6422
|
+
templateContent = await fs26.readFile(resolved, "utf-8");
|
|
6423
6423
|
} else if (schema && schema !== "plain") {
|
|
6424
6424
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
6425
6425
|
if (sanitized) {
|
|
6426
6426
|
const candidatePaths = [
|
|
6427
|
-
|
|
6427
|
+
path30.join(__dirname, "output", sanitized, "template.liquid"),
|
|
6428
6428
|
// bundled: dist/output/
|
|
6429
|
-
|
|
6429
|
+
path30.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
6430
6430
|
// source: output/
|
|
6431
|
-
|
|
6431
|
+
path30.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
6432
6432
|
// fallback: cwd/output/
|
|
6433
|
-
|
|
6433
|
+
path30.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
6434
6434
|
// fallback: cwd/dist/output/
|
|
6435
6435
|
];
|
|
6436
6436
|
for (const p of candidatePaths) {
|
|
6437
6437
|
try {
|
|
6438
|
-
templateContent = await
|
|
6438
|
+
templateContent = await fs26.readFile(p, "utf-8");
|
|
6439
6439
|
if (templateContent) break;
|
|
6440
6440
|
} catch {
|
|
6441
6441
|
}
|
|
@@ -6840,7 +6840,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
6840
6840
|
}
|
|
6841
6841
|
try {
|
|
6842
6842
|
const originalProbePath = process.env.PROBE_PATH;
|
|
6843
|
-
const
|
|
6843
|
+
const fs26 = require("fs");
|
|
6844
6844
|
const possiblePaths = [
|
|
6845
6845
|
// Relative to current working directory (most common in production)
|
|
6846
6846
|
path6.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
|
|
@@ -6851,7 +6851,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
6851
6851
|
];
|
|
6852
6852
|
let probeBinaryPath;
|
|
6853
6853
|
for (const candidatePath of possiblePaths) {
|
|
6854
|
-
if (
|
|
6854
|
+
if (fs26.existsSync(candidatePath)) {
|
|
6855
6855
|
probeBinaryPath = candidatePath;
|
|
6856
6856
|
break;
|
|
6857
6857
|
}
|
|
@@ -6972,7 +6972,7 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
6972
6972
|
if (chromiumPath) {
|
|
6973
6973
|
env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;
|
|
6974
6974
|
}
|
|
6975
|
-
const result = await new Promise((
|
|
6975
|
+
const result = await new Promise((resolve17) => {
|
|
6976
6976
|
const proc = (0, import_child_process.spawn)(
|
|
6977
6977
|
"npx",
|
|
6978
6978
|
[
|
|
@@ -7002,13 +7002,13 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
7002
7002
|
});
|
|
7003
7003
|
proc.on("close", (code) => {
|
|
7004
7004
|
if (code === 0) {
|
|
7005
|
-
|
|
7005
|
+
resolve17({ success: true });
|
|
7006
7006
|
} else {
|
|
7007
|
-
|
|
7007
|
+
resolve17({ success: false, error: stderr || `Exit code ${code}` });
|
|
7008
7008
|
}
|
|
7009
7009
|
});
|
|
7010
7010
|
proc.on("error", (err) => {
|
|
7011
|
-
|
|
7011
|
+
resolve17({ success: false, error: err.message });
|
|
7012
7012
|
});
|
|
7013
7013
|
});
|
|
7014
7014
|
if (!result.success) {
|
|
@@ -8153,8 +8153,8 @@ ${schemaString}`);
|
|
|
8153
8153
|
}
|
|
8154
8154
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8155
8155
|
try {
|
|
8156
|
-
const
|
|
8157
|
-
const
|
|
8156
|
+
const fs26 = require("fs");
|
|
8157
|
+
const path30 = require("path");
|
|
8158
8158
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8159
8159
|
const provider = this.config.provider || "auto";
|
|
8160
8160
|
const model = this.config.model || "default";
|
|
@@ -8268,20 +8268,20 @@ ${"=".repeat(60)}
|
|
|
8268
8268
|
`;
|
|
8269
8269
|
readableVersion += `${"=".repeat(60)}
|
|
8270
8270
|
`;
|
|
8271
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8272
|
-
if (!
|
|
8273
|
-
|
|
8271
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path30.join(process.cwd(), "debug-artifacts");
|
|
8272
|
+
if (!fs26.existsSync(debugArtifactsDir)) {
|
|
8273
|
+
fs26.mkdirSync(debugArtifactsDir, { recursive: true });
|
|
8274
8274
|
}
|
|
8275
|
-
const debugFile =
|
|
8275
|
+
const debugFile = path30.join(
|
|
8276
8276
|
debugArtifactsDir,
|
|
8277
8277
|
`prompt-${_checkName || "unknown"}-${timestamp}.json`
|
|
8278
8278
|
);
|
|
8279
|
-
|
|
8280
|
-
const readableFile =
|
|
8279
|
+
fs26.writeFileSync(debugFile, debugJson, "utf-8");
|
|
8280
|
+
const readableFile = path30.join(
|
|
8281
8281
|
debugArtifactsDir,
|
|
8282
8282
|
`prompt-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8283
8283
|
);
|
|
8284
|
-
|
|
8284
|
+
fs26.writeFileSync(readableFile, readableVersion, "utf-8");
|
|
8285
8285
|
log(`
|
|
8286
8286
|
\u{1F4BE} Full debug info saved to:`);
|
|
8287
8287
|
log(` JSON: ${debugFile}`);
|
|
@@ -8314,8 +8314,8 @@ ${"=".repeat(60)}
|
|
|
8314
8314
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
8315
8315
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8316
8316
|
try {
|
|
8317
|
-
const
|
|
8318
|
-
const
|
|
8317
|
+
const fs26 = require("fs");
|
|
8318
|
+
const path30 = require("path");
|
|
8319
8319
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8320
8320
|
const agentAny2 = agent;
|
|
8321
8321
|
let fullHistory = [];
|
|
@@ -8326,8 +8326,8 @@ ${"=".repeat(60)}
|
|
|
8326
8326
|
} else if (agentAny2._messages) {
|
|
8327
8327
|
fullHistory = agentAny2._messages;
|
|
8328
8328
|
}
|
|
8329
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8330
|
-
const sessionBase =
|
|
8329
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path30.join(process.cwd(), "debug-artifacts");
|
|
8330
|
+
const sessionBase = path30.join(
|
|
8331
8331
|
debugArtifactsDir,
|
|
8332
8332
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
8333
8333
|
);
|
|
@@ -8339,7 +8339,7 @@ ${"=".repeat(60)}
|
|
|
8339
8339
|
schema: effectiveSchema,
|
|
8340
8340
|
totalMessages: fullHistory.length
|
|
8341
8341
|
};
|
|
8342
|
-
|
|
8342
|
+
fs26.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
8343
8343
|
let readable = `=============================================================
|
|
8344
8344
|
`;
|
|
8345
8345
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -8366,7 +8366,7 @@ ${"=".repeat(60)}
|
|
|
8366
8366
|
`;
|
|
8367
8367
|
readable += content + "\n";
|
|
8368
8368
|
});
|
|
8369
|
-
|
|
8369
|
+
fs26.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
8370
8370
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
8371
8371
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
8372
8372
|
} catch (error) {
|
|
@@ -8375,11 +8375,11 @@ ${"=".repeat(60)}
|
|
|
8375
8375
|
}
|
|
8376
8376
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8377
8377
|
try {
|
|
8378
|
-
const
|
|
8379
|
-
const
|
|
8378
|
+
const fs26 = require("fs");
|
|
8379
|
+
const path30 = require("path");
|
|
8380
8380
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8381
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8382
|
-
const responseFile =
|
|
8381
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path30.join(process.cwd(), "debug-artifacts");
|
|
8382
|
+
const responseFile = path30.join(
|
|
8383
8383
|
debugArtifactsDir,
|
|
8384
8384
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8385
8385
|
);
|
|
@@ -8412,7 +8412,7 @@ ${"=".repeat(60)}
|
|
|
8412
8412
|
`;
|
|
8413
8413
|
responseContent += `${"=".repeat(60)}
|
|
8414
8414
|
`;
|
|
8415
|
-
|
|
8415
|
+
fs26.writeFileSync(responseFile, responseContent, "utf-8");
|
|
8416
8416
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
8417
8417
|
} catch (error) {
|
|
8418
8418
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -8428,9 +8428,9 @@ ${"=".repeat(60)}
|
|
|
8428
8428
|
await agentAny._telemetryConfig.shutdown();
|
|
8429
8429
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
|
|
8430
8430
|
if (process.env.GITHUB_ACTIONS) {
|
|
8431
|
-
const
|
|
8432
|
-
if (
|
|
8433
|
-
const stats =
|
|
8431
|
+
const fs26 = require("fs");
|
|
8432
|
+
if (fs26.existsSync(agentAny._traceFilePath)) {
|
|
8433
|
+
const stats = fs26.statSync(agentAny._traceFilePath);
|
|
8434
8434
|
console.log(
|
|
8435
8435
|
`::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
|
|
8436
8436
|
);
|
|
@@ -8501,12 +8501,15 @@ ${"=".repeat(60)}
|
|
|
8501
8501
|
if (!systemPrompt && schema !== "code-review") {
|
|
8502
8502
|
systemPrompt = "You are general assistant, follow user instructions.";
|
|
8503
8503
|
}
|
|
8504
|
+
log(
|
|
8505
|
+
`\u{1F527} AIReviewService config: allowEdit=${this.config.allowEdit}, allowBash=${this.config.allowBash}, promptType=${this.config.promptType}`
|
|
8506
|
+
);
|
|
8504
8507
|
const options = {
|
|
8505
8508
|
sessionId,
|
|
8506
8509
|
// Prefer config promptType, then env override, else fallback to code-review when schema is set
|
|
8507
8510
|
promptType: this.config.promptType && this.config.promptType.trim() ? this.config.promptType.trim() : explicitPromptType ? explicitPromptType : schema === "code-review" ? "code-review-template" : void 0,
|
|
8508
8511
|
allowEdit: false,
|
|
8509
|
-
//
|
|
8512
|
+
// Default: don't allow file modifications
|
|
8510
8513
|
debug: this.config.debug || false,
|
|
8511
8514
|
// Use systemPrompt (native in rc168+) with fallback to customPrompt for backward compat
|
|
8512
8515
|
systemPrompt: systemPrompt || this.config.systemPrompt || this.config.customPrompt
|
|
@@ -8595,6 +8598,9 @@ ${"=".repeat(60)}
|
|
|
8595
8598
|
if (this.config.model) {
|
|
8596
8599
|
options.model = this.config.model;
|
|
8597
8600
|
}
|
|
8601
|
+
log(
|
|
8602
|
+
`\u{1F527} ProbeAgent options: allowEdit=${options.allowEdit}, enableBash=${options.enableBash}, promptType=${options.promptType}`
|
|
8603
|
+
);
|
|
8598
8604
|
const agent = new import_probe2.ProbeAgent(options);
|
|
8599
8605
|
if (typeof agent.initialize === "function") {
|
|
8600
8606
|
await agent.initialize();
|
|
@@ -8631,9 +8637,9 @@ ${schemaString}`);
|
|
|
8631
8637
|
const model = this.config.model || "default";
|
|
8632
8638
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8633
8639
|
try {
|
|
8634
|
-
const
|
|
8635
|
-
const
|
|
8636
|
-
const
|
|
8640
|
+
const fs26 = require("fs");
|
|
8641
|
+
const path30 = require("path");
|
|
8642
|
+
const os3 = require("os");
|
|
8637
8643
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8638
8644
|
const debugData = {
|
|
8639
8645
|
timestamp,
|
|
@@ -8705,19 +8711,19 @@ ${"=".repeat(60)}
|
|
|
8705
8711
|
`;
|
|
8706
8712
|
readableVersion += `${"=".repeat(60)}
|
|
8707
8713
|
`;
|
|
8708
|
-
const tempDir =
|
|
8709
|
-
const promptFile =
|
|
8710
|
-
|
|
8714
|
+
const tempDir = os3.tmpdir();
|
|
8715
|
+
const promptFile = path30.join(tempDir, `visor-prompt-${timestamp}.txt`);
|
|
8716
|
+
fs26.writeFileSync(promptFile, prompt, "utf-8");
|
|
8711
8717
|
log(`
|
|
8712
8718
|
\u{1F4BE} Prompt saved to: ${promptFile}`);
|
|
8713
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8719
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path30.join(process.cwd(), "debug-artifacts");
|
|
8714
8720
|
try {
|
|
8715
|
-
const base =
|
|
8721
|
+
const base = path30.join(
|
|
8716
8722
|
debugArtifactsDir,
|
|
8717
8723
|
`prompt-${_checkName || "unknown"}-${timestamp}`
|
|
8718
8724
|
);
|
|
8719
|
-
|
|
8720
|
-
|
|
8725
|
+
fs26.writeFileSync(base + ".json", debugJson, "utf-8");
|
|
8726
|
+
fs26.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
|
|
8721
8727
|
log(`
|
|
8722
8728
|
\u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
|
|
8723
8729
|
} catch {
|
|
@@ -8762,8 +8768,8 @@ $ ${cliCommand}
|
|
|
8762
8768
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
8763
8769
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8764
8770
|
try {
|
|
8765
|
-
const
|
|
8766
|
-
const
|
|
8771
|
+
const fs26 = require("fs");
|
|
8772
|
+
const path30 = require("path");
|
|
8767
8773
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8768
8774
|
const agentAny = agent;
|
|
8769
8775
|
let fullHistory = [];
|
|
@@ -8774,8 +8780,8 @@ $ ${cliCommand}
|
|
|
8774
8780
|
} else if (agentAny._messages) {
|
|
8775
8781
|
fullHistory = agentAny._messages;
|
|
8776
8782
|
}
|
|
8777
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8778
|
-
const sessionBase =
|
|
8783
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path30.join(process.cwd(), "debug-artifacts");
|
|
8784
|
+
const sessionBase = path30.join(
|
|
8779
8785
|
debugArtifactsDir,
|
|
8780
8786
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
8781
8787
|
);
|
|
@@ -8787,7 +8793,7 @@ $ ${cliCommand}
|
|
|
8787
8793
|
schema: effectiveSchema,
|
|
8788
8794
|
totalMessages: fullHistory.length
|
|
8789
8795
|
};
|
|
8790
|
-
|
|
8796
|
+
fs26.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
8791
8797
|
let readable = `=============================================================
|
|
8792
8798
|
`;
|
|
8793
8799
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -8814,7 +8820,7 @@ ${"=".repeat(60)}
|
|
|
8814
8820
|
`;
|
|
8815
8821
|
readable += content + "\n";
|
|
8816
8822
|
});
|
|
8817
|
-
|
|
8823
|
+
fs26.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
8818
8824
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
8819
8825
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
8820
8826
|
} catch (error) {
|
|
@@ -8823,11 +8829,11 @@ ${"=".repeat(60)}
|
|
|
8823
8829
|
}
|
|
8824
8830
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8825
8831
|
try {
|
|
8826
|
-
const
|
|
8827
|
-
const
|
|
8832
|
+
const fs26 = require("fs");
|
|
8833
|
+
const path30 = require("path");
|
|
8828
8834
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8829
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8830
|
-
const responseFile =
|
|
8835
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path30.join(process.cwd(), "debug-artifacts");
|
|
8836
|
+
const responseFile = path30.join(
|
|
8831
8837
|
debugArtifactsDir,
|
|
8832
8838
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8833
8839
|
);
|
|
@@ -8860,7 +8866,7 @@ ${"=".repeat(60)}
|
|
|
8860
8866
|
`;
|
|
8861
8867
|
responseContent += `${"=".repeat(60)}
|
|
8862
8868
|
`;
|
|
8863
|
-
|
|
8869
|
+
fs26.writeFileSync(responseFile, responseContent, "utf-8");
|
|
8864
8870
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
8865
8871
|
} catch (error) {
|
|
8866
8872
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -8878,9 +8884,9 @@ ${"=".repeat(60)}
|
|
|
8878
8884
|
await telemetry.shutdown();
|
|
8879
8885
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
|
|
8880
8886
|
if (process.env.GITHUB_ACTIONS) {
|
|
8881
|
-
const
|
|
8882
|
-
if (
|
|
8883
|
-
const stats =
|
|
8887
|
+
const fs26 = require("fs");
|
|
8888
|
+
if (fs26.existsSync(traceFilePath)) {
|
|
8889
|
+
const stats = fs26.statSync(traceFilePath);
|
|
8884
8890
|
console.log(
|
|
8885
8891
|
`::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
|
|
8886
8892
|
);
|
|
@@ -8918,8 +8924,8 @@ ${"=".repeat(60)}
|
|
|
8918
8924
|
* Load schema content from schema files or inline definitions
|
|
8919
8925
|
*/
|
|
8920
8926
|
async loadSchemaContent(schema) {
|
|
8921
|
-
const
|
|
8922
|
-
const
|
|
8927
|
+
const fs26 = require("fs").promises;
|
|
8928
|
+
const path30 = require("path");
|
|
8923
8929
|
if (typeof schema === "object" && schema !== null) {
|
|
8924
8930
|
log("\u{1F4CB} Using inline schema object from configuration");
|
|
8925
8931
|
return JSON.stringify(schema);
|
|
@@ -8932,14 +8938,14 @@ ${"=".repeat(60)}
|
|
|
8932
8938
|
}
|
|
8933
8939
|
} catch {
|
|
8934
8940
|
}
|
|
8935
|
-
if ((schema.startsWith("./") || schema.includes(".json")) && !
|
|
8941
|
+
if ((schema.startsWith("./") || schema.includes(".json")) && !path30.isAbsolute(schema)) {
|
|
8936
8942
|
if (schema.includes("..") || schema.includes("\0")) {
|
|
8937
8943
|
throw new Error("Invalid schema path: path traversal not allowed");
|
|
8938
8944
|
}
|
|
8939
8945
|
try {
|
|
8940
|
-
const schemaPath =
|
|
8946
|
+
const schemaPath = path30.resolve(process.cwd(), schema);
|
|
8941
8947
|
log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
|
|
8942
|
-
const schemaContent = await
|
|
8948
|
+
const schemaContent = await fs26.readFile(schemaPath, "utf-8");
|
|
8943
8949
|
return schemaContent.trim();
|
|
8944
8950
|
} catch (error) {
|
|
8945
8951
|
throw new Error(
|
|
@@ -8953,22 +8959,22 @@ ${"=".repeat(60)}
|
|
|
8953
8959
|
}
|
|
8954
8960
|
const candidatePaths = [
|
|
8955
8961
|
// GitHub Action bundle location
|
|
8956
|
-
|
|
8962
|
+
path30.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
|
|
8957
8963
|
// Historical fallback when src/output was inadvertently bundled as output1/
|
|
8958
|
-
|
|
8964
|
+
path30.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
|
|
8959
8965
|
// Local dev (repo root)
|
|
8960
|
-
|
|
8966
|
+
path30.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
|
|
8961
8967
|
];
|
|
8962
8968
|
for (const schemaPath of candidatePaths) {
|
|
8963
8969
|
try {
|
|
8964
|
-
const schemaContent = await
|
|
8970
|
+
const schemaContent = await fs26.readFile(schemaPath, "utf-8");
|
|
8965
8971
|
return schemaContent.trim();
|
|
8966
8972
|
} catch {
|
|
8967
8973
|
}
|
|
8968
8974
|
}
|
|
8969
|
-
const distPath =
|
|
8970
|
-
const distAltPath =
|
|
8971
|
-
const cwdPath =
|
|
8975
|
+
const distPath = path30.join(__dirname, "output", sanitizedSchemaName, "schema.json");
|
|
8976
|
+
const distAltPath = path30.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
|
|
8977
|
+
const cwdPath = path30.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
|
|
8972
8978
|
throw new Error(
|
|
8973
8979
|
`Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
|
|
8974
8980
|
);
|
|
@@ -9213,7 +9219,7 @@ ${"=".repeat(60)}
|
|
|
9213
9219
|
* Generate mock response for testing
|
|
9214
9220
|
*/
|
|
9215
9221
|
async generateMockResponse(_prompt, _checkName, _schema) {
|
|
9216
|
-
await new Promise((
|
|
9222
|
+
await new Promise((resolve17) => setTimeout(resolve17, 500));
|
|
9217
9223
|
const name = (_checkName || "").toLowerCase();
|
|
9218
9224
|
if (name.includes("extract-facts")) {
|
|
9219
9225
|
const arr = Array.from({ length: 6 }, (_, i) => ({
|
|
@@ -9574,7 +9580,7 @@ var init_command_executor = __esm({
|
|
|
9574
9580
|
* Execute command with stdin input
|
|
9575
9581
|
*/
|
|
9576
9582
|
executeWithStdin(command, options) {
|
|
9577
|
-
return new Promise((
|
|
9583
|
+
return new Promise((resolve17, reject) => {
|
|
9578
9584
|
const childProcess = (0, import_child_process2.exec)(
|
|
9579
9585
|
command,
|
|
9580
9586
|
{
|
|
@@ -9586,7 +9592,7 @@ var init_command_executor = __esm({
|
|
|
9586
9592
|
if (error && error.killed && (error.code === "ETIMEDOUT" || error.signal === "SIGTERM")) {
|
|
9587
9593
|
reject(new Error(`Command timed out after ${options.timeout || 3e4}ms`));
|
|
9588
9594
|
} else {
|
|
9589
|
-
|
|
9595
|
+
resolve17({
|
|
9590
9596
|
stdout: stdout || "",
|
|
9591
9597
|
stderr: stderr || "",
|
|
9592
9598
|
exitCode: error ? error.code || 1 : 0
|
|
@@ -15543,14 +15549,9 @@ ${errors}`);
|
|
|
15543
15549
|
const { WorkflowRegistry: WorkflowRegistry2 } = await Promise.resolve().then(() => (init_workflow_registry(), workflow_registry_exports));
|
|
15544
15550
|
const registry = WorkflowRegistry2.getInstance();
|
|
15545
15551
|
for (const source of config.imports) {
|
|
15546
|
-
const results = await registry.import(source, { basePath, validate: true });
|
|
15552
|
+
const results = await registry.import(source, { basePath, validate: true, override: true });
|
|
15547
15553
|
for (const result of results) {
|
|
15548
15554
|
if (!result.valid && result.errors) {
|
|
15549
|
-
const isAlreadyExists = result.errors.every((e) => e.message.includes("already exists"));
|
|
15550
|
-
if (isAlreadyExists) {
|
|
15551
|
-
logger.debug(`Workflow from '${source}' already imported, skipping`);
|
|
15552
|
-
continue;
|
|
15553
|
-
}
|
|
15554
15555
|
const errors = result.errors.map((e) => ` ${e.path}: ${e.message}`).join("\n");
|
|
15555
15556
|
throw new Error(`Failed to import workflow from '${source}':
|
|
15556
15557
|
${errors}`);
|
|
@@ -17421,17 +17422,17 @@ var init_workflow_check_provider = __esm({
|
|
|
17421
17422
|
* so it can be executed by the state machine as a nested workflow.
|
|
17422
17423
|
*/
|
|
17423
17424
|
async loadWorkflowFromConfigPath(sourcePath, baseDir) {
|
|
17424
|
-
const
|
|
17425
|
-
const
|
|
17425
|
+
const path30 = require("path");
|
|
17426
|
+
const fs26 = require("fs");
|
|
17426
17427
|
const yaml5 = require("js-yaml");
|
|
17427
|
-
const resolved =
|
|
17428
|
-
if (!
|
|
17428
|
+
const resolved = path30.isAbsolute(sourcePath) ? sourcePath : path30.resolve(baseDir, sourcePath);
|
|
17429
|
+
if (!fs26.existsSync(resolved)) {
|
|
17429
17430
|
throw new Error(`Workflow config not found at: ${resolved}`);
|
|
17430
17431
|
}
|
|
17431
|
-
const rawContent =
|
|
17432
|
+
const rawContent = fs26.readFileSync(resolved, "utf8");
|
|
17432
17433
|
const rawData = yaml5.load(rawContent);
|
|
17433
17434
|
if (rawData.imports && Array.isArray(rawData.imports)) {
|
|
17434
|
-
const configDir =
|
|
17435
|
+
const configDir = path30.dirname(resolved);
|
|
17435
17436
|
for (const source of rawData.imports) {
|
|
17436
17437
|
const results = await this.registry.import(source, {
|
|
17437
17438
|
basePath: configDir,
|
|
@@ -17461,8 +17462,8 @@ ${errors}`);
|
|
|
17461
17462
|
if (!steps || Object.keys(steps).length === 0) {
|
|
17462
17463
|
throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
|
|
17463
17464
|
}
|
|
17464
|
-
const id =
|
|
17465
|
-
const name = loaded.name || `Workflow from ${
|
|
17465
|
+
const id = path30.basename(resolved).replace(/\.(ya?ml)$/i, "");
|
|
17466
|
+
const name = loaded.name || `Workflow from ${path30.basename(resolved)}`;
|
|
17466
17467
|
const workflowDef = {
|
|
17467
17468
|
id,
|
|
17468
17469
|
name,
|
|
@@ -18092,8 +18093,8 @@ async function createStoreBackend(storageConfig, haConfig) {
|
|
|
18092
18093
|
case "mssql": {
|
|
18093
18094
|
try {
|
|
18094
18095
|
const loaderPath = "../../enterprise/loader";
|
|
18095
|
-
const { loadEnterpriseStoreBackend } = await import(loaderPath);
|
|
18096
|
-
return await
|
|
18096
|
+
const { loadEnterpriseStoreBackend: loadEnterpriseStoreBackend2 } = await import(loaderPath);
|
|
18097
|
+
return await loadEnterpriseStoreBackend2(driver, storageConfig, haConfig);
|
|
18097
18098
|
} catch (err) {
|
|
18098
18099
|
const msg = err instanceof Error ? err.message : String(err);
|
|
18099
18100
|
logger.error(`[StoreFactory] Failed to load enterprise ${driver} backend: ${msg}`);
|
|
@@ -19367,7 +19368,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19367
19368
|
* Returns the actual bound port number
|
|
19368
19369
|
*/
|
|
19369
19370
|
async start() {
|
|
19370
|
-
return new Promise((
|
|
19371
|
+
return new Promise((resolve17, reject) => {
|
|
19371
19372
|
try {
|
|
19372
19373
|
this.server = import_http.default.createServer((req, res) => {
|
|
19373
19374
|
this.handleRequest(req, res).catch((error) => {
|
|
@@ -19401,7 +19402,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19401
19402
|
);
|
|
19402
19403
|
}
|
|
19403
19404
|
this.startKeepalive();
|
|
19404
|
-
|
|
19405
|
+
resolve17(this.port);
|
|
19405
19406
|
});
|
|
19406
19407
|
} catch (error) {
|
|
19407
19408
|
reject(error);
|
|
@@ -19464,7 +19465,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19464
19465
|
logger.debug(
|
|
19465
19466
|
`[CustomToolsSSEServer:${this.sessionId}] Grace period before stop: ${waitMs}ms (activeToolCalls=${this.activeToolCalls})`
|
|
19466
19467
|
);
|
|
19467
|
-
await new Promise((
|
|
19468
|
+
await new Promise((resolve17) => setTimeout(resolve17, waitMs));
|
|
19468
19469
|
}
|
|
19469
19470
|
}
|
|
19470
19471
|
if (this.activeToolCalls > 0) {
|
|
@@ -19473,7 +19474,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19473
19474
|
`[CustomToolsSSEServer:${this.sessionId}] Waiting for ${this.activeToolCalls} active tool call(s) before stop`
|
|
19474
19475
|
);
|
|
19475
19476
|
while (this.activeToolCalls > 0 && Date.now() - startedAt < effectiveDrainTimeoutMs) {
|
|
19476
|
-
await new Promise((
|
|
19477
|
+
await new Promise((resolve17) => setTimeout(resolve17, 250));
|
|
19477
19478
|
}
|
|
19478
19479
|
if (this.activeToolCalls > 0) {
|
|
19479
19480
|
logger.warn(
|
|
@@ -19498,21 +19499,21 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19498
19499
|
}
|
|
19499
19500
|
this.connections.clear();
|
|
19500
19501
|
if (this.server) {
|
|
19501
|
-
await new Promise((
|
|
19502
|
+
await new Promise((resolve17, reject) => {
|
|
19502
19503
|
const timeout = setTimeout(() => {
|
|
19503
19504
|
if (this.debug) {
|
|
19504
19505
|
logger.debug(
|
|
19505
19506
|
`[CustomToolsSSEServer:${this.sessionId}] Force closing server after timeout`
|
|
19506
19507
|
);
|
|
19507
19508
|
}
|
|
19508
|
-
this.server?.close(() =>
|
|
19509
|
+
this.server?.close(() => resolve17());
|
|
19509
19510
|
}, 5e3);
|
|
19510
19511
|
this.server.close((error) => {
|
|
19511
19512
|
clearTimeout(timeout);
|
|
19512
19513
|
if (error) {
|
|
19513
19514
|
reject(error);
|
|
19514
19515
|
} else {
|
|
19515
|
-
|
|
19516
|
+
resolve17();
|
|
19516
19517
|
}
|
|
19517
19518
|
});
|
|
19518
19519
|
});
|
|
@@ -19938,7 +19939,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19938
19939
|
logger.warn(
|
|
19939
19940
|
`[CustomToolsSSEServer:${this.sessionId}] Tool ${toolName} failed (attempt ${attempt + 1}/${retryCount + 1}): ${errorMsg}. Retrying in ${delay}ms`
|
|
19940
19941
|
);
|
|
19941
|
-
await new Promise((
|
|
19942
|
+
await new Promise((resolve17) => setTimeout(resolve17, delay));
|
|
19942
19943
|
attempt++;
|
|
19943
19944
|
}
|
|
19944
19945
|
}
|
|
@@ -20241,9 +20242,9 @@ var init_ai_check_provider = __esm({
|
|
|
20241
20242
|
} else {
|
|
20242
20243
|
resolvedPath = import_path7.default.resolve(process.cwd(), str);
|
|
20243
20244
|
}
|
|
20244
|
-
const
|
|
20245
|
+
const fs26 = require("fs").promises;
|
|
20245
20246
|
try {
|
|
20246
|
-
const stat2 = await
|
|
20247
|
+
const stat2 = await fs26.stat(resolvedPath);
|
|
20247
20248
|
return stat2.isFile();
|
|
20248
20249
|
} catch {
|
|
20249
20250
|
return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
|
|
@@ -26095,14 +26096,14 @@ var require_util = __commonJS({
|
|
|
26095
26096
|
}
|
|
26096
26097
|
const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
|
|
26097
26098
|
let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
|
|
26098
|
-
let
|
|
26099
|
+
let path30 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
|
|
26099
26100
|
if (origin.endsWith("/")) {
|
|
26100
26101
|
origin = origin.substring(0, origin.length - 1);
|
|
26101
26102
|
}
|
|
26102
|
-
if (
|
|
26103
|
-
|
|
26103
|
+
if (path30 && !path30.startsWith("/")) {
|
|
26104
|
+
path30 = `/${path30}`;
|
|
26104
26105
|
}
|
|
26105
|
-
url = new URL(origin +
|
|
26106
|
+
url = new URL(origin + path30);
|
|
26106
26107
|
}
|
|
26107
26108
|
return url;
|
|
26108
26109
|
}
|
|
@@ -27716,20 +27717,20 @@ var require_parseParams = __commonJS({
|
|
|
27716
27717
|
var require_basename = __commonJS({
|
|
27717
27718
|
"node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
|
|
27718
27719
|
"use strict";
|
|
27719
|
-
module2.exports = function basename4(
|
|
27720
|
-
if (typeof
|
|
27720
|
+
module2.exports = function basename4(path30) {
|
|
27721
|
+
if (typeof path30 !== "string") {
|
|
27721
27722
|
return "";
|
|
27722
27723
|
}
|
|
27723
|
-
for (var i =
|
|
27724
|
-
switch (
|
|
27724
|
+
for (var i = path30.length - 1; i >= 0; --i) {
|
|
27725
|
+
switch (path30.charCodeAt(i)) {
|
|
27725
27726
|
case 47:
|
|
27726
27727
|
// '/'
|
|
27727
27728
|
case 92:
|
|
27728
|
-
|
|
27729
|
-
return
|
|
27729
|
+
path30 = path30.slice(i + 1);
|
|
27730
|
+
return path30 === ".." || path30 === "." ? "" : path30;
|
|
27730
27731
|
}
|
|
27731
27732
|
}
|
|
27732
|
-
return
|
|
27733
|
+
return path30 === ".." || path30 === "." ? "" : path30;
|
|
27733
27734
|
};
|
|
27734
27735
|
}
|
|
27735
27736
|
});
|
|
@@ -28733,11 +28734,11 @@ var require_util2 = __commonJS({
|
|
|
28733
28734
|
var assert = require("assert");
|
|
28734
28735
|
var { isUint8Array } = require("util/types");
|
|
28735
28736
|
var supportedHashes = [];
|
|
28736
|
-
var
|
|
28737
|
+
var crypto4;
|
|
28737
28738
|
try {
|
|
28738
|
-
|
|
28739
|
+
crypto4 = require("crypto");
|
|
28739
28740
|
const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
|
|
28740
|
-
supportedHashes =
|
|
28741
|
+
supportedHashes = crypto4.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
|
|
28741
28742
|
} catch {
|
|
28742
28743
|
}
|
|
28743
28744
|
function responseURL(response) {
|
|
@@ -29014,7 +29015,7 @@ var require_util2 = __commonJS({
|
|
|
29014
29015
|
}
|
|
29015
29016
|
}
|
|
29016
29017
|
function bytesMatch(bytes, metadataList) {
|
|
29017
|
-
if (
|
|
29018
|
+
if (crypto4 === void 0) {
|
|
29018
29019
|
return true;
|
|
29019
29020
|
}
|
|
29020
29021
|
const parsedMetadata = parseMetadata(metadataList);
|
|
@@ -29029,7 +29030,7 @@ var require_util2 = __commonJS({
|
|
|
29029
29030
|
for (const item of metadata) {
|
|
29030
29031
|
const algorithm = item.algo;
|
|
29031
29032
|
const expectedValue = item.hash;
|
|
29032
|
-
let actualValue =
|
|
29033
|
+
let actualValue = crypto4.createHash(algorithm).update(bytes).digest("base64");
|
|
29033
29034
|
if (actualValue[actualValue.length - 1] === "=") {
|
|
29034
29035
|
if (actualValue[actualValue.length - 2] === "=") {
|
|
29035
29036
|
actualValue = actualValue.slice(0, -2);
|
|
@@ -29122,8 +29123,8 @@ var require_util2 = __commonJS({
|
|
|
29122
29123
|
function createDeferredPromise() {
|
|
29123
29124
|
let res;
|
|
29124
29125
|
let rej;
|
|
29125
|
-
const promise = new Promise((
|
|
29126
|
-
res =
|
|
29126
|
+
const promise = new Promise((resolve17, reject) => {
|
|
29127
|
+
res = resolve17;
|
|
29127
29128
|
rej = reject;
|
|
29128
29129
|
});
|
|
29129
29130
|
return { promise, resolve: res, reject: rej };
|
|
@@ -30376,8 +30377,8 @@ var require_body = __commonJS({
|
|
|
30376
30377
|
var { parseMIMEType, serializeAMimeType } = require_dataURL();
|
|
30377
30378
|
var random;
|
|
30378
30379
|
try {
|
|
30379
|
-
const
|
|
30380
|
-
random = (max) =>
|
|
30380
|
+
const crypto4 = require("crypto");
|
|
30381
|
+
random = (max) => crypto4.randomInt(0, max);
|
|
30381
30382
|
} catch {
|
|
30382
30383
|
random = (max) => Math.floor(Math.random(max));
|
|
30383
30384
|
}
|
|
@@ -30628,8 +30629,8 @@ Content-Type: ${value.type || "application/octet-stream"}\r
|
|
|
30628
30629
|
});
|
|
30629
30630
|
}
|
|
30630
30631
|
});
|
|
30631
|
-
const busboyResolve = new Promise((
|
|
30632
|
-
busboy.on("finish",
|
|
30632
|
+
const busboyResolve = new Promise((resolve17, reject) => {
|
|
30633
|
+
busboy.on("finish", resolve17);
|
|
30633
30634
|
busboy.on("error", (err) => reject(new TypeError(err)));
|
|
30634
30635
|
});
|
|
30635
30636
|
if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk);
|
|
@@ -30760,7 +30761,7 @@ var require_request = __commonJS({
|
|
|
30760
30761
|
}
|
|
30761
30762
|
var Request = class _Request {
|
|
30762
30763
|
constructor(origin, {
|
|
30763
|
-
path:
|
|
30764
|
+
path: path30,
|
|
30764
30765
|
method,
|
|
30765
30766
|
body,
|
|
30766
30767
|
headers,
|
|
@@ -30774,11 +30775,11 @@ var require_request = __commonJS({
|
|
|
30774
30775
|
throwOnError,
|
|
30775
30776
|
expectContinue
|
|
30776
30777
|
}, handler) {
|
|
30777
|
-
if (typeof
|
|
30778
|
+
if (typeof path30 !== "string") {
|
|
30778
30779
|
throw new InvalidArgumentError("path must be a string");
|
|
30779
|
-
} else if (
|
|
30780
|
+
} else if (path30[0] !== "/" && !(path30.startsWith("http://") || path30.startsWith("https://")) && method !== "CONNECT") {
|
|
30780
30781
|
throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
|
|
30781
|
-
} else if (invalidPathRegex.exec(
|
|
30782
|
+
} else if (invalidPathRegex.exec(path30) !== null) {
|
|
30782
30783
|
throw new InvalidArgumentError("invalid request path");
|
|
30783
30784
|
}
|
|
30784
30785
|
if (typeof method !== "string") {
|
|
@@ -30841,7 +30842,7 @@ var require_request = __commonJS({
|
|
|
30841
30842
|
this.completed = false;
|
|
30842
30843
|
this.aborted = false;
|
|
30843
30844
|
this.upgrade = upgrade || null;
|
|
30844
|
-
this.path = query ? util.buildURL(
|
|
30845
|
+
this.path = query ? util.buildURL(path30, query) : path30;
|
|
30845
30846
|
this.origin = origin;
|
|
30846
30847
|
this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
|
|
30847
30848
|
this.blocking = blocking == null ? false : blocking;
|
|
@@ -31163,9 +31164,9 @@ var require_dispatcher_base = __commonJS({
|
|
|
31163
31164
|
}
|
|
31164
31165
|
close(callback) {
|
|
31165
31166
|
if (callback === void 0) {
|
|
31166
|
-
return new Promise((
|
|
31167
|
+
return new Promise((resolve17, reject) => {
|
|
31167
31168
|
this.close((err, data) => {
|
|
31168
|
-
return err ? reject(err) :
|
|
31169
|
+
return err ? reject(err) : resolve17(data);
|
|
31169
31170
|
});
|
|
31170
31171
|
});
|
|
31171
31172
|
}
|
|
@@ -31203,12 +31204,12 @@ var require_dispatcher_base = __commonJS({
|
|
|
31203
31204
|
err = null;
|
|
31204
31205
|
}
|
|
31205
31206
|
if (callback === void 0) {
|
|
31206
|
-
return new Promise((
|
|
31207
|
+
return new Promise((resolve17, reject) => {
|
|
31207
31208
|
this.destroy(err, (err2, data) => {
|
|
31208
31209
|
return err2 ? (
|
|
31209
31210
|
/* istanbul ignore next: should never error */
|
|
31210
31211
|
reject(err2)
|
|
31211
|
-
) :
|
|
31212
|
+
) : resolve17(data);
|
|
31212
31213
|
});
|
|
31213
31214
|
});
|
|
31214
31215
|
}
|
|
@@ -31849,9 +31850,9 @@ var require_RedirectHandler = __commonJS({
|
|
|
31849
31850
|
return this.handler.onHeaders(statusCode, headers, resume, statusText);
|
|
31850
31851
|
}
|
|
31851
31852
|
const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
|
|
31852
|
-
const
|
|
31853
|
+
const path30 = search ? `${pathname}${search}` : pathname;
|
|
31853
31854
|
this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
|
|
31854
|
-
this.opts.path =
|
|
31855
|
+
this.opts.path = path30;
|
|
31855
31856
|
this.opts.origin = origin;
|
|
31856
31857
|
this.opts.maxRedirections = 0;
|
|
31857
31858
|
this.opts.query = null;
|
|
@@ -32270,16 +32271,16 @@ var require_client = __commonJS({
|
|
|
32270
32271
|
return this[kNeedDrain] < 2;
|
|
32271
32272
|
}
|
|
32272
32273
|
async [kClose]() {
|
|
32273
|
-
return new Promise((
|
|
32274
|
+
return new Promise((resolve17) => {
|
|
32274
32275
|
if (!this[kSize]) {
|
|
32275
|
-
|
|
32276
|
+
resolve17(null);
|
|
32276
32277
|
} else {
|
|
32277
|
-
this[kClosedResolve] =
|
|
32278
|
+
this[kClosedResolve] = resolve17;
|
|
32278
32279
|
}
|
|
32279
32280
|
});
|
|
32280
32281
|
}
|
|
32281
32282
|
async [kDestroy](err) {
|
|
32282
|
-
return new Promise((
|
|
32283
|
+
return new Promise((resolve17) => {
|
|
32283
32284
|
const requests = this[kQueue].splice(this[kPendingIdx]);
|
|
32284
32285
|
for (let i = 0; i < requests.length; i++) {
|
|
32285
32286
|
const request = requests[i];
|
|
@@ -32290,7 +32291,7 @@ var require_client = __commonJS({
|
|
|
32290
32291
|
this[kClosedResolve]();
|
|
32291
32292
|
this[kClosedResolve] = null;
|
|
32292
32293
|
}
|
|
32293
|
-
|
|
32294
|
+
resolve17();
|
|
32294
32295
|
};
|
|
32295
32296
|
if (this[kHTTP2Session] != null) {
|
|
32296
32297
|
util.destroy(this[kHTTP2Session], err);
|
|
@@ -32870,7 +32871,7 @@ var require_client = __commonJS({
|
|
|
32870
32871
|
});
|
|
32871
32872
|
}
|
|
32872
32873
|
try {
|
|
32873
|
-
const socket = await new Promise((
|
|
32874
|
+
const socket = await new Promise((resolve17, reject) => {
|
|
32874
32875
|
client[kConnector]({
|
|
32875
32876
|
host,
|
|
32876
32877
|
hostname,
|
|
@@ -32882,7 +32883,7 @@ var require_client = __commonJS({
|
|
|
32882
32883
|
if (err) {
|
|
32883
32884
|
reject(err);
|
|
32884
32885
|
} else {
|
|
32885
|
-
|
|
32886
|
+
resolve17(socket2);
|
|
32886
32887
|
}
|
|
32887
32888
|
});
|
|
32888
32889
|
});
|
|
@@ -33093,7 +33094,7 @@ var require_client = __commonJS({
|
|
|
33093
33094
|
writeH2(client, client[kHTTP2Session], request);
|
|
33094
33095
|
return;
|
|
33095
33096
|
}
|
|
33096
|
-
const { body, method, path:
|
|
33097
|
+
const { body, method, path: path30, host, upgrade, headers, blocking, reset } = request;
|
|
33097
33098
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
33098
33099
|
if (body && typeof body.read === "function") {
|
|
33099
33100
|
body.read(0);
|
|
@@ -33143,7 +33144,7 @@ var require_client = __commonJS({
|
|
|
33143
33144
|
if (blocking) {
|
|
33144
33145
|
socket[kBlocking] = true;
|
|
33145
33146
|
}
|
|
33146
|
-
let header = `${method} ${
|
|
33147
|
+
let header = `${method} ${path30} HTTP/1.1\r
|
|
33147
33148
|
`;
|
|
33148
33149
|
if (typeof host === "string") {
|
|
33149
33150
|
header += `host: ${host}\r
|
|
@@ -33206,7 +33207,7 @@ upgrade: ${upgrade}\r
|
|
|
33206
33207
|
return true;
|
|
33207
33208
|
}
|
|
33208
33209
|
function writeH2(client, session, request) {
|
|
33209
|
-
const { body, method, path:
|
|
33210
|
+
const { body, method, path: path30, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
|
|
33210
33211
|
let headers;
|
|
33211
33212
|
if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim());
|
|
33212
33213
|
else headers = reqHeaders;
|
|
@@ -33249,7 +33250,7 @@ upgrade: ${upgrade}\r
|
|
|
33249
33250
|
});
|
|
33250
33251
|
return true;
|
|
33251
33252
|
}
|
|
33252
|
-
headers[HTTP2_HEADER_PATH] =
|
|
33253
|
+
headers[HTTP2_HEADER_PATH] = path30;
|
|
33253
33254
|
headers[HTTP2_HEADER_SCHEME] = "https";
|
|
33254
33255
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
33255
33256
|
if (body && typeof body.read === "function") {
|
|
@@ -33506,12 +33507,12 @@ upgrade: ${upgrade}\r
|
|
|
33506
33507
|
cb();
|
|
33507
33508
|
}
|
|
33508
33509
|
}
|
|
33509
|
-
const waitForDrain = () => new Promise((
|
|
33510
|
+
const waitForDrain = () => new Promise((resolve17, reject) => {
|
|
33510
33511
|
assert(callback === null);
|
|
33511
33512
|
if (socket[kError]) {
|
|
33512
33513
|
reject(socket[kError]);
|
|
33513
33514
|
} else {
|
|
33514
|
-
callback =
|
|
33515
|
+
callback = resolve17;
|
|
33515
33516
|
}
|
|
33516
33517
|
});
|
|
33517
33518
|
if (client[kHTTPConnVersion] === "h2") {
|
|
@@ -33857,8 +33858,8 @@ var require_pool_base = __commonJS({
|
|
|
33857
33858
|
if (this[kQueue].isEmpty()) {
|
|
33858
33859
|
return Promise.all(this[kClients].map((c) => c.close()));
|
|
33859
33860
|
} else {
|
|
33860
|
-
return new Promise((
|
|
33861
|
-
this[kClosedResolve] =
|
|
33861
|
+
return new Promise((resolve17) => {
|
|
33862
|
+
this[kClosedResolve] = resolve17;
|
|
33862
33863
|
});
|
|
33863
33864
|
}
|
|
33864
33865
|
}
|
|
@@ -34436,7 +34437,7 @@ var require_readable = __commonJS({
|
|
|
34436
34437
|
if (this.closed) {
|
|
34437
34438
|
return Promise.resolve(null);
|
|
34438
34439
|
}
|
|
34439
|
-
return new Promise((
|
|
34440
|
+
return new Promise((resolve17, reject) => {
|
|
34440
34441
|
const signalListenerCleanup = signal ? util.addAbortListener(signal, () => {
|
|
34441
34442
|
this.destroy();
|
|
34442
34443
|
}) : noop;
|
|
@@ -34445,7 +34446,7 @@ var require_readable = __commonJS({
|
|
|
34445
34446
|
if (signal && signal.aborted) {
|
|
34446
34447
|
reject(signal.reason || Object.assign(new Error("The operation was aborted"), { name: "AbortError" }));
|
|
34447
34448
|
} else {
|
|
34448
|
-
|
|
34449
|
+
resolve17(null);
|
|
34449
34450
|
}
|
|
34450
34451
|
}).on("error", noop).on("data", function(chunk) {
|
|
34451
34452
|
limit -= chunk.length;
|
|
@@ -34467,11 +34468,11 @@ var require_readable = __commonJS({
|
|
|
34467
34468
|
throw new TypeError("unusable");
|
|
34468
34469
|
}
|
|
34469
34470
|
assert(!stream[kConsume]);
|
|
34470
|
-
return new Promise((
|
|
34471
|
+
return new Promise((resolve17, reject) => {
|
|
34471
34472
|
stream[kConsume] = {
|
|
34472
34473
|
type,
|
|
34473
34474
|
stream,
|
|
34474
|
-
resolve:
|
|
34475
|
+
resolve: resolve17,
|
|
34475
34476
|
reject,
|
|
34476
34477
|
length: 0,
|
|
34477
34478
|
body: []
|
|
@@ -34506,12 +34507,12 @@ var require_readable = __commonJS({
|
|
|
34506
34507
|
}
|
|
34507
34508
|
}
|
|
34508
34509
|
function consumeEnd(consume2) {
|
|
34509
|
-
const { type, body, resolve:
|
|
34510
|
+
const { type, body, resolve: resolve17, stream, length } = consume2;
|
|
34510
34511
|
try {
|
|
34511
34512
|
if (type === "text") {
|
|
34512
|
-
|
|
34513
|
+
resolve17(toUSVString(Buffer.concat(body)));
|
|
34513
34514
|
} else if (type === "json") {
|
|
34514
|
-
|
|
34515
|
+
resolve17(JSON.parse(Buffer.concat(body)));
|
|
34515
34516
|
} else if (type === "arrayBuffer") {
|
|
34516
34517
|
const dst = new Uint8Array(length);
|
|
34517
34518
|
let pos = 0;
|
|
@@ -34519,12 +34520,12 @@ var require_readable = __commonJS({
|
|
|
34519
34520
|
dst.set(buf, pos);
|
|
34520
34521
|
pos += buf.byteLength;
|
|
34521
34522
|
}
|
|
34522
|
-
|
|
34523
|
+
resolve17(dst.buffer);
|
|
34523
34524
|
} else if (type === "blob") {
|
|
34524
34525
|
if (!Blob2) {
|
|
34525
34526
|
Blob2 = require("buffer").Blob;
|
|
34526
34527
|
}
|
|
34527
|
-
|
|
34528
|
+
resolve17(new Blob2(body, { type: stream[kContentType] }));
|
|
34528
34529
|
}
|
|
34529
34530
|
consumeFinish(consume2);
|
|
34530
34531
|
} catch (err) {
|
|
@@ -34781,9 +34782,9 @@ var require_api_request = __commonJS({
|
|
|
34781
34782
|
};
|
|
34782
34783
|
function request(opts, callback) {
|
|
34783
34784
|
if (callback === void 0) {
|
|
34784
|
-
return new Promise((
|
|
34785
|
+
return new Promise((resolve17, reject) => {
|
|
34785
34786
|
request.call(this, opts, (err, data) => {
|
|
34786
|
-
return err ? reject(err) :
|
|
34787
|
+
return err ? reject(err) : resolve17(data);
|
|
34787
34788
|
});
|
|
34788
34789
|
});
|
|
34789
34790
|
}
|
|
@@ -34956,9 +34957,9 @@ var require_api_stream = __commonJS({
|
|
|
34956
34957
|
};
|
|
34957
34958
|
function stream(opts, factory, callback) {
|
|
34958
34959
|
if (callback === void 0) {
|
|
34959
|
-
return new Promise((
|
|
34960
|
+
return new Promise((resolve17, reject) => {
|
|
34960
34961
|
stream.call(this, opts, factory, (err, data) => {
|
|
34961
|
-
return err ? reject(err) :
|
|
34962
|
+
return err ? reject(err) : resolve17(data);
|
|
34962
34963
|
});
|
|
34963
34964
|
});
|
|
34964
34965
|
}
|
|
@@ -35239,9 +35240,9 @@ var require_api_upgrade = __commonJS({
|
|
|
35239
35240
|
};
|
|
35240
35241
|
function upgrade(opts, callback) {
|
|
35241
35242
|
if (callback === void 0) {
|
|
35242
|
-
return new Promise((
|
|
35243
|
+
return new Promise((resolve17, reject) => {
|
|
35243
35244
|
upgrade.call(this, opts, (err, data) => {
|
|
35244
|
-
return err ? reject(err) :
|
|
35245
|
+
return err ? reject(err) : resolve17(data);
|
|
35245
35246
|
});
|
|
35246
35247
|
});
|
|
35247
35248
|
}
|
|
@@ -35330,9 +35331,9 @@ var require_api_connect = __commonJS({
|
|
|
35330
35331
|
};
|
|
35331
35332
|
function connect(opts, callback) {
|
|
35332
35333
|
if (callback === void 0) {
|
|
35333
|
-
return new Promise((
|
|
35334
|
+
return new Promise((resolve17, reject) => {
|
|
35334
35335
|
connect.call(this, opts, (err, data) => {
|
|
35335
|
-
return err ? reject(err) :
|
|
35336
|
+
return err ? reject(err) : resolve17(data);
|
|
35336
35337
|
});
|
|
35337
35338
|
});
|
|
35338
35339
|
}
|
|
@@ -35492,20 +35493,20 @@ var require_mock_utils = __commonJS({
|
|
|
35492
35493
|
}
|
|
35493
35494
|
return true;
|
|
35494
35495
|
}
|
|
35495
|
-
function safeUrl(
|
|
35496
|
-
if (typeof
|
|
35497
|
-
return
|
|
35496
|
+
function safeUrl(path30) {
|
|
35497
|
+
if (typeof path30 !== "string") {
|
|
35498
|
+
return path30;
|
|
35498
35499
|
}
|
|
35499
|
-
const pathSegments =
|
|
35500
|
+
const pathSegments = path30.split("?");
|
|
35500
35501
|
if (pathSegments.length !== 2) {
|
|
35501
|
-
return
|
|
35502
|
+
return path30;
|
|
35502
35503
|
}
|
|
35503
35504
|
const qp = new URLSearchParams(pathSegments.pop());
|
|
35504
35505
|
qp.sort();
|
|
35505
35506
|
return [...pathSegments, qp.toString()].join("?");
|
|
35506
35507
|
}
|
|
35507
|
-
function matchKey(mockDispatch2, { path:
|
|
35508
|
-
const pathMatch = matchValue(mockDispatch2.path,
|
|
35508
|
+
function matchKey(mockDispatch2, { path: path30, method, body, headers }) {
|
|
35509
|
+
const pathMatch = matchValue(mockDispatch2.path, path30);
|
|
35509
35510
|
const methodMatch = matchValue(mockDispatch2.method, method);
|
|
35510
35511
|
const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
|
|
35511
35512
|
const headersMatch = matchHeaders(mockDispatch2, headers);
|
|
@@ -35523,7 +35524,7 @@ var require_mock_utils = __commonJS({
|
|
|
35523
35524
|
function getMockDispatch(mockDispatches, key) {
|
|
35524
35525
|
const basePath = key.query ? buildURL(key.path, key.query) : key.path;
|
|
35525
35526
|
const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
|
|
35526
|
-
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path:
|
|
35527
|
+
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path30 }) => matchValue(safeUrl(path30), resolvedPath));
|
|
35527
35528
|
if (matchedMockDispatches.length === 0) {
|
|
35528
35529
|
throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
|
|
35529
35530
|
}
|
|
@@ -35560,9 +35561,9 @@ var require_mock_utils = __commonJS({
|
|
|
35560
35561
|
}
|
|
35561
35562
|
}
|
|
35562
35563
|
function buildKey(opts) {
|
|
35563
|
-
const { path:
|
|
35564
|
+
const { path: path30, method, body, headers, query } = opts;
|
|
35564
35565
|
return {
|
|
35565
|
-
path:
|
|
35566
|
+
path: path30,
|
|
35566
35567
|
method,
|
|
35567
35568
|
body,
|
|
35568
35569
|
headers,
|
|
@@ -36011,10 +36012,10 @@ var require_pending_interceptors_formatter = __commonJS({
|
|
|
36011
36012
|
}
|
|
36012
36013
|
format(pendingInterceptors) {
|
|
36013
36014
|
const withPrettyHeaders = pendingInterceptors.map(
|
|
36014
|
-
({ method, path:
|
|
36015
|
+
({ method, path: path30, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
|
|
36015
36016
|
Method: method,
|
|
36016
36017
|
Origin: origin,
|
|
36017
|
-
Path:
|
|
36018
|
+
Path: path30,
|
|
36018
36019
|
"Status code": statusCode,
|
|
36019
36020
|
Persistent: persist ? "\u2705" : "\u274C",
|
|
36020
36021
|
Invocations: timesInvoked,
|
|
@@ -38955,7 +38956,7 @@ var require_fetch = __commonJS({
|
|
|
38955
38956
|
async function dispatch({ body }) {
|
|
38956
38957
|
const url = requestCurrentURL(request);
|
|
38957
38958
|
const agent = fetchParams.controller.dispatcher;
|
|
38958
|
-
return new Promise((
|
|
38959
|
+
return new Promise((resolve17, reject) => agent.dispatch(
|
|
38959
38960
|
{
|
|
38960
38961
|
path: url.pathname + url.search,
|
|
38961
38962
|
origin: url.origin,
|
|
@@ -39031,7 +39032,7 @@ var require_fetch = __commonJS({
|
|
|
39031
39032
|
}
|
|
39032
39033
|
}
|
|
39033
39034
|
}
|
|
39034
|
-
|
|
39035
|
+
resolve17({
|
|
39035
39036
|
status,
|
|
39036
39037
|
statusText,
|
|
39037
39038
|
headersList: headers[kHeadersList],
|
|
@@ -39074,7 +39075,7 @@ var require_fetch = __commonJS({
|
|
|
39074
39075
|
const val = headersList[n + 1].toString("latin1");
|
|
39075
39076
|
headers[kHeadersList].append(key, val);
|
|
39076
39077
|
}
|
|
39077
|
-
|
|
39078
|
+
resolve17({
|
|
39078
39079
|
status,
|
|
39079
39080
|
statusText: STATUS_CODES[status],
|
|
39080
39081
|
headersList: headers[kHeadersList],
|
|
@@ -40635,8 +40636,8 @@ var require_util6 = __commonJS({
|
|
|
40635
40636
|
}
|
|
40636
40637
|
}
|
|
40637
40638
|
}
|
|
40638
|
-
function validateCookiePath(
|
|
40639
|
-
for (const char of
|
|
40639
|
+
function validateCookiePath(path30) {
|
|
40640
|
+
for (const char of path30) {
|
|
40640
40641
|
const code = char.charCodeAt(0);
|
|
40641
40642
|
if (code < 33 || char === ";") {
|
|
40642
40643
|
throw new Error("Invalid cookie path");
|
|
@@ -41433,9 +41434,9 @@ var require_connection = __commonJS({
|
|
|
41433
41434
|
channels.open = diagnosticsChannel.channel("undici:websocket:open");
|
|
41434
41435
|
channels.close = diagnosticsChannel.channel("undici:websocket:close");
|
|
41435
41436
|
channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error");
|
|
41436
|
-
var
|
|
41437
|
+
var crypto4;
|
|
41437
41438
|
try {
|
|
41438
|
-
|
|
41439
|
+
crypto4 = require("crypto");
|
|
41439
41440
|
} catch {
|
|
41440
41441
|
}
|
|
41441
41442
|
function establishWebSocketConnection(url, protocols, ws, onEstablish, options) {
|
|
@@ -41454,7 +41455,7 @@ var require_connection = __commonJS({
|
|
|
41454
41455
|
const headersList = new Headers(options.headers)[kHeadersList];
|
|
41455
41456
|
request.headersList = headersList;
|
|
41456
41457
|
}
|
|
41457
|
-
const keyValue =
|
|
41458
|
+
const keyValue = crypto4.randomBytes(16).toString("base64");
|
|
41458
41459
|
request.headersList.append("sec-websocket-key", keyValue);
|
|
41459
41460
|
request.headersList.append("sec-websocket-version", "13");
|
|
41460
41461
|
for (const protocol of protocols) {
|
|
@@ -41483,7 +41484,7 @@ var require_connection = __commonJS({
|
|
|
41483
41484
|
return;
|
|
41484
41485
|
}
|
|
41485
41486
|
const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
|
|
41486
|
-
const digest =
|
|
41487
|
+
const digest = crypto4.createHash("sha1").update(keyValue + uid).digest("base64");
|
|
41487
41488
|
if (secWSAccept !== digest) {
|
|
41488
41489
|
failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
|
|
41489
41490
|
return;
|
|
@@ -41563,9 +41564,9 @@ var require_frame = __commonJS({
|
|
|
41563
41564
|
"node_modules/undici/lib/websocket/frame.js"(exports2, module2) {
|
|
41564
41565
|
"use strict";
|
|
41565
41566
|
var { maxUnsigned16Bit } = require_constants5();
|
|
41566
|
-
var
|
|
41567
|
+
var crypto4;
|
|
41567
41568
|
try {
|
|
41568
|
-
|
|
41569
|
+
crypto4 = require("crypto");
|
|
41569
41570
|
} catch {
|
|
41570
41571
|
}
|
|
41571
41572
|
var WebsocketFrameSend = class {
|
|
@@ -41574,7 +41575,7 @@ var require_frame = __commonJS({
|
|
|
41574
41575
|
*/
|
|
41575
41576
|
constructor(data) {
|
|
41576
41577
|
this.frameData = data;
|
|
41577
|
-
this.maskKey =
|
|
41578
|
+
this.maskKey = crypto4.randomBytes(4);
|
|
41578
41579
|
}
|
|
41579
41580
|
createFrame(opcode) {
|
|
41580
41581
|
const bodyLength = this.frameData?.byteLength ?? 0;
|
|
@@ -42316,11 +42317,11 @@ var require_undici = __commonJS({
|
|
|
42316
42317
|
if (typeof opts.path !== "string") {
|
|
42317
42318
|
throw new InvalidArgumentError("invalid opts.path");
|
|
42318
42319
|
}
|
|
42319
|
-
let
|
|
42320
|
+
let path30 = opts.path;
|
|
42320
42321
|
if (!opts.path.startsWith("/")) {
|
|
42321
|
-
|
|
42322
|
+
path30 = `/${path30}`;
|
|
42322
42323
|
}
|
|
42323
|
-
url = new URL(util.parseOrigin(url).origin +
|
|
42324
|
+
url = new URL(util.parseOrigin(url).origin + path30);
|
|
42324
42325
|
} else {
|
|
42325
42326
|
if (!opts) {
|
|
42326
42327
|
opts = typeof url === "object" ? url : {};
|
|
@@ -42869,7 +42870,7 @@ var init_mcp_check_provider = __esm({
|
|
|
42869
42870
|
logger.warn(
|
|
42870
42871
|
`MCP ${transportName} failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${error instanceof Error ? error.message : String(error)}`
|
|
42871
42872
|
);
|
|
42872
|
-
await new Promise((
|
|
42873
|
+
await new Promise((resolve17) => setTimeout(resolve17, delay));
|
|
42873
42874
|
attempt += 1;
|
|
42874
42875
|
} finally {
|
|
42875
42876
|
try {
|
|
@@ -43151,7 +43152,7 @@ async function acquirePromptLock() {
|
|
|
43151
43152
|
activePrompt = true;
|
|
43152
43153
|
return;
|
|
43153
43154
|
}
|
|
43154
|
-
await new Promise((
|
|
43155
|
+
await new Promise((resolve17) => waiters.push(resolve17));
|
|
43155
43156
|
activePrompt = true;
|
|
43156
43157
|
}
|
|
43157
43158
|
function releasePromptLock() {
|
|
@@ -43161,7 +43162,7 @@ function releasePromptLock() {
|
|
|
43161
43162
|
}
|
|
43162
43163
|
async function interactivePrompt(options) {
|
|
43163
43164
|
await acquirePromptLock();
|
|
43164
|
-
return new Promise((
|
|
43165
|
+
return new Promise((resolve17, reject) => {
|
|
43165
43166
|
const dbg = process.env.VISOR_DEBUG === "true";
|
|
43166
43167
|
try {
|
|
43167
43168
|
if (dbg) {
|
|
@@ -43248,12 +43249,12 @@ async function interactivePrompt(options) {
|
|
|
43248
43249
|
};
|
|
43249
43250
|
const finish = (value) => {
|
|
43250
43251
|
cleanup();
|
|
43251
|
-
|
|
43252
|
+
resolve17(value);
|
|
43252
43253
|
};
|
|
43253
43254
|
if (options.timeout && options.timeout > 0) {
|
|
43254
43255
|
timeoutId = setTimeout(() => {
|
|
43255
43256
|
cleanup();
|
|
43256
|
-
if (defaultValue !== void 0) return
|
|
43257
|
+
if (defaultValue !== void 0) return resolve17(defaultValue);
|
|
43257
43258
|
return reject(new Error("Input timeout"));
|
|
43258
43259
|
}, options.timeout);
|
|
43259
43260
|
}
|
|
@@ -43385,7 +43386,7 @@ async function interactivePrompt(options) {
|
|
|
43385
43386
|
});
|
|
43386
43387
|
}
|
|
43387
43388
|
async function simplePrompt(prompt) {
|
|
43388
|
-
return new Promise((
|
|
43389
|
+
return new Promise((resolve17) => {
|
|
43389
43390
|
const rl = readline.createInterface({
|
|
43390
43391
|
input: process.stdin,
|
|
43391
43392
|
output: process.stdout
|
|
@@ -43401,7 +43402,7 @@ async function simplePrompt(prompt) {
|
|
|
43401
43402
|
rl.question(`${prompt}
|
|
43402
43403
|
> `, (answer) => {
|
|
43403
43404
|
rl.close();
|
|
43404
|
-
|
|
43405
|
+
resolve17(answer.trim());
|
|
43405
43406
|
});
|
|
43406
43407
|
});
|
|
43407
43408
|
}
|
|
@@ -43569,7 +43570,7 @@ function isStdinAvailable() {
|
|
|
43569
43570
|
return !process.stdin.isTTY;
|
|
43570
43571
|
}
|
|
43571
43572
|
async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
43572
|
-
return new Promise((
|
|
43573
|
+
return new Promise((resolve17, reject) => {
|
|
43573
43574
|
let data = "";
|
|
43574
43575
|
let timeoutId;
|
|
43575
43576
|
if (timeout) {
|
|
@@ -43596,7 +43597,7 @@ async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
|
43596
43597
|
};
|
|
43597
43598
|
const onEnd = () => {
|
|
43598
43599
|
cleanup();
|
|
43599
|
-
|
|
43600
|
+
resolve17(data.trim());
|
|
43600
43601
|
};
|
|
43601
43602
|
const onError = (err) => {
|
|
43602
43603
|
cleanup();
|
|
@@ -45055,7 +45056,7 @@ var init_worktree_manager = __esm({
|
|
|
45055
45056
|
/**
|
|
45056
45057
|
* Get or create bare repository
|
|
45057
45058
|
*/
|
|
45058
|
-
async getOrCreateBareRepo(repository, repoUrl,
|
|
45059
|
+
async getOrCreateBareRepo(repository, repoUrl, _token, fetchDepth, cloneTimeoutMs) {
|
|
45059
45060
|
const reposDir = this.getReposDir();
|
|
45060
45061
|
const repoName = repository.replace(/\//g, "-");
|
|
45061
45062
|
const bareRepoPath = path20.join(reposDir, `${repoName}.git`);
|
|
@@ -45071,11 +45072,12 @@ var init_worktree_manager = __esm({
|
|
|
45071
45072
|
);
|
|
45072
45073
|
await fsp.rm(bareRepoPath, { recursive: true, force: true });
|
|
45073
45074
|
} else {
|
|
45075
|
+
await this.resetBareRepoRemoteUrl(bareRepoPath, repoUrl);
|
|
45074
45076
|
await this.updateBareRepo(bareRepoPath);
|
|
45075
45077
|
return bareRepoPath;
|
|
45076
45078
|
}
|
|
45077
45079
|
}
|
|
45078
|
-
const cloneUrl =
|
|
45080
|
+
const cloneUrl = repoUrl;
|
|
45079
45081
|
const redactedUrl = this.redactUrl(cloneUrl);
|
|
45080
45082
|
logger.info(
|
|
45081
45083
|
`Cloning bare repository: ${redactedUrl}${fetchDepth ? ` (depth: ${fetchDepth})` : ""}`
|
|
@@ -45156,6 +45158,33 @@ var init_worktree_manager = __esm({
|
|
|
45156
45158
|
return false;
|
|
45157
45159
|
}
|
|
45158
45160
|
}
|
|
45161
|
+
/**
|
|
45162
|
+
* Ensure the origin remote URL of a bare repo is a plain URL (no embedded token).
|
|
45163
|
+
*
|
|
45164
|
+
* Older bare repos may have been cloned with a token in the URL
|
|
45165
|
+
* (https://x-access-token:TOKEN@github.com/...). This causes stale-token
|
|
45166
|
+
* failures because GIT_CONFIG insteadOf rules can't rewrite URLs that
|
|
45167
|
+
* already have credentials. Resetting to the plain URL lets insteadOf
|
|
45168
|
+
* handle auth with the freshest token.
|
|
45169
|
+
*/
|
|
45170
|
+
async resetBareRepoRemoteUrl(bareRepoPath, plainRepoUrl) {
|
|
45171
|
+
try {
|
|
45172
|
+
const cmd = `git -C ${this.escapeShellArg(bareRepoPath)} remote set-url origin ${this.escapeShellArg(plainRepoUrl)}`;
|
|
45173
|
+
const result = await this.executeGitCommand(cmd, { timeout: 1e4 });
|
|
45174
|
+
if (result.exitCode !== 0) {
|
|
45175
|
+
logger.warn(
|
|
45176
|
+
`Failed to reset bare repo remote URL: ${result.stderr}. Git operations may fail with stale token if the URL has embedded credentials.`
|
|
45177
|
+
);
|
|
45178
|
+
} else {
|
|
45179
|
+
logger.debug(`Reset bare repo remote URL to plain URL for ${bareRepoPath}`);
|
|
45180
|
+
}
|
|
45181
|
+
} catch (error) {
|
|
45182
|
+
const msg = error instanceof Error ? error.message : String(error);
|
|
45183
|
+
logger.warn(
|
|
45184
|
+
`Error resetting bare repo remote URL: ${msg}. Git operations may fail with stale token if the URL has embedded credentials.`
|
|
45185
|
+
);
|
|
45186
|
+
}
|
|
45187
|
+
}
|
|
45159
45188
|
/**
|
|
45160
45189
|
* Create a new worktree for the given repository/ref.
|
|
45161
45190
|
*
|
|
@@ -47628,23 +47657,23 @@ __export(renderer_schema_exports, {
|
|
|
47628
47657
|
});
|
|
47629
47658
|
async function loadRendererSchema(name) {
|
|
47630
47659
|
try {
|
|
47631
|
-
const
|
|
47632
|
-
const
|
|
47660
|
+
const fs26 = await import("fs/promises");
|
|
47661
|
+
const path30 = await import("path");
|
|
47633
47662
|
const sanitized = String(name).replace(/[^a-zA-Z0-9-]/g, "");
|
|
47634
47663
|
if (!sanitized) return void 0;
|
|
47635
47664
|
const candidates = [
|
|
47636
47665
|
// When bundled with ncc, __dirname is dist/ and output/ is at dist/output/
|
|
47637
|
-
|
|
47666
|
+
path30.join(__dirname, "output", sanitized, "schema.json"),
|
|
47638
47667
|
// When running from source, __dirname is src/state-machine/dispatch/ and output/ is at output/
|
|
47639
|
-
|
|
47668
|
+
path30.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
|
|
47640
47669
|
// When running from a checkout with output/ folder copied to CWD
|
|
47641
|
-
|
|
47670
|
+
path30.join(process.cwd(), "output", sanitized, "schema.json"),
|
|
47642
47671
|
// Fallback: cwd/dist/output/
|
|
47643
|
-
|
|
47672
|
+
path30.join(process.cwd(), "dist", "output", sanitized, "schema.json")
|
|
47644
47673
|
];
|
|
47645
47674
|
for (const p of candidates) {
|
|
47646
47675
|
try {
|
|
47647
|
-
const raw = await
|
|
47676
|
+
const raw = await fs26.readFile(p, "utf-8");
|
|
47648
47677
|
return JSON.parse(raw);
|
|
47649
47678
|
} catch {
|
|
47650
47679
|
}
|
|
@@ -50063,8 +50092,8 @@ function updateStats2(results, state, isForEachIteration = false) {
|
|
|
50063
50092
|
async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
50064
50093
|
try {
|
|
50065
50094
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
50066
|
-
const
|
|
50067
|
-
const
|
|
50095
|
+
const fs26 = await import("fs/promises");
|
|
50096
|
+
const path30 = await import("path");
|
|
50068
50097
|
const schemaRaw = checkConfig.schema || "plain";
|
|
50069
50098
|
const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
|
|
50070
50099
|
let templateContent;
|
|
@@ -50073,27 +50102,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
|
50073
50102
|
logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
|
|
50074
50103
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
50075
50104
|
const file = String(checkConfig.template.file);
|
|
50076
|
-
const resolved =
|
|
50077
|
-
templateContent = await
|
|
50105
|
+
const resolved = path30.resolve(process.cwd(), file);
|
|
50106
|
+
templateContent = await fs26.readFile(resolved, "utf-8");
|
|
50078
50107
|
logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
|
|
50079
50108
|
} else if (schema && schema !== "plain") {
|
|
50080
50109
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
50081
50110
|
if (sanitized) {
|
|
50082
50111
|
const candidatePaths = [
|
|
50083
|
-
|
|
50112
|
+
path30.join(__dirname, "output", sanitized, "template.liquid"),
|
|
50084
50113
|
// bundled: dist/output/
|
|
50085
|
-
|
|
50114
|
+
path30.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
50086
50115
|
// source (from state-machine/states)
|
|
50087
|
-
|
|
50116
|
+
path30.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
|
|
50088
50117
|
// source (alternate)
|
|
50089
|
-
|
|
50118
|
+
path30.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
50090
50119
|
// fallback: cwd/output/
|
|
50091
|
-
|
|
50120
|
+
path30.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
50092
50121
|
// fallback: cwd/dist/output/
|
|
50093
50122
|
];
|
|
50094
50123
|
for (const p of candidatePaths) {
|
|
50095
50124
|
try {
|
|
50096
|
-
templateContent = await
|
|
50125
|
+
templateContent = await fs26.readFile(p, "utf-8");
|
|
50097
50126
|
if (templateContent) {
|
|
50098
50127
|
logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
|
|
50099
50128
|
break;
|
|
@@ -51210,10 +51239,12 @@ var init_bubblewrap_sandbox = __esm({
|
|
|
51210
51239
|
name;
|
|
51211
51240
|
config;
|
|
51212
51241
|
repoPath;
|
|
51213
|
-
|
|
51242
|
+
visorDistPath;
|
|
51243
|
+
constructor(name, config, repoPath, visorDistPath) {
|
|
51214
51244
|
this.name = name;
|
|
51215
51245
|
this.config = config;
|
|
51216
51246
|
this.repoPath = (0, import_path10.resolve)(repoPath);
|
|
51247
|
+
this.visorDistPath = (0, import_path10.resolve)(visorDistPath);
|
|
51217
51248
|
}
|
|
51218
51249
|
/**
|
|
51219
51250
|
* Check if bwrap binary is available on the system.
|
|
@@ -51294,6 +51325,8 @@ var init_bubblewrap_sandbox = __esm({
|
|
|
51294
51325
|
} else {
|
|
51295
51326
|
args.push("--bind", this.repoPath, workdir);
|
|
51296
51327
|
}
|
|
51328
|
+
const visorPath = this.config.visor_path || "/opt/visor";
|
|
51329
|
+
args.push("--ro-bind", this.visorDistPath, visorPath);
|
|
51297
51330
|
args.push("--chdir", workdir);
|
|
51298
51331
|
args.push("--unshare-pid");
|
|
51299
51332
|
args.push("--new-session");
|
|
@@ -51335,10 +51368,12 @@ var init_seatbelt_sandbox = __esm({
|
|
|
51335
51368
|
name;
|
|
51336
51369
|
config;
|
|
51337
51370
|
repoPath;
|
|
51338
|
-
|
|
51371
|
+
visorDistPath;
|
|
51372
|
+
constructor(name, config, repoPath, visorDistPath) {
|
|
51339
51373
|
this.name = name;
|
|
51340
51374
|
this.config = config;
|
|
51341
51375
|
this.repoPath = (0, import_fs7.realpathSync)((0, import_path11.resolve)(repoPath));
|
|
51376
|
+
this.visorDistPath = (0, import_fs7.realpathSync)((0, import_path11.resolve)(visorDistPath));
|
|
51342
51377
|
}
|
|
51343
51378
|
/**
|
|
51344
51379
|
* Check if sandbox-exec binary is available on the system.
|
|
@@ -51439,6 +51474,8 @@ var init_seatbelt_sandbox = __esm({
|
|
|
51439
51474
|
if (!this.config.read_only) {
|
|
51440
51475
|
lines.push(`(allow file-write* (subpath "${repoPath}"))`);
|
|
51441
51476
|
}
|
|
51477
|
+
const visorDistPath = this.escapePath(this.visorDistPath);
|
|
51478
|
+
lines.push(`(allow file-read* (subpath "${visorDistPath}"))`);
|
|
51442
51479
|
if (this.config.network !== false) {
|
|
51443
51480
|
lines.push("(allow network*)");
|
|
51444
51481
|
}
|
|
@@ -51511,13 +51548,13 @@ var init_sandbox_manager = __esm({
|
|
|
51511
51548
|
const mode = config.compose ? "compose" : "image";
|
|
51512
51549
|
if (config.engine === "bubblewrap") {
|
|
51513
51550
|
const { BubblewrapSandbox: BubblewrapSandbox2 } = (init_bubblewrap_sandbox(), __toCommonJS(bubblewrap_sandbox_exports));
|
|
51514
|
-
const instance = new BubblewrapSandbox2(name, config, this.repoPath);
|
|
51551
|
+
const instance = new BubblewrapSandbox2(name, config, this.repoPath, this.visorDistPath);
|
|
51515
51552
|
this.instances.set(name, instance);
|
|
51516
51553
|
return instance;
|
|
51517
51554
|
}
|
|
51518
51555
|
if (config.engine === "seatbelt") {
|
|
51519
51556
|
const { SeatbeltSandbox: SeatbeltSandbox2 } = (init_seatbelt_sandbox(), __toCommonJS(seatbelt_sandbox_exports));
|
|
51520
|
-
const instance = new SeatbeltSandbox2(name, config, this.repoPath);
|
|
51557
|
+
const instance = new SeatbeltSandbox2(name, config, this.repoPath, this.visorDistPath);
|
|
51521
51558
|
this.instances.set(name, instance);
|
|
51522
51559
|
return instance;
|
|
51523
51560
|
}
|
|
@@ -52168,8 +52205,8 @@ var init_workspace_manager = __esm({
|
|
|
52168
52205
|
);
|
|
52169
52206
|
if (this.cleanupRequested && this.activeOperations === 0) {
|
|
52170
52207
|
logger.debug(`[Workspace] All references released, proceeding with deferred cleanup`);
|
|
52171
|
-
for (const
|
|
52172
|
-
|
|
52208
|
+
for (const resolve17 of this.cleanupResolvers) {
|
|
52209
|
+
resolve17();
|
|
52173
52210
|
}
|
|
52174
52211
|
this.cleanupResolvers = [];
|
|
52175
52212
|
}
|
|
@@ -52324,19 +52361,19 @@ var init_workspace_manager = __esm({
|
|
|
52324
52361
|
);
|
|
52325
52362
|
this.cleanupRequested = true;
|
|
52326
52363
|
await Promise.race([
|
|
52327
|
-
new Promise((
|
|
52364
|
+
new Promise((resolve17) => {
|
|
52328
52365
|
if (this.activeOperations === 0) {
|
|
52329
|
-
|
|
52366
|
+
resolve17();
|
|
52330
52367
|
} else {
|
|
52331
|
-
this.cleanupResolvers.push(
|
|
52368
|
+
this.cleanupResolvers.push(resolve17);
|
|
52332
52369
|
}
|
|
52333
52370
|
}),
|
|
52334
|
-
new Promise((
|
|
52371
|
+
new Promise((resolve17) => {
|
|
52335
52372
|
setTimeout(() => {
|
|
52336
52373
|
logger.warn(
|
|
52337
52374
|
`[Workspace] Cleanup timeout after ${timeout}ms, proceeding anyway (${this.activeOperations} operations still active)`
|
|
52338
52375
|
);
|
|
52339
|
-
|
|
52376
|
+
resolve17();
|
|
52340
52377
|
}, timeout);
|
|
52341
52378
|
})
|
|
52342
52379
|
]);
|
|
@@ -52677,6 +52714,1264 @@ var init_build_engine_context = __esm({
|
|
|
52677
52714
|
}
|
|
52678
52715
|
});
|
|
52679
52716
|
|
|
52717
|
+
// src/policy/default-engine.ts
|
|
52718
|
+
var DefaultPolicyEngine;
|
|
52719
|
+
var init_default_engine = __esm({
|
|
52720
|
+
"src/policy/default-engine.ts"() {
|
|
52721
|
+
"use strict";
|
|
52722
|
+
DefaultPolicyEngine = class {
|
|
52723
|
+
async initialize(_config) {
|
|
52724
|
+
}
|
|
52725
|
+
async evaluateCheckExecution(_checkId, _checkConfig) {
|
|
52726
|
+
return { allowed: true };
|
|
52727
|
+
}
|
|
52728
|
+
async evaluateToolInvocation(_serverName, _methodName, _transport) {
|
|
52729
|
+
return { allowed: true };
|
|
52730
|
+
}
|
|
52731
|
+
async evaluateCapabilities(_checkId, _capabilities) {
|
|
52732
|
+
return { allowed: true };
|
|
52733
|
+
}
|
|
52734
|
+
async shutdown() {
|
|
52735
|
+
}
|
|
52736
|
+
};
|
|
52737
|
+
}
|
|
52738
|
+
});
|
|
52739
|
+
|
|
52740
|
+
// src/enterprise/license/validator.ts
|
|
52741
|
+
var validator_exports = {};
|
|
52742
|
+
__export(validator_exports, {
|
|
52743
|
+
LicenseValidator: () => LicenseValidator
|
|
52744
|
+
});
|
|
52745
|
+
var crypto2, fs20, path24, LicenseValidator;
|
|
52746
|
+
var init_validator = __esm({
|
|
52747
|
+
"src/enterprise/license/validator.ts"() {
|
|
52748
|
+
"use strict";
|
|
52749
|
+
crypto2 = __toESM(require("crypto"));
|
|
52750
|
+
fs20 = __toESM(require("fs"));
|
|
52751
|
+
path24 = __toESM(require("path"));
|
|
52752
|
+
LicenseValidator = class _LicenseValidator {
|
|
52753
|
+
/** Ed25519 public key for license verification (PEM format). */
|
|
52754
|
+
static PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n-----END PUBLIC KEY-----\n";
|
|
52755
|
+
cache = null;
|
|
52756
|
+
static CACHE_TTL = 5 * 60 * 1e3;
|
|
52757
|
+
// 5 minutes
|
|
52758
|
+
static GRACE_PERIOD = 72 * 3600 * 1e3;
|
|
52759
|
+
// 72 hours after expiry
|
|
52760
|
+
/**
|
|
52761
|
+
* Load and validate license from environment or file.
|
|
52762
|
+
*
|
|
52763
|
+
* Resolution order:
|
|
52764
|
+
* 1. VISOR_LICENSE env var (JWT string)
|
|
52765
|
+
* 2. VISOR_LICENSE_FILE env var (path to file)
|
|
52766
|
+
* 3. .visor-license in project root (cwd)
|
|
52767
|
+
* 4. .visor-license in ~/.config/visor/
|
|
52768
|
+
*/
|
|
52769
|
+
async loadAndValidate() {
|
|
52770
|
+
if (this.cache && Date.now() - this.cache.validatedAt < _LicenseValidator.CACHE_TTL) {
|
|
52771
|
+
return this.cache.payload;
|
|
52772
|
+
}
|
|
52773
|
+
const token = this.resolveToken();
|
|
52774
|
+
if (!token) return null;
|
|
52775
|
+
const payload = this.verifyAndDecode(token);
|
|
52776
|
+
if (!payload) return null;
|
|
52777
|
+
this.cache = { payload, validatedAt: Date.now() };
|
|
52778
|
+
return payload;
|
|
52779
|
+
}
|
|
52780
|
+
/** Check if a specific feature is licensed */
|
|
52781
|
+
hasFeature(feature) {
|
|
52782
|
+
if (!this.cache) return false;
|
|
52783
|
+
return this.cache.payload.features.includes(feature);
|
|
52784
|
+
}
|
|
52785
|
+
/** Check if license is valid (with grace period) */
|
|
52786
|
+
isValid() {
|
|
52787
|
+
if (!this.cache) return false;
|
|
52788
|
+
const now = Date.now();
|
|
52789
|
+
const expiryMs = this.cache.payload.exp * 1e3;
|
|
52790
|
+
return now < expiryMs + _LicenseValidator.GRACE_PERIOD;
|
|
52791
|
+
}
|
|
52792
|
+
/** Check if the license is within its grace period (expired but still valid) */
|
|
52793
|
+
isInGracePeriod() {
|
|
52794
|
+
if (!this.cache) return false;
|
|
52795
|
+
const now = Date.now();
|
|
52796
|
+
const expiryMs = this.cache.payload.exp * 1e3;
|
|
52797
|
+
return now >= expiryMs && now < expiryMs + _LicenseValidator.GRACE_PERIOD;
|
|
52798
|
+
}
|
|
52799
|
+
resolveToken() {
|
|
52800
|
+
if (process.env.VISOR_LICENSE) {
|
|
52801
|
+
return process.env.VISOR_LICENSE.trim();
|
|
52802
|
+
}
|
|
52803
|
+
if (process.env.VISOR_LICENSE_FILE) {
|
|
52804
|
+
const resolved = path24.resolve(process.env.VISOR_LICENSE_FILE);
|
|
52805
|
+
const home2 = process.env.HOME || process.env.USERPROFILE || "";
|
|
52806
|
+
const allowedPrefixes = [path24.normalize(process.cwd())];
|
|
52807
|
+
if (home2) allowedPrefixes.push(path24.normalize(path24.join(home2, ".config", "visor")));
|
|
52808
|
+
let realPath;
|
|
52809
|
+
try {
|
|
52810
|
+
realPath = fs20.realpathSync(resolved);
|
|
52811
|
+
} catch {
|
|
52812
|
+
return null;
|
|
52813
|
+
}
|
|
52814
|
+
const isSafe = allowedPrefixes.some(
|
|
52815
|
+
(prefix) => realPath === prefix || realPath.startsWith(prefix + path24.sep)
|
|
52816
|
+
);
|
|
52817
|
+
if (!isSafe) return null;
|
|
52818
|
+
return this.readFile(realPath);
|
|
52819
|
+
}
|
|
52820
|
+
const cwdPath = path24.join(process.cwd(), ".visor-license");
|
|
52821
|
+
const cwdToken = this.readFile(cwdPath);
|
|
52822
|
+
if (cwdToken) return cwdToken;
|
|
52823
|
+
const home = process.env.HOME || process.env.USERPROFILE || "";
|
|
52824
|
+
if (home) {
|
|
52825
|
+
const configPath = path24.join(home, ".config", "visor", ".visor-license");
|
|
52826
|
+
const configToken = this.readFile(configPath);
|
|
52827
|
+
if (configToken) return configToken;
|
|
52828
|
+
}
|
|
52829
|
+
return null;
|
|
52830
|
+
}
|
|
52831
|
+
readFile(filePath) {
|
|
52832
|
+
try {
|
|
52833
|
+
return fs20.readFileSync(filePath, "utf-8").trim();
|
|
52834
|
+
} catch {
|
|
52835
|
+
return null;
|
|
52836
|
+
}
|
|
52837
|
+
}
|
|
52838
|
+
verifyAndDecode(token) {
|
|
52839
|
+
try {
|
|
52840
|
+
const parts = token.split(".");
|
|
52841
|
+
if (parts.length !== 3) return null;
|
|
52842
|
+
const [headerB64, payloadB64, signatureB64] = parts;
|
|
52843
|
+
const header = JSON.parse(Buffer.from(headerB64, "base64url").toString());
|
|
52844
|
+
if (header.alg !== "EdDSA") return null;
|
|
52845
|
+
const data = `${headerB64}.${payloadB64}`;
|
|
52846
|
+
const signature = Buffer.from(signatureB64, "base64url");
|
|
52847
|
+
const publicKey = crypto2.createPublicKey(_LicenseValidator.PUBLIC_KEY);
|
|
52848
|
+
if (publicKey.asymmetricKeyType !== "ed25519") {
|
|
52849
|
+
return null;
|
|
52850
|
+
}
|
|
52851
|
+
const isValid = crypto2.verify(null, Buffer.from(data), publicKey, signature);
|
|
52852
|
+
if (!isValid) return null;
|
|
52853
|
+
const payload = JSON.parse(Buffer.from(payloadB64, "base64url").toString());
|
|
52854
|
+
if (!payload.org || !Array.isArray(payload.features) || typeof payload.exp !== "number" || typeof payload.iat !== "number" || !payload.sub) {
|
|
52855
|
+
return null;
|
|
52856
|
+
}
|
|
52857
|
+
const now = Date.now();
|
|
52858
|
+
const expiryMs = payload.exp * 1e3;
|
|
52859
|
+
if (now >= expiryMs + _LicenseValidator.GRACE_PERIOD) {
|
|
52860
|
+
return null;
|
|
52861
|
+
}
|
|
52862
|
+
return payload;
|
|
52863
|
+
} catch {
|
|
52864
|
+
return null;
|
|
52865
|
+
}
|
|
52866
|
+
}
|
|
52867
|
+
};
|
|
52868
|
+
}
|
|
52869
|
+
});
|
|
52870
|
+
|
|
52871
|
+
// src/enterprise/policy/opa-compiler.ts
|
|
52872
|
+
var fs21, path25, os2, crypto3, import_child_process8, OpaCompiler;
|
|
52873
|
+
var init_opa_compiler = __esm({
|
|
52874
|
+
"src/enterprise/policy/opa-compiler.ts"() {
|
|
52875
|
+
"use strict";
|
|
52876
|
+
fs21 = __toESM(require("fs"));
|
|
52877
|
+
path25 = __toESM(require("path"));
|
|
52878
|
+
os2 = __toESM(require("os"));
|
|
52879
|
+
crypto3 = __toESM(require("crypto"));
|
|
52880
|
+
import_child_process8 = require("child_process");
|
|
52881
|
+
OpaCompiler = class _OpaCompiler {
|
|
52882
|
+
static CACHE_DIR = path25.join(os2.tmpdir(), "visor-opa-cache");
|
|
52883
|
+
/**
|
|
52884
|
+
* Resolve the input paths to WASM bytes.
|
|
52885
|
+
*
|
|
52886
|
+
* Strategy:
|
|
52887
|
+
* 1. If any path is a .wasm file, read it directly
|
|
52888
|
+
* 2. If a directory contains policy.wasm, read it
|
|
52889
|
+
* 3. Otherwise, collect all .rego files and auto-compile via `opa build`
|
|
52890
|
+
*/
|
|
52891
|
+
async resolveWasmBytes(paths) {
|
|
52892
|
+
const regoFiles = [];
|
|
52893
|
+
for (const p of paths) {
|
|
52894
|
+
const resolved = path25.resolve(p);
|
|
52895
|
+
if (path25.normalize(resolved).includes("..")) {
|
|
52896
|
+
throw new Error(`Policy path contains traversal sequences: ${p}`);
|
|
52897
|
+
}
|
|
52898
|
+
if (resolved.endsWith(".wasm") && fs21.existsSync(resolved)) {
|
|
52899
|
+
return fs21.readFileSync(resolved);
|
|
52900
|
+
}
|
|
52901
|
+
if (!fs21.existsSync(resolved)) continue;
|
|
52902
|
+
const stat2 = fs21.statSync(resolved);
|
|
52903
|
+
if (stat2.isDirectory()) {
|
|
52904
|
+
const wasmCandidate = path25.join(resolved, "policy.wasm");
|
|
52905
|
+
if (fs21.existsSync(wasmCandidate)) {
|
|
52906
|
+
return fs21.readFileSync(wasmCandidate);
|
|
52907
|
+
}
|
|
52908
|
+
const files = fs21.readdirSync(resolved);
|
|
52909
|
+
for (const f of files) {
|
|
52910
|
+
if (f.endsWith(".rego")) {
|
|
52911
|
+
regoFiles.push(path25.join(resolved, f));
|
|
52912
|
+
}
|
|
52913
|
+
}
|
|
52914
|
+
} else if (resolved.endsWith(".rego")) {
|
|
52915
|
+
regoFiles.push(resolved);
|
|
52916
|
+
}
|
|
52917
|
+
}
|
|
52918
|
+
if (regoFiles.length === 0) {
|
|
52919
|
+
throw new Error(
|
|
52920
|
+
`OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(", ")}`
|
|
52921
|
+
);
|
|
52922
|
+
}
|
|
52923
|
+
return this.compileRego(regoFiles);
|
|
52924
|
+
}
|
|
52925
|
+
/**
|
|
52926
|
+
* Auto-compile .rego files to a WASM bundle using the `opa` CLI.
|
|
52927
|
+
*
|
|
52928
|
+
* Caches the compiled bundle based on a content hash of all input .rego files
|
|
52929
|
+
* so subsequent runs skip compilation if policies haven't changed.
|
|
52930
|
+
*/
|
|
52931
|
+
compileRego(regoFiles) {
|
|
52932
|
+
try {
|
|
52933
|
+
(0, import_child_process8.execFileSync)("opa", ["version"], { stdio: "pipe" });
|
|
52934
|
+
} catch {
|
|
52935
|
+
throw new Error(
|
|
52936
|
+
"OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\nOr pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz " + regoFiles.join(" ")
|
|
52937
|
+
);
|
|
52938
|
+
}
|
|
52939
|
+
const hash = crypto3.createHash("sha256");
|
|
52940
|
+
for (const f of regoFiles.sort()) {
|
|
52941
|
+
hash.update(fs21.readFileSync(f));
|
|
52942
|
+
hash.update(f);
|
|
52943
|
+
}
|
|
52944
|
+
const cacheKey = hash.digest("hex").slice(0, 16);
|
|
52945
|
+
const cacheDir = _OpaCompiler.CACHE_DIR;
|
|
52946
|
+
const cachedWasm = path25.join(cacheDir, `${cacheKey}.wasm`);
|
|
52947
|
+
if (fs21.existsSync(cachedWasm)) {
|
|
52948
|
+
return fs21.readFileSync(cachedWasm);
|
|
52949
|
+
}
|
|
52950
|
+
fs21.mkdirSync(cacheDir, { recursive: true });
|
|
52951
|
+
const bundleTar = path25.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
|
|
52952
|
+
try {
|
|
52953
|
+
const args = [
|
|
52954
|
+
"build",
|
|
52955
|
+
"-t",
|
|
52956
|
+
"wasm",
|
|
52957
|
+
"-e",
|
|
52958
|
+
"visor",
|
|
52959
|
+
// entrypoint: the visor package tree
|
|
52960
|
+
"-o",
|
|
52961
|
+
bundleTar,
|
|
52962
|
+
...regoFiles
|
|
52963
|
+
];
|
|
52964
|
+
(0, import_child_process8.execFileSync)("opa", args, {
|
|
52965
|
+
stdio: "pipe",
|
|
52966
|
+
timeout: 3e4
|
|
52967
|
+
});
|
|
52968
|
+
} catch (err) {
|
|
52969
|
+
const stderr = err?.stderr?.toString() || "";
|
|
52970
|
+
throw new Error(
|
|
52971
|
+
`Failed to compile .rego files to WASM:
|
|
52972
|
+
${stderr}
|
|
52973
|
+
Ensure your .rego files are valid and the \`opa\` CLI is installed.`
|
|
52974
|
+
);
|
|
52975
|
+
}
|
|
52976
|
+
try {
|
|
52977
|
+
(0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "/policy.wasm"], {
|
|
52978
|
+
stdio: "pipe"
|
|
52979
|
+
});
|
|
52980
|
+
const extractedWasm = path25.join(cacheDir, "policy.wasm");
|
|
52981
|
+
if (fs21.existsSync(extractedWasm)) {
|
|
52982
|
+
fs21.renameSync(extractedWasm, cachedWasm);
|
|
52983
|
+
}
|
|
52984
|
+
} catch {
|
|
52985
|
+
try {
|
|
52986
|
+
(0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "policy.wasm"], {
|
|
52987
|
+
stdio: "pipe"
|
|
52988
|
+
});
|
|
52989
|
+
const extractedWasm = path25.join(cacheDir, "policy.wasm");
|
|
52990
|
+
if (fs21.existsSync(extractedWasm)) {
|
|
52991
|
+
fs21.renameSync(extractedWasm, cachedWasm);
|
|
52992
|
+
}
|
|
52993
|
+
} catch (err2) {
|
|
52994
|
+
throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
|
|
52995
|
+
}
|
|
52996
|
+
}
|
|
52997
|
+
try {
|
|
52998
|
+
fs21.unlinkSync(bundleTar);
|
|
52999
|
+
} catch {
|
|
53000
|
+
}
|
|
53001
|
+
if (!fs21.existsSync(cachedWasm)) {
|
|
53002
|
+
throw new Error("OPA build succeeded but policy.wasm was not found in the bundle");
|
|
53003
|
+
}
|
|
53004
|
+
return fs21.readFileSync(cachedWasm);
|
|
53005
|
+
}
|
|
53006
|
+
};
|
|
53007
|
+
}
|
|
53008
|
+
});
|
|
53009
|
+
|
|
53010
|
+
// src/enterprise/policy/opa-wasm-evaluator.ts
// Module state is assigned lazily inside the __esm initializer below.
var fs22, path26, OpaWasmEvaluator;
var init_opa_wasm_evaluator = __esm({
  "src/enterprise/policy/opa-wasm-evaluator.ts"() {
    "use strict";
    fs22 = __toESM(require("fs"));
    path26 = __toESM(require("path"));
    init_opa_compiler();
    // Evaluates OPA policies compiled to WebAssembly, fully in-process.
    // Requires the optional dependency @open-policy-agent/opa-wasm at runtime.
    OpaWasmEvaluator = class {
      // Loaded WASM policy instance; null until initialize() succeeds.
      policy = null;
      // External data document handed to the policy via setData() on each evaluate().
      dataDocument = {};
      compiler = new OpaCompiler();
      /**
       * Resolve the given .wasm/.rego path(s) to WASM bytes via OpaCompiler and
       * load them with @open-policy-agent/opa-wasm.
       *
       * @param rulesPath - A single path or array of paths to policy files.
       * @throws a descriptive install hint when @open-policy-agent/opa-wasm is
       *         not installed; rethrows any other load failure unchanged.
       */
      async initialize(rulesPath) {
        const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
        const wasmBytes = await this.compiler.resolveWasmBytes(paths);
        try {
          // createRequire(__filename) resolves the optional dependency relative
          // to the bundled file's location rather than the bundler's require.
          const { createRequire } = require("module");
          const runtimeRequire = createRequire(__filename);
          const opaWasm = runtimeRequire("@open-policy-agent/opa-wasm");
          // The package may expose loadPolicy directly or under a default export.
          const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
          if (!loadPolicy) {
            throw new Error("loadPolicy not found in @open-policy-agent/opa-wasm");
          }
          this.policy = await loadPolicy(wasmBytes);
        } catch (err) {
          // Only a missing module gets the install hint; everything else bubbles up.
          if (err?.code === "MODULE_NOT_FOUND" || err?.code === "ERR_MODULE_NOT_FOUND") {
            throw new Error(
              "OPA WASM evaluator requires @open-policy-agent/opa-wasm. Install it with: npm install @open-policy-agent/opa-wasm"
            );
          }
          throw err;
        }
      }
      /**
       * Load external data from a JSON file to use as the OPA data document.
       * The loaded data will be passed to `policy.setData()` during evaluation,
       * making it available in Rego via `data.<key>`.
       *
       * @throws on traversal-looking paths, missing files, files over 10MB,
       *         unparseable JSON, or JSON that is not a plain object.
       */
      loadData(dataPath) {
        const resolved = path26.resolve(dataPath);
        if (path26.normalize(resolved).includes("..")) {
          throw new Error(`Data path contains traversal sequences: ${dataPath}`);
        }
        if (!fs22.existsSync(resolved)) {
          throw new Error(`OPA data file not found: ${resolved}`);
        }
        // Size cap guards against accidentally loading huge files into memory.
        const stat2 = fs22.statSync(resolved);
        if (stat2.size > 10 * 1024 * 1024) {
          throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat2.size} bytes)`);
        }
        const raw = fs22.readFileSync(resolved, "utf-8");
        try {
          const parsed = JSON.parse(raw);
          // OPA's data document must be an object map, not an array/primitive.
          if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
            throw new Error("OPA data file must contain a JSON object (not an array or primitive)");
          }
          this.dataDocument = parsed;
        } catch (err) {
          // Re-throw our own shape error verbatim; wrap JSON.parse failures.
          if (err.message.startsWith("OPA data file must")) {
            throw err;
          }
          throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
        }
      }
      /**
       * Evaluate the loaded policy against an input document.
       *
       * @returns the first result-set entry's `result`, or undefined when the
       *          policy produces an empty result set.
       * @throws if called before initialize().
       */
      async evaluate(input) {
        if (!this.policy) {
          throw new Error("OPA WASM evaluator not initialized");
        }
        // Data is (re)applied on every call so loadData() after initialize() works.
        this.policy.setData(this.dataDocument);
        const resultSet = this.policy.evaluate(input);
        if (Array.isArray(resultSet) && resultSet.length > 0) {
          return resultSet[0].result;
        }
        return void 0;
      }
      // Release the WASM policy. Different opa-wasm versions expose close() or
      // free(); both are tried best-effort and cleanup errors are ignored.
      async shutdown() {
        if (this.policy) {
          if (typeof this.policy.close === "function") {
            try {
              this.policy.close();
            } catch {
            }
          } else if (typeof this.policy.free === "function") {
            try {
              this.policy.free();
            } catch {
            }
          }
        }
        this.policy = null;
      }
    };
  }
});
|
|
53104
|
+
|
|
53105
|
+
// src/enterprise/policy/opa-http-evaluator.ts
var OpaHttpEvaluator;
var init_opa_http_evaluator = __esm({
  "src/enterprise/policy/opa-http-evaluator.ts"() {
    "use strict";
    // Evaluates policies against a remote OPA server via its REST API (/v1/data).
    OpaHttpEvaluator = class {
      baseUrl;
      timeout;
      /**
       * @param baseUrl - Base URL of the OPA server; must be http(s) and must
       *                  not point at loopback/private/metadata addresses.
       * @param timeout - Per-request timeout in milliseconds (default 5000).
       * @throws on malformed URLs, disallowed schemes, or SSRF-blocked hosts.
       */
      constructor(baseUrl, timeout = 5e3) {
        let parsed;
        try {
          parsed = new URL(baseUrl);
        } catch {
          throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
        }
        if (!["http:", "https:"].includes(parsed.protocol)) {
          throw new Error(
            `OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`
          );
        }
        const hostname = parsed.hostname;
        if (this.isBlockedHostname(hostname)) {
          throw new Error(
            `OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`
          );
        }
        // Strip trailing slashes so the /v1/data join below yields a single "/".
        this.baseUrl = baseUrl.replace(/\/+$/, "");
        this.timeout = timeout;
      }
      /**
       * Check if a hostname is blocked due to SSRF concerns.
       *
       * Blocks:
       * - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
       * - Link-local addresses (169.254.x.x, fe80::/10 literals)
       * - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
       * - IPv6 unique local address literals (fc00::/7)
       * - Cloud metadata services (*.internal)
       */
      isBlockedHostname(hostname) {
        if (!hostname) return true;
        // Lowercase and strip the brackets URL parsing leaves around IPv6 literals.
        const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, "");
        if (normalized === "metadata.google.internal" || normalized.endsWith(".internal")) {
          return true;
        }
        if (normalized === "localhost" || normalized === "localhost.localdomain") {
          return true;
        }
        if (normalized === "::1" || normalized === "0:0:0:0:0:0:0:1") {
          return true;
        }
        const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
        const ipv4Match = normalized.match(ipv4Pattern);
        if (ipv4Match) {
          const octets = ipv4Match.slice(1, 5).map(Number);
          // Not a valid IPv4 address; treat as an ordinary hostname (not blocked).
          if (octets.some((octet) => octet > 255)) {
            return false;
          }
          const [a, b] = octets;
          if (a === 127) {
            return true; // loopback
          }
          if (a === 0) {
            return true; // "this network" (0.0.0.0 etc.)
          }
          if (a === 169 && b === 254) {
            return true; // link-local / cloud metadata
          }
          if (a === 10) {
            return true; // RFC 1918 private
          }
          if (a === 172 && b >= 16 && b <= 31) {
            return true; // RFC 1918 private
          }
          if (a === 192 && b === 168) {
            return true; // RFC 1918 private
          }
        }
        // FIX: the IPv6 prefix checks only apply to IPv6 literals (which always
        // contain ":"). Previously any DNS name starting with "fc"/"fd" (e.g.
        // "fcc.gov", "fdn.example.com") was wrongly blocked.
        if (normalized.includes(":")) {
          if (normalized.startsWith("fd") || normalized.startsWith("fc")) {
            return true; // unique local addresses (fc00::/7)
          }
          if (normalized.startsWith("fe80:")) {
            return true; // IPv6 link-local
          }
        }
        return false;
      }
      /**
       * Evaluate a policy rule against an input document via OPA REST API.
       *
       * @param input - The input document to evaluate
       * @param rulePath - OPA rule path (e.g., 'visor/check/execute')
       * @returns The `result` field of the OPA response (undefined if absent)
       * @throws on HTTP errors, timeouts (AbortError), or unparseable responses
       */
      async evaluate(input, rulePath) {
        // Encode each segment so rule names cannot inject URL syntax.
        const encodedPath = rulePath.split("/").map((s) => encodeURIComponent(s)).join("/");
        const url = `${this.baseUrl}/v1/data/${encodedPath}`;
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), this.timeout);
        try {
          const response = await fetch(url, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ input }),
            signal: controller.signal
          });
          if (!response.ok) {
            throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
          }
          let body;
          try {
            body = await response.json();
          } catch (jsonErr) {
            throw new Error(
              `OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`
            );
          }
          return body?.result;
        } finally {
          clearTimeout(timer);
        }
      }
      // No persistent resources to release for the HTTP transport.
      async shutdown() {
      }
    };
  }
});
|
|
53231
|
+
|
|
53232
|
+
// src/enterprise/policy/policy-input-builder.ts
var PolicyInputBuilder;
var init_policy_input_builder = __esm({
  "src/enterprise/policy/policy-input-builder.ts"() {
    "use strict";
    // Builds the `input` documents handed to the OPA policy engine for each
    // decision scope (check execution, tool invocation, capability resolve).
    PolicyInputBuilder = class {
      roles;
      actor;
      repository;
      pullRequest;
      constructor(policyConfig, actor, repository, pullRequest) {
        this.roles = policyConfig.roles || {};
        this.actor = actor;
        this.repository = repository;
        this.pullRequest = pullRequest;
      }
      /** Resolve which roles apply to the current actor. */
      resolveRoles() {
        const matched = [];
        for (const [roleName, roleConfig] of Object.entries(this.roles)) {
          if (!this.matchesIdentity(roleConfig)) {
            continue;
          }
          if (!this.matchesChannelGate(roleConfig)) {
            continue;
          }
          matched.push(roleName);
        }
        return matched;
      }
      // True when the actor matches any of the role's identity selectors
      // (checked in order: author association, GitHub login, Slack user, email).
      matchesIdentity(roleConfig) {
        const { authorAssociation, login, slack } = this.actor;
        if (authorAssociation && roleConfig.author_association && roleConfig.author_association.includes(authorAssociation)) {
          return true;
        }
        if (login && roleConfig.users && roleConfig.users.includes(login)) {
          return true;
        }
        if (slack?.userId && roleConfig.slack_users && roleConfig.slack_users.includes(slack.userId)) {
          return true;
        }
        if (slack?.email && roleConfig.emails) {
          const actorEmail = slack.email.toLowerCase();
          return roleConfig.emails.some((e) => e.toLowerCase() === actorEmail);
        }
        return false;
      }
      // When the role restricts Slack channels, the actor's current channel
      // must be one of them; an empty/absent list means no restriction.
      matchesChannelGate(roleConfig) {
        const channels = roleConfig.slack_channels;
        if (!channels || channels.length === 0) {
          return true;
        }
        const channelId = this.actor.slack?.channelId;
        return Boolean(channelId) && channels.includes(channelId);
      }
      // Actor snapshot embedded in every policy input; `slack` is only present
      // when the actor carries Slack context.
      buildActor() {
        const snapshot = {
          authorAssociation: this.actor.authorAssociation,
          login: this.actor.login,
          roles: this.resolveRoles(),
          isLocalMode: this.actor.isLocalMode
        };
        if (this.actor.slack) {
          snapshot.slack = this.actor.slack;
        }
        return snapshot;
      }
      // Input document for the check.execute decision scope.
      forCheckExecution(check) {
        const checkDoc = {
          id: check.id,
          type: check.type,
          group: check.group,
          tags: check.tags,
          criticality: check.criticality,
          sandbox: check.sandbox,
          policy: check.policy
        };
        return {
          scope: "check.execute",
          check: checkDoc,
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      // Input document for the tool.invoke decision scope.
      forToolInvocation(serverName, methodName, transport) {
        return {
          scope: "tool.invoke",
          tool: { serverName, methodName, transport },
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      // Input document for the capability.resolve decision scope.
      forCapabilityResolve(checkId, capabilities) {
        return {
          scope: "capability.resolve",
          check: { id: checkId, type: "ai" },
          capability: capabilities,
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
    };
  }
});
|
|
53326
|
+
|
|
53327
|
+
// src/enterprise/policy/opa-policy-engine.ts
var opa_policy_engine_exports = {};
__export(opa_policy_engine_exports, {
  OpaPolicyEngine: () => OpaPolicyEngine
});
var OpaPolicyEngine;
var init_opa_policy_engine = __esm({
  "src/enterprise/policy/opa-policy-engine.ts"() {
    "use strict";
    init_opa_wasm_evaluator();
    init_opa_http_evaluator();
    init_policy_input_builder();
    // Orchestrates policy decisions: builds inputs, dispatches to the local
    // (WASM) or remote (HTTP) evaluator, applies timeout and fallback rules.
    OpaPolicyEngine = class {
      // Active evaluator; null means "no policy engine configured" (allow all).
      evaluator = null;
      // What to do when evaluation fails or yields no result: "deny" | "allow" | "warn".
      fallback;
      timeout;
      config;
      inputBuilder = null;
      logger = null;
      constructor(config) {
        this.config = config;
        this.fallback = config.fallback || "deny";
        this.timeout = config.timeout || 5e3;
      }
      /**
       * Build the initial actor/repo/PR context from environment variables and
       * construct the evaluator selected by `config.engine` ("local"/"remote").
       * Any other engine value leaves the evaluator null (policy disabled).
       */
      async initialize(config) {
        try {
          // Logger is loaded lazily and is optional; failures are ignored.
          this.logger = (init_logger(), __toCommonJS(logger_exports)).logger;
        } catch {
        }
        const actor = {
          authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
          login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
          // Local mode = not running inside GitHub Actions.
          isLocalMode: !process.env.GITHUB_ACTIONS
        };
        const repo = {
          owner: process.env.GITHUB_REPOSITORY_OWNER,
          name: process.env.GITHUB_REPOSITORY?.split("/")[1],
          branch: process.env.GITHUB_HEAD_REF,
          baseBranch: process.env.GITHUB_BASE_REF,
          event: process.env.GITHUB_EVENT_NAME
        };
        // Only a finite parsed value becomes the PR number; NaN is dropped.
        const prNum = process.env.GITHUB_PR_NUMBER ? parseInt(process.env.GITHUB_PR_NUMBER, 10) : void 0;
        const pullRequest = {
          number: prNum !== void 0 && Number.isFinite(prNum) ? prNum : void 0
        };
        this.inputBuilder = new PolicyInputBuilder(config, actor, repo, pullRequest);
        if (config.engine === "local") {
          if (!config.rules) {
            throw new Error("OPA local mode requires `policy.rules` path to .wasm or .rego files");
          }
          const wasm = new OpaWasmEvaluator();
          await wasm.initialize(config.rules);
          if (config.data) {
            wasm.loadData(config.data);
          }
          this.evaluator = wasm;
        } else if (config.engine === "remote") {
          if (!config.url) {
            throw new Error("OPA remote mode requires `policy.url` pointing to OPA server");
          }
          this.evaluator = new OpaHttpEvaluator(config.url, this.timeout);
        } else {
          this.evaluator = null;
        }
      }
      /**
       * Update actor/repo/PR context (e.g., after PR info becomes available).
       * Called by the enterprise loader when engine context is enriched.
       */
      setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new PolicyInputBuilder(this.config, actor, repo, pullRequest);
      }
      // Decide whether a check may run. Per-check `policy.rule` may override the
      // default rule path. Allows unconditionally when no engine is configured.
      async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        // Tolerate non-object configs by treating them as empty.
        const cfg = checkConfig && typeof checkConfig === "object" ? checkConfig : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
          id: checkId,
          type: cfg.type || "ai",
          group: cfg.group,
          tags: cfg.tags,
          criticality: cfg.criticality,
          sandbox: cfg.sandbox,
          policy: policyOverride
        });
        return this.doEvaluate(input, this.resolveRulePath("check.execute", policyOverride?.rule));
      }
      // Decide whether an MCP tool call is permitted.
      async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, "visor/tool/invoke");
      }
      // Decide which capabilities a check may resolve.
      async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, "visor/capability/resolve");
      }
      // Release the evaluator and reset state; safe to call multiple times.
      async shutdown() {
        if (this.evaluator && "shutdown" in this.evaluator) {
          await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
      }
      // Normalize a rule path: overrides are prefixed with "visor/" if missing,
      // and dotted default scopes become slash-separated ("check.execute" ->
      // "visor/check/execute").
      resolveRulePath(defaultScope, override) {
        if (override) {
          return override.startsWith("visor/") ? override : `visor/${override}`;
        }
        return `visor/${defaultScope.replace(/\./g, "/")}`;
      }
      /**
       * Run one evaluation with a timeout and apply fallback semantics:
       * - fallback "warn": denials become allowed-with-warning ("audit: ...").
       * - any thrown error (including timeout): allow iff fallback is
       *   "allow"/"warn", never rethrow.
       */
      async doEvaluate(input, rulePath) {
        try {
          this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
          let timer;
          const timeoutPromise = new Promise((_resolve, reject) => {
            timer = setTimeout(() => reject(new Error("policy evaluation timeout")), this.timeout);
          });
          try {
            // Race the evaluator against the timeout; the timer is always
            // cleared in `finally` so the loser cannot fire later.
            const result = await Promise.race([this.rawEvaluate(input, rulePath), timeoutPromise]);
            const decision = this.parseDecision(result);
            if (!decision.allowed && this.fallback === "warn") {
              decision.allowed = true;
              decision.warn = true;
              decision.reason = `audit: ${decision.reason || "policy denied"}`;
            }
            this.logger?.debug(
              `[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || "none"}`
            );
            return decision;
          } finally {
            if (timer) clearTimeout(timer);
          }
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: `policy evaluation failed, fallback=${this.fallback}`
          };
        }
      }
      // Dispatch to the active evaluator. The WASM evaluator returns the whole
      // package tree, so its result is navigated down to the rule; the HTTP
      // evaluator takes the rule path directly.
      async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof OpaWasmEvaluator) {
          const result = await this.evaluator.evaluate(input);
          return this.navigateWasmResult(result, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
      }
      /**
       * Navigate nested OPA WASM result tree to reach the specific rule's output.
       * The WASM entrypoint `-e visor` means the result root IS the visor package,
       * so we strip the `visor/` prefix and walk the remaining segments.
       */
      navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== "object") return result;
        const segments = rulePath.replace(/^visor\//, "").split("/");
        let current = result;
        for (const seg of segments) {
          if (current && typeof current === "object" && seg in current) {
            current = current[seg];
          } else {
            // Missing segment: the rule produced no output.
            return void 0;
          }
        }
        return current;
      }
      // Convert a raw rule result into a decision object. Absent results fall
      // back per configuration; `allowed` defaults to true unless explicitly false.
      parseDecision(result) {
        if (result === void 0 || result === null) {
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: this.fallback === "warn" ? "audit: no policy result" : "no policy result"
          };
        }
        const allowed = result.allowed !== false;
        const decision = {
          allowed,
          reason: result.reason
        };
        if (result.capabilities) {
          decision.capabilities = result.capabilities;
        }
        return decision;
      }
    };
  }
});
|
|
53515
|
+
|
|
53516
|
+
// src/enterprise/scheduler/knex-store.ts
// Bundler export map for the knex-store module (KnexStoreBackend is assigned
// by init_knex_store below).
var knex_store_exports = {};
__export(knex_store_exports, {
  KnexStoreBackend: () => KnexStoreBackend
});
|
|
53521
|
+
// Normalize a numeric DB column: some drivers return bigint columns as
// strings. null/undefined map to undefined; strings are parsed base-10.
function toNum(val) {
  if (val == null) {
    return void 0;
  }
  if (typeof val === "string") {
    return parseInt(val, 10);
  }
  return val;
}
|
|
53525
|
+
// Parse a JSON text column, mapping empty/null values and malformed JSON
// alike to undefined instead of throwing.
function safeJsonParse2(value) {
  if (value) {
    try {
      return JSON.parse(value);
    } catch {
      // fall through — treat unparseable text as absent
    }
  }
  return void 0;
}
|
|
53533
|
+
// Map a snake_case schedules row into the camelCase Schedule shape used in
// code. Numeric columns go through toNum (drivers may return strings),
// JSON text columns through safeJsonParse2, and nullable text columns
// become undefined rather than null.
function fromDbRow2(row) {
  const {
    id,
    creator_id,
    creator_context,
    creator_name,
    timezone,
    schedule_expr,
    run_at,
    is_recurring,
    original_expression,
    workflow,
    workflow_inputs,
    output_context,
    status,
    created_at,
    last_run_at,
    next_run_at,
    run_count,
    failure_count,
    last_error,
    previous_response
  } = row;
  return {
    id,
    creatorId: creator_id,
    creatorContext: creator_context ?? void 0,
    creatorName: creator_name ?? void 0,
    timezone,
    schedule: schedule_expr,
    runAt: toNum(run_at),
    // Booleans may surface as 1/0 depending on the driver.
    isRecurring: is_recurring === true || is_recurring === 1,
    originalExpression: original_expression,
    workflow: workflow ?? void 0,
    workflowInputs: safeJsonParse2(workflow_inputs),
    outputContext: safeJsonParse2(output_context),
    status,
    createdAt: toNum(created_at),
    lastRunAt: toNum(last_run_at),
    nextRunAt: toNum(next_run_at),
    runCount: run_count,
    failureCount: failure_count,
    lastError: last_error ?? void 0,
    previousResponse: previous_response ?? void 0
  };
}
|
|
53557
|
+
// Inverse of fromDbRow2: flatten a Schedule into a snake_case row for
// insertion/update. Optional fields become NULL; structured fields are
// JSON-serialized (falsy values serialize to NULL).
function toInsertRow(schedule) {
  const serialize = (value) => value ? JSON.stringify(value) : null;
  const orNull = (value) => value ?? null;
  return {
    id: schedule.id,
    creator_id: schedule.creatorId,
    creator_context: orNull(schedule.creatorContext),
    creator_name: orNull(schedule.creatorName),
    timezone: schedule.timezone,
    schedule_expr: schedule.schedule,
    run_at: orNull(schedule.runAt),
    is_recurring: schedule.isRecurring,
    original_expression: schedule.originalExpression,
    workflow: orNull(schedule.workflow),
    workflow_inputs: serialize(schedule.workflowInputs),
    output_context: serialize(schedule.outputContext),
    status: schedule.status,
    created_at: schedule.createdAt,
    last_run_at: orNull(schedule.lastRunAt),
    next_run_at: orNull(schedule.nextRunAt),
    run_count: schedule.runCount,
    failure_count: schedule.failureCount,
    last_error: orNull(schedule.lastError),
    previous_response: orNull(schedule.previousResponse)
  };
}
|
|
53581
|
+
var fs23, path27, import_uuid2, KnexStoreBackend;
|
|
53582
|
+
var init_knex_store = __esm({
|
|
53583
|
+
"src/enterprise/scheduler/knex-store.ts"() {
|
|
53584
|
+
"use strict";
|
|
53585
|
+
fs23 = __toESM(require("fs"));
|
|
53586
|
+
path27 = __toESM(require("path"));
|
|
53587
|
+
import_uuid2 = require("uuid");
|
|
53588
|
+
init_logger();
|
|
53589
|
+
KnexStoreBackend = class {
|
|
53590
|
+
knex = null;
|
|
53591
|
+
driver;
|
|
53592
|
+
connection;
|
|
53593
|
+
constructor(driver, storageConfig, _haConfig) {
|
|
53594
|
+
this.driver = driver;
|
|
53595
|
+
this.connection = storageConfig.connection || {};
|
|
53596
|
+
}
|
|
53597
|
+
async initialize() {
|
|
53598
|
+
const { createRequire } = require("module");
|
|
53599
|
+
const runtimeRequire = createRequire(__filename);
|
|
53600
|
+
let knexFactory;
|
|
53601
|
+
try {
|
|
53602
|
+
knexFactory = runtimeRequire("knex");
|
|
53603
|
+
} catch (err) {
|
|
53604
|
+
const code = err?.code;
|
|
53605
|
+
if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
|
|
53606
|
+
throw new Error(
|
|
53607
|
+
"knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
|
|
53608
|
+
);
|
|
53609
|
+
}
|
|
53610
|
+
throw err;
|
|
53611
|
+
}
|
|
53612
|
+
const clientMap = {
|
|
53613
|
+
postgresql: "pg",
|
|
53614
|
+
mysql: "mysql2",
|
|
53615
|
+
mssql: "tedious"
|
|
53616
|
+
};
|
|
53617
|
+
const client = clientMap[this.driver];
|
|
53618
|
+
let connection;
|
|
53619
|
+
if (this.connection.connection_string) {
|
|
53620
|
+
connection = this.connection.connection_string;
|
|
53621
|
+
} else if (this.driver === "mssql") {
|
|
53622
|
+
connection = this.buildMssqlConnection();
|
|
53623
|
+
} else {
|
|
53624
|
+
connection = this.buildStandardConnection();
|
|
53625
|
+
}
|
|
53626
|
+
this.knex = knexFactory({
|
|
53627
|
+
client,
|
|
53628
|
+
connection,
|
|
53629
|
+
pool: {
|
|
53630
|
+
min: this.connection.pool?.min ?? 0,
|
|
53631
|
+
max: this.connection.pool?.max ?? 10
|
|
53632
|
+
}
|
|
53633
|
+
});
|
|
53634
|
+
await this.migrateSchema();
|
|
53635
|
+
logger.info(`[KnexStore] Initialized (${this.driver})`);
|
|
53636
|
+
}
|
|
53637
|
+
buildStandardConnection() {
|
|
53638
|
+
return {
|
|
53639
|
+
host: this.connection.host || "localhost",
|
|
53640
|
+
port: this.connection.port,
|
|
53641
|
+
database: this.connection.database || "visor",
|
|
53642
|
+
user: this.connection.user,
|
|
53643
|
+
password: this.connection.password,
|
|
53644
|
+
ssl: this.resolveSslConfig()
|
|
53645
|
+
};
|
|
53646
|
+
}
|
|
53647
|
+
buildMssqlConnection() {
|
|
53648
|
+
const ssl = this.connection.ssl;
|
|
53649
|
+
const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
|
|
53650
|
+
return {
|
|
53651
|
+
server: this.connection.host || "localhost",
|
|
53652
|
+
port: this.connection.port,
|
|
53653
|
+
database: this.connection.database || "visor",
|
|
53654
|
+
user: this.connection.user,
|
|
53655
|
+
password: this.connection.password,
|
|
53656
|
+
options: {
|
|
53657
|
+
encrypt: sslEnabled,
|
|
53658
|
+
trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
|
|
53659
|
+
}
|
|
53660
|
+
};
|
|
53661
|
+
}
|
|
53662
|
+
resolveSslConfig() {
|
|
53663
|
+
const ssl = this.connection.ssl;
|
|
53664
|
+
if (ssl === false || ssl === void 0) return false;
|
|
53665
|
+
if (ssl === true) return { rejectUnauthorized: true };
|
|
53666
|
+
if (ssl.enabled === false) return false;
|
|
53667
|
+
const result = {
|
|
53668
|
+
rejectUnauthorized: ssl.reject_unauthorized !== false
|
|
53669
|
+
};
|
|
53670
|
+
if (ssl.ca) {
|
|
53671
|
+
const caPath = this.validateSslPath(ssl.ca, "CA certificate");
|
|
53672
|
+
result.ca = fs23.readFileSync(caPath, "utf8");
|
|
53673
|
+
}
|
|
53674
|
+
if (ssl.cert) {
|
|
53675
|
+
const certPath = this.validateSslPath(ssl.cert, "client certificate");
|
|
53676
|
+
result.cert = fs23.readFileSync(certPath, "utf8");
|
|
53677
|
+
}
|
|
53678
|
+
if (ssl.key) {
|
|
53679
|
+
const keyPath = this.validateSslPath(ssl.key, "client key");
|
|
53680
|
+
result.key = fs23.readFileSync(keyPath, "utf8");
|
|
53681
|
+
}
|
|
53682
|
+
return result;
|
|
53683
|
+
}
|
|
53684
|
+
validateSslPath(filePath, label) {
|
|
53685
|
+
const resolved = path27.resolve(filePath);
|
|
53686
|
+
if (resolved !== path27.normalize(resolved)) {
|
|
53687
|
+
throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
|
|
53688
|
+
}
|
|
53689
|
+
if (!fs23.existsSync(resolved)) {
|
|
53690
|
+
throw new Error(`SSL ${label} not found: ${filePath}`);
|
|
53691
|
+
}
|
|
53692
|
+
return resolved;
|
|
53693
|
+
}
|
|
53694
|
+
async shutdown() {
|
|
53695
|
+
if (this.knex) {
|
|
53696
|
+
await this.knex.destroy();
|
|
53697
|
+
this.knex = null;
|
|
53698
|
+
}
|
|
53699
|
+
}
|
|
53700
|
+
async migrateSchema() {
|
|
53701
|
+
const knex = this.getKnex();
|
|
53702
|
+
const exists = await knex.schema.hasTable("schedules");
|
|
53703
|
+
if (!exists) {
|
|
53704
|
+
await knex.schema.createTable("schedules", (table) => {
|
|
53705
|
+
table.string("id", 36).primary();
|
|
53706
|
+
table.string("creator_id", 255).notNullable().index();
|
|
53707
|
+
table.string("creator_context", 255);
|
|
53708
|
+
table.string("creator_name", 255);
|
|
53709
|
+
table.string("timezone", 64).notNullable().defaultTo("UTC");
|
|
53710
|
+
table.string("schedule_expr", 255);
|
|
53711
|
+
table.bigInteger("run_at");
|
|
53712
|
+
table.boolean("is_recurring").notNullable();
|
|
53713
|
+
table.text("original_expression");
|
|
53714
|
+
table.string("workflow", 255);
|
|
53715
|
+
table.text("workflow_inputs");
|
|
53716
|
+
table.text("output_context");
|
|
53717
|
+
table.string("status", 20).notNullable().index();
|
|
53718
|
+
table.bigInteger("created_at").notNullable();
|
|
53719
|
+
table.bigInteger("last_run_at");
|
|
53720
|
+
table.bigInteger("next_run_at");
|
|
53721
|
+
table.integer("run_count").notNullable().defaultTo(0);
|
|
53722
|
+
table.integer("failure_count").notNullable().defaultTo(0);
|
|
53723
|
+
table.text("last_error");
|
|
53724
|
+
table.text("previous_response");
|
|
53725
|
+
table.index(["status", "next_run_at"]);
|
|
53726
|
+
});
|
|
53727
|
+
}
|
|
53728
|
+
const locksExist = await knex.schema.hasTable("scheduler_locks");
|
|
53729
|
+
if (!locksExist) {
|
|
53730
|
+
await knex.schema.createTable("scheduler_locks", (table) => {
|
|
53731
|
+
table.string("lock_id", 255).primary();
|
|
53732
|
+
table.string("node_id", 255).notNullable();
|
|
53733
|
+
table.string("lock_token", 36).notNullable();
|
|
53734
|
+
table.bigInteger("acquired_at").notNullable();
|
|
53735
|
+
table.bigInteger("expires_at").notNullable();
|
|
53736
|
+
});
|
|
53737
|
+
}
|
|
53738
|
+
}
|
|
53739
|
+
getKnex() {
|
|
53740
|
+
if (!this.knex) {
|
|
53741
|
+
throw new Error("[KnexStore] Not initialized. Call initialize() first.");
|
|
53742
|
+
}
|
|
53743
|
+
return this.knex;
|
|
53744
|
+
}
|
|
53745
|
+
// --- CRUD ---
|
|
53746
|
+
async create(schedule) {
|
|
53747
|
+
const knex = this.getKnex();
|
|
53748
|
+
const newSchedule = {
|
|
53749
|
+
...schedule,
|
|
53750
|
+
id: (0, import_uuid2.v4)(),
|
|
53751
|
+
createdAt: Date.now(),
|
|
53752
|
+
runCount: 0,
|
|
53753
|
+
failureCount: 0,
|
|
53754
|
+
status: "active"
|
|
53755
|
+
};
|
|
53756
|
+
await knex("schedules").insert(toInsertRow(newSchedule));
|
|
53757
|
+
logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
|
|
53758
|
+
return newSchedule;
|
|
53759
|
+
}
|
|
53760
|
+
async importSchedule(schedule) {
|
|
53761
|
+
const knex = this.getKnex();
|
|
53762
|
+
const existing = await knex("schedules").where("id", schedule.id).first();
|
|
53763
|
+
if (existing) return;
|
|
53764
|
+
await knex("schedules").insert(toInsertRow(schedule));
|
|
53765
|
+
}
|
|
53766
|
+
async get(id) {
|
|
53767
|
+
const knex = this.getKnex();
|
|
53768
|
+
const row = await knex("schedules").where("id", id).first();
|
|
53769
|
+
return row ? fromDbRow2(row) : void 0;
|
|
53770
|
+
}
|
|
53771
|
+
async update(id, patch) {
|
|
53772
|
+
const knex = this.getKnex();
|
|
53773
|
+
const existing = await knex("schedules").where("id", id).first();
|
|
53774
|
+
if (!existing) return void 0;
|
|
53775
|
+
const current = fromDbRow2(existing);
|
|
53776
|
+
const updated = { ...current, ...patch, id: current.id };
|
|
53777
|
+
const row = toInsertRow(updated);
|
|
53778
|
+
delete row.id;
|
|
53779
|
+
await knex("schedules").where("id", id).update(row);
|
|
53780
|
+
return updated;
|
|
53781
|
+
}
|
|
53782
|
+
async delete(id) {
|
|
53783
|
+
const knex = this.getKnex();
|
|
53784
|
+
const deleted = await knex("schedules").where("id", id).del();
|
|
53785
|
+
if (deleted > 0) {
|
|
53786
|
+
logger.info(`[KnexStore] Deleted schedule ${id}`);
|
|
53787
|
+
return true;
|
|
53788
|
+
}
|
|
53789
|
+
return false;
|
|
53790
|
+
}
|
|
53791
|
+
// --- Queries ---
|
|
53792
|
+
async getByCreator(creatorId) {
|
|
53793
|
+
const knex = this.getKnex();
|
|
53794
|
+
const rows = await knex("schedules").where("creator_id", creatorId);
|
|
53795
|
+
return rows.map((r) => fromDbRow2(r));
|
|
53796
|
+
}
|
|
53797
|
+
async getActiveSchedules() {
|
|
53798
|
+
const knex = this.getKnex();
|
|
53799
|
+
const rows = await knex("schedules").where("status", "active");
|
|
53800
|
+
return rows.map((r) => fromDbRow2(r));
|
|
53801
|
+
}
|
|
53802
|
+
async getDueSchedules(now) {
|
|
53803
|
+
const ts = now ?? Date.now();
|
|
53804
|
+
const knex = this.getKnex();
|
|
53805
|
+
const bFalse = this.driver === "mssql" ? 0 : false;
|
|
53806
|
+
const bTrue = this.driver === "mssql" ? 1 : true;
|
|
53807
|
+
const rows = await knex("schedules").where("status", "active").andWhere(function() {
|
|
53808
|
+
this.where(function() {
|
|
53809
|
+
this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
|
|
53810
|
+
}).orWhere(function() {
|
|
53811
|
+
this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
|
|
53812
|
+
});
|
|
53813
|
+
});
|
|
53814
|
+
return rows.map((r) => fromDbRow2(r));
|
|
53815
|
+
}
|
|
53816
|
+
async findByWorkflow(creatorId, workflowName) {
|
|
53817
|
+
const knex = this.getKnex();
|
|
53818
|
+
const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
|
|
53819
|
+
const pattern = `%${escaped}%`;
|
|
53820
|
+
const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
|
|
53821
|
+
return rows.map((r) => fromDbRow2(r));
|
|
53822
|
+
}
|
|
53823
|
+
async getAll() {
|
|
53824
|
+
const knex = this.getKnex();
|
|
53825
|
+
const rows = await knex("schedules");
|
|
53826
|
+
return rows.map((r) => fromDbRow2(r));
|
|
53827
|
+
}
|
|
53828
|
+
async getStats() {
|
|
53829
|
+
const knex = this.getKnex();
|
|
53830
|
+
const boolTrue = this.driver === "mssql" ? "1" : "true";
|
|
53831
|
+
const boolFalse = this.driver === "mssql" ? "0" : "false";
|
|
53832
|
+
const result = await knex("schedules").select(
|
|
53833
|
+
knex.raw("COUNT(*) as total"),
|
|
53834
|
+
knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
|
|
53835
|
+
knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
|
|
53836
|
+
knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
|
|
53837
|
+
knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
|
|
53838
|
+
knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
|
|
53839
|
+
knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
|
|
53840
|
+
).first();
|
|
53841
|
+
return {
|
|
53842
|
+
total: Number(result.total) || 0,
|
|
53843
|
+
active: Number(result.active) || 0,
|
|
53844
|
+
paused: Number(result.paused) || 0,
|
|
53845
|
+
completed: Number(result.completed) || 0,
|
|
53846
|
+
failed: Number(result.failed) || 0,
|
|
53847
|
+
recurring: Number(result.recurring) || 0,
|
|
53848
|
+
oneTime: Number(result.one_time) || 0
|
|
53849
|
+
};
|
|
53850
|
+
}
|
|
53851
|
+
async validateLimits(creatorId, isRecurring, limits) {
|
|
53852
|
+
const knex = this.getKnex();
|
|
53853
|
+
if (limits.maxGlobal) {
|
|
53854
|
+
const result = await knex("schedules").count("* as cnt").first();
|
|
53855
|
+
if (Number(result?.cnt) >= limits.maxGlobal) {
|
|
53856
|
+
throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
|
|
53857
|
+
}
|
|
53858
|
+
}
|
|
53859
|
+
if (limits.maxPerUser) {
|
|
53860
|
+
const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
|
|
53861
|
+
if (Number(result?.cnt) >= limits.maxPerUser) {
|
|
53862
|
+
throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
|
|
53863
|
+
}
|
|
53864
|
+
}
|
|
53865
|
+
if (isRecurring && limits.maxRecurringPerUser) {
|
|
53866
|
+
const bTrue = this.driver === "mssql" ? 1 : true;
|
|
53867
|
+
const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
|
|
53868
|
+
if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
|
|
53869
|
+
throw new Error(
|
|
53870
|
+
`You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
|
|
53871
|
+
);
|
|
53872
|
+
}
|
|
53873
|
+
}
|
|
53874
|
+
}
|
|
53875
|
+
// --- HA Distributed Locking (via scheduler_locks table) ---
|
|
53876
|
+
async tryAcquireLock(lockId, nodeId, ttlSeconds) {
|
|
53877
|
+
const knex = this.getKnex();
|
|
53878
|
+
const now = Date.now();
|
|
53879
|
+
const expiresAt = now + ttlSeconds * 1e3;
|
|
53880
|
+
const token = (0, import_uuid2.v4)();
|
|
53881
|
+
const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
|
|
53882
|
+
node_id: nodeId,
|
|
53883
|
+
lock_token: token,
|
|
53884
|
+
acquired_at: now,
|
|
53885
|
+
expires_at: expiresAt
|
|
53886
|
+
});
|
|
53887
|
+
if (updated > 0) return token;
|
|
53888
|
+
try {
|
|
53889
|
+
await knex("scheduler_locks").insert({
|
|
53890
|
+
lock_id: lockId,
|
|
53891
|
+
node_id: nodeId,
|
|
53892
|
+
lock_token: token,
|
|
53893
|
+
acquired_at: now,
|
|
53894
|
+
expires_at: expiresAt
|
|
53895
|
+
});
|
|
53896
|
+
return token;
|
|
53897
|
+
} catch {
|
|
53898
|
+
return null;
|
|
53899
|
+
}
|
|
53900
|
+
}
|
|
53901
|
+
async releaseLock(lockId, lockToken) {
|
|
53902
|
+
const knex = this.getKnex();
|
|
53903
|
+
await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
|
|
53904
|
+
}
|
|
53905
|
+
async renewLock(lockId, lockToken, ttlSeconds) {
|
|
53906
|
+
const knex = this.getKnex();
|
|
53907
|
+
const now = Date.now();
|
|
53908
|
+
const expiresAt = now + ttlSeconds * 1e3;
|
|
53909
|
+
const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
|
|
53910
|
+
return updated > 0;
|
|
53911
|
+
}
|
|
53912
|
+
async flush() {
|
|
53913
|
+
}
|
|
53914
|
+
};
|
|
53915
|
+
}
|
|
53916
|
+
});
|
|
53917
|
+
|
|
53918
|
+
// src/enterprise/loader.ts
|
|
53919
|
+
var loader_exports = {};
|
|
53920
|
+
__export(loader_exports, {
|
|
53921
|
+
loadEnterprisePolicyEngine: () => loadEnterprisePolicyEngine,
|
|
53922
|
+
loadEnterpriseStoreBackend: () => loadEnterpriseStoreBackend
|
|
53923
|
+
});
|
|
53924
|
+
async function loadEnterprisePolicyEngine(config) {
|
|
53925
|
+
try {
|
|
53926
|
+
const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
|
|
53927
|
+
const validator = new LicenseValidator2();
|
|
53928
|
+
const license = await validator.loadAndValidate();
|
|
53929
|
+
if (!license || !validator.hasFeature("policy")) {
|
|
53930
|
+
return new DefaultPolicyEngine();
|
|
53931
|
+
}
|
|
53932
|
+
if (validator.isInGracePeriod()) {
|
|
53933
|
+
console.warn(
|
|
53934
|
+
"[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
|
|
53935
|
+
);
|
|
53936
|
+
}
|
|
53937
|
+
const { OpaPolicyEngine: OpaPolicyEngine2 } = await Promise.resolve().then(() => (init_opa_policy_engine(), opa_policy_engine_exports));
|
|
53938
|
+
const engine = new OpaPolicyEngine2(config);
|
|
53939
|
+
await engine.initialize(config);
|
|
53940
|
+
return engine;
|
|
53941
|
+
} catch (err) {
|
|
53942
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
53943
|
+
try {
|
|
53944
|
+
const { logger: logger2 } = (init_logger(), __toCommonJS(logger_exports));
|
|
53945
|
+
logger2.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
|
|
53946
|
+
} catch {
|
|
53947
|
+
}
|
|
53948
|
+
return new DefaultPolicyEngine();
|
|
53949
|
+
}
|
|
53950
|
+
}
|
|
53951
|
+
async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
|
|
53952
|
+
const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
|
|
53953
|
+
const validator = new LicenseValidator2();
|
|
53954
|
+
const license = await validator.loadAndValidate();
|
|
53955
|
+
if (!license || !validator.hasFeature("scheduler-sql")) {
|
|
53956
|
+
throw new Error(
|
|
53957
|
+
`The ${driver} schedule storage driver requires a Visor Enterprise license with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`
|
|
53958
|
+
);
|
|
53959
|
+
}
|
|
53960
|
+
if (validator.isInGracePeriod()) {
|
|
53961
|
+
console.warn(
|
|
53962
|
+
"[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
|
|
53963
|
+
);
|
|
53964
|
+
}
|
|
53965
|
+
const { KnexStoreBackend: KnexStoreBackend2 } = await Promise.resolve().then(() => (init_knex_store(), knex_store_exports));
|
|
53966
|
+
return new KnexStoreBackend2(driver, storageConfig, haConfig);
|
|
53967
|
+
}
|
|
53968
|
+
var init_loader = __esm({
|
|
53969
|
+
"src/enterprise/loader.ts"() {
|
|
53970
|
+
"use strict";
|
|
53971
|
+
init_default_engine();
|
|
53972
|
+
}
|
|
53973
|
+
});
|
|
53974
|
+
|
|
52680
53975
|
// src/event-bus/event-bus.ts
|
|
52681
53976
|
var event_bus_exports = {};
|
|
52682
53977
|
__export(event_bus_exports, {
|
|
@@ -53583,8 +54878,8 @@ ${content}
|
|
|
53583
54878
|
* Sleep utility
|
|
53584
54879
|
*/
|
|
53585
54880
|
sleep(ms) {
|
|
53586
|
-
return new Promise((
|
|
53587
|
-
const t = setTimeout(
|
|
54881
|
+
return new Promise((resolve17) => {
|
|
54882
|
+
const t = setTimeout(resolve17, ms);
|
|
53588
54883
|
if (typeof t.unref === "function") {
|
|
53589
54884
|
try {
|
|
53590
54885
|
t.unref();
|
|
@@ -53858,8 +55153,8 @@ ${end}`);
|
|
|
53858
55153
|
async updateGroupedComment(ctx, comments, group, changedIds) {
|
|
53859
55154
|
const existingLock = this.updateLocks.get(group);
|
|
53860
55155
|
let resolveLock;
|
|
53861
|
-
const ourLock = new Promise((
|
|
53862
|
-
resolveLock =
|
|
55156
|
+
const ourLock = new Promise((resolve17) => {
|
|
55157
|
+
resolveLock = resolve17;
|
|
53863
55158
|
});
|
|
53864
55159
|
this.updateLocks.set(group, ourLock);
|
|
53865
55160
|
try {
|
|
@@ -54171,7 +55466,7 @@ ${blocks}
|
|
|
54171
55466
|
* Sleep utility for enforcing delays
|
|
54172
55467
|
*/
|
|
54173
55468
|
sleep(ms) {
|
|
54174
|
-
return new Promise((
|
|
55469
|
+
return new Promise((resolve17) => setTimeout(resolve17, ms));
|
|
54175
55470
|
}
|
|
54176
55471
|
};
|
|
54177
55472
|
}
|
|
@@ -55442,15 +56737,15 @@ function serializeRunState(state) {
|
|
|
55442
56737
|
])
|
|
55443
56738
|
};
|
|
55444
56739
|
}
|
|
55445
|
-
var
|
|
56740
|
+
var path29, fs25, StateMachineExecutionEngine;
|
|
55446
56741
|
var init_state_machine_execution_engine = __esm({
|
|
55447
56742
|
"src/state-machine-execution-engine.ts"() {
|
|
55448
56743
|
"use strict";
|
|
55449
56744
|
init_runner();
|
|
55450
56745
|
init_logger();
|
|
55451
56746
|
init_sandbox_manager();
|
|
55452
|
-
|
|
55453
|
-
|
|
56747
|
+
path29 = __toESM(require("path"));
|
|
56748
|
+
fs25 = __toESM(require("fs"));
|
|
55454
56749
|
StateMachineExecutionEngine = class _StateMachineExecutionEngine {
|
|
55455
56750
|
workingDirectory;
|
|
55456
56751
|
executionContext;
|
|
@@ -55682,8 +56977,8 @@ var init_state_machine_execution_engine = __esm({
|
|
|
55682
56977
|
logger.debug(
|
|
55683
56978
|
`[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`
|
|
55684
56979
|
);
|
|
55685
|
-
const { loadEnterprisePolicyEngine } = await
|
|
55686
|
-
context2.policyEngine = await
|
|
56980
|
+
const { loadEnterprisePolicyEngine: loadEnterprisePolicyEngine2 } = await Promise.resolve().then(() => (init_loader(), loader_exports));
|
|
56981
|
+
context2.policyEngine = await loadEnterprisePolicyEngine2(configWithTagFilter.policy);
|
|
55687
56982
|
logger.debug(
|
|
55688
56983
|
`[PolicyEngine] Initialized: ${context2.policyEngine?.constructor?.name || "unknown"}`
|
|
55689
56984
|
);
|
|
@@ -55835,9 +57130,9 @@ var init_state_machine_execution_engine = __esm({
|
|
|
55835
57130
|
}
|
|
55836
57131
|
const checkId = String(ev?.checkId || "unknown");
|
|
55837
57132
|
const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
|
|
55838
|
-
const baseDir = process.env.VISOR_SNAPSHOT_DIR ||
|
|
55839
|
-
|
|
55840
|
-
const filePath =
|
|
57133
|
+
const baseDir = process.env.VISOR_SNAPSHOT_DIR || path29.resolve(process.cwd(), ".visor", "snapshots");
|
|
57134
|
+
fs25.mkdirSync(baseDir, { recursive: true });
|
|
57135
|
+
const filePath = path29.join(baseDir, `${threadKey}-${checkId}.json`);
|
|
55841
57136
|
await this.saveSnapshotToFile(filePath);
|
|
55842
57137
|
logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
|
|
55843
57138
|
try {
|
|
@@ -55978,7 +57273,7 @@ var init_state_machine_execution_engine = __esm({
|
|
|
55978
57273
|
* Does not include secrets. Intended for debugging and future resume support.
|
|
55979
57274
|
*/
|
|
55980
57275
|
async saveSnapshotToFile(filePath) {
|
|
55981
|
-
const
|
|
57276
|
+
const fs26 = await import("fs/promises");
|
|
55982
57277
|
const ctx = this._lastContext;
|
|
55983
57278
|
const runner = this._lastRunner;
|
|
55984
57279
|
if (!ctx || !runner) {
|
|
@@ -55998,14 +57293,14 @@ var init_state_machine_execution_engine = __esm({
|
|
|
55998
57293
|
journal: entries,
|
|
55999
57294
|
requestedChecks: ctx.requestedChecks || []
|
|
56000
57295
|
};
|
|
56001
|
-
await
|
|
57296
|
+
await fs26.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
|
|
56002
57297
|
}
|
|
56003
57298
|
/**
|
|
56004
57299
|
* Load a snapshot JSON from file and return it. Resume support can build on this.
|
|
56005
57300
|
*/
|
|
56006
57301
|
async loadSnapshotFromFile(filePath) {
|
|
56007
|
-
const
|
|
56008
|
-
const raw = await
|
|
57302
|
+
const fs26 = await import("fs/promises");
|
|
57303
|
+
const raw = await fs26.readFile(filePath, "utf8");
|
|
56009
57304
|
return JSON.parse(raw);
|
|
56010
57305
|
}
|
|
56011
57306
|
/**
|