@probelabs/visor 0.1.169 → 0.1.170-ee
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +64 -7
- package/defaults/assistant.yaml +1 -1
- package/defaults/code-talk.yaml +2 -2
- package/dist/defaults/assistant.yaml +1 -1
- package/dist/defaults/code-talk.yaml +2 -2
- package/dist/docs/a2a-provider.md +672 -0
- package/dist/docs/architecture.md +174 -12
- package/dist/docs/commands.md +36 -0
- package/dist/docs/configuration.md +1 -0
- package/dist/docs/index.md +9 -2
- package/dist/docs/pluggable.md +16 -1
- package/dist/index.js +1842 -25
- package/dist/sdk/{check-provider-registry-5CMLUEFG.mjs → check-provider-registry-Q2OVYSBJ.mjs} +2 -2
- package/dist/sdk/{chunk-LB77GR4Q.mjs → chunk-46P7AYHG.mjs} +9 -9
- package/dist/sdk/{chunk-LB77GR4Q.mjs.map → chunk-46P7AYHG.mjs.map} +1 -1
- package/dist/sdk/{host-MHYGIPDP.mjs → host-RATJKJW5.mjs} +3 -3
- package/dist/sdk/knex-store-CRORFJE6.mjs +527 -0
- package/dist/sdk/knex-store-CRORFJE6.mjs.map +1 -0
- package/dist/sdk/loader-NJCF7DUS.mjs +89 -0
- package/dist/sdk/loader-NJCF7DUS.mjs.map +1 -0
- package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs +655 -0
- package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs.map +1 -0
- package/dist/sdk/{schedule-tool-H4G5ITNL.mjs → schedule-tool-VI5IUMEL.mjs} +2 -2
- package/dist/sdk/{schedule-tool-handler-UQWDPFP6.mjs → schedule-tool-handler-PIXYVVJY.mjs} +2 -2
- package/dist/sdk/sdk.js +1648 -274
- package/dist/sdk/sdk.js.map +1 -1
- package/dist/sdk/sdk.mjs +4 -4
- package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
- package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
- package/dist/sdk/{workflow-check-provider-FAO4AUGB.mjs → workflow-check-provider-W5FKQU5G.mjs} +2 -2
- package/package.json +1 -1
- package/dist/output/traces/run-2026-03-07T13-37-21-566Z.ndjson +0 -138
- package/dist/output/traces/run-2026-03-07T13-37-59-420Z.ndjson +0 -2266
- package/dist/sdk/a2a-frontend-XFCSNQR5.mjs +0 -1605
- package/dist/sdk/a2a-frontend-XFCSNQR5.mjs.map +0 -1
- package/dist/sdk/check-provider-registry-XEU5BSRJ.mjs +0 -30
- package/dist/sdk/chunk-HBT572VG.mjs +0 -443
- package/dist/sdk/chunk-HBT572VG.mjs.map +0 -1
- package/dist/sdk/chunk-ROMY3ZN3.mjs +0 -44771
- package/dist/sdk/chunk-ROMY3ZN3.mjs.map +0 -1
- package/dist/sdk/chunk-VPPW2TFI.mjs +0 -1502
- package/dist/sdk/chunk-VPPW2TFI.mjs.map +0 -1
- package/dist/sdk/chunk-WGZNS5IB.mjs +0 -739
- package/dist/sdk/chunk-WGZNS5IB.mjs.map +0 -1
- package/dist/sdk/failure-condition-evaluator-WYDAZT3H.mjs +0 -17
- package/dist/sdk/github-frontend-BVM7MHBJ.mjs +0 -1386
- package/dist/sdk/github-frontend-BVM7MHBJ.mjs.map +0 -1
- package/dist/sdk/routing-K2U7U3OO.mjs +0 -25
- package/dist/sdk/schedule-tool-H4G5ITNL.mjs.map +0 -1
- package/dist/sdk/schedule-tool-RYYNPLDH.mjs +0 -36
- package/dist/sdk/schedule-tool-RYYNPLDH.mjs.map +0 -1
- package/dist/sdk/schedule-tool-handler-NFNY6BVX.mjs +0 -40
- package/dist/sdk/schedule-tool-handler-NFNY6BVX.mjs.map +0 -1
- package/dist/sdk/schedule-tool-handler-UQWDPFP6.mjs.map +0 -1
- package/dist/sdk/trace-helpers-DQYOGQT5.mjs +0 -25
- package/dist/sdk/trace-helpers-DQYOGQT5.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-FAO4AUGB.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-OM5L5FJX.mjs +0 -30
- package/dist/sdk/workflow-check-provider-OM5L5FJX.mjs.map +0 -1
- package/dist/traces/run-2026-03-07T13-37-21-566Z.ndjson +0 -138
- package/dist/traces/run-2026-03-07T13-37-59-420Z.ndjson +0 -2266
- /package/dist/sdk/{check-provider-registry-5CMLUEFG.mjs.map → check-provider-registry-Q2OVYSBJ.mjs.map} +0 -0
- /package/dist/sdk/{host-MHYGIPDP.mjs.map → host-RATJKJW5.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-XEU5BSRJ.mjs.map → schedule-tool-VI5IUMEL.mjs.map} +0 -0
- /package/dist/sdk/{failure-condition-evaluator-WYDAZT3H.mjs.map → schedule-tool-handler-PIXYVVJY.mjs.map} +0 -0
- /package/dist/sdk/{routing-K2U7U3OO.mjs.map → workflow-check-provider-W5FKQU5G.mjs.map} +0 -0
package/dist/sdk/sdk.js
CHANGED
|
@@ -646,7 +646,7 @@ var require_package = __commonJS({
|
|
|
646
646
|
"package.json"(exports2, module2) {
|
|
647
647
|
module2.exports = {
|
|
648
648
|
name: "@probelabs/visor",
|
|
649
|
-
version: "0.1.
|
|
649
|
+
version: "0.1.42",
|
|
650
650
|
main: "dist/index.js",
|
|
651
651
|
bin: {
|
|
652
652
|
visor: "./dist/index.js"
|
|
@@ -864,11 +864,11 @@ function getTracer() {
|
|
|
864
864
|
}
|
|
865
865
|
async function withActiveSpan(name, attrs, fn) {
|
|
866
866
|
const tracer = getTracer();
|
|
867
|
-
return await new Promise((
|
|
867
|
+
return await new Promise((resolve19, reject) => {
|
|
868
868
|
const callback = async (span) => {
|
|
869
869
|
try {
|
|
870
870
|
const res = await fn(span);
|
|
871
|
-
|
|
871
|
+
resolve19(res);
|
|
872
872
|
} catch (err) {
|
|
873
873
|
try {
|
|
874
874
|
if (err instanceof Error) span.recordException(err);
|
|
@@ -945,19 +945,19 @@ function __getOrCreateNdjsonPath() {
|
|
|
945
945
|
try {
|
|
946
946
|
if (process.env.VISOR_TELEMETRY_SINK && process.env.VISOR_TELEMETRY_SINK !== "file")
|
|
947
947
|
return null;
|
|
948
|
-
const
|
|
949
|
-
const
|
|
948
|
+
const path32 = require("path");
|
|
949
|
+
const fs29 = require("fs");
|
|
950
950
|
if (process.env.VISOR_FALLBACK_TRACE_FILE) {
|
|
951
951
|
__ndjsonPath = process.env.VISOR_FALLBACK_TRACE_FILE;
|
|
952
|
-
const dir =
|
|
953
|
-
if (!
|
|
952
|
+
const dir = path32.dirname(__ndjsonPath);
|
|
953
|
+
if (!fs29.existsSync(dir)) fs29.mkdirSync(dir, { recursive: true });
|
|
954
954
|
return __ndjsonPath;
|
|
955
955
|
}
|
|
956
|
-
const outDir = process.env.VISOR_TRACE_DIR ||
|
|
957
|
-
if (!
|
|
956
|
+
const outDir = process.env.VISOR_TRACE_DIR || path32.join(process.cwd(), "output", "traces");
|
|
957
|
+
if (!fs29.existsSync(outDir)) fs29.mkdirSync(outDir, { recursive: true });
|
|
958
958
|
if (!__ndjsonPath) {
|
|
959
959
|
const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
960
|
-
__ndjsonPath =
|
|
960
|
+
__ndjsonPath = path32.join(outDir, `${ts}.ndjson`);
|
|
961
961
|
}
|
|
962
962
|
return __ndjsonPath;
|
|
963
963
|
} catch {
|
|
@@ -966,11 +966,11 @@ function __getOrCreateNdjsonPath() {
|
|
|
966
966
|
}
|
|
967
967
|
function _appendRunMarker() {
|
|
968
968
|
try {
|
|
969
|
-
const
|
|
969
|
+
const fs29 = require("fs");
|
|
970
970
|
const p = __getOrCreateNdjsonPath();
|
|
971
971
|
if (!p) return;
|
|
972
972
|
const line = { name: "visor.run", attributes: { started: true } };
|
|
973
|
-
|
|
973
|
+
fs29.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
|
|
974
974
|
} catch {
|
|
975
975
|
}
|
|
976
976
|
}
|
|
@@ -3193,7 +3193,7 @@ var init_failure_condition_evaluator = __esm({
|
|
|
3193
3193
|
*/
|
|
3194
3194
|
evaluateExpression(condition, context2) {
|
|
3195
3195
|
try {
|
|
3196
|
-
const
|
|
3196
|
+
const normalize8 = (expr) => {
|
|
3197
3197
|
const trimmed = expr.trim();
|
|
3198
3198
|
if (!/[\n;]/.test(trimmed)) return trimmed;
|
|
3199
3199
|
const parts = trimmed.split(/[\n;]+/).map((s) => s.trim()).filter((s) => s.length > 0 && !s.startsWith("//"));
|
|
@@ -3351,7 +3351,7 @@ var init_failure_condition_evaluator = __esm({
|
|
|
3351
3351
|
try {
|
|
3352
3352
|
exec2 = this.sandbox.compile(`return (${raw});`);
|
|
3353
3353
|
} catch {
|
|
3354
|
-
const normalizedExpr =
|
|
3354
|
+
const normalizedExpr = normalize8(condition);
|
|
3355
3355
|
exec2 = this.sandbox.compile(`return (${normalizedExpr});`);
|
|
3356
3356
|
}
|
|
3357
3357
|
const result = exec2(scope).run();
|
|
@@ -3734,9 +3734,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
3734
3734
|
});
|
|
3735
3735
|
liquid.registerFilter("get", (obj, pathExpr) => {
|
|
3736
3736
|
if (obj == null) return void 0;
|
|
3737
|
-
const
|
|
3738
|
-
if (!
|
|
3739
|
-
const parts =
|
|
3737
|
+
const path32 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
|
|
3738
|
+
if (!path32) return obj;
|
|
3739
|
+
const parts = path32.split(".");
|
|
3740
3740
|
let cur = obj;
|
|
3741
3741
|
for (const p of parts) {
|
|
3742
3742
|
if (cur == null) return void 0;
|
|
@@ -3855,9 +3855,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
3855
3855
|
}
|
|
3856
3856
|
}
|
|
3857
3857
|
const defaultRole = typeof rolesCfg.default === "string" && rolesCfg.default.trim() ? rolesCfg.default.trim() : void 0;
|
|
3858
|
-
const getNested = (obj,
|
|
3859
|
-
if (!obj || !
|
|
3860
|
-
const parts =
|
|
3858
|
+
const getNested = (obj, path32) => {
|
|
3859
|
+
if (!obj || !path32) return void 0;
|
|
3860
|
+
const parts = path32.split(".");
|
|
3861
3861
|
let cur = obj;
|
|
3862
3862
|
for (const p of parts) {
|
|
3863
3863
|
if (cur == null) return void 0;
|
|
@@ -6409,8 +6409,8 @@ var init_dependency_gating = __esm({
|
|
|
6409
6409
|
async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
6410
6410
|
try {
|
|
6411
6411
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
6412
|
-
const
|
|
6413
|
-
const
|
|
6412
|
+
const fs29 = await import("fs/promises");
|
|
6413
|
+
const path32 = await import("path");
|
|
6414
6414
|
const schemaRaw = checkConfig.schema || "plain";
|
|
6415
6415
|
const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
|
|
6416
6416
|
let templateContent;
|
|
@@ -6418,24 +6418,24 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
|
6418
6418
|
templateContent = String(checkConfig.template.content);
|
|
6419
6419
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
6420
6420
|
const file = String(checkConfig.template.file);
|
|
6421
|
-
const resolved =
|
|
6422
|
-
templateContent = await
|
|
6421
|
+
const resolved = path32.resolve(process.cwd(), file);
|
|
6422
|
+
templateContent = await fs29.readFile(resolved, "utf-8");
|
|
6423
6423
|
} else if (schema && schema !== "plain") {
|
|
6424
6424
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
6425
6425
|
if (sanitized) {
|
|
6426
6426
|
const candidatePaths = [
|
|
6427
|
-
|
|
6427
|
+
path32.join(__dirname, "output", sanitized, "template.liquid"),
|
|
6428
6428
|
// bundled: dist/output/
|
|
6429
|
-
|
|
6429
|
+
path32.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
6430
6430
|
// source: output/
|
|
6431
|
-
|
|
6431
|
+
path32.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
6432
6432
|
// fallback: cwd/output/
|
|
6433
|
-
|
|
6433
|
+
path32.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
6434
6434
|
// fallback: cwd/dist/output/
|
|
6435
6435
|
];
|
|
6436
6436
|
for (const p of candidatePaths) {
|
|
6437
6437
|
try {
|
|
6438
|
-
templateContent = await
|
|
6438
|
+
templateContent = await fs29.readFile(p, "utf-8");
|
|
6439
6439
|
if (templateContent) break;
|
|
6440
6440
|
} catch {
|
|
6441
6441
|
}
|
|
@@ -6840,7 +6840,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
6840
6840
|
}
|
|
6841
6841
|
try {
|
|
6842
6842
|
const originalProbePath = process.env.PROBE_PATH;
|
|
6843
|
-
const
|
|
6843
|
+
const fs29 = require("fs");
|
|
6844
6844
|
const possiblePaths = [
|
|
6845
6845
|
// Relative to current working directory (most common in production)
|
|
6846
6846
|
path6.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
|
|
@@ -6851,7 +6851,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
6851
6851
|
];
|
|
6852
6852
|
let probeBinaryPath;
|
|
6853
6853
|
for (const candidatePath of possiblePaths) {
|
|
6854
|
-
if (
|
|
6854
|
+
if (fs29.existsSync(candidatePath)) {
|
|
6855
6855
|
probeBinaryPath = candidatePath;
|
|
6856
6856
|
break;
|
|
6857
6857
|
}
|
|
@@ -6958,7 +6958,7 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
6958
6958
|
if (chromiumPath) {
|
|
6959
6959
|
env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;
|
|
6960
6960
|
}
|
|
6961
|
-
const result = await new Promise((
|
|
6961
|
+
const result = await new Promise((resolve19) => {
|
|
6962
6962
|
const proc = (0, import_child_process.spawn)(
|
|
6963
6963
|
"npx",
|
|
6964
6964
|
[
|
|
@@ -6988,13 +6988,13 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
6988
6988
|
});
|
|
6989
6989
|
proc.on("close", (code) => {
|
|
6990
6990
|
if (code === 0) {
|
|
6991
|
-
|
|
6991
|
+
resolve19({ success: true });
|
|
6992
6992
|
} else {
|
|
6993
|
-
|
|
6993
|
+
resolve19({ success: false, error: stderr || `Exit code ${code}` });
|
|
6994
6994
|
}
|
|
6995
6995
|
});
|
|
6996
6996
|
proc.on("error", (err) => {
|
|
6997
|
-
|
|
6997
|
+
resolve19({ success: false, error: err.message });
|
|
6998
6998
|
});
|
|
6999
6999
|
});
|
|
7000
7000
|
if (!result.success) {
|
|
@@ -8156,8 +8156,8 @@ ${schemaString}`);
|
|
|
8156
8156
|
}
|
|
8157
8157
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8158
8158
|
try {
|
|
8159
|
-
const
|
|
8160
|
-
const
|
|
8159
|
+
const fs29 = require("fs");
|
|
8160
|
+
const path32 = require("path");
|
|
8161
8161
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8162
8162
|
const provider = this.config.provider || "auto";
|
|
8163
8163
|
const model = this.config.model || "default";
|
|
@@ -8271,20 +8271,20 @@ ${"=".repeat(60)}
|
|
|
8271
8271
|
`;
|
|
8272
8272
|
readableVersion += `${"=".repeat(60)}
|
|
8273
8273
|
`;
|
|
8274
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8275
|
-
if (!
|
|
8276
|
-
|
|
8274
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path32.join(process.cwd(), "debug-artifacts");
|
|
8275
|
+
if (!fs29.existsSync(debugArtifactsDir)) {
|
|
8276
|
+
fs29.mkdirSync(debugArtifactsDir, { recursive: true });
|
|
8277
8277
|
}
|
|
8278
|
-
const debugFile =
|
|
8278
|
+
const debugFile = path32.join(
|
|
8279
8279
|
debugArtifactsDir,
|
|
8280
8280
|
`prompt-${_checkName || "unknown"}-${timestamp}.json`
|
|
8281
8281
|
);
|
|
8282
|
-
|
|
8283
|
-
const readableFile =
|
|
8282
|
+
fs29.writeFileSync(debugFile, debugJson, "utf-8");
|
|
8283
|
+
const readableFile = path32.join(
|
|
8284
8284
|
debugArtifactsDir,
|
|
8285
8285
|
`prompt-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8286
8286
|
);
|
|
8287
|
-
|
|
8287
|
+
fs29.writeFileSync(readableFile, readableVersion, "utf-8");
|
|
8288
8288
|
log(`
|
|
8289
8289
|
\u{1F4BE} Full debug info saved to:`);
|
|
8290
8290
|
log(` JSON: ${debugFile}`);
|
|
@@ -8317,8 +8317,8 @@ ${"=".repeat(60)}
|
|
|
8317
8317
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
8318
8318
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8319
8319
|
try {
|
|
8320
|
-
const
|
|
8321
|
-
const
|
|
8320
|
+
const fs29 = require("fs");
|
|
8321
|
+
const path32 = require("path");
|
|
8322
8322
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8323
8323
|
const agentAny2 = agent;
|
|
8324
8324
|
let fullHistory = [];
|
|
@@ -8329,8 +8329,8 @@ ${"=".repeat(60)}
|
|
|
8329
8329
|
} else if (agentAny2._messages) {
|
|
8330
8330
|
fullHistory = agentAny2._messages;
|
|
8331
8331
|
}
|
|
8332
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8333
|
-
const sessionBase =
|
|
8332
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path32.join(process.cwd(), "debug-artifacts");
|
|
8333
|
+
const sessionBase = path32.join(
|
|
8334
8334
|
debugArtifactsDir,
|
|
8335
8335
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
8336
8336
|
);
|
|
@@ -8342,7 +8342,7 @@ ${"=".repeat(60)}
|
|
|
8342
8342
|
schema: effectiveSchema,
|
|
8343
8343
|
totalMessages: fullHistory.length
|
|
8344
8344
|
};
|
|
8345
|
-
|
|
8345
|
+
fs29.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
8346
8346
|
let readable = `=============================================================
|
|
8347
8347
|
`;
|
|
8348
8348
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -8369,7 +8369,7 @@ ${"=".repeat(60)}
|
|
|
8369
8369
|
`;
|
|
8370
8370
|
readable += content + "\n";
|
|
8371
8371
|
});
|
|
8372
|
-
|
|
8372
|
+
fs29.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
8373
8373
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
8374
8374
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
8375
8375
|
} catch (error) {
|
|
@@ -8378,11 +8378,11 @@ ${"=".repeat(60)}
|
|
|
8378
8378
|
}
|
|
8379
8379
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8380
8380
|
try {
|
|
8381
|
-
const
|
|
8382
|
-
const
|
|
8381
|
+
const fs29 = require("fs");
|
|
8382
|
+
const path32 = require("path");
|
|
8383
8383
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8384
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8385
|
-
const responseFile =
|
|
8384
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path32.join(process.cwd(), "debug-artifacts");
|
|
8385
|
+
const responseFile = path32.join(
|
|
8386
8386
|
debugArtifactsDir,
|
|
8387
8387
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8388
8388
|
);
|
|
@@ -8415,7 +8415,7 @@ ${"=".repeat(60)}
|
|
|
8415
8415
|
`;
|
|
8416
8416
|
responseContent += `${"=".repeat(60)}
|
|
8417
8417
|
`;
|
|
8418
|
-
|
|
8418
|
+
fs29.writeFileSync(responseFile, responseContent, "utf-8");
|
|
8419
8419
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
8420
8420
|
} catch (error) {
|
|
8421
8421
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -8431,9 +8431,9 @@ ${"=".repeat(60)}
|
|
|
8431
8431
|
await agentAny._telemetryConfig.shutdown();
|
|
8432
8432
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
|
|
8433
8433
|
if (process.env.GITHUB_ACTIONS) {
|
|
8434
|
-
const
|
|
8435
|
-
if (
|
|
8436
|
-
const stats =
|
|
8434
|
+
const fs29 = require("fs");
|
|
8435
|
+
if (fs29.existsSync(agentAny._traceFilePath)) {
|
|
8436
|
+
const stats = fs29.statSync(agentAny._traceFilePath);
|
|
8437
8437
|
console.log(
|
|
8438
8438
|
`::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
|
|
8439
8439
|
);
|
|
@@ -8646,9 +8646,9 @@ ${schemaString}`);
|
|
|
8646
8646
|
const model = this.config.model || "default";
|
|
8647
8647
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8648
8648
|
try {
|
|
8649
|
-
const
|
|
8650
|
-
const
|
|
8651
|
-
const
|
|
8649
|
+
const fs29 = require("fs");
|
|
8650
|
+
const path32 = require("path");
|
|
8651
|
+
const os3 = require("os");
|
|
8652
8652
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8653
8653
|
const debugData = {
|
|
8654
8654
|
timestamp,
|
|
@@ -8720,19 +8720,19 @@ ${"=".repeat(60)}
|
|
|
8720
8720
|
`;
|
|
8721
8721
|
readableVersion += `${"=".repeat(60)}
|
|
8722
8722
|
`;
|
|
8723
|
-
const tempDir =
|
|
8724
|
-
const promptFile =
|
|
8725
|
-
|
|
8723
|
+
const tempDir = os3.tmpdir();
|
|
8724
|
+
const promptFile = path32.join(tempDir, `visor-prompt-${timestamp}.txt`);
|
|
8725
|
+
fs29.writeFileSync(promptFile, prompt, "utf-8");
|
|
8726
8726
|
log(`
|
|
8727
8727
|
\u{1F4BE} Prompt saved to: ${promptFile}`);
|
|
8728
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8728
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path32.join(process.cwd(), "debug-artifacts");
|
|
8729
8729
|
try {
|
|
8730
|
-
const base =
|
|
8730
|
+
const base = path32.join(
|
|
8731
8731
|
debugArtifactsDir,
|
|
8732
8732
|
`prompt-${_checkName || "unknown"}-${timestamp}`
|
|
8733
8733
|
);
|
|
8734
|
-
|
|
8735
|
-
|
|
8734
|
+
fs29.writeFileSync(base + ".json", debugJson, "utf-8");
|
|
8735
|
+
fs29.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
|
|
8736
8736
|
log(`
|
|
8737
8737
|
\u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
|
|
8738
8738
|
} catch {
|
|
@@ -8777,8 +8777,8 @@ $ ${cliCommand}
|
|
|
8777
8777
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
8778
8778
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8779
8779
|
try {
|
|
8780
|
-
const
|
|
8781
|
-
const
|
|
8780
|
+
const fs29 = require("fs");
|
|
8781
|
+
const path32 = require("path");
|
|
8782
8782
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8783
8783
|
const agentAny = agent;
|
|
8784
8784
|
let fullHistory = [];
|
|
@@ -8789,8 +8789,8 @@ $ ${cliCommand}
|
|
|
8789
8789
|
} else if (agentAny._messages) {
|
|
8790
8790
|
fullHistory = agentAny._messages;
|
|
8791
8791
|
}
|
|
8792
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8793
|
-
const sessionBase =
|
|
8792
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path32.join(process.cwd(), "debug-artifacts");
|
|
8793
|
+
const sessionBase = path32.join(
|
|
8794
8794
|
debugArtifactsDir,
|
|
8795
8795
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
8796
8796
|
);
|
|
@@ -8802,7 +8802,7 @@ $ ${cliCommand}
|
|
|
8802
8802
|
schema: effectiveSchema,
|
|
8803
8803
|
totalMessages: fullHistory.length
|
|
8804
8804
|
};
|
|
8805
|
-
|
|
8805
|
+
fs29.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
8806
8806
|
let readable = `=============================================================
|
|
8807
8807
|
`;
|
|
8808
8808
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -8829,7 +8829,7 @@ ${"=".repeat(60)}
|
|
|
8829
8829
|
`;
|
|
8830
8830
|
readable += content + "\n";
|
|
8831
8831
|
});
|
|
8832
|
-
|
|
8832
|
+
fs29.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
8833
8833
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
8834
8834
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
8835
8835
|
} catch (error) {
|
|
@@ -8838,11 +8838,11 @@ ${"=".repeat(60)}
|
|
|
8838
8838
|
}
|
|
8839
8839
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8840
8840
|
try {
|
|
8841
|
-
const
|
|
8842
|
-
const
|
|
8841
|
+
const fs29 = require("fs");
|
|
8842
|
+
const path32 = require("path");
|
|
8843
8843
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8844
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8845
|
-
const responseFile =
|
|
8844
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path32.join(process.cwd(), "debug-artifacts");
|
|
8845
|
+
const responseFile = path32.join(
|
|
8846
8846
|
debugArtifactsDir,
|
|
8847
8847
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8848
8848
|
);
|
|
@@ -8875,7 +8875,7 @@ ${"=".repeat(60)}
|
|
|
8875
8875
|
`;
|
|
8876
8876
|
responseContent += `${"=".repeat(60)}
|
|
8877
8877
|
`;
|
|
8878
|
-
|
|
8878
|
+
fs29.writeFileSync(responseFile, responseContent, "utf-8");
|
|
8879
8879
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
8880
8880
|
} catch (error) {
|
|
8881
8881
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -8893,9 +8893,9 @@ ${"=".repeat(60)}
|
|
|
8893
8893
|
await telemetry.shutdown();
|
|
8894
8894
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
|
|
8895
8895
|
if (process.env.GITHUB_ACTIONS) {
|
|
8896
|
-
const
|
|
8897
|
-
if (
|
|
8898
|
-
const stats =
|
|
8896
|
+
const fs29 = require("fs");
|
|
8897
|
+
if (fs29.existsSync(traceFilePath)) {
|
|
8898
|
+
const stats = fs29.statSync(traceFilePath);
|
|
8899
8899
|
console.log(
|
|
8900
8900
|
`::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
|
|
8901
8901
|
);
|
|
@@ -8933,8 +8933,8 @@ ${"=".repeat(60)}
|
|
|
8933
8933
|
* Load schema content from schema files or inline definitions
|
|
8934
8934
|
*/
|
|
8935
8935
|
async loadSchemaContent(schema) {
|
|
8936
|
-
const
|
|
8937
|
-
const
|
|
8936
|
+
const fs29 = require("fs").promises;
|
|
8937
|
+
const path32 = require("path");
|
|
8938
8938
|
if (typeof schema === "object" && schema !== null) {
|
|
8939
8939
|
log("\u{1F4CB} Using inline schema object from configuration");
|
|
8940
8940
|
return JSON.stringify(schema);
|
|
@@ -8947,14 +8947,14 @@ ${"=".repeat(60)}
|
|
|
8947
8947
|
}
|
|
8948
8948
|
} catch {
|
|
8949
8949
|
}
|
|
8950
|
-
if ((schema.startsWith("./") || schema.includes(".json")) && !
|
|
8950
|
+
if ((schema.startsWith("./") || schema.includes(".json")) && !path32.isAbsolute(schema)) {
|
|
8951
8951
|
if (schema.includes("..") || schema.includes("\0")) {
|
|
8952
8952
|
throw new Error("Invalid schema path: path traversal not allowed");
|
|
8953
8953
|
}
|
|
8954
8954
|
try {
|
|
8955
|
-
const schemaPath =
|
|
8955
|
+
const schemaPath = path32.resolve(process.cwd(), schema);
|
|
8956
8956
|
log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
|
|
8957
|
-
const schemaContent = await
|
|
8957
|
+
const schemaContent = await fs29.readFile(schemaPath, "utf-8");
|
|
8958
8958
|
return schemaContent.trim();
|
|
8959
8959
|
} catch (error) {
|
|
8960
8960
|
throw new Error(
|
|
@@ -8968,22 +8968,22 @@ ${"=".repeat(60)}
|
|
|
8968
8968
|
}
|
|
8969
8969
|
const candidatePaths = [
|
|
8970
8970
|
// GitHub Action bundle location
|
|
8971
|
-
|
|
8971
|
+
path32.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
|
|
8972
8972
|
// Historical fallback when src/output was inadvertently bundled as output1/
|
|
8973
|
-
|
|
8973
|
+
path32.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
|
|
8974
8974
|
// Local dev (repo root)
|
|
8975
|
-
|
|
8975
|
+
path32.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
|
|
8976
8976
|
];
|
|
8977
8977
|
for (const schemaPath of candidatePaths) {
|
|
8978
8978
|
try {
|
|
8979
|
-
const schemaContent = await
|
|
8979
|
+
const schemaContent = await fs29.readFile(schemaPath, "utf-8");
|
|
8980
8980
|
return schemaContent.trim();
|
|
8981
8981
|
} catch {
|
|
8982
8982
|
}
|
|
8983
8983
|
}
|
|
8984
|
-
const distPath =
|
|
8985
|
-
const distAltPath =
|
|
8986
|
-
const cwdPath =
|
|
8984
|
+
const distPath = path32.join(__dirname, "output", sanitizedSchemaName, "schema.json");
|
|
8985
|
+
const distAltPath = path32.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
|
|
8986
|
+
const cwdPath = path32.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
|
|
8987
8987
|
throw new Error(
|
|
8988
8988
|
`Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
|
|
8989
8989
|
);
|
|
@@ -9228,7 +9228,7 @@ ${"=".repeat(60)}
|
|
|
9228
9228
|
* Generate mock response for testing
|
|
9229
9229
|
*/
|
|
9230
9230
|
async generateMockResponse(_prompt, _checkName, _schema) {
|
|
9231
|
-
await new Promise((
|
|
9231
|
+
await new Promise((resolve19) => setTimeout(resolve19, 500));
|
|
9232
9232
|
const name = (_checkName || "").toLowerCase();
|
|
9233
9233
|
if (name.includes("extract-facts")) {
|
|
9234
9234
|
const arr = Array.from({ length: 6 }, (_, i) => ({
|
|
@@ -9589,7 +9589,7 @@ var init_command_executor = __esm({
|
|
|
9589
9589
|
* Execute command with stdin input
|
|
9590
9590
|
*/
|
|
9591
9591
|
executeWithStdin(command, options) {
|
|
9592
|
-
return new Promise((
|
|
9592
|
+
return new Promise((resolve19, reject) => {
|
|
9593
9593
|
const childProcess = (0, import_child_process2.exec)(
|
|
9594
9594
|
command,
|
|
9595
9595
|
{
|
|
@@ -9601,7 +9601,7 @@ var init_command_executor = __esm({
|
|
|
9601
9601
|
if (error && error.killed && (error.code === "ETIMEDOUT" || error.signal === "SIGTERM")) {
|
|
9602
9602
|
reject(new Error(`Command timed out after ${options.timeout || 3e4}ms`));
|
|
9603
9603
|
} else {
|
|
9604
|
-
|
|
9604
|
+
resolve19({
|
|
9605
9605
|
stdout: stdout || "",
|
|
9606
9606
|
stderr: stderr || "",
|
|
9607
9607
|
exitCode: error ? error.code || 1 : 0
|
|
@@ -17977,17 +17977,17 @@ var init_workflow_check_provider = __esm({
|
|
|
17977
17977
|
* so it can be executed by the state machine as a nested workflow.
|
|
17978
17978
|
*/
|
|
17979
17979
|
async loadWorkflowFromConfigPath(sourcePath, baseDir) {
|
|
17980
|
-
const
|
|
17981
|
-
const
|
|
17980
|
+
const path32 = require("path");
|
|
17981
|
+
const fs29 = require("fs");
|
|
17982
17982
|
const yaml5 = require("js-yaml");
|
|
17983
|
-
const resolved =
|
|
17984
|
-
if (!
|
|
17983
|
+
const resolved = path32.isAbsolute(sourcePath) ? sourcePath : path32.resolve(baseDir, sourcePath);
|
|
17984
|
+
if (!fs29.existsSync(resolved)) {
|
|
17985
17985
|
throw new Error(`Workflow config not found at: ${resolved}`);
|
|
17986
17986
|
}
|
|
17987
|
-
const rawContent =
|
|
17987
|
+
const rawContent = fs29.readFileSync(resolved, "utf8");
|
|
17988
17988
|
const rawData = yaml5.load(rawContent);
|
|
17989
17989
|
if (rawData.imports && Array.isArray(rawData.imports)) {
|
|
17990
|
-
const configDir =
|
|
17990
|
+
const configDir = path32.dirname(resolved);
|
|
17991
17991
|
for (const source of rawData.imports) {
|
|
17992
17992
|
const results = await this.registry.import(source, {
|
|
17993
17993
|
basePath: configDir,
|
|
@@ -18017,8 +18017,8 @@ ${errors}`);
|
|
|
18017
18017
|
if (!steps || Object.keys(steps).length === 0) {
|
|
18018
18018
|
throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
|
|
18019
18019
|
}
|
|
18020
|
-
const id =
|
|
18021
|
-
const name = loaded.name || `Workflow from ${
|
|
18020
|
+
const id = path32.basename(resolved).replace(/\.(ya?ml)$/i, "");
|
|
18021
|
+
const name = loaded.name || `Workflow from ${path32.basename(resolved)}`;
|
|
18022
18022
|
const workflowDef = {
|
|
18023
18023
|
id,
|
|
18024
18024
|
name,
|
|
@@ -18824,8 +18824,8 @@ async function createStoreBackend(storageConfig, haConfig) {
|
|
|
18824
18824
|
case "mssql": {
|
|
18825
18825
|
try {
|
|
18826
18826
|
const loaderPath = "../../enterprise/loader";
|
|
18827
|
-
const { loadEnterpriseStoreBackend } = await import(loaderPath);
|
|
18828
|
-
return await
|
|
18827
|
+
const { loadEnterpriseStoreBackend: loadEnterpriseStoreBackend2 } = await import(loaderPath);
|
|
18828
|
+
return await loadEnterpriseStoreBackend2(driver, storageConfig, haConfig);
|
|
18829
18829
|
} catch (err) {
|
|
18830
18830
|
const msg = err instanceof Error ? err.message : String(err);
|
|
18831
18831
|
logger.error(`[StoreFactory] Failed to load enterprise ${driver} backend: ${msg}`);
|
|
@@ -21396,7 +21396,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21396
21396
|
* Returns the actual bound port number
|
|
21397
21397
|
*/
|
|
21398
21398
|
async start() {
|
|
21399
|
-
return new Promise((
|
|
21399
|
+
return new Promise((resolve19, reject) => {
|
|
21400
21400
|
try {
|
|
21401
21401
|
this.server = import_http.default.createServer((req, res) => {
|
|
21402
21402
|
this.handleRequest(req, res).catch((error) => {
|
|
@@ -21430,7 +21430,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21430
21430
|
);
|
|
21431
21431
|
}
|
|
21432
21432
|
this.startKeepalive();
|
|
21433
|
-
|
|
21433
|
+
resolve19(this.port);
|
|
21434
21434
|
});
|
|
21435
21435
|
} catch (error) {
|
|
21436
21436
|
reject(error);
|
|
@@ -21493,7 +21493,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21493
21493
|
logger.debug(
|
|
21494
21494
|
`[CustomToolsSSEServer:${this.sessionId}] Grace period before stop: ${waitMs}ms (activeToolCalls=${this.activeToolCalls})`
|
|
21495
21495
|
);
|
|
21496
|
-
await new Promise((
|
|
21496
|
+
await new Promise((resolve19) => setTimeout(resolve19, waitMs));
|
|
21497
21497
|
}
|
|
21498
21498
|
}
|
|
21499
21499
|
if (this.activeToolCalls > 0) {
|
|
@@ -21502,7 +21502,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21502
21502
|
`[CustomToolsSSEServer:${this.sessionId}] Waiting for ${this.activeToolCalls} active tool call(s) before stop`
|
|
21503
21503
|
);
|
|
21504
21504
|
while (this.activeToolCalls > 0 && Date.now() - startedAt < effectiveDrainTimeoutMs) {
|
|
21505
|
-
await new Promise((
|
|
21505
|
+
await new Promise((resolve19) => setTimeout(resolve19, 250));
|
|
21506
21506
|
}
|
|
21507
21507
|
if (this.activeToolCalls > 0) {
|
|
21508
21508
|
logger.warn(
|
|
@@ -21527,21 +21527,21 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21527
21527
|
}
|
|
21528
21528
|
this.connections.clear();
|
|
21529
21529
|
if (this.server) {
|
|
21530
|
-
await new Promise((
|
|
21530
|
+
await new Promise((resolve19, reject) => {
|
|
21531
21531
|
const timeout = setTimeout(() => {
|
|
21532
21532
|
if (this.debug) {
|
|
21533
21533
|
logger.debug(
|
|
21534
21534
|
`[CustomToolsSSEServer:${this.sessionId}] Force closing server after timeout`
|
|
21535
21535
|
);
|
|
21536
21536
|
}
|
|
21537
|
-
this.server?.close(() =>
|
|
21537
|
+
this.server?.close(() => resolve19());
|
|
21538
21538
|
}, 5e3);
|
|
21539
21539
|
this.server.close((error) => {
|
|
21540
21540
|
clearTimeout(timeout);
|
|
21541
21541
|
if (error) {
|
|
21542
21542
|
reject(error);
|
|
21543
21543
|
} else {
|
|
21544
|
-
|
|
21544
|
+
resolve19();
|
|
21545
21545
|
}
|
|
21546
21546
|
});
|
|
21547
21547
|
});
|
|
@@ -21976,7 +21976,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21976
21976
|
logger.warn(
|
|
21977
21977
|
`[CustomToolsSSEServer:${this.sessionId}] Tool ${toolName} failed (attempt ${attempt + 1}/${retryCount + 1}): ${errorMsg}. Retrying in ${delay}ms`
|
|
21978
21978
|
);
|
|
21979
|
-
await new Promise((
|
|
21979
|
+
await new Promise((resolve19) => setTimeout(resolve19, delay));
|
|
21980
21980
|
attempt++;
|
|
21981
21981
|
}
|
|
21982
21982
|
}
|
|
@@ -22289,9 +22289,9 @@ var init_ai_check_provider = __esm({
|
|
|
22289
22289
|
} else {
|
|
22290
22290
|
resolvedPath = import_path7.default.resolve(process.cwd(), str);
|
|
22291
22291
|
}
|
|
22292
|
-
const
|
|
22292
|
+
const fs29 = require("fs").promises;
|
|
22293
22293
|
try {
|
|
22294
|
-
const stat2 = await
|
|
22294
|
+
const stat2 = await fs29.stat(resolvedPath);
|
|
22295
22295
|
return stat2.isFile();
|
|
22296
22296
|
} catch {
|
|
22297
22297
|
return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
|
|
@@ -28393,14 +28393,14 @@ var require_util = __commonJS({
|
|
|
28393
28393
|
}
|
|
28394
28394
|
const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
|
|
28395
28395
|
let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
|
|
28396
|
-
let
|
|
28396
|
+
let path32 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
|
|
28397
28397
|
if (origin.endsWith("/")) {
|
|
28398
28398
|
origin = origin.substring(0, origin.length - 1);
|
|
28399
28399
|
}
|
|
28400
|
-
if (
|
|
28401
|
-
|
|
28400
|
+
if (path32 && !path32.startsWith("/")) {
|
|
28401
|
+
path32 = `/${path32}`;
|
|
28402
28402
|
}
|
|
28403
|
-
url = new URL(origin +
|
|
28403
|
+
url = new URL(origin + path32);
|
|
28404
28404
|
}
|
|
28405
28405
|
return url;
|
|
28406
28406
|
}
|
|
@@ -30014,20 +30014,20 @@ var require_parseParams = __commonJS({
|
|
|
30014
30014
|
var require_basename = __commonJS({
|
|
30015
30015
|
"node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
|
|
30016
30016
|
"use strict";
|
|
30017
|
-
module2.exports = function basename4(
|
|
30018
|
-
if (typeof
|
|
30017
|
+
module2.exports = function basename4(path32) {
|
|
30018
|
+
if (typeof path32 !== "string") {
|
|
30019
30019
|
return "";
|
|
30020
30020
|
}
|
|
30021
|
-
for (var i =
|
|
30022
|
-
switch (
|
|
30021
|
+
for (var i = path32.length - 1; i >= 0; --i) {
|
|
30022
|
+
switch (path32.charCodeAt(i)) {
|
|
30023
30023
|
case 47:
|
|
30024
30024
|
// '/'
|
|
30025
30025
|
case 92:
|
|
30026
|
-
|
|
30027
|
-
return
|
|
30026
|
+
path32 = path32.slice(i + 1);
|
|
30027
|
+
return path32 === ".." || path32 === "." ? "" : path32;
|
|
30028
30028
|
}
|
|
30029
30029
|
}
|
|
30030
|
-
return
|
|
30030
|
+
return path32 === ".." || path32 === "." ? "" : path32;
|
|
30031
30031
|
};
|
|
30032
30032
|
}
|
|
30033
30033
|
});
|
|
@@ -31031,11 +31031,11 @@ var require_util2 = __commonJS({
|
|
|
31031
31031
|
var assert = require("assert");
|
|
31032
31032
|
var { isUint8Array } = require("util/types");
|
|
31033
31033
|
var supportedHashes = [];
|
|
31034
|
-
var
|
|
31034
|
+
var crypto8;
|
|
31035
31035
|
try {
|
|
31036
|
-
|
|
31036
|
+
crypto8 = require("crypto");
|
|
31037
31037
|
const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
|
|
31038
|
-
supportedHashes =
|
|
31038
|
+
supportedHashes = crypto8.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
|
|
31039
31039
|
} catch {
|
|
31040
31040
|
}
|
|
31041
31041
|
function responseURL(response) {
|
|
@@ -31312,7 +31312,7 @@ var require_util2 = __commonJS({
|
|
|
31312
31312
|
}
|
|
31313
31313
|
}
|
|
31314
31314
|
function bytesMatch(bytes, metadataList) {
|
|
31315
|
-
if (
|
|
31315
|
+
if (crypto8 === void 0) {
|
|
31316
31316
|
return true;
|
|
31317
31317
|
}
|
|
31318
31318
|
const parsedMetadata = parseMetadata(metadataList);
|
|
@@ -31327,7 +31327,7 @@ var require_util2 = __commonJS({
|
|
|
31327
31327
|
for (const item of metadata) {
|
|
31328
31328
|
const algorithm = item.algo;
|
|
31329
31329
|
const expectedValue = item.hash;
|
|
31330
|
-
let actualValue =
|
|
31330
|
+
let actualValue = crypto8.createHash(algorithm).update(bytes).digest("base64");
|
|
31331
31331
|
if (actualValue[actualValue.length - 1] === "=") {
|
|
31332
31332
|
if (actualValue[actualValue.length - 2] === "=") {
|
|
31333
31333
|
actualValue = actualValue.slice(0, -2);
|
|
@@ -31420,8 +31420,8 @@ var require_util2 = __commonJS({
|
|
|
31420
31420
|
function createDeferredPromise() {
|
|
31421
31421
|
let res;
|
|
31422
31422
|
let rej;
|
|
31423
|
-
const promise = new Promise((
|
|
31424
|
-
res =
|
|
31423
|
+
const promise = new Promise((resolve19, reject) => {
|
|
31424
|
+
res = resolve19;
|
|
31425
31425
|
rej = reject;
|
|
31426
31426
|
});
|
|
31427
31427
|
return { promise, resolve: res, reject: rej };
|
|
@@ -32674,8 +32674,8 @@ var require_body = __commonJS({
|
|
|
32674
32674
|
var { parseMIMEType, serializeAMimeType } = require_dataURL();
|
|
32675
32675
|
var random;
|
|
32676
32676
|
try {
|
|
32677
|
-
const
|
|
32678
|
-
random = (max) =>
|
|
32677
|
+
const crypto8 = require("crypto");
|
|
32678
|
+
random = (max) => crypto8.randomInt(0, max);
|
|
32679
32679
|
} catch {
|
|
32680
32680
|
random = (max) => Math.floor(Math.random(max));
|
|
32681
32681
|
}
|
|
@@ -32926,8 +32926,8 @@ Content-Type: ${value.type || "application/octet-stream"}\r
|
|
|
32926
32926
|
});
|
|
32927
32927
|
}
|
|
32928
32928
|
});
|
|
32929
|
-
const busboyResolve = new Promise((
|
|
32930
|
-
busboy.on("finish",
|
|
32929
|
+
const busboyResolve = new Promise((resolve19, reject) => {
|
|
32930
|
+
busboy.on("finish", resolve19);
|
|
32931
32931
|
busboy.on("error", (err) => reject(new TypeError(err)));
|
|
32932
32932
|
});
|
|
32933
32933
|
if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk);
|
|
@@ -33058,7 +33058,7 @@ var require_request = __commonJS({
|
|
|
33058
33058
|
}
|
|
33059
33059
|
var Request = class _Request {
|
|
33060
33060
|
constructor(origin, {
|
|
33061
|
-
path:
|
|
33061
|
+
path: path32,
|
|
33062
33062
|
method,
|
|
33063
33063
|
body,
|
|
33064
33064
|
headers,
|
|
@@ -33072,11 +33072,11 @@ var require_request = __commonJS({
|
|
|
33072
33072
|
throwOnError,
|
|
33073
33073
|
expectContinue
|
|
33074
33074
|
}, handler) {
|
|
33075
|
-
if (typeof
|
|
33075
|
+
if (typeof path32 !== "string") {
|
|
33076
33076
|
throw new InvalidArgumentError("path must be a string");
|
|
33077
|
-
} else if (
|
|
33077
|
+
} else if (path32[0] !== "/" && !(path32.startsWith("http://") || path32.startsWith("https://")) && method !== "CONNECT") {
|
|
33078
33078
|
throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
|
|
33079
|
-
} else if (invalidPathRegex.exec(
|
|
33079
|
+
} else if (invalidPathRegex.exec(path32) !== null) {
|
|
33080
33080
|
throw new InvalidArgumentError("invalid request path");
|
|
33081
33081
|
}
|
|
33082
33082
|
if (typeof method !== "string") {
|
|
@@ -33139,7 +33139,7 @@ var require_request = __commonJS({
|
|
|
33139
33139
|
this.completed = false;
|
|
33140
33140
|
this.aborted = false;
|
|
33141
33141
|
this.upgrade = upgrade || null;
|
|
33142
|
-
this.path = query ? util.buildURL(
|
|
33142
|
+
this.path = query ? util.buildURL(path32, query) : path32;
|
|
33143
33143
|
this.origin = origin;
|
|
33144
33144
|
this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
|
|
33145
33145
|
this.blocking = blocking == null ? false : blocking;
|
|
@@ -33461,9 +33461,9 @@ var require_dispatcher_base = __commonJS({
|
|
|
33461
33461
|
}
|
|
33462
33462
|
close(callback) {
|
|
33463
33463
|
if (callback === void 0) {
|
|
33464
|
-
return new Promise((
|
|
33464
|
+
return new Promise((resolve19, reject) => {
|
|
33465
33465
|
this.close((err, data) => {
|
|
33466
|
-
return err ? reject(err) :
|
|
33466
|
+
return err ? reject(err) : resolve19(data);
|
|
33467
33467
|
});
|
|
33468
33468
|
});
|
|
33469
33469
|
}
|
|
@@ -33501,12 +33501,12 @@ var require_dispatcher_base = __commonJS({
|
|
|
33501
33501
|
err = null;
|
|
33502
33502
|
}
|
|
33503
33503
|
if (callback === void 0) {
|
|
33504
|
-
return new Promise((
|
|
33504
|
+
return new Promise((resolve19, reject) => {
|
|
33505
33505
|
this.destroy(err, (err2, data) => {
|
|
33506
33506
|
return err2 ? (
|
|
33507
33507
|
/* istanbul ignore next: should never error */
|
|
33508
33508
|
reject(err2)
|
|
33509
|
-
) :
|
|
33509
|
+
) : resolve19(data);
|
|
33510
33510
|
});
|
|
33511
33511
|
});
|
|
33512
33512
|
}
|
|
@@ -34147,9 +34147,9 @@ var require_RedirectHandler = __commonJS({
|
|
|
34147
34147
|
return this.handler.onHeaders(statusCode, headers, resume, statusText);
|
|
34148
34148
|
}
|
|
34149
34149
|
const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
|
|
34150
|
-
const
|
|
34150
|
+
const path32 = search ? `${pathname}${search}` : pathname;
|
|
34151
34151
|
this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
|
|
34152
|
-
this.opts.path =
|
|
34152
|
+
this.opts.path = path32;
|
|
34153
34153
|
this.opts.origin = origin;
|
|
34154
34154
|
this.opts.maxRedirections = 0;
|
|
34155
34155
|
this.opts.query = null;
|
|
@@ -34568,16 +34568,16 @@ var require_client = __commonJS({
|
|
|
34568
34568
|
return this[kNeedDrain] < 2;
|
|
34569
34569
|
}
|
|
34570
34570
|
async [kClose]() {
|
|
34571
|
-
return new Promise((
|
|
34571
|
+
return new Promise((resolve19) => {
|
|
34572
34572
|
if (!this[kSize]) {
|
|
34573
|
-
|
|
34573
|
+
resolve19(null);
|
|
34574
34574
|
} else {
|
|
34575
|
-
this[kClosedResolve] =
|
|
34575
|
+
this[kClosedResolve] = resolve19;
|
|
34576
34576
|
}
|
|
34577
34577
|
});
|
|
34578
34578
|
}
|
|
34579
34579
|
async [kDestroy](err) {
|
|
34580
|
-
return new Promise((
|
|
34580
|
+
return new Promise((resolve19) => {
|
|
34581
34581
|
const requests = this[kQueue].splice(this[kPendingIdx]);
|
|
34582
34582
|
for (let i = 0; i < requests.length; i++) {
|
|
34583
34583
|
const request = requests[i];
|
|
@@ -34588,7 +34588,7 @@ var require_client = __commonJS({
|
|
|
34588
34588
|
this[kClosedResolve]();
|
|
34589
34589
|
this[kClosedResolve] = null;
|
|
34590
34590
|
}
|
|
34591
|
-
|
|
34591
|
+
resolve19();
|
|
34592
34592
|
};
|
|
34593
34593
|
if (this[kHTTP2Session] != null) {
|
|
34594
34594
|
util.destroy(this[kHTTP2Session], err);
|
|
@@ -35168,7 +35168,7 @@ var require_client = __commonJS({
|
|
|
35168
35168
|
});
|
|
35169
35169
|
}
|
|
35170
35170
|
try {
|
|
35171
|
-
const socket = await new Promise((
|
|
35171
|
+
const socket = await new Promise((resolve19, reject) => {
|
|
35172
35172
|
client[kConnector]({
|
|
35173
35173
|
host,
|
|
35174
35174
|
hostname,
|
|
@@ -35180,7 +35180,7 @@ var require_client = __commonJS({
|
|
|
35180
35180
|
if (err) {
|
|
35181
35181
|
reject(err);
|
|
35182
35182
|
} else {
|
|
35183
|
-
|
|
35183
|
+
resolve19(socket2);
|
|
35184
35184
|
}
|
|
35185
35185
|
});
|
|
35186
35186
|
});
|
|
@@ -35391,7 +35391,7 @@ var require_client = __commonJS({
|
|
|
35391
35391
|
writeH2(client, client[kHTTP2Session], request);
|
|
35392
35392
|
return;
|
|
35393
35393
|
}
|
|
35394
|
-
const { body, method, path:
|
|
35394
|
+
const { body, method, path: path32, host, upgrade, headers, blocking, reset } = request;
|
|
35395
35395
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
35396
35396
|
if (body && typeof body.read === "function") {
|
|
35397
35397
|
body.read(0);
|
|
@@ -35441,7 +35441,7 @@ var require_client = __commonJS({
|
|
|
35441
35441
|
if (blocking) {
|
|
35442
35442
|
socket[kBlocking] = true;
|
|
35443
35443
|
}
|
|
35444
|
-
let header = `${method} ${
|
|
35444
|
+
let header = `${method} ${path32} HTTP/1.1\r
|
|
35445
35445
|
`;
|
|
35446
35446
|
if (typeof host === "string") {
|
|
35447
35447
|
header += `host: ${host}\r
|
|
@@ -35504,7 +35504,7 @@ upgrade: ${upgrade}\r
|
|
|
35504
35504
|
return true;
|
|
35505
35505
|
}
|
|
35506
35506
|
function writeH2(client, session, request) {
|
|
35507
|
-
const { body, method, path:
|
|
35507
|
+
const { body, method, path: path32, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
|
|
35508
35508
|
let headers;
|
|
35509
35509
|
if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim());
|
|
35510
35510
|
else headers = reqHeaders;
|
|
@@ -35547,7 +35547,7 @@ upgrade: ${upgrade}\r
|
|
|
35547
35547
|
});
|
|
35548
35548
|
return true;
|
|
35549
35549
|
}
|
|
35550
|
-
headers[HTTP2_HEADER_PATH] =
|
|
35550
|
+
headers[HTTP2_HEADER_PATH] = path32;
|
|
35551
35551
|
headers[HTTP2_HEADER_SCHEME] = "https";
|
|
35552
35552
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
35553
35553
|
if (body && typeof body.read === "function") {
|
|
@@ -35804,12 +35804,12 @@ upgrade: ${upgrade}\r
|
|
|
35804
35804
|
cb();
|
|
35805
35805
|
}
|
|
35806
35806
|
}
|
|
35807
|
-
const waitForDrain = () => new Promise((
|
|
35807
|
+
const waitForDrain = () => new Promise((resolve19, reject) => {
|
|
35808
35808
|
assert(callback === null);
|
|
35809
35809
|
if (socket[kError]) {
|
|
35810
35810
|
reject(socket[kError]);
|
|
35811
35811
|
} else {
|
|
35812
|
-
callback =
|
|
35812
|
+
callback = resolve19;
|
|
35813
35813
|
}
|
|
35814
35814
|
});
|
|
35815
35815
|
if (client[kHTTPConnVersion] === "h2") {
|
|
@@ -36155,8 +36155,8 @@ var require_pool_base = __commonJS({
|
|
|
36155
36155
|
if (this[kQueue].isEmpty()) {
|
|
36156
36156
|
return Promise.all(this[kClients].map((c) => c.close()));
|
|
36157
36157
|
} else {
|
|
36158
|
-
return new Promise((
|
|
36159
|
-
this[kClosedResolve] =
|
|
36158
|
+
return new Promise((resolve19) => {
|
|
36159
|
+
this[kClosedResolve] = resolve19;
|
|
36160
36160
|
});
|
|
36161
36161
|
}
|
|
36162
36162
|
}
|
|
@@ -36734,7 +36734,7 @@ var require_readable = __commonJS({
|
|
|
36734
36734
|
if (this.closed) {
|
|
36735
36735
|
return Promise.resolve(null);
|
|
36736
36736
|
}
|
|
36737
|
-
return new Promise((
|
|
36737
|
+
return new Promise((resolve19, reject) => {
|
|
36738
36738
|
const signalListenerCleanup = signal ? util.addAbortListener(signal, () => {
|
|
36739
36739
|
this.destroy();
|
|
36740
36740
|
}) : noop;
|
|
@@ -36743,7 +36743,7 @@ var require_readable = __commonJS({
|
|
|
36743
36743
|
if (signal && signal.aborted) {
|
|
36744
36744
|
reject(signal.reason || Object.assign(new Error("The operation was aborted"), { name: "AbortError" }));
|
|
36745
36745
|
} else {
|
|
36746
|
-
|
|
36746
|
+
resolve19(null);
|
|
36747
36747
|
}
|
|
36748
36748
|
}).on("error", noop).on("data", function(chunk) {
|
|
36749
36749
|
limit -= chunk.length;
|
|
@@ -36765,11 +36765,11 @@ var require_readable = __commonJS({
|
|
|
36765
36765
|
throw new TypeError("unusable");
|
|
36766
36766
|
}
|
|
36767
36767
|
assert(!stream[kConsume]);
|
|
36768
|
-
return new Promise((
|
|
36768
|
+
return new Promise((resolve19, reject) => {
|
|
36769
36769
|
stream[kConsume] = {
|
|
36770
36770
|
type,
|
|
36771
36771
|
stream,
|
|
36772
|
-
resolve:
|
|
36772
|
+
resolve: resolve19,
|
|
36773
36773
|
reject,
|
|
36774
36774
|
length: 0,
|
|
36775
36775
|
body: []
|
|
@@ -36804,12 +36804,12 @@ var require_readable = __commonJS({
|
|
|
36804
36804
|
}
|
|
36805
36805
|
}
|
|
36806
36806
|
function consumeEnd(consume2) {
|
|
36807
|
-
const { type, body, resolve:
|
|
36807
|
+
const { type, body, resolve: resolve19, stream, length } = consume2;
|
|
36808
36808
|
try {
|
|
36809
36809
|
if (type === "text") {
|
|
36810
|
-
|
|
36810
|
+
resolve19(toUSVString(Buffer.concat(body)));
|
|
36811
36811
|
} else if (type === "json") {
|
|
36812
|
-
|
|
36812
|
+
resolve19(JSON.parse(Buffer.concat(body)));
|
|
36813
36813
|
} else if (type === "arrayBuffer") {
|
|
36814
36814
|
const dst = new Uint8Array(length);
|
|
36815
36815
|
let pos = 0;
|
|
@@ -36817,12 +36817,12 @@ var require_readable = __commonJS({
|
|
|
36817
36817
|
dst.set(buf, pos);
|
|
36818
36818
|
pos += buf.byteLength;
|
|
36819
36819
|
}
|
|
36820
|
-
|
|
36820
|
+
resolve19(dst.buffer);
|
|
36821
36821
|
} else if (type === "blob") {
|
|
36822
36822
|
if (!Blob2) {
|
|
36823
36823
|
Blob2 = require("buffer").Blob;
|
|
36824
36824
|
}
|
|
36825
|
-
|
|
36825
|
+
resolve19(new Blob2(body, { type: stream[kContentType] }));
|
|
36826
36826
|
}
|
|
36827
36827
|
consumeFinish(consume2);
|
|
36828
36828
|
} catch (err) {
|
|
@@ -37079,9 +37079,9 @@ var require_api_request = __commonJS({
|
|
|
37079
37079
|
};
|
|
37080
37080
|
function request(opts, callback) {
|
|
37081
37081
|
if (callback === void 0) {
|
|
37082
|
-
return new Promise((
|
|
37082
|
+
return new Promise((resolve19, reject) => {
|
|
37083
37083
|
request.call(this, opts, (err, data) => {
|
|
37084
|
-
return err ? reject(err) :
|
|
37084
|
+
return err ? reject(err) : resolve19(data);
|
|
37085
37085
|
});
|
|
37086
37086
|
});
|
|
37087
37087
|
}
|
|
@@ -37254,9 +37254,9 @@ var require_api_stream = __commonJS({
|
|
|
37254
37254
|
};
|
|
37255
37255
|
function stream(opts, factory, callback) {
|
|
37256
37256
|
if (callback === void 0) {
|
|
37257
|
-
return new Promise((
|
|
37257
|
+
return new Promise((resolve19, reject) => {
|
|
37258
37258
|
stream.call(this, opts, factory, (err, data) => {
|
|
37259
|
-
return err ? reject(err) :
|
|
37259
|
+
return err ? reject(err) : resolve19(data);
|
|
37260
37260
|
});
|
|
37261
37261
|
});
|
|
37262
37262
|
}
|
|
@@ -37537,9 +37537,9 @@ var require_api_upgrade = __commonJS({
|
|
|
37537
37537
|
};
|
|
37538
37538
|
function upgrade(opts, callback) {
|
|
37539
37539
|
if (callback === void 0) {
|
|
37540
|
-
return new Promise((
|
|
37540
|
+
return new Promise((resolve19, reject) => {
|
|
37541
37541
|
upgrade.call(this, opts, (err, data) => {
|
|
37542
|
-
return err ? reject(err) :
|
|
37542
|
+
return err ? reject(err) : resolve19(data);
|
|
37543
37543
|
});
|
|
37544
37544
|
});
|
|
37545
37545
|
}
|
|
@@ -37628,9 +37628,9 @@ var require_api_connect = __commonJS({
|
|
|
37628
37628
|
};
|
|
37629
37629
|
function connect(opts, callback) {
|
|
37630
37630
|
if (callback === void 0) {
|
|
37631
|
-
return new Promise((
|
|
37631
|
+
return new Promise((resolve19, reject) => {
|
|
37632
37632
|
connect.call(this, opts, (err, data) => {
|
|
37633
|
-
return err ? reject(err) :
|
|
37633
|
+
return err ? reject(err) : resolve19(data);
|
|
37634
37634
|
});
|
|
37635
37635
|
});
|
|
37636
37636
|
}
|
|
@@ -37790,20 +37790,20 @@ var require_mock_utils = __commonJS({
|
|
|
37790
37790
|
}
|
|
37791
37791
|
return true;
|
|
37792
37792
|
}
|
|
37793
|
-
function safeUrl(
|
|
37794
|
-
if (typeof
|
|
37795
|
-
return
|
|
37793
|
+
function safeUrl(path32) {
|
|
37794
|
+
if (typeof path32 !== "string") {
|
|
37795
|
+
return path32;
|
|
37796
37796
|
}
|
|
37797
|
-
const pathSegments =
|
|
37797
|
+
const pathSegments = path32.split("?");
|
|
37798
37798
|
if (pathSegments.length !== 2) {
|
|
37799
|
-
return
|
|
37799
|
+
return path32;
|
|
37800
37800
|
}
|
|
37801
37801
|
const qp = new URLSearchParams(pathSegments.pop());
|
|
37802
37802
|
qp.sort();
|
|
37803
37803
|
return [...pathSegments, qp.toString()].join("?");
|
|
37804
37804
|
}
|
|
37805
|
-
function matchKey(mockDispatch2, { path:
|
|
37806
|
-
const pathMatch = matchValue(mockDispatch2.path,
|
|
37805
|
+
function matchKey(mockDispatch2, { path: path32, method, body, headers }) {
|
|
37806
|
+
const pathMatch = matchValue(mockDispatch2.path, path32);
|
|
37807
37807
|
const methodMatch = matchValue(mockDispatch2.method, method);
|
|
37808
37808
|
const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
|
|
37809
37809
|
const headersMatch = matchHeaders(mockDispatch2, headers);
|
|
@@ -37821,7 +37821,7 @@ var require_mock_utils = __commonJS({
|
|
|
37821
37821
|
function getMockDispatch(mockDispatches, key) {
|
|
37822
37822
|
const basePath = key.query ? buildURL(key.path, key.query) : key.path;
|
|
37823
37823
|
const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
|
|
37824
|
-
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path:
|
|
37824
|
+
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path32 }) => matchValue(safeUrl(path32), resolvedPath));
|
|
37825
37825
|
if (matchedMockDispatches.length === 0) {
|
|
37826
37826
|
throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
|
|
37827
37827
|
}
|
|
@@ -37858,9 +37858,9 @@ var require_mock_utils = __commonJS({
|
|
|
37858
37858
|
}
|
|
37859
37859
|
}
|
|
37860
37860
|
function buildKey(opts) {
|
|
37861
|
-
const { path:
|
|
37861
|
+
const { path: path32, method, body, headers, query } = opts;
|
|
37862
37862
|
return {
|
|
37863
|
-
path:
|
|
37863
|
+
path: path32,
|
|
37864
37864
|
method,
|
|
37865
37865
|
body,
|
|
37866
37866
|
headers,
|
|
@@ -38309,10 +38309,10 @@ var require_pending_interceptors_formatter = __commonJS({
|
|
|
38309
38309
|
}
|
|
38310
38310
|
format(pendingInterceptors) {
|
|
38311
38311
|
const withPrettyHeaders = pendingInterceptors.map(
|
|
38312
|
-
({ method, path:
|
|
38312
|
+
({ method, path: path32, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
|
|
38313
38313
|
Method: method,
|
|
38314
38314
|
Origin: origin,
|
|
38315
|
-
Path:
|
|
38315
|
+
Path: path32,
|
|
38316
38316
|
"Status code": statusCode,
|
|
38317
38317
|
Persistent: persist ? "\u2705" : "\u274C",
|
|
38318
38318
|
Invocations: timesInvoked,
|
|
@@ -41253,7 +41253,7 @@ var require_fetch = __commonJS({
|
|
|
41253
41253
|
async function dispatch({ body }) {
|
|
41254
41254
|
const url = requestCurrentURL(request);
|
|
41255
41255
|
const agent = fetchParams.controller.dispatcher;
|
|
41256
|
-
return new Promise((
|
|
41256
|
+
return new Promise((resolve19, reject) => agent.dispatch(
|
|
41257
41257
|
{
|
|
41258
41258
|
path: url.pathname + url.search,
|
|
41259
41259
|
origin: url.origin,
|
|
@@ -41329,7 +41329,7 @@ var require_fetch = __commonJS({
|
|
|
41329
41329
|
}
|
|
41330
41330
|
}
|
|
41331
41331
|
}
|
|
41332
|
-
|
|
41332
|
+
resolve19({
|
|
41333
41333
|
status,
|
|
41334
41334
|
statusText,
|
|
41335
41335
|
headersList: headers[kHeadersList],
|
|
@@ -41372,7 +41372,7 @@ var require_fetch = __commonJS({
|
|
|
41372
41372
|
const val = headersList[n + 1].toString("latin1");
|
|
41373
41373
|
headers[kHeadersList].append(key, val);
|
|
41374
41374
|
}
|
|
41375
|
-
|
|
41375
|
+
resolve19({
|
|
41376
41376
|
status,
|
|
41377
41377
|
statusText: STATUS_CODES[status],
|
|
41378
41378
|
headersList: headers[kHeadersList],
|
|
@@ -42933,8 +42933,8 @@ var require_util6 = __commonJS({
|
|
|
42933
42933
|
}
|
|
42934
42934
|
}
|
|
42935
42935
|
}
|
|
42936
|
-
function validateCookiePath(
|
|
42937
|
-
for (const char of
|
|
42936
|
+
function validateCookiePath(path32) {
|
|
42937
|
+
for (const char of path32) {
|
|
42938
42938
|
const code = char.charCodeAt(0);
|
|
42939
42939
|
if (code < 33 || char === ";") {
|
|
42940
42940
|
throw new Error("Invalid cookie path");
|
|
@@ -43731,9 +43731,9 @@ var require_connection = __commonJS({
|
|
|
43731
43731
|
channels.open = diagnosticsChannel.channel("undici:websocket:open");
|
|
43732
43732
|
channels.close = diagnosticsChannel.channel("undici:websocket:close");
|
|
43733
43733
|
channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error");
|
|
43734
|
-
var
|
|
43734
|
+
var crypto8;
|
|
43735
43735
|
try {
|
|
43736
|
-
|
|
43736
|
+
crypto8 = require("crypto");
|
|
43737
43737
|
} catch {
|
|
43738
43738
|
}
|
|
43739
43739
|
function establishWebSocketConnection(url, protocols, ws, onEstablish, options) {
|
|
@@ -43752,7 +43752,7 @@ var require_connection = __commonJS({
|
|
|
43752
43752
|
const headersList = new Headers(options.headers)[kHeadersList];
|
|
43753
43753
|
request.headersList = headersList;
|
|
43754
43754
|
}
|
|
43755
|
-
const keyValue =
|
|
43755
|
+
const keyValue = crypto8.randomBytes(16).toString("base64");
|
|
43756
43756
|
request.headersList.append("sec-websocket-key", keyValue);
|
|
43757
43757
|
request.headersList.append("sec-websocket-version", "13");
|
|
43758
43758
|
for (const protocol of protocols) {
|
|
@@ -43781,7 +43781,7 @@ var require_connection = __commonJS({
|
|
|
43781
43781
|
return;
|
|
43782
43782
|
}
|
|
43783
43783
|
const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
|
|
43784
|
-
const digest =
|
|
43784
|
+
const digest = crypto8.createHash("sha1").update(keyValue + uid).digest("base64");
|
|
43785
43785
|
if (secWSAccept !== digest) {
|
|
43786
43786
|
failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
|
|
43787
43787
|
return;
|
|
@@ -43861,9 +43861,9 @@ var require_frame = __commonJS({
|
|
|
43861
43861
|
"node_modules/undici/lib/websocket/frame.js"(exports2, module2) {
|
|
43862
43862
|
"use strict";
|
|
43863
43863
|
var { maxUnsigned16Bit } = require_constants5();
|
|
43864
|
-
var
|
|
43864
|
+
var crypto8;
|
|
43865
43865
|
try {
|
|
43866
|
-
|
|
43866
|
+
crypto8 = require("crypto");
|
|
43867
43867
|
} catch {
|
|
43868
43868
|
}
|
|
43869
43869
|
var WebsocketFrameSend = class {
|
|
@@ -43872,7 +43872,7 @@ var require_frame = __commonJS({
|
|
|
43872
43872
|
*/
|
|
43873
43873
|
constructor(data) {
|
|
43874
43874
|
this.frameData = data;
|
|
43875
|
-
this.maskKey =
|
|
43875
|
+
this.maskKey = crypto8.randomBytes(4);
|
|
43876
43876
|
}
|
|
43877
43877
|
createFrame(opcode) {
|
|
43878
43878
|
const bodyLength = this.frameData?.byteLength ?? 0;
|
|
@@ -44614,11 +44614,11 @@ var require_undici = __commonJS({
|
|
|
44614
44614
|
if (typeof opts.path !== "string") {
|
|
44615
44615
|
throw new InvalidArgumentError("invalid opts.path");
|
|
44616
44616
|
}
|
|
44617
|
-
let
|
|
44617
|
+
let path32 = opts.path;
|
|
44618
44618
|
if (!opts.path.startsWith("/")) {
|
|
44619
|
-
|
|
44619
|
+
path32 = `/${path32}`;
|
|
44620
44620
|
}
|
|
44621
|
-
url = new URL(util.parseOrigin(url).origin +
|
|
44621
|
+
url = new URL(util.parseOrigin(url).origin + path32);
|
|
44622
44622
|
} else {
|
|
44623
44623
|
if (!opts) {
|
|
44624
44624
|
opts = typeof url === "object" ? url : {};
|
|
@@ -45167,7 +45167,7 @@ var init_mcp_check_provider = __esm({
|
|
|
45167
45167
|
logger.warn(
|
|
45168
45168
|
`MCP ${transportName} failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${error instanceof Error ? error.message : String(error)}`
|
|
45169
45169
|
);
|
|
45170
|
-
await new Promise((
|
|
45170
|
+
await new Promise((resolve19) => setTimeout(resolve19, delay));
|
|
45171
45171
|
attempt += 1;
|
|
45172
45172
|
} finally {
|
|
45173
45173
|
try {
|
|
@@ -45460,7 +45460,7 @@ async function acquirePromptLock() {
|
|
|
45460
45460
|
);
|
|
45461
45461
|
}, 1e4);
|
|
45462
45462
|
try {
|
|
45463
|
-
await new Promise((
|
|
45463
|
+
await new Promise((resolve19) => waiters.push(resolve19));
|
|
45464
45464
|
} finally {
|
|
45465
45465
|
clearInterval(reminder);
|
|
45466
45466
|
const waitedMs = Date.now() - queuedAt;
|
|
@@ -45479,7 +45479,7 @@ function releasePromptLock() {
|
|
|
45479
45479
|
}
|
|
45480
45480
|
async function interactivePrompt(options) {
|
|
45481
45481
|
await acquirePromptLock();
|
|
45482
|
-
return new Promise((
|
|
45482
|
+
return new Promise((resolve19, reject) => {
|
|
45483
45483
|
const dbg = process.env.VISOR_DEBUG === "true";
|
|
45484
45484
|
try {
|
|
45485
45485
|
if (dbg) {
|
|
@@ -45566,12 +45566,12 @@ async function interactivePrompt(options) {
|
|
|
45566
45566
|
};
|
|
45567
45567
|
const finish = (value) => {
|
|
45568
45568
|
cleanup();
|
|
45569
|
-
|
|
45569
|
+
resolve19(value);
|
|
45570
45570
|
};
|
|
45571
45571
|
if (options.timeout && options.timeout > 0) {
|
|
45572
45572
|
timeoutId = setTimeout(() => {
|
|
45573
45573
|
cleanup();
|
|
45574
|
-
if (defaultValue !== void 0) return
|
|
45574
|
+
if (defaultValue !== void 0) return resolve19(defaultValue);
|
|
45575
45575
|
return reject(new Error("Input timeout"));
|
|
45576
45576
|
}, options.timeout);
|
|
45577
45577
|
}
|
|
@@ -45703,7 +45703,7 @@ async function interactivePrompt(options) {
|
|
|
45703
45703
|
});
|
|
45704
45704
|
}
|
|
45705
45705
|
async function simplePrompt(prompt) {
|
|
45706
|
-
return new Promise((
|
|
45706
|
+
return new Promise((resolve19) => {
|
|
45707
45707
|
const rl = readline.createInterface({
|
|
45708
45708
|
input: process.stdin,
|
|
45709
45709
|
output: process.stdout
|
|
@@ -45719,7 +45719,7 @@ async function simplePrompt(prompt) {
|
|
|
45719
45719
|
rl.question(`${prompt}
|
|
45720
45720
|
> `, (answer) => {
|
|
45721
45721
|
rl.close();
|
|
45722
|
-
|
|
45722
|
+
resolve19(answer.trim());
|
|
45723
45723
|
});
|
|
45724
45724
|
});
|
|
45725
45725
|
}
|
|
@@ -45887,7 +45887,7 @@ function isStdinAvailable() {
|
|
|
45887
45887
|
return !process.stdin.isTTY;
|
|
45888
45888
|
}
|
|
45889
45889
|
async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
45890
|
-
return new Promise((
|
|
45890
|
+
return new Promise((resolve19, reject) => {
|
|
45891
45891
|
let data = "";
|
|
45892
45892
|
let timeoutId;
|
|
45893
45893
|
if (timeout) {
|
|
@@ -45914,7 +45914,7 @@ async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
|
45914
45914
|
};
|
|
45915
45915
|
const onEnd = () => {
|
|
45916
45916
|
cleanup();
|
|
45917
|
-
|
|
45917
|
+
resolve19(data.trim());
|
|
45918
45918
|
};
|
|
45919
45919
|
const onError = (err) => {
|
|
45920
45920
|
cleanup();
|
|
@@ -50605,23 +50605,23 @@ __export(renderer_schema_exports, {
|
|
|
50605
50605
|
});
|
|
50606
50606
|
async function loadRendererSchema(name) {
|
|
50607
50607
|
try {
|
|
50608
|
-
const
|
|
50609
|
-
const
|
|
50608
|
+
const fs29 = await import("fs/promises");
|
|
50609
|
+
const path32 = await import("path");
|
|
50610
50610
|
const sanitized = String(name).replace(/[^a-zA-Z0-9-]/g, "");
|
|
50611
50611
|
if (!sanitized) return void 0;
|
|
50612
50612
|
const candidates = [
|
|
50613
50613
|
// When bundled with ncc, __dirname is dist/ and output/ is at dist/output/
|
|
50614
|
-
|
|
50614
|
+
path32.join(__dirname, "output", sanitized, "schema.json"),
|
|
50615
50615
|
// When running from source, __dirname is src/state-machine/dispatch/ and output/ is at output/
|
|
50616
|
-
|
|
50616
|
+
path32.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
|
|
50617
50617
|
// When running from a checkout with output/ folder copied to CWD
|
|
50618
|
-
|
|
50618
|
+
path32.join(process.cwd(), "output", sanitized, "schema.json"),
|
|
50619
50619
|
// Fallback: cwd/dist/output/
|
|
50620
|
-
|
|
50620
|
+
path32.join(process.cwd(), "dist", "output", sanitized, "schema.json")
|
|
50621
50621
|
];
|
|
50622
50622
|
for (const p of candidates) {
|
|
50623
50623
|
try {
|
|
50624
|
-
const raw = await
|
|
50624
|
+
const raw = await fs29.readFile(p, "utf-8");
|
|
50625
50625
|
return JSON.parse(raw);
|
|
50626
50626
|
} catch {
|
|
50627
50627
|
}
|
|
@@ -53062,8 +53062,8 @@ function updateStats2(results, state, isForEachIteration = false) {
|
|
|
53062
53062
|
async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
53063
53063
|
try {
|
|
53064
53064
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
53065
|
-
const
|
|
53066
|
-
const
|
|
53065
|
+
const fs29 = await import("fs/promises");
|
|
53066
|
+
const path32 = await import("path");
|
|
53067
53067
|
const schemaRaw = checkConfig.schema || "plain";
|
|
53068
53068
|
const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
|
|
53069
53069
|
let templateContent;
|
|
@@ -53072,27 +53072,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
|
53072
53072
|
logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
|
|
53073
53073
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
53074
53074
|
const file = String(checkConfig.template.file);
|
|
53075
|
-
const resolved =
|
|
53076
|
-
templateContent = await
|
|
53075
|
+
const resolved = path32.resolve(process.cwd(), file);
|
|
53076
|
+
templateContent = await fs29.readFile(resolved, "utf-8");
|
|
53077
53077
|
logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
|
|
53078
53078
|
} else if (schema && schema !== "plain") {
|
|
53079
53079
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
53080
53080
|
if (sanitized) {
|
|
53081
53081
|
const candidatePaths = [
|
|
53082
|
-
|
|
53082
|
+
path32.join(__dirname, "output", sanitized, "template.liquid"),
|
|
53083
53083
|
// bundled: dist/output/
|
|
53084
|
-
|
|
53084
|
+
path32.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
53085
53085
|
// source (from state-machine/states)
|
|
53086
|
-
|
|
53086
|
+
path32.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
|
|
53087
53087
|
// source (alternate)
|
|
53088
|
-
|
|
53088
|
+
path32.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
53089
53089
|
// fallback: cwd/output/
|
|
53090
|
-
|
|
53090
|
+
path32.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
53091
53091
|
// fallback: cwd/dist/output/
|
|
53092
53092
|
];
|
|
53093
53093
|
for (const p of candidatePaths) {
|
|
53094
53094
|
try {
|
|
53095
|
-
templateContent = await
|
|
53095
|
+
templateContent = await fs29.readFile(p, "utf-8");
|
|
53096
53096
|
if (templateContent) {
|
|
53097
53097
|
logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
|
|
53098
53098
|
break;
|
|
@@ -55232,8 +55232,8 @@ var init_workspace_manager = __esm({
|
|
|
55232
55232
|
);
|
|
55233
55233
|
if (this.cleanupRequested && this.activeOperations === 0) {
|
|
55234
55234
|
logger.debug(`[Workspace] All references released, proceeding with deferred cleanup`);
|
|
55235
|
-
for (const
|
|
55236
|
-
|
|
55235
|
+
for (const resolve19 of this.cleanupResolvers) {
|
|
55236
|
+
resolve19();
|
|
55237
55237
|
}
|
|
55238
55238
|
this.cleanupResolvers = [];
|
|
55239
55239
|
}
|
|
@@ -55412,19 +55412,19 @@ var init_workspace_manager = __esm({
|
|
|
55412
55412
|
);
|
|
55413
55413
|
this.cleanupRequested = true;
|
|
55414
55414
|
await Promise.race([
|
|
55415
|
-
new Promise((
|
|
55415
|
+
new Promise((resolve19) => {
|
|
55416
55416
|
if (this.activeOperations === 0) {
|
|
55417
|
-
|
|
55417
|
+
resolve19();
|
|
55418
55418
|
} else {
|
|
55419
|
-
this.cleanupResolvers.push(
|
|
55419
|
+
this.cleanupResolvers.push(resolve19);
|
|
55420
55420
|
}
|
|
55421
55421
|
}),
|
|
55422
|
-
new Promise((
|
|
55422
|
+
new Promise((resolve19) => {
|
|
55423
55423
|
setTimeout(() => {
|
|
55424
55424
|
logger.warn(
|
|
55425
55425
|
`[Workspace] Cleanup timeout after ${timeout}ms, proceeding anyway (${this.activeOperations} operations still active)`
|
|
55426
55426
|
);
|
|
55427
|
-
|
|
55427
|
+
resolve19();
|
|
55428
55428
|
}, timeout);
|
|
55429
55429
|
})
|
|
55430
55430
|
]);
|
|
@@ -55818,8 +55818,8 @@ var init_fair_concurrency_limiter = __esm({
|
|
|
55818
55818
|
);
|
|
55819
55819
|
const queuedAt = Date.now();
|
|
55820
55820
|
const effectiveTimeout = queueTimeout ?? 12e4;
|
|
55821
|
-
return new Promise((
|
|
55822
|
-
const entry = { resolve:
|
|
55821
|
+
return new Promise((resolve19, reject) => {
|
|
55822
|
+
const entry = { resolve: resolve19, reject, queuedAt };
|
|
55823
55823
|
entry.reminder = setInterval(() => {
|
|
55824
55824
|
const waited = Math.round((Date.now() - queuedAt) / 1e3);
|
|
55825
55825
|
const curQueued = this._totalQueued();
|
|
@@ -56126,6 +56126,1380 @@ var init_build_engine_context = __esm({
|
|
|
56126
56126
|
}
|
|
56127
56127
|
});
|
|
56128
56128
|
|
|
56129
|
+
// src/policy/default-engine.ts
|
|
56130
|
+
var DefaultPolicyEngine;
|
|
56131
|
+
var init_default_engine = __esm({
|
|
56132
|
+
"src/policy/default-engine.ts"() {
|
|
56133
|
+
"use strict";
|
|
56134
|
+
DefaultPolicyEngine = class {
|
|
56135
|
+
async initialize(_config) {
|
|
56136
|
+
}
|
|
56137
|
+
async evaluateCheckExecution(_checkId, _checkConfig) {
|
|
56138
|
+
return { allowed: true };
|
|
56139
|
+
}
|
|
56140
|
+
async evaluateToolInvocation(_serverName, _methodName, _transport) {
|
|
56141
|
+
return { allowed: true };
|
|
56142
|
+
}
|
|
56143
|
+
async evaluateCapabilities(_checkId, _capabilities) {
|
|
56144
|
+
return { allowed: true };
|
|
56145
|
+
}
|
|
56146
|
+
async shutdown() {
|
|
56147
|
+
}
|
|
56148
|
+
};
|
|
56149
|
+
}
|
|
56150
|
+
});
|
|
56151
|
+
|
|
56152
|
+
// src/enterprise/license/validator.ts
|
|
56153
|
+
var validator_exports = {};
|
|
56154
|
+
__export(validator_exports, {
|
|
56155
|
+
LicenseValidator: () => LicenseValidator
|
|
56156
|
+
});
|
|
56157
|
+
var crypto2, fs21, path25, LicenseValidator;
|
|
56158
|
+
var init_validator = __esm({
|
|
56159
|
+
"src/enterprise/license/validator.ts"() {
|
|
56160
|
+
"use strict";
|
|
56161
|
+
crypto2 = __toESM(require("crypto"));
|
|
56162
|
+
fs21 = __toESM(require("fs"));
|
|
56163
|
+
path25 = __toESM(require("path"));
|
|
56164
|
+
LicenseValidator = class _LicenseValidator {
|
|
56165
|
+
/** Ed25519 public key for license verification (PEM format). */
|
|
56166
|
+
static PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n-----END PUBLIC KEY-----\n";
|
|
56167
|
+
cache = null;
|
|
56168
|
+
static CACHE_TTL = 5 * 60 * 1e3;
|
|
56169
|
+
// 5 minutes
|
|
56170
|
+
static GRACE_PERIOD = 72 * 3600 * 1e3;
|
|
56171
|
+
// 72 hours after expiry
|
|
56172
|
+
/**
|
|
56173
|
+
* Load and validate license from environment or file.
|
|
56174
|
+
*
|
|
56175
|
+
* Resolution order:
|
|
56176
|
+
* 1. VISOR_LICENSE env var (JWT string)
|
|
56177
|
+
* 2. VISOR_LICENSE_FILE env var (path to file)
|
|
56178
|
+
* 3. .visor-license in project root (cwd)
|
|
56179
|
+
* 4. .visor-license in ~/.config/visor/
|
|
56180
|
+
*/
|
|
56181
|
+
async loadAndValidate() {
|
|
56182
|
+
if (this.cache && Date.now() - this.cache.validatedAt < _LicenseValidator.CACHE_TTL) {
|
|
56183
|
+
return this.cache.payload;
|
|
56184
|
+
}
|
|
56185
|
+
const token = this.resolveToken();
|
|
56186
|
+
if (!token) return null;
|
|
56187
|
+
const payload = this.verifyAndDecode(token);
|
|
56188
|
+
if (!payload) return null;
|
|
56189
|
+
this.cache = { payload, validatedAt: Date.now() };
|
|
56190
|
+
return payload;
|
|
56191
|
+
}
|
|
56192
|
+
/** Check if a specific feature is licensed */
|
|
56193
|
+
hasFeature(feature) {
|
|
56194
|
+
if (!this.cache) return false;
|
|
56195
|
+
return this.cache.payload.features.includes(feature);
|
|
56196
|
+
}
|
|
56197
|
+
/** Check if license is valid (with grace period) */
|
|
56198
|
+
isValid() {
|
|
56199
|
+
if (!this.cache) return false;
|
|
56200
|
+
const now = Date.now();
|
|
56201
|
+
const expiryMs = this.cache.payload.exp * 1e3;
|
|
56202
|
+
return now < expiryMs + _LicenseValidator.GRACE_PERIOD;
|
|
56203
|
+
}
|
|
56204
|
+
/** Check if the license is within its grace period (expired but still valid) */
|
|
56205
|
+
isInGracePeriod() {
|
|
56206
|
+
if (!this.cache) return false;
|
|
56207
|
+
const now = Date.now();
|
|
56208
|
+
const expiryMs = this.cache.payload.exp * 1e3;
|
|
56209
|
+
return now >= expiryMs && now < expiryMs + _LicenseValidator.GRACE_PERIOD;
|
|
56210
|
+
}
|
|
56211
|
+
resolveToken() {
|
|
56212
|
+
if (process.env.VISOR_LICENSE) {
|
|
56213
|
+
return process.env.VISOR_LICENSE.trim();
|
|
56214
|
+
}
|
|
56215
|
+
if (process.env.VISOR_LICENSE_FILE) {
|
|
56216
|
+
const resolved = path25.resolve(process.env.VISOR_LICENSE_FILE);
|
|
56217
|
+
const home2 = process.env.HOME || process.env.USERPROFILE || "";
|
|
56218
|
+
const allowedPrefixes = [path25.normalize(process.cwd())];
|
|
56219
|
+
if (home2) allowedPrefixes.push(path25.normalize(path25.join(home2, ".config", "visor")));
|
|
56220
|
+
let realPath;
|
|
56221
|
+
try {
|
|
56222
|
+
realPath = fs21.realpathSync(resolved);
|
|
56223
|
+
} catch {
|
|
56224
|
+
return null;
|
|
56225
|
+
}
|
|
56226
|
+
const isSafe = allowedPrefixes.some(
|
|
56227
|
+
(prefix) => realPath === prefix || realPath.startsWith(prefix + path25.sep)
|
|
56228
|
+
);
|
|
56229
|
+
if (!isSafe) return null;
|
|
56230
|
+
return this.readFile(realPath);
|
|
56231
|
+
}
|
|
56232
|
+
const cwdPath = path25.join(process.cwd(), ".visor-license");
|
|
56233
|
+
const cwdToken = this.readFile(cwdPath);
|
|
56234
|
+
if (cwdToken) return cwdToken;
|
|
56235
|
+
const home = process.env.HOME || process.env.USERPROFILE || "";
|
|
56236
|
+
if (home) {
|
|
56237
|
+
const configPath = path25.join(home, ".config", "visor", ".visor-license");
|
|
56238
|
+
const configToken = this.readFile(configPath);
|
|
56239
|
+
if (configToken) return configToken;
|
|
56240
|
+
}
|
|
56241
|
+
return null;
|
|
56242
|
+
}
|
|
56243
|
+
readFile(filePath) {
|
|
56244
|
+
try {
|
|
56245
|
+
return fs21.readFileSync(filePath, "utf-8").trim();
|
|
56246
|
+
} catch {
|
|
56247
|
+
return null;
|
|
56248
|
+
}
|
|
56249
|
+
}
|
|
56250
|
+
verifyAndDecode(token) {
|
|
56251
|
+
try {
|
|
56252
|
+
const parts = token.split(".");
|
|
56253
|
+
if (parts.length !== 3) return null;
|
|
56254
|
+
const [headerB64, payloadB64, signatureB64] = parts;
|
|
56255
|
+
const header = JSON.parse(Buffer.from(headerB64, "base64url").toString());
|
|
56256
|
+
if (header.alg !== "EdDSA") return null;
|
|
56257
|
+
const data = `${headerB64}.${payloadB64}`;
|
|
56258
|
+
const signature = Buffer.from(signatureB64, "base64url");
|
|
56259
|
+
const publicKey = crypto2.createPublicKey(_LicenseValidator.PUBLIC_KEY);
|
|
56260
|
+
if (publicKey.asymmetricKeyType !== "ed25519") {
|
|
56261
|
+
return null;
|
|
56262
|
+
}
|
|
56263
|
+
const isValid = crypto2.verify(null, Buffer.from(data), publicKey, signature);
|
|
56264
|
+
if (!isValid) return null;
|
|
56265
|
+
const payload = JSON.parse(Buffer.from(payloadB64, "base64url").toString());
|
|
56266
|
+
if (!payload.org || !Array.isArray(payload.features) || typeof payload.exp !== "number" || typeof payload.iat !== "number" || !payload.sub) {
|
|
56267
|
+
return null;
|
|
56268
|
+
}
|
|
56269
|
+
const now = Date.now();
|
|
56270
|
+
const expiryMs = payload.exp * 1e3;
|
|
56271
|
+
if (now >= expiryMs + _LicenseValidator.GRACE_PERIOD) {
|
|
56272
|
+
return null;
|
|
56273
|
+
}
|
|
56274
|
+
return payload;
|
|
56275
|
+
} catch {
|
|
56276
|
+
return null;
|
|
56277
|
+
}
|
|
56278
|
+
}
|
|
56279
|
+
};
|
|
56280
|
+
}
|
|
56281
|
+
});
|
|
56282
|
+
|
|
56283
|
+
// src/enterprise/policy/opa-compiler.ts
|
|
56284
|
+
var fs22, path26, os2, crypto3, import_child_process8, OpaCompiler;
|
|
56285
|
+
var init_opa_compiler = __esm({
|
|
56286
|
+
"src/enterprise/policy/opa-compiler.ts"() {
|
|
56287
|
+
"use strict";
|
|
56288
|
+
fs22 = __toESM(require("fs"));
|
|
56289
|
+
path26 = __toESM(require("path"));
|
|
56290
|
+
os2 = __toESM(require("os"));
|
|
56291
|
+
crypto3 = __toESM(require("crypto"));
|
|
56292
|
+
import_child_process8 = require("child_process");
|
|
56293
|
+
OpaCompiler = class _OpaCompiler {
|
|
56294
|
+
static CACHE_DIR = path26.join(os2.tmpdir(), "visor-opa-cache");
|
|
56295
|
+
/**
|
|
56296
|
+
* Resolve the input paths to WASM bytes.
|
|
56297
|
+
*
|
|
56298
|
+
* Strategy:
|
|
56299
|
+
* 1. If any path is a .wasm file, read it directly
|
|
56300
|
+
* 2. If a directory contains policy.wasm, read it
|
|
56301
|
+
* 3. Otherwise, collect all .rego files and auto-compile via `opa build`
|
|
56302
|
+
*/
|
|
56303
|
+
async resolveWasmBytes(paths) {
|
|
56304
|
+
const regoFiles = [];
|
|
56305
|
+
for (const p of paths) {
|
|
56306
|
+
const resolved = path26.resolve(p);
|
|
56307
|
+
if (path26.normalize(resolved).includes("..")) {
|
|
56308
|
+
throw new Error(`Policy path contains traversal sequences: ${p}`);
|
|
56309
|
+
}
|
|
56310
|
+
if (resolved.endsWith(".wasm") && fs22.existsSync(resolved)) {
|
|
56311
|
+
return fs22.readFileSync(resolved);
|
|
56312
|
+
}
|
|
56313
|
+
if (!fs22.existsSync(resolved)) continue;
|
|
56314
|
+
const stat2 = fs22.statSync(resolved);
|
|
56315
|
+
if (stat2.isDirectory()) {
|
|
56316
|
+
const wasmCandidate = path26.join(resolved, "policy.wasm");
|
|
56317
|
+
if (fs22.existsSync(wasmCandidate)) {
|
|
56318
|
+
return fs22.readFileSync(wasmCandidate);
|
|
56319
|
+
}
|
|
56320
|
+
const files = fs22.readdirSync(resolved);
|
|
56321
|
+
for (const f of files) {
|
|
56322
|
+
if (f.endsWith(".rego")) {
|
|
56323
|
+
regoFiles.push(path26.join(resolved, f));
|
|
56324
|
+
}
|
|
56325
|
+
}
|
|
56326
|
+
} else if (resolved.endsWith(".rego")) {
|
|
56327
|
+
regoFiles.push(resolved);
|
|
56328
|
+
}
|
|
56329
|
+
}
|
|
56330
|
+
if (regoFiles.length === 0) {
|
|
56331
|
+
throw new Error(
|
|
56332
|
+
`OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(", ")}`
|
|
56333
|
+
);
|
|
56334
|
+
}
|
|
56335
|
+
return this.compileRego(regoFiles);
|
|
56336
|
+
}
|
|
56337
|
+
/**
|
|
56338
|
+
* Auto-compile .rego files to a WASM bundle using the `opa` CLI.
|
|
56339
|
+
*
|
|
56340
|
+
* Caches the compiled bundle based on a content hash of all input .rego files
|
|
56341
|
+
* so subsequent runs skip compilation if policies haven't changed.
|
|
56342
|
+
*/
|
|
56343
|
+
compileRego(regoFiles) {
|
|
56344
|
+
try {
|
|
56345
|
+
(0, import_child_process8.execFileSync)("opa", ["version"], { stdio: "pipe" });
|
|
56346
|
+
} catch {
|
|
56347
|
+
throw new Error(
|
|
56348
|
+
"OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\nOr pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz " + regoFiles.join(" ")
|
|
56349
|
+
);
|
|
56350
|
+
}
|
|
56351
|
+
const hash = crypto3.createHash("sha256");
|
|
56352
|
+
for (const f of regoFiles.sort()) {
|
|
56353
|
+
hash.update(fs22.readFileSync(f));
|
|
56354
|
+
hash.update(f);
|
|
56355
|
+
}
|
|
56356
|
+
const cacheKey = hash.digest("hex").slice(0, 16);
|
|
56357
|
+
const cacheDir = _OpaCompiler.CACHE_DIR;
|
|
56358
|
+
const cachedWasm = path26.join(cacheDir, `${cacheKey}.wasm`);
|
|
56359
|
+
if (fs22.existsSync(cachedWasm)) {
|
|
56360
|
+
return fs22.readFileSync(cachedWasm);
|
|
56361
|
+
}
|
|
56362
|
+
fs22.mkdirSync(cacheDir, { recursive: true });
|
|
56363
|
+
const bundleTar = path26.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
|
|
56364
|
+
try {
|
|
56365
|
+
const args = [
|
|
56366
|
+
"build",
|
|
56367
|
+
"-t",
|
|
56368
|
+
"wasm",
|
|
56369
|
+
"-e",
|
|
56370
|
+
"visor",
|
|
56371
|
+
// entrypoint: the visor package tree
|
|
56372
|
+
"-o",
|
|
56373
|
+
bundleTar,
|
|
56374
|
+
...regoFiles
|
|
56375
|
+
];
|
|
56376
|
+
(0, import_child_process8.execFileSync)("opa", args, {
|
|
56377
|
+
stdio: "pipe",
|
|
56378
|
+
timeout: 3e4
|
|
56379
|
+
});
|
|
56380
|
+
} catch (err) {
|
|
56381
|
+
const stderr = err?.stderr?.toString() || "";
|
|
56382
|
+
throw new Error(
|
|
56383
|
+
`Failed to compile .rego files to WASM:
|
|
56384
|
+
${stderr}
|
|
56385
|
+
Ensure your .rego files are valid and the \`opa\` CLI is installed.`
|
|
56386
|
+
);
|
|
56387
|
+
}
|
|
56388
|
+
try {
|
|
56389
|
+
(0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "/policy.wasm"], {
|
|
56390
|
+
stdio: "pipe"
|
|
56391
|
+
});
|
|
56392
|
+
const extractedWasm = path26.join(cacheDir, "policy.wasm");
|
|
56393
|
+
if (fs22.existsSync(extractedWasm)) {
|
|
56394
|
+
fs22.renameSync(extractedWasm, cachedWasm);
|
|
56395
|
+
}
|
|
56396
|
+
} catch {
|
|
56397
|
+
try {
|
|
56398
|
+
(0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "policy.wasm"], {
|
|
56399
|
+
stdio: "pipe"
|
|
56400
|
+
});
|
|
56401
|
+
const extractedWasm = path26.join(cacheDir, "policy.wasm");
|
|
56402
|
+
if (fs22.existsSync(extractedWasm)) {
|
|
56403
|
+
fs22.renameSync(extractedWasm, cachedWasm);
|
|
56404
|
+
}
|
|
56405
|
+
} catch (err2) {
|
|
56406
|
+
throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
|
|
56407
|
+
}
|
|
56408
|
+
}
|
|
56409
|
+
try {
|
|
56410
|
+
fs22.unlinkSync(bundleTar);
|
|
56411
|
+
} catch {
|
|
56412
|
+
}
|
|
56413
|
+
if (!fs22.existsSync(cachedWasm)) {
|
|
56414
|
+
throw new Error("OPA build succeeded but policy.wasm was not found in the bundle");
|
|
56415
|
+
}
|
|
56416
|
+
return fs22.readFileSync(cachedWasm);
|
|
56417
|
+
}
|
|
56418
|
+
};
|
|
56419
|
+
}
|
|
56420
|
+
});
|
|
56421
|
+
|
|
56422
|
+
// src/enterprise/policy/opa-wasm-evaluator.ts
|
|
56423
|
+
var fs23, path27, OpaWasmEvaluator;
|
|
56424
|
+
var init_opa_wasm_evaluator = __esm({
|
|
56425
|
+
"src/enterprise/policy/opa-wasm-evaluator.ts"() {
|
|
56426
|
+
"use strict";
|
|
56427
|
+
fs23 = __toESM(require("fs"));
|
|
56428
|
+
path27 = __toESM(require("path"));
|
|
56429
|
+
init_opa_compiler();
|
|
56430
|
+
OpaWasmEvaluator = class {
|
|
56431
|
+
policy = null;
|
|
56432
|
+
dataDocument = {};
|
|
56433
|
+
compiler = new OpaCompiler();
|
|
56434
|
+
async initialize(rulesPath) {
|
|
56435
|
+
const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
|
|
56436
|
+
const wasmBytes = await this.compiler.resolveWasmBytes(paths);
|
|
56437
|
+
try {
|
|
56438
|
+
const { createRequire } = require("module");
|
|
56439
|
+
const runtimeRequire = createRequire(__filename);
|
|
56440
|
+
const opaWasm = runtimeRequire("@open-policy-agent/opa-wasm");
|
|
56441
|
+
const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
|
|
56442
|
+
if (!loadPolicy) {
|
|
56443
|
+
throw new Error("loadPolicy not found in @open-policy-agent/opa-wasm");
|
|
56444
|
+
}
|
|
56445
|
+
this.policy = await loadPolicy(wasmBytes);
|
|
56446
|
+
} catch (err) {
|
|
56447
|
+
if (err?.code === "MODULE_NOT_FOUND" || err?.code === "ERR_MODULE_NOT_FOUND") {
|
|
56448
|
+
throw new Error(
|
|
56449
|
+
"OPA WASM evaluator requires @open-policy-agent/opa-wasm. Install it with: npm install @open-policy-agent/opa-wasm"
|
|
56450
|
+
);
|
|
56451
|
+
}
|
|
56452
|
+
throw err;
|
|
56453
|
+
}
|
|
56454
|
+
}
|
|
56455
|
+
/**
|
|
56456
|
+
* Load external data from a JSON file to use as the OPA data document.
|
|
56457
|
+
* The loaded data will be passed to `policy.setData()` during evaluation,
|
|
56458
|
+
* making it available in Rego via `data.<key>`.
|
|
56459
|
+
*/
|
|
56460
|
+
loadData(dataPath) {
|
|
56461
|
+
const resolved = path27.resolve(dataPath);
|
|
56462
|
+
if (path27.normalize(resolved).includes("..")) {
|
|
56463
|
+
throw new Error(`Data path contains traversal sequences: ${dataPath}`);
|
|
56464
|
+
}
|
|
56465
|
+
if (!fs23.existsSync(resolved)) {
|
|
56466
|
+
throw new Error(`OPA data file not found: ${resolved}`);
|
|
56467
|
+
}
|
|
56468
|
+
const stat2 = fs23.statSync(resolved);
|
|
56469
|
+
if (stat2.size > 10 * 1024 * 1024) {
|
|
56470
|
+
throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat2.size} bytes)`);
|
|
56471
|
+
}
|
|
56472
|
+
const raw = fs23.readFileSync(resolved, "utf-8");
|
|
56473
|
+
try {
|
|
56474
|
+
const parsed = JSON.parse(raw);
|
|
56475
|
+
if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
|
|
56476
|
+
throw new Error("OPA data file must contain a JSON object (not an array or primitive)");
|
|
56477
|
+
}
|
|
56478
|
+
this.dataDocument = parsed;
|
|
56479
|
+
} catch (err) {
|
|
56480
|
+
if (err.message.startsWith("OPA data file must")) {
|
|
56481
|
+
throw err;
|
|
56482
|
+
}
|
|
56483
|
+
throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
|
|
56484
|
+
}
|
|
56485
|
+
}
|
|
56486
|
+
async evaluate(input) {
|
|
56487
|
+
if (!this.policy) {
|
|
56488
|
+
throw new Error("OPA WASM evaluator not initialized");
|
|
56489
|
+
}
|
|
56490
|
+
this.policy.setData(this.dataDocument);
|
|
56491
|
+
const resultSet = this.policy.evaluate(input);
|
|
56492
|
+
if (Array.isArray(resultSet) && resultSet.length > 0) {
|
|
56493
|
+
return resultSet[0].result;
|
|
56494
|
+
}
|
|
56495
|
+
return void 0;
|
|
56496
|
+
}
|
|
56497
|
+
async shutdown() {
|
|
56498
|
+
if (this.policy) {
|
|
56499
|
+
if (typeof this.policy.close === "function") {
|
|
56500
|
+
try {
|
|
56501
|
+
this.policy.close();
|
|
56502
|
+
} catch {
|
|
56503
|
+
}
|
|
56504
|
+
} else if (typeof this.policy.free === "function") {
|
|
56505
|
+
try {
|
|
56506
|
+
this.policy.free();
|
|
56507
|
+
} catch {
|
|
56508
|
+
}
|
|
56509
|
+
}
|
|
56510
|
+
}
|
|
56511
|
+
this.policy = null;
|
|
56512
|
+
}
|
|
56513
|
+
};
|
|
56514
|
+
}
|
|
56515
|
+
});
|
|
56516
|
+
|
|
56517
|
+
// src/enterprise/policy/opa-http-evaluator.ts
|
|
56518
|
+
var OpaHttpEvaluator;
|
|
56519
|
+
var init_opa_http_evaluator = __esm({
|
|
56520
|
+
"src/enterprise/policy/opa-http-evaluator.ts"() {
|
|
56521
|
+
"use strict";
|
|
56522
|
+
OpaHttpEvaluator = class {
|
|
56523
|
+
baseUrl;
|
|
56524
|
+
timeout;
|
|
56525
|
+
constructor(baseUrl, timeout = 5e3) {
|
|
56526
|
+
let parsed;
|
|
56527
|
+
try {
|
|
56528
|
+
parsed = new URL(baseUrl);
|
|
56529
|
+
} catch {
|
|
56530
|
+
throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
|
|
56531
|
+
}
|
|
56532
|
+
if (!["http:", "https:"].includes(parsed.protocol)) {
|
|
56533
|
+
throw new Error(
|
|
56534
|
+
`OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`
|
|
56535
|
+
);
|
|
56536
|
+
}
|
|
56537
|
+
const hostname = parsed.hostname;
|
|
56538
|
+
if (this.isBlockedHostname(hostname)) {
|
|
56539
|
+
throw new Error(
|
|
56540
|
+
`OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`
|
|
56541
|
+
);
|
|
56542
|
+
}
|
|
56543
|
+
this.baseUrl = baseUrl.replace(/\/+$/, "");
|
|
56544
|
+
this.timeout = timeout;
|
|
56545
|
+
}
|
|
56546
|
+
/**
|
|
56547
|
+
* Check if a hostname is blocked due to SSRF concerns.
|
|
56548
|
+
*
|
|
56549
|
+
* Blocks:
|
|
56550
|
+
* - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
|
|
56551
|
+
* - Link-local addresses (169.254.x.x)
|
|
56552
|
+
* - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
|
|
56553
|
+
* - IPv6 unique local addresses (fd00::/8)
|
|
56554
|
+
* - Cloud metadata services (*.internal)
|
|
56555
|
+
*/
|
|
56556
|
+
isBlockedHostname(hostname) {
|
|
56557
|
+
if (!hostname) return true;
|
|
56558
|
+
const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, "");
|
|
56559
|
+
if (normalized === "metadata.google.internal" || normalized.endsWith(".internal")) {
|
|
56560
|
+
return true;
|
|
56561
|
+
}
|
|
56562
|
+
if (normalized === "localhost" || normalized === "localhost.localdomain") {
|
|
56563
|
+
return true;
|
|
56564
|
+
}
|
|
56565
|
+
if (normalized === "::1" || normalized === "0:0:0:0:0:0:0:1") {
|
|
56566
|
+
return true;
|
|
56567
|
+
}
|
|
56568
|
+
const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
|
|
56569
|
+
const ipv4Match = normalized.match(ipv4Pattern);
|
|
56570
|
+
if (ipv4Match) {
|
|
56571
|
+
const octets = ipv4Match.slice(1, 5).map(Number);
|
|
56572
|
+
if (octets.some((octet) => octet > 255)) {
|
|
56573
|
+
return false;
|
|
56574
|
+
}
|
|
56575
|
+
const [a, b] = octets;
|
|
56576
|
+
if (a === 127) {
|
|
56577
|
+
return true;
|
|
56578
|
+
}
|
|
56579
|
+
if (a === 0) {
|
|
56580
|
+
return true;
|
|
56581
|
+
}
|
|
56582
|
+
if (a === 169 && b === 254) {
|
|
56583
|
+
return true;
|
|
56584
|
+
}
|
|
56585
|
+
if (a === 10) {
|
|
56586
|
+
return true;
|
|
56587
|
+
}
|
|
56588
|
+
if (a === 172 && b >= 16 && b <= 31) {
|
|
56589
|
+
return true;
|
|
56590
|
+
}
|
|
56591
|
+
if (a === 192 && b === 168) {
|
|
56592
|
+
return true;
|
|
56593
|
+
}
|
|
56594
|
+
}
|
|
56595
|
+
if (normalized.startsWith("fd") || normalized.startsWith("fc")) {
|
|
56596
|
+
return true;
|
|
56597
|
+
}
|
|
56598
|
+
if (normalized.startsWith("fe80:")) {
|
|
56599
|
+
return true;
|
|
56600
|
+
}
|
|
56601
|
+
return false;
|
|
56602
|
+
}
|
|
56603
|
+
/**
|
|
56604
|
+
* Evaluate a policy rule against an input document via OPA REST API.
|
|
56605
|
+
*
|
|
56606
|
+
* @param input - The input document to evaluate
|
|
56607
|
+
* @param rulePath - OPA rule path (e.g., 'visor/check/execute')
|
|
56608
|
+
* @returns The result object from OPA, or undefined on error
|
|
56609
|
+
*/
|
|
56610
|
+
async evaluate(input, rulePath) {
|
|
56611
|
+
const encodedPath = rulePath.split("/").map((s) => encodeURIComponent(s)).join("/");
|
|
56612
|
+
const url = `${this.baseUrl}/v1/data/${encodedPath}`;
|
|
56613
|
+
const controller = new AbortController();
|
|
56614
|
+
const timer = setTimeout(() => controller.abort(), this.timeout);
|
|
56615
|
+
try {
|
|
56616
|
+
const response = await fetch(url, {
|
|
56617
|
+
method: "POST",
|
|
56618
|
+
headers: { "Content-Type": "application/json" },
|
|
56619
|
+
body: JSON.stringify({ input }),
|
|
56620
|
+
signal: controller.signal
|
|
56621
|
+
});
|
|
56622
|
+
if (!response.ok) {
|
|
56623
|
+
throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
|
|
56624
|
+
}
|
|
56625
|
+
let body;
|
|
56626
|
+
try {
|
|
56627
|
+
body = await response.json();
|
|
56628
|
+
} catch (jsonErr) {
|
|
56629
|
+
throw new Error(
|
|
56630
|
+
`OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`
|
|
56631
|
+
);
|
|
56632
|
+
}
|
|
56633
|
+
return body?.result;
|
|
56634
|
+
} finally {
|
|
56635
|
+
clearTimeout(timer);
|
|
56636
|
+
}
|
|
56637
|
+
}
|
|
56638
|
+
async shutdown() {
|
|
56639
|
+
}
|
|
56640
|
+
};
|
|
56641
|
+
}
|
|
56642
|
+
});
|
|
56643
|
+
|
|
56644
|
+
// src/enterprise/policy/policy-input-builder.ts
|
|
56645
|
+
var PolicyInputBuilder;
|
|
56646
|
+
var init_policy_input_builder = __esm({
|
|
56647
|
+
"src/enterprise/policy/policy-input-builder.ts"() {
|
|
56648
|
+
"use strict";
|
|
56649
|
+
PolicyInputBuilder = class {
|
|
56650
|
+
roles;
|
|
56651
|
+
actor;
|
|
56652
|
+
repository;
|
|
56653
|
+
pullRequest;
|
|
56654
|
+
constructor(policyConfig, actor, repository, pullRequest) {
|
|
56655
|
+
this.roles = policyConfig.roles || {};
|
|
56656
|
+
this.actor = actor;
|
|
56657
|
+
this.repository = repository;
|
|
56658
|
+
this.pullRequest = pullRequest;
|
|
56659
|
+
}
|
|
56660
|
+
/** Resolve which roles apply to the current actor. */
|
|
56661
|
+
resolveRoles() {
|
|
56662
|
+
const matched = [];
|
|
56663
|
+
for (const [roleName, roleConfig] of Object.entries(this.roles)) {
|
|
56664
|
+
let identityMatch = false;
|
|
56665
|
+
if (roleConfig.author_association && this.actor.authorAssociation && roleConfig.author_association.includes(this.actor.authorAssociation)) {
|
|
56666
|
+
identityMatch = true;
|
|
56667
|
+
}
|
|
56668
|
+
if (!identityMatch && roleConfig.users && this.actor.login && roleConfig.users.includes(this.actor.login)) {
|
|
56669
|
+
identityMatch = true;
|
|
56670
|
+
}
|
|
56671
|
+
if (!identityMatch && roleConfig.slack_users && this.actor.slack?.userId && roleConfig.slack_users.includes(this.actor.slack.userId)) {
|
|
56672
|
+
identityMatch = true;
|
|
56673
|
+
}
|
|
56674
|
+
if (!identityMatch && roleConfig.emails && this.actor.slack?.email) {
|
|
56675
|
+
const actorEmail = this.actor.slack.email.toLowerCase();
|
|
56676
|
+
if (roleConfig.emails.some((e) => e.toLowerCase() === actorEmail)) {
|
|
56677
|
+
identityMatch = true;
|
|
56678
|
+
}
|
|
56679
|
+
}
|
|
56680
|
+
if (!identityMatch) continue;
|
|
56681
|
+
if (roleConfig.slack_channels && roleConfig.slack_channels.length > 0) {
|
|
56682
|
+
if (!this.actor.slack?.channelId || !roleConfig.slack_channels.includes(this.actor.slack.channelId)) {
|
|
56683
|
+
continue;
|
|
56684
|
+
}
|
|
56685
|
+
}
|
|
56686
|
+
matched.push(roleName);
|
|
56687
|
+
}
|
|
56688
|
+
return matched;
|
|
56689
|
+
}
|
|
56690
|
+
buildActor() {
|
|
56691
|
+
return {
|
|
56692
|
+
authorAssociation: this.actor.authorAssociation,
|
|
56693
|
+
login: this.actor.login,
|
|
56694
|
+
roles: this.resolveRoles(),
|
|
56695
|
+
isLocalMode: this.actor.isLocalMode,
|
|
56696
|
+
...this.actor.slack && { slack: this.actor.slack }
|
|
56697
|
+
};
|
|
56698
|
+
}
|
|
56699
|
+
forCheckExecution(check) {
|
|
56700
|
+
return {
|
|
56701
|
+
scope: "check.execute",
|
|
56702
|
+
check: {
|
|
56703
|
+
id: check.id,
|
|
56704
|
+
type: check.type,
|
|
56705
|
+
group: check.group,
|
|
56706
|
+
tags: check.tags,
|
|
56707
|
+
criticality: check.criticality,
|
|
56708
|
+
sandbox: check.sandbox,
|
|
56709
|
+
policy: check.policy
|
|
56710
|
+
},
|
|
56711
|
+
actor: this.buildActor(),
|
|
56712
|
+
repository: this.repository,
|
|
56713
|
+
pullRequest: this.pullRequest
|
|
56714
|
+
};
|
|
56715
|
+
}
|
|
56716
|
+
forToolInvocation(serverName, methodName, transport) {
|
|
56717
|
+
return {
|
|
56718
|
+
scope: "tool.invoke",
|
|
56719
|
+
tool: { serverName, methodName, transport },
|
|
56720
|
+
actor: this.buildActor(),
|
|
56721
|
+
repository: this.repository,
|
|
56722
|
+
pullRequest: this.pullRequest
|
|
56723
|
+
};
|
|
56724
|
+
}
|
|
56725
|
+
forCapabilityResolve(checkId, capabilities) {
|
|
56726
|
+
return {
|
|
56727
|
+
scope: "capability.resolve",
|
|
56728
|
+
check: { id: checkId, type: "ai" },
|
|
56729
|
+
capability: capabilities,
|
|
56730
|
+
actor: this.buildActor(),
|
|
56731
|
+
repository: this.repository,
|
|
56732
|
+
pullRequest: this.pullRequest
|
|
56733
|
+
};
|
|
56734
|
+
}
|
|
56735
|
+
};
|
|
56736
|
+
}
|
|
56737
|
+
});
|
|
56738
|
+
|
|
56739
|
+
// src/enterprise/policy/opa-policy-engine.ts
// Lazily-initialized module (esbuild __esm wrapper) exposing OpaPolicyEngine,
// the enterprise Open Policy Agent integration. Policy decisions gate check
// execution, MCP tool invocation, and capability resolution.
var opa_policy_engine_exports = {};
__export(opa_policy_engine_exports, {
  OpaPolicyEngine: () => OpaPolicyEngine
});
var OpaPolicyEngine;
var init_opa_policy_engine = __esm({
  "src/enterprise/policy/opa-policy-engine.ts"() {
    "use strict";
    init_opa_wasm_evaluator();
    init_opa_http_evaluator();
    init_policy_input_builder();
    OpaPolicyEngine = class {
      // Active evaluator: OpaWasmEvaluator ("local"), OpaHttpEvaluator
      // ("remote"), or null when no engine is configured (evaluations then
      // short-circuit to allow).
      evaluator = null;
      // Behavior on failure/denial: "deny" (default) | "allow" | "warn".
      fallback;
      // Per-evaluation timeout in milliseconds (default 5000).
      timeout;
      config;
      inputBuilder = null;
      logger = null;
      constructor(config) {
        this.config = config;
        this.fallback = config.fallback || "deny";
        this.timeout = config.timeout || 5e3;
      }
      // Build actor/repo/PR context from environment variables and construct
      // the configured evaluator. Throws when required settings for the
      // selected engine ("local" needs rules, "remote" needs url) are missing.
      async initialize(config) {
        try {
          // Logger is optional; evaluation must still work without it.
          this.logger = (init_logger(), __toCommonJS(logger_exports)).logger;
        } catch {
        }
        const actor = {
          authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
          login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
          // Outside GitHub Actions the run is treated as local mode.
          isLocalMode: !process.env.GITHUB_ACTIONS
        };
        const repo = {
          owner: process.env.GITHUB_REPOSITORY_OWNER,
          name: process.env.GITHUB_REPOSITORY?.split("/")[1],
          branch: process.env.GITHUB_HEAD_REF,
          baseBranch: process.env.GITHUB_BASE_REF,
          event: process.env.GITHUB_EVENT_NAME
        };
        // Guard against non-numeric GITHUB_PR_NUMBER values.
        const prNum = process.env.GITHUB_PR_NUMBER ? parseInt(process.env.GITHUB_PR_NUMBER, 10) : void 0;
        const pullRequest = {
          number: prNum !== void 0 && Number.isFinite(prNum) ? prNum : void 0
        };
        this.inputBuilder = new PolicyInputBuilder(config, actor, repo, pullRequest);
        if (config.engine === "local") {
          if (!config.rules) {
            throw new Error("OPA local mode requires `policy.rules` path to .wasm or .rego files");
          }
          const wasm = new OpaWasmEvaluator();
          await wasm.initialize(config.rules);
          if (config.data) {
            wasm.loadData(config.data);
          }
          this.evaluator = wasm;
        } else if (config.engine === "remote") {
          if (!config.url) {
            throw new Error("OPA remote mode requires `policy.url` pointing to OPA server");
          }
          this.evaluator = new OpaHttpEvaluator(config.url, this.timeout);
        } else {
          // No engine configured: all evaluate* methods allow unconditionally.
          this.evaluator = null;
        }
      }
      /**
       * Update actor/repo/PR context (e.g., after PR info becomes available).
       * Called by the enterprise loader when engine context is enriched.
       */
      setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new PolicyInputBuilder(this.config, actor, repo, pullRequest);
      }
      // Decide whether a named check may run. A check-level `policy.rule`
      // overrides the default rule path for this scope.
      async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const cfg = checkConfig && typeof checkConfig === "object" ? checkConfig : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
          id: checkId,
          type: cfg.type || "ai",
          group: cfg.group,
          tags: cfg.tags,
          criticality: cfg.criticality,
          sandbox: cfg.sandbox,
          policy: policyOverride
        });
        return this.doEvaluate(input, this.resolveRulePath("check.execute", policyOverride?.rule));
      }
      // Decide whether an MCP tool invocation is permitted.
      async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, "visor/tool/invoke");
      }
      // Decide which requested capabilities a check may be granted.
      async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, "visor/capability/resolve");
      }
      // Release evaluator resources; only some evaluators expose shutdown().
      async shutdown() {
        if (this.evaluator && "shutdown" in this.evaluator) {
          await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
      }
      // Map a scope like "check.execute" to the OPA rule path
      // "visor/check/execute". An explicit override wins but is normalized
      // to always carry the "visor/" prefix.
      resolveRulePath(defaultScope, override) {
        if (override) {
          return override.startsWith("visor/") ? override : `visor/${override}`;
        }
        return `visor/${defaultScope.replace(/\./g, "/")}`;
      }
      // Core evaluation: races the evaluator against `this.timeout`, then
      // applies fallback semantics. fallback="warn" downgrades a denial to
      // an allowed warning; any thrown error/timeout falls back per config.
      async doEvaluate(input, rulePath) {
        try {
          this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
          let timer;
          const timeoutPromise = new Promise((_resolve, reject) => {
            timer = setTimeout(() => reject(new Error("policy evaluation timeout")), this.timeout);
          });
          try {
            const result = await Promise.race([this.rawEvaluate(input, rulePath), timeoutPromise]);
            const decision = this.parseDecision(result);
            if (!decision.allowed && this.fallback === "warn") {
              // Audit mode: surface the denial but do not block.
              decision.allowed = true;
              decision.warn = true;
              decision.reason = `audit: ${decision.reason || "policy denied"}`;
            }
            this.logger?.debug(
              `[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || "none"}`
            );
            return decision;
          } finally {
            // Always clear the timer so the process can exit promptly.
            if (timer) clearTimeout(timer);
          }
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
          // fallback="deny" blocks on failure; "allow"/"warn" let it pass.
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: `policy evaluation failed, fallback=${this.fallback}`
          };
        }
      }
      // Dispatch to the concrete evaluator. The WASM evaluator returns the
      // whole `visor` package tree, so the requested rule's subtree must be
      // extracted; the HTTP evaluator takes the rule path directly.
      async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof OpaWasmEvaluator) {
          const result = await this.evaluator.evaluate(input);
          return this.navigateWasmResult(result, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
      }
      /**
       * Navigate nested OPA WASM result tree to reach the specific rule's output.
       * The WASM entrypoint `-e visor` means the result root IS the visor package,
       * so we strip the `visor/` prefix and walk the remaining segments.
       */
      navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== "object") return result;
        const segments = rulePath.replace(/^visor\//, "").split("/");
        let current = result;
        for (const seg of segments) {
          if (current && typeof current === "object" && seg in current) {
            current = current[seg];
          } else {
            // Rule absent from the result tree -> treated as "no result".
            return void 0;
          }
        }
        return current;
      }
      // Interpret the raw rule output. A missing result defers to the
      // fallback; otherwise anything except an explicit `allowed: false`
      // counts as allowed.
      parseDecision(result) {
        if (result === void 0 || result === null) {
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: this.fallback === "warn" ? "audit: no policy result" : "no policy result"
          };
        }
        const allowed = result.allowed !== false;
        const decision = {
          allowed,
          reason: result.reason
        };
        if (result.capabilities) {
          // Capability grants may accompany a capability.resolve decision.
          decision.capabilities = result.capabilities;
        }
        return decision;
      }
    };
  }
});
|
|
56927
|
+
|
|
56928
|
+
// src/enterprise/scheduler/knex-store.ts
// Export surface for the lazily-initialized knex scheduler store module.
var knex_store_exports = {};
__export(knex_store_exports, {
  KnexStoreBackend: () => KnexStoreBackend
});
|
|
56933
|
+
function toNum(val) {
  // Normalize numeric DB columns: drivers may return bigints as strings.
  // null/undefined both collapse to undefined.
  if (val == null) return undefined;
  if (typeof val === "string") {
    return parseInt(val, 10);
  }
  return val;
}
|
|
56937
|
+
function safeJsonParse2(value) {
  // Best-effort JSON parse for nullable text columns; empty or malformed
  // input yields undefined instead of throwing.
  let parsed;
  if (value) {
    try {
      parsed = JSON.parse(value);
    } catch {
      // Malformed JSON is treated the same as "no value".
    }
  }
  return parsed;
}
|
|
56945
|
+
function fromTriggerRow2(row) {
  // Hydrate a message_triggers row into its camelCase domain shape.
  // Boolean columns may arrive as true/false (pg/mysql) or 1/0 (mssql).
  const asBool = (v) => v === true || v === 1;
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: row.creator_context ?? undefined,
    creatorName: row.creator_name ?? undefined,
    description: row.description ?? undefined,
    channels: safeJsonParse2(row.channels),
    fromUsers: safeJsonParse2(row.from_users),
    fromBots: asBool(row.from_bots),
    contains: safeJsonParse2(row.contains),
    matchPattern: row.match_pattern ?? undefined,
    threads: row.threads,
    workflow: row.workflow,
    inputs: safeJsonParse2(row.inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    enabled: asBool(row.enabled),
    createdAt: toNum(row.created_at)
  };
}
|
|
56966
|
+
function toTriggerInsertRow(trigger) {
  // Serialize a message trigger into snake_case column values. Optional
  // object fields are JSON-encoded; absent optionals become SQL NULL.
  const json = (v) => (v ? JSON.stringify(v) : null);
  return {
    id: trigger.id,
    creator_id: trigger.creatorId,
    creator_context: trigger.creatorContext ?? null,
    creator_name: trigger.creatorName ?? null,
    description: trigger.description ?? null,
    channels: json(trigger.channels),
    from_users: json(trigger.fromUsers),
    from_bots: trigger.fromBots,
    contains: json(trigger.contains),
    match_pattern: trigger.matchPattern ?? null,
    threads: trigger.threads,
    workflow: trigger.workflow,
    inputs: json(trigger.inputs),
    output_context: json(trigger.outputContext),
    status: trigger.status,
    enabled: trigger.enabled,
    created_at: trigger.createdAt
  };
}
|
|
56987
|
+
function fromDbRow2(row) {
  // Hydrate a schedules row into its camelCase domain representation.
  // Timestamps go through toNum (bigint columns may come back as strings),
  // JSON text columns through safeJsonParse2.
  const asBool = (v) => v === true || v === 1;
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: row.creator_context ?? undefined,
    creatorName: row.creator_name ?? undefined,
    timezone: row.timezone,
    schedule: row.schedule_expr,
    runAt: toNum(row.run_at),
    isRecurring: asBool(row.is_recurring),
    originalExpression: row.original_expression,
    workflow: row.workflow ?? undefined,
    workflowInputs: safeJsonParse2(row.workflow_inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    createdAt: toNum(row.created_at),
    lastRunAt: toNum(row.last_run_at),
    nextRunAt: toNum(row.next_run_at),
    runCount: row.run_count,
    failureCount: row.failure_count,
    lastError: row.last_error ?? undefined,
    previousResponse: row.previous_response ?? undefined
  };
}
|
|
57011
|
+
function toInsertRow(schedule) {
  // Serialize a schedule into snake_case column values. Optional object
  // fields are JSON-encoded; absent optionals become SQL NULL.
  const json = (v) => (v ? JSON.stringify(v) : null);
  return {
    id: schedule.id,
    creator_id: schedule.creatorId,
    creator_context: schedule.creatorContext ?? null,
    creator_name: schedule.creatorName ?? null,
    timezone: schedule.timezone,
    schedule_expr: schedule.schedule,
    run_at: schedule.runAt ?? null,
    is_recurring: schedule.isRecurring,
    original_expression: schedule.originalExpression,
    workflow: schedule.workflow ?? null,
    workflow_inputs: json(schedule.workflowInputs),
    output_context: json(schedule.outputContext),
    status: schedule.status,
    created_at: schedule.createdAt,
    last_run_at: schedule.lastRunAt ?? null,
    next_run_at: schedule.nextRunAt ?? null,
    run_count: schedule.runCount,
    failure_count: schedule.failureCount,
    last_error: schedule.lastError ?? null,
    previous_response: schedule.previousResponse ?? null
  };
}
|
|
57035
|
+
// Lazy module state for the knex-backed scheduler store.
var fs24, path28, import_uuid2, KnexStoreBackend;
var init_knex_store = __esm({
  "src/enterprise/scheduler/knex-store.ts"() {
    "use strict";
    fs24 = __toESM(require("fs"));
    path28 = __toESM(require("path"));
    import_uuid2 = require("uuid");
    init_logger();
    // Schedule/trigger persistence on PostgreSQL, MySQL or MSSQL via knex.
    // knex itself is resolved at runtime so it stays an optional dependency.
    KnexStoreBackend = class {
      // knex instance; null until initialize() succeeds.
      knex = null;
      // One of "postgresql" | "mysql" | "mssql".
      driver;
      // Raw connection settings from the storage config.
      connection;
      constructor(driver, storageConfig, _haConfig) {
        this.driver = driver;
        this.connection = storageConfig.connection || {};
      }
      // Load knex, open the connection pool and ensure the schema exists.
      // Throws a descriptive error when knex is not installed.
      async initialize() {
        const { createRequire } = require("module");
        const runtimeRequire = createRequire(__filename);
        let knexFactory;
        try {
          knexFactory = runtimeRequire("knex");
        } catch (err) {
          const code = err?.code;
          if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
            throw new Error(
              "knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
            );
          }
          // Unexpected load failures propagate unchanged.
          throw err;
        }
        // Map our driver names to knex client adapter packages.
        const clientMap = {
          postgresql: "pg",
          mysql: "mysql2",
          mssql: "tedious"
        };
        const client = clientMap[this.driver];
        let connection;
        if (this.connection.connection_string) {
          // A full connection string takes precedence over discrete fields.
          connection = this.connection.connection_string;
        } else if (this.driver === "mssql") {
          connection = this.buildMssqlConnection();
        } else {
          connection = this.buildStandardConnection();
        }
        this.knex = knexFactory({
          client,
          connection,
          pool: {
            min: this.connection.pool?.min ?? 0,
            max: this.connection.pool?.max ?? 10
          }
        });
        await this.migrateSchema();
        logger.info(`[KnexStore] Initialized (${this.driver})`);
      }
      // pg/mysql connection object built from discrete config fields.
      buildStandardConnection() {
        return {
          host: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          ssl: this.resolveSslConfig()
        };
      }
      // MSSQL (tedious) uses `server` plus `options.encrypt` rather than
      // the `ssl` key used by pg/mysql.
      buildMssqlConnection() {
        const ssl = this.connection.ssl;
        const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
        return {
          server: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          options: {
            encrypt: sslEnabled,
            trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
          }
        };
      }
      // Translate the ssl config (boolean, or object with ca/cert/key file
      // paths) into node TLS options. Certificate files are read eagerly.
      resolveSslConfig() {
        const ssl = this.connection.ssl;
        if (ssl === false || ssl === void 0) return false;
        if (ssl === true) return { rejectUnauthorized: true };
        if (ssl.enabled === false) return false;
        const result = {
          rejectUnauthorized: ssl.reject_unauthorized !== false
        };
        if (ssl.ca) {
          const caPath = this.validateSslPath(ssl.ca, "CA certificate");
          result.ca = fs24.readFileSync(caPath, "utf8");
        }
        if (ssl.cert) {
          const certPath = this.validateSslPath(ssl.cert, "client certificate");
          result.cert = fs24.readFileSync(certPath, "utf8");
        }
        if (ssl.key) {
          const keyPath = this.validateSslPath(ssl.key, "client key");
          result.key = fs24.readFileSync(keyPath, "utf8");
        }
        return result;
      }
      // Reject SSL file paths containing unnormalized sequences and require
      // the file to exist; returns the resolved absolute path.
      validateSslPath(filePath, label) {
        const resolved = path28.resolve(filePath);
        if (resolved !== path28.normalize(resolved)) {
          throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
        }
        if (!fs24.existsSync(resolved)) {
          throw new Error(`SSL ${label} not found: ${filePath}`);
        }
        return resolved;
      }
      // Destroy the pool and drop the knex handle.
      async shutdown() {
        if (this.knex) {
          await this.knex.destroy();
          this.knex = null;
        }
      }
      // Idempotent schema creation for the schedules, message_triggers and
      // scheduler_locks tables (create-if-missing; no versioned migrations).
      async migrateSchema() {
        const knex = this.getKnex();
        const exists = await knex.schema.hasTable("schedules");
        if (!exists) {
          await knex.schema.createTable("schedules", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.string("timezone", 64).notNullable().defaultTo("UTC");
            table.string("schedule_expr", 255);
            table.bigInteger("run_at");
            table.boolean("is_recurring").notNullable();
            table.text("original_expression");
            table.string("workflow", 255);
            table.text("workflow_inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().index();
            table.bigInteger("created_at").notNullable();
            table.bigInteger("last_run_at");
            table.bigInteger("next_run_at");
            table.integer("run_count").notNullable().defaultTo(0);
            table.integer("failure_count").notNullable().defaultTo(0);
            table.text("last_error");
            table.text("previous_response");
            // Composite index serving the due-schedule polling query.
            table.index(["status", "next_run_at"]);
          });
        }
        const triggersExist = await knex.schema.hasTable("message_triggers");
        if (!triggersExist) {
          await knex.schema.createTable("message_triggers", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.text("description");
            table.text("channels");
            table.text("from_users");
            table.boolean("from_bots").notNullable().defaultTo(false);
            table.text("contains");
            table.text("match_pattern");
            table.string("threads", 20).notNullable().defaultTo("any");
            table.string("workflow", 255).notNullable();
            table.text("inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().defaultTo("active").index();
            table.boolean("enabled").notNullable().defaultTo(true);
            table.bigInteger("created_at").notNullable();
          });
        }
        const locksExist = await knex.schema.hasTable("scheduler_locks");
        if (!locksExist) {
          await knex.schema.createTable("scheduler_locks", (table) => {
            table.string("lock_id", 255).primary();
            table.string("node_id", 255).notNullable();
            table.string("lock_token", 36).notNullable();
            table.bigInteger("acquired_at").notNullable();
            table.bigInteger("expires_at").notNullable();
          });
        }
      }
      // Guarded accessor so callers fail loudly before initialize().
      getKnex() {
        if (!this.knex) {
          throw new Error("[KnexStore] Not initialized. Call initialize() first.");
        }
        return this.knex;
      }
      // --- CRUD ---
      // Insert a new schedule with generated id and reset counters.
      async create(schedule) {
        const knex = this.getKnex();
        const newSchedule = {
          ...schedule,
          id: (0, import_uuid2.v4)(),
          createdAt: Date.now(),
          runCount: 0,
          failureCount: 0,
          status: "active"
        };
        await knex("schedules").insert(toInsertRow(newSchedule));
        logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
        return newSchedule;
      }
      // Insert a schedule preserving its existing id; no-op when already present.
      async importSchedule(schedule) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", schedule.id).first();
        if (existing) return;
        await knex("schedules").insert(toInsertRow(schedule));
      }
      async get(id) {
        const knex = this.getKnex();
        const row = await knex("schedules").where("id", id).first();
        return row ? fromDbRow2(row) : void 0;
      }
      // Merge a partial patch over the stored schedule; the id is immutable.
      // Returns undefined when the schedule does not exist.
      async update(id, patch) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", id).first();
        if (!existing) return void 0;
        const current = fromDbRow2(existing);
        const updated = { ...current, ...patch, id: current.id };
        const row = toInsertRow(updated);
        // Never update the primary key column.
        delete row.id;
        await knex("schedules").where("id", id).update(row);
        return updated;
      }
      // Returns true only if a row was actually removed.
      async delete(id) {
        const knex = this.getKnex();
        const deleted = await knex("schedules").where("id", id).del();
        if (deleted > 0) {
          logger.info(`[KnexStore] Deleted schedule ${id}`);
          return true;
        }
        return false;
      }
      // --- Queries ---
      async getByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("creator_id", creatorId);
        return rows.map((r) => fromDbRow2(r));
      }
      async getActiveSchedules() {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("status", "active");
        return rows.map((r) => fromDbRow2(r));
      }
      // Active schedules whose run time has passed: one-shot rows by run_at,
      // recurring rows by next_run_at. MSSQL stores booleans as 0/1.
      async getDueSchedules(now) {
        const ts = now ?? Date.now();
        const knex = this.getKnex();
        const bFalse = this.driver === "mssql" ? 0 : false;
        const bTrue = this.driver === "mssql" ? 1 : true;
        const rows = await knex("schedules").where("status", "active").andWhere(function() {
          this.where(function() {
            this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
          }).orWhere(function() {
            this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
          });
        });
        return rows.map((r) => fromDbRow2(r));
      }
      // Case-insensitive substring match on the workflow name; LIKE
      // wildcards in the needle are escaped to match literally.
      async findByWorkflow(creatorId, workflowName) {
        const knex = this.getKnex();
        const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
        const pattern = `%${escaped}%`;
        const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
        return rows.map((r) => fromDbRow2(r));
      }
      async getAll() {
        const knex = this.getKnex();
        const rows = await knex("schedules");
        return rows.map((r) => fromDbRow2(r));
      }
      // Aggregate counts in a single query; SUM results may come back as
      // strings depending on the driver, hence the Number() coercion.
      async getStats() {
        const knex = this.getKnex();
        const boolTrue = this.driver === "mssql" ? "1" : "true";
        const boolFalse = this.driver === "mssql" ? "0" : "false";
        const result = await knex("schedules").select(
          knex.raw("COUNT(*) as total"),
          knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
          knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
          knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
          knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
        ).first();
        return {
          total: Number(result.total) || 0,
          active: Number(result.active) || 0,
          paused: Number(result.paused) || 0,
          completed: Number(result.completed) || 0,
          failed: Number(result.failed) || 0,
          recurring: Number(result.recurring) || 0,
          oneTime: Number(result.one_time) || 0
        };
      }
      // Enforce global/per-user/per-user-recurring schedule quotas; throws
      // a user-facing error when a limit would be exceeded.
      async validateLimits(creatorId, isRecurring, limits) {
        const knex = this.getKnex();
        if (limits.maxGlobal) {
          const result = await knex("schedules").count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxGlobal) {
            throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
          }
        }
        if (limits.maxPerUser) {
          const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxPerUser) {
            throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
          }
        }
        if (isRecurring && limits.maxRecurringPerUser) {
          const bTrue = this.driver === "mssql" ? 1 : true;
          const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
            throw new Error(
              `You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
            );
          }
        }
      }
      // --- HA Distributed Locking (via scheduler_locks table) ---
      // Take over an expired lock row, or insert a fresh one. Returns the
      // new lock token on success, or null when the lock is held elsewhere
      // (the insert then violates the primary key).
      async tryAcquireLock(lockId, nodeId, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const token = (0, import_uuid2.v4)();
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
          node_id: nodeId,
          lock_token: token,
          acquired_at: now,
          expires_at: expiresAt
        });
        if (updated > 0) return token;
        try {
          await knex("scheduler_locks").insert({
            lock_id: lockId,
            node_id: nodeId,
            lock_token: token,
            acquired_at: now,
            expires_at: expiresAt
          });
          return token;
        } catch {
          // Duplicate key: another node currently holds the lock.
          return null;
        }
      }
      // Delete the lock only when the token still matches (prevents one
      // node from releasing another node's lock).
      async releaseLock(lockId, lockToken) {
        const knex = this.getKnex();
        await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
      }
      // Extend a lock the caller still owns; returns false when the token
      // no longer matches (lock was lost).
      async renewLock(lockId, lockToken, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
        return updated > 0;
      }
      // Writes go straight to the database; nothing to flush.
      async flush() {
      }
      // --- Message Trigger CRUD ---
      async createTrigger(trigger) {
        const knex = this.getKnex();
        const newTrigger = {
          ...trigger,
          id: (0, import_uuid2.v4)(),
          createdAt: Date.now()
        };
        await knex("message_triggers").insert(toTriggerInsertRow(newTrigger));
        logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
        return newTrigger;
      }
      async getTrigger(id) {
        const knex = this.getKnex();
        const row = await knex("message_triggers").where("id", id).first();
        return row ? fromTriggerRow2(row) : void 0;
      }
      // Merge a partial patch over the stored trigger; id and createdAt are
      // immutable. Returns undefined when the trigger does not exist.
      async updateTrigger(id, patch) {
        const knex = this.getKnex();
        const existing = await knex("message_triggers").where("id", id).first();
        if (!existing) return void 0;
        const current = fromTriggerRow2(existing);
        const updated = {
          ...current,
          ...patch,
          id: current.id,
          createdAt: current.createdAt
        };
        const row = toTriggerInsertRow(updated);
        // Never update the primary key column.
        delete row.id;
        await knex("message_triggers").where("id", id).update(row);
        return updated;
      }
      async deleteTrigger(id) {
        const knex = this.getKnex();
        const deleted = await knex("message_triggers").where("id", id).del();
        if (deleted > 0) {
          logger.info(`[KnexStore] Deleted trigger ${id}`);
          return true;
        }
        return false;
      }
      async getTriggersByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex("message_triggers").where("creator_id", creatorId);
        return rows.map((r) => fromTriggerRow2(r));
      }
      // Triggers that are both status="active" and enabled (MSSQL booleans
      // are stored as 0/1).
      async getActiveTriggers() {
        const knex = this.getKnex();
        const rows = await knex("message_triggers").where("status", "active").where("enabled", this.driver === "mssql" ? 1 : true);
        return rows.map((r) => fromTriggerRow2(r));
      }
    };
  }
});
|
|
57445
|
+
|
|
57446
|
+
// src/enterprise/loader.ts
|
|
57447
|
+
var loader_exports = {};
|
|
57448
|
+
__export(loader_exports, {
|
|
57449
|
+
loadEnterprisePolicyEngine: () => loadEnterprisePolicyEngine,
|
|
57450
|
+
loadEnterpriseStoreBackend: () => loadEnterpriseStoreBackend
|
|
57451
|
+
});
|
|
57452
|
+
async function loadEnterprisePolicyEngine(config) {
|
|
57453
|
+
try {
|
|
57454
|
+
const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
|
|
57455
|
+
const validator = new LicenseValidator2();
|
|
57456
|
+
const license = await validator.loadAndValidate();
|
|
57457
|
+
if (!license || !validator.hasFeature("policy")) {
|
|
57458
|
+
return new DefaultPolicyEngine();
|
|
57459
|
+
}
|
|
57460
|
+
if (validator.isInGracePeriod()) {
|
|
57461
|
+
console.warn(
|
|
57462
|
+
"[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
|
|
57463
|
+
);
|
|
57464
|
+
}
|
|
57465
|
+
const { OpaPolicyEngine: OpaPolicyEngine2 } = await Promise.resolve().then(() => (init_opa_policy_engine(), opa_policy_engine_exports));
|
|
57466
|
+
const engine = new OpaPolicyEngine2(config);
|
|
57467
|
+
await engine.initialize(config);
|
|
57468
|
+
return engine;
|
|
57469
|
+
} catch (err) {
|
|
57470
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
57471
|
+
try {
|
|
57472
|
+
const { logger: logger2 } = (init_logger(), __toCommonJS(logger_exports));
|
|
57473
|
+
logger2.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
|
|
57474
|
+
} catch {
|
|
57475
|
+
}
|
|
57476
|
+
return new DefaultPolicyEngine();
|
|
57477
|
+
}
|
|
57478
|
+
}
|
|
57479
|
+
async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
|
|
57480
|
+
const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
|
|
57481
|
+
const validator = new LicenseValidator2();
|
|
57482
|
+
const license = await validator.loadAndValidate();
|
|
57483
|
+
if (!license || !validator.hasFeature("scheduler-sql")) {
|
|
57484
|
+
throw new Error(
|
|
57485
|
+
`The ${driver} schedule storage driver requires a Visor Enterprise license with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`
|
|
57486
|
+
);
|
|
57487
|
+
}
|
|
57488
|
+
if (validator.isInGracePeriod()) {
|
|
57489
|
+
console.warn(
|
|
57490
|
+
"[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
|
|
57491
|
+
);
|
|
57492
|
+
}
|
|
57493
|
+
const { KnexStoreBackend: KnexStoreBackend2 } = await Promise.resolve().then(() => (init_knex_store(), knex_store_exports));
|
|
57494
|
+
return new KnexStoreBackend2(driver, storageConfig, haConfig);
|
|
57495
|
+
}
|
|
57496
|
+
var init_loader = __esm({
|
|
57497
|
+
"src/enterprise/loader.ts"() {
|
|
57498
|
+
"use strict";
|
|
57499
|
+
init_default_engine();
|
|
57500
|
+
}
|
|
57501
|
+
});
|
|
57502
|
+
|
|
56129
57503
|
// src/event-bus/event-bus.ts
|
|
56130
57504
|
var event_bus_exports = {};
|
|
56131
57505
|
__export(event_bus_exports, {
|
|
@@ -57032,8 +58406,8 @@ ${content}
|
|
|
57032
58406
|
* Sleep utility
|
|
57033
58407
|
*/
|
|
57034
58408
|
sleep(ms) {
|
|
57035
|
-
return new Promise((
|
|
57036
|
-
const t = setTimeout(
|
|
58409
|
+
return new Promise((resolve19) => {
|
|
58410
|
+
const t = setTimeout(resolve19, ms);
|
|
57037
58411
|
if (typeof t.unref === "function") {
|
|
57038
58412
|
try {
|
|
57039
58413
|
t.unref();
|
|
@@ -57318,8 +58692,8 @@ ${end}`);
|
|
|
57318
58692
|
async updateGroupedComment(ctx, comments, group, changedIds) {
|
|
57319
58693
|
const existingLock = this.updateLocks.get(group);
|
|
57320
58694
|
let resolveLock;
|
|
57321
|
-
const ourLock = new Promise((
|
|
57322
|
-
resolveLock =
|
|
58695
|
+
const ourLock = new Promise((resolve19) => {
|
|
58696
|
+
resolveLock = resolve19;
|
|
57323
58697
|
});
|
|
57324
58698
|
this.updateLocks.set(group, ourLock);
|
|
57325
58699
|
try {
|
|
@@ -57650,7 +59024,7 @@ ${blocks}
|
|
|
57650
59024
|
* Sleep utility for enforcing delays
|
|
57651
59025
|
*/
|
|
57652
59026
|
sleep(ms) {
|
|
57653
|
-
return new Promise((
|
|
59027
|
+
return new Promise((resolve19) => setTimeout(resolve19, ms));
|
|
57654
59028
|
}
|
|
57655
59029
|
};
|
|
57656
59030
|
}
|
|
@@ -58819,7 +60193,7 @@ var init_tui_frontend = __esm({
|
|
|
58819
60193
|
});
|
|
58820
60194
|
|
|
58821
60195
|
// src/agent-protocol/task-store.ts
|
|
58822
|
-
function
|
|
60196
|
+
function safeJsonParse3(value) {
|
|
58823
60197
|
if (!value) return void 0;
|
|
58824
60198
|
try {
|
|
58825
60199
|
return JSON.parse(value);
|
|
@@ -58836,12 +60210,12 @@ function taskRowToAgentTask(row) {
|
|
|
58836
60210
|
context_id: row.context_id,
|
|
58837
60211
|
status: {
|
|
58838
60212
|
state: row.state,
|
|
58839
|
-
message:
|
|
60213
|
+
message: safeJsonParse3(row.status_message),
|
|
58840
60214
|
timestamp: row.updated_at
|
|
58841
60215
|
},
|
|
58842
|
-
artifacts:
|
|
58843
|
-
history:
|
|
58844
|
-
metadata:
|
|
60216
|
+
artifacts: safeJsonParse3(row.artifacts) ?? [],
|
|
60217
|
+
history: safeJsonParse3(row.history) ?? [],
|
|
60218
|
+
metadata: safeJsonParse3(row.request_metadata),
|
|
58845
60219
|
workflow_id: row.workflow_id ?? void 0
|
|
58846
60220
|
};
|
|
58847
60221
|
}
|
|
@@ -59053,7 +60427,7 @@ var init_task_store = __esm({
|
|
|
59053
60427
|
const db = this.getDb();
|
|
59054
60428
|
const row = db.prepare("SELECT artifacts FROM agent_tasks WHERE id = ?").get(taskId);
|
|
59055
60429
|
if (!row) throw new TaskNotFoundError(taskId);
|
|
59056
|
-
const artifacts =
|
|
60430
|
+
const artifacts = safeJsonParse3(row.artifacts) ?? [];
|
|
59057
60431
|
artifacts.push(artifact);
|
|
59058
60432
|
db.prepare("UPDATE agent_tasks SET artifacts = ?, updated_at = ? WHERE id = ?").run(
|
|
59059
60433
|
JSON.stringify(artifacts),
|
|
@@ -59065,7 +60439,7 @@ var init_task_store = __esm({
|
|
|
59065
60439
|
const db = this.getDb();
|
|
59066
60440
|
const row = db.prepare("SELECT history FROM agent_tasks WHERE id = ?").get(taskId);
|
|
59067
60441
|
if (!row) throw new TaskNotFoundError(taskId);
|
|
59068
|
-
const history =
|
|
60442
|
+
const history = safeJsonParse3(row.history) ?? [];
|
|
59069
60443
|
history.push(message);
|
|
59070
60444
|
db.prepare("UPDATE agent_tasks SET history = ?, updated_at = ? WHERE id = ?").run(
|
|
59071
60445
|
JSON.stringify(history),
|
|
@@ -59551,13 +60925,13 @@ __export(a2a_frontend_exports, {
|
|
|
59551
60925
|
resultToArtifacts: () => resultToArtifacts
|
|
59552
60926
|
});
|
|
59553
60927
|
function readJsonBody(req) {
|
|
59554
|
-
return new Promise((
|
|
60928
|
+
return new Promise((resolve19, reject) => {
|
|
59555
60929
|
const chunks = [];
|
|
59556
60930
|
req.on("data", (chunk) => chunks.push(chunk));
|
|
59557
60931
|
req.on("end", () => {
|
|
59558
60932
|
try {
|
|
59559
60933
|
const body = Buffer.concat(chunks).toString("utf8");
|
|
59560
|
-
|
|
60934
|
+
resolve19(body ? JSON.parse(body) : {});
|
|
59561
60935
|
} catch {
|
|
59562
60936
|
reject(new ParseError("Malformed JSON body"));
|
|
59563
60937
|
}
|
|
@@ -59800,12 +61174,12 @@ var init_a2a_frontend = __esm({
|
|
|
59800
61174
|
}
|
|
59801
61175
|
const port = this.config.port ?? 9e3;
|
|
59802
61176
|
const host = this.config.host ?? "0.0.0.0";
|
|
59803
|
-
await new Promise((
|
|
61177
|
+
await new Promise((resolve19) => {
|
|
59804
61178
|
this.server.listen(port, host, () => {
|
|
59805
61179
|
const addr = this.server.address();
|
|
59806
61180
|
this._boundPort = typeof addr === "object" && addr ? addr.port : port;
|
|
59807
61181
|
logger.info(`A2A server listening on ${host}:${this._boundPort}`);
|
|
59808
|
-
|
|
61182
|
+
resolve19();
|
|
59809
61183
|
});
|
|
59810
61184
|
});
|
|
59811
61185
|
if (this.agentCard) {
|
|
@@ -59829,8 +61203,8 @@ var init_a2a_frontend = __esm({
|
|
|
59829
61203
|
}
|
|
59830
61204
|
this.streamManager.shutdown();
|
|
59831
61205
|
if (this.server) {
|
|
59832
|
-
await new Promise((
|
|
59833
|
-
this.server.close((err) => err ? reject(err) :
|
|
61206
|
+
await new Promise((resolve19, reject) => {
|
|
61207
|
+
this.server.close((err) => err ? reject(err) : resolve19());
|
|
59834
61208
|
});
|
|
59835
61209
|
this.server = null;
|
|
59836
61210
|
}
|
|
@@ -60535,15 +61909,15 @@ function serializeRunState(state) {
|
|
|
60535
61909
|
])
|
|
60536
61910
|
};
|
|
60537
61911
|
}
|
|
60538
|
-
var
|
|
61912
|
+
var path31, fs28, StateMachineExecutionEngine;
|
|
60539
61913
|
var init_state_machine_execution_engine = __esm({
|
|
60540
61914
|
"src/state-machine-execution-engine.ts"() {
|
|
60541
61915
|
"use strict";
|
|
60542
61916
|
init_runner();
|
|
60543
61917
|
init_logger();
|
|
60544
61918
|
init_sandbox_manager();
|
|
60545
|
-
|
|
60546
|
-
|
|
61919
|
+
path31 = __toESM(require("path"));
|
|
61920
|
+
fs28 = __toESM(require("fs"));
|
|
60547
61921
|
StateMachineExecutionEngine = class _StateMachineExecutionEngine {
|
|
60548
61922
|
workingDirectory;
|
|
60549
61923
|
executionContext;
|
|
@@ -60775,8 +62149,8 @@ var init_state_machine_execution_engine = __esm({
|
|
|
60775
62149
|
logger.debug(
|
|
60776
62150
|
`[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`
|
|
60777
62151
|
);
|
|
60778
|
-
const { loadEnterprisePolicyEngine } = await
|
|
60779
|
-
context2.policyEngine = await
|
|
62152
|
+
const { loadEnterprisePolicyEngine: loadEnterprisePolicyEngine2 } = await Promise.resolve().then(() => (init_loader(), loader_exports));
|
|
62153
|
+
context2.policyEngine = await loadEnterprisePolicyEngine2(configWithTagFilter.policy);
|
|
60780
62154
|
logger.debug(
|
|
60781
62155
|
`[PolicyEngine] Initialized: ${context2.policyEngine?.constructor?.name || "unknown"}`
|
|
60782
62156
|
);
|
|
@@ -60930,9 +62304,9 @@ var init_state_machine_execution_engine = __esm({
|
|
|
60930
62304
|
}
|
|
60931
62305
|
const checkId = String(ev?.checkId || "unknown");
|
|
60932
62306
|
const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
|
|
60933
|
-
const baseDir = process.env.VISOR_SNAPSHOT_DIR ||
|
|
60934
|
-
|
|
60935
|
-
const filePath =
|
|
62307
|
+
const baseDir = process.env.VISOR_SNAPSHOT_DIR || path31.resolve(process.cwd(), ".visor", "snapshots");
|
|
62308
|
+
fs28.mkdirSync(baseDir, { recursive: true });
|
|
62309
|
+
const filePath = path31.join(baseDir, `${threadKey}-${checkId}.json`);
|
|
60936
62310
|
await this.saveSnapshotToFile(filePath);
|
|
60937
62311
|
logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
|
|
60938
62312
|
try {
|
|
@@ -61073,7 +62447,7 @@ var init_state_machine_execution_engine = __esm({
|
|
|
61073
62447
|
* Does not include secrets. Intended for debugging and future resume support.
|
|
61074
62448
|
*/
|
|
61075
62449
|
async saveSnapshotToFile(filePath) {
|
|
61076
|
-
const
|
|
62450
|
+
const fs29 = await import("fs/promises");
|
|
61077
62451
|
const ctx = this._lastContext;
|
|
61078
62452
|
const runner = this._lastRunner;
|
|
61079
62453
|
if (!ctx || !runner) {
|
|
@@ -61093,14 +62467,14 @@ var init_state_machine_execution_engine = __esm({
|
|
|
61093
62467
|
journal: entries,
|
|
61094
62468
|
requestedChecks: ctx.requestedChecks || []
|
|
61095
62469
|
};
|
|
61096
|
-
await
|
|
62470
|
+
await fs29.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
|
|
61097
62471
|
}
|
|
61098
62472
|
/**
|
|
61099
62473
|
* Load a snapshot JSON from file and return it. Resume support can build on this.
|
|
61100
62474
|
*/
|
|
61101
62475
|
async loadSnapshotFromFile(filePath) {
|
|
61102
|
-
const
|
|
61103
|
-
const raw = await
|
|
62476
|
+
const fs29 = await import("fs/promises");
|
|
62477
|
+
const raw = await fs29.readFile(filePath, "utf8");
|
|
61104
62478
|
return JSON.parse(raw);
|
|
61105
62479
|
}
|
|
61106
62480
|
/**
|