@probelabs/visor 0.1.153 → 0.1.154-ee
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +2023 -88
- package/dist/providers/mcp-custom-sse-server.d.ts.map +1 -1
- package/dist/scheduler/schedule-tool.d.ts.map +1 -1
- package/dist/sdk/{check-provider-registry-OEPUY5P6.mjs → check-provider-registry-PK3OTEX6.mjs} +7 -7
- package/dist/sdk/{check-provider-registry-ZOGNKTC3.mjs → check-provider-registry-PZ6K7G4G.mjs} +7 -7
- package/dist/sdk/{chunk-CPYQDJ27.mjs → chunk-AS6LIEO4.mjs} +3 -3
- package/dist/sdk/{chunk-Y3XWPKFP.mjs → chunk-GQ7H7E4Y.mjs} +2 -2
- package/dist/sdk/{chunk-Y3XWPKFP.mjs.map → chunk-GQ7H7E4Y.mjs.map} +1 -1
- package/dist/sdk/{chunk-KBX4OIXL.mjs → chunk-ILIWDV37.mjs} +2 -2
- package/dist/sdk/{chunk-U7KB66AN.mjs → chunk-LIRIQICI.mjs} +56 -36
- package/dist/sdk/chunk-LIRIQICI.mjs.map +1 -0
- package/dist/sdk/{chunk-DNDS7R3N.mjs → chunk-NZADFXHE.mjs} +7 -1
- package/dist/sdk/chunk-NZADFXHE.mjs.map +1 -0
- package/dist/sdk/{chunk-ZCUGMT7X.mjs → chunk-SPCGI24K.mjs} +3 -3
- package/dist/sdk/{chunk-EYQWEVZF.mjs → chunk-WPF7PJ64.mjs} +55 -35
- package/dist/sdk/chunk-WPF7PJ64.mjs.map +1 -0
- package/dist/sdk/{config-SW3VO4DQ.mjs → config-CWHZO5AL.mjs} +2 -2
- package/dist/sdk/{failure-condition-evaluator-R3UE4PE7.mjs → failure-condition-evaluator-LYFZMQ6Y.mjs} +3 -3
- package/dist/sdk/{github-frontend-6KZSVSPC.mjs → github-frontend-XKPAYXOT.mjs} +3 -3
- package/dist/sdk/{host-A5HS6F6G.mjs → host-6DJCOUJE.mjs} +2 -2
- package/dist/sdk/{host-YBJOWFT4.mjs → host-7Y25DDOR.mjs} +2 -2
- package/dist/sdk/knex-store-CRORFJE6.mjs +527 -0
- package/dist/sdk/knex-store-CRORFJE6.mjs.map +1 -0
- package/dist/sdk/loader-NJCF7DUS.mjs +89 -0
- package/dist/sdk/loader-NJCF7DUS.mjs.map +1 -0
- package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs +655 -0
- package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs.map +1 -0
- package/dist/sdk/{routing-MMEOAH34.mjs → routing-L224WQSY.mjs} +4 -4
- package/dist/sdk/{schedule-tool-NMCFABHK.mjs → schedule-tool-HRFONU5J.mjs} +7 -7
- package/dist/sdk/{schedule-tool-NYRLSV4F.mjs → schedule-tool-KZ36XTW4.mjs} +7 -7
- package/dist/sdk/{schedule-tool-handler-2TFSBZ2O.mjs → schedule-tool-handler-2V4EJEQT.mjs} +7 -7
- package/dist/sdk/{schedule-tool-handler-DRVRLVGD.mjs → schedule-tool-handler-KXZC4ZOR.mjs} +7 -7
- package/dist/sdk/sdk.js +1677 -277
- package/dist/sdk/sdk.js.map +1 -1
- package/dist/sdk/sdk.mjs +6 -6
- package/dist/sdk/{trace-helpers-JJLVZ2RJ.mjs → trace-helpers-AWTAWKRA.mjs} +2 -2
- package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
- package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
- package/dist/sdk/{workflow-check-provider-GIW4WECT.mjs → workflow-check-provider-HGFTX64I.mjs} +7 -7
- package/dist/sdk/{workflow-check-provider-UQMMFLSK.mjs → workflow-check-provider-TXAEY7OU.mjs} +7 -7
- package/dist/sdk/{workflow-registry-MHUSKSD6.mjs → workflow-registry-NGV3SESX.mjs} +2 -2
- package/dist/slack/schedule-tool-handler.d.ts.map +1 -1
- package/dist/workflow-registry.d.ts.map +1 -1
- package/package.json +2 -2
- package/dist/output/traces/run-2026-03-04T13-24-27-240Z.ndjson +0 -138
- package/dist/output/traces/run-2026-03-04T13-25-12-321Z.ndjson +0 -2197
- package/dist/sdk/check-provider-registry-CSIZGIKC.mjs +0 -29
- package/dist/sdk/chunk-DNDS7R3N.mjs.map +0 -1
- package/dist/sdk/chunk-EYQWEVZF.mjs.map +0 -1
- package/dist/sdk/chunk-NYK7WDGH.mjs +0 -43754
- package/dist/sdk/chunk-NYK7WDGH.mjs.map +0 -1
- package/dist/sdk/chunk-SMR5N5MG.mjs +0 -443
- package/dist/sdk/chunk-SMR5N5MG.mjs.map +0 -1
- package/dist/sdk/chunk-U7KB66AN.mjs.map +0 -1
- package/dist/sdk/chunk-VBN45DBR.mjs +0 -1502
- package/dist/sdk/chunk-WG7P66MJ.mjs +0 -739
- package/dist/sdk/chunk-WG7P66MJ.mjs.map +0 -1
- package/dist/sdk/chunk-ZCUGMT7X.mjs.map +0 -1
- package/dist/sdk/failure-condition-evaluator-Y32S6DB2.mjs +0 -17
- package/dist/sdk/github-frontend-6SIR7QWX.mjs +0 -1368
- package/dist/sdk/github-frontend-6SIR7QWX.mjs.map +0 -1
- package/dist/sdk/routing-U63OJMZQ.mjs +0 -25
- package/dist/sdk/schedule-tool-74VMD77T.mjs +0 -35
- package/dist/sdk/schedule-tool-handler-EOQBRZSD.mjs +0 -39
- package/dist/sdk/schedule-tool-handler-EOQBRZSD.mjs.map +0 -1
- package/dist/sdk/trace-helpers-2BIVADUK.mjs +0 -25
- package/dist/sdk/trace-helpers-2BIVADUK.mjs.map +0 -1
- package/dist/sdk/trace-helpers-JJLVZ2RJ.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-4NHVFLMQ.mjs +0 -29
- package/dist/sdk/workflow-check-provider-4NHVFLMQ.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-GIW4WECT.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-UQMMFLSK.mjs.map +0 -1
- package/dist/sdk/workflow-registry-MHUSKSD6.mjs.map +0 -1
- package/dist/traces/run-2026-03-04T13-24-27-240Z.ndjson +0 -138
- package/dist/traces/run-2026-03-04T13-25-12-321Z.ndjson +0 -2197
- /package/dist/sdk/{check-provider-registry-CSIZGIKC.mjs.map → check-provider-registry-PK3OTEX6.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-OEPUY5P6.mjs.map → check-provider-registry-PZ6K7G4G.mjs.map} +0 -0
- /package/dist/sdk/{chunk-CPYQDJ27.mjs.map → chunk-AS6LIEO4.mjs.map} +0 -0
- /package/dist/sdk/{chunk-KBX4OIXL.mjs.map → chunk-ILIWDV37.mjs.map} +0 -0
- /package/dist/sdk/{chunk-VBN45DBR.mjs.map → chunk-SPCGI24K.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-ZOGNKTC3.mjs.map → config-CWHZO5AL.mjs.map} +0 -0
- /package/dist/sdk/{config-SW3VO4DQ.mjs.map → failure-condition-evaluator-LYFZMQ6Y.mjs.map} +0 -0
- /package/dist/sdk/{github-frontend-6KZSVSPC.mjs.map → github-frontend-XKPAYXOT.mjs.map} +0 -0
- /package/dist/sdk/{host-A5HS6F6G.mjs.map → host-6DJCOUJE.mjs.map} +0 -0
- /package/dist/sdk/{host-YBJOWFT4.mjs.map → host-7Y25DDOR.mjs.map} +0 -0
- /package/dist/sdk/{failure-condition-evaluator-R3UE4PE7.mjs.map → routing-L224WQSY.mjs.map} +0 -0
- /package/dist/sdk/{failure-condition-evaluator-Y32S6DB2.mjs.map → schedule-tool-HRFONU5J.mjs.map} +0 -0
- /package/dist/sdk/{routing-MMEOAH34.mjs.map → schedule-tool-KZ36XTW4.mjs.map} +0 -0
- /package/dist/sdk/{routing-U63OJMZQ.mjs.map → schedule-tool-handler-2V4EJEQT.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-74VMD77T.mjs.map → schedule-tool-handler-KXZC4ZOR.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-NMCFABHK.mjs.map → trace-helpers-AWTAWKRA.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-NYRLSV4F.mjs.map → workflow-check-provider-HGFTX64I.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-2TFSBZ2O.mjs.map → workflow-check-provider-TXAEY7OU.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-DRVRLVGD.mjs.map → workflow-registry-NGV3SESX.mjs.map} +0 -0
package/dist/sdk/sdk.js
CHANGED
|
@@ -646,7 +646,7 @@ var require_package = __commonJS({
|
|
|
646
646
|
"package.json"(exports2, module2) {
|
|
647
647
|
module2.exports = {
|
|
648
648
|
name: "@probelabs/visor",
|
|
649
|
-
version: "0.1.
|
|
649
|
+
version: "0.1.42",
|
|
650
650
|
main: "dist/index.js",
|
|
651
651
|
bin: {
|
|
652
652
|
visor: "./dist/index.js"
|
|
@@ -760,7 +760,7 @@ var require_package = __commonJS({
|
|
|
760
760
|
"@opentelemetry/sdk-node": "^0.203.0",
|
|
761
761
|
"@opentelemetry/sdk-trace-base": "^1.30.1",
|
|
762
762
|
"@opentelemetry/semantic-conventions": "^1.30.1",
|
|
763
|
-
"@probelabs/probe": "^0.6.0-
|
|
763
|
+
"@probelabs/probe": "^0.6.0-rc271",
|
|
764
764
|
"@types/commander": "^2.12.0",
|
|
765
765
|
"@types/uuid": "^10.0.0",
|
|
766
766
|
acorn: "^8.16.0",
|
|
@@ -864,11 +864,11 @@ function getTracer() {
|
|
|
864
864
|
}
|
|
865
865
|
async function withActiveSpan(name, attrs, fn) {
|
|
866
866
|
const tracer = getTracer();
|
|
867
|
-
return await new Promise((
|
|
867
|
+
return await new Promise((resolve19, reject) => {
|
|
868
868
|
const callback = async (span) => {
|
|
869
869
|
try {
|
|
870
870
|
const res = await fn(span);
|
|
871
|
-
|
|
871
|
+
resolve19(res);
|
|
872
872
|
} catch (err) {
|
|
873
873
|
try {
|
|
874
874
|
if (err instanceof Error) span.recordException(err);
|
|
@@ -945,19 +945,19 @@ function __getOrCreateNdjsonPath() {
|
|
|
945
945
|
try {
|
|
946
946
|
if (process.env.VISOR_TELEMETRY_SINK && process.env.VISOR_TELEMETRY_SINK !== "file")
|
|
947
947
|
return null;
|
|
948
|
-
const
|
|
949
|
-
const
|
|
948
|
+
const path31 = require("path");
|
|
949
|
+
const fs27 = require("fs");
|
|
950
950
|
if (process.env.VISOR_FALLBACK_TRACE_FILE) {
|
|
951
951
|
__ndjsonPath = process.env.VISOR_FALLBACK_TRACE_FILE;
|
|
952
|
-
const dir =
|
|
953
|
-
if (!
|
|
952
|
+
const dir = path31.dirname(__ndjsonPath);
|
|
953
|
+
if (!fs27.existsSync(dir)) fs27.mkdirSync(dir, { recursive: true });
|
|
954
954
|
return __ndjsonPath;
|
|
955
955
|
}
|
|
956
|
-
const outDir = process.env.VISOR_TRACE_DIR ||
|
|
957
|
-
if (!
|
|
956
|
+
const outDir = process.env.VISOR_TRACE_DIR || path31.join(process.cwd(), "output", "traces");
|
|
957
|
+
if (!fs27.existsSync(outDir)) fs27.mkdirSync(outDir, { recursive: true });
|
|
958
958
|
if (!__ndjsonPath) {
|
|
959
959
|
const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
960
|
-
__ndjsonPath =
|
|
960
|
+
__ndjsonPath = path31.join(outDir, `${ts}.ndjson`);
|
|
961
961
|
}
|
|
962
962
|
return __ndjsonPath;
|
|
963
963
|
} catch {
|
|
@@ -966,11 +966,11 @@ function __getOrCreateNdjsonPath() {
|
|
|
966
966
|
}
|
|
967
967
|
function _appendRunMarker() {
|
|
968
968
|
try {
|
|
969
|
-
const
|
|
969
|
+
const fs27 = require("fs");
|
|
970
970
|
const p = __getOrCreateNdjsonPath();
|
|
971
971
|
if (!p) return;
|
|
972
972
|
const line = { name: "visor.run", attributes: { started: true } };
|
|
973
|
-
|
|
973
|
+
fs27.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
|
|
974
974
|
} catch {
|
|
975
975
|
}
|
|
976
976
|
}
|
|
@@ -3193,7 +3193,7 @@ var init_failure_condition_evaluator = __esm({
|
|
|
3193
3193
|
*/
|
|
3194
3194
|
evaluateExpression(condition, context2) {
|
|
3195
3195
|
try {
|
|
3196
|
-
const
|
|
3196
|
+
const normalize8 = (expr) => {
|
|
3197
3197
|
const trimmed = expr.trim();
|
|
3198
3198
|
if (!/[\n;]/.test(trimmed)) return trimmed;
|
|
3199
3199
|
const parts = trimmed.split(/[\n;]+/).map((s) => s.trim()).filter((s) => s.length > 0 && !s.startsWith("//"));
|
|
@@ -3351,7 +3351,7 @@ var init_failure_condition_evaluator = __esm({
|
|
|
3351
3351
|
try {
|
|
3352
3352
|
exec2 = this.sandbox.compile(`return (${raw});`);
|
|
3353
3353
|
} catch {
|
|
3354
|
-
const normalizedExpr =
|
|
3354
|
+
const normalizedExpr = normalize8(condition);
|
|
3355
3355
|
exec2 = this.sandbox.compile(`return (${normalizedExpr});`);
|
|
3356
3356
|
}
|
|
3357
3357
|
const result = exec2(scope).run();
|
|
@@ -3734,9 +3734,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
3734
3734
|
});
|
|
3735
3735
|
liquid.registerFilter("get", (obj, pathExpr) => {
|
|
3736
3736
|
if (obj == null) return void 0;
|
|
3737
|
-
const
|
|
3738
|
-
if (!
|
|
3739
|
-
const parts =
|
|
3737
|
+
const path31 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
|
|
3738
|
+
if (!path31) return obj;
|
|
3739
|
+
const parts = path31.split(".");
|
|
3740
3740
|
let cur = obj;
|
|
3741
3741
|
for (const p of parts) {
|
|
3742
3742
|
if (cur == null) return void 0;
|
|
@@ -3855,9 +3855,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
3855
3855
|
}
|
|
3856
3856
|
}
|
|
3857
3857
|
const defaultRole = typeof rolesCfg.default === "string" && rolesCfg.default.trim() ? rolesCfg.default.trim() : void 0;
|
|
3858
|
-
const getNested = (obj,
|
|
3859
|
-
if (!obj || !
|
|
3860
|
-
const parts =
|
|
3858
|
+
const getNested = (obj, path31) => {
|
|
3859
|
+
if (!obj || !path31) return void 0;
|
|
3860
|
+
const parts = path31.split(".");
|
|
3861
3861
|
let cur = obj;
|
|
3862
3862
|
for (const p of parts) {
|
|
3863
3863
|
if (cur == null) return void 0;
|
|
@@ -6409,8 +6409,8 @@ var init_dependency_gating = __esm({
|
|
|
6409
6409
|
async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
6410
6410
|
try {
|
|
6411
6411
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
6412
|
-
const
|
|
6413
|
-
const
|
|
6412
|
+
const fs27 = await import("fs/promises");
|
|
6413
|
+
const path31 = await import("path");
|
|
6414
6414
|
const schemaRaw = checkConfig.schema || "plain";
|
|
6415
6415
|
const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
|
|
6416
6416
|
let templateContent;
|
|
@@ -6418,24 +6418,24 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
|
6418
6418
|
templateContent = String(checkConfig.template.content);
|
|
6419
6419
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
6420
6420
|
const file = String(checkConfig.template.file);
|
|
6421
|
-
const resolved =
|
|
6422
|
-
templateContent = await
|
|
6421
|
+
const resolved = path31.resolve(process.cwd(), file);
|
|
6422
|
+
templateContent = await fs27.readFile(resolved, "utf-8");
|
|
6423
6423
|
} else if (schema && schema !== "plain") {
|
|
6424
6424
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
6425
6425
|
if (sanitized) {
|
|
6426
6426
|
const candidatePaths = [
|
|
6427
|
-
|
|
6427
|
+
path31.join(__dirname, "output", sanitized, "template.liquid"),
|
|
6428
6428
|
// bundled: dist/output/
|
|
6429
|
-
|
|
6429
|
+
path31.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
6430
6430
|
// source: output/
|
|
6431
|
-
|
|
6431
|
+
path31.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
6432
6432
|
// fallback: cwd/output/
|
|
6433
|
-
|
|
6433
|
+
path31.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
6434
6434
|
// fallback: cwd/dist/output/
|
|
6435
6435
|
];
|
|
6436
6436
|
for (const p of candidatePaths) {
|
|
6437
6437
|
try {
|
|
6438
|
-
templateContent = await
|
|
6438
|
+
templateContent = await fs27.readFile(p, "utf-8");
|
|
6439
6439
|
if (templateContent) break;
|
|
6440
6440
|
} catch {
|
|
6441
6441
|
}
|
|
@@ -6840,7 +6840,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
6840
6840
|
}
|
|
6841
6841
|
try {
|
|
6842
6842
|
const originalProbePath = process.env.PROBE_PATH;
|
|
6843
|
-
const
|
|
6843
|
+
const fs27 = require("fs");
|
|
6844
6844
|
const possiblePaths = [
|
|
6845
6845
|
// Relative to current working directory (most common in production)
|
|
6846
6846
|
path6.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
|
|
@@ -6851,7 +6851,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
6851
6851
|
];
|
|
6852
6852
|
let probeBinaryPath;
|
|
6853
6853
|
for (const candidatePath of possiblePaths) {
|
|
6854
|
-
if (
|
|
6854
|
+
if (fs27.existsSync(candidatePath)) {
|
|
6855
6855
|
probeBinaryPath = candidatePath;
|
|
6856
6856
|
break;
|
|
6857
6857
|
}
|
|
@@ -6972,7 +6972,7 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
6972
6972
|
if (chromiumPath) {
|
|
6973
6973
|
env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;
|
|
6974
6974
|
}
|
|
6975
|
-
const result = await new Promise((
|
|
6975
|
+
const result = await new Promise((resolve19) => {
|
|
6976
6976
|
const proc = (0, import_child_process.spawn)(
|
|
6977
6977
|
"npx",
|
|
6978
6978
|
[
|
|
@@ -7002,13 +7002,13 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
7002
7002
|
});
|
|
7003
7003
|
proc.on("close", (code) => {
|
|
7004
7004
|
if (code === 0) {
|
|
7005
|
-
|
|
7005
|
+
resolve19({ success: true });
|
|
7006
7006
|
} else {
|
|
7007
|
-
|
|
7007
|
+
resolve19({ success: false, error: stderr || `Exit code ${code}` });
|
|
7008
7008
|
}
|
|
7009
7009
|
});
|
|
7010
7010
|
proc.on("error", (err) => {
|
|
7011
|
-
|
|
7011
|
+
resolve19({ success: false, error: err.message });
|
|
7012
7012
|
});
|
|
7013
7013
|
});
|
|
7014
7014
|
if (!result.success) {
|
|
@@ -8170,8 +8170,8 @@ ${schemaString}`);
|
|
|
8170
8170
|
}
|
|
8171
8171
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8172
8172
|
try {
|
|
8173
|
-
const
|
|
8174
|
-
const
|
|
8173
|
+
const fs27 = require("fs");
|
|
8174
|
+
const path31 = require("path");
|
|
8175
8175
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8176
8176
|
const provider = this.config.provider || "auto";
|
|
8177
8177
|
const model = this.config.model || "default";
|
|
@@ -8285,20 +8285,20 @@ ${"=".repeat(60)}
|
|
|
8285
8285
|
`;
|
|
8286
8286
|
readableVersion += `${"=".repeat(60)}
|
|
8287
8287
|
`;
|
|
8288
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8289
|
-
if (!
|
|
8290
|
-
|
|
8288
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
8289
|
+
if (!fs27.existsSync(debugArtifactsDir)) {
|
|
8290
|
+
fs27.mkdirSync(debugArtifactsDir, { recursive: true });
|
|
8291
8291
|
}
|
|
8292
|
-
const debugFile =
|
|
8292
|
+
const debugFile = path31.join(
|
|
8293
8293
|
debugArtifactsDir,
|
|
8294
8294
|
`prompt-${_checkName || "unknown"}-${timestamp}.json`
|
|
8295
8295
|
);
|
|
8296
|
-
|
|
8297
|
-
const readableFile =
|
|
8296
|
+
fs27.writeFileSync(debugFile, debugJson, "utf-8");
|
|
8297
|
+
const readableFile = path31.join(
|
|
8298
8298
|
debugArtifactsDir,
|
|
8299
8299
|
`prompt-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8300
8300
|
);
|
|
8301
|
-
|
|
8301
|
+
fs27.writeFileSync(readableFile, readableVersion, "utf-8");
|
|
8302
8302
|
log(`
|
|
8303
8303
|
\u{1F4BE} Full debug info saved to:`);
|
|
8304
8304
|
log(` JSON: ${debugFile}`);
|
|
@@ -8331,8 +8331,8 @@ ${"=".repeat(60)}
|
|
|
8331
8331
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
8332
8332
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8333
8333
|
try {
|
|
8334
|
-
const
|
|
8335
|
-
const
|
|
8334
|
+
const fs27 = require("fs");
|
|
8335
|
+
const path31 = require("path");
|
|
8336
8336
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8337
8337
|
const agentAny2 = agent;
|
|
8338
8338
|
let fullHistory = [];
|
|
@@ -8343,8 +8343,8 @@ ${"=".repeat(60)}
|
|
|
8343
8343
|
} else if (agentAny2._messages) {
|
|
8344
8344
|
fullHistory = agentAny2._messages;
|
|
8345
8345
|
}
|
|
8346
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8347
|
-
const sessionBase =
|
|
8346
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
8347
|
+
const sessionBase = path31.join(
|
|
8348
8348
|
debugArtifactsDir,
|
|
8349
8349
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
8350
8350
|
);
|
|
@@ -8356,7 +8356,7 @@ ${"=".repeat(60)}
|
|
|
8356
8356
|
schema: effectiveSchema,
|
|
8357
8357
|
totalMessages: fullHistory.length
|
|
8358
8358
|
};
|
|
8359
|
-
|
|
8359
|
+
fs27.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
8360
8360
|
let readable = `=============================================================
|
|
8361
8361
|
`;
|
|
8362
8362
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -8383,7 +8383,7 @@ ${"=".repeat(60)}
|
|
|
8383
8383
|
`;
|
|
8384
8384
|
readable += content + "\n";
|
|
8385
8385
|
});
|
|
8386
|
-
|
|
8386
|
+
fs27.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
8387
8387
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
8388
8388
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
8389
8389
|
} catch (error) {
|
|
@@ -8392,11 +8392,11 @@ ${"=".repeat(60)}
|
|
|
8392
8392
|
}
|
|
8393
8393
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8394
8394
|
try {
|
|
8395
|
-
const
|
|
8396
|
-
const
|
|
8395
|
+
const fs27 = require("fs");
|
|
8396
|
+
const path31 = require("path");
|
|
8397
8397
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8398
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8399
|
-
const responseFile =
|
|
8398
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
8399
|
+
const responseFile = path31.join(
|
|
8400
8400
|
debugArtifactsDir,
|
|
8401
8401
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8402
8402
|
);
|
|
@@ -8429,7 +8429,7 @@ ${"=".repeat(60)}
|
|
|
8429
8429
|
`;
|
|
8430
8430
|
responseContent += `${"=".repeat(60)}
|
|
8431
8431
|
`;
|
|
8432
|
-
|
|
8432
|
+
fs27.writeFileSync(responseFile, responseContent, "utf-8");
|
|
8433
8433
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
8434
8434
|
} catch (error) {
|
|
8435
8435
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -8445,9 +8445,9 @@ ${"=".repeat(60)}
|
|
|
8445
8445
|
await agentAny._telemetryConfig.shutdown();
|
|
8446
8446
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
|
|
8447
8447
|
if (process.env.GITHUB_ACTIONS) {
|
|
8448
|
-
const
|
|
8449
|
-
if (
|
|
8450
|
-
const stats =
|
|
8448
|
+
const fs27 = require("fs");
|
|
8449
|
+
if (fs27.existsSync(agentAny._traceFilePath)) {
|
|
8450
|
+
const stats = fs27.statSync(agentAny._traceFilePath);
|
|
8451
8451
|
console.log(
|
|
8452
8452
|
`::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
|
|
8453
8453
|
);
|
|
@@ -8654,9 +8654,9 @@ ${schemaString}`);
|
|
|
8654
8654
|
const model = this.config.model || "default";
|
|
8655
8655
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8656
8656
|
try {
|
|
8657
|
-
const
|
|
8658
|
-
const
|
|
8659
|
-
const
|
|
8657
|
+
const fs27 = require("fs");
|
|
8658
|
+
const path31 = require("path");
|
|
8659
|
+
const os3 = require("os");
|
|
8660
8660
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8661
8661
|
const debugData = {
|
|
8662
8662
|
timestamp,
|
|
@@ -8728,19 +8728,19 @@ ${"=".repeat(60)}
|
|
|
8728
8728
|
`;
|
|
8729
8729
|
readableVersion += `${"=".repeat(60)}
|
|
8730
8730
|
`;
|
|
8731
|
-
const tempDir =
|
|
8732
|
-
const promptFile =
|
|
8733
|
-
|
|
8731
|
+
const tempDir = os3.tmpdir();
|
|
8732
|
+
const promptFile = path31.join(tempDir, `visor-prompt-${timestamp}.txt`);
|
|
8733
|
+
fs27.writeFileSync(promptFile, prompt, "utf-8");
|
|
8734
8734
|
log(`
|
|
8735
8735
|
\u{1F4BE} Prompt saved to: ${promptFile}`);
|
|
8736
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8736
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
8737
8737
|
try {
|
|
8738
|
-
const base =
|
|
8738
|
+
const base = path31.join(
|
|
8739
8739
|
debugArtifactsDir,
|
|
8740
8740
|
`prompt-${_checkName || "unknown"}-${timestamp}`
|
|
8741
8741
|
);
|
|
8742
|
-
|
|
8743
|
-
|
|
8742
|
+
fs27.writeFileSync(base + ".json", debugJson, "utf-8");
|
|
8743
|
+
fs27.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
|
|
8744
8744
|
log(`
|
|
8745
8745
|
\u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
|
|
8746
8746
|
} catch {
|
|
@@ -8785,8 +8785,8 @@ $ ${cliCommand}
|
|
|
8785
8785
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
8786
8786
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8787
8787
|
try {
|
|
8788
|
-
const
|
|
8789
|
-
const
|
|
8788
|
+
const fs27 = require("fs");
|
|
8789
|
+
const path31 = require("path");
|
|
8790
8790
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8791
8791
|
const agentAny = agent;
|
|
8792
8792
|
let fullHistory = [];
|
|
@@ -8797,8 +8797,8 @@ $ ${cliCommand}
|
|
|
8797
8797
|
} else if (agentAny._messages) {
|
|
8798
8798
|
fullHistory = agentAny._messages;
|
|
8799
8799
|
}
|
|
8800
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8801
|
-
const sessionBase =
|
|
8800
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
8801
|
+
const sessionBase = path31.join(
|
|
8802
8802
|
debugArtifactsDir,
|
|
8803
8803
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
8804
8804
|
);
|
|
@@ -8810,7 +8810,7 @@ $ ${cliCommand}
|
|
|
8810
8810
|
schema: effectiveSchema,
|
|
8811
8811
|
totalMessages: fullHistory.length
|
|
8812
8812
|
};
|
|
8813
|
-
|
|
8813
|
+
fs27.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
8814
8814
|
let readable = `=============================================================
|
|
8815
8815
|
`;
|
|
8816
8816
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -8837,7 +8837,7 @@ ${"=".repeat(60)}
|
|
|
8837
8837
|
`;
|
|
8838
8838
|
readable += content + "\n";
|
|
8839
8839
|
});
|
|
8840
|
-
|
|
8840
|
+
fs27.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
8841
8841
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
8842
8842
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
8843
8843
|
} catch (error) {
|
|
@@ -8846,11 +8846,11 @@ ${"=".repeat(60)}
|
|
|
8846
8846
|
}
|
|
8847
8847
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8848
8848
|
try {
|
|
8849
|
-
const
|
|
8850
|
-
const
|
|
8849
|
+
const fs27 = require("fs");
|
|
8850
|
+
const path31 = require("path");
|
|
8851
8851
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8852
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8853
|
-
const responseFile =
|
|
8852
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
8853
|
+
const responseFile = path31.join(
|
|
8854
8854
|
debugArtifactsDir,
|
|
8855
8855
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8856
8856
|
);
|
|
@@ -8883,7 +8883,7 @@ ${"=".repeat(60)}
|
|
|
8883
8883
|
`;
|
|
8884
8884
|
responseContent += `${"=".repeat(60)}
|
|
8885
8885
|
`;
|
|
8886
|
-
|
|
8886
|
+
fs27.writeFileSync(responseFile, responseContent, "utf-8");
|
|
8887
8887
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
8888
8888
|
} catch (error) {
|
|
8889
8889
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -8901,9 +8901,9 @@ ${"=".repeat(60)}
|
|
|
8901
8901
|
await telemetry.shutdown();
|
|
8902
8902
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
|
|
8903
8903
|
if (process.env.GITHUB_ACTIONS) {
|
|
8904
|
-
const
|
|
8905
|
-
if (
|
|
8906
|
-
const stats =
|
|
8904
|
+
const fs27 = require("fs");
|
|
8905
|
+
if (fs27.existsSync(traceFilePath)) {
|
|
8906
|
+
const stats = fs27.statSync(traceFilePath);
|
|
8907
8907
|
console.log(
|
|
8908
8908
|
`::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
|
|
8909
8909
|
);
|
|
@@ -8941,8 +8941,8 @@ ${"=".repeat(60)}
|
|
|
8941
8941
|
* Load schema content from schema files or inline definitions
|
|
8942
8942
|
*/
|
|
8943
8943
|
async loadSchemaContent(schema) {
|
|
8944
|
-
const
|
|
8945
|
-
const
|
|
8944
|
+
const fs27 = require("fs").promises;
|
|
8945
|
+
const path31 = require("path");
|
|
8946
8946
|
if (typeof schema === "object" && schema !== null) {
|
|
8947
8947
|
log("\u{1F4CB} Using inline schema object from configuration");
|
|
8948
8948
|
return JSON.stringify(schema);
|
|
@@ -8955,14 +8955,14 @@ ${"=".repeat(60)}
|
|
|
8955
8955
|
}
|
|
8956
8956
|
} catch {
|
|
8957
8957
|
}
|
|
8958
|
-
if ((schema.startsWith("./") || schema.includes(".json")) && !
|
|
8958
|
+
if ((schema.startsWith("./") || schema.includes(".json")) && !path31.isAbsolute(schema)) {
|
|
8959
8959
|
if (schema.includes("..") || schema.includes("\0")) {
|
|
8960
8960
|
throw new Error("Invalid schema path: path traversal not allowed");
|
|
8961
8961
|
}
|
|
8962
8962
|
try {
|
|
8963
|
-
const schemaPath =
|
|
8963
|
+
const schemaPath = path31.resolve(process.cwd(), schema);
|
|
8964
8964
|
log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
|
|
8965
|
-
const schemaContent = await
|
|
8965
|
+
const schemaContent = await fs27.readFile(schemaPath, "utf-8");
|
|
8966
8966
|
return schemaContent.trim();
|
|
8967
8967
|
} catch (error) {
|
|
8968
8968
|
throw new Error(
|
|
@@ -8976,22 +8976,22 @@ ${"=".repeat(60)}
|
|
|
8976
8976
|
}
|
|
8977
8977
|
const candidatePaths = [
|
|
8978
8978
|
// GitHub Action bundle location
|
|
8979
|
-
|
|
8979
|
+
path31.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
|
|
8980
8980
|
// Historical fallback when src/output was inadvertently bundled as output1/
|
|
8981
|
-
|
|
8981
|
+
path31.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
|
|
8982
8982
|
// Local dev (repo root)
|
|
8983
|
-
|
|
8983
|
+
path31.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
|
|
8984
8984
|
];
|
|
8985
8985
|
for (const schemaPath of candidatePaths) {
|
|
8986
8986
|
try {
|
|
8987
|
-
const schemaContent = await
|
|
8987
|
+
const schemaContent = await fs27.readFile(schemaPath, "utf-8");
|
|
8988
8988
|
return schemaContent.trim();
|
|
8989
8989
|
} catch {
|
|
8990
8990
|
}
|
|
8991
8991
|
}
|
|
8992
|
-
const distPath =
|
|
8993
|
-
const distAltPath =
|
|
8994
|
-
const cwdPath =
|
|
8992
|
+
const distPath = path31.join(__dirname, "output", sanitizedSchemaName, "schema.json");
|
|
8993
|
+
const distAltPath = path31.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
|
|
8994
|
+
const cwdPath = path31.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
|
|
8995
8995
|
throw new Error(
|
|
8996
8996
|
`Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
|
|
8997
8997
|
);
|
|
@@ -9236,7 +9236,7 @@ ${"=".repeat(60)}
|
|
|
9236
9236
|
* Generate mock response for testing
|
|
9237
9237
|
*/
|
|
9238
9238
|
async generateMockResponse(_prompt, _checkName, _schema) {
|
|
9239
|
-
await new Promise((
|
|
9239
|
+
await new Promise((resolve19) => setTimeout(resolve19, 500));
|
|
9240
9240
|
const name = (_checkName || "").toLowerCase();
|
|
9241
9241
|
if (name.includes("extract-facts")) {
|
|
9242
9242
|
const arr = Array.from({ length: 6 }, (_, i) => ({
|
|
@@ -9597,7 +9597,7 @@ var init_command_executor = __esm({
|
|
|
9597
9597
|
* Execute command with stdin input
|
|
9598
9598
|
*/
|
|
9599
9599
|
executeWithStdin(command, options) {
|
|
9600
|
-
return new Promise((
|
|
9600
|
+
return new Promise((resolve19, reject) => {
|
|
9601
9601
|
const childProcess = (0, import_child_process2.exec)(
|
|
9602
9602
|
command,
|
|
9603
9603
|
{
|
|
@@ -9609,7 +9609,7 @@ var init_command_executor = __esm({
|
|
|
9609
9609
|
if (error && error.killed && (error.code === "ETIMEDOUT" || error.signal === "SIGTERM")) {
|
|
9610
9610
|
reject(new Error(`Command timed out after ${options.timeout || 3e4}ms`));
|
|
9611
9611
|
} else {
|
|
9612
|
-
|
|
9612
|
+
resolve19({
|
|
9613
9613
|
stdout: stdout || "",
|
|
9614
9614
|
stderr: stderr || "",
|
|
9615
9615
|
exitCode: error ? error.code || 1 : 0
|
|
@@ -10939,6 +10939,12 @@ var init_workflow_registry = __esm({
|
|
|
10939
10939
|
message: "Input parameter schema is recommended"
|
|
10940
10940
|
});
|
|
10941
10941
|
}
|
|
10942
|
+
if (input.schema?.type === "array" && !input.schema.items) {
|
|
10943
|
+
warnings.push({
|
|
10944
|
+
path: `inputs[${i}].schema`,
|
|
10945
|
+
message: 'Array schema should define "items" (e.g. items: { type: string }). Some LLM providers (Gemini) reject array schemas without items.'
|
|
10946
|
+
});
|
|
10947
|
+
}
|
|
10942
10948
|
}
|
|
10943
10949
|
}
|
|
10944
10950
|
if (workflow.outputs) {
|
|
@@ -17609,17 +17615,17 @@ var init_workflow_check_provider = __esm({
|
|
|
17609
17615
|
* so it can be executed by the state machine as a nested workflow.
|
|
17610
17616
|
*/
|
|
17611
17617
|
async loadWorkflowFromConfigPath(sourcePath, baseDir) {
|
|
17612
|
-
const
|
|
17613
|
-
const
|
|
17618
|
+
const path31 = require("path");
|
|
17619
|
+
const fs27 = require("fs");
|
|
17614
17620
|
const yaml5 = require("js-yaml");
|
|
17615
|
-
const resolved =
|
|
17616
|
-
if (!
|
|
17621
|
+
const resolved = path31.isAbsolute(sourcePath) ? sourcePath : path31.resolve(baseDir, sourcePath);
|
|
17622
|
+
if (!fs27.existsSync(resolved)) {
|
|
17617
17623
|
throw new Error(`Workflow config not found at: ${resolved}`);
|
|
17618
17624
|
}
|
|
17619
|
-
const rawContent =
|
|
17625
|
+
const rawContent = fs27.readFileSync(resolved, "utf8");
|
|
17620
17626
|
const rawData = yaml5.load(rawContent);
|
|
17621
17627
|
if (rawData.imports && Array.isArray(rawData.imports)) {
|
|
17622
|
-
const configDir =
|
|
17628
|
+
const configDir = path31.dirname(resolved);
|
|
17623
17629
|
for (const source of rawData.imports) {
|
|
17624
17630
|
const results = await this.registry.import(source, {
|
|
17625
17631
|
basePath: configDir,
|
|
@@ -17649,8 +17655,8 @@ ${errors}`);
|
|
|
17649
17655
|
if (!steps || Object.keys(steps).length === 0) {
|
|
17650
17656
|
throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
|
|
17651
17657
|
}
|
|
17652
|
-
const id =
|
|
17653
|
-
const name = loaded.name || `Workflow from ${
|
|
17658
|
+
const id = path31.basename(resolved).replace(/\.(ya?ml)$/i, "");
|
|
17659
|
+
const name = loaded.name || `Workflow from ${path31.basename(resolved)}`;
|
|
17654
17660
|
const workflowDef = {
|
|
17655
17661
|
id,
|
|
17656
17662
|
name,
|
|
@@ -18456,8 +18462,8 @@ async function createStoreBackend(storageConfig, haConfig) {
|
|
|
18456
18462
|
case "mssql": {
|
|
18457
18463
|
try {
|
|
18458
18464
|
const loaderPath = "../../enterprise/loader";
|
|
18459
|
-
const { loadEnterpriseStoreBackend } = await import(loaderPath);
|
|
18460
|
-
return await
|
|
18465
|
+
const { loadEnterpriseStoreBackend: loadEnterpriseStoreBackend2 } = await import(loaderPath);
|
|
18466
|
+
return await loadEnterpriseStoreBackend2(driver, storageConfig, haConfig);
|
|
18461
18467
|
} catch (err) {
|
|
18462
18468
|
const msg = err instanceof Error ? err.message : String(err);
|
|
18463
18469
|
logger.error(`[StoreFactory] Failed to load enterprise ${driver} backend: ${msg}`);
|
|
@@ -20285,17 +20291,11 @@ function formatTrigger(trigger) {
|
|
|
20285
20291
|
return `\`${trigger.id.substring(0, 8)}\` - channels: ${channels} \u2192 workflow: "${trigger.workflow}"${filterStr}${status}`;
|
|
20286
20292
|
}
|
|
20287
20293
|
async function handleCreateTrigger(args, context2, store) {
|
|
20288
|
-
|
|
20289
|
-
|
|
20290
|
-
success: false,
|
|
20291
|
-
message: "Missing workflow",
|
|
20292
|
-
error: "Please specify the workflow to run when the trigger fires."
|
|
20293
|
-
};
|
|
20294
|
-
}
|
|
20295
|
-
if (context2.availableWorkflows && !context2.availableWorkflows.includes(args.workflow)) {
|
|
20294
|
+
const workflow = args.workflow || "default";
|
|
20295
|
+
if (context2.availableWorkflows && !context2.availableWorkflows.includes(workflow)) {
|
|
20296
20296
|
return {
|
|
20297
20297
|
success: false,
|
|
20298
|
-
message: `Workflow "${
|
|
20298
|
+
message: `Workflow "${workflow}" not found`,
|
|
20299
20299
|
error: `Available workflows: ${context2.availableWorkflows.slice(0, 5).join(", ")}${context2.availableWorkflows.length > 5 ? "..." : ""}`
|
|
20300
20300
|
};
|
|
20301
20301
|
}
|
|
@@ -20306,7 +20306,7 @@ async function handleCreateTrigger(args, context2, store) {
|
|
|
20306
20306
|
error: "Please specify at least one filter: trigger_channels, trigger_from, trigger_contains, or trigger_match."
|
|
20307
20307
|
};
|
|
20308
20308
|
}
|
|
20309
|
-
const permissionCheck = checkSchedulePermissions(context2,
|
|
20309
|
+
const permissionCheck = checkSchedulePermissions(context2, workflow);
|
|
20310
20310
|
if (!permissionCheck.allowed) {
|
|
20311
20311
|
return {
|
|
20312
20312
|
success: false,
|
|
@@ -20326,13 +20326,13 @@ async function handleCreateTrigger(args, context2, store) {
|
|
|
20326
20326
|
contains: args.trigger_contains,
|
|
20327
20327
|
matchPattern: args.trigger_match,
|
|
20328
20328
|
threads: args.trigger_threads ?? "any",
|
|
20329
|
-
workflow
|
|
20329
|
+
workflow,
|
|
20330
20330
|
inputs: args.workflow_inputs,
|
|
20331
20331
|
status: "active",
|
|
20332
20332
|
enabled: true
|
|
20333
20333
|
});
|
|
20334
20334
|
logger.info(
|
|
20335
|
-
`[ScheduleTool] Created message trigger ${trigger.id} for user ${context2.userId}: workflow="${
|
|
20335
|
+
`[ScheduleTool] Created message trigger ${trigger.id} for user ${context2.userId}: workflow="${workflow}"`
|
|
20336
20336
|
);
|
|
20337
20337
|
return {
|
|
20338
20338
|
success: true,
|
|
@@ -20449,12 +20449,17 @@ function getScheduleToolDefinition() {
|
|
|
20449
20449
|
|
|
20450
20450
|
YOU (the AI) must extract and structure all scheduling parameters. Do NOT pass natural language time expressions - convert them to cron or ISO timestamps.
|
|
20451
20451
|
|
|
20452
|
-
CRITICAL WORKFLOW RULE:
|
|
20452
|
+
CRITICAL WORKFLOW RULE (for 'create' action only):
|
|
20453
20453
|
- To schedule a WORKFLOW, the user MUST use a '%' prefix (e.g., "schedule %my-workflow daily").
|
|
20454
20454
|
- If the '%' prefix is present, extract the word following it as the 'workflow' parameter (without the '%').
|
|
20455
20455
|
- If the '%' prefix is NOT present, the request is a simple text reminder. The ENTIRE user request (excluding the schedule expression) MUST be placed in the 'reminder_text' parameter.
|
|
20456
20456
|
- DO NOT guess or infer a workflow name from a user's request without the '%' prefix.
|
|
20457
20457
|
|
|
20458
|
+
WORKFLOW RULE FOR TRIGGERS (create_trigger action):
|
|
20459
|
+
- Triggers ALWAYS require a workflow. The '%' prefix rule does NOT apply to triggers.
|
|
20460
|
+
- If the user specifies a workflow name (with or without '%'), use it directly.
|
|
20461
|
+
- If the user does NOT specify a workflow name, use "default" as the workflow name.
|
|
20462
|
+
|
|
20458
20463
|
ACTIONS:
|
|
20459
20464
|
- create: Schedule a new reminder or workflow
|
|
20460
20465
|
- list: Show user's active schedules
|
|
@@ -20467,7 +20472,7 @@ Slack messages in specific channels. Use the create_trigger, list_triggers, dele
|
|
|
20467
20472
|
actions for this. Message triggers fire workflows based on message content, channel, sender, and thread scope.
|
|
20468
20473
|
|
|
20469
20474
|
TRIGGER ACTIONS:
|
|
20470
|
-
- create_trigger: Create a new message trigger (requires
|
|
20475
|
+
- create_trigger: Create a new message trigger (requires at least one filter; workflow defaults to "default" if not specified). Supports filtering by user IDs (trigger_from), channels, keywords, regex, and thread scope.
|
|
20471
20476
|
- list_triggers: Show user's message triggers
|
|
20472
20477
|
- delete_trigger: Remove a trigger by ID
|
|
20473
20478
|
- update_trigger: Enable/disable a trigger by ID
|
|
@@ -20567,6 +20572,9 @@ User: "watch #cicd for messages containing 'failed' and run %handle-cicd"
|
|
|
20567
20572
|
User: "trigger on each of my messages in this channel and run %auto-reply" (user ID is U3P2L4XNE)
|
|
20568
20573
|
\u2192 { "action": "create_trigger", "trigger_channels": ["C09V810NY6R"], "trigger_from": ["U3P2L4XNE"], "workflow": "auto-reply" }
|
|
20569
20574
|
|
|
20575
|
+
User: "trigger on each message in this channel" (no workflow specified \u2014 use "default")
|
|
20576
|
+
\u2192 { "action": "create_trigger", "trigger_channels": ["C09V810NY6R"], "workflow": "default" }
|
|
20577
|
+
|
|
20570
20578
|
User: "list my message triggers"
|
|
20571
20579
|
\u2192 { "action": "list_triggers" }
|
|
20572
20580
|
|
|
@@ -20600,7 +20608,7 @@ User: "disable trigger abc123"
|
|
|
20600
20608
|
},
|
|
20601
20609
|
workflow: {
|
|
20602
20610
|
type: "string",
|
|
20603
|
-
description: 'For create: workflow ID to run. ONLY populate this if the user used the % prefix (e.g., "%my-workflow"). Extract the name without the % symbol. If no % prefix, use reminder_text instead.'
|
|
20611
|
+
description: 'For create: workflow ID to run. ONLY populate this if the user used the % prefix (e.g., "%my-workflow"). Extract the name without the % symbol. If no % prefix, use reminder_text instead. For create_trigger: workflow is REQUIRED \u2014 use the workflow name the user specified (% prefix optional), or "default" if not specified.'
|
|
20604
20612
|
},
|
|
20605
20613
|
workflow_inputs: {
|
|
20606
20614
|
type: "object",
|
|
@@ -20904,7 +20912,16 @@ async function executeScheduleTool(args, slackContext, slackClient, availableWor
|
|
|
20904
20912
|
run_at: args.run_at,
|
|
20905
20913
|
original_expression: args.original_expression,
|
|
20906
20914
|
// For cancel/pause/resume
|
|
20907
|
-
schedule_id: args.schedule_id
|
|
20915
|
+
schedule_id: args.schedule_id,
|
|
20916
|
+
// For trigger actions
|
|
20917
|
+
trigger_channels: args.trigger_channels,
|
|
20918
|
+
trigger_from: args.trigger_from,
|
|
20919
|
+
trigger_from_bots: args.trigger_from_bots,
|
|
20920
|
+
trigger_contains: args.trigger_contains,
|
|
20921
|
+
trigger_match: args.trigger_match,
|
|
20922
|
+
trigger_threads: args.trigger_threads,
|
|
20923
|
+
trigger_description: args.trigger_description,
|
|
20924
|
+
trigger_id: args.trigger_id
|
|
20908
20925
|
};
|
|
20909
20926
|
if (!toolArgs.target_type && slackContext.channel) {
|
|
20910
20927
|
if (slackContext.threadTs) {
|
|
@@ -21011,7 +21028,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21011
21028
|
* Returns the actual bound port number
|
|
21012
21029
|
*/
|
|
21013
21030
|
async start() {
|
|
21014
|
-
return new Promise((
|
|
21031
|
+
return new Promise((resolve19, reject) => {
|
|
21015
21032
|
try {
|
|
21016
21033
|
this.server = import_http.default.createServer((req, res) => {
|
|
21017
21034
|
this.handleRequest(req, res).catch((error) => {
|
|
@@ -21045,7 +21062,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21045
21062
|
);
|
|
21046
21063
|
}
|
|
21047
21064
|
this.startKeepalive();
|
|
21048
|
-
|
|
21065
|
+
resolve19(this.port);
|
|
21049
21066
|
});
|
|
21050
21067
|
} catch (error) {
|
|
21051
21068
|
reject(error);
|
|
@@ -21108,7 +21125,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21108
21125
|
logger.debug(
|
|
21109
21126
|
`[CustomToolsSSEServer:${this.sessionId}] Grace period before stop: ${waitMs}ms (activeToolCalls=${this.activeToolCalls})`
|
|
21110
21127
|
);
|
|
21111
|
-
await new Promise((
|
|
21128
|
+
await new Promise((resolve19) => setTimeout(resolve19, waitMs));
|
|
21112
21129
|
}
|
|
21113
21130
|
}
|
|
21114
21131
|
if (this.activeToolCalls > 0) {
|
|
@@ -21117,7 +21134,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21117
21134
|
`[CustomToolsSSEServer:${this.sessionId}] Waiting for ${this.activeToolCalls} active tool call(s) before stop`
|
|
21118
21135
|
);
|
|
21119
21136
|
while (this.activeToolCalls > 0 && Date.now() - startedAt < effectiveDrainTimeoutMs) {
|
|
21120
|
-
await new Promise((
|
|
21137
|
+
await new Promise((resolve19) => setTimeout(resolve19, 250));
|
|
21121
21138
|
}
|
|
21122
21139
|
if (this.activeToolCalls > 0) {
|
|
21123
21140
|
logger.warn(
|
|
@@ -21142,21 +21159,21 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21142
21159
|
}
|
|
21143
21160
|
this.connections.clear();
|
|
21144
21161
|
if (this.server) {
|
|
21145
|
-
await new Promise((
|
|
21162
|
+
await new Promise((resolve19, reject) => {
|
|
21146
21163
|
const timeout = setTimeout(() => {
|
|
21147
21164
|
if (this.debug) {
|
|
21148
21165
|
logger.debug(
|
|
21149
21166
|
`[CustomToolsSSEServer:${this.sessionId}] Force closing server after timeout`
|
|
21150
21167
|
);
|
|
21151
21168
|
}
|
|
21152
|
-
this.server?.close(() =>
|
|
21169
|
+
this.server?.close(() => resolve19());
|
|
21153
21170
|
}, 5e3);
|
|
21154
21171
|
this.server.close((error) => {
|
|
21155
21172
|
clearTimeout(timeout);
|
|
21156
21173
|
if (error) {
|
|
21157
21174
|
reject(error);
|
|
21158
21175
|
} else {
|
|
21159
|
-
|
|
21176
|
+
resolve19();
|
|
21160
21177
|
}
|
|
21161
21178
|
});
|
|
21162
21179
|
});
|
|
@@ -21544,7 +21561,16 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21544
21561
|
run_at: args.run_at,
|
|
21545
21562
|
original_expression: args.original_expression,
|
|
21546
21563
|
// For cancel/pause/resume
|
|
21547
|
-
schedule_id: args.schedule_id
|
|
21564
|
+
schedule_id: args.schedule_id,
|
|
21565
|
+
// For trigger actions
|
|
21566
|
+
trigger_channels: args.trigger_channels,
|
|
21567
|
+
trigger_from: args.trigger_from,
|
|
21568
|
+
trigger_from_bots: args.trigger_from_bots,
|
|
21569
|
+
trigger_contains: args.trigger_contains,
|
|
21570
|
+
trigger_match: args.trigger_match,
|
|
21571
|
+
trigger_threads: args.trigger_threads,
|
|
21572
|
+
trigger_description: args.trigger_description,
|
|
21573
|
+
trigger_id: args.trigger_id
|
|
21548
21574
|
};
|
|
21549
21575
|
const scheduleResult = await handleScheduleAction(scheduleArgs, scheduleContext);
|
|
21550
21576
|
result = scheduleResult.success ? scheduleResult.message : `Error: ${scheduleResult.error}`;
|
|
@@ -21582,7 +21608,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21582
21608
|
logger.warn(
|
|
21583
21609
|
`[CustomToolsSSEServer:${this.sessionId}] Tool ${toolName} failed (attempt ${attempt + 1}/${retryCount + 1}): ${errorMsg}. Retrying in ${delay}ms`
|
|
21584
21610
|
);
|
|
21585
|
-
await new Promise((
|
|
21611
|
+
await new Promise((resolve19) => setTimeout(resolve19, delay));
|
|
21586
21612
|
attempt++;
|
|
21587
21613
|
}
|
|
21588
21614
|
}
|
|
@@ -21895,9 +21921,9 @@ var init_ai_check_provider = __esm({
|
|
|
21895
21921
|
} else {
|
|
21896
21922
|
resolvedPath = import_path7.default.resolve(process.cwd(), str);
|
|
21897
21923
|
}
|
|
21898
|
-
const
|
|
21924
|
+
const fs27 = require("fs").promises;
|
|
21899
21925
|
try {
|
|
21900
|
-
const stat2 = await
|
|
21926
|
+
const stat2 = await fs27.stat(resolvedPath);
|
|
21901
21927
|
return stat2.isFile();
|
|
21902
21928
|
} catch {
|
|
21903
21929
|
return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
|
|
@@ -27825,14 +27851,14 @@ var require_util = __commonJS({
|
|
|
27825
27851
|
}
|
|
27826
27852
|
const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
|
|
27827
27853
|
let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
|
|
27828
|
-
let
|
|
27854
|
+
let path31 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
|
|
27829
27855
|
if (origin.endsWith("/")) {
|
|
27830
27856
|
origin = origin.substring(0, origin.length - 1);
|
|
27831
27857
|
}
|
|
27832
|
-
if (
|
|
27833
|
-
|
|
27858
|
+
if (path31 && !path31.startsWith("/")) {
|
|
27859
|
+
path31 = `/${path31}`;
|
|
27834
27860
|
}
|
|
27835
|
-
url = new URL(origin +
|
|
27861
|
+
url = new URL(origin + path31);
|
|
27836
27862
|
}
|
|
27837
27863
|
return url;
|
|
27838
27864
|
}
|
|
@@ -29446,20 +29472,20 @@ var require_parseParams = __commonJS({
|
|
|
29446
29472
|
var require_basename = __commonJS({
|
|
29447
29473
|
"node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
|
|
29448
29474
|
"use strict";
|
|
29449
|
-
module2.exports = function basename4(
|
|
29450
|
-
if (typeof
|
|
29475
|
+
module2.exports = function basename4(path31) {
|
|
29476
|
+
if (typeof path31 !== "string") {
|
|
29451
29477
|
return "";
|
|
29452
29478
|
}
|
|
29453
|
-
for (var i =
|
|
29454
|
-
switch (
|
|
29479
|
+
for (var i = path31.length - 1; i >= 0; --i) {
|
|
29480
|
+
switch (path31.charCodeAt(i)) {
|
|
29455
29481
|
case 47:
|
|
29456
29482
|
// '/'
|
|
29457
29483
|
case 92:
|
|
29458
|
-
|
|
29459
|
-
return
|
|
29484
|
+
path31 = path31.slice(i + 1);
|
|
29485
|
+
return path31 === ".." || path31 === "." ? "" : path31;
|
|
29460
29486
|
}
|
|
29461
29487
|
}
|
|
29462
|
-
return
|
|
29488
|
+
return path31 === ".." || path31 === "." ? "" : path31;
|
|
29463
29489
|
};
|
|
29464
29490
|
}
|
|
29465
29491
|
});
|
|
@@ -30463,11 +30489,11 @@ var require_util2 = __commonJS({
|
|
|
30463
30489
|
var assert = require("assert");
|
|
30464
30490
|
var { isUint8Array } = require("util/types");
|
|
30465
30491
|
var supportedHashes = [];
|
|
30466
|
-
var
|
|
30492
|
+
var crypto4;
|
|
30467
30493
|
try {
|
|
30468
|
-
|
|
30494
|
+
crypto4 = require("crypto");
|
|
30469
30495
|
const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
|
|
30470
|
-
supportedHashes =
|
|
30496
|
+
supportedHashes = crypto4.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
|
|
30471
30497
|
} catch {
|
|
30472
30498
|
}
|
|
30473
30499
|
function responseURL(response) {
|
|
@@ -30744,7 +30770,7 @@ var require_util2 = __commonJS({
|
|
|
30744
30770
|
}
|
|
30745
30771
|
}
|
|
30746
30772
|
function bytesMatch(bytes, metadataList) {
|
|
30747
|
-
if (
|
|
30773
|
+
if (crypto4 === void 0) {
|
|
30748
30774
|
return true;
|
|
30749
30775
|
}
|
|
30750
30776
|
const parsedMetadata = parseMetadata(metadataList);
|
|
@@ -30759,7 +30785,7 @@ var require_util2 = __commonJS({
|
|
|
30759
30785
|
for (const item of metadata) {
|
|
30760
30786
|
const algorithm = item.algo;
|
|
30761
30787
|
const expectedValue = item.hash;
|
|
30762
|
-
let actualValue =
|
|
30788
|
+
let actualValue = crypto4.createHash(algorithm).update(bytes).digest("base64");
|
|
30763
30789
|
if (actualValue[actualValue.length - 1] === "=") {
|
|
30764
30790
|
if (actualValue[actualValue.length - 2] === "=") {
|
|
30765
30791
|
actualValue = actualValue.slice(0, -2);
|
|
@@ -30852,8 +30878,8 @@ var require_util2 = __commonJS({
|
|
|
30852
30878
|
function createDeferredPromise() {
|
|
30853
30879
|
let res;
|
|
30854
30880
|
let rej;
|
|
30855
|
-
const promise = new Promise((
|
|
30856
|
-
res =
|
|
30881
|
+
const promise = new Promise((resolve19, reject) => {
|
|
30882
|
+
res = resolve19;
|
|
30857
30883
|
rej = reject;
|
|
30858
30884
|
});
|
|
30859
30885
|
return { promise, resolve: res, reject: rej };
|
|
@@ -32106,8 +32132,8 @@ var require_body = __commonJS({
|
|
|
32106
32132
|
var { parseMIMEType, serializeAMimeType } = require_dataURL();
|
|
32107
32133
|
var random;
|
|
32108
32134
|
try {
|
|
32109
|
-
const
|
|
32110
|
-
random = (max) =>
|
|
32135
|
+
const crypto4 = require("crypto");
|
|
32136
|
+
random = (max) => crypto4.randomInt(0, max);
|
|
32111
32137
|
} catch {
|
|
32112
32138
|
random = (max) => Math.floor(Math.random(max));
|
|
32113
32139
|
}
|
|
@@ -32358,8 +32384,8 @@ Content-Type: ${value.type || "application/octet-stream"}\r
|
|
|
32358
32384
|
});
|
|
32359
32385
|
}
|
|
32360
32386
|
});
|
|
32361
|
-
const busboyResolve = new Promise((
|
|
32362
|
-
busboy.on("finish",
|
|
32387
|
+
const busboyResolve = new Promise((resolve19, reject) => {
|
|
32388
|
+
busboy.on("finish", resolve19);
|
|
32363
32389
|
busboy.on("error", (err) => reject(new TypeError(err)));
|
|
32364
32390
|
});
|
|
32365
32391
|
if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk);
|
|
@@ -32490,7 +32516,7 @@ var require_request = __commonJS({
|
|
|
32490
32516
|
}
|
|
32491
32517
|
var Request = class _Request {
|
|
32492
32518
|
constructor(origin, {
|
|
32493
|
-
path:
|
|
32519
|
+
path: path31,
|
|
32494
32520
|
method,
|
|
32495
32521
|
body,
|
|
32496
32522
|
headers,
|
|
@@ -32504,11 +32530,11 @@ var require_request = __commonJS({
|
|
|
32504
32530
|
throwOnError,
|
|
32505
32531
|
expectContinue
|
|
32506
32532
|
}, handler) {
|
|
32507
|
-
if (typeof
|
|
32533
|
+
if (typeof path31 !== "string") {
|
|
32508
32534
|
throw new InvalidArgumentError("path must be a string");
|
|
32509
|
-
} else if (
|
|
32535
|
+
} else if (path31[0] !== "/" && !(path31.startsWith("http://") || path31.startsWith("https://")) && method !== "CONNECT") {
|
|
32510
32536
|
throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
|
|
32511
|
-
} else if (invalidPathRegex.exec(
|
|
32537
|
+
} else if (invalidPathRegex.exec(path31) !== null) {
|
|
32512
32538
|
throw new InvalidArgumentError("invalid request path");
|
|
32513
32539
|
}
|
|
32514
32540
|
if (typeof method !== "string") {
|
|
@@ -32571,7 +32597,7 @@ var require_request = __commonJS({
|
|
|
32571
32597
|
this.completed = false;
|
|
32572
32598
|
this.aborted = false;
|
|
32573
32599
|
this.upgrade = upgrade || null;
|
|
32574
|
-
this.path = query ? util.buildURL(
|
|
32600
|
+
this.path = query ? util.buildURL(path31, query) : path31;
|
|
32575
32601
|
this.origin = origin;
|
|
32576
32602
|
this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
|
|
32577
32603
|
this.blocking = blocking == null ? false : blocking;
|
|
@@ -32893,9 +32919,9 @@ var require_dispatcher_base = __commonJS({
|
|
|
32893
32919
|
}
|
|
32894
32920
|
close(callback) {
|
|
32895
32921
|
if (callback === void 0) {
|
|
32896
|
-
return new Promise((
|
|
32922
|
+
return new Promise((resolve19, reject) => {
|
|
32897
32923
|
this.close((err, data) => {
|
|
32898
|
-
return err ? reject(err) :
|
|
32924
|
+
return err ? reject(err) : resolve19(data);
|
|
32899
32925
|
});
|
|
32900
32926
|
});
|
|
32901
32927
|
}
|
|
@@ -32933,12 +32959,12 @@ var require_dispatcher_base = __commonJS({
|
|
|
32933
32959
|
err = null;
|
|
32934
32960
|
}
|
|
32935
32961
|
if (callback === void 0) {
|
|
32936
|
-
return new Promise((
|
|
32962
|
+
return new Promise((resolve19, reject) => {
|
|
32937
32963
|
this.destroy(err, (err2, data) => {
|
|
32938
32964
|
return err2 ? (
|
|
32939
32965
|
/* istanbul ignore next: should never error */
|
|
32940
32966
|
reject(err2)
|
|
32941
|
-
) :
|
|
32967
|
+
) : resolve19(data);
|
|
32942
32968
|
});
|
|
32943
32969
|
});
|
|
32944
32970
|
}
|
|
@@ -33579,9 +33605,9 @@ var require_RedirectHandler = __commonJS({
|
|
|
33579
33605
|
return this.handler.onHeaders(statusCode, headers, resume, statusText);
|
|
33580
33606
|
}
|
|
33581
33607
|
const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
|
|
33582
|
-
const
|
|
33608
|
+
const path31 = search ? `${pathname}${search}` : pathname;
|
|
33583
33609
|
this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
|
|
33584
|
-
this.opts.path =
|
|
33610
|
+
this.opts.path = path31;
|
|
33585
33611
|
this.opts.origin = origin;
|
|
33586
33612
|
this.opts.maxRedirections = 0;
|
|
33587
33613
|
this.opts.query = null;
|
|
@@ -34000,16 +34026,16 @@ var require_client = __commonJS({
|
|
|
34000
34026
|
return this[kNeedDrain] < 2;
|
|
34001
34027
|
}
|
|
34002
34028
|
async [kClose]() {
|
|
34003
|
-
return new Promise((
|
|
34029
|
+
return new Promise((resolve19) => {
|
|
34004
34030
|
if (!this[kSize]) {
|
|
34005
|
-
|
|
34031
|
+
resolve19(null);
|
|
34006
34032
|
} else {
|
|
34007
|
-
this[kClosedResolve] =
|
|
34033
|
+
this[kClosedResolve] = resolve19;
|
|
34008
34034
|
}
|
|
34009
34035
|
});
|
|
34010
34036
|
}
|
|
34011
34037
|
async [kDestroy](err) {
|
|
34012
|
-
return new Promise((
|
|
34038
|
+
return new Promise((resolve19) => {
|
|
34013
34039
|
const requests = this[kQueue].splice(this[kPendingIdx]);
|
|
34014
34040
|
for (let i = 0; i < requests.length; i++) {
|
|
34015
34041
|
const request = requests[i];
|
|
@@ -34020,7 +34046,7 @@ var require_client = __commonJS({
|
|
|
34020
34046
|
this[kClosedResolve]();
|
|
34021
34047
|
this[kClosedResolve] = null;
|
|
34022
34048
|
}
|
|
34023
|
-
|
|
34049
|
+
resolve19();
|
|
34024
34050
|
};
|
|
34025
34051
|
if (this[kHTTP2Session] != null) {
|
|
34026
34052
|
util.destroy(this[kHTTP2Session], err);
|
|
@@ -34600,7 +34626,7 @@ var require_client = __commonJS({
|
|
|
34600
34626
|
});
|
|
34601
34627
|
}
|
|
34602
34628
|
try {
|
|
34603
|
-
const socket = await new Promise((
|
|
34629
|
+
const socket = await new Promise((resolve19, reject) => {
|
|
34604
34630
|
client[kConnector]({
|
|
34605
34631
|
host,
|
|
34606
34632
|
hostname,
|
|
@@ -34612,7 +34638,7 @@ var require_client = __commonJS({
|
|
|
34612
34638
|
if (err) {
|
|
34613
34639
|
reject(err);
|
|
34614
34640
|
} else {
|
|
34615
|
-
|
|
34641
|
+
resolve19(socket2);
|
|
34616
34642
|
}
|
|
34617
34643
|
});
|
|
34618
34644
|
});
|
|
@@ -34823,7 +34849,7 @@ var require_client = __commonJS({
|
|
|
34823
34849
|
writeH2(client, client[kHTTP2Session], request);
|
|
34824
34850
|
return;
|
|
34825
34851
|
}
|
|
34826
|
-
const { body, method, path:
|
|
34852
|
+
const { body, method, path: path31, host, upgrade, headers, blocking, reset } = request;
|
|
34827
34853
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
34828
34854
|
if (body && typeof body.read === "function") {
|
|
34829
34855
|
body.read(0);
|
|
@@ -34873,7 +34899,7 @@ var require_client = __commonJS({
|
|
|
34873
34899
|
if (blocking) {
|
|
34874
34900
|
socket[kBlocking] = true;
|
|
34875
34901
|
}
|
|
34876
|
-
let header = `${method} ${
|
|
34902
|
+
let header = `${method} ${path31} HTTP/1.1\r
|
|
34877
34903
|
`;
|
|
34878
34904
|
if (typeof host === "string") {
|
|
34879
34905
|
header += `host: ${host}\r
|
|
@@ -34936,7 +34962,7 @@ upgrade: ${upgrade}\r
|
|
|
34936
34962
|
return true;
|
|
34937
34963
|
}
|
|
34938
34964
|
function writeH2(client, session, request) {
|
|
34939
|
-
const { body, method, path:
|
|
34965
|
+
const { body, method, path: path31, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
|
|
34940
34966
|
let headers;
|
|
34941
34967
|
if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim());
|
|
34942
34968
|
else headers = reqHeaders;
|
|
@@ -34979,7 +35005,7 @@ upgrade: ${upgrade}\r
|
|
|
34979
35005
|
});
|
|
34980
35006
|
return true;
|
|
34981
35007
|
}
|
|
34982
|
-
headers[HTTP2_HEADER_PATH] =
|
|
35008
|
+
headers[HTTP2_HEADER_PATH] = path31;
|
|
34983
35009
|
headers[HTTP2_HEADER_SCHEME] = "https";
|
|
34984
35010
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
34985
35011
|
if (body && typeof body.read === "function") {
|
|
@@ -35236,12 +35262,12 @@ upgrade: ${upgrade}\r
|
|
|
35236
35262
|
cb();
|
|
35237
35263
|
}
|
|
35238
35264
|
}
|
|
35239
|
-
const waitForDrain = () => new Promise((
|
|
35265
|
+
const waitForDrain = () => new Promise((resolve19, reject) => {
|
|
35240
35266
|
assert(callback === null);
|
|
35241
35267
|
if (socket[kError]) {
|
|
35242
35268
|
reject(socket[kError]);
|
|
35243
35269
|
} else {
|
|
35244
|
-
callback =
|
|
35270
|
+
callback = resolve19;
|
|
35245
35271
|
}
|
|
35246
35272
|
});
|
|
35247
35273
|
if (client[kHTTPConnVersion] === "h2") {
|
|
@@ -35587,8 +35613,8 @@ var require_pool_base = __commonJS({
|
|
|
35587
35613
|
if (this[kQueue].isEmpty()) {
|
|
35588
35614
|
return Promise.all(this[kClients].map((c) => c.close()));
|
|
35589
35615
|
} else {
|
|
35590
|
-
return new Promise((
|
|
35591
|
-
this[kClosedResolve] =
|
|
35616
|
+
return new Promise((resolve19) => {
|
|
35617
|
+
this[kClosedResolve] = resolve19;
|
|
35592
35618
|
});
|
|
35593
35619
|
}
|
|
35594
35620
|
}
|
|
@@ -36166,7 +36192,7 @@ var require_readable = __commonJS({
|
|
|
36166
36192
|
if (this.closed) {
|
|
36167
36193
|
return Promise.resolve(null);
|
|
36168
36194
|
}
|
|
36169
|
-
return new Promise((
|
|
36195
|
+
return new Promise((resolve19, reject) => {
|
|
36170
36196
|
const signalListenerCleanup = signal ? util.addAbortListener(signal, () => {
|
|
36171
36197
|
this.destroy();
|
|
36172
36198
|
}) : noop;
|
|
@@ -36175,7 +36201,7 @@ var require_readable = __commonJS({
|
|
|
36175
36201
|
if (signal && signal.aborted) {
|
|
36176
36202
|
reject(signal.reason || Object.assign(new Error("The operation was aborted"), { name: "AbortError" }));
|
|
36177
36203
|
} else {
|
|
36178
|
-
|
|
36204
|
+
resolve19(null);
|
|
36179
36205
|
}
|
|
36180
36206
|
}).on("error", noop).on("data", function(chunk) {
|
|
36181
36207
|
limit -= chunk.length;
|
|
@@ -36197,11 +36223,11 @@ var require_readable = __commonJS({
|
|
|
36197
36223
|
throw new TypeError("unusable");
|
|
36198
36224
|
}
|
|
36199
36225
|
assert(!stream[kConsume]);
|
|
36200
|
-
return new Promise((
|
|
36226
|
+
return new Promise((resolve19, reject) => {
|
|
36201
36227
|
stream[kConsume] = {
|
|
36202
36228
|
type,
|
|
36203
36229
|
stream,
|
|
36204
|
-
resolve:
|
|
36230
|
+
resolve: resolve19,
|
|
36205
36231
|
reject,
|
|
36206
36232
|
length: 0,
|
|
36207
36233
|
body: []
|
|
@@ -36236,12 +36262,12 @@ var require_readable = __commonJS({
|
|
|
36236
36262
|
}
|
|
36237
36263
|
}
|
|
36238
36264
|
function consumeEnd(consume2) {
|
|
36239
|
-
const { type, body, resolve:
|
|
36265
|
+
const { type, body, resolve: resolve19, stream, length } = consume2;
|
|
36240
36266
|
try {
|
|
36241
36267
|
if (type === "text") {
|
|
36242
|
-
|
|
36268
|
+
resolve19(toUSVString(Buffer.concat(body)));
|
|
36243
36269
|
} else if (type === "json") {
|
|
36244
|
-
|
|
36270
|
+
resolve19(JSON.parse(Buffer.concat(body)));
|
|
36245
36271
|
} else if (type === "arrayBuffer") {
|
|
36246
36272
|
const dst = new Uint8Array(length);
|
|
36247
36273
|
let pos = 0;
|
|
@@ -36249,12 +36275,12 @@ var require_readable = __commonJS({
|
|
|
36249
36275
|
dst.set(buf, pos);
|
|
36250
36276
|
pos += buf.byteLength;
|
|
36251
36277
|
}
|
|
36252
|
-
|
|
36278
|
+
resolve19(dst.buffer);
|
|
36253
36279
|
} else if (type === "blob") {
|
|
36254
36280
|
if (!Blob2) {
|
|
36255
36281
|
Blob2 = require("buffer").Blob;
|
|
36256
36282
|
}
|
|
36257
|
-
|
|
36283
|
+
resolve19(new Blob2(body, { type: stream[kContentType] }));
|
|
36258
36284
|
}
|
|
36259
36285
|
consumeFinish(consume2);
|
|
36260
36286
|
} catch (err) {
|
|
@@ -36511,9 +36537,9 @@ var require_api_request = __commonJS({
|
|
|
36511
36537
|
};
|
|
36512
36538
|
function request(opts, callback) {
|
|
36513
36539
|
if (callback === void 0) {
|
|
36514
|
-
return new Promise((
|
|
36540
|
+
return new Promise((resolve19, reject) => {
|
|
36515
36541
|
request.call(this, opts, (err, data) => {
|
|
36516
|
-
return err ? reject(err) :
|
|
36542
|
+
return err ? reject(err) : resolve19(data);
|
|
36517
36543
|
});
|
|
36518
36544
|
});
|
|
36519
36545
|
}
|
|
@@ -36686,9 +36712,9 @@ var require_api_stream = __commonJS({
|
|
|
36686
36712
|
};
|
|
36687
36713
|
function stream(opts, factory, callback) {
|
|
36688
36714
|
if (callback === void 0) {
|
|
36689
|
-
return new Promise((
|
|
36715
|
+
return new Promise((resolve19, reject) => {
|
|
36690
36716
|
stream.call(this, opts, factory, (err, data) => {
|
|
36691
|
-
return err ? reject(err) :
|
|
36717
|
+
return err ? reject(err) : resolve19(data);
|
|
36692
36718
|
});
|
|
36693
36719
|
});
|
|
36694
36720
|
}
|
|
@@ -36969,9 +36995,9 @@ var require_api_upgrade = __commonJS({
|
|
|
36969
36995
|
};
|
|
36970
36996
|
function upgrade(opts, callback) {
|
|
36971
36997
|
if (callback === void 0) {
|
|
36972
|
-
return new Promise((
|
|
36998
|
+
return new Promise((resolve19, reject) => {
|
|
36973
36999
|
upgrade.call(this, opts, (err, data) => {
|
|
36974
|
-
return err ? reject(err) :
|
|
37000
|
+
return err ? reject(err) : resolve19(data);
|
|
36975
37001
|
});
|
|
36976
37002
|
});
|
|
36977
37003
|
}
|
|
@@ -37060,9 +37086,9 @@ var require_api_connect = __commonJS({
|
|
|
37060
37086
|
};
|
|
37061
37087
|
function connect(opts, callback) {
|
|
37062
37088
|
if (callback === void 0) {
|
|
37063
|
-
return new Promise((
|
|
37089
|
+
return new Promise((resolve19, reject) => {
|
|
37064
37090
|
connect.call(this, opts, (err, data) => {
|
|
37065
|
-
return err ? reject(err) :
|
|
37091
|
+
return err ? reject(err) : resolve19(data);
|
|
37066
37092
|
});
|
|
37067
37093
|
});
|
|
37068
37094
|
}
|
|
@@ -37222,20 +37248,20 @@ var require_mock_utils = __commonJS({
|
|
|
37222
37248
|
}
|
|
37223
37249
|
return true;
|
|
37224
37250
|
}
|
|
37225
|
-
function safeUrl(
|
|
37226
|
-
if (typeof
|
|
37227
|
-
return
|
|
37251
|
+
function safeUrl(path31) {
|
|
37252
|
+
if (typeof path31 !== "string") {
|
|
37253
|
+
return path31;
|
|
37228
37254
|
}
|
|
37229
|
-
const pathSegments =
|
|
37255
|
+
const pathSegments = path31.split("?");
|
|
37230
37256
|
if (pathSegments.length !== 2) {
|
|
37231
|
-
return
|
|
37257
|
+
return path31;
|
|
37232
37258
|
}
|
|
37233
37259
|
const qp = new URLSearchParams(pathSegments.pop());
|
|
37234
37260
|
qp.sort();
|
|
37235
37261
|
return [...pathSegments, qp.toString()].join("?");
|
|
37236
37262
|
}
|
|
37237
|
-
function matchKey(mockDispatch2, { path:
|
|
37238
|
-
const pathMatch = matchValue(mockDispatch2.path,
|
|
37263
|
+
function matchKey(mockDispatch2, { path: path31, method, body, headers }) {
|
|
37264
|
+
const pathMatch = matchValue(mockDispatch2.path, path31);
|
|
37239
37265
|
const methodMatch = matchValue(mockDispatch2.method, method);
|
|
37240
37266
|
const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
|
|
37241
37267
|
const headersMatch = matchHeaders(mockDispatch2, headers);
|
|
@@ -37253,7 +37279,7 @@ var require_mock_utils = __commonJS({
|
|
|
37253
37279
|
function getMockDispatch(mockDispatches, key) {
|
|
37254
37280
|
const basePath = key.query ? buildURL(key.path, key.query) : key.path;
|
|
37255
37281
|
const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
|
|
37256
|
-
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path:
|
|
37282
|
+
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path31 }) => matchValue(safeUrl(path31), resolvedPath));
|
|
37257
37283
|
if (matchedMockDispatches.length === 0) {
|
|
37258
37284
|
throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
|
|
37259
37285
|
}
|
|
@@ -37290,9 +37316,9 @@ var require_mock_utils = __commonJS({
|
|
|
37290
37316
|
}
|
|
37291
37317
|
}
|
|
37292
37318
|
function buildKey(opts) {
|
|
37293
|
-
const { path:
|
|
37319
|
+
const { path: path31, method, body, headers, query } = opts;
|
|
37294
37320
|
return {
|
|
37295
|
-
path:
|
|
37321
|
+
path: path31,
|
|
37296
37322
|
method,
|
|
37297
37323
|
body,
|
|
37298
37324
|
headers,
|
|
@@ -37741,10 +37767,10 @@ var require_pending_interceptors_formatter = __commonJS({
|
|
|
37741
37767
|
}
|
|
37742
37768
|
format(pendingInterceptors) {
|
|
37743
37769
|
const withPrettyHeaders = pendingInterceptors.map(
|
|
37744
|
-
({ method, path:
|
|
37770
|
+
({ method, path: path31, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
|
|
37745
37771
|
Method: method,
|
|
37746
37772
|
Origin: origin,
|
|
37747
|
-
Path:
|
|
37773
|
+
Path: path31,
|
|
37748
37774
|
"Status code": statusCode,
|
|
37749
37775
|
Persistent: persist ? "\u2705" : "\u274C",
|
|
37750
37776
|
Invocations: timesInvoked,
|
|
@@ -40685,7 +40711,7 @@ var require_fetch = __commonJS({
|
|
|
40685
40711
|
async function dispatch({ body }) {
|
|
40686
40712
|
const url = requestCurrentURL(request);
|
|
40687
40713
|
const agent = fetchParams.controller.dispatcher;
|
|
40688
|
-
return new Promise((
|
|
40714
|
+
return new Promise((resolve19, reject) => agent.dispatch(
|
|
40689
40715
|
{
|
|
40690
40716
|
path: url.pathname + url.search,
|
|
40691
40717
|
origin: url.origin,
|
|
@@ -40761,7 +40787,7 @@ var require_fetch = __commonJS({
|
|
|
40761
40787
|
}
|
|
40762
40788
|
}
|
|
40763
40789
|
}
|
|
40764
|
-
|
|
40790
|
+
resolve19({
|
|
40765
40791
|
status,
|
|
40766
40792
|
statusText,
|
|
40767
40793
|
headersList: headers[kHeadersList],
|
|
@@ -40804,7 +40830,7 @@ var require_fetch = __commonJS({
|
|
|
40804
40830
|
const val = headersList[n + 1].toString("latin1");
|
|
40805
40831
|
headers[kHeadersList].append(key, val);
|
|
40806
40832
|
}
|
|
40807
|
-
|
|
40833
|
+
resolve19({
|
|
40808
40834
|
status,
|
|
40809
40835
|
statusText: STATUS_CODES[status],
|
|
40810
40836
|
headersList: headers[kHeadersList],
|
|
@@ -42365,8 +42391,8 @@ var require_util6 = __commonJS({
|
|
|
42365
42391
|
}
|
|
42366
42392
|
}
|
|
42367
42393
|
}
|
|
42368
|
-
function validateCookiePath(
|
|
42369
|
-
for (const char of
|
|
42394
|
+
function validateCookiePath(path31) {
|
|
42395
|
+
for (const char of path31) {
|
|
42370
42396
|
const code = char.charCodeAt(0);
|
|
42371
42397
|
if (code < 33 || char === ";") {
|
|
42372
42398
|
throw new Error("Invalid cookie path");
|
|
@@ -43163,9 +43189,9 @@ var require_connection = __commonJS({
|
|
|
43163
43189
|
channels.open = diagnosticsChannel.channel("undici:websocket:open");
|
|
43164
43190
|
channels.close = diagnosticsChannel.channel("undici:websocket:close");
|
|
43165
43191
|
channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error");
|
|
43166
|
-
var
|
|
43192
|
+
var crypto4;
|
|
43167
43193
|
try {
|
|
43168
|
-
|
|
43194
|
+
crypto4 = require("crypto");
|
|
43169
43195
|
} catch {
|
|
43170
43196
|
}
|
|
43171
43197
|
function establishWebSocketConnection(url, protocols, ws, onEstablish, options) {
|
|
@@ -43184,7 +43210,7 @@ var require_connection = __commonJS({
|
|
|
43184
43210
|
const headersList = new Headers(options.headers)[kHeadersList];
|
|
43185
43211
|
request.headersList = headersList;
|
|
43186
43212
|
}
|
|
43187
|
-
const keyValue =
|
|
43213
|
+
const keyValue = crypto4.randomBytes(16).toString("base64");
|
|
43188
43214
|
request.headersList.append("sec-websocket-key", keyValue);
|
|
43189
43215
|
request.headersList.append("sec-websocket-version", "13");
|
|
43190
43216
|
for (const protocol of protocols) {
|
|
@@ -43213,7 +43239,7 @@ var require_connection = __commonJS({
|
|
|
43213
43239
|
return;
|
|
43214
43240
|
}
|
|
43215
43241
|
const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
|
|
43216
|
-
const digest =
|
|
43242
|
+
const digest = crypto4.createHash("sha1").update(keyValue + uid).digest("base64");
|
|
43217
43243
|
if (secWSAccept !== digest) {
|
|
43218
43244
|
failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
|
|
43219
43245
|
return;
|
|
@@ -43293,9 +43319,9 @@ var require_frame = __commonJS({
|
|
|
43293
43319
|
"node_modules/undici/lib/websocket/frame.js"(exports2, module2) {
|
|
43294
43320
|
"use strict";
|
|
43295
43321
|
var { maxUnsigned16Bit } = require_constants5();
|
|
43296
|
-
var
|
|
43322
|
+
var crypto4;
|
|
43297
43323
|
try {
|
|
43298
|
-
|
|
43324
|
+
crypto4 = require("crypto");
|
|
43299
43325
|
} catch {
|
|
43300
43326
|
}
|
|
43301
43327
|
var WebsocketFrameSend = class {
|
|
@@ -43304,7 +43330,7 @@ var require_frame = __commonJS({
|
|
|
43304
43330
|
*/
|
|
43305
43331
|
constructor(data) {
|
|
43306
43332
|
this.frameData = data;
|
|
43307
|
-
this.maskKey =
|
|
43333
|
+
this.maskKey = crypto4.randomBytes(4);
|
|
43308
43334
|
}
|
|
43309
43335
|
createFrame(opcode) {
|
|
43310
43336
|
const bodyLength = this.frameData?.byteLength ?? 0;
|
|
@@ -44046,11 +44072,11 @@ var require_undici = __commonJS({
|
|
|
44046
44072
|
if (typeof opts.path !== "string") {
|
|
44047
44073
|
throw new InvalidArgumentError("invalid opts.path");
|
|
44048
44074
|
}
|
|
44049
|
-
let
|
|
44075
|
+
let path31 = opts.path;
|
|
44050
44076
|
if (!opts.path.startsWith("/")) {
|
|
44051
|
-
|
|
44077
|
+
path31 = `/${path31}`;
|
|
44052
44078
|
}
|
|
44053
|
-
url = new URL(util.parseOrigin(url).origin +
|
|
44079
|
+
url = new URL(util.parseOrigin(url).origin + path31);
|
|
44054
44080
|
} else {
|
|
44055
44081
|
if (!opts) {
|
|
44056
44082
|
opts = typeof url === "object" ? url : {};
|
|
@@ -44599,7 +44625,7 @@ var init_mcp_check_provider = __esm({
|
|
|
44599
44625
|
logger.warn(
|
|
44600
44626
|
`MCP ${transportName} failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${error instanceof Error ? error.message : String(error)}`
|
|
44601
44627
|
);
|
|
44602
|
-
await new Promise((
|
|
44628
|
+
await new Promise((resolve19) => setTimeout(resolve19, delay));
|
|
44603
44629
|
attempt += 1;
|
|
44604
44630
|
} finally {
|
|
44605
44631
|
try {
|
|
@@ -44881,7 +44907,7 @@ async function acquirePromptLock() {
|
|
|
44881
44907
|
activePrompt = true;
|
|
44882
44908
|
return;
|
|
44883
44909
|
}
|
|
44884
|
-
await new Promise((
|
|
44910
|
+
await new Promise((resolve19) => waiters.push(resolve19));
|
|
44885
44911
|
activePrompt = true;
|
|
44886
44912
|
}
|
|
44887
44913
|
function releasePromptLock() {
|
|
@@ -44891,7 +44917,7 @@ function releasePromptLock() {
|
|
|
44891
44917
|
}
|
|
44892
44918
|
async function interactivePrompt(options) {
|
|
44893
44919
|
await acquirePromptLock();
|
|
44894
|
-
return new Promise((
|
|
44920
|
+
return new Promise((resolve19, reject) => {
|
|
44895
44921
|
const dbg = process.env.VISOR_DEBUG === "true";
|
|
44896
44922
|
try {
|
|
44897
44923
|
if (dbg) {
|
|
@@ -44978,12 +45004,12 @@ async function interactivePrompt(options) {
|
|
|
44978
45004
|
};
|
|
44979
45005
|
const finish = (value) => {
|
|
44980
45006
|
cleanup();
|
|
44981
|
-
|
|
45007
|
+
resolve19(value);
|
|
44982
45008
|
};
|
|
44983
45009
|
if (options.timeout && options.timeout > 0) {
|
|
44984
45010
|
timeoutId = setTimeout(() => {
|
|
44985
45011
|
cleanup();
|
|
44986
|
-
if (defaultValue !== void 0) return
|
|
45012
|
+
if (defaultValue !== void 0) return resolve19(defaultValue);
|
|
44987
45013
|
return reject(new Error("Input timeout"));
|
|
44988
45014
|
}, options.timeout);
|
|
44989
45015
|
}
|
|
@@ -45115,7 +45141,7 @@ async function interactivePrompt(options) {
|
|
|
45115
45141
|
});
|
|
45116
45142
|
}
|
|
45117
45143
|
async function simplePrompt(prompt) {
|
|
45118
|
-
return new Promise((
|
|
45144
|
+
return new Promise((resolve19) => {
|
|
45119
45145
|
const rl = readline.createInterface({
|
|
45120
45146
|
input: process.stdin,
|
|
45121
45147
|
output: process.stdout
|
|
@@ -45131,7 +45157,7 @@ async function simplePrompt(prompt) {
|
|
|
45131
45157
|
rl.question(`${prompt}
|
|
45132
45158
|
> `, (answer) => {
|
|
45133
45159
|
rl.close();
|
|
45134
|
-
|
|
45160
|
+
resolve19(answer.trim());
|
|
45135
45161
|
});
|
|
45136
45162
|
});
|
|
45137
45163
|
}
|
|
@@ -45299,7 +45325,7 @@ function isStdinAvailable() {
|
|
|
45299
45325
|
return !process.stdin.isTTY;
|
|
45300
45326
|
}
|
|
45301
45327
|
async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
45302
|
-
return new Promise((
|
|
45328
|
+
return new Promise((resolve19, reject) => {
|
|
45303
45329
|
let data = "";
|
|
45304
45330
|
let timeoutId;
|
|
45305
45331
|
if (timeout) {
|
|
@@ -45326,7 +45352,7 @@ async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
|
45326
45352
|
};
|
|
45327
45353
|
const onEnd = () => {
|
|
45328
45354
|
cleanup();
|
|
45329
|
-
|
|
45355
|
+
resolve19(data.trim());
|
|
45330
45356
|
};
|
|
45331
45357
|
const onError = (err) => {
|
|
45332
45358
|
cleanup();
|
|
@@ -49444,23 +49470,23 @@ __export(renderer_schema_exports, {
|
|
|
49444
49470
|
});
|
|
49445
49471
|
async function loadRendererSchema(name) {
|
|
49446
49472
|
try {
|
|
49447
|
-
const
|
|
49448
|
-
const
|
|
49473
|
+
const fs27 = await import("fs/promises");
|
|
49474
|
+
const path31 = await import("path");
|
|
49449
49475
|
const sanitized = String(name).replace(/[^a-zA-Z0-9-]/g, "");
|
|
49450
49476
|
if (!sanitized) return void 0;
|
|
49451
49477
|
const candidates = [
|
|
49452
49478
|
// When bundled with ncc, __dirname is dist/ and output/ is at dist/output/
|
|
49453
|
-
|
|
49479
|
+
path31.join(__dirname, "output", sanitized, "schema.json"),
|
|
49454
49480
|
// When running from source, __dirname is src/state-machine/dispatch/ and output/ is at output/
|
|
49455
|
-
|
|
49481
|
+
path31.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
|
|
49456
49482
|
// When running from a checkout with output/ folder copied to CWD
|
|
49457
|
-
|
|
49483
|
+
path31.join(process.cwd(), "output", sanitized, "schema.json"),
|
|
49458
49484
|
// Fallback: cwd/dist/output/
|
|
49459
|
-
|
|
49485
|
+
path31.join(process.cwd(), "dist", "output", sanitized, "schema.json")
|
|
49460
49486
|
];
|
|
49461
49487
|
for (const p of candidates) {
|
|
49462
49488
|
try {
|
|
49463
|
-
const raw = await
|
|
49489
|
+
const raw = await fs27.readFile(p, "utf-8");
|
|
49464
49490
|
return JSON.parse(raw);
|
|
49465
49491
|
} catch {
|
|
49466
49492
|
}
|
|
@@ -51879,8 +51905,8 @@ function updateStats2(results, state, isForEachIteration = false) {
|
|
|
51879
51905
|
async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
51880
51906
|
try {
|
|
51881
51907
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
51882
|
-
const
|
|
51883
|
-
const
|
|
51908
|
+
const fs27 = await import("fs/promises");
|
|
51909
|
+
const path31 = await import("path");
|
|
51884
51910
|
const schemaRaw = checkConfig.schema || "plain";
|
|
51885
51911
|
const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
|
|
51886
51912
|
let templateContent;
|
|
@@ -51889,27 +51915,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
|
51889
51915
|
logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
|
|
51890
51916
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
51891
51917
|
const file = String(checkConfig.template.file);
|
|
51892
|
-
const resolved =
|
|
51893
|
-
templateContent = await
|
|
51918
|
+
const resolved = path31.resolve(process.cwd(), file);
|
|
51919
|
+
templateContent = await fs27.readFile(resolved, "utf-8");
|
|
51894
51920
|
logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
|
|
51895
51921
|
} else if (schema && schema !== "plain") {
|
|
51896
51922
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
51897
51923
|
if (sanitized) {
|
|
51898
51924
|
const candidatePaths = [
|
|
51899
|
-
|
|
51925
|
+
path31.join(__dirname, "output", sanitized, "template.liquid"),
|
|
51900
51926
|
// bundled: dist/output/
|
|
51901
|
-
|
|
51927
|
+
path31.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
51902
51928
|
// source (from state-machine/states)
|
|
51903
|
-
|
|
51929
|
+
path31.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
|
|
51904
51930
|
// source (alternate)
|
|
51905
|
-
|
|
51931
|
+
path31.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
51906
51932
|
// fallback: cwd/output/
|
|
51907
|
-
|
|
51933
|
+
path31.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
51908
51934
|
// fallback: cwd/dist/output/
|
|
51909
51935
|
];
|
|
51910
51936
|
for (const p of candidatePaths) {
|
|
51911
51937
|
try {
|
|
51912
|
-
templateContent = await
|
|
51938
|
+
templateContent = await fs27.readFile(p, "utf-8");
|
|
51913
51939
|
if (templateContent) {
|
|
51914
51940
|
logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
|
|
51915
51941
|
break;
|
|
@@ -54049,8 +54075,8 @@ var init_workspace_manager = __esm({
|
|
|
54049
54075
|
);
|
|
54050
54076
|
if (this.cleanupRequested && this.activeOperations === 0) {
|
|
54051
54077
|
logger.debug(`[Workspace] All references released, proceeding with deferred cleanup`);
|
|
54052
|
-
for (const
|
|
54053
|
-
|
|
54078
|
+
for (const resolve19 of this.cleanupResolvers) {
|
|
54079
|
+
resolve19();
|
|
54054
54080
|
}
|
|
54055
54081
|
this.cleanupResolvers = [];
|
|
54056
54082
|
}
|
|
@@ -54207,19 +54233,19 @@ var init_workspace_manager = __esm({
|
|
|
54207
54233
|
);
|
|
54208
54234
|
this.cleanupRequested = true;
|
|
54209
54235
|
await Promise.race([
|
|
54210
|
-
new Promise((
|
|
54236
|
+
new Promise((resolve19) => {
|
|
54211
54237
|
if (this.activeOperations === 0) {
|
|
54212
|
-
|
|
54238
|
+
resolve19();
|
|
54213
54239
|
} else {
|
|
54214
|
-
this.cleanupResolvers.push(
|
|
54240
|
+
this.cleanupResolvers.push(resolve19);
|
|
54215
54241
|
}
|
|
54216
54242
|
}),
|
|
54217
|
-
new Promise((
|
|
54243
|
+
new Promise((resolve19) => {
|
|
54218
54244
|
setTimeout(() => {
|
|
54219
54245
|
logger.warn(
|
|
54220
54246
|
`[Workspace] Cleanup timeout after ${timeout}ms, proceeding anyway (${this.activeOperations} operations still active)`
|
|
54221
54247
|
);
|
|
54222
|
-
|
|
54248
|
+
resolve19();
|
|
54223
54249
|
}, timeout);
|
|
54224
54250
|
})
|
|
54225
54251
|
]);
|
|
@@ -54697,6 +54723,1380 @@ var init_build_engine_context = __esm({
|
|
|
54697
54723
|
}
|
|
54698
54724
|
});
|
|
54699
54725
|
|
|
54726
|
+
// src/policy/default-engine.ts
|
|
54727
|
+
var DefaultPolicyEngine;
|
|
54728
|
+
var init_default_engine = __esm({
|
|
54729
|
+
"src/policy/default-engine.ts"() {
|
|
54730
|
+
"use strict";
|
|
54731
|
+
DefaultPolicyEngine = class {
|
|
54732
|
+
async initialize(_config) {
|
|
54733
|
+
}
|
|
54734
|
+
async evaluateCheckExecution(_checkId, _checkConfig) {
|
|
54735
|
+
return { allowed: true };
|
|
54736
|
+
}
|
|
54737
|
+
async evaluateToolInvocation(_serverName, _methodName, _transport) {
|
|
54738
|
+
return { allowed: true };
|
|
54739
|
+
}
|
|
54740
|
+
async evaluateCapabilities(_checkId, _capabilities) {
|
|
54741
|
+
return { allowed: true };
|
|
54742
|
+
}
|
|
54743
|
+
async shutdown() {
|
|
54744
|
+
}
|
|
54745
|
+
};
|
|
54746
|
+
}
|
|
54747
|
+
});
|
|
54748
|
+
|
|
54749
|
+
// src/enterprise/license/validator.ts
|
|
54750
|
+
var validator_exports = {};
|
|
54751
|
+
__export(validator_exports, {
|
|
54752
|
+
LicenseValidator: () => LicenseValidator
|
|
54753
|
+
});
|
|
54754
|
+
var crypto2, fs21, path25, LicenseValidator;
|
|
54755
|
+
var init_validator = __esm({
|
|
54756
|
+
"src/enterprise/license/validator.ts"() {
|
|
54757
|
+
"use strict";
|
|
54758
|
+
crypto2 = __toESM(require("crypto"));
|
|
54759
|
+
fs21 = __toESM(require("fs"));
|
|
54760
|
+
path25 = __toESM(require("path"));
|
|
54761
|
+
LicenseValidator = class _LicenseValidator {
|
|
54762
|
+
/** Ed25519 public key for license verification (PEM format). */
|
|
54763
|
+
static PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n-----END PUBLIC KEY-----\n";
|
|
54764
|
+
cache = null;
|
|
54765
|
+
static CACHE_TTL = 5 * 60 * 1e3;
|
|
54766
|
+
// 5 minutes
|
|
54767
|
+
static GRACE_PERIOD = 72 * 3600 * 1e3;
|
|
54768
|
+
// 72 hours after expiry
|
|
54769
|
+
/**
|
|
54770
|
+
* Load and validate license from environment or file.
|
|
54771
|
+
*
|
|
54772
|
+
* Resolution order:
|
|
54773
|
+
* 1. VISOR_LICENSE env var (JWT string)
|
|
54774
|
+
* 2. VISOR_LICENSE_FILE env var (path to file)
|
|
54775
|
+
* 3. .visor-license in project root (cwd)
|
|
54776
|
+
* 4. .visor-license in ~/.config/visor/
|
|
54777
|
+
*/
|
|
54778
|
+
async loadAndValidate() {
|
|
54779
|
+
if (this.cache && Date.now() - this.cache.validatedAt < _LicenseValidator.CACHE_TTL) {
|
|
54780
|
+
return this.cache.payload;
|
|
54781
|
+
}
|
|
54782
|
+
const token = this.resolveToken();
|
|
54783
|
+
if (!token) return null;
|
|
54784
|
+
const payload = this.verifyAndDecode(token);
|
|
54785
|
+
if (!payload) return null;
|
|
54786
|
+
this.cache = { payload, validatedAt: Date.now() };
|
|
54787
|
+
return payload;
|
|
54788
|
+
}
|
|
54789
|
+
/** Check if a specific feature is licensed */
|
|
54790
|
+
hasFeature(feature) {
|
|
54791
|
+
if (!this.cache) return false;
|
|
54792
|
+
return this.cache.payload.features.includes(feature);
|
|
54793
|
+
}
|
|
54794
|
+
/** Check if license is valid (with grace period) */
|
|
54795
|
+
isValid() {
|
|
54796
|
+
if (!this.cache) return false;
|
|
54797
|
+
const now = Date.now();
|
|
54798
|
+
const expiryMs = this.cache.payload.exp * 1e3;
|
|
54799
|
+
return now < expiryMs + _LicenseValidator.GRACE_PERIOD;
|
|
54800
|
+
}
|
|
54801
|
+
/** Check if the license is within its grace period (expired but still valid) */
|
|
54802
|
+
isInGracePeriod() {
|
|
54803
|
+
if (!this.cache) return false;
|
|
54804
|
+
const now = Date.now();
|
|
54805
|
+
const expiryMs = this.cache.payload.exp * 1e3;
|
|
54806
|
+
return now >= expiryMs && now < expiryMs + _LicenseValidator.GRACE_PERIOD;
|
|
54807
|
+
}
|
|
54808
|
+
resolveToken() {
|
|
54809
|
+
if (process.env.VISOR_LICENSE) {
|
|
54810
|
+
return process.env.VISOR_LICENSE.trim();
|
|
54811
|
+
}
|
|
54812
|
+
if (process.env.VISOR_LICENSE_FILE) {
|
|
54813
|
+
const resolved = path25.resolve(process.env.VISOR_LICENSE_FILE);
|
|
54814
|
+
const home2 = process.env.HOME || process.env.USERPROFILE || "";
|
|
54815
|
+
const allowedPrefixes = [path25.normalize(process.cwd())];
|
|
54816
|
+
if (home2) allowedPrefixes.push(path25.normalize(path25.join(home2, ".config", "visor")));
|
|
54817
|
+
let realPath;
|
|
54818
|
+
try {
|
|
54819
|
+
realPath = fs21.realpathSync(resolved);
|
|
54820
|
+
} catch {
|
|
54821
|
+
return null;
|
|
54822
|
+
}
|
|
54823
|
+
const isSafe = allowedPrefixes.some(
|
|
54824
|
+
(prefix) => realPath === prefix || realPath.startsWith(prefix + path25.sep)
|
|
54825
|
+
);
|
|
54826
|
+
if (!isSafe) return null;
|
|
54827
|
+
return this.readFile(realPath);
|
|
54828
|
+
}
|
|
54829
|
+
const cwdPath = path25.join(process.cwd(), ".visor-license");
|
|
54830
|
+
const cwdToken = this.readFile(cwdPath);
|
|
54831
|
+
if (cwdToken) return cwdToken;
|
|
54832
|
+
const home = process.env.HOME || process.env.USERPROFILE || "";
|
|
54833
|
+
if (home) {
|
|
54834
|
+
const configPath = path25.join(home, ".config", "visor", ".visor-license");
|
|
54835
|
+
const configToken = this.readFile(configPath);
|
|
54836
|
+
if (configToken) return configToken;
|
|
54837
|
+
}
|
|
54838
|
+
return null;
|
|
54839
|
+
}
|
|
54840
|
+
readFile(filePath) {
|
|
54841
|
+
try {
|
|
54842
|
+
return fs21.readFileSync(filePath, "utf-8").trim();
|
|
54843
|
+
} catch {
|
|
54844
|
+
return null;
|
|
54845
|
+
}
|
|
54846
|
+
}
|
|
54847
|
+
verifyAndDecode(token) {
|
|
54848
|
+
try {
|
|
54849
|
+
const parts = token.split(".");
|
|
54850
|
+
if (parts.length !== 3) return null;
|
|
54851
|
+
const [headerB64, payloadB64, signatureB64] = parts;
|
|
54852
|
+
const header = JSON.parse(Buffer.from(headerB64, "base64url").toString());
|
|
54853
|
+
if (header.alg !== "EdDSA") return null;
|
|
54854
|
+
const data = `${headerB64}.${payloadB64}`;
|
|
54855
|
+
const signature = Buffer.from(signatureB64, "base64url");
|
|
54856
|
+
const publicKey = crypto2.createPublicKey(_LicenseValidator.PUBLIC_KEY);
|
|
54857
|
+
if (publicKey.asymmetricKeyType !== "ed25519") {
|
|
54858
|
+
return null;
|
|
54859
|
+
}
|
|
54860
|
+
const isValid = crypto2.verify(null, Buffer.from(data), publicKey, signature);
|
|
54861
|
+
if (!isValid) return null;
|
|
54862
|
+
const payload = JSON.parse(Buffer.from(payloadB64, "base64url").toString());
|
|
54863
|
+
if (!payload.org || !Array.isArray(payload.features) || typeof payload.exp !== "number" || typeof payload.iat !== "number" || !payload.sub) {
|
|
54864
|
+
return null;
|
|
54865
|
+
}
|
|
54866
|
+
const now = Date.now();
|
|
54867
|
+
const expiryMs = payload.exp * 1e3;
|
|
54868
|
+
if (now >= expiryMs + _LicenseValidator.GRACE_PERIOD) {
|
|
54869
|
+
return null;
|
|
54870
|
+
}
|
|
54871
|
+
return payload;
|
|
54872
|
+
} catch {
|
|
54873
|
+
return null;
|
|
54874
|
+
}
|
|
54875
|
+
}
|
|
54876
|
+
};
|
|
54877
|
+
}
|
|
54878
|
+
});
|
|
54879
|
+
|
|
54880
|
+
// src/enterprise/policy/opa-compiler.ts
|
|
54881
|
+
var fs22, path26, os2, crypto3, import_child_process8, OpaCompiler;
|
|
54882
|
+
var init_opa_compiler = __esm({
|
|
54883
|
+
"src/enterprise/policy/opa-compiler.ts"() {
|
|
54884
|
+
"use strict";
|
|
54885
|
+
fs22 = __toESM(require("fs"));
|
|
54886
|
+
path26 = __toESM(require("path"));
|
|
54887
|
+
os2 = __toESM(require("os"));
|
|
54888
|
+
crypto3 = __toESM(require("crypto"));
|
|
54889
|
+
import_child_process8 = require("child_process");
|
|
54890
|
+
OpaCompiler = class _OpaCompiler {
|
|
54891
|
+
static CACHE_DIR = path26.join(os2.tmpdir(), "visor-opa-cache");
|
|
54892
|
+
/**
|
|
54893
|
+
* Resolve the input paths to WASM bytes.
|
|
54894
|
+
*
|
|
54895
|
+
* Strategy:
|
|
54896
|
+
* 1. If any path is a .wasm file, read it directly
|
|
54897
|
+
* 2. If a directory contains policy.wasm, read it
|
|
54898
|
+
* 3. Otherwise, collect all .rego files and auto-compile via `opa build`
|
|
54899
|
+
*/
|
|
54900
|
+
async resolveWasmBytes(paths) {
|
|
54901
|
+
const regoFiles = [];
|
|
54902
|
+
for (const p of paths) {
|
|
54903
|
+
const resolved = path26.resolve(p);
|
|
54904
|
+
if (path26.normalize(resolved).includes("..")) {
|
|
54905
|
+
throw new Error(`Policy path contains traversal sequences: ${p}`);
|
|
54906
|
+
}
|
|
54907
|
+
if (resolved.endsWith(".wasm") && fs22.existsSync(resolved)) {
|
|
54908
|
+
return fs22.readFileSync(resolved);
|
|
54909
|
+
}
|
|
54910
|
+
if (!fs22.existsSync(resolved)) continue;
|
|
54911
|
+
const stat2 = fs22.statSync(resolved);
|
|
54912
|
+
if (stat2.isDirectory()) {
|
|
54913
|
+
const wasmCandidate = path26.join(resolved, "policy.wasm");
|
|
54914
|
+
if (fs22.existsSync(wasmCandidate)) {
|
|
54915
|
+
return fs22.readFileSync(wasmCandidate);
|
|
54916
|
+
}
|
|
54917
|
+
const files = fs22.readdirSync(resolved);
|
|
54918
|
+
for (const f of files) {
|
|
54919
|
+
if (f.endsWith(".rego")) {
|
|
54920
|
+
regoFiles.push(path26.join(resolved, f));
|
|
54921
|
+
}
|
|
54922
|
+
}
|
|
54923
|
+
} else if (resolved.endsWith(".rego")) {
|
|
54924
|
+
regoFiles.push(resolved);
|
|
54925
|
+
}
|
|
54926
|
+
}
|
|
54927
|
+
if (regoFiles.length === 0) {
|
|
54928
|
+
throw new Error(
|
|
54929
|
+
`OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(", ")}`
|
|
54930
|
+
);
|
|
54931
|
+
}
|
|
54932
|
+
return this.compileRego(regoFiles);
|
|
54933
|
+
}
|
|
54934
|
+
/**
|
|
54935
|
+
* Auto-compile .rego files to a WASM bundle using the `opa` CLI.
|
|
54936
|
+
*
|
|
54937
|
+
* Caches the compiled bundle based on a content hash of all input .rego files
|
|
54938
|
+
* so subsequent runs skip compilation if policies haven't changed.
|
|
54939
|
+
*/
|
|
54940
|
+
compileRego(regoFiles) {
|
|
54941
|
+
try {
|
|
54942
|
+
(0, import_child_process8.execFileSync)("opa", ["version"], { stdio: "pipe" });
|
|
54943
|
+
} catch {
|
|
54944
|
+
throw new Error(
|
|
54945
|
+
"OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\nOr pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz " + regoFiles.join(" ")
|
|
54946
|
+
);
|
|
54947
|
+
}
|
|
54948
|
+
const hash = crypto3.createHash("sha256");
|
|
54949
|
+
for (const f of regoFiles.sort()) {
|
|
54950
|
+
hash.update(fs22.readFileSync(f));
|
|
54951
|
+
hash.update(f);
|
|
54952
|
+
}
|
|
54953
|
+
const cacheKey = hash.digest("hex").slice(0, 16);
|
|
54954
|
+
const cacheDir = _OpaCompiler.CACHE_DIR;
|
|
54955
|
+
const cachedWasm = path26.join(cacheDir, `${cacheKey}.wasm`);
|
|
54956
|
+
if (fs22.existsSync(cachedWasm)) {
|
|
54957
|
+
return fs22.readFileSync(cachedWasm);
|
|
54958
|
+
}
|
|
54959
|
+
fs22.mkdirSync(cacheDir, { recursive: true });
|
|
54960
|
+
const bundleTar = path26.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
|
|
54961
|
+
try {
|
|
54962
|
+
const args = [
|
|
54963
|
+
"build",
|
|
54964
|
+
"-t",
|
|
54965
|
+
"wasm",
|
|
54966
|
+
"-e",
|
|
54967
|
+
"visor",
|
|
54968
|
+
// entrypoint: the visor package tree
|
|
54969
|
+
"-o",
|
|
54970
|
+
bundleTar,
|
|
54971
|
+
...regoFiles
|
|
54972
|
+
];
|
|
54973
|
+
(0, import_child_process8.execFileSync)("opa", args, {
|
|
54974
|
+
stdio: "pipe",
|
|
54975
|
+
timeout: 3e4
|
|
54976
|
+
});
|
|
54977
|
+
} catch (err) {
|
|
54978
|
+
const stderr = err?.stderr?.toString() || "";
|
|
54979
|
+
throw new Error(
|
|
54980
|
+
`Failed to compile .rego files to WASM:
|
|
54981
|
+
${stderr}
|
|
54982
|
+
Ensure your .rego files are valid and the \`opa\` CLI is installed.`
|
|
54983
|
+
);
|
|
54984
|
+
}
|
|
54985
|
+
try {
|
|
54986
|
+
(0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "/policy.wasm"], {
|
|
54987
|
+
stdio: "pipe"
|
|
54988
|
+
});
|
|
54989
|
+
const extractedWasm = path26.join(cacheDir, "policy.wasm");
|
|
54990
|
+
if (fs22.existsSync(extractedWasm)) {
|
|
54991
|
+
fs22.renameSync(extractedWasm, cachedWasm);
|
|
54992
|
+
}
|
|
54993
|
+
} catch {
|
|
54994
|
+
try {
|
|
54995
|
+
(0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "policy.wasm"], {
|
|
54996
|
+
stdio: "pipe"
|
|
54997
|
+
});
|
|
54998
|
+
const extractedWasm = path26.join(cacheDir, "policy.wasm");
|
|
54999
|
+
if (fs22.existsSync(extractedWasm)) {
|
|
55000
|
+
fs22.renameSync(extractedWasm, cachedWasm);
|
|
55001
|
+
}
|
|
55002
|
+
} catch (err2) {
|
|
55003
|
+
throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
|
|
55004
|
+
}
|
|
55005
|
+
}
|
|
55006
|
+
try {
|
|
55007
|
+
fs22.unlinkSync(bundleTar);
|
|
55008
|
+
} catch {
|
|
55009
|
+
}
|
|
55010
|
+
if (!fs22.existsSync(cachedWasm)) {
|
|
55011
|
+
throw new Error("OPA build succeeded but policy.wasm was not found in the bundle");
|
|
55012
|
+
}
|
|
55013
|
+
return fs22.readFileSync(cachedWasm);
|
|
55014
|
+
}
|
|
55015
|
+
};
|
|
55016
|
+
}
|
|
55017
|
+
});
|
|
55018
|
+
|
|
55019
|
+
// src/enterprise/policy/opa-wasm-evaluator.ts
var fs23, path27, OpaWasmEvaluator;
// Lazy module initializer (esbuild __esm pattern): the body runs once, on
// first access, and populates the module-scope bindings above.
var init_opa_wasm_evaluator = __esm({
  "src/enterprise/policy/opa-wasm-evaluator.ts"() {
    "use strict";
    fs23 = __toESM(require("fs"));
    path27 = __toESM(require("path"));
    init_opa_compiler();
    // Evaluates OPA policies compiled to WebAssembly, loaded via the optional
    // @open-policy-agent/opa-wasm peer dependency.
    OpaWasmEvaluator = class {
      // Loaded WASM policy instance; null until initialize() succeeds.
      policy = null;
      // External data document passed to the policy on every evaluate().
      dataDocument = {};
      compiler = new OpaCompiler();
      /**
       * Compile/resolve the given .wasm/.rego path(s) into WASM bytes and load
       * them through @open-policy-agent/opa-wasm.
       *
       * @param rulesPath - A single path or an array of paths.
       * @throws Error with install instructions when the opa-wasm package is
       *         not installed; rethrows any other load failure unchanged.
       */
      async initialize(rulesPath) {
        const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
        const wasmBytes = await this.compiler.resolveWasmBytes(paths);
        try {
          // createRequire(__filename) resolves the optional dependency relative
          // to this bundle rather than the bundler's static graph.
          const { createRequire } = require("module");
          const runtimeRequire = createRequire(__filename);
          const opaWasm = runtimeRequire("@open-policy-agent/opa-wasm");
          // Support both CJS and ESM-interop shapes of the package.
          const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
          if (!loadPolicy) {
            throw new Error("loadPolicy not found in @open-policy-agent/opa-wasm");
          }
          this.policy = await loadPolicy(wasmBytes);
        } catch (err) {
          // Translate "package missing" into an actionable message; anything
          // else (e.g. a corrupt wasm blob) propagates as-is.
          if (err?.code === "MODULE_NOT_FOUND" || err?.code === "ERR_MODULE_NOT_FOUND") {
            throw new Error(
              "OPA WASM evaluator requires @open-policy-agent/opa-wasm. Install it with: npm install @open-policy-agent/opa-wasm"
            );
          }
          throw err;
        }
      }
      /**
       * Load external data from a JSON file to use as the OPA data document.
       * The loaded data will be passed to `policy.setData()` during evaluation,
       * making it available in Rego via `data.<key>`.
       *
       * @throws Error when the path looks unsafe, the file is missing, exceeds
       *         10MB, is not valid JSON, or is not a JSON object.
       */
      loadData(dataPath) {
        const resolved = path27.resolve(dataPath);
        // Defense-in-depth: path.resolve already collapses ".." segments, so
        // this check rarely fires — TODO confirm it is still intended.
        if (path27.normalize(resolved).includes("..")) {
          throw new Error(`Data path contains traversal sequences: ${dataPath}`);
        }
        if (!fs23.existsSync(resolved)) {
          throw new Error(`OPA data file not found: ${resolved}`);
        }
        const stat2 = fs23.statSync(resolved);
        // Hard cap to avoid loading arbitrarily large documents into memory.
        if (stat2.size > 10 * 1024 * 1024) {
          throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat2.size} bytes)`);
        }
        const raw = fs23.readFileSync(resolved, "utf-8");
        try {
          const parsed = JSON.parse(raw);
          if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
            throw new Error("OPA data file must contain a JSON object (not an array or primitive)");
          }
          this.dataDocument = parsed;
        } catch (err) {
          // The shape error thrown just above also lands here; re-throw it
          // verbatim (matched by message prefix) instead of wrapping it as a
          // parse failure.
          if (err.message.startsWith("OPA data file must")) {
            throw err;
          }
          throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
        }
      }
      /**
       * Evaluate the loaded policy against an input document.
       *
       * @returns The first result in the OPA result set, or undefined when the
       *          policy produced no results.
       * @throws Error when initialize() has not been called successfully.
       */
      async evaluate(input) {
        if (!this.policy) {
          throw new Error("OPA WASM evaluator not initialized");
        }
        // Data is (re)applied on every call so loadData() can be invoked at
        // any time relative to evaluate().
        this.policy.setData(this.dataDocument);
        const resultSet = this.policy.evaluate(input);
        if (Array.isArray(resultSet) && resultSet.length > 0) {
          return resultSet[0].result;
        }
        return void 0;
      }
      /**
       * Release the WASM policy. Tries close() then free() — whichever the
       * installed opa-wasm version exposes — and swallows cleanup errors so
       * shutdown never throws.
       */
      async shutdown() {
        if (this.policy) {
          if (typeof this.policy.close === "function") {
            try {
              this.policy.close();
            } catch {
            }
          } else if (typeof this.policy.free === "function") {
            try {
              this.policy.free();
            } catch {
            }
          }
        }
        this.policy = null;
      }
    };
  }
});
|
|
55113
|
+
|
|
55114
|
+
// src/enterprise/policy/opa-http-evaluator.ts
var OpaHttpEvaluator;
// Lazy module initializer (esbuild __esm pattern).
var init_opa_http_evaluator = __esm({
  "src/enterprise/policy/opa-http-evaluator.ts"() {
    "use strict";
    // Evaluates policy decisions against a remote OPA server through its
    // REST Data API (POST {baseUrl}/v1/data/<rule path>).
    OpaHttpEvaluator = class {
      baseUrl;
      timeout;
      /**
       * @param baseUrl - Base URL of the OPA server; must be http/https and
       *        must not target a loopback/private/internal host.
       * @param timeout - Per-request timeout in milliseconds (default 5000).
       * @throws Error on a malformed URL, non-HTTP scheme, or blocked host.
       */
      constructor(baseUrl, timeout = 5e3) {
        let parsed;
        try {
          parsed = new URL(baseUrl);
        } catch {
          throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
        }
        if (!["http:", "https:"].includes(parsed.protocol)) {
          throw new Error(
            `OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`
          );
        }
        const hostname = parsed.hostname;
        if (this.isBlockedHostname(hostname)) {
          throw new Error(
            `OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`
          );
        }
        // Strip trailing slashes so path concatenation in evaluate() is stable.
        this.baseUrl = baseUrl.replace(/\/+$/, "");
        this.timeout = timeout;
      }
      /**
       * Check if a hostname is blocked due to SSRF concerns.
       *
       * Blocks:
       * - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
       * - Link-local addresses (169.254.x.x, fe80::/10)
       * - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
       * - IPv6 unique local address literals (fc00::/7)
       * - Cloud metadata services (*.internal)
       *
       * Note: this is a literal/name check only; it does not resolve DNS, so a
       * public hostname that resolves to an internal IP is not caught here.
       */
      isBlockedHostname(hostname) {
        if (!hostname) return true;
        // URL.hostname wraps IPv6 literals in brackets; strip them first.
        const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, "");
        if (normalized === "metadata.google.internal" || normalized.endsWith(".internal")) {
          return true;
        }
        if (normalized === "localhost" || normalized === "localhost.localdomain") {
          return true;
        }
        if (normalized === "::1" || normalized === "0:0:0:0:0:0:0:1") {
          return true;
        }
        const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
        const ipv4Match = normalized.match(ipv4Pattern);
        if (ipv4Match) {
          const octets = ipv4Match.slice(1, 5).map(Number);
          // Out-of-range octets mean this is not a real IPv4 address; fall
          // through to hostname handling (returns false below).
          if (octets.some((octet) => octet > 255)) {
            return false;
          }
          const [a, b] = octets;
          if (a === 127) {
            return true;
          }
          if (a === 0) {
            return true;
          }
          if (a === 169 && b === 254) {
            return true;
          }
          if (a === 10) {
            return true;
          }
          if (a === 172 && b >= 16 && b <= 31) {
            return true;
          }
          if (a === 192 && b === 168) {
            return true;
          }
        }
        // IPv6 unique-local (fc00::/7) literals. FIX: require a colon so the
        // prefix test only matches IPv6 literals — previously any DNS name
        // beginning with "fd"/"fc" (e.g. "fcdn.example.com") was rejected.
        if ((normalized.startsWith("fd") || normalized.startsWith("fc")) && normalized.includes(":")) {
          return true;
        }
        // IPv6 link-local (fe80::/10) literals.
        if (normalized.startsWith("fe80:")) {
          return true;
        }
        return false;
      }
      /**
       * Evaluate a policy rule against an input document via OPA REST API.
       *
       * @param input - The input document to evaluate
       * @param rulePath - OPA rule path (e.g., 'visor/check/execute')
       * @returns The `result` field of OPA's response body (may be undefined)
       * @throws Error on non-2xx responses, unparseable JSON, or timeout/abort.
       */
      async evaluate(input, rulePath) {
        // Encode each path segment individually so "/" separators survive.
        const encodedPath = rulePath.split("/").map((s) => encodeURIComponent(s)).join("/");
        const url = `${this.baseUrl}/v1/data/${encodedPath}`;
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), this.timeout);
        try {
          const response = await fetch(url, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ input }),
            signal: controller.signal
          });
          if (!response.ok) {
            throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
          }
          let body;
          try {
            body = await response.json();
          } catch (jsonErr) {
            throw new Error(
              `OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`
            );
          }
          return body?.result;
        } finally {
          clearTimeout(timer);
        }
      }
      /** No resources to release for the HTTP transport. */
      async shutdown() {
      }
    };
  }
});
|
|
55240
|
+
|
|
55241
|
+
// src/enterprise/policy/policy-input-builder.ts
var PolicyInputBuilder;
// Lazy module initializer (esbuild __esm pattern).
var init_policy_input_builder = __esm({
  "src/enterprise/policy/policy-input-builder.ts"() {
    "use strict";
    // Builds the structured `input` documents handed to the OPA policy engine
    // for the various decision scopes (check execution, tool invocation,
    // capability resolution).
    PolicyInputBuilder = class {
      roles;
      actor;
      repository;
      pullRequest;
      constructor(policyConfig, actor, repository, pullRequest) {
        this.roles = policyConfig.roles || {};
        this.actor = actor;
        this.repository = repository;
        this.pullRequest = pullRequest;
      }
      /** Resolve which roles apply to the current actor. */
      resolveRoles() {
        const granted = [];
        const assoc = this.actor.authorAssociation;
        const login = this.actor.login;
        const slack = this.actor.slack;
        const actorEmail = slack?.email ? slack.email.toLowerCase() : void 0;
        for (const [roleName, cfg] of Object.entries(this.roles)) {
          // A role applies when any one identity criterion matches...
          const identityMatch =
            Boolean(cfg.author_association && assoc && cfg.author_association.includes(assoc)) ||
            Boolean(cfg.users && login && cfg.users.includes(login)) ||
            Boolean(cfg.slack_users && slack?.userId && cfg.slack_users.includes(slack.userId)) ||
            Boolean(
              cfg.emails && actorEmail && cfg.emails.some((e) => e.toLowerCase() === actorEmail)
            );
          if (!identityMatch) continue;
          // ...and, when the role is channel-scoped, the actor's Slack channel
          // must be one of the allowed channels.
          const channels = cfg.slack_channels;
          if (channels && channels.length > 0) {
            const channelId = slack?.channelId;
            if (!channelId || !channels.includes(channelId)) {
              continue;
            }
          }
          granted.push(roleName);
        }
        return granted;
      }
      /** Build the `actor` sub-document shared by every input scope. */
      buildActor() {
        const actorDoc = {
          authorAssociation: this.actor.authorAssociation,
          login: this.actor.login,
          roles: this.resolveRoles(),
          isLocalMode: this.actor.isLocalMode
        };
        // Slack identity is attached only when present.
        if (this.actor.slack) {
          actorDoc.slack = this.actor.slack;
        }
        return actorDoc;
      }
      /** Input document for the `check.execute` decision scope. */
      forCheckExecution(check) {
        const actor = this.buildActor();
        return {
          scope: "check.execute",
          check: {
            id: check.id,
            type: check.type,
            group: check.group,
            tags: check.tags,
            criticality: check.criticality,
            sandbox: check.sandbox,
            policy: check.policy
          },
          actor,
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      /** Input document for the `tool.invoke` decision scope. */
      forToolInvocation(serverName, methodName, transport) {
        const actor = this.buildActor();
        return {
          scope: "tool.invoke",
          tool: { serverName, methodName, transport },
          actor,
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      /** Input document for the `capability.resolve` decision scope. */
      forCapabilityResolve(checkId, capabilities) {
        const actor = this.buildActor();
        return {
          scope: "capability.resolve",
          check: { id: checkId, type: "ai" },
          capability: capabilities,
          actor,
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
    };
  }
});
|
|
55335
|
+
|
|
55336
|
+
// src/enterprise/policy/opa-policy-engine.ts
var opa_policy_engine_exports = {};
__export(opa_policy_engine_exports, {
  OpaPolicyEngine: () => OpaPolicyEngine
});
var OpaPolicyEngine;
// Lazy module initializer (esbuild __esm pattern).
var init_opa_policy_engine = __esm({
  "src/enterprise/policy/opa-policy-engine.ts"() {
    "use strict";
    init_opa_wasm_evaluator();
    init_opa_http_evaluator();
    init_policy_input_builder();
    // Orchestrates policy decisions: selects a local (WASM) or remote (HTTP)
    // evaluator, builds scope-specific inputs, applies a timeout, and maps
    // results/failures through the configured fallback ("deny"|"allow"|"warn").
    OpaPolicyEngine = class {
      // Active evaluator (OpaWasmEvaluator | OpaHttpEvaluator | null).
      evaluator = null;
      fallback;
      timeout;
      config;
      inputBuilder = null;
      logger = null;
      constructor(config) {
        this.config = config;
        this.fallback = config.fallback || "deny";
        this.timeout = config.timeout || 5e3;
      }
      /**
       * Build the initial actor/repo/PR context from environment variables and
       * construct the configured evaluator. With an unrecognized `engine`
       * value, no evaluator is created and all decisions default to allowed.
       */
      async initialize(config) {
        try {
          // Lazy-load the logger; failures are deliberately ignored so the
          // engine also works when logging is unavailable.
          this.logger = (init_logger(), __toCommonJS(logger_exports)).logger;
        } catch {
        }
        const actor = {
          authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
          login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
          // Absent GITHUB_ACTIONS means we are not running inside a workflow.
          isLocalMode: !process.env.GITHUB_ACTIONS
        };
        const repo = {
          owner: process.env.GITHUB_REPOSITORY_OWNER,
          // GITHUB_REPOSITORY is "owner/name"; take the name half.
          name: process.env.GITHUB_REPOSITORY?.split("/")[1],
          branch: process.env.GITHUB_HEAD_REF,
          baseBranch: process.env.GITHUB_BASE_REF,
          event: process.env.GITHUB_EVENT_NAME
        };
        const prNum = process.env.GITHUB_PR_NUMBER ? parseInt(process.env.GITHUB_PR_NUMBER, 10) : void 0;
        const pullRequest = {
          // Guard against a non-numeric GITHUB_PR_NUMBER (parseInt -> NaN).
          number: prNum !== void 0 && Number.isFinite(prNum) ? prNum : void 0
        };
        this.inputBuilder = new PolicyInputBuilder(config, actor, repo, pullRequest);
        if (config.engine === "local") {
          if (!config.rules) {
            throw new Error("OPA local mode requires `policy.rules` path to .wasm or .rego files");
          }
          const wasm = new OpaWasmEvaluator();
          await wasm.initialize(config.rules);
          if (config.data) {
            wasm.loadData(config.data);
          }
          this.evaluator = wasm;
        } else if (config.engine === "remote") {
          if (!config.url) {
            throw new Error("OPA remote mode requires `policy.url` pointing to OPA server");
          }
          this.evaluator = new OpaHttpEvaluator(config.url, this.timeout);
        } else {
          this.evaluator = null;
        }
      }
      /**
       * Update actor/repo/PR context (e.g., after PR info becomes available).
       * Called by the enterprise loader when engine context is enriched.
       */
      setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new PolicyInputBuilder(this.config, actor, repo, pullRequest);
      }
      /**
       * Decide whether a check may run. Returns { allowed: true } when no
       * evaluator is configured (policy enforcement disabled).
       */
      async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const cfg = checkConfig && typeof checkConfig === "object" ? checkConfig : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
          id: checkId,
          type: cfg.type || "ai",
          group: cfg.group,
          tags: cfg.tags,
          criticality: cfg.criticality,
          sandbox: cfg.sandbox,
          policy: policyOverride
        });
        // A per-check `policy.rule` override may redirect the rule path.
        return this.doEvaluate(input, this.resolveRulePath("check.execute", policyOverride?.rule));
      }
      /** Decide whether an MCP tool invocation is permitted. */
      async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, "visor/tool/invoke");
      }
      /** Decide which capabilities a check may resolve. */
      async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, "visor/capability/resolve");
      }
      /** Release the evaluator and drop all per-run state. */
      async shutdown() {
        if (this.evaluator && "shutdown" in this.evaluator) {
          await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
      }
      /**
       * Map a decision scope (or user override) onto an OPA rule path,
       * always rooted at the `visor/` package.
       */
      resolveRulePath(defaultScope, override) {
        if (override) {
          return override.startsWith("visor/") ? override : `visor/${override}`;
        }
        return `visor/${defaultScope.replace(/\./g, "/")}`;
      }
      /**
       * Run one evaluation with a timeout and normalize the outcome.
       * On any failure (including timeout), the configured fallback decides:
       * "allow"/"warn" permit the action, "deny" blocks it.
       */
      async doEvaluate(input, rulePath) {
        try {
          this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
          let timer;
          const timeoutPromise = new Promise((_resolve, reject) => {
            timer = setTimeout(() => reject(new Error("policy evaluation timeout")), this.timeout);
          });
          try {
            // NOTE: Promise.race only abandons the slow evaluation; it does
            // not cancel the underlying work.
            const result = await Promise.race([this.rawEvaluate(input, rulePath), timeoutPromise]);
            const decision = this.parseDecision(result);
            // "warn" fallback converts a denial into an audited allow.
            if (!decision.allowed && this.fallback === "warn") {
              decision.allowed = true;
              decision.warn = true;
              decision.reason = `audit: ${decision.reason || "policy denied"}`;
            }
            this.logger?.debug(
              `[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || "none"}`
            );
            return decision;
          } finally {
            if (timer) clearTimeout(timer);
          }
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: `policy evaluation failed, fallback=${this.fallback}`
          };
        }
      }
      /**
       * Dispatch to the active evaluator. The WASM evaluator returns the whole
       * package tree, so its result must be navigated down to the rule;
       * the HTTP evaluator takes the rule path directly.
       */
      async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof OpaWasmEvaluator) {
          const result = await this.evaluator.evaluate(input);
          return this.navigateWasmResult(result, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
      }
      /**
       * Navigate nested OPA WASM result tree to reach the specific rule's output.
       * The WASM entrypoint `-e visor` means the result root IS the visor package,
       * so we strip the `visor/` prefix and walk the remaining segments.
       */
      navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== "object") return result;
        const segments = rulePath.replace(/^visor\//, "").split("/");
        let current = result;
        for (const seg of segments) {
          if (current && typeof current === "object" && seg in current) {
            current = current[seg];
          } else {
            // Missing segment: the rule produced no output.
            return void 0;
          }
        }
        return current;
      }
      /**
       * Normalize a raw rule result into a decision object. No result at all
       * is resolved via the fallback; otherwise anything except an explicit
       * `allowed: false` counts as allowed.
       */
      parseDecision(result) {
        if (result === void 0 || result === null) {
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: this.fallback === "warn" ? "audit: no policy result" : "no policy result"
          };
        }
        const allowed = result.allowed !== false;
        const decision = {
          allowed,
          reason: result.reason
        };
        // Capability grants are carried through for capability.resolve scope.
        if (result.capabilities) {
          decision.capabilities = result.capabilities;
        }
        return decision;
      }
    };
  }
});
|
|
55524
|
+
|
|
55525
|
+
// src/enterprise/scheduler/knex-store.ts
// Lazily-populated export namespace for the knex-backed schedule store
// (esbuild __export pattern: getters defer to the live binding below).
var knex_store_exports = {};
__export(knex_store_exports, {
  KnexStoreBackend: () => KnexStoreBackend
});
|
|
55530
|
+
/**
 * Coerce a DB scalar to a number. Some drivers return BIGINT columns as
 * strings; those are parsed base-10, numbers pass through, and null or
 * undefined become undefined.
 */
function toNum(val) {
  if (val == null) {
    return void 0;
  }
  if (typeof val === "string") {
    return parseInt(val, 10);
  }
  return val;
}
|
|
55534
|
+
/**
 * Parse a JSON string from the database, returning undefined for empty,
 * null, or malformed input instead of throwing.
 */
function safeJsonParse2(value) {
  let parsed;
  if (value) {
    try {
      parsed = JSON.parse(value);
    } catch {
      // Malformed JSON is treated the same as an absent value.
    }
  }
  return parsed;
}
|
|
55542
|
+
/**
 * Map a raw `message_triggers` DB row (snake_case, driver-specific scalar
 * types) to a camelCase trigger object.
 */
function fromTriggerRow2(row) {
  // Drivers differ: booleans may come back as true/false or as 1/0.
  const asBool = (flag) => flag === true || flag === 1;
  // DB NULLs arrive as null; the domain object uses undefined instead.
  const orUndef = (field) => field ?? void 0;
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: orUndef(row.creator_context),
    creatorName: orUndef(row.creator_name),
    description: orUndef(row.description),
    channels: safeJsonParse2(row.channels),
    fromUsers: safeJsonParse2(row.from_users),
    fromBots: asBool(row.from_bots),
    contains: safeJsonParse2(row.contains),
    matchPattern: orUndef(row.match_pattern),
    threads: row.threads,
    workflow: row.workflow,
    inputs: safeJsonParse2(row.inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    enabled: asBool(row.enabled),
    createdAt: toNum(row.created_at)
  };
}
|
|
55563
|
+
/**
 * Map a camelCase trigger object to a snake_case row for insertion into the
 * `message_triggers` table. Optional scalars become NULL; structured fields
 * are serialized to JSON strings (or NULL when absent).
 */
function toTriggerInsertRow(trigger) {
  const jsonOrNull = (value) => (value ? JSON.stringify(value) : null);
  const orNull = (value) => value ?? null;
  return {
    id: trigger.id,
    creator_id: trigger.creatorId,
    creator_context: orNull(trigger.creatorContext),
    creator_name: orNull(trigger.creatorName),
    description: orNull(trigger.description),
    channels: jsonOrNull(trigger.channels),
    from_users: jsonOrNull(trigger.fromUsers),
    from_bots: trigger.fromBots,
    contains: jsonOrNull(trigger.contains),
    match_pattern: orNull(trigger.matchPattern),
    threads: trigger.threads,
    workflow: trigger.workflow,
    inputs: jsonOrNull(trigger.inputs),
    output_context: jsonOrNull(trigger.outputContext),
    status: trigger.status,
    enabled: trigger.enabled,
    created_at: trigger.createdAt
  };
}
|
|
55584
|
+
/**
 * Map a raw `schedules` DB row (snake_case, driver-specific scalar types)
 * to a camelCase schedule object.
 */
function fromDbRow2(row) {
  // DB NULLs arrive as null; the domain object uses undefined instead.
  const orUndef = (field) => field ?? void 0;
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: orUndef(row.creator_context),
    creatorName: orUndef(row.creator_name),
    timezone: row.timezone,
    schedule: row.schedule_expr,
    runAt: toNum(row.run_at),
    // Drivers differ: booleans may come back as true/false or as 1/0.
    isRecurring: row.is_recurring === true || row.is_recurring === 1,
    originalExpression: row.original_expression,
    workflow: orUndef(row.workflow),
    workflowInputs: safeJsonParse2(row.workflow_inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    createdAt: toNum(row.created_at),
    lastRunAt: toNum(row.last_run_at),
    nextRunAt: toNum(row.next_run_at),
    runCount: row.run_count,
    failureCount: row.failure_count,
    lastError: orUndef(row.last_error),
    previousResponse: orUndef(row.previous_response)
  };
}
|
|
55608
|
+
/**
 * Map a camelCase schedule object to a snake_case row for insertion into the
 * `schedules` table. Optional scalars become NULL; structured fields are
 * serialized to JSON strings (or NULL when absent).
 */
function toInsertRow(schedule) {
  const jsonOrNull = (value) => (value ? JSON.stringify(value) : null);
  const orNull = (value) => value ?? null;
  return {
    id: schedule.id,
    creator_id: schedule.creatorId,
    creator_context: orNull(schedule.creatorContext),
    creator_name: orNull(schedule.creatorName),
    timezone: schedule.timezone,
    schedule_expr: schedule.schedule,
    run_at: orNull(schedule.runAt),
    is_recurring: schedule.isRecurring,
    original_expression: schedule.originalExpression,
    workflow: orNull(schedule.workflow),
    workflow_inputs: jsonOrNull(schedule.workflowInputs),
    output_context: jsonOrNull(schedule.outputContext),
    status: schedule.status,
    created_at: schedule.createdAt,
    last_run_at: orNull(schedule.lastRunAt),
    next_run_at: orNull(schedule.nextRunAt),
    run_count: schedule.runCount,
    failure_count: schedule.failureCount,
    last_error: orNull(schedule.lastError),
    previous_response: orNull(schedule.previousResponse)
  };
}
|
|
55632
|
+
var fs24, path28, import_uuid2, KnexStoreBackend;
|
|
55633
|
+
var init_knex_store = __esm({
|
|
55634
|
+
"src/enterprise/scheduler/knex-store.ts"() {
|
|
55635
|
+
"use strict";
|
|
55636
|
+
fs24 = __toESM(require("fs"));
|
|
55637
|
+
path28 = __toESM(require("path"));
|
|
55638
|
+
import_uuid2 = require("uuid");
|
|
55639
|
+
init_logger();
|
|
55640
|
+
KnexStoreBackend = class {
|
|
55641
|
+
knex = null;
|
|
55642
|
+
driver;
|
|
55643
|
+
connection;
|
|
55644
|
+
constructor(driver, storageConfig, _haConfig) {
|
|
55645
|
+
this.driver = driver;
|
|
55646
|
+
this.connection = storageConfig.connection || {};
|
|
55647
|
+
}
|
|
55648
|
+
async initialize() {
|
|
55649
|
+
const { createRequire } = require("module");
|
|
55650
|
+
const runtimeRequire = createRequire(__filename);
|
|
55651
|
+
let knexFactory;
|
|
55652
|
+
try {
|
|
55653
|
+
knexFactory = runtimeRequire("knex");
|
|
55654
|
+
} catch (err) {
|
|
55655
|
+
const code = err?.code;
|
|
55656
|
+
if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
|
|
55657
|
+
throw new Error(
|
|
55658
|
+
"knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
|
|
55659
|
+
);
|
|
55660
|
+
}
|
|
55661
|
+
throw err;
|
|
55662
|
+
}
|
|
55663
|
+
const clientMap = {
|
|
55664
|
+
postgresql: "pg",
|
|
55665
|
+
mysql: "mysql2",
|
|
55666
|
+
mssql: "tedious"
|
|
55667
|
+
};
|
|
55668
|
+
const client = clientMap[this.driver];
|
|
55669
|
+
let connection;
|
|
55670
|
+
if (this.connection.connection_string) {
|
|
55671
|
+
connection = this.connection.connection_string;
|
|
55672
|
+
} else if (this.driver === "mssql") {
|
|
55673
|
+
connection = this.buildMssqlConnection();
|
|
55674
|
+
} else {
|
|
55675
|
+
connection = this.buildStandardConnection();
|
|
55676
|
+
}
|
|
55677
|
+
this.knex = knexFactory({
|
|
55678
|
+
client,
|
|
55679
|
+
connection,
|
|
55680
|
+
pool: {
|
|
55681
|
+
min: this.connection.pool?.min ?? 0,
|
|
55682
|
+
max: this.connection.pool?.max ?? 10
|
|
55683
|
+
}
|
|
55684
|
+
});
|
|
55685
|
+
await this.migrateSchema();
|
|
55686
|
+
logger.info(`[KnexStore] Initialized (${this.driver})`);
|
|
55687
|
+
}
|
|
55688
|
+
buildStandardConnection() {
|
|
55689
|
+
return {
|
|
55690
|
+
host: this.connection.host || "localhost",
|
|
55691
|
+
port: this.connection.port,
|
|
55692
|
+
database: this.connection.database || "visor",
|
|
55693
|
+
user: this.connection.user,
|
|
55694
|
+
password: this.connection.password,
|
|
55695
|
+
ssl: this.resolveSslConfig()
|
|
55696
|
+
};
|
|
55697
|
+
}
|
|
55698
|
+
buildMssqlConnection() {
|
|
55699
|
+
const ssl = this.connection.ssl;
|
|
55700
|
+
const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
|
|
55701
|
+
return {
|
|
55702
|
+
server: this.connection.host || "localhost",
|
|
55703
|
+
port: this.connection.port,
|
|
55704
|
+
database: this.connection.database || "visor",
|
|
55705
|
+
user: this.connection.user,
|
|
55706
|
+
password: this.connection.password,
|
|
55707
|
+
options: {
|
|
55708
|
+
encrypt: sslEnabled,
|
|
55709
|
+
trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
|
|
55710
|
+
}
|
|
55711
|
+
};
|
|
55712
|
+
}
|
|
55713
|
+
resolveSslConfig() {
|
|
55714
|
+
const ssl = this.connection.ssl;
|
|
55715
|
+
if (ssl === false || ssl === void 0) return false;
|
|
55716
|
+
if (ssl === true) return { rejectUnauthorized: true };
|
|
55717
|
+
if (ssl.enabled === false) return false;
|
|
55718
|
+
const result = {
|
|
55719
|
+
rejectUnauthorized: ssl.reject_unauthorized !== false
|
|
55720
|
+
};
|
|
55721
|
+
if (ssl.ca) {
|
|
55722
|
+
const caPath = this.validateSslPath(ssl.ca, "CA certificate");
|
|
55723
|
+
result.ca = fs24.readFileSync(caPath, "utf8");
|
|
55724
|
+
}
|
|
55725
|
+
if (ssl.cert) {
|
|
55726
|
+
const certPath = this.validateSslPath(ssl.cert, "client certificate");
|
|
55727
|
+
result.cert = fs24.readFileSync(certPath, "utf8");
|
|
55728
|
+
}
|
|
55729
|
+
if (ssl.key) {
|
|
55730
|
+
const keyPath = this.validateSslPath(ssl.key, "client key");
|
|
55731
|
+
result.key = fs24.readFileSync(keyPath, "utf8");
|
|
55732
|
+
}
|
|
55733
|
+
return result;
|
|
55734
|
+
}
|
|
55735
|
+
validateSslPath(filePath, label) {
|
|
55736
|
+
const resolved = path28.resolve(filePath);
|
|
55737
|
+
if (resolved !== path28.normalize(resolved)) {
|
|
55738
|
+
throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
|
|
55739
|
+
}
|
|
55740
|
+
if (!fs24.existsSync(resolved)) {
|
|
55741
|
+
throw new Error(`SSL ${label} not found: ${filePath}`);
|
|
55742
|
+
}
|
|
55743
|
+
return resolved;
|
|
55744
|
+
}
|
|
55745
|
+
async shutdown() {
|
|
55746
|
+
if (this.knex) {
|
|
55747
|
+
await this.knex.destroy();
|
|
55748
|
+
this.knex = null;
|
|
55749
|
+
}
|
|
55750
|
+
}
|
|
55751
|
+
async migrateSchema() {
|
|
55752
|
+
const knex = this.getKnex();
|
|
55753
|
+
const exists = await knex.schema.hasTable("schedules");
|
|
55754
|
+
if (!exists) {
|
|
55755
|
+
await knex.schema.createTable("schedules", (table) => {
|
|
55756
|
+
table.string("id", 36).primary();
|
|
55757
|
+
table.string("creator_id", 255).notNullable().index();
|
|
55758
|
+
table.string("creator_context", 255);
|
|
55759
|
+
table.string("creator_name", 255);
|
|
55760
|
+
table.string("timezone", 64).notNullable().defaultTo("UTC");
|
|
55761
|
+
table.string("schedule_expr", 255);
|
|
55762
|
+
table.bigInteger("run_at");
|
|
55763
|
+
table.boolean("is_recurring").notNullable();
|
|
55764
|
+
table.text("original_expression");
|
|
55765
|
+
table.string("workflow", 255);
|
|
55766
|
+
table.text("workflow_inputs");
|
|
55767
|
+
table.text("output_context");
|
|
55768
|
+
table.string("status", 20).notNullable().index();
|
|
55769
|
+
table.bigInteger("created_at").notNullable();
|
|
55770
|
+
table.bigInteger("last_run_at");
|
|
55771
|
+
table.bigInteger("next_run_at");
|
|
55772
|
+
table.integer("run_count").notNullable().defaultTo(0);
|
|
55773
|
+
table.integer("failure_count").notNullable().defaultTo(0);
|
|
55774
|
+
table.text("last_error");
|
|
55775
|
+
table.text("previous_response");
|
|
55776
|
+
table.index(["status", "next_run_at"]);
|
|
55777
|
+
});
|
|
55778
|
+
}
|
|
55779
|
+
const triggersExist = await knex.schema.hasTable("message_triggers");
|
|
55780
|
+
if (!triggersExist) {
|
|
55781
|
+
await knex.schema.createTable("message_triggers", (table) => {
|
|
55782
|
+
table.string("id", 36).primary();
|
|
55783
|
+
table.string("creator_id", 255).notNullable().index();
|
|
55784
|
+
table.string("creator_context", 255);
|
|
55785
|
+
table.string("creator_name", 255);
|
|
55786
|
+
table.text("description");
|
|
55787
|
+
table.text("channels");
|
|
55788
|
+
table.text("from_users");
|
|
55789
|
+
table.boolean("from_bots").notNullable().defaultTo(false);
|
|
55790
|
+
table.text("contains");
|
|
55791
|
+
table.text("match_pattern");
|
|
55792
|
+
table.string("threads", 20).notNullable().defaultTo("any");
|
|
55793
|
+
table.string("workflow", 255).notNullable();
|
|
55794
|
+
table.text("inputs");
|
|
55795
|
+
table.text("output_context");
|
|
55796
|
+
table.string("status", 20).notNullable().defaultTo("active").index();
|
|
55797
|
+
table.boolean("enabled").notNullable().defaultTo(true);
|
|
55798
|
+
table.bigInteger("created_at").notNullable();
|
|
55799
|
+
});
|
|
55800
|
+
}
|
|
55801
|
+
const locksExist = await knex.schema.hasTable("scheduler_locks");
|
|
55802
|
+
if (!locksExist) {
|
|
55803
|
+
await knex.schema.createTable("scheduler_locks", (table) => {
|
|
55804
|
+
table.string("lock_id", 255).primary();
|
|
55805
|
+
table.string("node_id", 255).notNullable();
|
|
55806
|
+
table.string("lock_token", 36).notNullable();
|
|
55807
|
+
table.bigInteger("acquired_at").notNullable();
|
|
55808
|
+
table.bigInteger("expires_at").notNullable();
|
|
55809
|
+
});
|
|
55810
|
+
}
|
|
55811
|
+
}
|
|
55812
|
+
getKnex() {
|
|
55813
|
+
if (!this.knex) {
|
|
55814
|
+
throw new Error("[KnexStore] Not initialized. Call initialize() first.");
|
|
55815
|
+
}
|
|
55816
|
+
return this.knex;
|
|
55817
|
+
}
|
|
55818
|
+
// --- CRUD ---
|
|
55819
|
+
async create(schedule) {
|
|
55820
|
+
const knex = this.getKnex();
|
|
55821
|
+
const newSchedule = {
|
|
55822
|
+
...schedule,
|
|
55823
|
+
id: (0, import_uuid2.v4)(),
|
|
55824
|
+
createdAt: Date.now(),
|
|
55825
|
+
runCount: 0,
|
|
55826
|
+
failureCount: 0,
|
|
55827
|
+
status: "active"
|
|
55828
|
+
};
|
|
55829
|
+
await knex("schedules").insert(toInsertRow(newSchedule));
|
|
55830
|
+
logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
|
|
55831
|
+
return newSchedule;
|
|
55832
|
+
}
|
|
55833
|
+
async importSchedule(schedule) {
|
|
55834
|
+
const knex = this.getKnex();
|
|
55835
|
+
const existing = await knex("schedules").where("id", schedule.id).first();
|
|
55836
|
+
if (existing) return;
|
|
55837
|
+
await knex("schedules").insert(toInsertRow(schedule));
|
|
55838
|
+
}
|
|
55839
|
+
async get(id) {
|
|
55840
|
+
const knex = this.getKnex();
|
|
55841
|
+
const row = await knex("schedules").where("id", id).first();
|
|
55842
|
+
return row ? fromDbRow2(row) : void 0;
|
|
55843
|
+
}
|
|
55844
|
+
async update(id, patch) {
|
|
55845
|
+
const knex = this.getKnex();
|
|
55846
|
+
const existing = await knex("schedules").where("id", id).first();
|
|
55847
|
+
if (!existing) return void 0;
|
|
55848
|
+
const current = fromDbRow2(existing);
|
|
55849
|
+
const updated = { ...current, ...patch, id: current.id };
|
|
55850
|
+
const row = toInsertRow(updated);
|
|
55851
|
+
delete row.id;
|
|
55852
|
+
await knex("schedules").where("id", id).update(row);
|
|
55853
|
+
return updated;
|
|
55854
|
+
}
|
|
55855
|
+
async delete(id) {
|
|
55856
|
+
const knex = this.getKnex();
|
|
55857
|
+
const deleted = await knex("schedules").where("id", id).del();
|
|
55858
|
+
if (deleted > 0) {
|
|
55859
|
+
logger.info(`[KnexStore] Deleted schedule ${id}`);
|
|
55860
|
+
return true;
|
|
55861
|
+
}
|
|
55862
|
+
return false;
|
|
55863
|
+
}
|
|
55864
|
+
// --- Queries ---
|
|
55865
|
+
async getByCreator(creatorId) {
|
|
55866
|
+
const knex = this.getKnex();
|
|
55867
|
+
const rows = await knex("schedules").where("creator_id", creatorId);
|
|
55868
|
+
return rows.map((r) => fromDbRow2(r));
|
|
55869
|
+
}
|
|
55870
|
+
async getActiveSchedules() {
|
|
55871
|
+
const knex = this.getKnex();
|
|
55872
|
+
const rows = await knex("schedules").where("status", "active");
|
|
55873
|
+
return rows.map((r) => fromDbRow2(r));
|
|
55874
|
+
}
|
|
55875
|
+
async getDueSchedules(now) {
|
|
55876
|
+
const ts = now ?? Date.now();
|
|
55877
|
+
const knex = this.getKnex();
|
|
55878
|
+
const bFalse = this.driver === "mssql" ? 0 : false;
|
|
55879
|
+
const bTrue = this.driver === "mssql" ? 1 : true;
|
|
55880
|
+
const rows = await knex("schedules").where("status", "active").andWhere(function() {
|
|
55881
|
+
this.where(function() {
|
|
55882
|
+
this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
|
|
55883
|
+
}).orWhere(function() {
|
|
55884
|
+
this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
|
|
55885
|
+
});
|
|
55886
|
+
});
|
|
55887
|
+
return rows.map((r) => fromDbRow2(r));
|
|
55888
|
+
}
|
|
55889
|
+
async findByWorkflow(creatorId, workflowName) {
|
|
55890
|
+
const knex = this.getKnex();
|
|
55891
|
+
const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
|
|
55892
|
+
const pattern = `%${escaped}%`;
|
|
55893
|
+
const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
|
|
55894
|
+
return rows.map((r) => fromDbRow2(r));
|
|
55895
|
+
}
|
|
55896
|
+
async getAll() {
|
|
55897
|
+
const knex = this.getKnex();
|
|
55898
|
+
const rows = await knex("schedules");
|
|
55899
|
+
return rows.map((r) => fromDbRow2(r));
|
|
55900
|
+
}
|
|
55901
|
+
async getStats() {
|
|
55902
|
+
const knex = this.getKnex();
|
|
55903
|
+
const boolTrue = this.driver === "mssql" ? "1" : "true";
|
|
55904
|
+
const boolFalse = this.driver === "mssql" ? "0" : "false";
|
|
55905
|
+
const result = await knex("schedules").select(
|
|
55906
|
+
knex.raw("COUNT(*) as total"),
|
|
55907
|
+
knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
|
|
55908
|
+
knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
|
|
55909
|
+
knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
|
|
55910
|
+
knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
|
|
55911
|
+
knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
|
|
55912
|
+
knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
|
|
55913
|
+
).first();
|
|
55914
|
+
return {
|
|
55915
|
+
total: Number(result.total) || 0,
|
|
55916
|
+
active: Number(result.active) || 0,
|
|
55917
|
+
paused: Number(result.paused) || 0,
|
|
55918
|
+
completed: Number(result.completed) || 0,
|
|
55919
|
+
failed: Number(result.failed) || 0,
|
|
55920
|
+
recurring: Number(result.recurring) || 0,
|
|
55921
|
+
oneTime: Number(result.one_time) || 0
|
|
55922
|
+
};
|
|
55923
|
+
}
|
|
55924
|
+
async validateLimits(creatorId, isRecurring, limits) {
|
|
55925
|
+
const knex = this.getKnex();
|
|
55926
|
+
if (limits.maxGlobal) {
|
|
55927
|
+
const result = await knex("schedules").count("* as cnt").first();
|
|
55928
|
+
if (Number(result?.cnt) >= limits.maxGlobal) {
|
|
55929
|
+
throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
|
|
55930
|
+
}
|
|
55931
|
+
}
|
|
55932
|
+
if (limits.maxPerUser) {
|
|
55933
|
+
const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
|
|
55934
|
+
if (Number(result?.cnt) >= limits.maxPerUser) {
|
|
55935
|
+
throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
|
|
55936
|
+
}
|
|
55937
|
+
}
|
|
55938
|
+
if (isRecurring && limits.maxRecurringPerUser) {
|
|
55939
|
+
const bTrue = this.driver === "mssql" ? 1 : true;
|
|
55940
|
+
const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
|
|
55941
|
+
if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
|
|
55942
|
+
throw new Error(
|
|
55943
|
+
`You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
|
|
55944
|
+
);
|
|
55945
|
+
}
|
|
55946
|
+
}
|
|
55947
|
+
}
|
|
55948
|
+
// --- HA Distributed Locking (via scheduler_locks table) ---
|
|
55949
|
+
async tryAcquireLock(lockId, nodeId, ttlSeconds) {
|
|
55950
|
+
const knex = this.getKnex();
|
|
55951
|
+
const now = Date.now();
|
|
55952
|
+
const expiresAt = now + ttlSeconds * 1e3;
|
|
55953
|
+
const token = (0, import_uuid2.v4)();
|
|
55954
|
+
const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
|
|
55955
|
+
node_id: nodeId,
|
|
55956
|
+
lock_token: token,
|
|
55957
|
+
acquired_at: now,
|
|
55958
|
+
expires_at: expiresAt
|
|
55959
|
+
});
|
|
55960
|
+
if (updated > 0) return token;
|
|
55961
|
+
try {
|
|
55962
|
+
await knex("scheduler_locks").insert({
|
|
55963
|
+
lock_id: lockId,
|
|
55964
|
+
node_id: nodeId,
|
|
55965
|
+
lock_token: token,
|
|
55966
|
+
acquired_at: now,
|
|
55967
|
+
expires_at: expiresAt
|
|
55968
|
+
});
|
|
55969
|
+
return token;
|
|
55970
|
+
} catch {
|
|
55971
|
+
return null;
|
|
55972
|
+
}
|
|
55973
|
+
}
|
|
55974
|
+
async releaseLock(lockId, lockToken) {
|
|
55975
|
+
const knex = this.getKnex();
|
|
55976
|
+
await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
|
|
55977
|
+
}
|
|
55978
|
+
async renewLock(lockId, lockToken, ttlSeconds) {
|
|
55979
|
+
const knex = this.getKnex();
|
|
55980
|
+
const now = Date.now();
|
|
55981
|
+
const expiresAt = now + ttlSeconds * 1e3;
|
|
55982
|
+
const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
|
|
55983
|
+
return updated > 0;
|
|
55984
|
+
}
|
|
55985
|
+
async flush() {
|
|
55986
|
+
}
|
|
55987
|
+
// --- Message Trigger CRUD ---
|
|
55988
|
+
async createTrigger(trigger) {
|
|
55989
|
+
const knex = this.getKnex();
|
|
55990
|
+
const newTrigger = {
|
|
55991
|
+
...trigger,
|
|
55992
|
+
id: (0, import_uuid2.v4)(),
|
|
55993
|
+
createdAt: Date.now()
|
|
55994
|
+
};
|
|
55995
|
+
await knex("message_triggers").insert(toTriggerInsertRow(newTrigger));
|
|
55996
|
+
logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
|
|
55997
|
+
return newTrigger;
|
|
55998
|
+
}
|
|
55999
|
+
async getTrigger(id) {
|
|
56000
|
+
const knex = this.getKnex();
|
|
56001
|
+
const row = await knex("message_triggers").where("id", id).first();
|
|
56002
|
+
return row ? fromTriggerRow2(row) : void 0;
|
|
56003
|
+
}
|
|
56004
|
+
async updateTrigger(id, patch) {
|
|
56005
|
+
const knex = this.getKnex();
|
|
56006
|
+
const existing = await knex("message_triggers").where("id", id).first();
|
|
56007
|
+
if (!existing) return void 0;
|
|
56008
|
+
const current = fromTriggerRow2(existing);
|
|
56009
|
+
const updated = {
|
|
56010
|
+
...current,
|
|
56011
|
+
...patch,
|
|
56012
|
+
id: current.id,
|
|
56013
|
+
createdAt: current.createdAt
|
|
56014
|
+
};
|
|
56015
|
+
const row = toTriggerInsertRow(updated);
|
|
56016
|
+
delete row.id;
|
|
56017
|
+
await knex("message_triggers").where("id", id).update(row);
|
|
56018
|
+
return updated;
|
|
56019
|
+
}
|
|
56020
|
+
async deleteTrigger(id) {
|
|
56021
|
+
const knex = this.getKnex();
|
|
56022
|
+
const deleted = await knex("message_triggers").where("id", id).del();
|
|
56023
|
+
if (deleted > 0) {
|
|
56024
|
+
logger.info(`[KnexStore] Deleted trigger ${id}`);
|
|
56025
|
+
return true;
|
|
56026
|
+
}
|
|
56027
|
+
return false;
|
|
56028
|
+
}
|
|
56029
|
+
async getTriggersByCreator(creatorId) {
|
|
56030
|
+
const knex = this.getKnex();
|
|
56031
|
+
const rows = await knex("message_triggers").where("creator_id", creatorId);
|
|
56032
|
+
return rows.map((r) => fromTriggerRow2(r));
|
|
56033
|
+
}
|
|
56034
|
+
async getActiveTriggers() {
|
|
56035
|
+
const knex = this.getKnex();
|
|
56036
|
+
const rows = await knex("message_triggers").where("status", "active").where("enabled", this.driver === "mssql" ? 1 : true);
|
|
56037
|
+
return rows.map((r) => fromTriggerRow2(r));
|
|
56038
|
+
}
|
|
56039
|
+
};
|
|
56040
|
+
}
|
|
56041
|
+
});
|
|
56042
|
+
|
|
56043
|
+
// src/enterprise/loader.ts
|
|
56044
|
+
var loader_exports = {};
|
|
56045
|
+
__export(loader_exports, {
|
|
56046
|
+
loadEnterprisePolicyEngine: () => loadEnterprisePolicyEngine,
|
|
56047
|
+
loadEnterpriseStoreBackend: () => loadEnterpriseStoreBackend
|
|
56048
|
+
});
|
|
56049
|
+
async function loadEnterprisePolicyEngine(config) {
|
|
56050
|
+
try {
|
|
56051
|
+
const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
|
|
56052
|
+
const validator = new LicenseValidator2();
|
|
56053
|
+
const license = await validator.loadAndValidate();
|
|
56054
|
+
if (!license || !validator.hasFeature("policy")) {
|
|
56055
|
+
return new DefaultPolicyEngine();
|
|
56056
|
+
}
|
|
56057
|
+
if (validator.isInGracePeriod()) {
|
|
56058
|
+
console.warn(
|
|
56059
|
+
"[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
|
|
56060
|
+
);
|
|
56061
|
+
}
|
|
56062
|
+
const { OpaPolicyEngine: OpaPolicyEngine2 } = await Promise.resolve().then(() => (init_opa_policy_engine(), opa_policy_engine_exports));
|
|
56063
|
+
const engine = new OpaPolicyEngine2(config);
|
|
56064
|
+
await engine.initialize(config);
|
|
56065
|
+
return engine;
|
|
56066
|
+
} catch (err) {
|
|
56067
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
56068
|
+
try {
|
|
56069
|
+
const { logger: logger2 } = (init_logger(), __toCommonJS(logger_exports));
|
|
56070
|
+
logger2.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
|
|
56071
|
+
} catch {
|
|
56072
|
+
}
|
|
56073
|
+
return new DefaultPolicyEngine();
|
|
56074
|
+
}
|
|
56075
|
+
}
|
|
56076
|
+
async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
|
|
56077
|
+
const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
|
|
56078
|
+
const validator = new LicenseValidator2();
|
|
56079
|
+
const license = await validator.loadAndValidate();
|
|
56080
|
+
if (!license || !validator.hasFeature("scheduler-sql")) {
|
|
56081
|
+
throw new Error(
|
|
56082
|
+
`The ${driver} schedule storage driver requires a Visor Enterprise license with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`
|
|
56083
|
+
);
|
|
56084
|
+
}
|
|
56085
|
+
if (validator.isInGracePeriod()) {
|
|
56086
|
+
console.warn(
|
|
56087
|
+
"[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
|
|
56088
|
+
);
|
|
56089
|
+
}
|
|
56090
|
+
const { KnexStoreBackend: KnexStoreBackend2 } = await Promise.resolve().then(() => (init_knex_store(), knex_store_exports));
|
|
56091
|
+
return new KnexStoreBackend2(driver, storageConfig, haConfig);
|
|
56092
|
+
}
|
|
56093
|
+
var init_loader = __esm({
|
|
56094
|
+
"src/enterprise/loader.ts"() {
|
|
56095
|
+
"use strict";
|
|
56096
|
+
init_default_engine();
|
|
56097
|
+
}
|
|
56098
|
+
});
|
|
56099
|
+
|
|
54700
56100
|
// src/event-bus/event-bus.ts
|
|
54701
56101
|
var event_bus_exports = {};
|
|
54702
56102
|
__export(event_bus_exports, {
|
|
@@ -55603,8 +57003,8 @@ ${content}
|
|
|
55603
57003
|
* Sleep utility
|
|
55604
57004
|
*/
|
|
55605
57005
|
sleep(ms) {
|
|
55606
|
-
return new Promise((
|
|
55607
|
-
const t = setTimeout(
|
|
57006
|
+
return new Promise((resolve19) => {
|
|
57007
|
+
const t = setTimeout(resolve19, ms);
|
|
55608
57008
|
if (typeof t.unref === "function") {
|
|
55609
57009
|
try {
|
|
55610
57010
|
t.unref();
|
|
@@ -55889,8 +57289,8 @@ ${end}`);
|
|
|
55889
57289
|
async updateGroupedComment(ctx, comments, group, changedIds) {
|
|
55890
57290
|
const existingLock = this.updateLocks.get(group);
|
|
55891
57291
|
let resolveLock;
|
|
55892
|
-
const ourLock = new Promise((
|
|
55893
|
-
resolveLock =
|
|
57292
|
+
const ourLock = new Promise((resolve19) => {
|
|
57293
|
+
resolveLock = resolve19;
|
|
55894
57294
|
});
|
|
55895
57295
|
this.updateLocks.set(group, ourLock);
|
|
55896
57296
|
try {
|
|
@@ -56203,7 +57603,7 @@ ${blocks}
|
|
|
56203
57603
|
* Sleep utility for enforcing delays
|
|
56204
57604
|
*/
|
|
56205
57605
|
sleep(ms) {
|
|
56206
|
-
return new Promise((
|
|
57606
|
+
return new Promise((resolve19) => setTimeout(resolve19, ms));
|
|
56207
57607
|
}
|
|
56208
57608
|
};
|
|
56209
57609
|
}
|
|
@@ -57474,15 +58874,15 @@ function serializeRunState(state) {
|
|
|
57474
58874
|
])
|
|
57475
58875
|
};
|
|
57476
58876
|
}
|
|
57477
|
-
var
|
|
58877
|
+
var path30, fs26, StateMachineExecutionEngine;
|
|
57478
58878
|
var init_state_machine_execution_engine = __esm({
|
|
57479
58879
|
"src/state-machine-execution-engine.ts"() {
|
|
57480
58880
|
"use strict";
|
|
57481
58881
|
init_runner();
|
|
57482
58882
|
init_logger();
|
|
57483
58883
|
init_sandbox_manager();
|
|
57484
|
-
|
|
57485
|
-
|
|
58884
|
+
path30 = __toESM(require("path"));
|
|
58885
|
+
fs26 = __toESM(require("fs"));
|
|
57486
58886
|
StateMachineExecutionEngine = class _StateMachineExecutionEngine {
|
|
57487
58887
|
workingDirectory;
|
|
57488
58888
|
executionContext;
|
|
@@ -57714,8 +59114,8 @@ var init_state_machine_execution_engine = __esm({
|
|
|
57714
59114
|
logger.debug(
|
|
57715
59115
|
`[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`
|
|
57716
59116
|
);
|
|
57717
|
-
const { loadEnterprisePolicyEngine } = await
|
|
57718
|
-
context2.policyEngine = await
|
|
59117
|
+
const { loadEnterprisePolicyEngine: loadEnterprisePolicyEngine2 } = await Promise.resolve().then(() => (init_loader(), loader_exports));
|
|
59118
|
+
context2.policyEngine = await loadEnterprisePolicyEngine2(configWithTagFilter.policy);
|
|
57719
59119
|
logger.debug(
|
|
57720
59120
|
`[PolicyEngine] Initialized: ${context2.policyEngine?.constructor?.name || "unknown"}`
|
|
57721
59121
|
);
|
|
@@ -57867,9 +59267,9 @@ var init_state_machine_execution_engine = __esm({
|
|
|
57867
59267
|
}
|
|
57868
59268
|
const checkId = String(ev?.checkId || "unknown");
|
|
57869
59269
|
const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
|
|
57870
|
-
const baseDir = process.env.VISOR_SNAPSHOT_DIR ||
|
|
57871
|
-
|
|
57872
|
-
const filePath =
|
|
59270
|
+
const baseDir = process.env.VISOR_SNAPSHOT_DIR || path30.resolve(process.cwd(), ".visor", "snapshots");
|
|
59271
|
+
fs26.mkdirSync(baseDir, { recursive: true });
|
|
59272
|
+
const filePath = path30.join(baseDir, `${threadKey}-${checkId}.json`);
|
|
57873
59273
|
await this.saveSnapshotToFile(filePath);
|
|
57874
59274
|
logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
|
|
57875
59275
|
try {
|
|
@@ -58010,7 +59410,7 @@ var init_state_machine_execution_engine = __esm({
|
|
|
58010
59410
|
* Does not include secrets. Intended for debugging and future resume support.
|
|
58011
59411
|
*/
|
|
58012
59412
|
async saveSnapshotToFile(filePath) {
|
|
58013
|
-
const
|
|
59413
|
+
const fs27 = await import("fs/promises");
|
|
58014
59414
|
const ctx = this._lastContext;
|
|
58015
59415
|
const runner = this._lastRunner;
|
|
58016
59416
|
if (!ctx || !runner) {
|
|
@@ -58030,14 +59430,14 @@ var init_state_machine_execution_engine = __esm({
|
|
|
58030
59430
|
journal: entries,
|
|
58031
59431
|
requestedChecks: ctx.requestedChecks || []
|
|
58032
59432
|
};
|
|
58033
|
-
await
|
|
59433
|
+
await fs27.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
|
|
58034
59434
|
}
|
|
58035
59435
|
/**
|
|
58036
59436
|
* Load a snapshot JSON from file and return it. Resume support can build on this.
|
|
58037
59437
|
*/
|
|
58038
59438
|
async loadSnapshotFromFile(filePath) {
|
|
58039
|
-
const
|
|
58040
|
-
const raw = await
|
|
59439
|
+
const fs27 = await import("fs/promises");
|
|
59440
|
+
const raw = await fs27.readFile(filePath, "utf8");
|
|
58041
59441
|
return JSON.parse(raw);
|
|
58042
59442
|
}
|
|
58043
59443
|
/**
|