@probelabs/visor 0.1.165-ee → 0.1.165
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +23 -1840
- package/dist/output/traces/run-2026-03-06T06-08-10-897Z.ndjson +138 -0
- package/dist/output/traces/run-2026-03-06T06-08-55-016Z.ndjson +2235 -0
- package/dist/sdk/check-provider-registry-4SHN3GSH.mjs +29 -0
- package/dist/sdk/{check-provider-registry-5ZE2KMA2.mjs → check-provider-registry-6P2KJ423.mjs} +2 -2
- package/dist/sdk/chunk-EO4IJNM7.mjs +739 -0
- package/dist/sdk/chunk-EO4IJNM7.mjs.map +1 -0
- package/dist/sdk/{chunk-ODEDLFSQ.mjs → chunk-G5JBWW3O.mjs} +9 -9
- package/dist/sdk/{chunk-ODEDLFSQ.mjs.map → chunk-G5JBWW3O.mjs.map} +1 -1
- package/dist/sdk/chunk-GMHSXC5K.mjs +1502 -0
- package/dist/sdk/chunk-GMHSXC5K.mjs.map +1 -0
- package/dist/sdk/chunk-LDE33FGE.mjs +443 -0
- package/dist/sdk/chunk-LDE33FGE.mjs.map +1 -0
- package/dist/sdk/chunk-MYROK4LB.mjs +43917 -0
- package/dist/sdk/chunk-MYROK4LB.mjs.map +1 -0
- package/dist/sdk/failure-condition-evaluator-M6SIUQF4.mjs +17 -0
- package/dist/sdk/github-frontend-MHXL2Q2V.mjs +1368 -0
- package/dist/sdk/github-frontend-MHXL2Q2V.mjs.map +1 -0
- package/dist/sdk/{host-QFABFVSJ.mjs → host-JROON6IT.mjs} +2 -2
- package/dist/sdk/routing-QCDX43XD.mjs +25 -0
- package/dist/sdk/schedule-tool-C5QN5OQU.mjs +35 -0
- package/dist/sdk/schedule-tool-C5QN5OQU.mjs.map +1 -0
- package/dist/sdk/{schedule-tool-SBBVNRBS.mjs → schedule-tool-XCGJI2VB.mjs} +2 -2
- package/dist/sdk/schedule-tool-XCGJI2VB.mjs.map +1 -0
- package/dist/sdk/{schedule-tool-handler-DPZEXA25.mjs → schedule-tool-handler-OKZ53WMC.mjs} +2 -2
- package/dist/sdk/schedule-tool-handler-OKZ53WMC.mjs.map +1 -0
- package/dist/sdk/schedule-tool-handler-ZUMPNAVY.mjs +39 -0
- package/dist/sdk/schedule-tool-handler-ZUMPNAVY.mjs.map +1 -0
- package/dist/sdk/sdk.js +259 -1633
- package/dist/sdk/sdk.js.map +1 -1
- package/dist/sdk/sdk.mjs +4 -4
- package/dist/sdk/trace-helpers-KFQJ7IAG.mjs +25 -0
- package/dist/sdk/trace-helpers-KFQJ7IAG.mjs.map +1 -0
- package/dist/sdk/workflow-check-provider-RBYA6ZGU.mjs +29 -0
- package/dist/sdk/workflow-check-provider-RBYA6ZGU.mjs.map +1 -0
- package/dist/sdk/{workflow-check-provider-HGHSY5QF.mjs → workflow-check-provider-WLUAJPAS.mjs} +2 -2
- package/dist/sdk/workflow-check-provider-WLUAJPAS.mjs.map +1 -0
- package/dist/traces/run-2026-03-06T06-08-10-897Z.ndjson +138 -0
- package/dist/traces/run-2026-03-06T06-08-55-016Z.ndjson +2235 -0
- package/package.json +1 -1
- package/dist/sdk/knex-store-CRORFJE6.mjs +0 -527
- package/dist/sdk/knex-store-CRORFJE6.mjs.map +0 -1
- package/dist/sdk/loader-NJCF7DUS.mjs +0 -89
- package/dist/sdk/loader-NJCF7DUS.mjs.map +0 -1
- package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs +0 -655
- package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs.map +0 -1
- package/dist/sdk/validator-XTZJZZJH.mjs +0 -134
- package/dist/sdk/validator-XTZJZZJH.mjs.map +0 -1
- /package/dist/sdk/{check-provider-registry-5ZE2KMA2.mjs.map → check-provider-registry-4SHN3GSH.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-SBBVNRBS.mjs.map → check-provider-registry-6P2KJ423.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-DPZEXA25.mjs.map → failure-condition-evaluator-M6SIUQF4.mjs.map} +0 -0
- /package/dist/sdk/{host-QFABFVSJ.mjs.map → host-JROON6IT.mjs.map} +0 -0
- /package/dist/sdk/{workflow-check-provider-HGHSY5QF.mjs.map → routing-QCDX43XD.mjs.map} +0 -0
package/dist/sdk/sdk.js
CHANGED
|
@@ -646,7 +646,7 @@ var require_package = __commonJS({
|
|
|
646
646
|
"package.json"(exports2, module2) {
|
|
647
647
|
module2.exports = {
|
|
648
648
|
name: "@probelabs/visor",
|
|
649
|
-
version: "0.1.
|
|
649
|
+
version: "0.1.165",
|
|
650
650
|
main: "dist/index.js",
|
|
651
651
|
bin: {
|
|
652
652
|
visor: "./dist/index.js"
|
|
@@ -864,11 +864,11 @@ function getTracer() {
|
|
|
864
864
|
}
|
|
865
865
|
async function withActiveSpan(name, attrs, fn) {
|
|
866
866
|
const tracer = getTracer();
|
|
867
|
-
return await new Promise((
|
|
867
|
+
return await new Promise((resolve15, reject) => {
|
|
868
868
|
const callback = async (span) => {
|
|
869
869
|
try {
|
|
870
870
|
const res = await fn(span);
|
|
871
|
-
|
|
871
|
+
resolve15(res);
|
|
872
872
|
} catch (err) {
|
|
873
873
|
try {
|
|
874
874
|
if (err instanceof Error) span.recordException(err);
|
|
@@ -945,19 +945,19 @@ function __getOrCreateNdjsonPath() {
|
|
|
945
945
|
try {
|
|
946
946
|
if (process.env.VISOR_TELEMETRY_SINK && process.env.VISOR_TELEMETRY_SINK !== "file")
|
|
947
947
|
return null;
|
|
948
|
-
const
|
|
949
|
-
const
|
|
948
|
+
const path27 = require("path");
|
|
949
|
+
const fs23 = require("fs");
|
|
950
950
|
if (process.env.VISOR_FALLBACK_TRACE_FILE) {
|
|
951
951
|
__ndjsonPath = process.env.VISOR_FALLBACK_TRACE_FILE;
|
|
952
|
-
const dir =
|
|
953
|
-
if (!
|
|
952
|
+
const dir = path27.dirname(__ndjsonPath);
|
|
953
|
+
if (!fs23.existsSync(dir)) fs23.mkdirSync(dir, { recursive: true });
|
|
954
954
|
return __ndjsonPath;
|
|
955
955
|
}
|
|
956
|
-
const outDir = process.env.VISOR_TRACE_DIR ||
|
|
957
|
-
if (!
|
|
956
|
+
const outDir = process.env.VISOR_TRACE_DIR || path27.join(process.cwd(), "output", "traces");
|
|
957
|
+
if (!fs23.existsSync(outDir)) fs23.mkdirSync(outDir, { recursive: true });
|
|
958
958
|
if (!__ndjsonPath) {
|
|
959
959
|
const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
960
|
-
__ndjsonPath =
|
|
960
|
+
__ndjsonPath = path27.join(outDir, `${ts}.ndjson`);
|
|
961
961
|
}
|
|
962
962
|
return __ndjsonPath;
|
|
963
963
|
} catch {
|
|
@@ -966,11 +966,11 @@ function __getOrCreateNdjsonPath() {
|
|
|
966
966
|
}
|
|
967
967
|
function _appendRunMarker() {
|
|
968
968
|
try {
|
|
969
|
-
const
|
|
969
|
+
const fs23 = require("fs");
|
|
970
970
|
const p = __getOrCreateNdjsonPath();
|
|
971
971
|
if (!p) return;
|
|
972
972
|
const line = { name: "visor.run", attributes: { started: true } };
|
|
973
|
-
|
|
973
|
+
fs23.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
|
|
974
974
|
} catch {
|
|
975
975
|
}
|
|
976
976
|
}
|
|
@@ -3193,7 +3193,7 @@ var init_failure_condition_evaluator = __esm({
|
|
|
3193
3193
|
*/
|
|
3194
3194
|
evaluateExpression(condition, context2) {
|
|
3195
3195
|
try {
|
|
3196
|
-
const
|
|
3196
|
+
const normalize4 = (expr) => {
|
|
3197
3197
|
const trimmed = expr.trim();
|
|
3198
3198
|
if (!/[\n;]/.test(trimmed)) return trimmed;
|
|
3199
3199
|
const parts = trimmed.split(/[\n;]+/).map((s) => s.trim()).filter((s) => s.length > 0 && !s.startsWith("//"));
|
|
@@ -3351,7 +3351,7 @@ var init_failure_condition_evaluator = __esm({
|
|
|
3351
3351
|
try {
|
|
3352
3352
|
exec2 = this.sandbox.compile(`return (${raw});`);
|
|
3353
3353
|
} catch {
|
|
3354
|
-
const normalizedExpr =
|
|
3354
|
+
const normalizedExpr = normalize4(condition);
|
|
3355
3355
|
exec2 = this.sandbox.compile(`return (${normalizedExpr});`);
|
|
3356
3356
|
}
|
|
3357
3357
|
const result = exec2(scope).run();
|
|
@@ -3734,9 +3734,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
3734
3734
|
});
|
|
3735
3735
|
liquid.registerFilter("get", (obj, pathExpr) => {
|
|
3736
3736
|
if (obj == null) return void 0;
|
|
3737
|
-
const
|
|
3738
|
-
if (!
|
|
3739
|
-
const parts =
|
|
3737
|
+
const path27 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
|
|
3738
|
+
if (!path27) return obj;
|
|
3739
|
+
const parts = path27.split(".");
|
|
3740
3740
|
let cur = obj;
|
|
3741
3741
|
for (const p of parts) {
|
|
3742
3742
|
if (cur == null) return void 0;
|
|
@@ -3855,9 +3855,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
3855
3855
|
}
|
|
3856
3856
|
}
|
|
3857
3857
|
const defaultRole = typeof rolesCfg.default === "string" && rolesCfg.default.trim() ? rolesCfg.default.trim() : void 0;
|
|
3858
|
-
const getNested = (obj,
|
|
3859
|
-
if (!obj || !
|
|
3860
|
-
const parts =
|
|
3858
|
+
const getNested = (obj, path27) => {
|
|
3859
|
+
if (!obj || !path27) return void 0;
|
|
3860
|
+
const parts = path27.split(".");
|
|
3861
3861
|
let cur = obj;
|
|
3862
3862
|
for (const p of parts) {
|
|
3863
3863
|
if (cur == null) return void 0;
|
|
@@ -6409,8 +6409,8 @@ var init_dependency_gating = __esm({
|
|
|
6409
6409
|
async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
6410
6410
|
try {
|
|
6411
6411
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
6412
|
-
const
|
|
6413
|
-
const
|
|
6412
|
+
const fs23 = await import("fs/promises");
|
|
6413
|
+
const path27 = await import("path");
|
|
6414
6414
|
const schemaRaw = checkConfig.schema || "plain";
|
|
6415
6415
|
const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
|
|
6416
6416
|
let templateContent;
|
|
@@ -6418,24 +6418,24 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
|
6418
6418
|
templateContent = String(checkConfig.template.content);
|
|
6419
6419
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
6420
6420
|
const file = String(checkConfig.template.file);
|
|
6421
|
-
const resolved =
|
|
6422
|
-
templateContent = await
|
|
6421
|
+
const resolved = path27.resolve(process.cwd(), file);
|
|
6422
|
+
templateContent = await fs23.readFile(resolved, "utf-8");
|
|
6423
6423
|
} else if (schema && schema !== "plain") {
|
|
6424
6424
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
6425
6425
|
if (sanitized) {
|
|
6426
6426
|
const candidatePaths = [
|
|
6427
|
-
|
|
6427
|
+
path27.join(__dirname, "output", sanitized, "template.liquid"),
|
|
6428
6428
|
// bundled: dist/output/
|
|
6429
|
-
|
|
6429
|
+
path27.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
6430
6430
|
// source: output/
|
|
6431
|
-
|
|
6431
|
+
path27.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
6432
6432
|
// fallback: cwd/output/
|
|
6433
|
-
|
|
6433
|
+
path27.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
6434
6434
|
// fallback: cwd/dist/output/
|
|
6435
6435
|
];
|
|
6436
6436
|
for (const p of candidatePaths) {
|
|
6437
6437
|
try {
|
|
6438
|
-
templateContent = await
|
|
6438
|
+
templateContent = await fs23.readFile(p, "utf-8");
|
|
6439
6439
|
if (templateContent) break;
|
|
6440
6440
|
} catch {
|
|
6441
6441
|
}
|
|
@@ -6840,7 +6840,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
6840
6840
|
}
|
|
6841
6841
|
try {
|
|
6842
6842
|
const originalProbePath = process.env.PROBE_PATH;
|
|
6843
|
-
const
|
|
6843
|
+
const fs23 = require("fs");
|
|
6844
6844
|
const possiblePaths = [
|
|
6845
6845
|
// Relative to current working directory (most common in production)
|
|
6846
6846
|
path6.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
|
|
@@ -6851,7 +6851,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
6851
6851
|
];
|
|
6852
6852
|
let probeBinaryPath;
|
|
6853
6853
|
for (const candidatePath of possiblePaths) {
|
|
6854
|
-
if (
|
|
6854
|
+
if (fs23.existsSync(candidatePath)) {
|
|
6855
6855
|
probeBinaryPath = candidatePath;
|
|
6856
6856
|
break;
|
|
6857
6857
|
}
|
|
@@ -6958,7 +6958,7 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
6958
6958
|
if (chromiumPath) {
|
|
6959
6959
|
env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;
|
|
6960
6960
|
}
|
|
6961
|
-
const result = await new Promise((
|
|
6961
|
+
const result = await new Promise((resolve15) => {
|
|
6962
6962
|
const proc = (0, import_child_process.spawn)(
|
|
6963
6963
|
"npx",
|
|
6964
6964
|
[
|
|
@@ -6988,13 +6988,13 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
6988
6988
|
});
|
|
6989
6989
|
proc.on("close", (code) => {
|
|
6990
6990
|
if (code === 0) {
|
|
6991
|
-
|
|
6991
|
+
resolve15({ success: true });
|
|
6992
6992
|
} else {
|
|
6993
|
-
|
|
6993
|
+
resolve15({ success: false, error: stderr || `Exit code ${code}` });
|
|
6994
6994
|
}
|
|
6995
6995
|
});
|
|
6996
6996
|
proc.on("error", (err) => {
|
|
6997
|
-
|
|
6997
|
+
resolve15({ success: false, error: err.message });
|
|
6998
6998
|
});
|
|
6999
6999
|
});
|
|
7000
7000
|
if (!result.success) {
|
|
@@ -8156,8 +8156,8 @@ ${schemaString}`);
|
|
|
8156
8156
|
}
|
|
8157
8157
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8158
8158
|
try {
|
|
8159
|
-
const
|
|
8160
|
-
const
|
|
8159
|
+
const fs23 = require("fs");
|
|
8160
|
+
const path27 = require("path");
|
|
8161
8161
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8162
8162
|
const provider = this.config.provider || "auto";
|
|
8163
8163
|
const model = this.config.model || "default";
|
|
@@ -8271,20 +8271,20 @@ ${"=".repeat(60)}
|
|
|
8271
8271
|
`;
|
|
8272
8272
|
readableVersion += `${"=".repeat(60)}
|
|
8273
8273
|
`;
|
|
8274
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8275
|
-
if (!
|
|
8276
|
-
|
|
8274
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
|
|
8275
|
+
if (!fs23.existsSync(debugArtifactsDir)) {
|
|
8276
|
+
fs23.mkdirSync(debugArtifactsDir, { recursive: true });
|
|
8277
8277
|
}
|
|
8278
|
-
const debugFile =
|
|
8278
|
+
const debugFile = path27.join(
|
|
8279
8279
|
debugArtifactsDir,
|
|
8280
8280
|
`prompt-${_checkName || "unknown"}-${timestamp}.json`
|
|
8281
8281
|
);
|
|
8282
|
-
|
|
8283
|
-
const readableFile =
|
|
8282
|
+
fs23.writeFileSync(debugFile, debugJson, "utf-8");
|
|
8283
|
+
const readableFile = path27.join(
|
|
8284
8284
|
debugArtifactsDir,
|
|
8285
8285
|
`prompt-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8286
8286
|
);
|
|
8287
|
-
|
|
8287
|
+
fs23.writeFileSync(readableFile, readableVersion, "utf-8");
|
|
8288
8288
|
log(`
|
|
8289
8289
|
\u{1F4BE} Full debug info saved to:`);
|
|
8290
8290
|
log(` JSON: ${debugFile}`);
|
|
@@ -8317,8 +8317,8 @@ ${"=".repeat(60)}
|
|
|
8317
8317
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
8318
8318
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8319
8319
|
try {
|
|
8320
|
-
const
|
|
8321
|
-
const
|
|
8320
|
+
const fs23 = require("fs");
|
|
8321
|
+
const path27 = require("path");
|
|
8322
8322
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8323
8323
|
const agentAny2 = agent;
|
|
8324
8324
|
let fullHistory = [];
|
|
@@ -8329,8 +8329,8 @@ ${"=".repeat(60)}
|
|
|
8329
8329
|
} else if (agentAny2._messages) {
|
|
8330
8330
|
fullHistory = agentAny2._messages;
|
|
8331
8331
|
}
|
|
8332
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8333
|
-
const sessionBase =
|
|
8332
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
|
|
8333
|
+
const sessionBase = path27.join(
|
|
8334
8334
|
debugArtifactsDir,
|
|
8335
8335
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
8336
8336
|
);
|
|
@@ -8342,7 +8342,7 @@ ${"=".repeat(60)}
|
|
|
8342
8342
|
schema: effectiveSchema,
|
|
8343
8343
|
totalMessages: fullHistory.length
|
|
8344
8344
|
};
|
|
8345
|
-
|
|
8345
|
+
fs23.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
8346
8346
|
let readable = `=============================================================
|
|
8347
8347
|
`;
|
|
8348
8348
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -8369,7 +8369,7 @@ ${"=".repeat(60)}
|
|
|
8369
8369
|
`;
|
|
8370
8370
|
readable += content + "\n";
|
|
8371
8371
|
});
|
|
8372
|
-
|
|
8372
|
+
fs23.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
8373
8373
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
8374
8374
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
8375
8375
|
} catch (error) {
|
|
@@ -8378,11 +8378,11 @@ ${"=".repeat(60)}
|
|
|
8378
8378
|
}
|
|
8379
8379
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8380
8380
|
try {
|
|
8381
|
-
const
|
|
8382
|
-
const
|
|
8381
|
+
const fs23 = require("fs");
|
|
8382
|
+
const path27 = require("path");
|
|
8383
8383
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8384
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8385
|
-
const responseFile =
|
|
8384
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
|
|
8385
|
+
const responseFile = path27.join(
|
|
8386
8386
|
debugArtifactsDir,
|
|
8387
8387
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8388
8388
|
);
|
|
@@ -8415,7 +8415,7 @@ ${"=".repeat(60)}
|
|
|
8415
8415
|
`;
|
|
8416
8416
|
responseContent += `${"=".repeat(60)}
|
|
8417
8417
|
`;
|
|
8418
|
-
|
|
8418
|
+
fs23.writeFileSync(responseFile, responseContent, "utf-8");
|
|
8419
8419
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
8420
8420
|
} catch (error) {
|
|
8421
8421
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -8431,9 +8431,9 @@ ${"=".repeat(60)}
|
|
|
8431
8431
|
await agentAny._telemetryConfig.shutdown();
|
|
8432
8432
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
|
|
8433
8433
|
if (process.env.GITHUB_ACTIONS) {
|
|
8434
|
-
const
|
|
8435
|
-
if (
|
|
8436
|
-
const stats =
|
|
8434
|
+
const fs23 = require("fs");
|
|
8435
|
+
if (fs23.existsSync(agentAny._traceFilePath)) {
|
|
8436
|
+
const stats = fs23.statSync(agentAny._traceFilePath);
|
|
8437
8437
|
console.log(
|
|
8438
8438
|
`::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
|
|
8439
8439
|
);
|
|
@@ -8646,9 +8646,9 @@ ${schemaString}`);
|
|
|
8646
8646
|
const model = this.config.model || "default";
|
|
8647
8647
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8648
8648
|
try {
|
|
8649
|
-
const
|
|
8650
|
-
const
|
|
8651
|
-
const
|
|
8649
|
+
const fs23 = require("fs");
|
|
8650
|
+
const path27 = require("path");
|
|
8651
|
+
const os2 = require("os");
|
|
8652
8652
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8653
8653
|
const debugData = {
|
|
8654
8654
|
timestamp,
|
|
@@ -8720,19 +8720,19 @@ ${"=".repeat(60)}
|
|
|
8720
8720
|
`;
|
|
8721
8721
|
readableVersion += `${"=".repeat(60)}
|
|
8722
8722
|
`;
|
|
8723
|
-
const tempDir =
|
|
8724
|
-
const promptFile =
|
|
8725
|
-
|
|
8723
|
+
const tempDir = os2.tmpdir();
|
|
8724
|
+
const promptFile = path27.join(tempDir, `visor-prompt-${timestamp}.txt`);
|
|
8725
|
+
fs23.writeFileSync(promptFile, prompt, "utf-8");
|
|
8726
8726
|
log(`
|
|
8727
8727
|
\u{1F4BE} Prompt saved to: ${promptFile}`);
|
|
8728
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8728
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
|
|
8729
8729
|
try {
|
|
8730
|
-
const base =
|
|
8730
|
+
const base = path27.join(
|
|
8731
8731
|
debugArtifactsDir,
|
|
8732
8732
|
`prompt-${_checkName || "unknown"}-${timestamp}`
|
|
8733
8733
|
);
|
|
8734
|
-
|
|
8735
|
-
|
|
8734
|
+
fs23.writeFileSync(base + ".json", debugJson, "utf-8");
|
|
8735
|
+
fs23.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
|
|
8736
8736
|
log(`
|
|
8737
8737
|
\u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
|
|
8738
8738
|
} catch {
|
|
@@ -8777,8 +8777,8 @@ $ ${cliCommand}
|
|
|
8777
8777
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
8778
8778
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8779
8779
|
try {
|
|
8780
|
-
const
|
|
8781
|
-
const
|
|
8780
|
+
const fs23 = require("fs");
|
|
8781
|
+
const path27 = require("path");
|
|
8782
8782
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8783
8783
|
const agentAny = agent;
|
|
8784
8784
|
let fullHistory = [];
|
|
@@ -8789,8 +8789,8 @@ $ ${cliCommand}
|
|
|
8789
8789
|
} else if (agentAny._messages) {
|
|
8790
8790
|
fullHistory = agentAny._messages;
|
|
8791
8791
|
}
|
|
8792
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8793
|
-
const sessionBase =
|
|
8792
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
|
|
8793
|
+
const sessionBase = path27.join(
|
|
8794
8794
|
debugArtifactsDir,
|
|
8795
8795
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
8796
8796
|
);
|
|
@@ -8802,7 +8802,7 @@ $ ${cliCommand}
|
|
|
8802
8802
|
schema: effectiveSchema,
|
|
8803
8803
|
totalMessages: fullHistory.length
|
|
8804
8804
|
};
|
|
8805
|
-
|
|
8805
|
+
fs23.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
8806
8806
|
let readable = `=============================================================
|
|
8807
8807
|
`;
|
|
8808
8808
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -8829,7 +8829,7 @@ ${"=".repeat(60)}
|
|
|
8829
8829
|
`;
|
|
8830
8830
|
readable += content + "\n";
|
|
8831
8831
|
});
|
|
8832
|
-
|
|
8832
|
+
fs23.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
8833
8833
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
8834
8834
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
8835
8835
|
} catch (error) {
|
|
@@ -8838,11 +8838,11 @@ ${"=".repeat(60)}
|
|
|
8838
8838
|
}
|
|
8839
8839
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8840
8840
|
try {
|
|
8841
|
-
const
|
|
8842
|
-
const
|
|
8841
|
+
const fs23 = require("fs");
|
|
8842
|
+
const path27 = require("path");
|
|
8843
8843
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8844
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8845
|
-
const responseFile =
|
|
8844
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
|
|
8845
|
+
const responseFile = path27.join(
|
|
8846
8846
|
debugArtifactsDir,
|
|
8847
8847
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8848
8848
|
);
|
|
@@ -8875,7 +8875,7 @@ ${"=".repeat(60)}
|
|
|
8875
8875
|
`;
|
|
8876
8876
|
responseContent += `${"=".repeat(60)}
|
|
8877
8877
|
`;
|
|
8878
|
-
|
|
8878
|
+
fs23.writeFileSync(responseFile, responseContent, "utf-8");
|
|
8879
8879
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
8880
8880
|
} catch (error) {
|
|
8881
8881
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -8893,9 +8893,9 @@ ${"=".repeat(60)}
|
|
|
8893
8893
|
await telemetry.shutdown();
|
|
8894
8894
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
|
|
8895
8895
|
if (process.env.GITHUB_ACTIONS) {
|
|
8896
|
-
const
|
|
8897
|
-
if (
|
|
8898
|
-
const stats =
|
|
8896
|
+
const fs23 = require("fs");
|
|
8897
|
+
if (fs23.existsSync(traceFilePath)) {
|
|
8898
|
+
const stats = fs23.statSync(traceFilePath);
|
|
8899
8899
|
console.log(
|
|
8900
8900
|
`::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
|
|
8901
8901
|
);
|
|
@@ -8933,8 +8933,8 @@ ${"=".repeat(60)}
|
|
|
8933
8933
|
* Load schema content from schema files or inline definitions
|
|
8934
8934
|
*/
|
|
8935
8935
|
async loadSchemaContent(schema) {
|
|
8936
|
-
const
|
|
8937
|
-
const
|
|
8936
|
+
const fs23 = require("fs").promises;
|
|
8937
|
+
const path27 = require("path");
|
|
8938
8938
|
if (typeof schema === "object" && schema !== null) {
|
|
8939
8939
|
log("\u{1F4CB} Using inline schema object from configuration");
|
|
8940
8940
|
return JSON.stringify(schema);
|
|
@@ -8947,14 +8947,14 @@ ${"=".repeat(60)}
|
|
|
8947
8947
|
}
|
|
8948
8948
|
} catch {
|
|
8949
8949
|
}
|
|
8950
|
-
if ((schema.startsWith("./") || schema.includes(".json")) && !
|
|
8950
|
+
if ((schema.startsWith("./") || schema.includes(".json")) && !path27.isAbsolute(schema)) {
|
|
8951
8951
|
if (schema.includes("..") || schema.includes("\0")) {
|
|
8952
8952
|
throw new Error("Invalid schema path: path traversal not allowed");
|
|
8953
8953
|
}
|
|
8954
8954
|
try {
|
|
8955
|
-
const schemaPath =
|
|
8955
|
+
const schemaPath = path27.resolve(process.cwd(), schema);
|
|
8956
8956
|
log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
|
|
8957
|
-
const schemaContent = await
|
|
8957
|
+
const schemaContent = await fs23.readFile(schemaPath, "utf-8");
|
|
8958
8958
|
return schemaContent.trim();
|
|
8959
8959
|
} catch (error) {
|
|
8960
8960
|
throw new Error(
|
|
@@ -8968,22 +8968,22 @@ ${"=".repeat(60)}
|
|
|
8968
8968
|
}
|
|
8969
8969
|
const candidatePaths = [
|
|
8970
8970
|
// GitHub Action bundle location
|
|
8971
|
-
|
|
8971
|
+
path27.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
|
|
8972
8972
|
// Historical fallback when src/output was inadvertently bundled as output1/
|
|
8973
|
-
|
|
8973
|
+
path27.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
|
|
8974
8974
|
// Local dev (repo root)
|
|
8975
|
-
|
|
8975
|
+
path27.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
|
|
8976
8976
|
];
|
|
8977
8977
|
for (const schemaPath of candidatePaths) {
|
|
8978
8978
|
try {
|
|
8979
|
-
const schemaContent = await
|
|
8979
|
+
const schemaContent = await fs23.readFile(schemaPath, "utf-8");
|
|
8980
8980
|
return schemaContent.trim();
|
|
8981
8981
|
} catch {
|
|
8982
8982
|
}
|
|
8983
8983
|
}
|
|
8984
|
-
const distPath =
|
|
8985
|
-
const distAltPath =
|
|
8986
|
-
const cwdPath =
|
|
8984
|
+
const distPath = path27.join(__dirname, "output", sanitizedSchemaName, "schema.json");
|
|
8985
|
+
const distAltPath = path27.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
|
|
8986
|
+
const cwdPath = path27.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
|
|
8987
8987
|
throw new Error(
|
|
8988
8988
|
`Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
|
|
8989
8989
|
);
|
|
@@ -9228,7 +9228,7 @@ ${"=".repeat(60)}
|
|
|
9228
9228
|
* Generate mock response for testing
|
|
9229
9229
|
*/
|
|
9230
9230
|
async generateMockResponse(_prompt, _checkName, _schema) {
|
|
9231
|
-
await new Promise((
|
|
9231
|
+
await new Promise((resolve15) => setTimeout(resolve15, 500));
|
|
9232
9232
|
const name = (_checkName || "").toLowerCase();
|
|
9233
9233
|
if (name.includes("extract-facts")) {
|
|
9234
9234
|
const arr = Array.from({ length: 6 }, (_, i) => ({
|
|
@@ -9589,7 +9589,7 @@ var init_command_executor = __esm({
|
|
|
9589
9589
|
* Execute command with stdin input
|
|
9590
9590
|
*/
|
|
9591
9591
|
executeWithStdin(command, options) {
|
|
9592
|
-
return new Promise((
|
|
9592
|
+
return new Promise((resolve15, reject) => {
|
|
9593
9593
|
const childProcess = (0, import_child_process2.exec)(
|
|
9594
9594
|
command,
|
|
9595
9595
|
{
|
|
@@ -9601,7 +9601,7 @@ var init_command_executor = __esm({
|
|
|
9601
9601
|
if (error && error.killed && (error.code === "ETIMEDOUT" || error.signal === "SIGTERM")) {
|
|
9602
9602
|
reject(new Error(`Command timed out after ${options.timeout || 3e4}ms`));
|
|
9603
9603
|
} else {
|
|
9604
|
-
|
|
9604
|
+
resolve15({
|
|
9605
9605
|
stdout: stdout || "",
|
|
9606
9606
|
stderr: stderr || "",
|
|
9607
9607
|
exitCode: error ? error.code || 1 : 0
|
|
@@ -17704,17 +17704,17 @@ var init_workflow_check_provider = __esm({
|
|
|
17704
17704
|
* so it can be executed by the state machine as a nested workflow.
|
|
17705
17705
|
*/
|
|
17706
17706
|
async loadWorkflowFromConfigPath(sourcePath, baseDir) {
|
|
17707
|
-
const
|
|
17708
|
-
const
|
|
17707
|
+
const path27 = require("path");
|
|
17708
|
+
const fs23 = require("fs");
|
|
17709
17709
|
const yaml5 = require("js-yaml");
|
|
17710
|
-
const resolved =
|
|
17711
|
-
if (!
|
|
17710
|
+
const resolved = path27.isAbsolute(sourcePath) ? sourcePath : path27.resolve(baseDir, sourcePath);
|
|
17711
|
+
if (!fs23.existsSync(resolved)) {
|
|
17712
17712
|
throw new Error(`Workflow config not found at: ${resolved}`);
|
|
17713
17713
|
}
|
|
17714
|
-
const rawContent =
|
|
17714
|
+
const rawContent = fs23.readFileSync(resolved, "utf8");
|
|
17715
17715
|
const rawData = yaml5.load(rawContent);
|
|
17716
17716
|
if (rawData.imports && Array.isArray(rawData.imports)) {
|
|
17717
|
-
const configDir =
|
|
17717
|
+
const configDir = path27.dirname(resolved);
|
|
17718
17718
|
for (const source of rawData.imports) {
|
|
17719
17719
|
const results = await this.registry.import(source, {
|
|
17720
17720
|
basePath: configDir,
|
|
@@ -17744,8 +17744,8 @@ ${errors}`);
|
|
|
17744
17744
|
if (!steps || Object.keys(steps).length === 0) {
|
|
17745
17745
|
throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
|
|
17746
17746
|
}
|
|
17747
|
-
const id =
|
|
17748
|
-
const name = loaded.name || `Workflow from ${
|
|
17747
|
+
const id = path27.basename(resolved).replace(/\.(ya?ml)$/i, "");
|
|
17748
|
+
const name = loaded.name || `Workflow from ${path27.basename(resolved)}`;
|
|
17749
17749
|
const workflowDef = {
|
|
17750
17750
|
id,
|
|
17751
17751
|
name,
|
|
@@ -18551,8 +18551,8 @@ async function createStoreBackend(storageConfig, haConfig) {
|
|
|
18551
18551
|
case "mssql": {
|
|
18552
18552
|
try {
|
|
18553
18553
|
const loaderPath = "../../enterprise/loader";
|
|
18554
|
-
const { loadEnterpriseStoreBackend
|
|
18555
|
-
return await
|
|
18554
|
+
const { loadEnterpriseStoreBackend } = await import(loaderPath);
|
|
18555
|
+
return await loadEnterpriseStoreBackend(driver, storageConfig, haConfig);
|
|
18556
18556
|
} catch (err) {
|
|
18557
18557
|
const msg = err instanceof Error ? err.message : String(err);
|
|
18558
18558
|
logger.error(`[StoreFactory] Failed to load enterprise ${driver} backend: ${msg}`);
|
|
@@ -21123,7 +21123,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21123
21123
|
* Returns the actual bound port number
|
|
21124
21124
|
*/
|
|
21125
21125
|
async start() {
|
|
21126
|
-
return new Promise((
|
|
21126
|
+
return new Promise((resolve15, reject) => {
|
|
21127
21127
|
try {
|
|
21128
21128
|
this.server = import_http.default.createServer((req, res) => {
|
|
21129
21129
|
this.handleRequest(req, res).catch((error) => {
|
|
@@ -21157,7 +21157,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21157
21157
|
);
|
|
21158
21158
|
}
|
|
21159
21159
|
this.startKeepalive();
|
|
21160
|
-
|
|
21160
|
+
resolve15(this.port);
|
|
21161
21161
|
});
|
|
21162
21162
|
} catch (error) {
|
|
21163
21163
|
reject(error);
|
|
@@ -21220,7 +21220,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21220
21220
|
logger.debug(
|
|
21221
21221
|
`[CustomToolsSSEServer:${this.sessionId}] Grace period before stop: ${waitMs}ms (activeToolCalls=${this.activeToolCalls})`
|
|
21222
21222
|
);
|
|
21223
|
-
await new Promise((
|
|
21223
|
+
await new Promise((resolve15) => setTimeout(resolve15, waitMs));
|
|
21224
21224
|
}
|
|
21225
21225
|
}
|
|
21226
21226
|
if (this.activeToolCalls > 0) {
|
|
@@ -21229,7 +21229,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21229
21229
|
`[CustomToolsSSEServer:${this.sessionId}] Waiting for ${this.activeToolCalls} active tool call(s) before stop`
|
|
21230
21230
|
);
|
|
21231
21231
|
while (this.activeToolCalls > 0 && Date.now() - startedAt < effectiveDrainTimeoutMs) {
|
|
21232
|
-
await new Promise((
|
|
21232
|
+
await new Promise((resolve15) => setTimeout(resolve15, 250));
|
|
21233
21233
|
}
|
|
21234
21234
|
if (this.activeToolCalls > 0) {
|
|
21235
21235
|
logger.warn(
|
|
@@ -21254,21 +21254,21 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21254
21254
|
}
|
|
21255
21255
|
this.connections.clear();
|
|
21256
21256
|
if (this.server) {
|
|
21257
|
-
await new Promise((
|
|
21257
|
+
await new Promise((resolve15, reject) => {
|
|
21258
21258
|
const timeout = setTimeout(() => {
|
|
21259
21259
|
if (this.debug) {
|
|
21260
21260
|
logger.debug(
|
|
21261
21261
|
`[CustomToolsSSEServer:${this.sessionId}] Force closing server after timeout`
|
|
21262
21262
|
);
|
|
21263
21263
|
}
|
|
21264
|
-
this.server?.close(() =>
|
|
21264
|
+
this.server?.close(() => resolve15());
|
|
21265
21265
|
}, 5e3);
|
|
21266
21266
|
this.server.close((error) => {
|
|
21267
21267
|
clearTimeout(timeout);
|
|
21268
21268
|
if (error) {
|
|
21269
21269
|
reject(error);
|
|
21270
21270
|
} else {
|
|
21271
|
-
|
|
21271
|
+
resolve15();
|
|
21272
21272
|
}
|
|
21273
21273
|
});
|
|
21274
21274
|
});
|
|
@@ -21703,7 +21703,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
21703
21703
|
logger.warn(
|
|
21704
21704
|
`[CustomToolsSSEServer:${this.sessionId}] Tool ${toolName} failed (attempt ${attempt + 1}/${retryCount + 1}): ${errorMsg}. Retrying in ${delay}ms`
|
|
21705
21705
|
);
|
|
21706
|
-
await new Promise((
|
|
21706
|
+
await new Promise((resolve15) => setTimeout(resolve15, delay));
|
|
21707
21707
|
attempt++;
|
|
21708
21708
|
}
|
|
21709
21709
|
}
|
|
@@ -22016,9 +22016,9 @@ var init_ai_check_provider = __esm({
|
|
|
22016
22016
|
} else {
|
|
22017
22017
|
resolvedPath = import_path7.default.resolve(process.cwd(), str);
|
|
22018
22018
|
}
|
|
22019
|
-
const
|
|
22019
|
+
const fs23 = require("fs").promises;
|
|
22020
22020
|
try {
|
|
22021
|
-
const stat2 = await
|
|
22021
|
+
const stat2 = await fs23.stat(resolvedPath);
|
|
22022
22022
|
return stat2.isFile();
|
|
22023
22023
|
} catch {
|
|
22024
22024
|
return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
|
|
@@ -27980,14 +27980,14 @@ var require_util = __commonJS({
|
|
|
27980
27980
|
}
|
|
27981
27981
|
const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
|
|
27982
27982
|
let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
|
|
27983
|
-
let
|
|
27983
|
+
let path27 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
|
|
27984
27984
|
if (origin.endsWith("/")) {
|
|
27985
27985
|
origin = origin.substring(0, origin.length - 1);
|
|
27986
27986
|
}
|
|
27987
|
-
if (
|
|
27988
|
-
|
|
27987
|
+
if (path27 && !path27.startsWith("/")) {
|
|
27988
|
+
path27 = `/${path27}`;
|
|
27989
27989
|
}
|
|
27990
|
-
url = new URL(origin +
|
|
27990
|
+
url = new URL(origin + path27);
|
|
27991
27991
|
}
|
|
27992
27992
|
return url;
|
|
27993
27993
|
}
|
|
@@ -29601,20 +29601,20 @@ var require_parseParams = __commonJS({
|
|
|
29601
29601
|
var require_basename = __commonJS({
|
|
29602
29602
|
"node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
|
|
29603
29603
|
"use strict";
|
|
29604
|
-
module2.exports = function basename4(
|
|
29605
|
-
if (typeof
|
|
29604
|
+
module2.exports = function basename4(path27) {
|
|
29605
|
+
if (typeof path27 !== "string") {
|
|
29606
29606
|
return "";
|
|
29607
29607
|
}
|
|
29608
|
-
for (var i =
|
|
29609
|
-
switch (
|
|
29608
|
+
for (var i = path27.length - 1; i >= 0; --i) {
|
|
29609
|
+
switch (path27.charCodeAt(i)) {
|
|
29610
29610
|
case 47:
|
|
29611
29611
|
// '/'
|
|
29612
29612
|
case 92:
|
|
29613
|
-
|
|
29614
|
-
return
|
|
29613
|
+
path27 = path27.slice(i + 1);
|
|
29614
|
+
return path27 === ".." || path27 === "." ? "" : path27;
|
|
29615
29615
|
}
|
|
29616
29616
|
}
|
|
29617
|
-
return
|
|
29617
|
+
return path27 === ".." || path27 === "." ? "" : path27;
|
|
29618
29618
|
};
|
|
29619
29619
|
}
|
|
29620
29620
|
});
|
|
@@ -30618,11 +30618,11 @@ var require_util2 = __commonJS({
|
|
|
30618
30618
|
var assert = require("assert");
|
|
30619
30619
|
var { isUint8Array } = require("util/types");
|
|
30620
30620
|
var supportedHashes = [];
|
|
30621
|
-
var
|
|
30621
|
+
var crypto2;
|
|
30622
30622
|
try {
|
|
30623
|
-
|
|
30623
|
+
crypto2 = require("crypto");
|
|
30624
30624
|
const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
|
|
30625
|
-
supportedHashes =
|
|
30625
|
+
supportedHashes = crypto2.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
|
|
30626
30626
|
} catch {
|
|
30627
30627
|
}
|
|
30628
30628
|
function responseURL(response) {
|
|
@@ -30899,7 +30899,7 @@ var require_util2 = __commonJS({
|
|
|
30899
30899
|
}
|
|
30900
30900
|
}
|
|
30901
30901
|
function bytesMatch(bytes, metadataList) {
|
|
30902
|
-
if (
|
|
30902
|
+
if (crypto2 === void 0) {
|
|
30903
30903
|
return true;
|
|
30904
30904
|
}
|
|
30905
30905
|
const parsedMetadata = parseMetadata(metadataList);
|
|
@@ -30914,7 +30914,7 @@ var require_util2 = __commonJS({
|
|
|
30914
30914
|
for (const item of metadata) {
|
|
30915
30915
|
const algorithm = item.algo;
|
|
30916
30916
|
const expectedValue = item.hash;
|
|
30917
|
-
let actualValue =
|
|
30917
|
+
let actualValue = crypto2.createHash(algorithm).update(bytes).digest("base64");
|
|
30918
30918
|
if (actualValue[actualValue.length - 1] === "=") {
|
|
30919
30919
|
if (actualValue[actualValue.length - 2] === "=") {
|
|
30920
30920
|
actualValue = actualValue.slice(0, -2);
|
|
@@ -31007,8 +31007,8 @@ var require_util2 = __commonJS({
|
|
|
31007
31007
|
function createDeferredPromise() {
|
|
31008
31008
|
let res;
|
|
31009
31009
|
let rej;
|
|
31010
|
-
const promise = new Promise((
|
|
31011
|
-
res =
|
|
31010
|
+
const promise = new Promise((resolve15, reject) => {
|
|
31011
|
+
res = resolve15;
|
|
31012
31012
|
rej = reject;
|
|
31013
31013
|
});
|
|
31014
31014
|
return { promise, resolve: res, reject: rej };
|
|
@@ -32261,8 +32261,8 @@ var require_body = __commonJS({
|
|
|
32261
32261
|
var { parseMIMEType, serializeAMimeType } = require_dataURL();
|
|
32262
32262
|
var random;
|
|
32263
32263
|
try {
|
|
32264
|
-
const
|
|
32265
|
-
random = (max) =>
|
|
32264
|
+
const crypto2 = require("crypto");
|
|
32265
|
+
random = (max) => crypto2.randomInt(0, max);
|
|
32266
32266
|
} catch {
|
|
32267
32267
|
random = (max) => Math.floor(Math.random(max));
|
|
32268
32268
|
}
|
|
@@ -32513,8 +32513,8 @@ Content-Type: ${value.type || "application/octet-stream"}\r
|
|
|
32513
32513
|
});
|
|
32514
32514
|
}
|
|
32515
32515
|
});
|
|
32516
|
-
const busboyResolve = new Promise((
|
|
32517
|
-
busboy.on("finish",
|
|
32516
|
+
const busboyResolve = new Promise((resolve15, reject) => {
|
|
32517
|
+
busboy.on("finish", resolve15);
|
|
32518
32518
|
busboy.on("error", (err) => reject(new TypeError(err)));
|
|
32519
32519
|
});
|
|
32520
32520
|
if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk);
|
|
@@ -32645,7 +32645,7 @@ var require_request = __commonJS({
|
|
|
32645
32645
|
}
|
|
32646
32646
|
var Request = class _Request {
|
|
32647
32647
|
constructor(origin, {
|
|
32648
|
-
path:
|
|
32648
|
+
path: path27,
|
|
32649
32649
|
method,
|
|
32650
32650
|
body,
|
|
32651
32651
|
headers,
|
|
@@ -32659,11 +32659,11 @@ var require_request = __commonJS({
|
|
|
32659
32659
|
throwOnError,
|
|
32660
32660
|
expectContinue
|
|
32661
32661
|
}, handler) {
|
|
32662
|
-
if (typeof
|
|
32662
|
+
if (typeof path27 !== "string") {
|
|
32663
32663
|
throw new InvalidArgumentError("path must be a string");
|
|
32664
|
-
} else if (
|
|
32664
|
+
} else if (path27[0] !== "/" && !(path27.startsWith("http://") || path27.startsWith("https://")) && method !== "CONNECT") {
|
|
32665
32665
|
throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
|
|
32666
|
-
} else if (invalidPathRegex.exec(
|
|
32666
|
+
} else if (invalidPathRegex.exec(path27) !== null) {
|
|
32667
32667
|
throw new InvalidArgumentError("invalid request path");
|
|
32668
32668
|
}
|
|
32669
32669
|
if (typeof method !== "string") {
|
|
@@ -32726,7 +32726,7 @@ var require_request = __commonJS({
|
|
|
32726
32726
|
this.completed = false;
|
|
32727
32727
|
this.aborted = false;
|
|
32728
32728
|
this.upgrade = upgrade || null;
|
|
32729
|
-
this.path = query ? util.buildURL(
|
|
32729
|
+
this.path = query ? util.buildURL(path27, query) : path27;
|
|
32730
32730
|
this.origin = origin;
|
|
32731
32731
|
this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
|
|
32732
32732
|
this.blocking = blocking == null ? false : blocking;
|
|
@@ -33048,9 +33048,9 @@ var require_dispatcher_base = __commonJS({
|
|
|
33048
33048
|
}
|
|
33049
33049
|
close(callback) {
|
|
33050
33050
|
if (callback === void 0) {
|
|
33051
|
-
return new Promise((
|
|
33051
|
+
return new Promise((resolve15, reject) => {
|
|
33052
33052
|
this.close((err, data) => {
|
|
33053
|
-
return err ? reject(err) :
|
|
33053
|
+
return err ? reject(err) : resolve15(data);
|
|
33054
33054
|
});
|
|
33055
33055
|
});
|
|
33056
33056
|
}
|
|
@@ -33088,12 +33088,12 @@ var require_dispatcher_base = __commonJS({
|
|
|
33088
33088
|
err = null;
|
|
33089
33089
|
}
|
|
33090
33090
|
if (callback === void 0) {
|
|
33091
|
-
return new Promise((
|
|
33091
|
+
return new Promise((resolve15, reject) => {
|
|
33092
33092
|
this.destroy(err, (err2, data) => {
|
|
33093
33093
|
return err2 ? (
|
|
33094
33094
|
/* istanbul ignore next: should never error */
|
|
33095
33095
|
reject(err2)
|
|
33096
|
-
) :
|
|
33096
|
+
) : resolve15(data);
|
|
33097
33097
|
});
|
|
33098
33098
|
});
|
|
33099
33099
|
}
|
|
@@ -33734,9 +33734,9 @@ var require_RedirectHandler = __commonJS({
|
|
|
33734
33734
|
return this.handler.onHeaders(statusCode, headers, resume, statusText);
|
|
33735
33735
|
}
|
|
33736
33736
|
const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
|
|
33737
|
-
const
|
|
33737
|
+
const path27 = search ? `${pathname}${search}` : pathname;
|
|
33738
33738
|
this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
|
|
33739
|
-
this.opts.path =
|
|
33739
|
+
this.opts.path = path27;
|
|
33740
33740
|
this.opts.origin = origin;
|
|
33741
33741
|
this.opts.maxRedirections = 0;
|
|
33742
33742
|
this.opts.query = null;
|
|
@@ -34155,16 +34155,16 @@ var require_client = __commonJS({
|
|
|
34155
34155
|
return this[kNeedDrain] < 2;
|
|
34156
34156
|
}
|
|
34157
34157
|
async [kClose]() {
|
|
34158
|
-
return new Promise((
|
|
34158
|
+
return new Promise((resolve15) => {
|
|
34159
34159
|
if (!this[kSize]) {
|
|
34160
|
-
|
|
34160
|
+
resolve15(null);
|
|
34161
34161
|
} else {
|
|
34162
|
-
this[kClosedResolve] =
|
|
34162
|
+
this[kClosedResolve] = resolve15;
|
|
34163
34163
|
}
|
|
34164
34164
|
});
|
|
34165
34165
|
}
|
|
34166
34166
|
async [kDestroy](err) {
|
|
34167
|
-
return new Promise((
|
|
34167
|
+
return new Promise((resolve15) => {
|
|
34168
34168
|
const requests = this[kQueue].splice(this[kPendingIdx]);
|
|
34169
34169
|
for (let i = 0; i < requests.length; i++) {
|
|
34170
34170
|
const request = requests[i];
|
|
@@ -34175,7 +34175,7 @@ var require_client = __commonJS({
|
|
|
34175
34175
|
this[kClosedResolve]();
|
|
34176
34176
|
this[kClosedResolve] = null;
|
|
34177
34177
|
}
|
|
34178
|
-
|
|
34178
|
+
resolve15();
|
|
34179
34179
|
};
|
|
34180
34180
|
if (this[kHTTP2Session] != null) {
|
|
34181
34181
|
util.destroy(this[kHTTP2Session], err);
|
|
@@ -34755,7 +34755,7 @@ var require_client = __commonJS({
|
|
|
34755
34755
|
});
|
|
34756
34756
|
}
|
|
34757
34757
|
try {
|
|
34758
|
-
const socket = await new Promise((
|
|
34758
|
+
const socket = await new Promise((resolve15, reject) => {
|
|
34759
34759
|
client[kConnector]({
|
|
34760
34760
|
host,
|
|
34761
34761
|
hostname,
|
|
@@ -34767,7 +34767,7 @@ var require_client = __commonJS({
|
|
|
34767
34767
|
if (err) {
|
|
34768
34768
|
reject(err);
|
|
34769
34769
|
} else {
|
|
34770
|
-
|
|
34770
|
+
resolve15(socket2);
|
|
34771
34771
|
}
|
|
34772
34772
|
});
|
|
34773
34773
|
});
|
|
@@ -34978,7 +34978,7 @@ var require_client = __commonJS({
|
|
|
34978
34978
|
writeH2(client, client[kHTTP2Session], request);
|
|
34979
34979
|
return;
|
|
34980
34980
|
}
|
|
34981
|
-
const { body, method, path:
|
|
34981
|
+
const { body, method, path: path27, host, upgrade, headers, blocking, reset } = request;
|
|
34982
34982
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
34983
34983
|
if (body && typeof body.read === "function") {
|
|
34984
34984
|
body.read(0);
|
|
@@ -35028,7 +35028,7 @@ var require_client = __commonJS({
|
|
|
35028
35028
|
if (blocking) {
|
|
35029
35029
|
socket[kBlocking] = true;
|
|
35030
35030
|
}
|
|
35031
|
-
let header = `${method} ${
|
|
35031
|
+
let header = `${method} ${path27} HTTP/1.1\r
|
|
35032
35032
|
`;
|
|
35033
35033
|
if (typeof host === "string") {
|
|
35034
35034
|
header += `host: ${host}\r
|
|
@@ -35091,7 +35091,7 @@ upgrade: ${upgrade}\r
|
|
|
35091
35091
|
return true;
|
|
35092
35092
|
}
|
|
35093
35093
|
function writeH2(client, session, request) {
|
|
35094
|
-
const { body, method, path:
|
|
35094
|
+
const { body, method, path: path27, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
|
|
35095
35095
|
let headers;
|
|
35096
35096
|
if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim());
|
|
35097
35097
|
else headers = reqHeaders;
|
|
@@ -35134,7 +35134,7 @@ upgrade: ${upgrade}\r
|
|
|
35134
35134
|
});
|
|
35135
35135
|
return true;
|
|
35136
35136
|
}
|
|
35137
|
-
headers[HTTP2_HEADER_PATH] =
|
|
35137
|
+
headers[HTTP2_HEADER_PATH] = path27;
|
|
35138
35138
|
headers[HTTP2_HEADER_SCHEME] = "https";
|
|
35139
35139
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
35140
35140
|
if (body && typeof body.read === "function") {
|
|
@@ -35391,12 +35391,12 @@ upgrade: ${upgrade}\r
|
|
|
35391
35391
|
cb();
|
|
35392
35392
|
}
|
|
35393
35393
|
}
|
|
35394
|
-
const waitForDrain = () => new Promise((
|
|
35394
|
+
const waitForDrain = () => new Promise((resolve15, reject) => {
|
|
35395
35395
|
assert(callback === null);
|
|
35396
35396
|
if (socket[kError]) {
|
|
35397
35397
|
reject(socket[kError]);
|
|
35398
35398
|
} else {
|
|
35399
|
-
callback =
|
|
35399
|
+
callback = resolve15;
|
|
35400
35400
|
}
|
|
35401
35401
|
});
|
|
35402
35402
|
if (client[kHTTPConnVersion] === "h2") {
|
|
@@ -35742,8 +35742,8 @@ var require_pool_base = __commonJS({
|
|
|
35742
35742
|
if (this[kQueue].isEmpty()) {
|
|
35743
35743
|
return Promise.all(this[kClients].map((c) => c.close()));
|
|
35744
35744
|
} else {
|
|
35745
|
-
return new Promise((
|
|
35746
|
-
this[kClosedResolve] =
|
|
35745
|
+
return new Promise((resolve15) => {
|
|
35746
|
+
this[kClosedResolve] = resolve15;
|
|
35747
35747
|
});
|
|
35748
35748
|
}
|
|
35749
35749
|
}
|
|
@@ -36321,7 +36321,7 @@ var require_readable = __commonJS({
|
|
|
36321
36321
|
if (this.closed) {
|
|
36322
36322
|
return Promise.resolve(null);
|
|
36323
36323
|
}
|
|
36324
|
-
return new Promise((
|
|
36324
|
+
return new Promise((resolve15, reject) => {
|
|
36325
36325
|
const signalListenerCleanup = signal ? util.addAbortListener(signal, () => {
|
|
36326
36326
|
this.destroy();
|
|
36327
36327
|
}) : noop;
|
|
@@ -36330,7 +36330,7 @@ var require_readable = __commonJS({
|
|
|
36330
36330
|
if (signal && signal.aborted) {
|
|
36331
36331
|
reject(signal.reason || Object.assign(new Error("The operation was aborted"), { name: "AbortError" }));
|
|
36332
36332
|
} else {
|
|
36333
|
-
|
|
36333
|
+
resolve15(null);
|
|
36334
36334
|
}
|
|
36335
36335
|
}).on("error", noop).on("data", function(chunk) {
|
|
36336
36336
|
limit -= chunk.length;
|
|
@@ -36352,11 +36352,11 @@ var require_readable = __commonJS({
|
|
|
36352
36352
|
throw new TypeError("unusable");
|
|
36353
36353
|
}
|
|
36354
36354
|
assert(!stream[kConsume]);
|
|
36355
|
-
return new Promise((
|
|
36355
|
+
return new Promise((resolve15, reject) => {
|
|
36356
36356
|
stream[kConsume] = {
|
|
36357
36357
|
type,
|
|
36358
36358
|
stream,
|
|
36359
|
-
resolve:
|
|
36359
|
+
resolve: resolve15,
|
|
36360
36360
|
reject,
|
|
36361
36361
|
length: 0,
|
|
36362
36362
|
body: []
|
|
@@ -36391,12 +36391,12 @@ var require_readable = __commonJS({
|
|
|
36391
36391
|
}
|
|
36392
36392
|
}
|
|
36393
36393
|
function consumeEnd(consume2) {
|
|
36394
|
-
const { type, body, resolve:
|
|
36394
|
+
const { type, body, resolve: resolve15, stream, length } = consume2;
|
|
36395
36395
|
try {
|
|
36396
36396
|
if (type === "text") {
|
|
36397
|
-
|
|
36397
|
+
resolve15(toUSVString(Buffer.concat(body)));
|
|
36398
36398
|
} else if (type === "json") {
|
|
36399
|
-
|
|
36399
|
+
resolve15(JSON.parse(Buffer.concat(body)));
|
|
36400
36400
|
} else if (type === "arrayBuffer") {
|
|
36401
36401
|
const dst = new Uint8Array(length);
|
|
36402
36402
|
let pos = 0;
|
|
@@ -36404,12 +36404,12 @@ var require_readable = __commonJS({
|
|
|
36404
36404
|
dst.set(buf, pos);
|
|
36405
36405
|
pos += buf.byteLength;
|
|
36406
36406
|
}
|
|
36407
|
-
|
|
36407
|
+
resolve15(dst.buffer);
|
|
36408
36408
|
} else if (type === "blob") {
|
|
36409
36409
|
if (!Blob2) {
|
|
36410
36410
|
Blob2 = require("buffer").Blob;
|
|
36411
36411
|
}
|
|
36412
|
-
|
|
36412
|
+
resolve15(new Blob2(body, { type: stream[kContentType] }));
|
|
36413
36413
|
}
|
|
36414
36414
|
consumeFinish(consume2);
|
|
36415
36415
|
} catch (err) {
|
|
@@ -36666,9 +36666,9 @@ var require_api_request = __commonJS({
|
|
|
36666
36666
|
};
|
|
36667
36667
|
function request(opts, callback) {
|
|
36668
36668
|
if (callback === void 0) {
|
|
36669
|
-
return new Promise((
|
|
36669
|
+
return new Promise((resolve15, reject) => {
|
|
36670
36670
|
request.call(this, opts, (err, data) => {
|
|
36671
|
-
return err ? reject(err) :
|
|
36671
|
+
return err ? reject(err) : resolve15(data);
|
|
36672
36672
|
});
|
|
36673
36673
|
});
|
|
36674
36674
|
}
|
|
@@ -36841,9 +36841,9 @@ var require_api_stream = __commonJS({
|
|
|
36841
36841
|
};
|
|
36842
36842
|
function stream(opts, factory, callback) {
|
|
36843
36843
|
if (callback === void 0) {
|
|
36844
|
-
return new Promise((
|
|
36844
|
+
return new Promise((resolve15, reject) => {
|
|
36845
36845
|
stream.call(this, opts, factory, (err, data) => {
|
|
36846
|
-
return err ? reject(err) :
|
|
36846
|
+
return err ? reject(err) : resolve15(data);
|
|
36847
36847
|
});
|
|
36848
36848
|
});
|
|
36849
36849
|
}
|
|
@@ -37124,9 +37124,9 @@ var require_api_upgrade = __commonJS({
|
|
|
37124
37124
|
};
|
|
37125
37125
|
function upgrade(opts, callback) {
|
|
37126
37126
|
if (callback === void 0) {
|
|
37127
|
-
return new Promise((
|
|
37127
|
+
return new Promise((resolve15, reject) => {
|
|
37128
37128
|
upgrade.call(this, opts, (err, data) => {
|
|
37129
|
-
return err ? reject(err) :
|
|
37129
|
+
return err ? reject(err) : resolve15(data);
|
|
37130
37130
|
});
|
|
37131
37131
|
});
|
|
37132
37132
|
}
|
|
@@ -37215,9 +37215,9 @@ var require_api_connect = __commonJS({
|
|
|
37215
37215
|
};
|
|
37216
37216
|
function connect(opts, callback) {
|
|
37217
37217
|
if (callback === void 0) {
|
|
37218
|
-
return new Promise((
|
|
37218
|
+
return new Promise((resolve15, reject) => {
|
|
37219
37219
|
connect.call(this, opts, (err, data) => {
|
|
37220
|
-
return err ? reject(err) :
|
|
37220
|
+
return err ? reject(err) : resolve15(data);
|
|
37221
37221
|
});
|
|
37222
37222
|
});
|
|
37223
37223
|
}
|
|
@@ -37377,20 +37377,20 @@ var require_mock_utils = __commonJS({
|
|
|
37377
37377
|
}
|
|
37378
37378
|
return true;
|
|
37379
37379
|
}
|
|
37380
|
-
function safeUrl(
|
|
37381
|
-
if (typeof
|
|
37382
|
-
return
|
|
37380
|
+
function safeUrl(path27) {
|
|
37381
|
+
if (typeof path27 !== "string") {
|
|
37382
|
+
return path27;
|
|
37383
37383
|
}
|
|
37384
|
-
const pathSegments =
|
|
37384
|
+
const pathSegments = path27.split("?");
|
|
37385
37385
|
if (pathSegments.length !== 2) {
|
|
37386
|
-
return
|
|
37386
|
+
return path27;
|
|
37387
37387
|
}
|
|
37388
37388
|
const qp = new URLSearchParams(pathSegments.pop());
|
|
37389
37389
|
qp.sort();
|
|
37390
37390
|
return [...pathSegments, qp.toString()].join("?");
|
|
37391
37391
|
}
|
|
37392
|
-
function matchKey(mockDispatch2, { path:
|
|
37393
|
-
const pathMatch = matchValue(mockDispatch2.path,
|
|
37392
|
+
function matchKey(mockDispatch2, { path: path27, method, body, headers }) {
|
|
37393
|
+
const pathMatch = matchValue(mockDispatch2.path, path27);
|
|
37394
37394
|
const methodMatch = matchValue(mockDispatch2.method, method);
|
|
37395
37395
|
const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
|
|
37396
37396
|
const headersMatch = matchHeaders(mockDispatch2, headers);
|
|
@@ -37408,7 +37408,7 @@ var require_mock_utils = __commonJS({
|
|
|
37408
37408
|
function getMockDispatch(mockDispatches, key) {
|
|
37409
37409
|
const basePath = key.query ? buildURL(key.path, key.query) : key.path;
|
|
37410
37410
|
const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
|
|
37411
|
-
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path:
|
|
37411
|
+
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path27 }) => matchValue(safeUrl(path27), resolvedPath));
|
|
37412
37412
|
if (matchedMockDispatches.length === 0) {
|
|
37413
37413
|
throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
|
|
37414
37414
|
}
|
|
@@ -37445,9 +37445,9 @@ var require_mock_utils = __commonJS({
|
|
|
37445
37445
|
}
|
|
37446
37446
|
}
|
|
37447
37447
|
function buildKey(opts) {
|
|
37448
|
-
const { path:
|
|
37448
|
+
const { path: path27, method, body, headers, query } = opts;
|
|
37449
37449
|
return {
|
|
37450
|
-
path:
|
|
37450
|
+
path: path27,
|
|
37451
37451
|
method,
|
|
37452
37452
|
body,
|
|
37453
37453
|
headers,
|
|
@@ -37896,10 +37896,10 @@ var require_pending_interceptors_formatter = __commonJS({
|
|
|
37896
37896
|
}
|
|
37897
37897
|
format(pendingInterceptors) {
|
|
37898
37898
|
const withPrettyHeaders = pendingInterceptors.map(
|
|
37899
|
-
({ method, path:
|
|
37899
|
+
({ method, path: path27, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
|
|
37900
37900
|
Method: method,
|
|
37901
37901
|
Origin: origin,
|
|
37902
|
-
Path:
|
|
37902
|
+
Path: path27,
|
|
37903
37903
|
"Status code": statusCode,
|
|
37904
37904
|
Persistent: persist ? "\u2705" : "\u274C",
|
|
37905
37905
|
Invocations: timesInvoked,
|
|
@@ -40840,7 +40840,7 @@ var require_fetch = __commonJS({
|
|
|
40840
40840
|
async function dispatch({ body }) {
|
|
40841
40841
|
const url = requestCurrentURL(request);
|
|
40842
40842
|
const agent = fetchParams.controller.dispatcher;
|
|
40843
|
-
return new Promise((
|
|
40843
|
+
return new Promise((resolve15, reject) => agent.dispatch(
|
|
40844
40844
|
{
|
|
40845
40845
|
path: url.pathname + url.search,
|
|
40846
40846
|
origin: url.origin,
|
|
@@ -40916,7 +40916,7 @@ var require_fetch = __commonJS({
|
|
|
40916
40916
|
}
|
|
40917
40917
|
}
|
|
40918
40918
|
}
|
|
40919
|
-
|
|
40919
|
+
resolve15({
|
|
40920
40920
|
status,
|
|
40921
40921
|
statusText,
|
|
40922
40922
|
headersList: headers[kHeadersList],
|
|
@@ -40959,7 +40959,7 @@ var require_fetch = __commonJS({
|
|
|
40959
40959
|
const val = headersList[n + 1].toString("latin1");
|
|
40960
40960
|
headers[kHeadersList].append(key, val);
|
|
40961
40961
|
}
|
|
40962
|
-
|
|
40962
|
+
resolve15({
|
|
40963
40963
|
status,
|
|
40964
40964
|
statusText: STATUS_CODES[status],
|
|
40965
40965
|
headersList: headers[kHeadersList],
|
|
@@ -42520,8 +42520,8 @@ var require_util6 = __commonJS({
|
|
|
42520
42520
|
}
|
|
42521
42521
|
}
|
|
42522
42522
|
}
|
|
42523
|
-
function validateCookiePath(
|
|
42524
|
-
for (const char of
|
|
42523
|
+
function validateCookiePath(path27) {
|
|
42524
|
+
for (const char of path27) {
|
|
42525
42525
|
const code = char.charCodeAt(0);
|
|
42526
42526
|
if (code < 33 || char === ";") {
|
|
42527
42527
|
throw new Error("Invalid cookie path");
|
|
@@ -43318,9 +43318,9 @@ var require_connection = __commonJS({
|
|
|
43318
43318
|
channels.open = diagnosticsChannel.channel("undici:websocket:open");
|
|
43319
43319
|
channels.close = diagnosticsChannel.channel("undici:websocket:close");
|
|
43320
43320
|
channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error");
|
|
43321
|
-
var
|
|
43321
|
+
var crypto2;
|
|
43322
43322
|
try {
|
|
43323
|
-
|
|
43323
|
+
crypto2 = require("crypto");
|
|
43324
43324
|
} catch {
|
|
43325
43325
|
}
|
|
43326
43326
|
function establishWebSocketConnection(url, protocols, ws, onEstablish, options) {
|
|
@@ -43339,7 +43339,7 @@ var require_connection = __commonJS({
|
|
|
43339
43339
|
const headersList = new Headers(options.headers)[kHeadersList];
|
|
43340
43340
|
request.headersList = headersList;
|
|
43341
43341
|
}
|
|
43342
|
-
const keyValue =
|
|
43342
|
+
const keyValue = crypto2.randomBytes(16).toString("base64");
|
|
43343
43343
|
request.headersList.append("sec-websocket-key", keyValue);
|
|
43344
43344
|
request.headersList.append("sec-websocket-version", "13");
|
|
43345
43345
|
for (const protocol of protocols) {
|
|
@@ -43368,7 +43368,7 @@ var require_connection = __commonJS({
|
|
|
43368
43368
|
return;
|
|
43369
43369
|
}
|
|
43370
43370
|
const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
|
|
43371
|
-
const digest =
|
|
43371
|
+
const digest = crypto2.createHash("sha1").update(keyValue + uid).digest("base64");
|
|
43372
43372
|
if (secWSAccept !== digest) {
|
|
43373
43373
|
failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
|
|
43374
43374
|
return;
|
|
@@ -43448,9 +43448,9 @@ var require_frame = __commonJS({
|
|
|
43448
43448
|
"node_modules/undici/lib/websocket/frame.js"(exports2, module2) {
|
|
43449
43449
|
"use strict";
|
|
43450
43450
|
var { maxUnsigned16Bit } = require_constants5();
|
|
43451
|
-
var
|
|
43451
|
+
var crypto2;
|
|
43452
43452
|
try {
|
|
43453
|
-
|
|
43453
|
+
crypto2 = require("crypto");
|
|
43454
43454
|
} catch {
|
|
43455
43455
|
}
|
|
43456
43456
|
var WebsocketFrameSend = class {
|
|
@@ -43459,7 +43459,7 @@ var require_frame = __commonJS({
|
|
|
43459
43459
|
*/
|
|
43460
43460
|
constructor(data) {
|
|
43461
43461
|
this.frameData = data;
|
|
43462
|
-
this.maskKey =
|
|
43462
|
+
this.maskKey = crypto2.randomBytes(4);
|
|
43463
43463
|
}
|
|
43464
43464
|
createFrame(opcode) {
|
|
43465
43465
|
const bodyLength = this.frameData?.byteLength ?? 0;
|
|
@@ -44201,11 +44201,11 @@ var require_undici = __commonJS({
|
|
|
44201
44201
|
if (typeof opts.path !== "string") {
|
|
44202
44202
|
throw new InvalidArgumentError("invalid opts.path");
|
|
44203
44203
|
}
|
|
44204
|
-
let
|
|
44204
|
+
let path27 = opts.path;
|
|
44205
44205
|
if (!opts.path.startsWith("/")) {
|
|
44206
|
-
|
|
44206
|
+
path27 = `/${path27}`;
|
|
44207
44207
|
}
|
|
44208
|
-
url = new URL(util.parseOrigin(url).origin +
|
|
44208
|
+
url = new URL(util.parseOrigin(url).origin + path27);
|
|
44209
44209
|
} else {
|
|
44210
44210
|
if (!opts) {
|
|
44211
44211
|
opts = typeof url === "object" ? url : {};
|
|
@@ -44754,7 +44754,7 @@ var init_mcp_check_provider = __esm({
|
|
|
44754
44754
|
logger.warn(
|
|
44755
44755
|
`MCP ${transportName} failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${error instanceof Error ? error.message : String(error)}`
|
|
44756
44756
|
);
|
|
44757
|
-
await new Promise((
|
|
44757
|
+
await new Promise((resolve15) => setTimeout(resolve15, delay));
|
|
44758
44758
|
attempt += 1;
|
|
44759
44759
|
} finally {
|
|
44760
44760
|
try {
|
|
@@ -45036,7 +45036,7 @@ async function acquirePromptLock() {
|
|
|
45036
45036
|
activePrompt = true;
|
|
45037
45037
|
return;
|
|
45038
45038
|
}
|
|
45039
|
-
await new Promise((
|
|
45039
|
+
await new Promise((resolve15) => waiters.push(resolve15));
|
|
45040
45040
|
activePrompt = true;
|
|
45041
45041
|
}
|
|
45042
45042
|
function releasePromptLock() {
|
|
@@ -45046,7 +45046,7 @@ function releasePromptLock() {
|
|
|
45046
45046
|
}
|
|
45047
45047
|
async function interactivePrompt(options) {
|
|
45048
45048
|
await acquirePromptLock();
|
|
45049
|
-
return new Promise((
|
|
45049
|
+
return new Promise((resolve15, reject) => {
|
|
45050
45050
|
const dbg = process.env.VISOR_DEBUG === "true";
|
|
45051
45051
|
try {
|
|
45052
45052
|
if (dbg) {
|
|
@@ -45133,12 +45133,12 @@ async function interactivePrompt(options) {
|
|
|
45133
45133
|
};
|
|
45134
45134
|
const finish = (value) => {
|
|
45135
45135
|
cleanup();
|
|
45136
|
-
|
|
45136
|
+
resolve15(value);
|
|
45137
45137
|
};
|
|
45138
45138
|
if (options.timeout && options.timeout > 0) {
|
|
45139
45139
|
timeoutId = setTimeout(() => {
|
|
45140
45140
|
cleanup();
|
|
45141
|
-
if (defaultValue !== void 0) return
|
|
45141
|
+
if (defaultValue !== void 0) return resolve15(defaultValue);
|
|
45142
45142
|
return reject(new Error("Input timeout"));
|
|
45143
45143
|
}, options.timeout);
|
|
45144
45144
|
}
|
|
@@ -45270,7 +45270,7 @@ async function interactivePrompt(options) {
|
|
|
45270
45270
|
});
|
|
45271
45271
|
}
|
|
45272
45272
|
async function simplePrompt(prompt) {
|
|
45273
|
-
return new Promise((
|
|
45273
|
+
return new Promise((resolve15) => {
|
|
45274
45274
|
const rl = readline.createInterface({
|
|
45275
45275
|
input: process.stdin,
|
|
45276
45276
|
output: process.stdout
|
|
@@ -45286,7 +45286,7 @@ async function simplePrompt(prompt) {
|
|
|
45286
45286
|
rl.question(`${prompt}
|
|
45287
45287
|
> `, (answer) => {
|
|
45288
45288
|
rl.close();
|
|
45289
|
-
|
|
45289
|
+
resolve15(answer.trim());
|
|
45290
45290
|
});
|
|
45291
45291
|
});
|
|
45292
45292
|
}
|
|
@@ -45454,7 +45454,7 @@ function isStdinAvailable() {
|
|
|
45454
45454
|
return !process.stdin.isTTY;
|
|
45455
45455
|
}
|
|
45456
45456
|
async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
45457
|
-
return new Promise((
|
|
45457
|
+
return new Promise((resolve15, reject) => {
|
|
45458
45458
|
let data = "";
|
|
45459
45459
|
let timeoutId;
|
|
45460
45460
|
if (timeout) {
|
|
@@ -45481,7 +45481,7 @@ async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
|
45481
45481
|
};
|
|
45482
45482
|
const onEnd = () => {
|
|
45483
45483
|
cleanup();
|
|
45484
|
-
|
|
45484
|
+
resolve15(data.trim());
|
|
45485
45485
|
};
|
|
45486
45486
|
const onError = (err) => {
|
|
45487
45487
|
cleanup();
|
|
@@ -49607,23 +49607,23 @@ __export(renderer_schema_exports, {
|
|
|
49607
49607
|
});
|
|
49608
49608
|
async function loadRendererSchema(name) {
|
|
49609
49609
|
try {
|
|
49610
|
-
const
|
|
49611
|
-
const
|
|
49610
|
+
const fs23 = await import("fs/promises");
|
|
49611
|
+
const path27 = await import("path");
|
|
49612
49612
|
const sanitized = String(name).replace(/[^a-zA-Z0-9-]/g, "");
|
|
49613
49613
|
if (!sanitized) return void 0;
|
|
49614
49614
|
const candidates = [
|
|
49615
49615
|
// When bundled with ncc, __dirname is dist/ and output/ is at dist/output/
|
|
49616
|
-
|
|
49616
|
+
path27.join(__dirname, "output", sanitized, "schema.json"),
|
|
49617
49617
|
// When running from source, __dirname is src/state-machine/dispatch/ and output/ is at output/
|
|
49618
|
-
|
|
49618
|
+
path27.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
|
|
49619
49619
|
// When running from a checkout with output/ folder copied to CWD
|
|
49620
|
-
|
|
49620
|
+
path27.join(process.cwd(), "output", sanitized, "schema.json"),
|
|
49621
49621
|
// Fallback: cwd/dist/output/
|
|
49622
|
-
|
|
49622
|
+
path27.join(process.cwd(), "dist", "output", sanitized, "schema.json")
|
|
49623
49623
|
];
|
|
49624
49624
|
for (const p of candidates) {
|
|
49625
49625
|
try {
|
|
49626
|
-
const raw = await
|
|
49626
|
+
const raw = await fs23.readFile(p, "utf-8");
|
|
49627
49627
|
return JSON.parse(raw);
|
|
49628
49628
|
} catch {
|
|
49629
49629
|
}
|
|
@@ -52042,8 +52042,8 @@ function updateStats2(results, state, isForEachIteration = false) {
|
|
|
52042
52042
|
async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
52043
52043
|
try {
|
|
52044
52044
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
52045
|
-
const
|
|
52046
|
-
const
|
|
52045
|
+
const fs23 = await import("fs/promises");
|
|
52046
|
+
const path27 = await import("path");
|
|
52047
52047
|
const schemaRaw = checkConfig.schema || "plain";
|
|
52048
52048
|
const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
|
|
52049
52049
|
let templateContent;
|
|
@@ -52052,27 +52052,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
|
52052
52052
|
logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
|
|
52053
52053
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
52054
52054
|
const file = String(checkConfig.template.file);
|
|
52055
|
-
const resolved =
|
|
52056
|
-
templateContent = await
|
|
52055
|
+
const resolved = path27.resolve(process.cwd(), file);
|
|
52056
|
+
templateContent = await fs23.readFile(resolved, "utf-8");
|
|
52057
52057
|
logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
|
|
52058
52058
|
} else if (schema && schema !== "plain") {
|
|
52059
52059
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
52060
52060
|
if (sanitized) {
|
|
52061
52061
|
const candidatePaths = [
|
|
52062
|
-
|
|
52062
|
+
path27.join(__dirname, "output", sanitized, "template.liquid"),
|
|
52063
52063
|
// bundled: dist/output/
|
|
52064
|
-
|
|
52064
|
+
path27.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
52065
52065
|
// source (from state-machine/states)
|
|
52066
|
-
|
|
52066
|
+
path27.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
|
|
52067
52067
|
// source (alternate)
|
|
52068
|
-
|
|
52068
|
+
path27.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
52069
52069
|
// fallback: cwd/output/
|
|
52070
|
-
|
|
52070
|
+
path27.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
52071
52071
|
// fallback: cwd/dist/output/
|
|
52072
52072
|
];
|
|
52073
52073
|
for (const p of candidatePaths) {
|
|
52074
52074
|
try {
|
|
52075
|
-
templateContent = await
|
|
52075
|
+
templateContent = await fs23.readFile(p, "utf-8");
|
|
52076
52076
|
if (templateContent) {
|
|
52077
52077
|
logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
|
|
52078
52078
|
break;
|
|
@@ -54212,8 +54212,8 @@ var init_workspace_manager = __esm({
|
|
|
54212
54212
|
);
|
|
54213
54213
|
if (this.cleanupRequested && this.activeOperations === 0) {
|
|
54214
54214
|
logger.debug(`[Workspace] All references released, proceeding with deferred cleanup`);
|
|
54215
|
-
for (const
|
|
54216
|
-
|
|
54215
|
+
for (const resolve15 of this.cleanupResolvers) {
|
|
54216
|
+
resolve15();
|
|
54217
54217
|
}
|
|
54218
54218
|
this.cleanupResolvers = [];
|
|
54219
54219
|
}
|
|
@@ -54370,19 +54370,19 @@ var init_workspace_manager = __esm({
|
|
|
54370
54370
|
);
|
|
54371
54371
|
this.cleanupRequested = true;
|
|
54372
54372
|
await Promise.race([
|
|
54373
|
-
new Promise((
|
|
54373
|
+
new Promise((resolve15) => {
|
|
54374
54374
|
if (this.activeOperations === 0) {
|
|
54375
|
-
|
|
54375
|
+
resolve15();
|
|
54376
54376
|
} else {
|
|
54377
|
-
this.cleanupResolvers.push(
|
|
54377
|
+
this.cleanupResolvers.push(resolve15);
|
|
54378
54378
|
}
|
|
54379
54379
|
}),
|
|
54380
|
-
new Promise((
|
|
54380
|
+
new Promise((resolve15) => {
|
|
54381
54381
|
setTimeout(() => {
|
|
54382
54382
|
logger.warn(
|
|
54383
54383
|
`[Workspace] Cleanup timeout after ${timeout}ms, proceeding anyway (${this.activeOperations} operations still active)`
|
|
54384
54384
|
);
|
|
54385
|
-
|
|
54385
|
+
resolve15();
|
|
54386
54386
|
}, timeout);
|
|
54387
54387
|
})
|
|
54388
54388
|
]);
|
|
@@ -54860,1380 +54860,6 @@ var init_build_engine_context = __esm({
|
|
|
54860
54860
|
}
|
|
54861
54861
|
});
|
|
54862
54862
|
|
|
54863
|
-
// src/policy/default-engine.ts
|
|
54864
|
-
var DefaultPolicyEngine;
|
|
54865
|
-
var init_default_engine = __esm({
|
|
54866
|
-
"src/policy/default-engine.ts"() {
|
|
54867
|
-
"use strict";
|
|
54868
|
-
DefaultPolicyEngine = class {
|
|
54869
|
-
async initialize(_config) {
|
|
54870
|
-
}
|
|
54871
|
-
async evaluateCheckExecution(_checkId, _checkConfig) {
|
|
54872
|
-
return { allowed: true };
|
|
54873
|
-
}
|
|
54874
|
-
async evaluateToolInvocation(_serverName, _methodName, _transport) {
|
|
54875
|
-
return { allowed: true };
|
|
54876
|
-
}
|
|
54877
|
-
async evaluateCapabilities(_checkId, _capabilities) {
|
|
54878
|
-
return { allowed: true };
|
|
54879
|
-
}
|
|
54880
|
-
async shutdown() {
|
|
54881
|
-
}
|
|
54882
|
-
};
|
|
54883
|
-
}
|
|
54884
|
-
});
|
|
54885
|
-
|
|
54886
|
-
// src/enterprise/license/validator.ts
|
|
54887
|
-
var validator_exports = {};
|
|
54888
|
-
__export(validator_exports, {
|
|
54889
|
-
LicenseValidator: () => LicenseValidator
|
|
54890
|
-
});
|
|
54891
|
-
var crypto2, fs21, path25, LicenseValidator;
|
|
54892
|
-
var init_validator = __esm({
|
|
54893
|
-
"src/enterprise/license/validator.ts"() {
|
|
54894
|
-
"use strict";
|
|
54895
|
-
crypto2 = __toESM(require("crypto"));
|
|
54896
|
-
fs21 = __toESM(require("fs"));
|
|
54897
|
-
path25 = __toESM(require("path"));
|
|
54898
|
-
LicenseValidator = class _LicenseValidator {
|
|
54899
|
-
/** Ed25519 public key for license verification (PEM format). */
|
|
54900
|
-
static PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n-----END PUBLIC KEY-----\n";
|
|
54901
|
-
cache = null;
|
|
54902
|
-
static CACHE_TTL = 5 * 60 * 1e3;
|
|
54903
|
-
// 5 minutes
|
|
54904
|
-
static GRACE_PERIOD = 72 * 3600 * 1e3;
|
|
54905
|
-
// 72 hours after expiry
|
|
54906
|
-
/**
|
|
54907
|
-
* Load and validate license from environment or file.
|
|
54908
|
-
*
|
|
54909
|
-
* Resolution order:
|
|
54910
|
-
* 1. VISOR_LICENSE env var (JWT string)
|
|
54911
|
-
* 2. VISOR_LICENSE_FILE env var (path to file)
|
|
54912
|
-
* 3. .visor-license in project root (cwd)
|
|
54913
|
-
* 4. .visor-license in ~/.config/visor/
|
|
54914
|
-
*/
|
|
54915
|
-
async loadAndValidate() {
|
|
54916
|
-
if (this.cache && Date.now() - this.cache.validatedAt < _LicenseValidator.CACHE_TTL) {
|
|
54917
|
-
return this.cache.payload;
|
|
54918
|
-
}
|
|
54919
|
-
const token = this.resolveToken();
|
|
54920
|
-
if (!token) return null;
|
|
54921
|
-
const payload = this.verifyAndDecode(token);
|
|
54922
|
-
if (!payload) return null;
|
|
54923
|
-
this.cache = { payload, validatedAt: Date.now() };
|
|
54924
|
-
return payload;
|
|
54925
|
-
}
|
|
54926
|
-
/** Check if a specific feature is licensed */
|
|
54927
|
-
hasFeature(feature) {
|
|
54928
|
-
if (!this.cache) return false;
|
|
54929
|
-
return this.cache.payload.features.includes(feature);
|
|
54930
|
-
}
|
|
54931
|
-
/** Check if license is valid (with grace period) */
|
|
54932
|
-
isValid() {
|
|
54933
|
-
if (!this.cache) return false;
|
|
54934
|
-
const now = Date.now();
|
|
54935
|
-
const expiryMs = this.cache.payload.exp * 1e3;
|
|
54936
|
-
return now < expiryMs + _LicenseValidator.GRACE_PERIOD;
|
|
54937
|
-
}
|
|
54938
|
-
/** Check if the license is within its grace period (expired but still valid) */
|
|
54939
|
-
isInGracePeriod() {
|
|
54940
|
-
if (!this.cache) return false;
|
|
54941
|
-
const now = Date.now();
|
|
54942
|
-
const expiryMs = this.cache.payload.exp * 1e3;
|
|
54943
|
-
return now >= expiryMs && now < expiryMs + _LicenseValidator.GRACE_PERIOD;
|
|
54944
|
-
}
|
|
54945
|
-
resolveToken() {
|
|
54946
|
-
if (process.env.VISOR_LICENSE) {
|
|
54947
|
-
return process.env.VISOR_LICENSE.trim();
|
|
54948
|
-
}
|
|
54949
|
-
if (process.env.VISOR_LICENSE_FILE) {
|
|
54950
|
-
const resolved = path25.resolve(process.env.VISOR_LICENSE_FILE);
|
|
54951
|
-
const home2 = process.env.HOME || process.env.USERPROFILE || "";
|
|
54952
|
-
const allowedPrefixes = [path25.normalize(process.cwd())];
|
|
54953
|
-
if (home2) allowedPrefixes.push(path25.normalize(path25.join(home2, ".config", "visor")));
|
|
54954
|
-
let realPath;
|
|
54955
|
-
try {
|
|
54956
|
-
realPath = fs21.realpathSync(resolved);
|
|
54957
|
-
} catch {
|
|
54958
|
-
return null;
|
|
54959
|
-
}
|
|
54960
|
-
const isSafe = allowedPrefixes.some(
|
|
54961
|
-
(prefix) => realPath === prefix || realPath.startsWith(prefix + path25.sep)
|
|
54962
|
-
);
|
|
54963
|
-
if (!isSafe) return null;
|
|
54964
|
-
return this.readFile(realPath);
|
|
54965
|
-
}
|
|
54966
|
-
const cwdPath = path25.join(process.cwd(), ".visor-license");
|
|
54967
|
-
const cwdToken = this.readFile(cwdPath);
|
|
54968
|
-
if (cwdToken) return cwdToken;
|
|
54969
|
-
const home = process.env.HOME || process.env.USERPROFILE || "";
|
|
54970
|
-
if (home) {
|
|
54971
|
-
const configPath = path25.join(home, ".config", "visor", ".visor-license");
|
|
54972
|
-
const configToken = this.readFile(configPath);
|
|
54973
|
-
if (configToken) return configToken;
|
|
54974
|
-
}
|
|
54975
|
-
return null;
|
|
54976
|
-
}
|
|
54977
|
-
readFile(filePath) {
|
|
54978
|
-
try {
|
|
54979
|
-
return fs21.readFileSync(filePath, "utf-8").trim();
|
|
54980
|
-
} catch {
|
|
54981
|
-
return null;
|
|
54982
|
-
}
|
|
54983
|
-
}
|
|
54984
|
-
verifyAndDecode(token) {
|
|
54985
|
-
try {
|
|
54986
|
-
const parts = token.split(".");
|
|
54987
|
-
if (parts.length !== 3) return null;
|
|
54988
|
-
const [headerB64, payloadB64, signatureB64] = parts;
|
|
54989
|
-
const header = JSON.parse(Buffer.from(headerB64, "base64url").toString());
|
|
54990
|
-
if (header.alg !== "EdDSA") return null;
|
|
54991
|
-
const data = `${headerB64}.${payloadB64}`;
|
|
54992
|
-
const signature = Buffer.from(signatureB64, "base64url");
|
|
54993
|
-
const publicKey = crypto2.createPublicKey(_LicenseValidator.PUBLIC_KEY);
|
|
54994
|
-
if (publicKey.asymmetricKeyType !== "ed25519") {
|
|
54995
|
-
return null;
|
|
54996
|
-
}
|
|
54997
|
-
const isValid = crypto2.verify(null, Buffer.from(data), publicKey, signature);
|
|
54998
|
-
if (!isValid) return null;
|
|
54999
|
-
const payload = JSON.parse(Buffer.from(payloadB64, "base64url").toString());
|
|
55000
|
-
if (!payload.org || !Array.isArray(payload.features) || typeof payload.exp !== "number" || typeof payload.iat !== "number" || !payload.sub) {
|
|
55001
|
-
return null;
|
|
55002
|
-
}
|
|
55003
|
-
const now = Date.now();
|
|
55004
|
-
const expiryMs = payload.exp * 1e3;
|
|
55005
|
-
if (now >= expiryMs + _LicenseValidator.GRACE_PERIOD) {
|
|
55006
|
-
return null;
|
|
55007
|
-
}
|
|
55008
|
-
return payload;
|
|
55009
|
-
} catch {
|
|
55010
|
-
return null;
|
|
55011
|
-
}
|
|
55012
|
-
}
|
|
55013
|
-
};
|
|
55014
|
-
}
|
|
55015
|
-
});
|
|
55016
|
-
|
|
55017
|
-
// src/enterprise/policy/opa-compiler.ts
|
|
55018
|
-
var fs22, path26, os2, crypto3, import_child_process8, OpaCompiler;
|
|
55019
|
-
var init_opa_compiler = __esm({
|
|
55020
|
-
"src/enterprise/policy/opa-compiler.ts"() {
|
|
55021
|
-
"use strict";
|
|
55022
|
-
fs22 = __toESM(require("fs"));
|
|
55023
|
-
path26 = __toESM(require("path"));
|
|
55024
|
-
os2 = __toESM(require("os"));
|
|
55025
|
-
crypto3 = __toESM(require("crypto"));
|
|
55026
|
-
import_child_process8 = require("child_process");
|
|
55027
|
-
OpaCompiler = class _OpaCompiler {
|
|
55028
|
-
static CACHE_DIR = path26.join(os2.tmpdir(), "visor-opa-cache");
|
|
55029
|
-
/**
|
|
55030
|
-
* Resolve the input paths to WASM bytes.
|
|
55031
|
-
*
|
|
55032
|
-
* Strategy:
|
|
55033
|
-
* 1. If any path is a .wasm file, read it directly
|
|
55034
|
-
* 2. If a directory contains policy.wasm, read it
|
|
55035
|
-
* 3. Otherwise, collect all .rego files and auto-compile via `opa build`
|
|
55036
|
-
*/
|
|
55037
|
-
async resolveWasmBytes(paths) {
|
|
55038
|
-
const regoFiles = [];
|
|
55039
|
-
for (const p of paths) {
|
|
55040
|
-
const resolved = path26.resolve(p);
|
|
55041
|
-
if (path26.normalize(resolved).includes("..")) {
|
|
55042
|
-
throw new Error(`Policy path contains traversal sequences: ${p}`);
|
|
55043
|
-
}
|
|
55044
|
-
if (resolved.endsWith(".wasm") && fs22.existsSync(resolved)) {
|
|
55045
|
-
return fs22.readFileSync(resolved);
|
|
55046
|
-
}
|
|
55047
|
-
if (!fs22.existsSync(resolved)) continue;
|
|
55048
|
-
const stat2 = fs22.statSync(resolved);
|
|
55049
|
-
if (stat2.isDirectory()) {
|
|
55050
|
-
const wasmCandidate = path26.join(resolved, "policy.wasm");
|
|
55051
|
-
if (fs22.existsSync(wasmCandidate)) {
|
|
55052
|
-
return fs22.readFileSync(wasmCandidate);
|
|
55053
|
-
}
|
|
55054
|
-
const files = fs22.readdirSync(resolved);
|
|
55055
|
-
for (const f of files) {
|
|
55056
|
-
if (f.endsWith(".rego")) {
|
|
55057
|
-
regoFiles.push(path26.join(resolved, f));
|
|
55058
|
-
}
|
|
55059
|
-
}
|
|
55060
|
-
} else if (resolved.endsWith(".rego")) {
|
|
55061
|
-
regoFiles.push(resolved);
|
|
55062
|
-
}
|
|
55063
|
-
}
|
|
55064
|
-
if (regoFiles.length === 0) {
|
|
55065
|
-
throw new Error(
|
|
55066
|
-
`OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(", ")}`
|
|
55067
|
-
);
|
|
55068
|
-
}
|
|
55069
|
-
return this.compileRego(regoFiles);
|
|
55070
|
-
}
|
|
55071
|
-
/**
|
|
55072
|
-
* Auto-compile .rego files to a WASM bundle using the `opa` CLI.
|
|
55073
|
-
*
|
|
55074
|
-
* Caches the compiled bundle based on a content hash of all input .rego files
|
|
55075
|
-
* so subsequent runs skip compilation if policies haven't changed.
|
|
55076
|
-
*/
|
|
55077
|
-
compileRego(regoFiles) {
|
|
55078
|
-
try {
|
|
55079
|
-
(0, import_child_process8.execFileSync)("opa", ["version"], { stdio: "pipe" });
|
|
55080
|
-
} catch {
|
|
55081
|
-
throw new Error(
|
|
55082
|
-
"OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\nOr pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz " + regoFiles.join(" ")
|
|
55083
|
-
);
|
|
55084
|
-
}
|
|
55085
|
-
const hash = crypto3.createHash("sha256");
|
|
55086
|
-
for (const f of regoFiles.sort()) {
|
|
55087
|
-
hash.update(fs22.readFileSync(f));
|
|
55088
|
-
hash.update(f);
|
|
55089
|
-
}
|
|
55090
|
-
const cacheKey = hash.digest("hex").slice(0, 16);
|
|
55091
|
-
const cacheDir = _OpaCompiler.CACHE_DIR;
|
|
55092
|
-
const cachedWasm = path26.join(cacheDir, `${cacheKey}.wasm`);
|
|
55093
|
-
if (fs22.existsSync(cachedWasm)) {
|
|
55094
|
-
return fs22.readFileSync(cachedWasm);
|
|
55095
|
-
}
|
|
55096
|
-
fs22.mkdirSync(cacheDir, { recursive: true });
|
|
55097
|
-
const bundleTar = path26.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
|
|
55098
|
-
try {
|
|
55099
|
-
const args = [
|
|
55100
|
-
"build",
|
|
55101
|
-
"-t",
|
|
55102
|
-
"wasm",
|
|
55103
|
-
"-e",
|
|
55104
|
-
"visor",
|
|
55105
|
-
// entrypoint: the visor package tree
|
|
55106
|
-
"-o",
|
|
55107
|
-
bundleTar,
|
|
55108
|
-
...regoFiles
|
|
55109
|
-
];
|
|
55110
|
-
(0, import_child_process8.execFileSync)("opa", args, {
|
|
55111
|
-
stdio: "pipe",
|
|
55112
|
-
timeout: 3e4
|
|
55113
|
-
});
|
|
55114
|
-
} catch (err) {
|
|
55115
|
-
const stderr = err?.stderr?.toString() || "";
|
|
55116
|
-
throw new Error(
|
|
55117
|
-
`Failed to compile .rego files to WASM:
|
|
55118
|
-
${stderr}
|
|
55119
|
-
Ensure your .rego files are valid and the \`opa\` CLI is installed.`
|
|
55120
|
-
);
|
|
55121
|
-
}
|
|
55122
|
-
try {
|
|
55123
|
-
(0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "/policy.wasm"], {
|
|
55124
|
-
stdio: "pipe"
|
|
55125
|
-
});
|
|
55126
|
-
const extractedWasm = path26.join(cacheDir, "policy.wasm");
|
|
55127
|
-
if (fs22.existsSync(extractedWasm)) {
|
|
55128
|
-
fs22.renameSync(extractedWasm, cachedWasm);
|
|
55129
|
-
}
|
|
55130
|
-
} catch {
|
|
55131
|
-
try {
|
|
55132
|
-
(0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "policy.wasm"], {
|
|
55133
|
-
stdio: "pipe"
|
|
55134
|
-
});
|
|
55135
|
-
const extractedWasm = path26.join(cacheDir, "policy.wasm");
|
|
55136
|
-
if (fs22.existsSync(extractedWasm)) {
|
|
55137
|
-
fs22.renameSync(extractedWasm, cachedWasm);
|
|
55138
|
-
}
|
|
55139
|
-
} catch (err2) {
|
|
55140
|
-
throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
|
|
55141
|
-
}
|
|
55142
|
-
}
|
|
55143
|
-
try {
|
|
55144
|
-
fs22.unlinkSync(bundleTar);
|
|
55145
|
-
} catch {
|
|
55146
|
-
}
|
|
55147
|
-
if (!fs22.existsSync(cachedWasm)) {
|
|
55148
|
-
throw new Error("OPA build succeeded but policy.wasm was not found in the bundle");
|
|
55149
|
-
}
|
|
55150
|
-
return fs22.readFileSync(cachedWasm);
|
|
55151
|
-
}
|
|
55152
|
-
};
|
|
55153
|
-
}
|
|
55154
|
-
});
|
|
55155
|
-
|
|
55156
|
-
// src/enterprise/policy/opa-wasm-evaluator.ts
|
|
55157
|
-
var fs23, path27, OpaWasmEvaluator;
|
|
55158
|
-
var init_opa_wasm_evaluator = __esm({
|
|
55159
|
-
"src/enterprise/policy/opa-wasm-evaluator.ts"() {
|
|
55160
|
-
"use strict";
|
|
55161
|
-
fs23 = __toESM(require("fs"));
|
|
55162
|
-
path27 = __toESM(require("path"));
|
|
55163
|
-
init_opa_compiler();
|
|
55164
|
-
OpaWasmEvaluator = class {
|
|
55165
|
-
policy = null;
|
|
55166
|
-
dataDocument = {};
|
|
55167
|
-
compiler = new OpaCompiler();
|
|
55168
|
-
async initialize(rulesPath) {
|
|
55169
|
-
const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
|
|
55170
|
-
const wasmBytes = await this.compiler.resolveWasmBytes(paths);
|
|
55171
|
-
try {
|
|
55172
|
-
const { createRequire } = require("module");
|
|
55173
|
-
const runtimeRequire = createRequire(__filename);
|
|
55174
|
-
const opaWasm = runtimeRequire("@open-policy-agent/opa-wasm");
|
|
55175
|
-
const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
|
|
55176
|
-
if (!loadPolicy) {
|
|
55177
|
-
throw new Error("loadPolicy not found in @open-policy-agent/opa-wasm");
|
|
55178
|
-
}
|
|
55179
|
-
this.policy = await loadPolicy(wasmBytes);
|
|
55180
|
-
} catch (err) {
|
|
55181
|
-
if (err?.code === "MODULE_NOT_FOUND" || err?.code === "ERR_MODULE_NOT_FOUND") {
|
|
55182
|
-
throw new Error(
|
|
55183
|
-
"OPA WASM evaluator requires @open-policy-agent/opa-wasm. Install it with: npm install @open-policy-agent/opa-wasm"
|
|
55184
|
-
);
|
|
55185
|
-
}
|
|
55186
|
-
throw err;
|
|
55187
|
-
}
|
|
55188
|
-
}
|
|
55189
|
-
/**
|
|
55190
|
-
* Load external data from a JSON file to use as the OPA data document.
|
|
55191
|
-
* The loaded data will be passed to `policy.setData()` during evaluation,
|
|
55192
|
-
* making it available in Rego via `data.<key>`.
|
|
55193
|
-
*/
|
|
55194
|
-
loadData(dataPath) {
|
|
55195
|
-
const resolved = path27.resolve(dataPath);
|
|
55196
|
-
if (path27.normalize(resolved).includes("..")) {
|
|
55197
|
-
throw new Error(`Data path contains traversal sequences: ${dataPath}`);
|
|
55198
|
-
}
|
|
55199
|
-
if (!fs23.existsSync(resolved)) {
|
|
55200
|
-
throw new Error(`OPA data file not found: ${resolved}`);
|
|
55201
|
-
}
|
|
55202
|
-
const stat2 = fs23.statSync(resolved);
|
|
55203
|
-
if (stat2.size > 10 * 1024 * 1024) {
|
|
55204
|
-
throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat2.size} bytes)`);
|
|
55205
|
-
}
|
|
55206
|
-
const raw = fs23.readFileSync(resolved, "utf-8");
|
|
55207
|
-
try {
|
|
55208
|
-
const parsed = JSON.parse(raw);
|
|
55209
|
-
if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
|
|
55210
|
-
throw new Error("OPA data file must contain a JSON object (not an array or primitive)");
|
|
55211
|
-
}
|
|
55212
|
-
this.dataDocument = parsed;
|
|
55213
|
-
} catch (err) {
|
|
55214
|
-
if (err.message.startsWith("OPA data file must")) {
|
|
55215
|
-
throw err;
|
|
55216
|
-
}
|
|
55217
|
-
throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
|
|
55218
|
-
}
|
|
55219
|
-
}
|
|
55220
|
-
async evaluate(input) {
|
|
55221
|
-
if (!this.policy) {
|
|
55222
|
-
throw new Error("OPA WASM evaluator not initialized");
|
|
55223
|
-
}
|
|
55224
|
-
this.policy.setData(this.dataDocument);
|
|
55225
|
-
const resultSet = this.policy.evaluate(input);
|
|
55226
|
-
if (Array.isArray(resultSet) && resultSet.length > 0) {
|
|
55227
|
-
return resultSet[0].result;
|
|
55228
|
-
}
|
|
55229
|
-
return void 0;
|
|
55230
|
-
}
|
|
55231
|
-
async shutdown() {
|
|
55232
|
-
if (this.policy) {
|
|
55233
|
-
if (typeof this.policy.close === "function") {
|
|
55234
|
-
try {
|
|
55235
|
-
this.policy.close();
|
|
55236
|
-
} catch {
|
|
55237
|
-
}
|
|
55238
|
-
} else if (typeof this.policy.free === "function") {
|
|
55239
|
-
try {
|
|
55240
|
-
this.policy.free();
|
|
55241
|
-
} catch {
|
|
55242
|
-
}
|
|
55243
|
-
}
|
|
55244
|
-
}
|
|
55245
|
-
this.policy = null;
|
|
55246
|
-
}
|
|
55247
|
-
};
|
|
55248
|
-
}
|
|
55249
|
-
});
|
|
55250
|
-
|
|
55251
|
-
// src/enterprise/policy/opa-http-evaluator.ts
|
|
55252
|
-
var OpaHttpEvaluator;
|
|
55253
|
-
var init_opa_http_evaluator = __esm({
|
|
55254
|
-
"src/enterprise/policy/opa-http-evaluator.ts"() {
|
|
55255
|
-
"use strict";
|
|
55256
|
-
OpaHttpEvaluator = class {
|
|
55257
|
-
baseUrl;
|
|
55258
|
-
timeout;
|
|
55259
|
-
constructor(baseUrl, timeout = 5e3) {
|
|
55260
|
-
let parsed;
|
|
55261
|
-
try {
|
|
55262
|
-
parsed = new URL(baseUrl);
|
|
55263
|
-
} catch {
|
|
55264
|
-
throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
|
|
55265
|
-
}
|
|
55266
|
-
if (!["http:", "https:"].includes(parsed.protocol)) {
|
|
55267
|
-
throw new Error(
|
|
55268
|
-
`OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`
|
|
55269
|
-
);
|
|
55270
|
-
}
|
|
55271
|
-
const hostname = parsed.hostname;
|
|
55272
|
-
if (this.isBlockedHostname(hostname)) {
|
|
55273
|
-
throw new Error(
|
|
55274
|
-
`OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`
|
|
55275
|
-
);
|
|
55276
|
-
}
|
|
55277
|
-
this.baseUrl = baseUrl.replace(/\/+$/, "");
|
|
55278
|
-
this.timeout = timeout;
|
|
55279
|
-
}
|
|
55280
|
-
/**
|
|
55281
|
-
* Check if a hostname is blocked due to SSRF concerns.
|
|
55282
|
-
*
|
|
55283
|
-
* Blocks:
|
|
55284
|
-
* - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
|
|
55285
|
-
* - Link-local addresses (169.254.x.x)
|
|
55286
|
-
* - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
|
|
55287
|
-
* - IPv6 unique local addresses (fd00::/8)
|
|
55288
|
-
* - Cloud metadata services (*.internal)
|
|
55289
|
-
*/
|
|
55290
|
-
isBlockedHostname(hostname) {
|
|
55291
|
-
if (!hostname) return true;
|
|
55292
|
-
const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, "");
|
|
55293
|
-
if (normalized === "metadata.google.internal" || normalized.endsWith(".internal")) {
|
|
55294
|
-
return true;
|
|
55295
|
-
}
|
|
55296
|
-
if (normalized === "localhost" || normalized === "localhost.localdomain") {
|
|
55297
|
-
return true;
|
|
55298
|
-
}
|
|
55299
|
-
if (normalized === "::1" || normalized === "0:0:0:0:0:0:0:1") {
|
|
55300
|
-
return true;
|
|
55301
|
-
}
|
|
55302
|
-
const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
|
|
55303
|
-
const ipv4Match = normalized.match(ipv4Pattern);
|
|
55304
|
-
if (ipv4Match) {
|
|
55305
|
-
const octets = ipv4Match.slice(1, 5).map(Number);
|
|
55306
|
-
if (octets.some((octet) => octet > 255)) {
|
|
55307
|
-
return false;
|
|
55308
|
-
}
|
|
55309
|
-
const [a, b] = octets;
|
|
55310
|
-
if (a === 127) {
|
|
55311
|
-
return true;
|
|
55312
|
-
}
|
|
55313
|
-
if (a === 0) {
|
|
55314
|
-
return true;
|
|
55315
|
-
}
|
|
55316
|
-
if (a === 169 && b === 254) {
|
|
55317
|
-
return true;
|
|
55318
|
-
}
|
|
55319
|
-
if (a === 10) {
|
|
55320
|
-
return true;
|
|
55321
|
-
}
|
|
55322
|
-
if (a === 172 && b >= 16 && b <= 31) {
|
|
55323
|
-
return true;
|
|
55324
|
-
}
|
|
55325
|
-
if (a === 192 && b === 168) {
|
|
55326
|
-
return true;
|
|
55327
|
-
}
|
|
55328
|
-
}
|
|
55329
|
-
if (normalized.startsWith("fd") || normalized.startsWith("fc")) {
|
|
55330
|
-
return true;
|
|
55331
|
-
}
|
|
55332
|
-
if (normalized.startsWith("fe80:")) {
|
|
55333
|
-
return true;
|
|
55334
|
-
}
|
|
55335
|
-
return false;
|
|
55336
|
-
}
|
|
55337
|
-
/**
|
|
55338
|
-
* Evaluate a policy rule against an input document via OPA REST API.
|
|
55339
|
-
*
|
|
55340
|
-
* @param input - The input document to evaluate
|
|
55341
|
-
* @param rulePath - OPA rule path (e.g., 'visor/check/execute')
|
|
55342
|
-
* @returns The result object from OPA, or undefined on error
|
|
55343
|
-
*/
|
|
55344
|
-
async evaluate(input, rulePath) {
|
|
55345
|
-
const encodedPath = rulePath.split("/").map((s) => encodeURIComponent(s)).join("/");
|
|
55346
|
-
const url = `${this.baseUrl}/v1/data/${encodedPath}`;
|
|
55347
|
-
const controller = new AbortController();
|
|
55348
|
-
const timer = setTimeout(() => controller.abort(), this.timeout);
|
|
55349
|
-
try {
|
|
55350
|
-
const response = await fetch(url, {
|
|
55351
|
-
method: "POST",
|
|
55352
|
-
headers: { "Content-Type": "application/json" },
|
|
55353
|
-
body: JSON.stringify({ input }),
|
|
55354
|
-
signal: controller.signal
|
|
55355
|
-
});
|
|
55356
|
-
if (!response.ok) {
|
|
55357
|
-
throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
|
|
55358
|
-
}
|
|
55359
|
-
let body;
|
|
55360
|
-
try {
|
|
55361
|
-
body = await response.json();
|
|
55362
|
-
} catch (jsonErr) {
|
|
55363
|
-
throw new Error(
|
|
55364
|
-
`OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`
|
|
55365
|
-
);
|
|
55366
|
-
}
|
|
55367
|
-
return body?.result;
|
|
55368
|
-
} finally {
|
|
55369
|
-
clearTimeout(timer);
|
|
55370
|
-
}
|
|
55371
|
-
}
|
|
55372
|
-
async shutdown() {
|
|
55373
|
-
}
|
|
55374
|
-
};
|
|
55375
|
-
}
|
|
55376
|
-
});
|
|
55377
|
-
|
|
55378
|
-
// src/enterprise/policy/policy-input-builder.ts
|
|
55379
|
-
var PolicyInputBuilder;
|
|
55380
|
-
var init_policy_input_builder = __esm({
|
|
55381
|
-
"src/enterprise/policy/policy-input-builder.ts"() {
|
|
55382
|
-
"use strict";
|
|
55383
|
-
PolicyInputBuilder = class {
|
|
55384
|
-
roles;
|
|
55385
|
-
actor;
|
|
55386
|
-
repository;
|
|
55387
|
-
pullRequest;
|
|
55388
|
-
constructor(policyConfig, actor, repository, pullRequest) {
|
|
55389
|
-
this.roles = policyConfig.roles || {};
|
|
55390
|
-
this.actor = actor;
|
|
55391
|
-
this.repository = repository;
|
|
55392
|
-
this.pullRequest = pullRequest;
|
|
55393
|
-
}
|
|
55394
|
-
/** Resolve which roles apply to the current actor. */
|
|
55395
|
-
resolveRoles() {
|
|
55396
|
-
const matched = [];
|
|
55397
|
-
for (const [roleName, roleConfig] of Object.entries(this.roles)) {
|
|
55398
|
-
let identityMatch = false;
|
|
55399
|
-
if (roleConfig.author_association && this.actor.authorAssociation && roleConfig.author_association.includes(this.actor.authorAssociation)) {
|
|
55400
|
-
identityMatch = true;
|
|
55401
|
-
}
|
|
55402
|
-
if (!identityMatch && roleConfig.users && this.actor.login && roleConfig.users.includes(this.actor.login)) {
|
|
55403
|
-
identityMatch = true;
|
|
55404
|
-
}
|
|
55405
|
-
if (!identityMatch && roleConfig.slack_users && this.actor.slack?.userId && roleConfig.slack_users.includes(this.actor.slack.userId)) {
|
|
55406
|
-
identityMatch = true;
|
|
55407
|
-
}
|
|
55408
|
-
if (!identityMatch && roleConfig.emails && this.actor.slack?.email) {
|
|
55409
|
-
const actorEmail = this.actor.slack.email.toLowerCase();
|
|
55410
|
-
if (roleConfig.emails.some((e) => e.toLowerCase() === actorEmail)) {
|
|
55411
|
-
identityMatch = true;
|
|
55412
|
-
}
|
|
55413
|
-
}
|
|
55414
|
-
if (!identityMatch) continue;
|
|
55415
|
-
if (roleConfig.slack_channels && roleConfig.slack_channels.length > 0) {
|
|
55416
|
-
if (!this.actor.slack?.channelId || !roleConfig.slack_channels.includes(this.actor.slack.channelId)) {
|
|
55417
|
-
continue;
|
|
55418
|
-
}
|
|
55419
|
-
}
|
|
55420
|
-
matched.push(roleName);
|
|
55421
|
-
}
|
|
55422
|
-
return matched;
|
|
55423
|
-
}
|
|
55424
|
-
buildActor() {
|
|
55425
|
-
return {
|
|
55426
|
-
authorAssociation: this.actor.authorAssociation,
|
|
55427
|
-
login: this.actor.login,
|
|
55428
|
-
roles: this.resolveRoles(),
|
|
55429
|
-
isLocalMode: this.actor.isLocalMode,
|
|
55430
|
-
...this.actor.slack && { slack: this.actor.slack }
|
|
55431
|
-
};
|
|
55432
|
-
}
|
|
55433
|
-
forCheckExecution(check) {
|
|
55434
|
-
return {
|
|
55435
|
-
scope: "check.execute",
|
|
55436
|
-
check: {
|
|
55437
|
-
id: check.id,
|
|
55438
|
-
type: check.type,
|
|
55439
|
-
group: check.group,
|
|
55440
|
-
tags: check.tags,
|
|
55441
|
-
criticality: check.criticality,
|
|
55442
|
-
sandbox: check.sandbox,
|
|
55443
|
-
policy: check.policy
|
|
55444
|
-
},
|
|
55445
|
-
actor: this.buildActor(),
|
|
55446
|
-
repository: this.repository,
|
|
55447
|
-
pullRequest: this.pullRequest
|
|
55448
|
-
};
|
|
55449
|
-
}
|
|
55450
|
-
forToolInvocation(serverName, methodName, transport) {
|
|
55451
|
-
return {
|
|
55452
|
-
scope: "tool.invoke",
|
|
55453
|
-
tool: { serverName, methodName, transport },
|
|
55454
|
-
actor: this.buildActor(),
|
|
55455
|
-
repository: this.repository,
|
|
55456
|
-
pullRequest: this.pullRequest
|
|
55457
|
-
};
|
|
55458
|
-
}
|
|
55459
|
-
forCapabilityResolve(checkId, capabilities) {
|
|
55460
|
-
return {
|
|
55461
|
-
scope: "capability.resolve",
|
|
55462
|
-
check: { id: checkId, type: "ai" },
|
|
55463
|
-
capability: capabilities,
|
|
55464
|
-
actor: this.buildActor(),
|
|
55465
|
-
repository: this.repository,
|
|
55466
|
-
pullRequest: this.pullRequest
|
|
55467
|
-
};
|
|
55468
|
-
}
|
|
55469
|
-
};
|
|
55470
|
-
}
|
|
55471
|
-
});
|
|
55472
|
-
|
|
55473
|
-
// src/enterprise/policy/opa-policy-engine.ts
// Lazily-initialized module (bundler __esm wrapper) exporting OpaPolicyEngine,
// which evaluates Visor policy decisions via OPA — either a local WASM bundle
// or a remote OPA HTTP server.
var opa_policy_engine_exports = {};
__export(opa_policy_engine_exports, {
  OpaPolicyEngine: () => OpaPolicyEngine
});
var OpaPolicyEngine;
var init_opa_policy_engine = __esm({
  "src/enterprise/policy/opa-policy-engine.ts"() {
    "use strict";
    init_opa_wasm_evaluator();
    init_opa_http_evaluator();
    init_policy_input_builder();
    OpaPolicyEngine = class {
      // Active evaluator: OpaWasmEvaluator (local) | OpaHttpEvaluator (remote) | null (disabled).
      evaluator = null;
      // Behavior when evaluation fails or denies: "deny" (default) | "allow" | "warn".
      fallback;
      // Per-evaluation timeout in milliseconds.
      timeout;
      config;
      // Builds scope-specific policy input documents; recreated when context changes.
      inputBuilder = null;
      // Optional logger, resolved lazily in initialize() (may stay null).
      logger = null;
      constructor(config) {
        this.config = config;
        this.fallback = config.fallback || "deny";
        this.timeout = config.timeout || 5e3;
      }
      // Resolve the logger, capture actor/repo/PR context from the environment,
      // and construct the configured evaluator (local WASM or remote HTTP).
      async initialize(config) {
        try {
          this.logger = (init_logger(), __toCommonJS(logger_exports)).logger;
        } catch {
          // Logger is optional; evaluation proceeds silently without it.
        }
        const actor = {
          authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
          login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
          // Outside GitHub Actions we treat the run as local mode.
          isLocalMode: !process.env.GITHUB_ACTIONS
        };
        const repo = {
          owner: process.env.GITHUB_REPOSITORY_OWNER,
          name: process.env.GITHUB_REPOSITORY?.split("/")[1],
          branch: process.env.GITHUB_HEAD_REF,
          baseBranch: process.env.GITHUB_BASE_REF,
          event: process.env.GITHUB_EVENT_NAME
        };
        const prNum = process.env.GITHUB_PR_NUMBER ? parseInt(process.env.GITHUB_PR_NUMBER, 10) : void 0;
        const pullRequest = {
          // Guard against NaN from a malformed GITHUB_PR_NUMBER.
          number: prNum !== void 0 && Number.isFinite(prNum) ? prNum : void 0
        };
        this.inputBuilder = new PolicyInputBuilder(config, actor, repo, pullRequest);
        if (config.engine === "local") {
          if (!config.rules) {
            throw new Error("OPA local mode requires `policy.rules` path to .wasm or .rego files");
          }
          const wasm = new OpaWasmEvaluator();
          await wasm.initialize(config.rules);
          if (config.data) {
            wasm.loadData(config.data);
          }
          this.evaluator = wasm;
        } else if (config.engine === "remote") {
          if (!config.url) {
            throw new Error("OPA remote mode requires `policy.url` pointing to OPA server");
          }
          this.evaluator = new OpaHttpEvaluator(config.url, this.timeout);
        } else {
          // Unknown/absent engine: policy evaluation is disabled (all checks allowed).
          this.evaluator = null;
        }
      }
      /**
       * Update actor/repo/PR context (e.g., after PR info becomes available).
       * Called by the enterprise loader when engine context is enriched.
       */
      setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new PolicyInputBuilder(this.config, actor, repo, pullRequest);
      }
      // Decide whether a check may execute; allows everything when no evaluator
      // is configured. Honors a per-check `policy.rule` override path.
      async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const cfg = checkConfig && typeof checkConfig === "object" ? checkConfig : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
          id: checkId,
          type: cfg.type || "ai",
          group: cfg.group,
          tags: cfg.tags,
          criticality: cfg.criticality,
          sandbox: cfg.sandbox,
          policy: policyOverride
        });
        return this.doEvaluate(input, this.resolveRulePath("check.execute", policyOverride?.rule));
      }
      // Decide whether an MCP tool invocation is permitted.
      async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, "visor/tool/invoke");
      }
      // Decide whether the requested capabilities may be granted to a check.
      async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, "visor/capability/resolve");
      }
      // Release evaluator resources (if the evaluator supports shutdown) and
      // drop references so the engine becomes a no-op allow-all.
      async shutdown() {
        if (this.evaluator && "shutdown" in this.evaluator) {
          await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
      }
      // Map a dotted scope (e.g. "check.execute") — or a user override — to a
      // slash-separated OPA rule path rooted at "visor/".
      resolveRulePath(defaultScope, override) {
        if (override) {
          return override.startsWith("visor/") ? override : `visor/${override}`;
        }
        return `visor/${defaultScope.replace(/\./g, "/")}`;
      }
      // Core evaluation: race the evaluator against a timeout, normalize the
      // raw result into a decision, and apply the "warn" fallback (deny ->
      // allow + warn). Any failure yields a decision based on `fallback`.
      async doEvaluate(input, rulePath) {
        try {
          this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
          let timer;
          const timeoutPromise = new Promise((_resolve, reject) => {
            timer = setTimeout(() => reject(new Error("policy evaluation timeout")), this.timeout);
          });
          try {
            const result = await Promise.race([this.rawEvaluate(input, rulePath), timeoutPromise]);
            const decision = this.parseDecision(result);
            if (!decision.allowed && this.fallback === "warn") {
              // Audit mode: record the denial but let the operation proceed.
              decision.allowed = true;
              decision.warn = true;
              decision.reason = `audit: ${decision.reason || "policy denied"}`;
            }
            this.logger?.debug(
              `[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || "none"}`
            );
            return decision;
          } finally {
            // Always clear the timer so a fast result doesn't leave a pending rejection timer.
            if (timer) clearTimeout(timer);
          }
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: `policy evaluation failed, fallback=${this.fallback}`
          };
        }
      }
      // WASM evaluator returns the whole package tree (entrypoint is the
      // package), so navigate to the rule; HTTP evaluator takes the path itself.
      async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof OpaWasmEvaluator) {
          const result = await this.evaluator.evaluate(input);
          return this.navigateWasmResult(result, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
      }
      /**
       * Navigate nested OPA WASM result tree to reach the specific rule's output.
       * The WASM entrypoint `-e visor` means the result root IS the visor package,
       * so we strip the `visor/` prefix and walk the remaining segments.
       */
      navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== "object") return result;
        const segments = rulePath.replace(/^visor\//, "").split("/");
        let current = result;
        for (const seg of segments) {
          if (current && typeof current === "object" && seg in current) {
            current = current[seg];
          } else {
            // Rule not present in the result tree — treated as "no result" upstream.
            return void 0;
          }
        }
        return current;
      }
      // Normalize a raw rule result into { allowed, reason, capabilities? }.
      // A missing result falls back per `this.fallback`; anything else is
      // allowed unless it explicitly sets `allowed: false`.
      parseDecision(result) {
        if (result === void 0 || result === null) {
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: this.fallback === "warn" ? "audit: no policy result" : "no policy result"
          };
        }
        const allowed = result.allowed !== false;
        const decision = {
          allowed,
          reason: result.reason
        };
        if (result.capabilities) {
          decision.capabilities = result.capabilities;
        }
        return decision;
      }
    };
  }
});
|
|
55661
|
-
|
|
55662
|
-
// src/enterprise/scheduler/knex-store.ts
// Export surface for the lazily-initialized knex-backed schedule store
// (PostgreSQL / MySQL / MSSQL drivers for the enterprise scheduler).
var knex_store_exports = {};
__export(knex_store_exports, {
  KnexStoreBackend: () => KnexStoreBackend
});
|
|
55667
|
-
// Coerce a DB numeric column to a JS number. Some drivers return BIGINT
// columns as strings (e.g. pg), so strings are parsed base-10; null and
// undefined both map to undefined; numbers pass through unchanged.
function toNum(val) {
  if (val === null || val === undefined) {
    return undefined;
  }
  if (typeof val === "string") {
    return parseInt(val, 10);
  }
  return val;
}
|
|
55671
|
-
// Parse a JSON text column defensively: empty/null input and malformed
// JSON both yield undefined instead of throwing.
function safeJsonParse2(value) {
  if (!value) {
    return undefined;
  }
  let parsed;
  try {
    parsed = JSON.parse(value);
  } catch {
    parsed = undefined;
  }
  return parsed;
}
|
|
55679
|
-
// Map a `message_triggers` DB row (snake_case columns) into a camelCase
// trigger object. JSON text columns are parsed; nullable columns become
// undefined; boolean columns tolerate driver-specific true/1 encodings.
function fromTriggerRow2(row) {
  // null/undefined column -> undefined field.
  const orUndef = (v) => v ?? undefined;
  // Drivers may return booleans as true/false or 1/0.
  const asBool = (v) => v === true || v === 1;
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: orUndef(row.creator_context),
    creatorName: orUndef(row.creator_name),
    description: orUndef(row.description),
    channels: safeJsonParse2(row.channels),
    fromUsers: safeJsonParse2(row.from_users),
    fromBots: asBool(row.from_bots),
    contains: safeJsonParse2(row.contains),
    matchPattern: orUndef(row.match_pattern),
    threads: row.threads,
    workflow: row.workflow,
    inputs: safeJsonParse2(row.inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    enabled: asBool(row.enabled),
    createdAt: toNum(row.created_at)
  };
}
|
|
55700
|
-
// Map a camelCase trigger object to a `message_triggers` insert row:
// structured fields are serialized to JSON text, optional scalars become
// NULL, and required fields pass through unchanged.
function toTriggerInsertRow(trigger) {
  // Truthy structured value -> JSON string; otherwise NULL column.
  const jsonOrNull = (v) => v ? JSON.stringify(v) : null;
  // null/undefined scalar -> NULL column.
  const orNull = (v) => v ?? null;
  return {
    id: trigger.id,
    creator_id: trigger.creatorId,
    creator_context: orNull(trigger.creatorContext),
    creator_name: orNull(trigger.creatorName),
    description: orNull(trigger.description),
    channels: jsonOrNull(trigger.channels),
    from_users: jsonOrNull(trigger.fromUsers),
    from_bots: trigger.fromBots,
    contains: jsonOrNull(trigger.contains),
    match_pattern: orNull(trigger.matchPattern),
    threads: trigger.threads,
    workflow: trigger.workflow,
    inputs: jsonOrNull(trigger.inputs),
    output_context: jsonOrNull(trigger.outputContext),
    status: trigger.status,
    enabled: trigger.enabled,
    created_at: trigger.createdAt
  };
}
|
|
55721
|
-
// Map a `schedules` DB row (snake_case columns) into a camelCase schedule
// object. Numeric/bigint columns go through toNum, JSON columns through
// safeJsonParse2, nullable columns become undefined.
function fromDbRow2(row) {
  // null/undefined column -> undefined field.
  const orUndef = (v) => v ?? undefined;
  return {
    id: row.id,
    creatorId: row.creator_id,
    creatorContext: orUndef(row.creator_context),
    creatorName: orUndef(row.creator_name),
    timezone: row.timezone,
    schedule: row.schedule_expr,
    runAt: toNum(row.run_at),
    // Drivers may return booleans as true/false or 1/0.
    isRecurring: row.is_recurring === true || row.is_recurring === 1,
    originalExpression: row.original_expression,
    workflow: orUndef(row.workflow),
    workflowInputs: safeJsonParse2(row.workflow_inputs),
    outputContext: safeJsonParse2(row.output_context),
    status: row.status,
    createdAt: toNum(row.created_at),
    lastRunAt: toNum(row.last_run_at),
    nextRunAt: toNum(row.next_run_at),
    runCount: row.run_count,
    failureCount: row.failure_count,
    lastError: orUndef(row.last_error),
    previousResponse: orUndef(row.previous_response)
  };
}
|
|
55745
|
-
// Map a camelCase schedule object to a `schedules` insert row: structured
// inputs are serialized to JSON text, optional scalars become NULL, and
// required counters/timestamps pass through unchanged.
function toInsertRow(schedule) {
  // Truthy structured value -> JSON string; otherwise NULL column.
  const jsonOrNull = (v) => v ? JSON.stringify(v) : null;
  // null/undefined scalar -> NULL column.
  const orNull = (v) => v ?? null;
  return {
    id: schedule.id,
    creator_id: schedule.creatorId,
    creator_context: orNull(schedule.creatorContext),
    creator_name: orNull(schedule.creatorName),
    timezone: schedule.timezone,
    schedule_expr: schedule.schedule,
    run_at: orNull(schedule.runAt),
    is_recurring: schedule.isRecurring,
    original_expression: schedule.originalExpression,
    workflow: orNull(schedule.workflow),
    workflow_inputs: jsonOrNull(schedule.workflowInputs),
    output_context: jsonOrNull(schedule.outputContext),
    status: schedule.status,
    created_at: schedule.createdAt,
    last_run_at: orNull(schedule.lastRunAt),
    next_run_at: orNull(schedule.nextRunAt),
    run_count: schedule.runCount,
    failure_count: schedule.failureCount,
    last_error: orNull(schedule.lastError),
    previous_response: orNull(schedule.previousResponse)
  };
}
|
|
55769
|
-
var fs24, path28, import_uuid2, KnexStoreBackend;
var init_knex_store = __esm({
  "src/enterprise/scheduler/knex-store.ts"() {
    "use strict";
    fs24 = __toESM(require("fs"));
    path28 = __toESM(require("path"));
    import_uuid2 = require("uuid");
    init_logger();
    // SQL schedule/trigger store backed by knex (PostgreSQL / MySQL / MSSQL).
    // knex itself is loaded lazily at initialize() time so it stays an
    // optional dependency. Also provides table-based distributed locks for HA.
    KnexStoreBackend = class {
      // knex instance; null until initialize() succeeds.
      knex = null;
      // "postgresql" | "mysql" | "mssql" — drives client selection and
      // boolean-literal quirks (MSSQL uses 1/0, not true/false).
      driver;
      // Raw connection config object from storageConfig.connection.
      connection;
      constructor(driver, storageConfig, _haConfig) {
        this.driver = driver;
        this.connection = storageConfig.connection || {};
      }
      // Load knex at runtime, build the driver-appropriate connection config,
      // create the pool, and ensure the schema exists.
      async initialize() {
        const { createRequire } = require("module");
        const runtimeRequire = createRequire(__filename);
        let knexFactory;
        try {
          knexFactory = runtimeRequire("knex");
        } catch (err) {
          const code = err?.code;
          // Friendly message only when knex is genuinely missing; other
          // load failures are rethrown untouched.
          if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
            throw new Error(
              "knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
            );
          }
          throw err;
        }
        const clientMap = {
          postgresql: "pg",
          mysql: "mysql2",
          mssql: "tedious"
        };
        const client = clientMap[this.driver];
        let connection;
        // Precedence: explicit connection string > MSSQL-shaped object > standard object.
        if (this.connection.connection_string) {
          connection = this.connection.connection_string;
        } else if (this.driver === "mssql") {
          connection = this.buildMssqlConnection();
        } else {
          connection = this.buildStandardConnection();
        }
        this.knex = knexFactory({
          client,
          connection,
          pool: {
            min: this.connection.pool?.min ?? 0,
            max: this.connection.pool?.max ?? 10
          }
        });
        await this.migrateSchema();
        logger.info(`[KnexStore] Initialized (${this.driver})`);
      }
      // Connection object for pg/mysql2 with optional SSL material.
      buildStandardConnection() {
        return {
          host: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          ssl: this.resolveSslConfig()
        };
      }
      // MSSQL/tedious uses `server` + `options.encrypt` instead of `host` + `ssl`.
      buildMssqlConnection() {
        const ssl = this.connection.ssl;
        const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
        return {
          server: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          options: {
            encrypt: sslEnabled,
            // Trust self-signed certs when verification is explicitly disabled,
            // or whenever encryption is off.
            trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
          }
        };
      }
      // Translate the config's ssl block into pg/mysql ssl options, reading
      // CA/cert/key files from validated paths. Returns false when disabled.
      resolveSslConfig() {
        const ssl = this.connection.ssl;
        if (ssl === false || ssl === void 0) return false;
        if (ssl === true) return { rejectUnauthorized: true };
        if (ssl.enabled === false) return false;
        const result = {
          // Verify server certs unless explicitly opted out.
          rejectUnauthorized: ssl.reject_unauthorized !== false
        };
        if (ssl.ca) {
          const caPath = this.validateSslPath(ssl.ca, "CA certificate");
          result.ca = fs24.readFileSync(caPath, "utf8");
        }
        if (ssl.cert) {
          const certPath = this.validateSslPath(ssl.cert, "client certificate");
          result.cert = fs24.readFileSync(certPath, "utf8");
        }
        if (ssl.key) {
          const keyPath = this.validateSslPath(ssl.key, "client key");
          result.key = fs24.readFileSync(keyPath, "utf8");
        }
        return result;
      }
      // Resolve and existence-check an SSL file path before reading it.
      validateSslPath(filePath, label) {
        const resolved = path28.resolve(filePath);
        // NOTE(review): path.resolve already normalizes, so this comparison
        // appears to always pass — presumably a defense-in-depth check.
        if (resolved !== path28.normalize(resolved)) {
          throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
        }
        if (!fs24.existsSync(resolved)) {
          throw new Error(`SSL ${label} not found: ${filePath}`);
        }
        return resolved;
      }
      // Drain the pool and forget the knex instance.
      async shutdown() {
        if (this.knex) {
          await this.knex.destroy();
          this.knex = null;
        }
      }
      // Idempotent schema creation for schedules, message_triggers, and
      // scheduler_locks. Timestamps are stored as epoch-millis bigints.
      async migrateSchema() {
        const knex = this.getKnex();
        const exists = await knex.schema.hasTable("schedules");
        if (!exists) {
          await knex.schema.createTable("schedules", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.string("timezone", 64).notNullable().defaultTo("UTC");
            table.string("schedule_expr", 255);
            table.bigInteger("run_at");
            table.boolean("is_recurring").notNullable();
            table.text("original_expression");
            table.string("workflow", 255);
            table.text("workflow_inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().index();
            table.bigInteger("created_at").notNullable();
            table.bigInteger("last_run_at");
            table.bigInteger("next_run_at");
            table.integer("run_count").notNullable().defaultTo(0);
            table.integer("failure_count").notNullable().defaultTo(0);
            table.text("last_error");
            table.text("previous_response");
            // Composite index to serve the getDueSchedules polling query.
            table.index(["status", "next_run_at"]);
          });
        }
        const triggersExist = await knex.schema.hasTable("message_triggers");
        if (!triggersExist) {
          await knex.schema.createTable("message_triggers", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.text("description");
            table.text("channels");
            table.text("from_users");
            table.boolean("from_bots").notNullable().defaultTo(false);
            table.text("contains");
            table.text("match_pattern");
            table.string("threads", 20).notNullable().defaultTo("any");
            table.string("workflow", 255).notNullable();
            table.text("inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().defaultTo("active").index();
            table.boolean("enabled").notNullable().defaultTo(true);
            table.bigInteger("created_at").notNullable();
          });
        }
        const locksExist = await knex.schema.hasTable("scheduler_locks");
        if (!locksExist) {
          await knex.schema.createTable("scheduler_locks", (table) => {
            table.string("lock_id", 255).primary();
            table.string("node_id", 255).notNullable();
            table.string("lock_token", 36).notNullable();
            table.bigInteger("acquired_at").notNullable();
            table.bigInteger("expires_at").notNullable();
          });
        }
      }
      // Guarded accessor: all queries go through here so a missing
      // initialize() fails loudly instead of with a null deref.
      getKnex() {
        if (!this.knex) {
          throw new Error("[KnexStore] Not initialized. Call initialize() first.");
        }
        return this.knex;
      }
      // --- CRUD ---
      // Insert a new schedule with a fresh UUID and zeroed counters;
      // returns the stored object.
      async create(schedule) {
        const knex = this.getKnex();
        const newSchedule = {
          ...schedule,
          id: (0, import_uuid2.v4)(),
          createdAt: Date.now(),
          runCount: 0,
          failureCount: 0,
          status: "active"
        };
        await knex("schedules").insert(toInsertRow(newSchedule));
        logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
        return newSchedule;
      }
      // Insert a pre-existing schedule (e.g. migration) only if its id is new.
      async importSchedule(schedule) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", schedule.id).first();
        if (existing) return;
        await knex("schedules").insert(toInsertRow(schedule));
      }
      async get(id) {
        const knex = this.getKnex();
        const row = await knex("schedules").where("id", id).first();
        return row ? fromDbRow2(row) : void 0;
      }
      // Read-modify-write patch; id is immutable. Returns the merged object,
      // or undefined when the row doesn't exist.
      async update(id, patch) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", id).first();
        if (!existing) return void 0;
        const current = fromDbRow2(existing);
        const updated = { ...current, ...patch, id: current.id };
        const row = toInsertRow(updated);
        delete row.id;
        await knex("schedules").where("id", id).update(row);
        return updated;
      }
      async delete(id) {
        const knex = this.getKnex();
        const deleted = await knex("schedules").where("id", id).del();
        if (deleted > 0) {
          logger.info(`[KnexStore] Deleted schedule ${id}`);
          return true;
        }
        return false;
      }
      // --- Queries ---
      async getByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("creator_id", creatorId);
        return rows.map((r) => fromDbRow2(r));
      }
      async getActiveSchedules() {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("status", "active");
        return rows.map((r) => fromDbRow2(r));
      }
      // Active schedules whose next firing time has passed: one-shot rows
      // match on run_at, recurring rows on next_run_at. `now` is injectable
      // for tests; MSSQL needs 0/1 instead of boolean literals.
      async getDueSchedules(now) {
        const ts = now ?? Date.now();
        const knex = this.getKnex();
        const bFalse = this.driver === "mssql" ? 0 : false;
        const bTrue = this.driver === "mssql" ? 1 : true;
        const rows = await knex("schedules").where("status", "active").andWhere(function() {
          this.where(function() {
            this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
          }).orWhere(function() {
            this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
          });
        });
        return rows.map((r) => fromDbRow2(r));
      }
      // Case-insensitive substring search over a creator's active schedules;
      // LIKE metacharacters in the needle are escaped to be matched literally.
      async findByWorkflow(creatorId, workflowName) {
        const knex = this.getKnex();
        const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
        const pattern = `%${escaped}%`;
        const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
        return rows.map((r) => fromDbRow2(r));
      }
      async getAll() {
        const knex = this.getKnex();
        const rows = await knex("schedules");
        return rows.map((r) => fromDbRow2(r));
      }
      // Single-pass aggregate counts by status and recurrence. Boolean
      // literals are interpolated (fixed "0"/"1" vs "true"/"false" strings,
      // never user input) because parameter binding differs per driver.
      async getStats() {
        const knex = this.getKnex();
        const boolTrue = this.driver === "mssql" ? "1" : "true";
        const boolFalse = this.driver === "mssql" ? "0" : "false";
        const result = await knex("schedules").select(
          knex.raw("COUNT(*) as total"),
          knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
          knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
          knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
          knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
        ).first();
        return {
          total: Number(result.total) || 0,
          active: Number(result.active) || 0,
          paused: Number(result.paused) || 0,
          completed: Number(result.completed) || 0,
          failed: Number(result.failed) || 0,
          recurring: Number(result.recurring) || 0,
          oneTime: Number(result.one_time) || 0
        };
      }
      // Enforce global / per-user / per-user-recurring quotas; throws with a
      // user-facing message when a configured limit is reached.
      async validateLimits(creatorId, isRecurring, limits) {
        const knex = this.getKnex();
        if (limits.maxGlobal) {
          const result = await knex("schedules").count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxGlobal) {
            throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
          }
        }
        if (limits.maxPerUser) {
          const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxPerUser) {
            throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
          }
        }
        if (isRecurring && limits.maxRecurringPerUser) {
          const bTrue = this.driver === "mssql" ? 1 : true;
          const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
            throw new Error(
              `You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
            );
          }
        }
      }
      // --- HA Distributed Locking (via scheduler_locks table) ---
      // Acquire (or steal-if-expired) the named lock. Returns a fresh token
      // on success, null when another node holds an unexpired lock. The
      // UPDATE-then-INSERT order makes the steal path race-safe: the insert's
      // primary-key violation is swallowed as "lost the race".
      async tryAcquireLock(lockId, nodeId, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const token = (0, import_uuid2.v4)();
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
          node_id: nodeId,
          lock_token: token,
          acquired_at: now,
          expires_at: expiresAt
        });
        if (updated > 0) return token;
        try {
          await knex("scheduler_locks").insert({
            lock_id: lockId,
            node_id: nodeId,
            lock_token: token,
            acquired_at: now,
            expires_at: expiresAt
          });
          return token;
        } catch {
          // Duplicate key: another node holds the lock.
          return null;
        }
      }
      // Delete the lock row only if the token still matches (no-op for stale holders).
      async releaseLock(lockId, lockToken) {
        const knex = this.getKnex();
        await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
      }
      // Extend a held lock's TTL; returns false when the token no longer matches.
      async renewLock(lockId, lockToken, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
        return updated > 0;
      }
      // No-op: writes are synchronous at the SQL level, nothing to flush.
      async flush() {
      }
      // --- Message Trigger CRUD ---
      async createTrigger(trigger) {
        const knex = this.getKnex();
        const newTrigger = {
          ...trigger,
          id: (0, import_uuid2.v4)(),
          createdAt: Date.now()
        };
        await knex("message_triggers").insert(toTriggerInsertRow(newTrigger));
        logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
        return newTrigger;
      }
      async getTrigger(id) {
        const knex = this.getKnex();
        const row = await knex("message_triggers").where("id", id).first();
        return row ? fromTriggerRow2(row) : void 0;
      }
      // Read-modify-write patch; id and createdAt are immutable.
      async updateTrigger(id, patch) {
        const knex = this.getKnex();
        const existing = await knex("message_triggers").where("id", id).first();
        if (!existing) return void 0;
        const current = fromTriggerRow2(existing);
        const updated = {
          ...current,
          ...patch,
          id: current.id,
          createdAt: current.createdAt
        };
        const row = toTriggerInsertRow(updated);
        delete row.id;
        await knex("message_triggers").where("id", id).update(row);
        return updated;
      }
      async deleteTrigger(id) {
        const knex = this.getKnex();
        const deleted = await knex("message_triggers").where("id", id).del();
        if (deleted > 0) {
          logger.info(`[KnexStore] Deleted trigger ${id}`);
          return true;
        }
        return false;
      }
      async getTriggersByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex("message_triggers").where("creator_id", creatorId);
        return rows.map((r) => fromTriggerRow2(r));
      }
      // Active AND enabled triggers (enabled is 1/0 on MSSQL).
      async getActiveTriggers() {
        const knex = this.getKnex();
        const rows = await knex("message_triggers").where("status", "active").where("enabled", this.driver === "mssql" ? 1 : true);
        return rows.map((r) => fromTriggerRow2(r));
      }
    };
  }
});
|
|
56179
|
-
|
|
56180
|
-
// src/enterprise/loader.ts
// Entry points that gate enterprise features (OPA policy engine, SQL
// schedule storage) behind license validation.
var loader_exports = {};
__export(loader_exports, {
  loadEnterprisePolicyEngine: () => loadEnterprisePolicyEngine,
  loadEnterpriseStoreBackend: () => loadEnterpriseStoreBackend
});
|
|
56186
|
-
// Return an initialized OpaPolicyEngine when a valid license with the
// "policy" feature is present; otherwise (or on any failure) fall back to
// the permissive DefaultPolicyEngine. Never throws.
async function loadEnterprisePolicyEngine(config) {
  try {
    const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
    const licenseValidator = new LicenseValidator2();
    const license = await licenseValidator.loadAndValidate();
    // No license, or license lacks the policy feature -> default engine.
    const hasPolicyFeature = Boolean(license) && licenseValidator.hasFeature("policy");
    if (!hasPolicyFeature) {
      return new DefaultPolicyEngine();
    }
    if (licenseValidator.isInGracePeriod()) {
      console.warn(
        "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
      );
    }
    const { OpaPolicyEngine: OpaPolicyEngine2 } = await Promise.resolve().then(() => (init_opa_policy_engine(), opa_policy_engine_exports));
    const policyEngine = new OpaPolicyEngine2(config);
    await policyEngine.initialize(config);
    return policyEngine;
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    try {
      const { logger: logger2 } = (init_logger(), __toCommonJS(logger_exports));
      logger2.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
    } catch {
      // Logging is best-effort; still fall back silently if the logger fails.
    }
    return new DefaultPolicyEngine();
  }
}
|
|
56213
|
-
async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
|
|
56214
|
-
const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
|
|
56215
|
-
const validator = new LicenseValidator2();
|
|
56216
|
-
const license = await validator.loadAndValidate();
|
|
56217
|
-
if (!license || !validator.hasFeature("scheduler-sql")) {
|
|
56218
|
-
throw new Error(
|
|
56219
|
-
`The ${driver} schedule storage driver requires a Visor Enterprise license with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`
|
|
56220
|
-
);
|
|
56221
|
-
}
|
|
56222
|
-
if (validator.isInGracePeriod()) {
|
|
56223
|
-
console.warn(
|
|
56224
|
-
"[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
|
|
56225
|
-
);
|
|
56226
|
-
}
|
|
56227
|
-
const { KnexStoreBackend: KnexStoreBackend2 } = await Promise.resolve().then(() => (init_knex_store(), knex_store_exports));
|
|
56228
|
-
return new KnexStoreBackend2(driver, storageConfig, haConfig);
|
|
56229
|
-
}
|
|
56230
|
-
var init_loader = __esm({
|
|
56231
|
-
"src/enterprise/loader.ts"() {
|
|
56232
|
-
"use strict";
|
|
56233
|
-
init_default_engine();
|
|
56234
|
-
}
|
|
56235
|
-
});
|
|
56236
|
-
|
|
56237
54863
|
// src/event-bus/event-bus.ts
|
|
56238
54864
|
var event_bus_exports = {};
|
|
56239
54865
|
__export(event_bus_exports, {
|
|
@@ -57140,8 +55766,8 @@ ${content}
|
|
|
57140
55766
|
* Sleep utility
|
|
57141
55767
|
*/
|
|
57142
55768
|
sleep(ms) {
|
|
57143
|
-
return new Promise((
|
|
57144
|
-
const t = setTimeout(
|
|
55769
|
+
return new Promise((resolve15) => {
|
|
55770
|
+
const t = setTimeout(resolve15, ms);
|
|
57145
55771
|
if (typeof t.unref === "function") {
|
|
57146
55772
|
try {
|
|
57147
55773
|
t.unref();
|
|
@@ -57426,8 +56052,8 @@ ${end}`);
|
|
|
57426
56052
|
async updateGroupedComment(ctx, comments, group, changedIds) {
|
|
57427
56053
|
const existingLock = this.updateLocks.get(group);
|
|
57428
56054
|
let resolveLock;
|
|
57429
|
-
const ourLock = new Promise((
|
|
57430
|
-
resolveLock =
|
|
56055
|
+
const ourLock = new Promise((resolve15) => {
|
|
56056
|
+
resolveLock = resolve15;
|
|
57431
56057
|
});
|
|
57432
56058
|
this.updateLocks.set(group, ourLock);
|
|
57433
56059
|
try {
|
|
@@ -57740,7 +56366,7 @@ ${blocks}
|
|
|
57740
56366
|
* Sleep utility for enforcing delays
|
|
57741
56367
|
*/
|
|
57742
56368
|
sleep(ms) {
|
|
57743
|
-
return new Promise((
|
|
56369
|
+
return new Promise((resolve15) => setTimeout(resolve15, ms));
|
|
57744
56370
|
}
|
|
57745
56371
|
};
|
|
57746
56372
|
}
|
|
@@ -59032,15 +57658,15 @@ function serializeRunState(state) {
|
|
|
59032
57658
|
])
|
|
59033
57659
|
};
|
|
59034
57660
|
}
|
|
59035
|
-
var
|
|
57661
|
+
var path26, fs22, StateMachineExecutionEngine;
|
|
59036
57662
|
var init_state_machine_execution_engine = __esm({
|
|
59037
57663
|
"src/state-machine-execution-engine.ts"() {
|
|
59038
57664
|
"use strict";
|
|
59039
57665
|
init_runner();
|
|
59040
57666
|
init_logger();
|
|
59041
57667
|
init_sandbox_manager();
|
|
59042
|
-
|
|
59043
|
-
|
|
57668
|
+
path26 = __toESM(require("path"));
|
|
57669
|
+
fs22 = __toESM(require("fs"));
|
|
59044
57670
|
StateMachineExecutionEngine = class _StateMachineExecutionEngine {
|
|
59045
57671
|
workingDirectory;
|
|
59046
57672
|
executionContext;
|
|
@@ -59272,8 +57898,8 @@ var init_state_machine_execution_engine = __esm({
|
|
|
59272
57898
|
logger.debug(
|
|
59273
57899
|
`[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`
|
|
59274
57900
|
);
|
|
59275
|
-
const { loadEnterprisePolicyEngine
|
|
59276
|
-
context2.policyEngine = await
|
|
57901
|
+
const { loadEnterprisePolicyEngine } = await import("./enterprise/loader");
|
|
57902
|
+
context2.policyEngine = await loadEnterprisePolicyEngine(configWithTagFilter.policy);
|
|
59277
57903
|
logger.debug(
|
|
59278
57904
|
`[PolicyEngine] Initialized: ${context2.policyEngine?.constructor?.name || "unknown"}`
|
|
59279
57905
|
);
|
|
@@ -59425,9 +58051,9 @@ var init_state_machine_execution_engine = __esm({
|
|
|
59425
58051
|
}
|
|
59426
58052
|
const checkId = String(ev?.checkId || "unknown");
|
|
59427
58053
|
const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
|
|
59428
|
-
const baseDir = process.env.VISOR_SNAPSHOT_DIR ||
|
|
59429
|
-
|
|
59430
|
-
const filePath =
|
|
58054
|
+
const baseDir = process.env.VISOR_SNAPSHOT_DIR || path26.resolve(process.cwd(), ".visor", "snapshots");
|
|
58055
|
+
fs22.mkdirSync(baseDir, { recursive: true });
|
|
58056
|
+
const filePath = path26.join(baseDir, `${threadKey}-${checkId}.json`);
|
|
59431
58057
|
await this.saveSnapshotToFile(filePath);
|
|
59432
58058
|
logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
|
|
59433
58059
|
try {
|
|
@@ -59568,7 +58194,7 @@ var init_state_machine_execution_engine = __esm({
|
|
|
59568
58194
|
* Does not include secrets. Intended for debugging and future resume support.
|
|
59569
58195
|
*/
|
|
59570
58196
|
async saveSnapshotToFile(filePath) {
|
|
59571
|
-
const
|
|
58197
|
+
const fs23 = await import("fs/promises");
|
|
59572
58198
|
const ctx = this._lastContext;
|
|
59573
58199
|
const runner = this._lastRunner;
|
|
59574
58200
|
if (!ctx || !runner) {
|
|
@@ -59588,14 +58214,14 @@ var init_state_machine_execution_engine = __esm({
|
|
|
59588
58214
|
journal: entries,
|
|
59589
58215
|
requestedChecks: ctx.requestedChecks || []
|
|
59590
58216
|
};
|
|
59591
|
-
await
|
|
58217
|
+
await fs23.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
|
|
59592
58218
|
}
|
|
59593
58219
|
/**
|
|
59594
58220
|
* Load a snapshot JSON from file and return it. Resume support can build on this.
|
|
59595
58221
|
*/
|
|
59596
58222
|
async loadSnapshotFromFile(filePath) {
|
|
59597
|
-
const
|
|
59598
|
-
const raw = await
|
|
58223
|
+
const fs23 = await import("fs/promises");
|
|
58224
|
+
const raw = await fs23.readFile(filePath, "utf8");
|
|
59599
58225
|
return JSON.parse(raw);
|
|
59600
58226
|
}
|
|
59601
58227
|
/**
|