@probelabs/visor 0.1.147-ee → 0.1.148
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/frontends/github-frontend.d.ts +2 -1
- package/dist/frontends/github-frontend.d.ts.map +1 -1
- package/dist/index.js +747 -1831
- package/dist/output/traces/run-2026-03-02T18-32-11-359Z.ndjson +138 -0
- package/dist/output/traces/run-2026-03-02T18-32-55-702Z.ndjson +1442 -0
- package/dist/providers/ai-check-provider.d.ts.map +1 -1
- package/dist/scheduler/schedule-tool.d.ts.map +1 -1
- package/dist/scheduler/scheduler.d.ts +5 -0
- package/dist/scheduler/scheduler.d.ts.map +1 -1
- package/dist/sdk/{check-provider-registry-LBYIKFYM.mjs → check-provider-registry-35BPTY4W.mjs} +5 -6
- package/dist/sdk/{check-provider-registry-SCPM6DIT.mjs → check-provider-registry-DVQDGTOE.mjs} +5 -6
- package/dist/sdk/check-provider-registry-KHPY6LB4.mjs +29 -0
- package/dist/sdk/{chunk-4F5UVWAN.mjs → chunk-62TNF5PJ.mjs} +2 -2
- package/dist/sdk/{chunk-4F5UVWAN.mjs.map → chunk-62TNF5PJ.mjs.map} +1 -1
- package/dist/sdk/{chunk-PNZH3JSI.mjs → chunk-6N6JRWCW.mjs} +2743 -277
- package/dist/sdk/chunk-6N6JRWCW.mjs.map +1 -0
- package/dist/sdk/{chunk-FBJ7MC7R.mjs → chunk-AYQE4JCU.mjs} +3 -3
- package/dist/sdk/chunk-CISJ6DJW.mjs +1502 -0
- package/dist/sdk/chunk-CISJ6DJW.mjs.map +1 -0
- package/dist/sdk/chunk-EGUHXVWS.mjs +443 -0
- package/dist/sdk/chunk-EGUHXVWS.mjs.map +1 -0
- package/dist/sdk/{chunk-EWGX7LI7.mjs → chunk-H4AYMOAT.mjs} +2742 -276
- package/dist/sdk/chunk-H4AYMOAT.mjs.map +1 -0
- package/dist/sdk/chunk-IF2UD2KS.mjs +43159 -0
- package/dist/sdk/chunk-IF2UD2KS.mjs.map +1 -0
- package/dist/sdk/{chunk-V2QW6ECX.mjs → chunk-RJLJUTSU.mjs} +2 -2
- package/dist/sdk/chunk-S2YO4ZE3.mjs +739 -0
- package/dist/sdk/chunk-S2YO4ZE3.mjs.map +1 -0
- package/dist/sdk/{failure-condition-evaluator-FHNZL2US.mjs → failure-condition-evaluator-I6QWFKV3.mjs} +3 -3
- package/dist/sdk/failure-condition-evaluator-IVCTD4BZ.mjs +17 -0
- package/dist/sdk/{github-frontend-47EU2HBY.mjs → github-frontend-2MC77L7F.mjs} +16 -4
- package/dist/sdk/github-frontend-2MC77L7F.mjs.map +1 -0
- package/dist/sdk/github-frontend-DFT5G32K.mjs +1368 -0
- package/dist/sdk/github-frontend-DFT5G32K.mjs.map +1 -0
- package/dist/sdk/{host-GVR4UGZ3.mjs → host-4F6I3ZXN.mjs} +2 -2
- package/dist/sdk/{host-KGN5OIAM.mjs → host-H7IX4GBK.mjs} +2 -2
- package/dist/sdk/{routing-CZ36LVVS.mjs → routing-LU5PAREW.mjs} +4 -4
- package/dist/sdk/routing-UT3BXBXH.mjs +25 -0
- package/dist/sdk/schedule-tool-CONR4VW3.mjs +35 -0
- package/dist/sdk/schedule-tool-K3GQXCBN.mjs +35 -0
- package/dist/sdk/schedule-tool-SBXAEBDD.mjs +35 -0
- package/dist/sdk/{schedule-tool-handler-KFYNV7HL.mjs → schedule-tool-handler-GFQCJAVZ.mjs} +5 -6
- package/dist/sdk/schedule-tool-handler-GFQCJAVZ.mjs.map +1 -0
- package/dist/sdk/{schedule-tool-handler-E7XHMU5G.mjs → schedule-tool-handler-R7PG3VMR.mjs} +5 -6
- package/dist/sdk/schedule-tool-handler-R7PG3VMR.mjs.map +1 -0
- package/dist/sdk/schedule-tool-handler-YUC6CAXX.mjs +39 -0
- package/dist/sdk/schedule-tool-handler-YUC6CAXX.mjs.map +1 -0
- package/dist/sdk/sdk.js +1573 -1629
- package/dist/sdk/sdk.js.map +1 -1
- package/dist/sdk/sdk.mjs +4 -5
- package/dist/sdk/sdk.mjs.map +1 -1
- package/dist/sdk/{trace-helpers-EHDZ42HH.mjs → trace-helpers-6ROJR7N3.mjs} +2 -2
- package/dist/sdk/trace-helpers-6ROJR7N3.mjs.map +1 -0
- package/dist/sdk/trace-helpers-J463EU4B.mjs +25 -0
- package/dist/sdk/trace-helpers-J463EU4B.mjs.map +1 -0
- package/dist/sdk/{workflow-check-provider-5453TW65.mjs → workflow-check-provider-DYSO3PML.mjs} +5 -6
- package/dist/sdk/workflow-check-provider-DYSO3PML.mjs.map +1 -0
- package/dist/sdk/{workflow-check-provider-BSUSPFOF.mjs → workflow-check-provider-FIFFQDQU.mjs} +5 -6
- package/dist/sdk/workflow-check-provider-FIFFQDQU.mjs.map +1 -0
- package/dist/sdk/workflow-check-provider-GJNGTS3F.mjs +29 -0
- package/dist/sdk/workflow-check-provider-GJNGTS3F.mjs.map +1 -0
- package/dist/state-machine/context/build-engine-context.d.ts.map +1 -1
- package/dist/traces/run-2026-03-02T18-32-11-359Z.ndjson +138 -0
- package/dist/traces/run-2026-03-02T18-32-55-702Z.ndjson +1442 -0
- package/dist/utils/tool-resolver.d.ts.map +1 -1
- package/dist/utils/workspace-manager.d.ts +31 -8
- package/dist/utils/workspace-manager.d.ts.map +1 -1
- package/dist/utils/worktree-manager.d.ts +6 -0
- package/dist/utils/worktree-manager.d.ts.map +1 -1
- package/package.json +2 -2
- package/dist/sdk/chunk-EWGX7LI7.mjs.map +0 -1
- package/dist/sdk/chunk-PNZH3JSI.mjs.map +0 -1
- package/dist/sdk/chunk-XKCER23W.mjs +0 -1490
- package/dist/sdk/chunk-XKCER23W.mjs.map +0 -1
- package/dist/sdk/github-frontend-47EU2HBY.mjs.map +0 -1
- package/dist/sdk/knex-store-HPXJILBL.mjs +0 -411
- package/dist/sdk/knex-store-HPXJILBL.mjs.map +0 -1
- package/dist/sdk/loader-YSRMVXC3.mjs +0 -89
- package/dist/sdk/loader-YSRMVXC3.mjs.map +0 -1
- package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs +0 -655
- package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs.map +0 -1
- package/dist/sdk/schedule-tool-2COUUTF7.mjs +0 -18
- package/dist/sdk/validator-XTZJZZJH.mjs +0 -134
- package/dist/sdk/validator-XTZJZZJH.mjs.map +0 -1
- /package/dist/sdk/{check-provider-registry-LBYIKFYM.mjs.map → check-provider-registry-35BPTY4W.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-SCPM6DIT.mjs.map → check-provider-registry-DVQDGTOE.mjs.map} +0 -0
- /package/dist/sdk/{failure-condition-evaluator-FHNZL2US.mjs.map → check-provider-registry-KHPY6LB4.mjs.map} +0 -0
- /package/dist/sdk/{chunk-FBJ7MC7R.mjs.map → chunk-AYQE4JCU.mjs.map} +0 -0
- /package/dist/sdk/{chunk-V2QW6ECX.mjs.map → chunk-RJLJUTSU.mjs.map} +0 -0
- /package/dist/sdk/{routing-CZ36LVVS.mjs.map → failure-condition-evaluator-I6QWFKV3.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-2COUUTF7.mjs.map → failure-condition-evaluator-IVCTD4BZ.mjs.map} +0 -0
- /package/dist/sdk/{host-GVR4UGZ3.mjs.map → host-4F6I3ZXN.mjs.map} +0 -0
- /package/dist/sdk/{host-KGN5OIAM.mjs.map → host-H7IX4GBK.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-E7XHMU5G.mjs.map → routing-LU5PAREW.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-KFYNV7HL.mjs.map → routing-UT3BXBXH.mjs.map} +0 -0
- /package/dist/sdk/{trace-helpers-EHDZ42HH.mjs.map → schedule-tool-CONR4VW3.mjs.map} +0 -0
- /package/dist/sdk/{workflow-check-provider-5453TW65.mjs.map → schedule-tool-K3GQXCBN.mjs.map} +0 -0
- /package/dist/sdk/{workflow-check-provider-BSUSPFOF.mjs.map → schedule-tool-SBXAEBDD.mjs.map} +0 -0
package/dist/sdk/sdk.js
CHANGED
|
@@ -646,7 +646,7 @@ var require_package = __commonJS({
|
|
|
646
646
|
"package.json"(exports2, module2) {
|
|
647
647
|
module2.exports = {
|
|
648
648
|
name: "@probelabs/visor",
|
|
649
|
-
version: "0.1.
|
|
649
|
+
version: "0.1.148",
|
|
650
650
|
main: "dist/index.js",
|
|
651
651
|
bin: {
|
|
652
652
|
visor: "./dist/index.js"
|
|
@@ -760,7 +760,7 @@ var require_package = __commonJS({
|
|
|
760
760
|
"@opentelemetry/sdk-node": "^0.203.0",
|
|
761
761
|
"@opentelemetry/sdk-trace-base": "^1.30.1",
|
|
762
762
|
"@opentelemetry/semantic-conventions": "^1.30.1",
|
|
763
|
-
"@probelabs/probe": "^0.6.0-
|
|
763
|
+
"@probelabs/probe": "^0.6.0-rc264",
|
|
764
764
|
"@types/commander": "^2.12.0",
|
|
765
765
|
"@types/uuid": "^10.0.0",
|
|
766
766
|
acorn: "^8.16.0",
|
|
@@ -864,11 +864,11 @@ function getTracer() {
|
|
|
864
864
|
}
|
|
865
865
|
async function withActiveSpan(name, attrs, fn) {
|
|
866
866
|
const tracer = getTracer();
|
|
867
|
-
return await new Promise((
|
|
867
|
+
return await new Promise((resolve14, reject) => {
|
|
868
868
|
const callback = async (span) => {
|
|
869
869
|
try {
|
|
870
870
|
const res = await fn(span);
|
|
871
|
-
|
|
871
|
+
resolve14(res);
|
|
872
872
|
} catch (err) {
|
|
873
873
|
try {
|
|
874
874
|
if (err instanceof Error) span.recordException(err);
|
|
@@ -945,19 +945,19 @@ function __getOrCreateNdjsonPath() {
|
|
|
945
945
|
try {
|
|
946
946
|
if (process.env.VISOR_TELEMETRY_SINK && process.env.VISOR_TELEMETRY_SINK !== "file")
|
|
947
947
|
return null;
|
|
948
|
-
const
|
|
949
|
-
const
|
|
948
|
+
const path27 = require("path");
|
|
949
|
+
const fs23 = require("fs");
|
|
950
950
|
if (process.env.VISOR_FALLBACK_TRACE_FILE) {
|
|
951
951
|
__ndjsonPath = process.env.VISOR_FALLBACK_TRACE_FILE;
|
|
952
|
-
const dir =
|
|
953
|
-
if (!
|
|
952
|
+
const dir = path27.dirname(__ndjsonPath);
|
|
953
|
+
if (!fs23.existsSync(dir)) fs23.mkdirSync(dir, { recursive: true });
|
|
954
954
|
return __ndjsonPath;
|
|
955
955
|
}
|
|
956
|
-
const outDir = process.env.VISOR_TRACE_DIR ||
|
|
957
|
-
if (!
|
|
956
|
+
const outDir = process.env.VISOR_TRACE_DIR || path27.join(process.cwd(), "output", "traces");
|
|
957
|
+
if (!fs23.existsSync(outDir)) fs23.mkdirSync(outDir, { recursive: true });
|
|
958
958
|
if (!__ndjsonPath) {
|
|
959
959
|
const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
960
|
-
__ndjsonPath =
|
|
960
|
+
__ndjsonPath = path27.join(outDir, `${ts}.ndjson`);
|
|
961
961
|
}
|
|
962
962
|
return __ndjsonPath;
|
|
963
963
|
} catch {
|
|
@@ -966,11 +966,11 @@ function __getOrCreateNdjsonPath() {
|
|
|
966
966
|
}
|
|
967
967
|
function _appendRunMarker() {
|
|
968
968
|
try {
|
|
969
|
-
const
|
|
969
|
+
const fs23 = require("fs");
|
|
970
970
|
const p = __getOrCreateNdjsonPath();
|
|
971
971
|
if (!p) return;
|
|
972
972
|
const line = { name: "visor.run", attributes: { started: true } };
|
|
973
|
-
|
|
973
|
+
fs23.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
|
|
974
974
|
} catch {
|
|
975
975
|
}
|
|
976
976
|
}
|
|
@@ -3193,7 +3193,7 @@ var init_failure_condition_evaluator = __esm({
|
|
|
3193
3193
|
*/
|
|
3194
3194
|
evaluateExpression(condition, context2) {
|
|
3195
3195
|
try {
|
|
3196
|
-
const
|
|
3196
|
+
const normalize4 = (expr) => {
|
|
3197
3197
|
const trimmed = expr.trim();
|
|
3198
3198
|
if (!/[\n;]/.test(trimmed)) return trimmed;
|
|
3199
3199
|
const parts = trimmed.split(/[\n;]+/).map((s) => s.trim()).filter((s) => s.length > 0 && !s.startsWith("//"));
|
|
@@ -3351,7 +3351,7 @@ var init_failure_condition_evaluator = __esm({
|
|
|
3351
3351
|
try {
|
|
3352
3352
|
exec2 = this.sandbox.compile(`return (${raw});`);
|
|
3353
3353
|
} catch {
|
|
3354
|
-
const normalizedExpr =
|
|
3354
|
+
const normalizedExpr = normalize4(condition);
|
|
3355
3355
|
exec2 = this.sandbox.compile(`return (${normalizedExpr});`);
|
|
3356
3356
|
}
|
|
3357
3357
|
const result = exec2(scope).run();
|
|
@@ -3734,9 +3734,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
3734
3734
|
});
|
|
3735
3735
|
liquid.registerFilter("get", (obj, pathExpr) => {
|
|
3736
3736
|
if (obj == null) return void 0;
|
|
3737
|
-
const
|
|
3738
|
-
if (!
|
|
3739
|
-
const parts =
|
|
3737
|
+
const path27 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
|
|
3738
|
+
if (!path27) return obj;
|
|
3739
|
+
const parts = path27.split(".");
|
|
3740
3740
|
let cur = obj;
|
|
3741
3741
|
for (const p of parts) {
|
|
3742
3742
|
if (cur == null) return void 0;
|
|
@@ -3855,9 +3855,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
3855
3855
|
}
|
|
3856
3856
|
}
|
|
3857
3857
|
const defaultRole = typeof rolesCfg.default === "string" && rolesCfg.default.trim() ? rolesCfg.default.trim() : void 0;
|
|
3858
|
-
const getNested = (obj,
|
|
3859
|
-
if (!obj || !
|
|
3860
|
-
const parts =
|
|
3858
|
+
const getNested = (obj, path27) => {
|
|
3859
|
+
if (!obj || !path27) return void 0;
|
|
3860
|
+
const parts = path27.split(".");
|
|
3861
3861
|
let cur = obj;
|
|
3862
3862
|
for (const p of parts) {
|
|
3863
3863
|
if (cur == null) return void 0;
|
|
@@ -6409,8 +6409,8 @@ var init_dependency_gating = __esm({
|
|
|
6409
6409
|
async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
6410
6410
|
try {
|
|
6411
6411
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
6412
|
-
const
|
|
6413
|
-
const
|
|
6412
|
+
const fs23 = await import("fs/promises");
|
|
6413
|
+
const path27 = await import("path");
|
|
6414
6414
|
const schemaRaw = checkConfig.schema || "plain";
|
|
6415
6415
|
const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
|
|
6416
6416
|
let templateContent;
|
|
@@ -6418,24 +6418,24 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
|
6418
6418
|
templateContent = String(checkConfig.template.content);
|
|
6419
6419
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
6420
6420
|
const file = String(checkConfig.template.file);
|
|
6421
|
-
const resolved =
|
|
6422
|
-
templateContent = await
|
|
6421
|
+
const resolved = path27.resolve(process.cwd(), file);
|
|
6422
|
+
templateContent = await fs23.readFile(resolved, "utf-8");
|
|
6423
6423
|
} else if (schema && schema !== "plain") {
|
|
6424
6424
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
6425
6425
|
if (sanitized) {
|
|
6426
6426
|
const candidatePaths = [
|
|
6427
|
-
|
|
6427
|
+
path27.join(__dirname, "output", sanitized, "template.liquid"),
|
|
6428
6428
|
// bundled: dist/output/
|
|
6429
|
-
|
|
6429
|
+
path27.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
6430
6430
|
// source: output/
|
|
6431
|
-
|
|
6431
|
+
path27.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
6432
6432
|
// fallback: cwd/output/
|
|
6433
|
-
|
|
6433
|
+
path27.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
6434
6434
|
// fallback: cwd/dist/output/
|
|
6435
6435
|
];
|
|
6436
6436
|
for (const p of candidatePaths) {
|
|
6437
6437
|
try {
|
|
6438
|
-
templateContent = await
|
|
6438
|
+
templateContent = await fs23.readFile(p, "utf-8");
|
|
6439
6439
|
if (templateContent) break;
|
|
6440
6440
|
} catch {
|
|
6441
6441
|
}
|
|
@@ -6840,7 +6840,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
6840
6840
|
}
|
|
6841
6841
|
try {
|
|
6842
6842
|
const originalProbePath = process.env.PROBE_PATH;
|
|
6843
|
-
const
|
|
6843
|
+
const fs23 = require("fs");
|
|
6844
6844
|
const possiblePaths = [
|
|
6845
6845
|
// Relative to current working directory (most common in production)
|
|
6846
6846
|
path6.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
|
|
@@ -6851,7 +6851,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
6851
6851
|
];
|
|
6852
6852
|
let probeBinaryPath;
|
|
6853
6853
|
for (const candidatePath of possiblePaths) {
|
|
6854
|
-
if (
|
|
6854
|
+
if (fs23.existsSync(candidatePath)) {
|
|
6855
6855
|
probeBinaryPath = candidatePath;
|
|
6856
6856
|
break;
|
|
6857
6857
|
}
|
|
@@ -6972,7 +6972,7 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
6972
6972
|
if (chromiumPath) {
|
|
6973
6973
|
env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;
|
|
6974
6974
|
}
|
|
6975
|
-
const result = await new Promise((
|
|
6975
|
+
const result = await new Promise((resolve14) => {
|
|
6976
6976
|
const proc = (0, import_child_process.spawn)(
|
|
6977
6977
|
"npx",
|
|
6978
6978
|
[
|
|
@@ -7002,13 +7002,13 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
7002
7002
|
});
|
|
7003
7003
|
proc.on("close", (code) => {
|
|
7004
7004
|
if (code === 0) {
|
|
7005
|
-
|
|
7005
|
+
resolve14({ success: true });
|
|
7006
7006
|
} else {
|
|
7007
|
-
|
|
7007
|
+
resolve14({ success: false, error: stderr || `Exit code ${code}` });
|
|
7008
7008
|
}
|
|
7009
7009
|
});
|
|
7010
7010
|
proc.on("error", (err) => {
|
|
7011
|
-
|
|
7011
|
+
resolve14({ success: false, error: err.message });
|
|
7012
7012
|
});
|
|
7013
7013
|
});
|
|
7014
7014
|
if (!result.success) {
|
|
@@ -8153,8 +8153,8 @@ ${schemaString}`);
|
|
|
8153
8153
|
}
|
|
8154
8154
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8155
8155
|
try {
|
|
8156
|
-
const
|
|
8157
|
-
const
|
|
8156
|
+
const fs23 = require("fs");
|
|
8157
|
+
const path27 = require("path");
|
|
8158
8158
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8159
8159
|
const provider = this.config.provider || "auto";
|
|
8160
8160
|
const model = this.config.model || "default";
|
|
@@ -8268,20 +8268,20 @@ ${"=".repeat(60)}
|
|
|
8268
8268
|
`;
|
|
8269
8269
|
readableVersion += `${"=".repeat(60)}
|
|
8270
8270
|
`;
|
|
8271
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8272
|
-
if (!
|
|
8273
|
-
|
|
8271
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
|
|
8272
|
+
if (!fs23.existsSync(debugArtifactsDir)) {
|
|
8273
|
+
fs23.mkdirSync(debugArtifactsDir, { recursive: true });
|
|
8274
8274
|
}
|
|
8275
|
-
const debugFile =
|
|
8275
|
+
const debugFile = path27.join(
|
|
8276
8276
|
debugArtifactsDir,
|
|
8277
8277
|
`prompt-${_checkName || "unknown"}-${timestamp}.json`
|
|
8278
8278
|
);
|
|
8279
|
-
|
|
8280
|
-
const readableFile =
|
|
8279
|
+
fs23.writeFileSync(debugFile, debugJson, "utf-8");
|
|
8280
|
+
const readableFile = path27.join(
|
|
8281
8281
|
debugArtifactsDir,
|
|
8282
8282
|
`prompt-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8283
8283
|
);
|
|
8284
|
-
|
|
8284
|
+
fs23.writeFileSync(readableFile, readableVersion, "utf-8");
|
|
8285
8285
|
log(`
|
|
8286
8286
|
\u{1F4BE} Full debug info saved to:`);
|
|
8287
8287
|
log(` JSON: ${debugFile}`);
|
|
@@ -8314,8 +8314,8 @@ ${"=".repeat(60)}
|
|
|
8314
8314
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
8315
8315
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8316
8316
|
try {
|
|
8317
|
-
const
|
|
8318
|
-
const
|
|
8317
|
+
const fs23 = require("fs");
|
|
8318
|
+
const path27 = require("path");
|
|
8319
8319
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8320
8320
|
const agentAny2 = agent;
|
|
8321
8321
|
let fullHistory = [];
|
|
@@ -8326,8 +8326,8 @@ ${"=".repeat(60)}
|
|
|
8326
8326
|
} else if (agentAny2._messages) {
|
|
8327
8327
|
fullHistory = agentAny2._messages;
|
|
8328
8328
|
}
|
|
8329
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8330
|
-
const sessionBase =
|
|
8329
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
|
|
8330
|
+
const sessionBase = path27.join(
|
|
8331
8331
|
debugArtifactsDir,
|
|
8332
8332
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
8333
8333
|
);
|
|
@@ -8339,7 +8339,7 @@ ${"=".repeat(60)}
|
|
|
8339
8339
|
schema: effectiveSchema,
|
|
8340
8340
|
totalMessages: fullHistory.length
|
|
8341
8341
|
};
|
|
8342
|
-
|
|
8342
|
+
fs23.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
8343
8343
|
let readable = `=============================================================
|
|
8344
8344
|
`;
|
|
8345
8345
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -8366,7 +8366,7 @@ ${"=".repeat(60)}
|
|
|
8366
8366
|
`;
|
|
8367
8367
|
readable += content + "\n";
|
|
8368
8368
|
});
|
|
8369
|
-
|
|
8369
|
+
fs23.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
8370
8370
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
8371
8371
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
8372
8372
|
} catch (error) {
|
|
@@ -8375,11 +8375,11 @@ ${"=".repeat(60)}
|
|
|
8375
8375
|
}
|
|
8376
8376
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8377
8377
|
try {
|
|
8378
|
-
const
|
|
8379
|
-
const
|
|
8378
|
+
const fs23 = require("fs");
|
|
8379
|
+
const path27 = require("path");
|
|
8380
8380
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8381
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8382
|
-
const responseFile =
|
|
8381
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
|
|
8382
|
+
const responseFile = path27.join(
|
|
8383
8383
|
debugArtifactsDir,
|
|
8384
8384
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8385
8385
|
);
|
|
@@ -8412,7 +8412,7 @@ ${"=".repeat(60)}
|
|
|
8412
8412
|
`;
|
|
8413
8413
|
responseContent += `${"=".repeat(60)}
|
|
8414
8414
|
`;
|
|
8415
|
-
|
|
8415
|
+
fs23.writeFileSync(responseFile, responseContent, "utf-8");
|
|
8416
8416
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
8417
8417
|
} catch (error) {
|
|
8418
8418
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -8428,9 +8428,9 @@ ${"=".repeat(60)}
|
|
|
8428
8428
|
await agentAny._telemetryConfig.shutdown();
|
|
8429
8429
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
|
|
8430
8430
|
if (process.env.GITHUB_ACTIONS) {
|
|
8431
|
-
const
|
|
8432
|
-
if (
|
|
8433
|
-
const stats =
|
|
8431
|
+
const fs23 = require("fs");
|
|
8432
|
+
if (fs23.existsSync(agentAny._traceFilePath)) {
|
|
8433
|
+
const stats = fs23.statSync(agentAny._traceFilePath);
|
|
8434
8434
|
console.log(
|
|
8435
8435
|
`::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
|
|
8436
8436
|
);
|
|
@@ -8637,9 +8637,9 @@ ${schemaString}`);
|
|
|
8637
8637
|
const model = this.config.model || "default";
|
|
8638
8638
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8639
8639
|
try {
|
|
8640
|
-
const
|
|
8641
|
-
const
|
|
8642
|
-
const
|
|
8640
|
+
const fs23 = require("fs");
|
|
8641
|
+
const path27 = require("path");
|
|
8642
|
+
const os2 = require("os");
|
|
8643
8643
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8644
8644
|
const debugData = {
|
|
8645
8645
|
timestamp,
|
|
@@ -8711,19 +8711,19 @@ ${"=".repeat(60)}
|
|
|
8711
8711
|
`;
|
|
8712
8712
|
readableVersion += `${"=".repeat(60)}
|
|
8713
8713
|
`;
|
|
8714
|
-
const tempDir =
|
|
8715
|
-
const promptFile =
|
|
8716
|
-
|
|
8714
|
+
const tempDir = os2.tmpdir();
|
|
8715
|
+
const promptFile = path27.join(tempDir, `visor-prompt-${timestamp}.txt`);
|
|
8716
|
+
fs23.writeFileSync(promptFile, prompt, "utf-8");
|
|
8717
8717
|
log(`
|
|
8718
8718
|
\u{1F4BE} Prompt saved to: ${promptFile}`);
|
|
8719
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8719
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
|
|
8720
8720
|
try {
|
|
8721
|
-
const base =
|
|
8721
|
+
const base = path27.join(
|
|
8722
8722
|
debugArtifactsDir,
|
|
8723
8723
|
`prompt-${_checkName || "unknown"}-${timestamp}`
|
|
8724
8724
|
);
|
|
8725
|
-
|
|
8726
|
-
|
|
8725
|
+
fs23.writeFileSync(base + ".json", debugJson, "utf-8");
|
|
8726
|
+
fs23.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
|
|
8727
8727
|
log(`
|
|
8728
8728
|
\u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
|
|
8729
8729
|
} catch {
|
|
@@ -8768,8 +8768,8 @@ $ ${cliCommand}
|
|
|
8768
8768
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
8769
8769
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8770
8770
|
try {
|
|
8771
|
-
const
|
|
8772
|
-
const
|
|
8771
|
+
const fs23 = require("fs");
|
|
8772
|
+
const path27 = require("path");
|
|
8773
8773
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8774
8774
|
const agentAny = agent;
|
|
8775
8775
|
let fullHistory = [];
|
|
@@ -8780,8 +8780,8 @@ $ ${cliCommand}
|
|
|
8780
8780
|
} else if (agentAny._messages) {
|
|
8781
8781
|
fullHistory = agentAny._messages;
|
|
8782
8782
|
}
|
|
8783
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8784
|
-
const sessionBase =
|
|
8783
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
|
|
8784
|
+
const sessionBase = path27.join(
|
|
8785
8785
|
debugArtifactsDir,
|
|
8786
8786
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
8787
8787
|
);
|
|
@@ -8793,7 +8793,7 @@ $ ${cliCommand}
|
|
|
8793
8793
|
schema: effectiveSchema,
|
|
8794
8794
|
totalMessages: fullHistory.length
|
|
8795
8795
|
};
|
|
8796
|
-
|
|
8796
|
+
fs23.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
8797
8797
|
let readable = `=============================================================
|
|
8798
8798
|
`;
|
|
8799
8799
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -8820,7 +8820,7 @@ ${"=".repeat(60)}
|
|
|
8820
8820
|
`;
|
|
8821
8821
|
readable += content + "\n";
|
|
8822
8822
|
});
|
|
8823
|
-
|
|
8823
|
+
fs23.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
8824
8824
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
8825
8825
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
8826
8826
|
} catch (error) {
|
|
@@ -8829,11 +8829,11 @@ ${"=".repeat(60)}
|
|
|
8829
8829
|
}
|
|
8830
8830
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8831
8831
|
try {
|
|
8832
|
-
const
|
|
8833
|
-
const
|
|
8832
|
+
const fs23 = require("fs");
|
|
8833
|
+
const path27 = require("path");
|
|
8834
8834
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8835
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8836
|
-
const responseFile =
|
|
8835
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path27.join(process.cwd(), "debug-artifacts");
|
|
8836
|
+
const responseFile = path27.join(
|
|
8837
8837
|
debugArtifactsDir,
|
|
8838
8838
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8839
8839
|
);
|
|
@@ -8866,7 +8866,7 @@ ${"=".repeat(60)}
|
|
|
8866
8866
|
`;
|
|
8867
8867
|
responseContent += `${"=".repeat(60)}
|
|
8868
8868
|
`;
|
|
8869
|
-
|
|
8869
|
+
fs23.writeFileSync(responseFile, responseContent, "utf-8");
|
|
8870
8870
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
8871
8871
|
} catch (error) {
|
|
8872
8872
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -8884,9 +8884,9 @@ ${"=".repeat(60)}
|
|
|
8884
8884
|
await telemetry.shutdown();
|
|
8885
8885
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
|
|
8886
8886
|
if (process.env.GITHUB_ACTIONS) {
|
|
8887
|
-
const
|
|
8888
|
-
if (
|
|
8889
|
-
const stats =
|
|
8887
|
+
const fs23 = require("fs");
|
|
8888
|
+
if (fs23.existsSync(traceFilePath)) {
|
|
8889
|
+
const stats = fs23.statSync(traceFilePath);
|
|
8890
8890
|
console.log(
|
|
8891
8891
|
`::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
|
|
8892
8892
|
);
|
|
@@ -8924,8 +8924,8 @@ ${"=".repeat(60)}
|
|
|
8924
8924
|
* Load schema content from schema files or inline definitions
|
|
8925
8925
|
*/
|
|
8926
8926
|
async loadSchemaContent(schema) {
|
|
8927
|
-
const
|
|
8928
|
-
const
|
|
8927
|
+
const fs23 = require("fs").promises;
|
|
8928
|
+
const path27 = require("path");
|
|
8929
8929
|
if (typeof schema === "object" && schema !== null) {
|
|
8930
8930
|
log("\u{1F4CB} Using inline schema object from configuration");
|
|
8931
8931
|
return JSON.stringify(schema);
|
|
@@ -8938,14 +8938,14 @@ ${"=".repeat(60)}
|
|
|
8938
8938
|
}
|
|
8939
8939
|
} catch {
|
|
8940
8940
|
}
|
|
8941
|
-
if ((schema.startsWith("./") || schema.includes(".json")) && !
|
|
8941
|
+
if ((schema.startsWith("./") || schema.includes(".json")) && !path27.isAbsolute(schema)) {
|
|
8942
8942
|
if (schema.includes("..") || schema.includes("\0")) {
|
|
8943
8943
|
throw new Error("Invalid schema path: path traversal not allowed");
|
|
8944
8944
|
}
|
|
8945
8945
|
try {
|
|
8946
|
-
const schemaPath =
|
|
8946
|
+
const schemaPath = path27.resolve(process.cwd(), schema);
|
|
8947
8947
|
log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
|
|
8948
|
-
const schemaContent = await
|
|
8948
|
+
const schemaContent = await fs23.readFile(schemaPath, "utf-8");
|
|
8949
8949
|
return schemaContent.trim();
|
|
8950
8950
|
} catch (error) {
|
|
8951
8951
|
throw new Error(
|
|
@@ -8959,22 +8959,22 @@ ${"=".repeat(60)}
|
|
|
8959
8959
|
}
|
|
8960
8960
|
const candidatePaths = [
|
|
8961
8961
|
// GitHub Action bundle location
|
|
8962
|
-
|
|
8962
|
+
path27.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
|
|
8963
8963
|
// Historical fallback when src/output was inadvertently bundled as output1/
|
|
8964
|
-
|
|
8964
|
+
path27.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
|
|
8965
8965
|
// Local dev (repo root)
|
|
8966
|
-
|
|
8966
|
+
path27.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
|
|
8967
8967
|
];
|
|
8968
8968
|
for (const schemaPath of candidatePaths) {
|
|
8969
8969
|
try {
|
|
8970
|
-
const schemaContent = await
|
|
8970
|
+
const schemaContent = await fs23.readFile(schemaPath, "utf-8");
|
|
8971
8971
|
return schemaContent.trim();
|
|
8972
8972
|
} catch {
|
|
8973
8973
|
}
|
|
8974
8974
|
}
|
|
8975
|
-
const distPath =
|
|
8976
|
-
const distAltPath =
|
|
8977
|
-
const cwdPath =
|
|
8975
|
+
const distPath = path27.join(__dirname, "output", sanitizedSchemaName, "schema.json");
|
|
8976
|
+
const distAltPath = path27.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
|
|
8977
|
+
const cwdPath = path27.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
|
|
8978
8978
|
throw new Error(
|
|
8979
8979
|
`Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
|
|
8980
8980
|
);
|
|
@@ -9219,7 +9219,7 @@ ${"=".repeat(60)}
|
|
|
9219
9219
|
* Generate mock response for testing
|
|
9220
9220
|
*/
|
|
9221
9221
|
async generateMockResponse(_prompt, _checkName, _schema) {
|
|
9222
|
-
await new Promise((
|
|
9222
|
+
await new Promise((resolve14) => setTimeout(resolve14, 500));
|
|
9223
9223
|
const name = (_checkName || "").toLowerCase();
|
|
9224
9224
|
if (name.includes("extract-facts")) {
|
|
9225
9225
|
const arr = Array.from({ length: 6 }, (_, i) => ({
|
|
@@ -9580,7 +9580,7 @@ var init_command_executor = __esm({
|
|
|
9580
9580
|
* Execute command with stdin input
|
|
9581
9581
|
*/
|
|
9582
9582
|
executeWithStdin(command, options) {
|
|
9583
|
-
return new Promise((
|
|
9583
|
+
return new Promise((resolve14, reject) => {
|
|
9584
9584
|
const childProcess = (0, import_child_process2.exec)(
|
|
9585
9585
|
command,
|
|
9586
9586
|
{
|
|
@@ -9592,7 +9592,7 @@ var init_command_executor = __esm({
|
|
|
9592
9592
|
if (error && error.killed && (error.code === "ETIMEDOUT" || error.signal === "SIGTERM")) {
|
|
9593
9593
|
reject(new Error(`Command timed out after ${options.timeout || 3e4}ms`));
|
|
9594
9594
|
} else {
|
|
9595
|
-
|
|
9595
|
+
resolve14({
|
|
9596
9596
|
stdout: stdout || "",
|
|
9597
9597
|
stderr: stderr || "",
|
|
9598
9598
|
exitCode: error ? error.code || 1 : 0
|
|
@@ -17422,17 +17422,17 @@ var init_workflow_check_provider = __esm({
|
|
|
17422
17422
|
* so it can be executed by the state machine as a nested workflow.
|
|
17423
17423
|
*/
|
|
17424
17424
|
async loadWorkflowFromConfigPath(sourcePath, baseDir) {
|
|
17425
|
-
const
|
|
17426
|
-
const
|
|
17425
|
+
const path27 = require("path");
|
|
17426
|
+
const fs23 = require("fs");
|
|
17427
17427
|
const yaml5 = require("js-yaml");
|
|
17428
|
-
const resolved =
|
|
17429
|
-
if (!
|
|
17428
|
+
const resolved = path27.isAbsolute(sourcePath) ? sourcePath : path27.resolve(baseDir, sourcePath);
|
|
17429
|
+
if (!fs23.existsSync(resolved)) {
|
|
17430
17430
|
throw new Error(`Workflow config not found at: ${resolved}`);
|
|
17431
17431
|
}
|
|
17432
|
-
const rawContent =
|
|
17432
|
+
const rawContent = fs23.readFileSync(resolved, "utf8");
|
|
17433
17433
|
const rawData = yaml5.load(rawContent);
|
|
17434
17434
|
if (rawData.imports && Array.isArray(rawData.imports)) {
|
|
17435
|
-
const configDir =
|
|
17435
|
+
const configDir = path27.dirname(resolved);
|
|
17436
17436
|
for (const source of rawData.imports) {
|
|
17437
17437
|
const results = await this.registry.import(source, {
|
|
17438
17438
|
basePath: configDir,
|
|
@@ -17462,8 +17462,8 @@ ${errors}`);
|
|
|
17462
17462
|
if (!steps || Object.keys(steps).length === 0) {
|
|
17463
17463
|
throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
|
|
17464
17464
|
}
|
|
17465
|
-
const id =
|
|
17466
|
-
const name = loaded.name || `Workflow from ${
|
|
17465
|
+
const id = path27.basename(resolved).replace(/\.(ya?ml)$/i, "");
|
|
17466
|
+
const name = loaded.name || `Workflow from ${path27.basename(resolved)}`;
|
|
17467
17467
|
const workflowDef = {
|
|
17468
17468
|
id,
|
|
17469
17469
|
name,
|
|
@@ -18093,8 +18093,8 @@ async function createStoreBackend(storageConfig, haConfig) {
|
|
|
18093
18093
|
case "mssql": {
|
|
18094
18094
|
try {
|
|
18095
18095
|
const loaderPath = "../../enterprise/loader";
|
|
18096
|
-
const { loadEnterpriseStoreBackend
|
|
18097
|
-
return await
|
|
18096
|
+
const { loadEnterpriseStoreBackend } = await import(loaderPath);
|
|
18097
|
+
return await loadEnterpriseStoreBackend(driver, storageConfig, haConfig);
|
|
18098
18098
|
} catch (err) {
|
|
18099
18099
|
const msg = err instanceof Error ? err.message : String(err);
|
|
18100
18100
|
logger.error(`[StoreFactory] Failed to load enterprise ${driver} backend: ${msg}`);
|
|
@@ -18457,6 +18457,986 @@ var init_schedule_parser = __esm({
|
|
|
18457
18457
|
}
|
|
18458
18458
|
});
|
|
18459
18459
|
|
|
18460
|
+
// src/github-auth.ts
|
|
18461
|
+
var github_auth_exports = {};
|
|
18462
|
+
__export(github_auth_exports, {
|
|
18463
|
+
_testGetCachedToken: () => _testGetCachedToken,
|
|
18464
|
+
_testSetCachedToken: () => _testSetCachedToken,
|
|
18465
|
+
createAuthenticatedOctokit: () => createAuthenticatedOctokit,
|
|
18466
|
+
injectGitHubCredentials: () => injectGitHubCredentials,
|
|
18467
|
+
markTokenFresh: () => markTokenFresh,
|
|
18468
|
+
refreshGitHubCredentials: () => refreshGitHubCredentials,
|
|
18469
|
+
resolveAuthFromEnvironment: () => resolveAuthFromEnvironment,
|
|
18470
|
+
resolvePrivateKey: () => resolvePrivateKey,
|
|
18471
|
+
startTokenRefreshTimer: () => startTokenRefreshTimer,
|
|
18472
|
+
stopTokenRefreshTimer: () => stopTokenRefreshTimer
|
|
18473
|
+
});
|
|
18474
|
+
async function createAuthenticatedOctokit(options) {
|
|
18475
|
+
const { token, appId, installationId, owner, repo } = options;
|
|
18476
|
+
const privateKey = options.privateKey ? resolvePrivateKey(options.privateKey) : void 0;
|
|
18477
|
+
if (appId && privateKey) {
|
|
18478
|
+
const { createAppAuth } = await import("@octokit/auth-app");
|
|
18479
|
+
let finalInstallationId;
|
|
18480
|
+
if (installationId) {
|
|
18481
|
+
finalInstallationId = parseInt(installationId, 10);
|
|
18482
|
+
if (isNaN(finalInstallationId) || finalInstallationId <= 0) {
|
|
18483
|
+
throw new Error("Invalid installation-id. It must be a positive integer.");
|
|
18484
|
+
}
|
|
18485
|
+
}
|
|
18486
|
+
if (!finalInstallationId && owner && repo) {
|
|
18487
|
+
const appOctokit = new import_rest.Octokit({
|
|
18488
|
+
authStrategy: createAppAuth,
|
|
18489
|
+
auth: { appId, privateKey }
|
|
18490
|
+
});
|
|
18491
|
+
try {
|
|
18492
|
+
const { data: installation } = await appOctokit.rest.apps.getRepoInstallation({
|
|
18493
|
+
owner,
|
|
18494
|
+
repo
|
|
18495
|
+
});
|
|
18496
|
+
finalInstallationId = installation.id;
|
|
18497
|
+
} catch {
|
|
18498
|
+
throw new Error(
|
|
18499
|
+
"GitHub App installation ID could not be auto-detected. Provide --github-installation-id or ensure the app is installed on the repository."
|
|
18500
|
+
);
|
|
18501
|
+
}
|
|
18502
|
+
}
|
|
18503
|
+
if (!finalInstallationId) {
|
|
18504
|
+
throw new Error(
|
|
18505
|
+
"GitHub App installation ID is required. Provide --github-installation-id or set owner/repo for auto-detection."
|
|
18506
|
+
);
|
|
18507
|
+
}
|
|
18508
|
+
const octokit = new import_rest.Octokit({
|
|
18509
|
+
authStrategy: createAppAuth,
|
|
18510
|
+
auth: {
|
|
18511
|
+
appId,
|
|
18512
|
+
privateKey,
|
|
18513
|
+
installationId: finalInstallationId
|
|
18514
|
+
}
|
|
18515
|
+
});
|
|
18516
|
+
const authResult = await octokit.auth({ type: "installation" });
|
|
18517
|
+
return {
|
|
18518
|
+
octokit,
|
|
18519
|
+
authType: "github-app",
|
|
18520
|
+
token: authResult.token
|
|
18521
|
+
};
|
|
18522
|
+
}
|
|
18523
|
+
if (token) {
|
|
18524
|
+
return {
|
|
18525
|
+
octokit: new import_rest.Octokit({ auth: token }),
|
|
18526
|
+
authType: "token",
|
|
18527
|
+
token
|
|
18528
|
+
};
|
|
18529
|
+
}
|
|
18530
|
+
return void 0;
|
|
18531
|
+
}
|
|
18532
|
+
function resolveAuthFromEnvironment() {
|
|
18533
|
+
return {
|
|
18534
|
+
token: process.env.GITHUB_TOKEN || process.env.GH_TOKEN,
|
|
18535
|
+
appId: process.env.GITHUB_APP_ID,
|
|
18536
|
+
privateKey: process.env.GITHUB_APP_PRIVATE_KEY,
|
|
18537
|
+
installationId: process.env.GITHUB_APP_INSTALLATION_ID,
|
|
18538
|
+
owner: process.env.GITHUB_REPOSITORY_OWNER || process.env.GITHUB_REPOSITORY?.split("/")[0],
|
|
18539
|
+
repo: process.env.GITHUB_REPOSITORY?.split("/")[1]
|
|
18540
|
+
};
|
|
18541
|
+
}
|
|
18542
|
+
function resolvePrivateKey(keyOrPath) {
|
|
18543
|
+
if (keyOrPath.includes("-----BEGIN")) {
|
|
18544
|
+
return keyOrPath;
|
|
18545
|
+
}
|
|
18546
|
+
const resolved = path16.resolve(keyOrPath);
|
|
18547
|
+
if (fs13.existsSync(resolved)) {
|
|
18548
|
+
return fs13.readFileSync(resolved, "utf8");
|
|
18549
|
+
}
|
|
18550
|
+
return keyOrPath;
|
|
18551
|
+
}
|
|
18552
|
+
function injectGitHubCredentials(token) {
|
|
18553
|
+
process.env.GITHUB_TOKEN = token;
|
|
18554
|
+
process.env.GH_TOKEN = token;
|
|
18555
|
+
const currentCount = parseInt(process.env.GIT_CONFIG_COUNT || "0", 10);
|
|
18556
|
+
let base;
|
|
18557
|
+
if (_authBase === void 0) {
|
|
18558
|
+
base = currentCount;
|
|
18559
|
+
} else if (_lastWrittenCount !== void 0 && currentCount !== _lastWrittenCount) {
|
|
18560
|
+
base = currentCount;
|
|
18561
|
+
} else {
|
|
18562
|
+
base = _authBase;
|
|
18563
|
+
}
|
|
18564
|
+
_authBase = base;
|
|
18565
|
+
const authUrl = `https://x-access-token:${token}@github.com/`;
|
|
18566
|
+
process.env[`GIT_CONFIG_KEY_${base}`] = `url.${authUrl}.insteadOf`;
|
|
18567
|
+
process.env[`GIT_CONFIG_VALUE_${base}`] = "https://github.com/";
|
|
18568
|
+
process.env[`GIT_CONFIG_KEY_${base + 1}`] = `url.${authUrl}.insteadOf`;
|
|
18569
|
+
process.env[`GIT_CONFIG_VALUE_${base + 1}`] = "git@github.com:";
|
|
18570
|
+
const newCount = base + 2;
|
|
18571
|
+
process.env.GIT_CONFIG_COUNT = String(newCount);
|
|
18572
|
+
_lastWrittenCount = newCount;
|
|
18573
|
+
}
|
|
18574
|
+
function markTokenFresh() {
|
|
18575
|
+
const token = process.env.GITHUB_TOKEN || process.env.GH_TOKEN;
|
|
18576
|
+
if (token) {
|
|
18577
|
+
_cachedAppToken = { token, generatedAt: Date.now() };
|
|
18578
|
+
}
|
|
18579
|
+
}
|
|
18580
|
+
async function refreshGitHubCredentials() {
|
|
18581
|
+
const appId = process.env.GITHUB_APP_ID;
|
|
18582
|
+
const privateKey = process.env.GITHUB_APP_PRIVATE_KEY;
|
|
18583
|
+
if (!appId || !privateKey) return;
|
|
18584
|
+
const now = Date.now();
|
|
18585
|
+
if (_cachedAppToken && now - _cachedAppToken.generatedAt < TOKEN_REFRESH_MS) {
|
|
18586
|
+
return;
|
|
18587
|
+
}
|
|
18588
|
+
try {
|
|
18589
|
+
const opts = resolveAuthFromEnvironment();
|
|
18590
|
+
const result = await createAuthenticatedOctokit(opts);
|
|
18591
|
+
if (result && result.authType === "github-app") {
|
|
18592
|
+
injectGitHubCredentials(result.token);
|
|
18593
|
+
_cachedAppToken = { token: result.token, generatedAt: now };
|
|
18594
|
+
logger.debug("[github-auth] Refreshed GitHub App installation token");
|
|
18595
|
+
}
|
|
18596
|
+
} catch (err) {
|
|
18597
|
+
const age = _cachedAppToken ? `${Math.round((now - _cachedAppToken.generatedAt) / 6e4)}min old` : "no cached token";
|
|
18598
|
+
logger.warn(
|
|
18599
|
+
`[github-auth] Failed to refresh GitHub App token (${age}): ${err instanceof Error ? err.message : String(err)}. Child processes may fail with authentication errors.`
|
|
18600
|
+
);
|
|
18601
|
+
}
|
|
18602
|
+
}
|
|
18603
|
+
function startTokenRefreshTimer() {
|
|
18604
|
+
if (_refreshTimer) return;
|
|
18605
|
+
const appId = process.env.GITHUB_APP_ID;
|
|
18606
|
+
const privateKey = process.env.GITHUB_APP_PRIVATE_KEY;
|
|
18607
|
+
if (!appId || !privateKey) return;
|
|
18608
|
+
_refreshTimer = setInterval(() => {
|
|
18609
|
+
refreshGitHubCredentials().catch((err) => {
|
|
18610
|
+
logger.warn(
|
|
18611
|
+
`[github-auth] Background token refresh failed: ${err instanceof Error ? err.message : String(err)}`
|
|
18612
|
+
);
|
|
18613
|
+
});
|
|
18614
|
+
}, TIMER_INTERVAL_MS);
|
|
18615
|
+
_refreshTimer.unref();
|
|
18616
|
+
logger.debug("[github-auth] Background token refresh timer started (every 30 min)");
|
|
18617
|
+
}
|
|
18618
|
+
function stopTokenRefreshTimer() {
|
|
18619
|
+
if (_refreshTimer) {
|
|
18620
|
+
clearInterval(_refreshTimer);
|
|
18621
|
+
_refreshTimer = void 0;
|
|
18622
|
+
logger.debug("[github-auth] Background token refresh timer stopped");
|
|
18623
|
+
}
|
|
18624
|
+
}
|
|
18625
|
+
function _testSetCachedToken(token, generatedAt) {
|
|
18626
|
+
if (token) {
|
|
18627
|
+
_cachedAppToken = { token, generatedAt: generatedAt ?? Date.now() };
|
|
18628
|
+
} else {
|
|
18629
|
+
_cachedAppToken = void 0;
|
|
18630
|
+
}
|
|
18631
|
+
}
|
|
18632
|
+
function _testGetCachedToken() {
|
|
18633
|
+
return _cachedAppToken;
|
|
18634
|
+
}
|
|
18635
|
+
var import_rest, fs13, path16, _authBase, _lastWrittenCount, _cachedAppToken, TOKEN_REFRESH_MS, _refreshTimer, TIMER_INTERVAL_MS;
|
|
18636
|
+
var init_github_auth = __esm({
|
|
18637
|
+
"src/github-auth.ts"() {
|
|
18638
|
+
"use strict";
|
|
18639
|
+
import_rest = require("@octokit/rest");
|
|
18640
|
+
fs13 = __toESM(require("fs"));
|
|
18641
|
+
path16 = __toESM(require("path"));
|
|
18642
|
+
init_logger();
|
|
18643
|
+
TOKEN_REFRESH_MS = 45 * 60 * 1e3;
|
|
18644
|
+
TIMER_INTERVAL_MS = 30 * 60 * 1e3;
|
|
18645
|
+
}
|
|
18646
|
+
});
|
|
18647
|
+
|
|
18648
|
+
// src/scheduler/scheduler.ts
|
|
18649
|
+
function getScheduler(visorConfig, config) {
|
|
18650
|
+
if (!schedulerInstance && visorConfig) {
|
|
18651
|
+
schedulerInstance = new Scheduler(visorConfig, config);
|
|
18652
|
+
}
|
|
18653
|
+
return schedulerInstance;
|
|
18654
|
+
}
|
|
18655
|
+
var import_node_cron, Scheduler, schedulerInstance;
|
|
18656
|
+
var init_scheduler = __esm({
|
|
18657
|
+
"src/scheduler/scheduler.ts"() {
|
|
18658
|
+
"use strict";
|
|
18659
|
+
import_node_cron = __toESM(require("node-cron"));
|
|
18660
|
+
init_schedule_store();
|
|
18661
|
+
init_schedule_parser();
|
|
18662
|
+
init_logger();
|
|
18663
|
+
init_state_machine_execution_engine();
|
|
18664
|
+
Scheduler = class {
|
|
18665
|
+
store;
|
|
18666
|
+
visorConfig;
|
|
18667
|
+
checkIntervalMs;
|
|
18668
|
+
defaultTimezone;
|
|
18669
|
+
checkInterval = null;
|
|
18670
|
+
cronJobs = /* @__PURE__ */ new Map();
|
|
18671
|
+
oneTimeTimeouts = /* @__PURE__ */ new Map();
|
|
18672
|
+
running = false;
|
|
18673
|
+
engine;
|
|
18674
|
+
outputAdapters = /* @__PURE__ */ new Map();
|
|
18675
|
+
executionContext = {};
|
|
18676
|
+
contextEnricher;
|
|
18677
|
+
// HA fields
|
|
18678
|
+
haConfig;
|
|
18679
|
+
nodeId;
|
|
18680
|
+
heartbeatInterval = null;
|
|
18681
|
+
heldLocks = /* @__PURE__ */ new Map();
|
|
18682
|
+
// scheduleId → lockToken
|
|
18683
|
+
constructor(visorConfig, config) {
|
|
18684
|
+
this.visorConfig = visorConfig;
|
|
18685
|
+
this.checkIntervalMs = config?.checkIntervalMs ?? 6e4;
|
|
18686
|
+
this.defaultTimezone = config?.defaultTimezone ?? "UTC";
|
|
18687
|
+
this.haConfig = config?.ha;
|
|
18688
|
+
this.nodeId = config?.ha?.node_id || `${require("os").hostname()}-${process.pid}`;
|
|
18689
|
+
const storeConfig = {
|
|
18690
|
+
path: config?.storagePath,
|
|
18691
|
+
storage: config?.storage,
|
|
18692
|
+
ha: config?.ha
|
|
18693
|
+
};
|
|
18694
|
+
this.store = ScheduleStore.getInstance(storeConfig, config?.limits);
|
|
18695
|
+
}
|
|
18696
|
+
/**
|
|
18697
|
+
* Set the execution engine (called after construction to avoid circular deps)
|
|
18698
|
+
*/
|
|
18699
|
+
setEngine(engine) {
|
|
18700
|
+
this.engine = engine;
|
|
18701
|
+
}
|
|
18702
|
+
/**
|
|
18703
|
+
* Set the execution context (e.g., Slack client) for workflow executions
|
|
18704
|
+
*/
|
|
18705
|
+
setExecutionContext(context2) {
|
|
18706
|
+
this.executionContext = { ...this.executionContext, ...context2 };
|
|
18707
|
+
}
|
|
18708
|
+
/**
|
|
18709
|
+
* Register an output adapter for a specific type
|
|
18710
|
+
*/
|
|
18711
|
+
registerOutputAdapter(adapter) {
|
|
18712
|
+
this.outputAdapters.set(adapter.type, adapter);
|
|
18713
|
+
logger.debug(`[Scheduler] Registered output adapter: ${adapter.type}`);
|
|
18714
|
+
}
|
|
18715
|
+
/**
|
|
18716
|
+
* Register a context enricher for frontend-specific functionality
|
|
18717
|
+
* This allows frontends to inject thread history, prompt state, etc.
|
|
18718
|
+
*/
|
|
18719
|
+
registerContextEnricher(enricher) {
|
|
18720
|
+
this.contextEnricher = enricher;
|
|
18721
|
+
logger.debug("[Scheduler] Registered context enricher");
|
|
18722
|
+
}
|
|
18723
|
+
/**
|
|
18724
|
+
* Get the schedule store instance
|
|
18725
|
+
*/
|
|
18726
|
+
getStore() {
|
|
18727
|
+
return this.store;
|
|
18728
|
+
}
|
|
18729
|
+
/**
|
|
18730
|
+
* Cancel a schedule's in-memory job (cron or timeout).
|
|
18731
|
+
* Called after deleting from DB to ensure the job doesn't fire again.
|
|
18732
|
+
*/
|
|
18733
|
+
cancelSchedule(scheduleId) {
|
|
18734
|
+
const cronJob = this.cronJobs.get(scheduleId);
|
|
18735
|
+
if (cronJob) {
|
|
18736
|
+
cronJob.stop();
|
|
18737
|
+
this.cronJobs.delete(scheduleId);
|
|
18738
|
+
logger.debug(`[Scheduler] Cancelled cron job for schedule ${scheduleId}`);
|
|
18739
|
+
return;
|
|
18740
|
+
}
|
|
18741
|
+
const timeout = this.oneTimeTimeouts.get(scheduleId);
|
|
18742
|
+
if (timeout) {
|
|
18743
|
+
clearTimeout(timeout);
|
|
18744
|
+
this.oneTimeTimeouts.delete(scheduleId);
|
|
18745
|
+
logger.debug(`[Scheduler] Cancelled timeout for schedule ${scheduleId}`);
|
|
18746
|
+
}
|
|
18747
|
+
}
|
|
18748
|
+
/**
|
|
18749
|
+
* Start the scheduler
|
|
18750
|
+
*/
|
|
18751
|
+
async start() {
|
|
18752
|
+
if (this.running) {
|
|
18753
|
+
logger.warn("[Scheduler] Already running");
|
|
18754
|
+
return;
|
|
18755
|
+
}
|
|
18756
|
+
await this.store.initialize();
|
|
18757
|
+
try {
|
|
18758
|
+
await this.loadStaticCronJobs();
|
|
18759
|
+
} catch (err) {
|
|
18760
|
+
logger.error(
|
|
18761
|
+
`[Scheduler] Failed to load static cron jobs: ${err instanceof Error ? err.message : err}`
|
|
18762
|
+
);
|
|
18763
|
+
}
|
|
18764
|
+
try {
|
|
18765
|
+
await this.restoreSchedules();
|
|
18766
|
+
} catch (err) {
|
|
18767
|
+
logger.error(
|
|
18768
|
+
`[Scheduler] Failed to restore schedules: ${err instanceof Error ? err.message : err}`
|
|
18769
|
+
);
|
|
18770
|
+
}
|
|
18771
|
+
this.checkInterval = setInterval(() => {
|
|
18772
|
+
this.checkDueSchedules().catch((error) => {
|
|
18773
|
+
logger.error(
|
|
18774
|
+
`[Scheduler] Error checking due schedules: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
18775
|
+
);
|
|
18776
|
+
});
|
|
18777
|
+
}, this.checkIntervalMs);
|
|
18778
|
+
if (this.haConfig?.enabled) {
|
|
18779
|
+
this.startHeartbeat();
|
|
18780
|
+
}
|
|
18781
|
+
this.running = true;
|
|
18782
|
+
logger.info("[Scheduler] Started");
|
|
18783
|
+
}
|
|
18784
|
+
/**
|
|
18785
|
+
* Load and schedule static cron jobs from visor config
|
|
18786
|
+
* These are defined in scheduler.cron section and always run regardless of permissions
|
|
18787
|
+
*/
|
|
18788
|
+
async loadStaticCronJobs() {
|
|
18789
|
+
const schedulerCfg = this.visorConfig.scheduler;
|
|
18790
|
+
if (!schedulerCfg?.cron) {
|
|
18791
|
+
return;
|
|
18792
|
+
}
|
|
18793
|
+
const cronJobs = schedulerCfg.cron;
|
|
18794
|
+
let loadedCount = 0;
|
|
18795
|
+
for (const [jobId, job] of Object.entries(cronJobs)) {
|
|
18796
|
+
if (job.enabled === false) {
|
|
18797
|
+
logger.debug(`[Scheduler] Static cron job '${jobId}' is disabled, skipping`);
|
|
18798
|
+
continue;
|
|
18799
|
+
}
|
|
18800
|
+
try {
|
|
18801
|
+
await this.scheduleStaticCronJob(jobId, job);
|
|
18802
|
+
loadedCount++;
|
|
18803
|
+
} catch (error) {
|
|
18804
|
+
logger.error(
|
|
18805
|
+
`[Scheduler] Failed to load static cron job '${jobId}': ${error instanceof Error ? error.message : "Unknown error"}`
|
|
18806
|
+
);
|
|
18807
|
+
}
|
|
18808
|
+
}
|
|
18809
|
+
if (loadedCount > 0) {
|
|
18810
|
+
logger.info(`[Scheduler] Loaded ${loadedCount} static cron job(s) from config`);
|
|
18811
|
+
}
|
|
18812
|
+
}
|
|
18813
|
+
/**
|
|
18814
|
+
* Schedule a static cron job from config
|
|
18815
|
+
*/
|
|
18816
|
+
async scheduleStaticCronJob(jobId, job) {
|
|
18817
|
+
if (!import_node_cron.default.validate(job.schedule)) {
|
|
18818
|
+
throw new Error(`Invalid cron expression: ${job.schedule}`);
|
|
18819
|
+
}
|
|
18820
|
+
const allChecks = Object.keys(this.visorConfig.checks || {});
|
|
18821
|
+
if (!allChecks.includes(job.workflow)) {
|
|
18822
|
+
throw new Error(`Workflow "${job.workflow}" not found in configuration`);
|
|
18823
|
+
}
|
|
18824
|
+
const internalId = `__static_cron__:${jobId}`;
|
|
18825
|
+
const cronJob = import_node_cron.default.schedule(
|
|
18826
|
+
job.schedule,
|
|
18827
|
+
async () => {
|
|
18828
|
+
logger.info(`[Scheduler] Executing static cron job '${jobId}': workflow="${job.workflow}"`);
|
|
18829
|
+
await this.executeStaticCronJob(jobId, job);
|
|
18830
|
+
},
|
|
18831
|
+
{
|
|
18832
|
+
scheduled: true,
|
|
18833
|
+
timezone: job.timezone || this.defaultTimezone
|
|
18834
|
+
}
|
|
18835
|
+
);
|
|
18836
|
+
this.cronJobs.set(internalId, cronJob);
|
|
18837
|
+
try {
|
|
18838
|
+
const nextRun = getNextRunTime(job.schedule, job.timezone || this.defaultTimezone);
|
|
18839
|
+
const description = job.description ? ` (${job.description})` : "";
|
|
18840
|
+
logger.debug(
|
|
18841
|
+
`[Scheduler] Scheduled static cron job '${jobId}'${description}: ${job.schedule} \u2192 ${job.workflow}, next run: ${nextRun.toISOString()}`
|
|
18842
|
+
);
|
|
18843
|
+
} catch {
|
|
18844
|
+
}
|
|
18845
|
+
}
|
|
18846
|
+
/**
|
|
18847
|
+
* Execute a static cron job
|
|
18848
|
+
*/
|
|
18849
|
+
async executeStaticCronJob(jobId, job) {
|
|
18850
|
+
if (this.haConfig?.enabled) {
|
|
18851
|
+
const ttl = this.haConfig.lock_ttl ?? 60;
|
|
18852
|
+
const backend = this.store.getBackend();
|
|
18853
|
+
const lockId = `__static_cron__:${jobId}`;
|
|
18854
|
+
const lockToken = await backend.tryAcquireLock(lockId, this.nodeId, ttl);
|
|
18855
|
+
if (!lockToken) {
|
|
18856
|
+
logger.debug(`[Scheduler] Static cron job '${jobId}' locked by another node, skipping`);
|
|
18857
|
+
return;
|
|
18858
|
+
}
|
|
18859
|
+
this.heldLocks.set(lockId, lockToken);
|
|
18860
|
+
try {
|
|
18861
|
+
await this.doExecuteStaticCronJob(jobId, job);
|
|
18862
|
+
} finally {
|
|
18863
|
+
await backend.releaseLock(lockId, lockToken);
|
|
18864
|
+
this.heldLocks.delete(lockId);
|
|
18865
|
+
}
|
|
18866
|
+
} else {
|
|
18867
|
+
await this.doExecuteStaticCronJob(jobId, job);
|
|
18868
|
+
}
|
|
18869
|
+
}
|
|
18870
|
+
/**
|
|
18871
|
+
* Internal: execute a static cron job (after lock is held in HA mode)
|
|
18872
|
+
*/
|
|
18873
|
+
async doExecuteStaticCronJob(jobId, job) {
|
|
18874
|
+
const startTime = Date.now();
|
|
18875
|
+
let result;
|
|
18876
|
+
const syntheticSchedule = {
|
|
18877
|
+
id: `__static_cron__:${jobId}`,
|
|
18878
|
+
creatorId: "system",
|
|
18879
|
+
creatorContext: "config",
|
|
18880
|
+
timezone: job.timezone || this.defaultTimezone,
|
|
18881
|
+
schedule: job.schedule,
|
|
18882
|
+
isRecurring: true,
|
|
18883
|
+
originalExpression: job.schedule,
|
|
18884
|
+
workflow: job.workflow,
|
|
18885
|
+
workflowInputs: job.inputs,
|
|
18886
|
+
outputContext: job.output ? {
|
|
18887
|
+
type: job.output.type,
|
|
18888
|
+
target: job.output.target,
|
|
18889
|
+
threadId: job.output.thread_id
|
|
18890
|
+
} : void 0,
|
|
18891
|
+
status: "active",
|
|
18892
|
+
createdAt: 0,
|
|
18893
|
+
runCount: 0,
|
|
18894
|
+
failureCount: 0
|
|
18895
|
+
};
|
|
18896
|
+
try {
|
|
18897
|
+
const output = await this.executeWorkflow(syntheticSchedule);
|
|
18898
|
+
result = {
|
|
18899
|
+
success: true,
|
|
18900
|
+
output,
|
|
18901
|
+
executionTimeMs: Date.now() - startTime
|
|
18902
|
+
};
|
|
18903
|
+
logger.info(
|
|
18904
|
+
`[Scheduler] Static cron job '${jobId}' completed in ${result.executionTimeMs}ms`
|
|
18905
|
+
);
|
|
18906
|
+
} catch (error) {
|
|
18907
|
+
const errorMsg = error instanceof Error ? error.message : "Unknown error";
|
|
18908
|
+
result = {
|
|
18909
|
+
success: false,
|
|
18910
|
+
error: errorMsg,
|
|
18911
|
+
executionTimeMs: Date.now() - startTime
|
|
18912
|
+
};
|
|
18913
|
+
logger.error(`[Scheduler] Static cron job '${jobId}' failed: ${errorMsg}`);
|
|
18914
|
+
}
|
|
18915
|
+
await this.sendResult(syntheticSchedule, result);
|
|
18916
|
+
}
|
|
18917
|
+
/**
|
|
18918
|
+
* Stop the scheduler
|
|
18919
|
+
*/
|
|
18920
|
+
async stop() {
|
|
18921
|
+
if (!this.running) {
|
|
18922
|
+
return;
|
|
18923
|
+
}
|
|
18924
|
+
this.stopHeartbeat();
|
|
18925
|
+
if (this.heldLocks.size > 0) {
|
|
18926
|
+
const backend = this.store.getBackend();
|
|
18927
|
+
for (const [scheduleId, lockToken] of this.heldLocks.entries()) {
|
|
18928
|
+
await backend.releaseLock(scheduleId, lockToken).catch(() => {
|
|
18929
|
+
});
|
|
18930
|
+
}
|
|
18931
|
+
this.heldLocks.clear();
|
|
18932
|
+
}
|
|
18933
|
+
if (this.checkInterval) {
|
|
18934
|
+
clearInterval(this.checkInterval);
|
|
18935
|
+
this.checkInterval = null;
|
|
18936
|
+
}
|
|
18937
|
+
for (const [id, job] of this.cronJobs.entries()) {
|
|
18938
|
+
job.stop();
|
|
18939
|
+
logger.debug(`[Scheduler] Stopped cron job for schedule ${id}`);
|
|
18940
|
+
}
|
|
18941
|
+
this.cronJobs.clear();
|
|
18942
|
+
for (const [id, timeout] of this.oneTimeTimeouts.entries()) {
|
|
18943
|
+
clearTimeout(timeout);
|
|
18944
|
+
logger.debug(`[Scheduler] Cleared timeout for schedule ${id}`);
|
|
18945
|
+
}
|
|
18946
|
+
this.oneTimeTimeouts.clear();
|
|
18947
|
+
await this.store.flush();
|
|
18948
|
+
this.running = false;
|
|
18949
|
+
logger.info("[Scheduler] Stopped");
|
|
18950
|
+
}
|
|
18951
|
+
/**
|
|
18952
|
+
* Start the heartbeat timer for renewing HA locks
|
|
18953
|
+
*/
|
|
18954
|
+
startHeartbeat() {
|
|
18955
|
+
if (this.heartbeatInterval) return;
|
|
18956
|
+
const intervalMs = (this.haConfig?.heartbeat_interval ?? 15) * 1e3;
|
|
18957
|
+
this.heartbeatInterval = setInterval(async () => {
|
|
18958
|
+
const backend = this.store.getBackend();
|
|
18959
|
+
const ttl = this.haConfig?.lock_ttl ?? 60;
|
|
18960
|
+
for (const [scheduleId, lockToken] of this.heldLocks.entries()) {
|
|
18961
|
+
const renewed = await backend.renewLock(scheduleId, lockToken, ttl);
|
|
18962
|
+
if (!renewed) {
|
|
18963
|
+
logger.warn(`[Scheduler] Failed to renew lock for schedule ${scheduleId}, lock lost`);
|
|
18964
|
+
this.heldLocks.delete(scheduleId);
|
|
18965
|
+
}
|
|
18966
|
+
}
|
|
18967
|
+
}, intervalMs);
|
|
18968
|
+
logger.debug(`[Scheduler] Heartbeat started (interval: ${intervalMs}ms)`);
|
|
18969
|
+
}
|
|
18970
|
+
/**
|
|
18971
|
+
* Stop the heartbeat timer
|
|
18972
|
+
*/
|
|
18973
|
+
stopHeartbeat() {
|
|
18974
|
+
if (this.heartbeatInterval) {
|
|
18975
|
+
clearInterval(this.heartbeatInterval);
|
|
18976
|
+
this.heartbeatInterval = null;
|
|
18977
|
+
}
|
|
18978
|
+
}
|
|
18979
|
+
/**
|
|
18980
|
+
* Restore schedules from persistent storage
|
|
18981
|
+
*/
|
|
18982
|
+
async restoreSchedules() {
|
|
18983
|
+
const activeSchedules = await this.store.getActiveSchedulesAsync();
|
|
18984
|
+
logger.info(`[Scheduler] Restoring ${activeSchedules.length} active schedules`);
|
|
18985
|
+
for (const schedule of activeSchedules) {
|
|
18986
|
+
try {
|
|
18987
|
+
await this.scheduleExecution(schedule);
|
|
18988
|
+
} catch (error) {
|
|
18989
|
+
logger.error(
|
|
18990
|
+
`[Scheduler] Failed to restore schedule ${schedule.id}: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
18991
|
+
);
|
|
18992
|
+
}
|
|
18993
|
+
}
|
|
18994
|
+
}
|
|
18995
|
+
/**
|
|
18996
|
+
* Schedule a workflow for execution
|
|
18997
|
+
*/
|
|
18998
|
+
async scheduleExecution(schedule) {
|
|
18999
|
+
if (schedule.isRecurring) {
|
|
19000
|
+
await this.scheduleRecurring(schedule);
|
|
19001
|
+
} else {
|
|
19002
|
+
await this.scheduleOneTime(schedule);
|
|
19003
|
+
}
|
|
19004
|
+
}
|
|
19005
|
+
/**
|
|
19006
|
+
* Schedule a recurring workflow using cron
|
|
19007
|
+
*/
|
|
19008
|
+
async scheduleRecurring(schedule) {
|
|
19009
|
+
if (!import_node_cron.default.validate(schedule.schedule)) {
|
|
19010
|
+
logger.error(
|
|
19011
|
+
`[Scheduler] Invalid cron expression for schedule ${schedule.id}: ${schedule.schedule}`
|
|
19012
|
+
);
|
|
19013
|
+
const existingJob2 = this.cronJobs.get(schedule.id);
|
|
19014
|
+
if (existingJob2) {
|
|
19015
|
+
existingJob2.stop();
|
|
19016
|
+
this.cronJobs.delete(schedule.id);
|
|
19017
|
+
}
|
|
19018
|
+
await this.store.updateAsync(schedule.id, {
|
|
19019
|
+
status: "failed",
|
|
19020
|
+
lastError: "Invalid cron expression"
|
|
19021
|
+
});
|
|
19022
|
+
return;
|
|
19023
|
+
}
|
|
19024
|
+
const existingJob = this.cronJobs.get(schedule.id);
|
|
19025
|
+
if (existingJob) {
|
|
19026
|
+
existingJob.stop();
|
|
19027
|
+
}
|
|
19028
|
+
const job = import_node_cron.default.schedule(
|
|
19029
|
+
schedule.schedule,
|
|
19030
|
+
async () => {
|
|
19031
|
+
await this.executeSchedule(schedule);
|
|
19032
|
+
},
|
|
19033
|
+
{
|
|
19034
|
+
scheduled: true,
|
|
19035
|
+
timezone: schedule.timezone || this.defaultTimezone
|
|
19036
|
+
}
|
|
19037
|
+
);
|
|
19038
|
+
this.cronJobs.set(schedule.id, job);
|
|
19039
|
+
try {
|
|
19040
|
+
const nextRun = getNextRunTime(schedule.schedule, schedule.timezone);
|
|
19041
|
+
await this.store.updateAsync(schedule.id, { nextRunAt: nextRun.getTime() });
|
|
19042
|
+
} catch (error) {
|
|
19043
|
+
logger.warn(
|
|
19044
|
+
`[Scheduler] Could not compute next run time for ${schedule.id}: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
19045
|
+
);
|
|
19046
|
+
}
|
|
19047
|
+
logger.debug(`[Scheduler] Scheduled recurring execution ${schedule.id}: ${schedule.schedule}`);
|
|
19048
|
+
}
|
|
19049
|
+
/**
|
|
19050
|
+
* Schedule a one-time workflow using setTimeout
|
|
19051
|
+
*/
|
|
19052
|
+
async scheduleOneTime(schedule) {
|
|
19053
|
+
const existingTimeout = this.oneTimeTimeouts.get(schedule.id);
|
|
19054
|
+
if (existingTimeout) {
|
|
19055
|
+
clearTimeout(existingTimeout);
|
|
19056
|
+
}
|
|
19057
|
+
if (!schedule.runAt) {
|
|
19058
|
+
logger.error(`[Scheduler] One-time schedule ${schedule.id} has no runAt time`);
|
|
19059
|
+
return;
|
|
19060
|
+
}
|
|
19061
|
+
const delayMs = schedule.runAt - Date.now();
|
|
19062
|
+
if (delayMs <= 0) {
|
|
19063
|
+
await this.executeSchedule(schedule);
|
|
19064
|
+
return;
|
|
19065
|
+
}
|
|
19066
|
+
const timeout = setTimeout(async () => {
|
|
19067
|
+
this.oneTimeTimeouts.delete(schedule.id);
|
|
19068
|
+
await this.executeSchedule(schedule);
|
|
19069
|
+
}, delayMs);
|
|
19070
|
+
this.oneTimeTimeouts.set(schedule.id, timeout);
|
|
19071
|
+
logger.debug(
|
|
19072
|
+
`[Scheduler] Scheduled one-time execution ${schedule.id} for ${new Date(schedule.runAt).toISOString()}`
|
|
19073
|
+
);
|
|
19074
|
+
}
|
|
19075
|
+
/**
|
|
19076
|
+
* Check for and execute due schedules
|
|
19077
|
+
*/
|
|
19078
|
+
async checkDueSchedules() {
|
|
19079
|
+
const dueSchedules = await this.store.getDueSchedulesAsync();
|
|
19080
|
+
for (const schedule of dueSchedules) {
|
|
19081
|
+
if (this.cronJobs.has(schedule.id) || this.oneTimeTimeouts.has(schedule.id)) {
|
|
19082
|
+
continue;
|
|
19083
|
+
}
|
|
19084
|
+
if (this.haConfig?.enabled) {
|
|
19085
|
+
const ttl = this.haConfig.lock_ttl ?? 60;
|
|
19086
|
+
const backend = this.store.getBackend();
|
|
19087
|
+
const lockToken = await backend.tryAcquireLock(schedule.id, this.nodeId, ttl);
|
|
19088
|
+
if (!lockToken) {
|
|
19089
|
+
logger.debug(`[Scheduler] Schedule ${schedule.id} locked by another node, skipping`);
|
|
19090
|
+
continue;
|
|
19091
|
+
}
|
|
19092
|
+
this.heldLocks.set(schedule.id, lockToken);
|
|
19093
|
+
try {
|
|
19094
|
+
await this.executeSchedule(schedule);
|
|
19095
|
+
} finally {
|
|
19096
|
+
await backend.releaseLock(schedule.id, lockToken);
|
|
19097
|
+
this.heldLocks.delete(schedule.id);
|
|
19098
|
+
}
|
|
19099
|
+
} else {
|
|
19100
|
+
await this.executeSchedule(schedule);
|
|
19101
|
+
}
|
|
19102
|
+
}
|
|
19103
|
+
}
|
|
19104
|
+
/**
|
|
19105
|
+
* Execute a scheduled workflow
|
|
19106
|
+
*/
|
|
19107
|
+
async executeSchedule(schedule) {
|
|
19108
|
+
try {
|
|
19109
|
+
const fresh = await this.store.getAsync(schedule.id);
|
|
19110
|
+
if (!fresh || fresh.status !== "active") {
|
|
19111
|
+
logger.info(
|
|
19112
|
+
`[Scheduler] Schedule ${schedule.id} is no longer active (${fresh ? fresh.status : "deleted"}), skipping execution`
|
|
19113
|
+
);
|
|
19114
|
+
this.cancelSchedule(schedule.id);
|
|
19115
|
+
return;
|
|
19116
|
+
}
|
|
19117
|
+
} catch {
|
|
19118
|
+
logger.warn(
|
|
19119
|
+
`[Scheduler] Could not verify schedule ${schedule.id} freshness, proceeding with execution`
|
|
19120
|
+
);
|
|
19121
|
+
}
|
|
19122
|
+
const description = schedule.workflow || "reminder";
|
|
19123
|
+
logger.info(`[Scheduler] Executing schedule ${schedule.id}: ${description}`);
|
|
19124
|
+
const startTime = Date.now();
|
|
19125
|
+
let result;
|
|
19126
|
+
try {
|
|
19127
|
+
const output = await this.executeWorkflow(schedule);
|
|
19128
|
+
result = {
|
|
19129
|
+
success: true,
|
|
19130
|
+
output,
|
|
19131
|
+
executionTimeMs: Date.now() - startTime
|
|
19132
|
+
};
|
|
19133
|
+
const now = Date.now();
|
|
19134
|
+
await this.store.updateAsync(schedule.id, {
|
|
19135
|
+
lastRunAt: now,
|
|
19136
|
+
runCount: schedule.runCount + 1,
|
|
19137
|
+
failureCount: 0,
|
|
19138
|
+
// Reset on success
|
|
19139
|
+
lastError: void 0
|
|
19140
|
+
});
|
|
19141
|
+
if (!schedule.isRecurring) {
|
|
19142
|
+
await this.store.updateAsync(schedule.id, { status: "completed" });
|
|
19143
|
+
await this.store.deleteAsync(schedule.id);
|
|
19144
|
+
logger.info(`[Scheduler] One-time schedule ${schedule.id} completed and removed`);
|
|
19145
|
+
} else {
|
|
19146
|
+
try {
|
|
19147
|
+
const nextRun = getNextRunTime(schedule.schedule, schedule.timezone);
|
|
19148
|
+
await this.store.updateAsync(schedule.id, { nextRunAt: nextRun.getTime() });
|
|
19149
|
+
} catch (err) {
|
|
19150
|
+
logger.warn(
|
|
19151
|
+
`[Scheduler] Failed to compute next run time for ${schedule.id}, pausing schedule: ${err instanceof Error ? err.message : err}`
|
|
19152
|
+
);
|
|
19153
|
+
await this.store.updateAsync(schedule.id, {
|
|
19154
|
+
status: "paused",
|
|
19155
|
+
lastError: `Failed to compute next run time: ${err instanceof Error ? err.message : err}`
|
|
19156
|
+
});
|
|
19157
|
+
}
|
|
19158
|
+
}
|
|
19159
|
+
} catch (error) {
|
|
19160
|
+
const errorMsg = error instanceof Error ? error.message : "Unknown error";
|
|
19161
|
+
result = {
|
|
19162
|
+
success: false,
|
|
19163
|
+
error: errorMsg,
|
|
19164
|
+
executionTimeMs: Date.now() - startTime
|
|
19165
|
+
};
|
|
19166
|
+
await this.handleScheduleFailure(schedule, error);
|
|
19167
|
+
}
|
|
19168
|
+
await this.sendResult(schedule, result);
|
|
19169
|
+
}
|
|
19170
|
+
/**
|
|
19171
|
+
* Helper to prepare execution environment - reduces duplication between workflow and reminder execution
|
|
19172
|
+
*/
|
|
19173
|
+
prepareExecution(schedule, cliMessage) {
|
|
19174
|
+
const config = JSON.parse(JSON.stringify(this.visorConfig));
|
|
19175
|
+
const fronts = Array.isArray(config.frontends) ? config.frontends : [];
|
|
19176
|
+
const hasSlackFrontend = fronts.some((f) => f && f.name === "slack");
|
|
19177
|
+
if (!hasSlackFrontend && (cliMessage || schedule.outputContext?.type === "slack")) {
|
|
19178
|
+
fronts.push({ name: "slack" });
|
|
19179
|
+
}
|
|
19180
|
+
config.frontends = fronts;
|
|
19181
|
+
const engine = new StateMachineExecutionEngine();
|
|
19182
|
+
const responseRef = {};
|
|
19183
|
+
const responseCapture = (text) => {
|
|
19184
|
+
responseRef.captured = text;
|
|
19185
|
+
logger.debug(
|
|
19186
|
+
`[Scheduler] Captured AI response for schedule ${schedule.id} (${text.length} chars)`
|
|
19187
|
+
);
|
|
19188
|
+
};
|
|
19189
|
+
engine.setExecutionContext({
|
|
19190
|
+
...this.executionContext,
|
|
19191
|
+
cliMessage,
|
|
19192
|
+
responseCapture
|
|
19193
|
+
});
|
|
19194
|
+
return { engine, config, responseRef };
|
|
19195
|
+
}
|
|
19196
|
+
/**
|
|
19197
|
+
* Execute the workflow for a schedule
|
|
19198
|
+
*/
|
|
19199
|
+
async executeWorkflow(schedule) {
|
|
19200
|
+
if (!schedule.workflow) {
|
|
19201
|
+
return this.executeSimpleReminder(schedule);
|
|
19202
|
+
}
|
|
19203
|
+
if (!this.engine) {
|
|
19204
|
+
logger.warn("[Scheduler] No execution engine set, skipping workflow execution");
|
|
19205
|
+
return { message: "No execution engine configured" };
|
|
19206
|
+
}
|
|
19207
|
+
const allChecks = Object.keys(this.visorConfig.checks || {});
|
|
19208
|
+
if (!allChecks.includes(schedule.workflow)) {
|
|
19209
|
+
throw new Error(`Workflow "${schedule.workflow}" not found in configuration`);
|
|
19210
|
+
}
|
|
19211
|
+
const syntheticPayload = {
|
|
19212
|
+
event: {
|
|
19213
|
+
type: "schedule_triggered",
|
|
19214
|
+
schedule_id: schedule.id,
|
|
19215
|
+
workflow: schedule.workflow,
|
|
19216
|
+
creator_id: schedule.creatorId,
|
|
19217
|
+
creator_context: schedule.creatorContext,
|
|
19218
|
+
timestamp: Date.now()
|
|
19219
|
+
},
|
|
19220
|
+
schedule: {
|
|
19221
|
+
id: schedule.id,
|
|
19222
|
+
workflow: schedule.workflow,
|
|
19223
|
+
workflowInputs: schedule.workflowInputs,
|
|
19224
|
+
isRecurring: schedule.isRecurring,
|
|
19225
|
+
outputContext: schedule.outputContext
|
|
19226
|
+
}
|
|
19227
|
+
};
|
|
19228
|
+
const webhookData = /* @__PURE__ */ new Map();
|
|
19229
|
+
const endpoint = "/scheduler/trigger";
|
|
19230
|
+
webhookData.set(endpoint, syntheticPayload);
|
|
19231
|
+
try {
|
|
19232
|
+
const { refreshGitHubCredentials: refreshGitHubCredentials2 } = await Promise.resolve().then(() => (init_github_auth(), github_auth_exports));
|
|
19233
|
+
await refreshGitHubCredentials2();
|
|
19234
|
+
} catch {
|
|
19235
|
+
}
|
|
19236
|
+
const { engine: runEngine, config: cfgForRun } = this.prepareExecution(schedule);
|
|
19237
|
+
await runEngine.executeChecks({
|
|
19238
|
+
checks: [schedule.workflow],
|
|
19239
|
+
showDetails: true,
|
|
19240
|
+
outputFormat: "json",
|
|
19241
|
+
config: cfgForRun,
|
|
19242
|
+
webhookContext: { webhookData, eventType: "schedule" },
|
|
19243
|
+
debug: process.env.VISOR_DEBUG === "true",
|
|
19244
|
+
inputs: schedule.workflowInputs
|
|
19245
|
+
});
|
|
19246
|
+
return { message: "Workflow completed", workflow: schedule.workflow };
|
|
19247
|
+
}
|
|
19248
|
+
/**
|
|
19249
|
+
* Execute a simple reminder by running it through the visor pipeline
|
|
19250
|
+
* Treats the reminder text as if the user sent it as a message
|
|
19251
|
+
*/
|
|
19252
|
+
async executeSimpleReminder(schedule) {
|
|
19253
|
+
const reminderText = schedule.workflowInputs?.text;
|
|
19254
|
+
if (!reminderText) {
|
|
19255
|
+
return { message: "Reminder!", type: "simple_reminder" };
|
|
19256
|
+
}
|
|
19257
|
+
const allChecks = Object.keys(this.visorConfig.checks || {});
|
|
19258
|
+
if (allChecks.length === 0) {
|
|
19259
|
+
logger.warn("[Scheduler] No checks configured, returning reminder text directly");
|
|
19260
|
+
return { message: reminderText, type: "simple_reminder" };
|
|
19261
|
+
}
|
|
19262
|
+
logger.info(`[Scheduler] Running reminder through visor pipeline (${allChecks.length} checks)`);
|
|
19263
|
+
const channel = schedule.outputContext?.target || "";
|
|
19264
|
+
const threadId = schedule.outputContext?.threadId;
|
|
19265
|
+
let threadMessages = [];
|
|
19266
|
+
let additionalPayload = {};
|
|
19267
|
+
if (this.contextEnricher?.enrichContext) {
|
|
19268
|
+
try {
|
|
19269
|
+
const enriched = await this.contextEnricher.enrichContext(schedule);
|
|
19270
|
+
threadMessages = enriched.threadMessages || [];
|
|
19271
|
+
additionalPayload = enriched.additionalPayload || {};
|
|
19272
|
+
if (threadMessages.length > 0) {
|
|
19273
|
+
logger.debug(
|
|
19274
|
+
`[Scheduler] Context enricher provided ${threadMessages.length} thread messages`
|
|
19275
|
+
);
|
|
19276
|
+
}
|
|
19277
|
+
} catch (error) {
|
|
19278
|
+
logger.warn(
|
|
19279
|
+
`[Scheduler] Context enrichment failed: ${error instanceof Error ? error.message : error}`
|
|
19280
|
+
);
|
|
19281
|
+
}
|
|
19282
|
+
}
|
|
19283
|
+
let contextualReminderText = reminderText;
|
|
19284
|
+
if (schedule.isRecurring && schedule.previousResponse) {
|
|
19285
|
+
contextualReminderText = `${reminderText}
|
|
19286
|
+
|
|
19287
|
+
---
|
|
19288
|
+
**Previous Response (for context):**
|
|
19289
|
+
${schedule.previousResponse}
|
|
19290
|
+
---
|
|
19291
|
+
|
|
19292
|
+
Please provide an updated response based on the reminder above. You may reference or build upon the previous response if relevant.`;
|
|
19293
|
+
}
|
|
19294
|
+
const conversationData = {
|
|
19295
|
+
current: {
|
|
19296
|
+
user: schedule.creatorName || schedule.creatorId,
|
|
19297
|
+
text: contextualReminderText
|
|
19298
|
+
},
|
|
19299
|
+
messages: threadMessages.length > 0 ? [
|
|
19300
|
+
...threadMessages,
|
|
19301
|
+
{ user: schedule.creatorName || schedule.creatorId, text: contextualReminderText }
|
|
19302
|
+
] : [{ user: schedule.creatorName || schedule.creatorId, text: contextualReminderText }]
|
|
19303
|
+
};
|
|
19304
|
+
const syntheticPayload = {
|
|
19305
|
+
event: {
|
|
19306
|
+
type: "message",
|
|
19307
|
+
subtype: "scheduled_reminder",
|
|
19308
|
+
text: contextualReminderText,
|
|
19309
|
+
user: schedule.creatorId,
|
|
19310
|
+
channel,
|
|
19311
|
+
ts: String(Date.now() / 1e3),
|
|
19312
|
+
thread_ts: threadId
|
|
19313
|
+
},
|
|
19314
|
+
// Include both for compatibility (slack_conversation for existing checks, conversation for generic)
|
|
19315
|
+
slack_conversation: conversationData,
|
|
19316
|
+
conversation: conversationData,
|
|
19317
|
+
// Include schedule context for any checks that need it
|
|
19318
|
+
schedule: {
|
|
19319
|
+
id: schedule.id,
|
|
19320
|
+
isReminder: true,
|
|
19321
|
+
creatorId: schedule.creatorId,
|
|
19322
|
+
creatorContext: schedule.creatorContext,
|
|
19323
|
+
previousResponse: schedule.previousResponse
|
|
19324
|
+
},
|
|
19325
|
+
// Merge any additional frontend-specific payload
|
|
19326
|
+
...additionalPayload
|
|
19327
|
+
};
|
|
19328
|
+
const endpoint = this.contextEnricher?.getWebhookEndpoint?.() || this.visorConfig.slack?.endpoint || "/bots/slack/support";
|
|
19329
|
+
const webhookData = /* @__PURE__ */ new Map();
|
|
19330
|
+
webhookData.set(endpoint, syntheticPayload);
|
|
19331
|
+
if (this.contextEnricher?.prepareExecution) {
|
|
19332
|
+
try {
|
|
19333
|
+
await this.contextEnricher.prepareExecution(schedule, reminderText);
|
|
19334
|
+
} catch (error) {
|
|
19335
|
+
logger.warn(
|
|
19336
|
+
`[Scheduler] Execution preparation failed: ${error instanceof Error ? error.message : error}`
|
|
19337
|
+
);
|
|
19338
|
+
}
|
|
19339
|
+
}
|
|
19340
|
+
try {
|
|
19341
|
+
const { refreshGitHubCredentials: refreshGitHubCredentials2 } = await Promise.resolve().then(() => (init_github_auth(), github_auth_exports));
|
|
19342
|
+
await refreshGitHubCredentials2();
|
|
19343
|
+
} catch {
|
|
19344
|
+
}
|
|
19345
|
+
const {
|
|
19346
|
+
engine: runEngine,
|
|
19347
|
+
config: cfgForRun,
|
|
19348
|
+
responseRef
|
|
19349
|
+
} = this.prepareExecution(schedule, reminderText);
|
|
19350
|
+
try {
|
|
19351
|
+
await runEngine.executeChecks({
|
|
19352
|
+
checks: allChecks,
|
|
19353
|
+
showDetails: true,
|
|
19354
|
+
outputFormat: "json",
|
|
19355
|
+
config: cfgForRun,
|
|
19356
|
+
webhookContext: { webhookData, eventType: "schedule" },
|
|
19357
|
+
debug: process.env.VISOR_DEBUG === "true"
|
|
19358
|
+
});
|
|
19359
|
+
if (schedule.isRecurring && responseRef.captured) {
|
|
19360
|
+
await this.store.updateAsync(schedule.id, { previousResponse: responseRef.captured });
|
|
19361
|
+
logger.info(
|
|
19362
|
+
`[Scheduler] Saved previousResponse for recurring schedule ${schedule.id} (${responseRef.captured.length} chars)`
|
|
19363
|
+
);
|
|
19364
|
+
}
|
|
19365
|
+
return {
|
|
19366
|
+
message: "Reminder processed through pipeline",
|
|
19367
|
+
type: "pipeline_executed",
|
|
19368
|
+
reminderText,
|
|
19369
|
+
capturedResponse: responseRef.captured
|
|
19370
|
+
};
|
|
19371
|
+
} catch (error) {
|
|
19372
|
+
logger.error(
|
|
19373
|
+
`[Scheduler] Failed to run reminder through pipeline: ${error instanceof Error ? error.message : error}`
|
|
19374
|
+
);
|
|
19375
|
+
return { message: reminderText, type: "simple_reminder" };
|
|
19376
|
+
}
|
|
19377
|
+
}
|
|
19378
|
+
/**
|
|
19379
|
+
* Handle schedule execution failure
|
|
19380
|
+
*/
|
|
19381
|
+
async handleScheduleFailure(schedule, error) {
|
|
19382
|
+
const errorMsg = error instanceof Error ? error.message : "Unknown error";
|
|
19383
|
+
logger.error(`[Scheduler] Schedule ${schedule.id} failed: ${errorMsg}`);
|
|
19384
|
+
const newFailureCount = schedule.failureCount + 1;
|
|
19385
|
+
await this.store.updateAsync(schedule.id, {
|
|
19386
|
+
failureCount: newFailureCount,
|
|
19387
|
+
lastError: errorMsg
|
|
19388
|
+
});
|
|
19389
|
+
if (newFailureCount >= 3) {
|
|
19390
|
+
await this.store.updateAsync(schedule.id, { status: "failed" });
|
|
19391
|
+
const job = this.cronJobs.get(schedule.id);
|
|
19392
|
+
if (job) {
|
|
19393
|
+
job.stop();
|
|
19394
|
+
this.cronJobs.delete(schedule.id);
|
|
19395
|
+
}
|
|
19396
|
+
logger.warn(`[Scheduler] Schedule ${schedule.id} paused after 3 consecutive failures`);
|
|
19397
|
+
}
|
|
19398
|
+
}
|
|
19399
|
+
/**
|
|
19400
|
+
* Send execution result to the appropriate output adapter
|
|
19401
|
+
*/
|
|
19402
|
+
async sendResult(schedule, result) {
|
|
19403
|
+
const outputType = schedule.outputContext?.type || "none";
|
|
19404
|
+
const adapter = this.outputAdapters.get(outputType);
|
|
19405
|
+
if (!adapter) {
|
|
19406
|
+
if (outputType !== "none") {
|
|
19407
|
+
logger.warn(`[Scheduler] No output adapter registered for type: ${outputType}`);
|
|
19408
|
+
}
|
|
19409
|
+
return;
|
|
19410
|
+
}
|
|
19411
|
+
try {
|
|
19412
|
+
await adapter.sendResult(schedule, result);
|
|
19413
|
+
} catch (error) {
|
|
19414
|
+
logger.error(
|
|
19415
|
+
`[Scheduler] Failed to send result via ${outputType} adapter: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
19416
|
+
);
|
|
19417
|
+
}
|
|
19418
|
+
}
|
|
19419
|
+
/**
|
|
19420
|
+
* Check if scheduler is running
|
|
19421
|
+
*/
|
|
19422
|
+
isRunning() {
|
|
19423
|
+
return this.running;
|
|
19424
|
+
}
|
|
19425
|
+
/**
|
|
19426
|
+
* Get scheduler stats
|
|
19427
|
+
*/
|
|
19428
|
+
async getStats() {
|
|
19429
|
+
return {
|
|
19430
|
+
running: this.running,
|
|
19431
|
+
activeCronJobs: this.cronJobs.size,
|
|
19432
|
+
pendingOneTimeSchedules: this.oneTimeTimeouts.size,
|
|
19433
|
+
storeStats: await this.store.getStatsAsync()
|
|
19434
|
+
};
|
|
19435
|
+
}
|
|
19436
|
+
};
|
|
19437
|
+
}
|
|
19438
|
+
});
|
|
19439
|
+
|
|
18460
19440
|
// src/scheduler/schedule-tool.ts
|
|
18461
19441
|
var schedule_tool_exports = {};
|
|
18462
19442
|
__export(schedule_tool_exports, {
|
|
@@ -18815,6 +19795,10 @@ async function handleCancel(args, context2, store) {
|
|
|
18815
19795
|
};
|
|
18816
19796
|
}
|
|
18817
19797
|
await store.deleteAsync(schedule.id);
|
|
19798
|
+
const scheduler = getScheduler();
|
|
19799
|
+
if (scheduler) {
|
|
19800
|
+
scheduler.cancelSchedule(schedule.id);
|
|
19801
|
+
}
|
|
18818
19802
|
logger.info(`[ScheduleTool] Cancelled schedule ${schedule.id} for user ${context2.userId}`);
|
|
18819
19803
|
return {
|
|
18820
19804
|
success: true,
|
|
@@ -19120,23 +20104,11 @@ var init_schedule_tool = __esm({
|
|
|
19120
20104
|
"use strict";
|
|
19121
20105
|
init_schedule_store();
|
|
19122
20106
|
init_schedule_parser();
|
|
20107
|
+
init_scheduler();
|
|
19123
20108
|
init_logger();
|
|
19124
20109
|
}
|
|
19125
20110
|
});
|
|
19126
20111
|
|
|
19127
|
-
// src/scheduler/scheduler.ts
|
|
19128
|
-
var import_node_cron;
|
|
19129
|
-
var init_scheduler = __esm({
|
|
19130
|
-
"src/scheduler/scheduler.ts"() {
|
|
19131
|
-
"use strict";
|
|
19132
|
-
import_node_cron = __toESM(require("node-cron"));
|
|
19133
|
-
init_schedule_store();
|
|
19134
|
-
init_schedule_parser();
|
|
19135
|
-
init_logger();
|
|
19136
|
-
init_state_machine_execution_engine();
|
|
19137
|
-
}
|
|
19138
|
-
});
|
|
19139
|
-
|
|
19140
20112
|
// src/scheduler/cli-handler.ts
|
|
19141
20113
|
var init_cli_handler = __esm({
|
|
19142
20114
|
"src/scheduler/cli-handler.ts"() {
|
|
@@ -19368,7 +20340,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19368
20340
|
* Returns the actual bound port number
|
|
19369
20341
|
*/
|
|
19370
20342
|
async start() {
|
|
19371
|
-
return new Promise((
|
|
20343
|
+
return new Promise((resolve14, reject) => {
|
|
19372
20344
|
try {
|
|
19373
20345
|
this.server = import_http.default.createServer((req, res) => {
|
|
19374
20346
|
this.handleRequest(req, res).catch((error) => {
|
|
@@ -19402,7 +20374,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19402
20374
|
);
|
|
19403
20375
|
}
|
|
19404
20376
|
this.startKeepalive();
|
|
19405
|
-
|
|
20377
|
+
resolve14(this.port);
|
|
19406
20378
|
});
|
|
19407
20379
|
} catch (error) {
|
|
19408
20380
|
reject(error);
|
|
@@ -19465,7 +20437,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19465
20437
|
logger.debug(
|
|
19466
20438
|
`[CustomToolsSSEServer:${this.sessionId}] Grace period before stop: ${waitMs}ms (activeToolCalls=${this.activeToolCalls})`
|
|
19467
20439
|
);
|
|
19468
|
-
await new Promise((
|
|
20440
|
+
await new Promise((resolve14) => setTimeout(resolve14, waitMs));
|
|
19469
20441
|
}
|
|
19470
20442
|
}
|
|
19471
20443
|
if (this.activeToolCalls > 0) {
|
|
@@ -19474,7 +20446,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19474
20446
|
`[CustomToolsSSEServer:${this.sessionId}] Waiting for ${this.activeToolCalls} active tool call(s) before stop`
|
|
19475
20447
|
);
|
|
19476
20448
|
while (this.activeToolCalls > 0 && Date.now() - startedAt < effectiveDrainTimeoutMs) {
|
|
19477
|
-
await new Promise((
|
|
20449
|
+
await new Promise((resolve14) => setTimeout(resolve14, 250));
|
|
19478
20450
|
}
|
|
19479
20451
|
if (this.activeToolCalls > 0) {
|
|
19480
20452
|
logger.warn(
|
|
@@ -19499,21 +20471,21 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19499
20471
|
}
|
|
19500
20472
|
this.connections.clear();
|
|
19501
20473
|
if (this.server) {
|
|
19502
|
-
await new Promise((
|
|
20474
|
+
await new Promise((resolve14, reject) => {
|
|
19503
20475
|
const timeout = setTimeout(() => {
|
|
19504
20476
|
if (this.debug) {
|
|
19505
20477
|
logger.debug(
|
|
19506
20478
|
`[CustomToolsSSEServer:${this.sessionId}] Force closing server after timeout`
|
|
19507
20479
|
);
|
|
19508
20480
|
}
|
|
19509
|
-
this.server?.close(() =>
|
|
20481
|
+
this.server?.close(() => resolve14());
|
|
19510
20482
|
}, 5e3);
|
|
19511
20483
|
this.server.close((error) => {
|
|
19512
20484
|
clearTimeout(timeout);
|
|
19513
20485
|
if (error) {
|
|
19514
20486
|
reject(error);
|
|
19515
20487
|
} else {
|
|
19516
|
-
|
|
20488
|
+
resolve14();
|
|
19517
20489
|
}
|
|
19518
20490
|
});
|
|
19519
20491
|
});
|
|
@@ -19939,7 +20911,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19939
20911
|
logger.warn(
|
|
19940
20912
|
`[CustomToolsSSEServer:${this.sessionId}] Tool ${toolName} failed (attempt ${attempt + 1}/${retryCount + 1}): ${errorMsg}. Retrying in ${delay}ms`
|
|
19941
20913
|
);
|
|
19942
|
-
await new Promise((
|
|
20914
|
+
await new Promise((resolve14) => setTimeout(resolve14, delay));
|
|
19943
20915
|
attempt++;
|
|
19944
20916
|
}
|
|
19945
20917
|
}
|
|
@@ -20017,6 +20989,13 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
20017
20989
|
// src/utils/tool-resolver.ts
|
|
20018
20990
|
function resolveTools(toolItems, globalTools, logPrefix = "[ToolResolver]") {
|
|
20019
20991
|
const tools = /* @__PURE__ */ new Map();
|
|
20992
|
+
const registry = WorkflowRegistry.getInstance();
|
|
20993
|
+
const registeredWorkflows = registry.list().map((w) => w.id);
|
|
20994
|
+
if (toolItems.some((item) => typeof item !== "string" && isWorkflowToolReference(item))) {
|
|
20995
|
+
logger.info(
|
|
20996
|
+
`${logPrefix} Resolving ${toolItems.length} tool items. WorkflowRegistry has ${registeredWorkflows.length} workflows: [${registeredWorkflows.join(", ")}]`
|
|
20997
|
+
);
|
|
20998
|
+
}
|
|
20020
20999
|
for (const item of toolItems) {
|
|
20021
21000
|
const workflowTool = resolveWorkflowToolFromItem(item);
|
|
20022
21001
|
if (workflowTool) {
|
|
@@ -20033,7 +21012,9 @@ function resolveTools(toolItems, globalTools, logPrefix = "[ToolResolver]") {
|
|
|
20033
21012
|
}
|
|
20034
21013
|
logger.warn(`${logPrefix} Tool '${item}' not found in global tools or workflow registry`);
|
|
20035
21014
|
} else if (isWorkflowToolReference(item)) {
|
|
20036
|
-
logger.warn(
|
|
21015
|
+
logger.warn(
|
|
21016
|
+
`${logPrefix} Workflow '${item.workflow}' referenced but not found in registry. Available: [${registeredWorkflows.join(", ")}]`
|
|
21017
|
+
);
|
|
20037
21018
|
}
|
|
20038
21019
|
}
|
|
20039
21020
|
if (tools.size === 0 && toolItems.length > 0 && !globalTools) {
|
|
@@ -20047,6 +21028,7 @@ var init_tool_resolver = __esm({
|
|
|
20047
21028
|
"src/utils/tool-resolver.ts"() {
|
|
20048
21029
|
"use strict";
|
|
20049
21030
|
init_workflow_tool_executor();
|
|
21031
|
+
init_workflow_registry();
|
|
20050
21032
|
init_logger();
|
|
20051
21033
|
}
|
|
20052
21034
|
});
|
|
@@ -20242,9 +21224,9 @@ var init_ai_check_provider = __esm({
|
|
|
20242
21224
|
} else {
|
|
20243
21225
|
resolvedPath = import_path7.default.resolve(process.cwd(), str);
|
|
20244
21226
|
}
|
|
20245
|
-
const
|
|
21227
|
+
const fs23 = require("fs").promises;
|
|
20246
21228
|
try {
|
|
20247
|
-
const stat2 = await
|
|
21229
|
+
const stat2 = await fs23.stat(resolvedPath);
|
|
20248
21230
|
return stat2.isFile();
|
|
20249
21231
|
} catch {
|
|
20250
21232
|
return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
|
|
@@ -20850,10 +21832,41 @@ ${preview}`);
|
|
|
20850
21832
|
if (Object.keys(dynamicServers).length > 0) {
|
|
20851
21833
|
Object.assign(mcpServers, dynamicServers);
|
|
20852
21834
|
}
|
|
21835
|
+
try {
|
|
21836
|
+
const span = trace.getSpan(context.active());
|
|
21837
|
+
if (span) {
|
|
21838
|
+
span.addEvent("tool_setup.mcp_servers_js", {
|
|
21839
|
+
"tool_setup.server_count": Object.keys(dynamicServers).length,
|
|
21840
|
+
"tool_setup.server_names": Object.keys(dynamicServers).join(","),
|
|
21841
|
+
"tool_setup.workflow_entries": Object.entries(dynamicServers).filter(([, cfg]) => cfg?.workflow).map(([name, cfg]) => `${name}\u2192${cfg.workflow}`).join(",")
|
|
21842
|
+
});
|
|
21843
|
+
}
|
|
21844
|
+
} catch {
|
|
21845
|
+
}
|
|
20853
21846
|
} catch (error) {
|
|
20854
|
-
|
|
20855
|
-
|
|
20856
|
-
|
|
21847
|
+
const errMsg = error instanceof Error ? error.message : "Unknown error";
|
|
21848
|
+
logger.error(`[AICheckProvider] Failed to evaluate ai_mcp_servers_js: ${errMsg}`);
|
|
21849
|
+
try {
|
|
21850
|
+
const span = trace.getSpan(context.active());
|
|
21851
|
+
if (span) {
|
|
21852
|
+
span.addEvent("tool_setup.mcp_servers_js_error", {
|
|
21853
|
+
"tool_setup.error": errMsg
|
|
21854
|
+
});
|
|
21855
|
+
}
|
|
21856
|
+
} catch {
|
|
21857
|
+
}
|
|
21858
|
+
}
|
|
21859
|
+
} else if (mcpServersJsExpr && !_dependencyResults) {
|
|
21860
|
+
try {
|
|
21861
|
+
const span = trace.getSpan(context.active());
|
|
21862
|
+
if (span) {
|
|
21863
|
+
span.addEvent("tool_setup.mcp_servers_js_skipped", {
|
|
21864
|
+
"tool_setup.reason": "no_dependency_results",
|
|
21865
|
+
"tool_setup.has_expr": true,
|
|
21866
|
+
"tool_setup.has_deps": false
|
|
21867
|
+
});
|
|
21868
|
+
}
|
|
21869
|
+
} catch {
|
|
20857
21870
|
}
|
|
20858
21871
|
}
|
|
20859
21872
|
for (const serverConfig of Object.values(mcpServers)) {
|
|
@@ -20991,6 +22004,27 @@ ${preview}`);
|
|
|
20991
22004
|
}
|
|
20992
22005
|
try {
|
|
20993
22006
|
const customTools = this.loadCustomTools(customToolsToLoad, config);
|
|
22007
|
+
try {
|
|
22008
|
+
const span = trace.getSpan(context.active());
|
|
22009
|
+
if (span) {
|
|
22010
|
+
const requestedNames = customToolsToLoad.map(
|
|
22011
|
+
(item) => typeof item === "string" ? item : `${item.name || item.workflow}(wf:${item.workflow})`
|
|
22012
|
+
);
|
|
22013
|
+
span.addEvent("tool_setup.resolution", {
|
|
22014
|
+
"tool_setup.requested_count": customToolsToLoad.length,
|
|
22015
|
+
"tool_setup.requested_names": requestedNames.join(","),
|
|
22016
|
+
"tool_setup.resolved_count": customTools.size,
|
|
22017
|
+
"tool_setup.resolved_names": Array.from(customTools.keys()).join(","),
|
|
22018
|
+
"tool_setup.missing_count": customToolsToLoad.length - customTools.size
|
|
22019
|
+
});
|
|
22020
|
+
}
|
|
22021
|
+
} catch {
|
|
22022
|
+
}
|
|
22023
|
+
if (customToolsToLoad.length > 0 && customTools.size === 0) {
|
|
22024
|
+
logger.warn(
|
|
22025
|
+
`[AICheckProvider] All ${customToolsToLoad.length} custom tools failed to resolve! Requested: ${customToolsToLoad.map((item) => typeof item === "string" ? item : item.workflow).join(", ")}. AI will have no workflow tools available.`
|
|
22026
|
+
);
|
|
22027
|
+
}
|
|
20994
22028
|
if (scheduleToolEnabled) {
|
|
20995
22029
|
const scheduleTool = getScheduleToolDefinition();
|
|
20996
22030
|
customTools.set(scheduleTool.name, scheduleTool);
|
|
@@ -21028,10 +22062,34 @@ ${preview}`);
|
|
|
21028
22062
|
};
|
|
21029
22063
|
}
|
|
21030
22064
|
} catch (error) {
|
|
22065
|
+
const errMsg = error instanceof Error ? error.message : "Unknown error";
|
|
21031
22066
|
logger.error(
|
|
21032
|
-
`[AICheckProvider] Failed to start custom tools SSE server '${customToolsServerName}': ${
|
|
22067
|
+
`[AICheckProvider] Failed to start custom tools SSE server '${customToolsServerName}': ${errMsg}`
|
|
21033
22068
|
);
|
|
22069
|
+
try {
|
|
22070
|
+
const span = trace.getSpan(context.active());
|
|
22071
|
+
if (span) {
|
|
22072
|
+
span.addEvent("tool_setup.sse_server_error", {
|
|
22073
|
+
"tool_setup.error": errMsg,
|
|
22074
|
+
"tool_setup.server_name": customToolsServerName || ""
|
|
22075
|
+
});
|
|
22076
|
+
}
|
|
22077
|
+
} catch {
|
|
22078
|
+
}
|
|
22079
|
+
}
|
|
22080
|
+
}
|
|
22081
|
+
try {
|
|
22082
|
+
const span = trace.getSpan(context.active());
|
|
22083
|
+
if (span) {
|
|
22084
|
+
const finalServerNames = Object.keys(mcpServers);
|
|
22085
|
+
span.addEvent("tool_setup.final", {
|
|
22086
|
+
"tool_setup.final_server_count": finalServerNames.length,
|
|
22087
|
+
"tool_setup.final_server_names": finalServerNames.join(","),
|
|
22088
|
+
"tool_setup.has_custom_tools_server": !!customToolsServer,
|
|
22089
|
+
"tool_setup.tools_disabled": !!config.ai?.disableTools
|
|
22090
|
+
});
|
|
21034
22091
|
}
|
|
22092
|
+
} catch {
|
|
21035
22093
|
}
|
|
21036
22094
|
if (Object.keys(mcpServers).length > 0 && !config.ai?.disableTools) {
|
|
21037
22095
|
aiConfig.mcpServers = mcpServers;
|
|
@@ -22231,7 +23289,7 @@ var init_template_context = __esm({
|
|
|
22231
23289
|
});
|
|
22232
23290
|
|
|
22233
23291
|
// src/providers/http-client-provider.ts
|
|
22234
|
-
var
|
|
23292
|
+
var fs15, path18, HttpClientProvider;
|
|
22235
23293
|
var init_http_client_provider = __esm({
|
|
22236
23294
|
"src/providers/http-client-provider.ts"() {
|
|
22237
23295
|
"use strict";
|
|
@@ -22241,8 +23299,8 @@ var init_http_client_provider = __esm({
|
|
|
22241
23299
|
init_sandbox();
|
|
22242
23300
|
init_template_context();
|
|
22243
23301
|
init_logger();
|
|
22244
|
-
|
|
22245
|
-
|
|
23302
|
+
fs15 = __toESM(require("fs"));
|
|
23303
|
+
path18 = __toESM(require("path"));
|
|
22246
23304
|
HttpClientProvider = class extends CheckProvider {
|
|
22247
23305
|
liquid;
|
|
22248
23306
|
sandbox;
|
|
@@ -22337,14 +23395,14 @@ var init_http_client_provider = __esm({
|
|
|
22337
23395
|
const parentContext = context2?._parentContext;
|
|
22338
23396
|
const workingDirectory = parentContext?.workingDirectory;
|
|
22339
23397
|
const workspaceEnabled = parentContext?.workspace?.isEnabled?.();
|
|
22340
|
-
if (workspaceEnabled && workingDirectory && !
|
|
22341
|
-
resolvedOutputFile =
|
|
23398
|
+
if (workspaceEnabled && workingDirectory && !path18.isAbsolute(resolvedOutputFile)) {
|
|
23399
|
+
resolvedOutputFile = path18.join(workingDirectory, resolvedOutputFile);
|
|
22342
23400
|
logger.debug(
|
|
22343
23401
|
`[http_client] Resolved relative output_file to workspace: ${resolvedOutputFile}`
|
|
22344
23402
|
);
|
|
22345
23403
|
}
|
|
22346
|
-
if (skipIfExists &&
|
|
22347
|
-
const stats =
|
|
23404
|
+
if (skipIfExists && fs15.existsSync(resolvedOutputFile)) {
|
|
23405
|
+
const stats = fs15.statSync(resolvedOutputFile);
|
|
22348
23406
|
logger.verbose(`[http_client] File cached: ${resolvedOutputFile} (${stats.size} bytes)`);
|
|
22349
23407
|
return {
|
|
22350
23408
|
issues: [],
|
|
@@ -22555,13 +23613,13 @@ var init_http_client_provider = __esm({
|
|
|
22555
23613
|
]
|
|
22556
23614
|
};
|
|
22557
23615
|
}
|
|
22558
|
-
const parentDir =
|
|
22559
|
-
if (parentDir && !
|
|
22560
|
-
|
|
23616
|
+
const parentDir = path18.dirname(outputFile);
|
|
23617
|
+
if (parentDir && !fs15.existsSync(parentDir)) {
|
|
23618
|
+
fs15.mkdirSync(parentDir, { recursive: true });
|
|
22561
23619
|
}
|
|
22562
23620
|
const arrayBuffer = await response.arrayBuffer();
|
|
22563
23621
|
const buffer = Buffer.from(arrayBuffer);
|
|
22564
|
-
|
|
23622
|
+
fs15.writeFileSync(outputFile, buffer);
|
|
22565
23623
|
const contentType = response.headers.get("content-type") || "application/octet-stream";
|
|
22566
23624
|
logger.verbose(`[http_client] Downloaded: ${outputFile} (${buffer.length} bytes)`);
|
|
22567
23625
|
return {
|
|
@@ -26096,14 +27154,14 @@ var require_util = __commonJS({
|
|
|
26096
27154
|
}
|
|
26097
27155
|
const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
|
|
26098
27156
|
let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
|
|
26099
|
-
let
|
|
27157
|
+
let path27 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
|
|
26100
27158
|
if (origin.endsWith("/")) {
|
|
26101
27159
|
origin = origin.substring(0, origin.length - 1);
|
|
26102
27160
|
}
|
|
26103
|
-
if (
|
|
26104
|
-
|
|
27161
|
+
if (path27 && !path27.startsWith("/")) {
|
|
27162
|
+
path27 = `/${path27}`;
|
|
26105
27163
|
}
|
|
26106
|
-
url = new URL(origin +
|
|
27164
|
+
url = new URL(origin + path27);
|
|
26107
27165
|
}
|
|
26108
27166
|
return url;
|
|
26109
27167
|
}
|
|
@@ -27717,20 +28775,20 @@ var require_parseParams = __commonJS({
|
|
|
27717
28775
|
var require_basename = __commonJS({
|
|
27718
28776
|
"node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
|
|
27719
28777
|
"use strict";
|
|
27720
|
-
module2.exports = function basename4(
|
|
27721
|
-
if (typeof
|
|
28778
|
+
module2.exports = function basename4(path27) {
|
|
28779
|
+
if (typeof path27 !== "string") {
|
|
27722
28780
|
return "";
|
|
27723
28781
|
}
|
|
27724
|
-
for (var i =
|
|
27725
|
-
switch (
|
|
28782
|
+
for (var i = path27.length - 1; i >= 0; --i) {
|
|
28783
|
+
switch (path27.charCodeAt(i)) {
|
|
27726
28784
|
case 47:
|
|
27727
28785
|
// '/'
|
|
27728
28786
|
case 92:
|
|
27729
|
-
|
|
27730
|
-
return
|
|
28787
|
+
path27 = path27.slice(i + 1);
|
|
28788
|
+
return path27 === ".." || path27 === "." ? "" : path27;
|
|
27731
28789
|
}
|
|
27732
28790
|
}
|
|
27733
|
-
return
|
|
28791
|
+
return path27 === ".." || path27 === "." ? "" : path27;
|
|
27734
28792
|
};
|
|
27735
28793
|
}
|
|
27736
28794
|
});
|
|
@@ -28734,11 +29792,11 @@ var require_util2 = __commonJS({
|
|
|
28734
29792
|
var assert = require("assert");
|
|
28735
29793
|
var { isUint8Array } = require("util/types");
|
|
28736
29794
|
var supportedHashes = [];
|
|
28737
|
-
var
|
|
29795
|
+
var crypto2;
|
|
28738
29796
|
try {
|
|
28739
|
-
|
|
29797
|
+
crypto2 = require("crypto");
|
|
28740
29798
|
const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
|
|
28741
|
-
supportedHashes =
|
|
29799
|
+
supportedHashes = crypto2.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
|
|
28742
29800
|
} catch {
|
|
28743
29801
|
}
|
|
28744
29802
|
function responseURL(response) {
|
|
@@ -29015,7 +30073,7 @@ var require_util2 = __commonJS({
|
|
|
29015
30073
|
}
|
|
29016
30074
|
}
|
|
29017
30075
|
function bytesMatch(bytes, metadataList) {
|
|
29018
|
-
if (
|
|
30076
|
+
if (crypto2 === void 0) {
|
|
29019
30077
|
return true;
|
|
29020
30078
|
}
|
|
29021
30079
|
const parsedMetadata = parseMetadata(metadataList);
|
|
@@ -29030,7 +30088,7 @@ var require_util2 = __commonJS({
|
|
|
29030
30088
|
for (const item of metadata) {
|
|
29031
30089
|
const algorithm = item.algo;
|
|
29032
30090
|
const expectedValue = item.hash;
|
|
29033
|
-
let actualValue =
|
|
30091
|
+
let actualValue = crypto2.createHash(algorithm).update(bytes).digest("base64");
|
|
29034
30092
|
if (actualValue[actualValue.length - 1] === "=") {
|
|
29035
30093
|
if (actualValue[actualValue.length - 2] === "=") {
|
|
29036
30094
|
actualValue = actualValue.slice(0, -2);
|
|
@@ -29123,8 +30181,8 @@ var require_util2 = __commonJS({
|
|
|
29123
30181
|
function createDeferredPromise() {
|
|
29124
30182
|
let res;
|
|
29125
30183
|
let rej;
|
|
29126
|
-
const promise = new Promise((
|
|
29127
|
-
res =
|
|
30184
|
+
const promise = new Promise((resolve14, reject) => {
|
|
30185
|
+
res = resolve14;
|
|
29128
30186
|
rej = reject;
|
|
29129
30187
|
});
|
|
29130
30188
|
return { promise, resolve: res, reject: rej };
|
|
@@ -30377,8 +31435,8 @@ var require_body = __commonJS({
|
|
|
30377
31435
|
var { parseMIMEType, serializeAMimeType } = require_dataURL();
|
|
30378
31436
|
var random;
|
|
30379
31437
|
try {
|
|
30380
|
-
const
|
|
30381
|
-
random = (max) =>
|
|
31438
|
+
const crypto2 = require("crypto");
|
|
31439
|
+
random = (max) => crypto2.randomInt(0, max);
|
|
30382
31440
|
} catch {
|
|
30383
31441
|
random = (max) => Math.floor(Math.random(max));
|
|
30384
31442
|
}
|
|
@@ -30629,8 +31687,8 @@ Content-Type: ${value.type || "application/octet-stream"}\r
|
|
|
30629
31687
|
});
|
|
30630
31688
|
}
|
|
30631
31689
|
});
|
|
30632
|
-
const busboyResolve = new Promise((
|
|
30633
|
-
busboy.on("finish",
|
|
31690
|
+
const busboyResolve = new Promise((resolve14, reject) => {
|
|
31691
|
+
busboy.on("finish", resolve14);
|
|
30634
31692
|
busboy.on("error", (err) => reject(new TypeError(err)));
|
|
30635
31693
|
});
|
|
30636
31694
|
if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk);
|
|
@@ -30761,7 +31819,7 @@ var require_request = __commonJS({
|
|
|
30761
31819
|
}
|
|
30762
31820
|
var Request = class _Request {
|
|
30763
31821
|
constructor(origin, {
|
|
30764
|
-
path:
|
|
31822
|
+
path: path27,
|
|
30765
31823
|
method,
|
|
30766
31824
|
body,
|
|
30767
31825
|
headers,
|
|
@@ -30775,11 +31833,11 @@ var require_request = __commonJS({
|
|
|
30775
31833
|
throwOnError,
|
|
30776
31834
|
expectContinue
|
|
30777
31835
|
}, handler) {
|
|
30778
|
-
if (typeof
|
|
31836
|
+
if (typeof path27 !== "string") {
|
|
30779
31837
|
throw new InvalidArgumentError("path must be a string");
|
|
30780
|
-
} else if (
|
|
31838
|
+
} else if (path27[0] !== "/" && !(path27.startsWith("http://") || path27.startsWith("https://")) && method !== "CONNECT") {
|
|
30781
31839
|
throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
|
|
30782
|
-
} else if (invalidPathRegex.exec(
|
|
31840
|
+
} else if (invalidPathRegex.exec(path27) !== null) {
|
|
30783
31841
|
throw new InvalidArgumentError("invalid request path");
|
|
30784
31842
|
}
|
|
30785
31843
|
if (typeof method !== "string") {
|
|
@@ -30842,7 +31900,7 @@ var require_request = __commonJS({
|
|
|
30842
31900
|
this.completed = false;
|
|
30843
31901
|
this.aborted = false;
|
|
30844
31902
|
this.upgrade = upgrade || null;
|
|
30845
|
-
this.path = query ? util.buildURL(
|
|
31903
|
+
this.path = query ? util.buildURL(path27, query) : path27;
|
|
30846
31904
|
this.origin = origin;
|
|
30847
31905
|
this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
|
|
30848
31906
|
this.blocking = blocking == null ? false : blocking;
|
|
@@ -31164,9 +32222,9 @@ var require_dispatcher_base = __commonJS({
|
|
|
31164
32222
|
}
|
|
31165
32223
|
close(callback) {
|
|
31166
32224
|
if (callback === void 0) {
|
|
31167
|
-
return new Promise((
|
|
32225
|
+
return new Promise((resolve14, reject) => {
|
|
31168
32226
|
this.close((err, data) => {
|
|
31169
|
-
return err ? reject(err) :
|
|
32227
|
+
return err ? reject(err) : resolve14(data);
|
|
31170
32228
|
});
|
|
31171
32229
|
});
|
|
31172
32230
|
}
|
|
@@ -31204,12 +32262,12 @@ var require_dispatcher_base = __commonJS({
|
|
|
31204
32262
|
err = null;
|
|
31205
32263
|
}
|
|
31206
32264
|
if (callback === void 0) {
|
|
31207
|
-
return new Promise((
|
|
32265
|
+
return new Promise((resolve14, reject) => {
|
|
31208
32266
|
this.destroy(err, (err2, data) => {
|
|
31209
32267
|
return err2 ? (
|
|
31210
32268
|
/* istanbul ignore next: should never error */
|
|
31211
32269
|
reject(err2)
|
|
31212
|
-
) :
|
|
32270
|
+
) : resolve14(data);
|
|
31213
32271
|
});
|
|
31214
32272
|
});
|
|
31215
32273
|
}
|
|
@@ -31850,9 +32908,9 @@ var require_RedirectHandler = __commonJS({
|
|
|
31850
32908
|
return this.handler.onHeaders(statusCode, headers, resume, statusText);
|
|
31851
32909
|
}
|
|
31852
32910
|
const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
|
|
31853
|
-
const
|
|
32911
|
+
const path27 = search ? `${pathname}${search}` : pathname;
|
|
31854
32912
|
this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
|
|
31855
|
-
this.opts.path =
|
|
32913
|
+
this.opts.path = path27;
|
|
31856
32914
|
this.opts.origin = origin;
|
|
31857
32915
|
this.opts.maxRedirections = 0;
|
|
31858
32916
|
this.opts.query = null;
|
|
@@ -32271,16 +33329,16 @@ var require_client = __commonJS({
|
|
|
32271
33329
|
return this[kNeedDrain] < 2;
|
|
32272
33330
|
}
|
|
32273
33331
|
async [kClose]() {
|
|
32274
|
-
return new Promise((
|
|
33332
|
+
return new Promise((resolve14) => {
|
|
32275
33333
|
if (!this[kSize]) {
|
|
32276
|
-
|
|
33334
|
+
resolve14(null);
|
|
32277
33335
|
} else {
|
|
32278
|
-
this[kClosedResolve] =
|
|
33336
|
+
this[kClosedResolve] = resolve14;
|
|
32279
33337
|
}
|
|
32280
33338
|
});
|
|
32281
33339
|
}
|
|
32282
33340
|
async [kDestroy](err) {
|
|
32283
|
-
return new Promise((
|
|
33341
|
+
return new Promise((resolve14) => {
|
|
32284
33342
|
const requests = this[kQueue].splice(this[kPendingIdx]);
|
|
32285
33343
|
for (let i = 0; i < requests.length; i++) {
|
|
32286
33344
|
const request = requests[i];
|
|
@@ -32291,7 +33349,7 @@ var require_client = __commonJS({
|
|
|
32291
33349
|
this[kClosedResolve]();
|
|
32292
33350
|
this[kClosedResolve] = null;
|
|
32293
33351
|
}
|
|
32294
|
-
|
|
33352
|
+
resolve14();
|
|
32295
33353
|
};
|
|
32296
33354
|
if (this[kHTTP2Session] != null) {
|
|
32297
33355
|
util.destroy(this[kHTTP2Session], err);
|
|
@@ -32871,7 +33929,7 @@ var require_client = __commonJS({
|
|
|
32871
33929
|
});
|
|
32872
33930
|
}
|
|
32873
33931
|
try {
|
|
32874
|
-
const socket = await new Promise((
|
|
33932
|
+
const socket = await new Promise((resolve14, reject) => {
|
|
32875
33933
|
client[kConnector]({
|
|
32876
33934
|
host,
|
|
32877
33935
|
hostname,
|
|
@@ -32883,7 +33941,7 @@ var require_client = __commonJS({
|
|
|
32883
33941
|
if (err) {
|
|
32884
33942
|
reject(err);
|
|
32885
33943
|
} else {
|
|
32886
|
-
|
|
33944
|
+
resolve14(socket2);
|
|
32887
33945
|
}
|
|
32888
33946
|
});
|
|
32889
33947
|
});
|
|
@@ -33094,7 +34152,7 @@ var require_client = __commonJS({
|
|
|
33094
34152
|
writeH2(client, client[kHTTP2Session], request);
|
|
33095
34153
|
return;
|
|
33096
34154
|
}
|
|
33097
|
-
const { body, method, path:
|
|
34155
|
+
const { body, method, path: path27, host, upgrade, headers, blocking, reset } = request;
|
|
33098
34156
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
33099
34157
|
if (body && typeof body.read === "function") {
|
|
33100
34158
|
body.read(0);
|
|
@@ -33144,7 +34202,7 @@ var require_client = __commonJS({
|
|
|
33144
34202
|
if (blocking) {
|
|
33145
34203
|
socket[kBlocking] = true;
|
|
33146
34204
|
}
|
|
33147
|
-
let header = `${method} ${
|
|
34205
|
+
let header = `${method} ${path27} HTTP/1.1\r
|
|
33148
34206
|
`;
|
|
33149
34207
|
if (typeof host === "string") {
|
|
33150
34208
|
header += `host: ${host}\r
|
|
@@ -33207,7 +34265,7 @@ upgrade: ${upgrade}\r
|
|
|
33207
34265
|
return true;
|
|
33208
34266
|
}
|
|
33209
34267
|
function writeH2(client, session, request) {
|
|
33210
|
-
const { body, method, path:
|
|
34268
|
+
const { body, method, path: path27, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
|
|
33211
34269
|
let headers;
|
|
33212
34270
|
if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim());
|
|
33213
34271
|
else headers = reqHeaders;
|
|
@@ -33250,7 +34308,7 @@ upgrade: ${upgrade}\r
|
|
|
33250
34308
|
});
|
|
33251
34309
|
return true;
|
|
33252
34310
|
}
|
|
33253
|
-
headers[HTTP2_HEADER_PATH] =
|
|
34311
|
+
headers[HTTP2_HEADER_PATH] = path27;
|
|
33254
34312
|
headers[HTTP2_HEADER_SCHEME] = "https";
|
|
33255
34313
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
33256
34314
|
if (body && typeof body.read === "function") {
|
|
@@ -33507,12 +34565,12 @@ upgrade: ${upgrade}\r
|
|
|
33507
34565
|
cb();
|
|
33508
34566
|
}
|
|
33509
34567
|
}
|
|
33510
|
-
const waitForDrain = () => new Promise((
|
|
34568
|
+
const waitForDrain = () => new Promise((resolve14, reject) => {
|
|
33511
34569
|
assert(callback === null);
|
|
33512
34570
|
if (socket[kError]) {
|
|
33513
34571
|
reject(socket[kError]);
|
|
33514
34572
|
} else {
|
|
33515
|
-
callback =
|
|
34573
|
+
callback = resolve14;
|
|
33516
34574
|
}
|
|
33517
34575
|
});
|
|
33518
34576
|
if (client[kHTTPConnVersion] === "h2") {
|
|
@@ -33858,8 +34916,8 @@ var require_pool_base = __commonJS({
|
|
|
33858
34916
|
if (this[kQueue].isEmpty()) {
|
|
33859
34917
|
return Promise.all(this[kClients].map((c) => c.close()));
|
|
33860
34918
|
} else {
|
|
33861
|
-
return new Promise((
|
|
33862
|
-
this[kClosedResolve] =
|
|
34919
|
+
return new Promise((resolve14) => {
|
|
34920
|
+
this[kClosedResolve] = resolve14;
|
|
33863
34921
|
});
|
|
33864
34922
|
}
|
|
33865
34923
|
}
|
|
@@ -34437,7 +35495,7 @@ var require_readable = __commonJS({
|
|
|
34437
35495
|
if (this.closed) {
|
|
34438
35496
|
return Promise.resolve(null);
|
|
34439
35497
|
}
|
|
34440
|
-
return new Promise((
|
|
35498
|
+
return new Promise((resolve14, reject) => {
|
|
34441
35499
|
const signalListenerCleanup = signal ? util.addAbortListener(signal, () => {
|
|
34442
35500
|
this.destroy();
|
|
34443
35501
|
}) : noop;
|
|
@@ -34446,7 +35504,7 @@ var require_readable = __commonJS({
|
|
|
34446
35504
|
if (signal && signal.aborted) {
|
|
34447
35505
|
reject(signal.reason || Object.assign(new Error("The operation was aborted"), { name: "AbortError" }));
|
|
34448
35506
|
} else {
|
|
34449
|
-
|
|
35507
|
+
resolve14(null);
|
|
34450
35508
|
}
|
|
34451
35509
|
}).on("error", noop).on("data", function(chunk) {
|
|
34452
35510
|
limit -= chunk.length;
|
|
@@ -34468,11 +35526,11 @@ var require_readable = __commonJS({
|
|
|
34468
35526
|
throw new TypeError("unusable");
|
|
34469
35527
|
}
|
|
34470
35528
|
assert(!stream[kConsume]);
|
|
34471
|
-
return new Promise((
|
|
35529
|
+
return new Promise((resolve14, reject) => {
|
|
34472
35530
|
stream[kConsume] = {
|
|
34473
35531
|
type,
|
|
34474
35532
|
stream,
|
|
34475
|
-
resolve:
|
|
35533
|
+
resolve: resolve14,
|
|
34476
35534
|
reject,
|
|
34477
35535
|
length: 0,
|
|
34478
35536
|
body: []
|
|
@@ -34507,12 +35565,12 @@ var require_readable = __commonJS({
|
|
|
34507
35565
|
}
|
|
34508
35566
|
}
|
|
34509
35567
|
function consumeEnd(consume2) {
|
|
34510
|
-
const { type, body, resolve:
|
|
35568
|
+
const { type, body, resolve: resolve14, stream, length } = consume2;
|
|
34511
35569
|
try {
|
|
34512
35570
|
if (type === "text") {
|
|
34513
|
-
|
|
35571
|
+
resolve14(toUSVString(Buffer.concat(body)));
|
|
34514
35572
|
} else if (type === "json") {
|
|
34515
|
-
|
|
35573
|
+
resolve14(JSON.parse(Buffer.concat(body)));
|
|
34516
35574
|
} else if (type === "arrayBuffer") {
|
|
34517
35575
|
const dst = new Uint8Array(length);
|
|
34518
35576
|
let pos = 0;
|
|
@@ -34520,12 +35578,12 @@ var require_readable = __commonJS({
|
|
|
34520
35578
|
dst.set(buf, pos);
|
|
34521
35579
|
pos += buf.byteLength;
|
|
34522
35580
|
}
|
|
34523
|
-
|
|
35581
|
+
resolve14(dst.buffer);
|
|
34524
35582
|
} else if (type === "blob") {
|
|
34525
35583
|
if (!Blob2) {
|
|
34526
35584
|
Blob2 = require("buffer").Blob;
|
|
34527
35585
|
}
|
|
34528
|
-
|
|
35586
|
+
resolve14(new Blob2(body, { type: stream[kContentType] }));
|
|
34529
35587
|
}
|
|
34530
35588
|
consumeFinish(consume2);
|
|
34531
35589
|
} catch (err) {
|
|
@@ -34782,9 +35840,9 @@ var require_api_request = __commonJS({
|
|
|
34782
35840
|
};
|
|
34783
35841
|
function request(opts, callback) {
|
|
34784
35842
|
if (callback === void 0) {
|
|
34785
|
-
return new Promise((
|
|
35843
|
+
return new Promise((resolve14, reject) => {
|
|
34786
35844
|
request.call(this, opts, (err, data) => {
|
|
34787
|
-
return err ? reject(err) :
|
|
35845
|
+
return err ? reject(err) : resolve14(data);
|
|
34788
35846
|
});
|
|
34789
35847
|
});
|
|
34790
35848
|
}
|
|
@@ -34957,9 +36015,9 @@ var require_api_stream = __commonJS({
|
|
|
34957
36015
|
};
|
|
34958
36016
|
function stream(opts, factory, callback) {
|
|
34959
36017
|
if (callback === void 0) {
|
|
34960
|
-
return new Promise((
|
|
36018
|
+
return new Promise((resolve14, reject) => {
|
|
34961
36019
|
stream.call(this, opts, factory, (err, data) => {
|
|
34962
|
-
return err ? reject(err) :
|
|
36020
|
+
return err ? reject(err) : resolve14(data);
|
|
34963
36021
|
});
|
|
34964
36022
|
});
|
|
34965
36023
|
}
|
|
@@ -35240,9 +36298,9 @@ var require_api_upgrade = __commonJS({
|
|
|
35240
36298
|
};
|
|
35241
36299
|
function upgrade(opts, callback) {
|
|
35242
36300
|
if (callback === void 0) {
|
|
35243
|
-
return new Promise((
|
|
36301
|
+
return new Promise((resolve14, reject) => {
|
|
35244
36302
|
upgrade.call(this, opts, (err, data) => {
|
|
35245
|
-
return err ? reject(err) :
|
|
36303
|
+
return err ? reject(err) : resolve14(data);
|
|
35246
36304
|
});
|
|
35247
36305
|
});
|
|
35248
36306
|
}
|
|
@@ -35331,9 +36389,9 @@ var require_api_connect = __commonJS({
|
|
|
35331
36389
|
};
|
|
35332
36390
|
function connect(opts, callback) {
|
|
35333
36391
|
if (callback === void 0) {
|
|
35334
|
-
return new Promise((
|
|
36392
|
+
return new Promise((resolve14, reject) => {
|
|
35335
36393
|
connect.call(this, opts, (err, data) => {
|
|
35336
|
-
return err ? reject(err) :
|
|
36394
|
+
return err ? reject(err) : resolve14(data);
|
|
35337
36395
|
});
|
|
35338
36396
|
});
|
|
35339
36397
|
}
|
|
@@ -35493,20 +36551,20 @@ var require_mock_utils = __commonJS({
|
|
|
35493
36551
|
}
|
|
35494
36552
|
return true;
|
|
35495
36553
|
}
|
|
35496
|
-
function safeUrl(
|
|
35497
|
-
if (typeof
|
|
35498
|
-
return
|
|
36554
|
+
function safeUrl(path27) {
|
|
36555
|
+
if (typeof path27 !== "string") {
|
|
36556
|
+
return path27;
|
|
35499
36557
|
}
|
|
35500
|
-
const pathSegments =
|
|
36558
|
+
const pathSegments = path27.split("?");
|
|
35501
36559
|
if (pathSegments.length !== 2) {
|
|
35502
|
-
return
|
|
36560
|
+
return path27;
|
|
35503
36561
|
}
|
|
35504
36562
|
const qp = new URLSearchParams(pathSegments.pop());
|
|
35505
36563
|
qp.sort();
|
|
35506
36564
|
return [...pathSegments, qp.toString()].join("?");
|
|
35507
36565
|
}
|
|
35508
|
-
function matchKey(mockDispatch2, { path:
|
|
35509
|
-
const pathMatch = matchValue(mockDispatch2.path,
|
|
36566
|
+
function matchKey(mockDispatch2, { path: path27, method, body, headers }) {
|
|
36567
|
+
const pathMatch = matchValue(mockDispatch2.path, path27);
|
|
35510
36568
|
const methodMatch = matchValue(mockDispatch2.method, method);
|
|
35511
36569
|
const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
|
|
35512
36570
|
const headersMatch = matchHeaders(mockDispatch2, headers);
|
|
@@ -35524,7 +36582,7 @@ var require_mock_utils = __commonJS({
|
|
|
35524
36582
|
function getMockDispatch(mockDispatches, key) {
|
|
35525
36583
|
const basePath = key.query ? buildURL(key.path, key.query) : key.path;
|
|
35526
36584
|
const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
|
|
35527
|
-
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path:
|
|
36585
|
+
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path27 }) => matchValue(safeUrl(path27), resolvedPath));
|
|
35528
36586
|
if (matchedMockDispatches.length === 0) {
|
|
35529
36587
|
throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
|
|
35530
36588
|
}
|
|
@@ -35561,9 +36619,9 @@ var require_mock_utils = __commonJS({
|
|
|
35561
36619
|
}
|
|
35562
36620
|
}
|
|
35563
36621
|
function buildKey(opts) {
|
|
35564
|
-
const { path:
|
|
36622
|
+
const { path: path27, method, body, headers, query } = opts;
|
|
35565
36623
|
return {
|
|
35566
|
-
path:
|
|
36624
|
+
path: path27,
|
|
35567
36625
|
method,
|
|
35568
36626
|
body,
|
|
35569
36627
|
headers,
|
|
@@ -36012,10 +37070,10 @@ var require_pending_interceptors_formatter = __commonJS({
|
|
|
36012
37070
|
}
|
|
36013
37071
|
format(pendingInterceptors) {
|
|
36014
37072
|
const withPrettyHeaders = pendingInterceptors.map(
|
|
36015
|
-
({ method, path:
|
|
37073
|
+
({ method, path: path27, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
|
|
36016
37074
|
Method: method,
|
|
36017
37075
|
Origin: origin,
|
|
36018
|
-
Path:
|
|
37076
|
+
Path: path27,
|
|
36019
37077
|
"Status code": statusCode,
|
|
36020
37078
|
Persistent: persist ? "\u2705" : "\u274C",
|
|
36021
37079
|
Invocations: timesInvoked,
|
|
@@ -38956,7 +40014,7 @@ var require_fetch = __commonJS({
|
|
|
38956
40014
|
async function dispatch({ body }) {
|
|
38957
40015
|
const url = requestCurrentURL(request);
|
|
38958
40016
|
const agent = fetchParams.controller.dispatcher;
|
|
38959
|
-
return new Promise((
|
|
40017
|
+
return new Promise((resolve14, reject) => agent.dispatch(
|
|
38960
40018
|
{
|
|
38961
40019
|
path: url.pathname + url.search,
|
|
38962
40020
|
origin: url.origin,
|
|
@@ -39032,7 +40090,7 @@ var require_fetch = __commonJS({
|
|
|
39032
40090
|
}
|
|
39033
40091
|
}
|
|
39034
40092
|
}
|
|
39035
|
-
|
|
40093
|
+
resolve14({
|
|
39036
40094
|
status,
|
|
39037
40095
|
statusText,
|
|
39038
40096
|
headersList: headers[kHeadersList],
|
|
@@ -39075,7 +40133,7 @@ var require_fetch = __commonJS({
|
|
|
39075
40133
|
const val = headersList[n + 1].toString("latin1");
|
|
39076
40134
|
headers[kHeadersList].append(key, val);
|
|
39077
40135
|
}
|
|
39078
|
-
|
|
40136
|
+
resolve14({
|
|
39079
40137
|
status,
|
|
39080
40138
|
statusText: STATUS_CODES[status],
|
|
39081
40139
|
headersList: headers[kHeadersList],
|
|
@@ -40636,8 +41694,8 @@ var require_util6 = __commonJS({
|
|
|
40636
41694
|
}
|
|
40637
41695
|
}
|
|
40638
41696
|
}
|
|
40639
|
-
function validateCookiePath(
|
|
40640
|
-
for (const char of
|
|
41697
|
+
function validateCookiePath(path27) {
|
|
41698
|
+
for (const char of path27) {
|
|
40641
41699
|
const code = char.charCodeAt(0);
|
|
40642
41700
|
if (code < 33 || char === ";") {
|
|
40643
41701
|
throw new Error("Invalid cookie path");
|
|
@@ -41434,9 +42492,9 @@ var require_connection = __commonJS({
|
|
|
41434
42492
|
channels.open = diagnosticsChannel.channel("undici:websocket:open");
|
|
41435
42493
|
channels.close = diagnosticsChannel.channel("undici:websocket:close");
|
|
41436
42494
|
channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error");
|
|
41437
|
-
var
|
|
42495
|
+
var crypto2;
|
|
41438
42496
|
try {
|
|
41439
|
-
|
|
42497
|
+
crypto2 = require("crypto");
|
|
41440
42498
|
} catch {
|
|
41441
42499
|
}
|
|
41442
42500
|
function establishWebSocketConnection(url, protocols, ws, onEstablish, options) {
|
|
@@ -41455,7 +42513,7 @@ var require_connection = __commonJS({
|
|
|
41455
42513
|
const headersList = new Headers(options.headers)[kHeadersList];
|
|
41456
42514
|
request.headersList = headersList;
|
|
41457
42515
|
}
|
|
41458
|
-
const keyValue =
|
|
42516
|
+
const keyValue = crypto2.randomBytes(16).toString("base64");
|
|
41459
42517
|
request.headersList.append("sec-websocket-key", keyValue);
|
|
41460
42518
|
request.headersList.append("sec-websocket-version", "13");
|
|
41461
42519
|
for (const protocol of protocols) {
|
|
@@ -41484,7 +42542,7 @@ var require_connection = __commonJS({
|
|
|
41484
42542
|
return;
|
|
41485
42543
|
}
|
|
41486
42544
|
const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
|
|
41487
|
-
const digest =
|
|
42545
|
+
const digest = crypto2.createHash("sha1").update(keyValue + uid).digest("base64");
|
|
41488
42546
|
if (secWSAccept !== digest) {
|
|
41489
42547
|
failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
|
|
41490
42548
|
return;
|
|
@@ -41564,9 +42622,9 @@ var require_frame = __commonJS({
|
|
|
41564
42622
|
"node_modules/undici/lib/websocket/frame.js"(exports2, module2) {
|
|
41565
42623
|
"use strict";
|
|
41566
42624
|
var { maxUnsigned16Bit } = require_constants5();
|
|
41567
|
-
var
|
|
42625
|
+
var crypto2;
|
|
41568
42626
|
try {
|
|
41569
|
-
|
|
42627
|
+
crypto2 = require("crypto");
|
|
41570
42628
|
} catch {
|
|
41571
42629
|
}
|
|
41572
42630
|
var WebsocketFrameSend = class {
|
|
@@ -41575,7 +42633,7 @@ var require_frame = __commonJS({
|
|
|
41575
42633
|
*/
|
|
41576
42634
|
constructor(data) {
|
|
41577
42635
|
this.frameData = data;
|
|
41578
|
-
this.maskKey =
|
|
42636
|
+
this.maskKey = crypto2.randomBytes(4);
|
|
41579
42637
|
}
|
|
41580
42638
|
createFrame(opcode) {
|
|
41581
42639
|
const bodyLength = this.frameData?.byteLength ?? 0;
|
|
@@ -42317,11 +43375,11 @@ var require_undici = __commonJS({
|
|
|
42317
43375
|
if (typeof opts.path !== "string") {
|
|
42318
43376
|
throw new InvalidArgumentError("invalid opts.path");
|
|
42319
43377
|
}
|
|
42320
|
-
let
|
|
43378
|
+
let path27 = opts.path;
|
|
42321
43379
|
if (!opts.path.startsWith("/")) {
|
|
42322
|
-
|
|
43380
|
+
path27 = `/${path27}`;
|
|
42323
43381
|
}
|
|
42324
|
-
url = new URL(util.parseOrigin(url).origin +
|
|
43382
|
+
url = new URL(util.parseOrigin(url).origin + path27);
|
|
42325
43383
|
} else {
|
|
42326
43384
|
if (!opts) {
|
|
42327
43385
|
opts = typeof url === "object" ? url : {};
|
|
@@ -42870,7 +43928,7 @@ var init_mcp_check_provider = __esm({
|
|
|
42870
43928
|
logger.warn(
|
|
42871
43929
|
`MCP ${transportName} failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${error instanceof Error ? error.message : String(error)}`
|
|
42872
43930
|
);
|
|
42873
|
-
await new Promise((
|
|
43931
|
+
await new Promise((resolve14) => setTimeout(resolve14, delay));
|
|
42874
43932
|
attempt += 1;
|
|
42875
43933
|
} finally {
|
|
42876
43934
|
try {
|
|
@@ -43152,7 +44210,7 @@ async function acquirePromptLock() {
|
|
|
43152
44210
|
activePrompt = true;
|
|
43153
44211
|
return;
|
|
43154
44212
|
}
|
|
43155
|
-
await new Promise((
|
|
44213
|
+
await new Promise((resolve14) => waiters.push(resolve14));
|
|
43156
44214
|
activePrompt = true;
|
|
43157
44215
|
}
|
|
43158
44216
|
function releasePromptLock() {
|
|
@@ -43162,7 +44220,7 @@ function releasePromptLock() {
|
|
|
43162
44220
|
}
|
|
43163
44221
|
async function interactivePrompt(options) {
|
|
43164
44222
|
await acquirePromptLock();
|
|
43165
|
-
return new Promise((
|
|
44223
|
+
return new Promise((resolve14, reject) => {
|
|
43166
44224
|
const dbg = process.env.VISOR_DEBUG === "true";
|
|
43167
44225
|
try {
|
|
43168
44226
|
if (dbg) {
|
|
@@ -43249,12 +44307,12 @@ async function interactivePrompt(options) {
|
|
|
43249
44307
|
};
|
|
43250
44308
|
const finish = (value) => {
|
|
43251
44309
|
cleanup();
|
|
43252
|
-
|
|
44310
|
+
resolve14(value);
|
|
43253
44311
|
};
|
|
43254
44312
|
if (options.timeout && options.timeout > 0) {
|
|
43255
44313
|
timeoutId = setTimeout(() => {
|
|
43256
44314
|
cleanup();
|
|
43257
|
-
if (defaultValue !== void 0) return
|
|
44315
|
+
if (defaultValue !== void 0) return resolve14(defaultValue);
|
|
43258
44316
|
return reject(new Error("Input timeout"));
|
|
43259
44317
|
}, options.timeout);
|
|
43260
44318
|
}
|
|
@@ -43386,7 +44444,7 @@ async function interactivePrompt(options) {
|
|
|
43386
44444
|
});
|
|
43387
44445
|
}
|
|
43388
44446
|
async function simplePrompt(prompt) {
|
|
43389
|
-
return new Promise((
|
|
44447
|
+
return new Promise((resolve14) => {
|
|
43390
44448
|
const rl = readline.createInterface({
|
|
43391
44449
|
input: process.stdin,
|
|
43392
44450
|
output: process.stdout
|
|
@@ -43402,7 +44460,7 @@ async function simplePrompt(prompt) {
|
|
|
43402
44460
|
rl.question(`${prompt}
|
|
43403
44461
|
> `, (answer) => {
|
|
43404
44462
|
rl.close();
|
|
43405
|
-
|
|
44463
|
+
resolve14(answer.trim());
|
|
43406
44464
|
});
|
|
43407
44465
|
});
|
|
43408
44466
|
}
|
|
@@ -43570,7 +44628,7 @@ function isStdinAvailable() {
|
|
|
43570
44628
|
return !process.stdin.isTTY;
|
|
43571
44629
|
}
|
|
43572
44630
|
async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
43573
|
-
return new Promise((
|
|
44631
|
+
return new Promise((resolve14, reject) => {
|
|
43574
44632
|
let data = "";
|
|
43575
44633
|
let timeoutId;
|
|
43576
44634
|
if (timeout) {
|
|
@@ -43597,7 +44655,7 @@ async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
|
43597
44655
|
};
|
|
43598
44656
|
const onEnd = () => {
|
|
43599
44657
|
cleanup();
|
|
43600
|
-
|
|
44658
|
+
resolve14(data.trim());
|
|
43601
44659
|
};
|
|
43602
44660
|
const onError = (err) => {
|
|
43603
44661
|
cleanup();
|
|
@@ -43627,7 +44685,7 @@ var init_stdin_reader = __esm({
|
|
|
43627
44685
|
});
|
|
43628
44686
|
|
|
43629
44687
|
// src/providers/human-input-check-provider.ts
|
|
43630
|
-
var
|
|
44688
|
+
var fs17, path20, HumanInputCheckProvider;
|
|
43631
44689
|
var init_human_input_check_provider = __esm({
|
|
43632
44690
|
"src/providers/human-input-check-provider.ts"() {
|
|
43633
44691
|
"use strict";
|
|
@@ -43636,8 +44694,8 @@ var init_human_input_check_provider = __esm({
|
|
|
43636
44694
|
init_prompt_state();
|
|
43637
44695
|
init_liquid_extensions();
|
|
43638
44696
|
init_stdin_reader();
|
|
43639
|
-
|
|
43640
|
-
|
|
44697
|
+
fs17 = __toESM(require("fs"));
|
|
44698
|
+
path20 = __toESM(require("path"));
|
|
43641
44699
|
HumanInputCheckProvider = class _HumanInputCheckProvider extends CheckProvider {
|
|
43642
44700
|
liquid;
|
|
43643
44701
|
/**
|
|
@@ -43811,19 +44869,19 @@ var init_human_input_check_provider = __esm({
|
|
|
43811
44869
|
*/
|
|
43812
44870
|
async tryReadFile(filePath) {
|
|
43813
44871
|
try {
|
|
43814
|
-
const absolutePath =
|
|
43815
|
-
const normalizedPath =
|
|
44872
|
+
const absolutePath = path20.isAbsolute(filePath) ? filePath : path20.resolve(process.cwd(), filePath);
|
|
44873
|
+
const normalizedPath = path20.normalize(absolutePath);
|
|
43816
44874
|
const cwd = process.cwd();
|
|
43817
|
-
if (!normalizedPath.startsWith(cwd +
|
|
44875
|
+
if (!normalizedPath.startsWith(cwd + path20.sep) && normalizedPath !== cwd) {
|
|
43818
44876
|
return null;
|
|
43819
44877
|
}
|
|
43820
44878
|
try {
|
|
43821
|
-
await
|
|
43822
|
-
const stats = await
|
|
44879
|
+
await fs17.promises.access(normalizedPath, fs17.constants.R_OK);
|
|
44880
|
+
const stats = await fs17.promises.stat(normalizedPath);
|
|
43823
44881
|
if (!stats.isFile()) {
|
|
43824
44882
|
return null;
|
|
43825
44883
|
}
|
|
43826
|
-
const content = await
|
|
44884
|
+
const content = await fs17.promises.readFile(normalizedPath, "utf-8");
|
|
43827
44885
|
return content.trim();
|
|
43828
44886
|
} catch {
|
|
43829
44887
|
return null;
|
|
@@ -44969,13 +46027,13 @@ var init_script_check_provider = __esm({
|
|
|
44969
46027
|
});
|
|
44970
46028
|
|
|
44971
46029
|
// src/utils/worktree-manager.ts
|
|
44972
|
-
var
|
|
46030
|
+
var fs18, fsp, path21, crypto, WorktreeManager, worktreeManager;
|
|
44973
46031
|
var init_worktree_manager = __esm({
|
|
44974
46032
|
"src/utils/worktree-manager.ts"() {
|
|
44975
46033
|
"use strict";
|
|
44976
|
-
|
|
46034
|
+
fs18 = __toESM(require("fs"));
|
|
44977
46035
|
fsp = __toESM(require("fs/promises"));
|
|
44978
|
-
|
|
46036
|
+
path21 = __toESM(require("path"));
|
|
44979
46037
|
crypto = __toESM(require("crypto"));
|
|
44980
46038
|
init_command_executor();
|
|
44981
46039
|
init_logger();
|
|
@@ -44991,7 +46049,7 @@ var init_worktree_manager = __esm({
|
|
|
44991
46049
|
} catch {
|
|
44992
46050
|
cwd = "/tmp";
|
|
44993
46051
|
}
|
|
44994
|
-
const defaultBasePath = process.env.VISOR_WORKTREE_PATH ||
|
|
46052
|
+
const defaultBasePath = process.env.VISOR_WORKTREE_PATH || path21.join(cwd, ".visor", "worktrees");
|
|
44995
46053
|
this.config = {
|
|
44996
46054
|
enabled: true,
|
|
44997
46055
|
base_path: defaultBasePath,
|
|
@@ -45028,20 +46086,20 @@ var init_worktree_manager = __esm({
|
|
|
45028
46086
|
}
|
|
45029
46087
|
const reposDir = this.getReposDir();
|
|
45030
46088
|
const worktreesDir = this.getWorktreesDir();
|
|
45031
|
-
if (!
|
|
45032
|
-
|
|
46089
|
+
if (!fs18.existsSync(reposDir)) {
|
|
46090
|
+
fs18.mkdirSync(reposDir, { recursive: true });
|
|
45033
46091
|
logger.debug(`Created repos directory: ${reposDir}`);
|
|
45034
46092
|
}
|
|
45035
|
-
if (!
|
|
45036
|
-
|
|
46093
|
+
if (!fs18.existsSync(worktreesDir)) {
|
|
46094
|
+
fs18.mkdirSync(worktreesDir, { recursive: true });
|
|
45037
46095
|
logger.debug(`Created worktrees directory: ${worktreesDir}`);
|
|
45038
46096
|
}
|
|
45039
46097
|
}
|
|
45040
46098
|
getReposDir() {
|
|
45041
|
-
return
|
|
46099
|
+
return path21.join(this.config.base_path, "repos");
|
|
45042
46100
|
}
|
|
45043
46101
|
getWorktreesDir() {
|
|
45044
|
-
return
|
|
46102
|
+
return path21.join(this.config.base_path, "worktrees");
|
|
45045
46103
|
}
|
|
45046
46104
|
/**
|
|
45047
46105
|
* Generate a deterministic worktree ID based on repository and ref.
|
|
@@ -45059,8 +46117,8 @@ var init_worktree_manager = __esm({
|
|
|
45059
46117
|
async getOrCreateBareRepo(repository, repoUrl, _token, fetchDepth, cloneTimeoutMs) {
|
|
45060
46118
|
const reposDir = this.getReposDir();
|
|
45061
46119
|
const repoName = repository.replace(/\//g, "-");
|
|
45062
|
-
const bareRepoPath =
|
|
45063
|
-
if (
|
|
46120
|
+
const bareRepoPath = path21.join(reposDir, `${repoName}.git`);
|
|
46121
|
+
if (fs18.existsSync(bareRepoPath)) {
|
|
45064
46122
|
logger.debug(`Bare repository already exists: ${bareRepoPath}`);
|
|
45065
46123
|
const verifyResult = await this.verifyBareRepoRemote(bareRepoPath, repoUrl);
|
|
45066
46124
|
if (verifyResult === "timeout") {
|
|
@@ -45207,11 +46265,11 @@ var init_worktree_manager = __esm({
|
|
|
45207
46265
|
options.cloneTimeoutMs
|
|
45208
46266
|
);
|
|
45209
46267
|
const worktreeId = this.generateWorktreeId(repository, ref);
|
|
45210
|
-
let worktreePath = options.workingDirectory ||
|
|
46268
|
+
let worktreePath = options.workingDirectory || path21.join(this.getWorktreesDir(), worktreeId);
|
|
45211
46269
|
if (options.workingDirectory) {
|
|
45212
46270
|
worktreePath = this.validatePath(options.workingDirectory);
|
|
45213
46271
|
}
|
|
45214
|
-
if (
|
|
46272
|
+
if (fs18.existsSync(worktreePath)) {
|
|
45215
46273
|
logger.debug(`Worktree already exists: ${worktreePath}`);
|
|
45216
46274
|
const metadata2 = await this.loadMetadata(worktreePath);
|
|
45217
46275
|
if (metadata2) {
|
|
@@ -45452,31 +46510,48 @@ var init_worktree_manager = __esm({
|
|
|
45452
46510
|
const result = await this.executeGitCommand(removeCmd, { timeout: 3e4 });
|
|
45453
46511
|
if (result.exitCode !== 0) {
|
|
45454
46512
|
logger.warn(`Failed to remove worktree via git: ${result.stderr}`);
|
|
45455
|
-
if (
|
|
46513
|
+
if (fs18.existsSync(worktree_path)) {
|
|
45456
46514
|
logger.debug(`Manually removing worktree directory`);
|
|
45457
|
-
|
|
46515
|
+
fs18.rmSync(worktree_path, { recursive: true, force: true });
|
|
45458
46516
|
}
|
|
45459
46517
|
}
|
|
46518
|
+
const metadataPath = this.getMetadataPath(worktree_path);
|
|
46519
|
+
try {
|
|
46520
|
+
if (fs18.existsSync(metadataPath)) {
|
|
46521
|
+
fs18.unlinkSync(metadataPath);
|
|
46522
|
+
}
|
|
46523
|
+
} catch {
|
|
46524
|
+
}
|
|
45460
46525
|
this.activeWorktrees.delete(worktreeId);
|
|
45461
46526
|
logger.info(`Successfully removed worktree: ${worktreeId}`);
|
|
45462
46527
|
}
|
|
46528
|
+
/**
|
|
46529
|
+
* Get the metadata file path for a worktree.
|
|
46530
|
+
* Stored as a sibling file OUTSIDE the worktree to avoid being committed
|
|
46531
|
+
* when agents run `git add .` inside the checked-out repo.
|
|
46532
|
+
*/
|
|
46533
|
+
getMetadataPath(worktreePath) {
|
|
46534
|
+
return worktreePath.replace(/\/?$/, "") + ".metadata.json";
|
|
46535
|
+
}
|
|
45463
46536
|
/**
|
|
45464
46537
|
* Save worktree metadata
|
|
45465
46538
|
*/
|
|
45466
46539
|
async saveMetadata(worktreePath, metadata) {
|
|
45467
|
-
const metadataPath =
|
|
45468
|
-
|
|
46540
|
+
const metadataPath = this.getMetadataPath(worktreePath);
|
|
46541
|
+
fs18.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2), "utf8");
|
|
45469
46542
|
}
|
|
45470
46543
|
/**
|
|
45471
46544
|
* Load worktree metadata
|
|
45472
46545
|
*/
|
|
45473
46546
|
async loadMetadata(worktreePath) {
|
|
45474
|
-
const metadataPath =
|
|
45475
|
-
|
|
46547
|
+
const metadataPath = this.getMetadataPath(worktreePath);
|
|
46548
|
+
const legacyPath = path21.join(worktreePath, ".visor-metadata.json");
|
|
46549
|
+
const pathToRead = fs18.existsSync(metadataPath) ? metadataPath : fs18.existsSync(legacyPath) ? legacyPath : null;
|
|
46550
|
+
if (!pathToRead) {
|
|
45476
46551
|
return null;
|
|
45477
46552
|
}
|
|
45478
46553
|
try {
|
|
45479
|
-
const content =
|
|
46554
|
+
const content = fs18.readFileSync(pathToRead, "utf8");
|
|
45480
46555
|
return JSON.parse(content);
|
|
45481
46556
|
} catch (error) {
|
|
45482
46557
|
logger.warn(`Failed to load metadata: ${error}`);
|
|
@@ -45488,14 +46563,14 @@ var init_worktree_manager = __esm({
|
|
|
45488
46563
|
*/
|
|
45489
46564
|
async listWorktrees() {
|
|
45490
46565
|
const worktreesDir = this.getWorktreesDir();
|
|
45491
|
-
if (!
|
|
46566
|
+
if (!fs18.existsSync(worktreesDir)) {
|
|
45492
46567
|
return [];
|
|
45493
46568
|
}
|
|
45494
|
-
const entries =
|
|
46569
|
+
const entries = fs18.readdirSync(worktreesDir, { withFileTypes: true });
|
|
45495
46570
|
const worktrees = [];
|
|
45496
46571
|
for (const entry of entries) {
|
|
45497
46572
|
if (!entry.isDirectory()) continue;
|
|
45498
|
-
const worktreePath =
|
|
46573
|
+
const worktreePath = path21.join(worktreesDir, entry.name);
|
|
45499
46574
|
const metadata = await this.loadMetadata(worktreePath);
|
|
45500
46575
|
if (metadata) {
|
|
45501
46576
|
worktrees.push({
|
|
@@ -45627,8 +46702,8 @@ var init_worktree_manager = __esm({
|
|
|
45627
46702
|
* Validate path to prevent directory traversal
|
|
45628
46703
|
*/
|
|
45629
46704
|
validatePath(userPath) {
|
|
45630
|
-
const resolvedPath =
|
|
45631
|
-
if (!
|
|
46705
|
+
const resolvedPath = path21.resolve(userPath);
|
|
46706
|
+
if (!path21.isAbsolute(resolvedPath)) {
|
|
45632
46707
|
throw new Error("Path must be absolute");
|
|
45633
46708
|
}
|
|
45634
46709
|
const sensitivePatterns = [
|
|
@@ -47657,23 +48732,23 @@ __export(renderer_schema_exports, {
|
|
|
47657
48732
|
});
|
|
47658
48733
|
async function loadRendererSchema(name) {
|
|
47659
48734
|
try {
|
|
47660
|
-
const
|
|
47661
|
-
const
|
|
48735
|
+
const fs23 = await import("fs/promises");
|
|
48736
|
+
const path27 = await import("path");
|
|
47662
48737
|
const sanitized = String(name).replace(/[^a-zA-Z0-9-]/g, "");
|
|
47663
48738
|
if (!sanitized) return void 0;
|
|
47664
48739
|
const candidates = [
|
|
47665
48740
|
// When bundled with ncc, __dirname is dist/ and output/ is at dist/output/
|
|
47666
|
-
|
|
48741
|
+
path27.join(__dirname, "output", sanitized, "schema.json"),
|
|
47667
48742
|
// When running from source, __dirname is src/state-machine/dispatch/ and output/ is at output/
|
|
47668
|
-
|
|
48743
|
+
path27.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
|
|
47669
48744
|
// When running from a checkout with output/ folder copied to CWD
|
|
47670
|
-
|
|
48745
|
+
path27.join(process.cwd(), "output", sanitized, "schema.json"),
|
|
47671
48746
|
// Fallback: cwd/dist/output/
|
|
47672
|
-
|
|
48747
|
+
path27.join(process.cwd(), "dist", "output", sanitized, "schema.json")
|
|
47673
48748
|
];
|
|
47674
48749
|
for (const p of candidates) {
|
|
47675
48750
|
try {
|
|
47676
|
-
const raw = await
|
|
48751
|
+
const raw = await fs23.readFile(p, "utf-8");
|
|
47677
48752
|
return JSON.parse(raw);
|
|
47678
48753
|
} catch {
|
|
47679
48754
|
}
|
|
@@ -50092,8 +51167,8 @@ function updateStats2(results, state, isForEachIteration = false) {
|
|
|
50092
51167
|
async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
50093
51168
|
try {
|
|
50094
51169
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
50095
|
-
const
|
|
50096
|
-
const
|
|
51170
|
+
const fs23 = await import("fs/promises");
|
|
51171
|
+
const path27 = await import("path");
|
|
50097
51172
|
const schemaRaw = checkConfig.schema || "plain";
|
|
50098
51173
|
const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
|
|
50099
51174
|
let templateContent;
|
|
@@ -50102,27 +51177,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
|
50102
51177
|
logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
|
|
50103
51178
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
50104
51179
|
const file = String(checkConfig.template.file);
|
|
50105
|
-
const resolved =
|
|
50106
|
-
templateContent = await
|
|
51180
|
+
const resolved = path27.resolve(process.cwd(), file);
|
|
51181
|
+
templateContent = await fs23.readFile(resolved, "utf-8");
|
|
50107
51182
|
logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
|
|
50108
51183
|
} else if (schema && schema !== "plain") {
|
|
50109
51184
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
50110
51185
|
if (sanitized) {
|
|
50111
51186
|
const candidatePaths = [
|
|
50112
|
-
|
|
51187
|
+
path27.join(__dirname, "output", sanitized, "template.liquid"),
|
|
50113
51188
|
// bundled: dist/output/
|
|
50114
|
-
|
|
51189
|
+
path27.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
50115
51190
|
// source (from state-machine/states)
|
|
50116
|
-
|
|
51191
|
+
path27.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
|
|
50117
51192
|
// source (alternate)
|
|
50118
|
-
|
|
51193
|
+
path27.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
50119
51194
|
// fallback: cwd/output/
|
|
50120
|
-
|
|
51195
|
+
path27.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
50121
51196
|
// fallback: cwd/dist/output/
|
|
50122
51197
|
];
|
|
50123
51198
|
for (const p of candidatePaths) {
|
|
50124
51199
|
try {
|
|
50125
|
-
templateContent = await
|
|
51200
|
+
templateContent = await fs23.readFile(p, "utf-8");
|
|
50126
51201
|
if (templateContent) {
|
|
50127
51202
|
logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
|
|
50128
51203
|
break;
|
|
@@ -51645,13 +52720,13 @@ var init_sandbox_manager = __esm({
|
|
|
51645
52720
|
});
|
|
51646
52721
|
|
|
51647
52722
|
// src/utils/file-exclusion.ts
|
|
51648
|
-
var import_ignore,
|
|
52723
|
+
var import_ignore, fs19, path22, DEFAULT_EXCLUSION_PATTERNS, FileExclusionHelper;
|
|
51649
52724
|
var init_file_exclusion = __esm({
|
|
51650
52725
|
"src/utils/file-exclusion.ts"() {
|
|
51651
52726
|
"use strict";
|
|
51652
52727
|
import_ignore = __toESM(require("ignore"));
|
|
51653
|
-
|
|
51654
|
-
|
|
52728
|
+
fs19 = __toESM(require("fs"));
|
|
52729
|
+
path22 = __toESM(require("path"));
|
|
51655
52730
|
DEFAULT_EXCLUSION_PATTERNS = [
|
|
51656
52731
|
"dist/",
|
|
51657
52732
|
"build/",
|
|
@@ -51670,7 +52745,7 @@ var init_file_exclusion = __esm({
|
|
|
51670
52745
|
* @param additionalPatterns - Additional patterns to include (optional, defaults to common build artifacts)
|
|
51671
52746
|
*/
|
|
51672
52747
|
constructor(workingDirectory = process.cwd(), additionalPatterns = DEFAULT_EXCLUSION_PATTERNS) {
|
|
51673
|
-
const normalizedPath =
|
|
52748
|
+
const normalizedPath = path22.resolve(workingDirectory);
|
|
51674
52749
|
if (normalizedPath.includes("\0")) {
|
|
51675
52750
|
throw new Error("Invalid workingDirectory: contains null bytes");
|
|
51676
52751
|
}
|
|
@@ -51682,11 +52757,11 @@ var init_file_exclusion = __esm({
|
|
|
51682
52757
|
* @param additionalPatterns - Additional patterns to add to gitignore rules
|
|
51683
52758
|
*/
|
|
51684
52759
|
loadGitignore(additionalPatterns) {
|
|
51685
|
-
const gitignorePath =
|
|
51686
|
-
const resolvedWorkingDir =
|
|
52760
|
+
const gitignorePath = path22.resolve(this.workingDirectory, ".gitignore");
|
|
52761
|
+
const resolvedWorkingDir = path22.resolve(this.workingDirectory);
|
|
51687
52762
|
try {
|
|
51688
|
-
const relativePath =
|
|
51689
|
-
if (relativePath.startsWith("..") ||
|
|
52763
|
+
const relativePath = path22.relative(resolvedWorkingDir, gitignorePath);
|
|
52764
|
+
if (relativePath.startsWith("..") || path22.isAbsolute(relativePath)) {
|
|
51690
52765
|
throw new Error("Invalid gitignore path: path traversal detected");
|
|
51691
52766
|
}
|
|
51692
52767
|
if (relativePath !== ".gitignore") {
|
|
@@ -51696,8 +52771,8 @@ var init_file_exclusion = __esm({
|
|
|
51696
52771
|
if (additionalPatterns && additionalPatterns.length > 0) {
|
|
51697
52772
|
this.gitignore.add(additionalPatterns);
|
|
51698
52773
|
}
|
|
51699
|
-
if (
|
|
51700
|
-
const rawContent =
|
|
52774
|
+
if (fs19.existsSync(gitignorePath)) {
|
|
52775
|
+
const rawContent = fs19.readFileSync(gitignorePath, "utf8");
|
|
51701
52776
|
const gitignoreContent = rawContent.replace(/[\r\n]+/g, "\n").replace(/[\x00-\x09\x0B-\x1F\x7F]/g, "").split("\n").filter((line) => line.length < 1e3).join("\n").trim();
|
|
51702
52777
|
this.gitignore.add(gitignoreContent);
|
|
51703
52778
|
if (process.env.VISOR_DEBUG === "true") {
|
|
@@ -51729,13 +52804,13 @@ var git_repository_analyzer_exports = {};
|
|
|
51729
52804
|
__export(git_repository_analyzer_exports, {
|
|
51730
52805
|
GitRepositoryAnalyzer: () => GitRepositoryAnalyzer
|
|
51731
52806
|
});
|
|
51732
|
-
var import_simple_git2,
|
|
52807
|
+
var import_simple_git2, path23, fs20, MAX_PATCH_SIZE, GitRepositoryAnalyzer;
|
|
51733
52808
|
var init_git_repository_analyzer = __esm({
|
|
51734
52809
|
"src/git-repository-analyzer.ts"() {
|
|
51735
52810
|
"use strict";
|
|
51736
52811
|
import_simple_git2 = require("simple-git");
|
|
51737
|
-
|
|
51738
|
-
|
|
52812
|
+
path23 = __toESM(require("path"));
|
|
52813
|
+
fs20 = __toESM(require("fs"));
|
|
51739
52814
|
init_file_exclusion();
|
|
51740
52815
|
MAX_PATCH_SIZE = 50 * 1024;
|
|
51741
52816
|
GitRepositoryAnalyzer = class {
|
|
@@ -51924,7 +52999,7 @@ ${file.patch}`).join("\n\n");
|
|
|
51924
52999
|
console.error(`\u23ED\uFE0F Skipping excluded file: ${file}`);
|
|
51925
53000
|
continue;
|
|
51926
53001
|
}
|
|
51927
|
-
const filePath =
|
|
53002
|
+
const filePath = path23.join(this.cwd, file);
|
|
51928
53003
|
const fileChange = await this.analyzeFileChange(file, status2, filePath, includeContext);
|
|
51929
53004
|
changes.push(fileChange);
|
|
51930
53005
|
}
|
|
@@ -52000,7 +53075,7 @@ ${file.patch}`).join("\n\n");
|
|
|
52000
53075
|
let content;
|
|
52001
53076
|
let truncated = false;
|
|
52002
53077
|
try {
|
|
52003
|
-
if (includeContext && status !== "added" &&
|
|
53078
|
+
if (includeContext && status !== "added" && fs20.existsSync(filePath)) {
|
|
52004
53079
|
const diff = await this.git.diff(["--", filename]).catch(() => "");
|
|
52005
53080
|
if (diff) {
|
|
52006
53081
|
const result = this.truncatePatch(diff, filename);
|
|
@@ -52010,7 +53085,7 @@ ${file.patch}`).join("\n\n");
|
|
|
52010
53085
|
additions = lines.filter((line) => line.startsWith("+")).length;
|
|
52011
53086
|
deletions = lines.filter((line) => line.startsWith("-")).length;
|
|
52012
53087
|
}
|
|
52013
|
-
} else if (status !== "added" &&
|
|
53088
|
+
} else if (status !== "added" && fs20.existsSync(filePath)) {
|
|
52014
53089
|
const diff = await this.git.diff(["--", filename]).catch(() => "");
|
|
52015
53090
|
if (diff) {
|
|
52016
53091
|
const lines = diff.split("\n");
|
|
@@ -52018,17 +53093,17 @@ ${file.patch}`).join("\n\n");
|
|
|
52018
53093
|
deletions = lines.filter((line) => line.startsWith("-")).length;
|
|
52019
53094
|
}
|
|
52020
53095
|
}
|
|
52021
|
-
if (status === "added" &&
|
|
53096
|
+
if (status === "added" && fs20.existsSync(filePath)) {
|
|
52022
53097
|
try {
|
|
52023
|
-
const stats =
|
|
53098
|
+
const stats = fs20.statSync(filePath);
|
|
52024
53099
|
if (stats.isFile() && stats.size < 1024 * 1024) {
|
|
52025
53100
|
if (includeContext) {
|
|
52026
|
-
content =
|
|
53101
|
+
content = fs20.readFileSync(filePath, "utf8");
|
|
52027
53102
|
const result = this.truncatePatch(content, filename);
|
|
52028
53103
|
patch = result.patch;
|
|
52029
53104
|
truncated = result.truncated;
|
|
52030
53105
|
}
|
|
52031
|
-
const fileContent = includeContext ? content :
|
|
53106
|
+
const fileContent = includeContext ? content : fs20.readFileSync(filePath, "utf8");
|
|
52032
53107
|
additions = fileContent.split("\n").length;
|
|
52033
53108
|
}
|
|
52034
53109
|
} catch {
|
|
@@ -52119,12 +53194,12 @@ function shellEscape(str) {
|
|
|
52119
53194
|
function sanitizePathComponent(name) {
|
|
52120
53195
|
return name.replace(/\.\./g, "").replace(/[\/\\]/g, "-").replace(/^\.+/, "").trim() || "unnamed";
|
|
52121
53196
|
}
|
|
52122
|
-
var fsp2,
|
|
53197
|
+
var fsp2, path24, WorkspaceManager;
|
|
52123
53198
|
var init_workspace_manager = __esm({
|
|
52124
53199
|
"src/utils/workspace-manager.ts"() {
|
|
52125
53200
|
"use strict";
|
|
52126
53201
|
fsp2 = __toESM(require("fs/promises"));
|
|
52127
|
-
|
|
53202
|
+
path24 = __toESM(require("path"));
|
|
52128
53203
|
init_command_executor();
|
|
52129
53204
|
init_logger();
|
|
52130
53205
|
WorkspaceManager = class _WorkspaceManager {
|
|
@@ -52158,7 +53233,7 @@ var init_workspace_manager = __esm({
|
|
|
52158
53233
|
};
|
|
52159
53234
|
this.basePath = this.config.basePath;
|
|
52160
53235
|
const workspaceDirName = sanitizePathComponent(this.config.name || this.sessionId);
|
|
52161
|
-
this.workspacePath =
|
|
53236
|
+
this.workspacePath = path24.join(this.basePath, workspaceDirName);
|
|
52162
53237
|
}
|
|
52163
53238
|
/**
|
|
52164
53239
|
* Get or create a WorkspaceManager instance for a session
|
|
@@ -52205,8 +53280,8 @@ var init_workspace_manager = __esm({
|
|
|
52205
53280
|
);
|
|
52206
53281
|
if (this.cleanupRequested && this.activeOperations === 0) {
|
|
52207
53282
|
logger.debug(`[Workspace] All references released, proceeding with deferred cleanup`);
|
|
52208
|
-
for (const
|
|
52209
|
-
|
|
53283
|
+
for (const resolve14 of this.cleanupResolvers) {
|
|
53284
|
+
resolve14();
|
|
52210
53285
|
}
|
|
52211
53286
|
this.cleanupResolvers = [];
|
|
52212
53287
|
}
|
|
@@ -52253,7 +53328,7 @@ var init_workspace_manager = __esm({
|
|
|
52253
53328
|
configuredMainProjectName || this.extractProjectName(this.originalPath)
|
|
52254
53329
|
);
|
|
52255
53330
|
this.usedNames.add(mainProjectName);
|
|
52256
|
-
const mainProjectPath =
|
|
53331
|
+
const mainProjectPath = path24.join(this.workspacePath, mainProjectName);
|
|
52257
53332
|
const isGitRepo = await this.isGitRepository(this.originalPath);
|
|
52258
53333
|
if (isGitRepo) {
|
|
52259
53334
|
const exists = await this.pathExists(mainProjectPath);
|
|
@@ -52271,6 +53346,8 @@ var init_workspace_manager = __esm({
|
|
|
52271
53346
|
} catch {
|
|
52272
53347
|
}
|
|
52273
53348
|
await this.createMainProjectWorktree(mainProjectPath);
|
|
53349
|
+
} else {
|
|
53350
|
+
await this.refreshWorktreeToUpstream(mainProjectPath);
|
|
52274
53351
|
}
|
|
52275
53352
|
} else {
|
|
52276
53353
|
await this.createMainProjectWorktree(mainProjectPath);
|
|
@@ -52315,7 +53392,7 @@ var init_workspace_manager = __esm({
|
|
|
52315
53392
|
let projectName = sanitizePathComponent(description || this.extractRepoName(repository));
|
|
52316
53393
|
projectName = this.getUniqueName(projectName);
|
|
52317
53394
|
this.usedNames.add(projectName);
|
|
52318
|
-
const workspacePath =
|
|
53395
|
+
const workspacePath = path24.join(this.workspacePath, projectName);
|
|
52319
53396
|
await fsp2.rm(workspacePath, { recursive: true, force: true });
|
|
52320
53397
|
try {
|
|
52321
53398
|
await fsp2.symlink(worktreePath, workspacePath);
|
|
@@ -52361,19 +53438,19 @@ var init_workspace_manager = __esm({
|
|
|
52361
53438
|
);
|
|
52362
53439
|
this.cleanupRequested = true;
|
|
52363
53440
|
await Promise.race([
|
|
52364
|
-
new Promise((
|
|
53441
|
+
new Promise((resolve14) => {
|
|
52365
53442
|
if (this.activeOperations === 0) {
|
|
52366
|
-
|
|
53443
|
+
resolve14();
|
|
52367
53444
|
} else {
|
|
52368
|
-
this.cleanupResolvers.push(
|
|
53445
|
+
this.cleanupResolvers.push(resolve14);
|
|
52369
53446
|
}
|
|
52370
53447
|
}),
|
|
52371
|
-
new Promise((
|
|
53448
|
+
new Promise((resolve14) => {
|
|
52372
53449
|
setTimeout(() => {
|
|
52373
53450
|
logger.warn(
|
|
52374
53451
|
`[Workspace] Cleanup timeout after ${timeout}ms, proceeding anyway (${this.activeOperations} operations still active)`
|
|
52375
53452
|
);
|
|
52376
|
-
|
|
53453
|
+
resolve14();
|
|
52377
53454
|
}, timeout);
|
|
52378
53455
|
})
|
|
52379
53456
|
]);
|
|
@@ -52425,7 +53502,7 @@ var init_workspace_manager = __esm({
|
|
|
52425
53502
|
const now = Date.now();
|
|
52426
53503
|
for (const entry of entries) {
|
|
52427
53504
|
if (!entry.isDirectory()) continue;
|
|
52428
|
-
const dirPath =
|
|
53505
|
+
const dirPath = path24.join(basePath, entry.name);
|
|
52429
53506
|
try {
|
|
52430
53507
|
const stat2 = await fsp2.stat(dirPath);
|
|
52431
53508
|
if (now - stat2.mtimeMs > maxAgeMs) {
|
|
@@ -52433,8 +53510,8 @@ var init_workspace_manager = __esm({
|
|
|
52433
53510
|
const subdirs = await fsp2.readdir(dirPath, { withFileTypes: true });
|
|
52434
53511
|
for (const sub of subdirs) {
|
|
52435
53512
|
if (!sub.isDirectory()) continue;
|
|
52436
|
-
const subPath =
|
|
52437
|
-
const gitFilePath =
|
|
53513
|
+
const subPath = path24.join(dirPath, sub.name);
|
|
53514
|
+
const gitFilePath = path24.join(subPath, ".git");
|
|
52438
53515
|
try {
|
|
52439
53516
|
const gitContent = await fsp2.readFile(gitFilePath, "utf-8");
|
|
52440
53517
|
const match = gitContent.match(/gitdir:\s*(.+)/);
|
|
@@ -52466,33 +53543,143 @@ var init_workspace_manager = __esm({
|
|
|
52466
53543
|
return cleaned;
|
|
52467
53544
|
}
|
|
52468
53545
|
/**
|
|
52469
|
-
*
|
|
52470
|
-
*
|
|
52471
|
-
*
|
|
52472
|
-
*
|
|
52473
|
-
*
|
|
52474
|
-
*
|
|
52475
|
-
*
|
|
52476
|
-
* full clone/bare-repo/fetch/worktree pipeline.
|
|
53546
|
+
* visor-disable: architecture - The helpers below (resolveUpstreamRef,
|
|
53547
|
+
* fetchAndResolveUpstream, resetAndCleanWorktree, refreshWorktreeToUpstream)
|
|
53548
|
+
* are NOT duplicates of WorktreeManager's fetchRef/getCommitShaForRef/cleanWorktree.
|
|
53549
|
+
* WorktreeManager operates on BARE repo caches cloned from remote URLs, while
|
|
53550
|
+
* WorkspaceManager operates on the LOCAL working repo the user already has checked out.
|
|
53551
|
+
* The git commands differ (e.g. `fetch origin --prune` vs `fetch origin <ref>:<ref>`)
|
|
53552
|
+
* and sharing code would require adding a "local mode" to WorktreeManager for no benefit.
|
|
52477
53553
|
*/
|
|
52478
|
-
|
|
52479
|
-
|
|
53554
|
+
/**
|
|
53555
|
+
* Resolve the upstream default branch ref.
|
|
53556
|
+
* Tries origin/HEAD (symbolic), then origin/main, then origin/master.
|
|
53557
|
+
* Falls back to local HEAD if no remote is configured.
|
|
53558
|
+
*/
|
|
53559
|
+
async resolveUpstreamRef() {
|
|
53560
|
+
const esc = shellEscape(this.originalPath);
|
|
53561
|
+
const symbolicResult = await commandExecutor.execute(
|
|
53562
|
+
`git -C ${esc} symbolic-ref refs/remotes/origin/HEAD 2>/dev/null`,
|
|
53563
|
+
{ timeout: 1e4 }
|
|
53564
|
+
);
|
|
53565
|
+
if (symbolicResult.exitCode === 0 && symbolicResult.stdout.trim()) {
|
|
53566
|
+
const ref = symbolicResult.stdout.trim().replace("refs/remotes/", "");
|
|
53567
|
+
logger.debug(`[Workspace] Resolved upstream default branch via origin/HEAD: ${ref}`);
|
|
53568
|
+
return ref;
|
|
53569
|
+
}
|
|
53570
|
+
const mainResult = await commandExecutor.execute(
|
|
53571
|
+
`git -C ${esc} rev-parse --verify origin/main 2>/dev/null`,
|
|
53572
|
+
{ timeout: 1e4 }
|
|
53573
|
+
);
|
|
53574
|
+
if (mainResult.exitCode === 0) {
|
|
53575
|
+
logger.debug(`[Workspace] Using origin/main as upstream ref`);
|
|
53576
|
+
return "origin/main";
|
|
53577
|
+
}
|
|
53578
|
+
const masterResult = await commandExecutor.execute(
|
|
53579
|
+
`git -C ${esc} rev-parse --verify origin/master 2>/dev/null`,
|
|
53580
|
+
{ timeout: 1e4 }
|
|
53581
|
+
);
|
|
53582
|
+
if (masterResult.exitCode === 0) {
|
|
53583
|
+
logger.debug(`[Workspace] Using origin/master as upstream ref`);
|
|
53584
|
+
return "origin/master";
|
|
53585
|
+
}
|
|
53586
|
+
logger.warn(`[Workspace] No upstream remote found, falling back to local HEAD`);
|
|
53587
|
+
return "HEAD";
|
|
53588
|
+
}
|
|
53589
|
+
/**
|
|
53590
|
+
* Fetch latest from origin, resolve the upstream default branch, and return
|
|
53591
|
+
* both the ref name and the resolved commit SHA.
|
|
53592
|
+
*/
|
|
53593
|
+
async fetchAndResolveUpstream() {
|
|
53594
|
+
logger.debug(`[Workspace] Fetching latest from origin`);
|
|
53595
|
+
const fetchResult = await commandExecutor.execute(
|
|
53596
|
+
`git -C ${shellEscape(this.originalPath)} fetch origin --prune 2>&1`,
|
|
53597
|
+
{ timeout: 12e4 }
|
|
53598
|
+
);
|
|
53599
|
+
if (fetchResult.exitCode !== 0) {
|
|
53600
|
+
logger.warn(`[Workspace] fetch origin failed (will use cached refs): ${fetchResult.stderr}`);
|
|
53601
|
+
}
|
|
53602
|
+
const upstreamRef = await this.resolveUpstreamRef();
|
|
53603
|
+
const shaResult = await commandExecutor.execute(
|
|
53604
|
+
`git -C ${shellEscape(this.originalPath)} rev-parse ${shellEscape(upstreamRef)}`,
|
|
53605
|
+
{ timeout: 1e4 }
|
|
53606
|
+
);
|
|
53607
|
+
if (shaResult.exitCode === 0) {
|
|
53608
|
+
return { upstreamRef, targetSha: shaResult.stdout.trim() };
|
|
53609
|
+
}
|
|
53610
|
+
logger.warn(
|
|
53611
|
+
`[Workspace] Could not resolve ${upstreamRef} (${shaResult.stderr.trim()}), falling back to HEAD`
|
|
53612
|
+
);
|
|
52480
53613
|
const headResult = await commandExecutor.execute(
|
|
52481
53614
|
`git -C ${shellEscape(this.originalPath)} rev-parse HEAD`,
|
|
52482
|
-
{
|
|
52483
|
-
timeout: 1e4
|
|
52484
|
-
}
|
|
53615
|
+
{ timeout: 1e4 }
|
|
52485
53616
|
);
|
|
52486
53617
|
if (headResult.exitCode !== 0) {
|
|
52487
|
-
throw new Error(`
|
|
53618
|
+
throw new Error(`Repository has no commits \u2014 cannot create worktree: ${headResult.stderr}`);
|
|
52488
53619
|
}
|
|
52489
|
-
|
|
52490
|
-
|
|
53620
|
+
return { upstreamRef: "HEAD", targetSha: headResult.stdout.trim() };
|
|
53621
|
+
}
|
|
53622
|
+
/**
|
|
53623
|
+
* Reset a worktree to a specific commit and clean all modifications.
|
|
53624
|
+
*/
|
|
53625
|
+
async resetAndCleanWorktree(worktreePath, targetSha) {
|
|
53626
|
+
const escapedPath = shellEscape(worktreePath);
|
|
53627
|
+
const escapedSha = shellEscape(targetSha);
|
|
53628
|
+
const resetResult = await commandExecutor.execute(
|
|
53629
|
+
`git -C ${escapedPath} reset --hard ${escapedSha}`,
|
|
53630
|
+
{ timeout: 1e4 }
|
|
53631
|
+
);
|
|
53632
|
+
if (resetResult.exitCode !== 0) {
|
|
53633
|
+
logger.warn(`[Workspace] reset --hard failed: ${resetResult.stderr}`);
|
|
53634
|
+
}
|
|
53635
|
+
const cleanResult = await commandExecutor.execute(`git -C ${escapedPath} clean -fdx`, {
|
|
53636
|
+
timeout: 3e4
|
|
53637
|
+
});
|
|
53638
|
+
if (cleanResult.exitCode !== 0) {
|
|
53639
|
+
logger.warn(`[Workspace] clean -fdx failed: ${cleanResult.stderr}`);
|
|
53640
|
+
}
|
|
53641
|
+
}
|
|
53642
|
+
/**
|
|
53643
|
+
* Refresh an existing worktree to the latest upstream default branch
|
|
53644
|
+
* and ensure it has no modified or untracked files.
|
|
53645
|
+
*/
|
|
53646
|
+
async refreshWorktreeToUpstream(worktreePath) {
|
|
53647
|
+
logger.info(`[Workspace] Refreshing worktree to latest upstream: ${worktreePath}`);
|
|
53648
|
+
try {
|
|
53649
|
+
const { upstreamRef, targetSha } = await this.fetchAndResolveUpstream();
|
|
53650
|
+
const checkoutResult = await commandExecutor.execute(
|
|
53651
|
+
`git -C ${shellEscape(worktreePath)} checkout --detach ${shellEscape(targetSha)}`,
|
|
53652
|
+
{ timeout: 3e4 }
|
|
53653
|
+
);
|
|
53654
|
+
if (checkoutResult.exitCode !== 0) {
|
|
53655
|
+
logger.warn(
|
|
53656
|
+
`[Workspace] checkout --detach failed (worktree stays at current commit): ${checkoutResult.stderr}`
|
|
53657
|
+
);
|
|
53658
|
+
await this.resetAndCleanWorktree(worktreePath, "HEAD");
|
|
53659
|
+
return;
|
|
53660
|
+
}
|
|
53661
|
+
await this.resetAndCleanWorktree(worktreePath, targetSha);
|
|
53662
|
+
logger.info(`[Workspace] Worktree updated to ${upstreamRef} (${targetSha.slice(0, 8)})`);
|
|
53663
|
+
} catch (error) {
|
|
53664
|
+
logger.warn(`[Workspace] Failed to refresh worktree (continuing with stale state): ${error}`);
|
|
53665
|
+
}
|
|
53666
|
+
}
|
|
53667
|
+
/**
|
|
53668
|
+
* Create worktree for the main project.
|
|
53669
|
+
* See visor-disable comment above resolveUpstreamRef for why this doesn't use WorktreeManager.
|
|
53670
|
+
*/
|
|
53671
|
+
async createMainProjectWorktree(targetPath) {
|
|
53672
|
+
logger.debug(`Creating main project worktree: ${targetPath}`);
|
|
53673
|
+
const { upstreamRef, targetSha } = await this.fetchAndResolveUpstream();
|
|
53674
|
+
const createCmd = `git -C ${shellEscape(this.originalPath)} worktree add --detach ${shellEscape(targetPath)} ${shellEscape(targetSha)}`;
|
|
52491
53675
|
const result = await commandExecutor.execute(createCmd, { timeout: 6e4 });
|
|
52492
53676
|
if (result.exitCode !== 0) {
|
|
52493
53677
|
throw new Error(`Failed to create main project worktree: ${result.stderr}`);
|
|
52494
53678
|
}
|
|
52495
|
-
|
|
53679
|
+
await this.resetAndCleanWorktree(targetPath, targetSha);
|
|
53680
|
+
logger.info(
|
|
53681
|
+
`Created main project worktree at ${targetPath} (${upstreamRef} -> ${targetSha.slice(0, 8)})`
|
|
53682
|
+
);
|
|
52496
53683
|
}
|
|
52497
53684
|
/**
|
|
52498
53685
|
* Remove main project worktree
|
|
@@ -52525,7 +53712,7 @@ var init_workspace_manager = __esm({
|
|
|
52525
53712
|
* Extract project name from path
|
|
52526
53713
|
*/
|
|
52527
53714
|
extractProjectName(dirPath) {
|
|
52528
|
-
return
|
|
53715
|
+
return path24.basename(dirPath);
|
|
52529
53716
|
}
|
|
52530
53717
|
/**
|
|
52531
53718
|
* Extract repository name from owner/repo format
|
|
@@ -52680,6 +53867,9 @@ async function initializeWorkspace(context2) {
|
|
|
52680
53867
|
process.env.VISOR_WORKSPACE_MAIN_PROJECT = info.mainProjectPath;
|
|
52681
53868
|
process.env.VISOR_WORKSPACE_MAIN_PROJECT_NAME = info.mainProjectName;
|
|
52682
53869
|
process.env.VISOR_ORIGINAL_WORKDIR = originalPath;
|
|
53870
|
+
const basePath = workspaceConfig?.base_path || process.env.VISOR_WORKSPACE_PATH || "/tmp/visor-workspaces";
|
|
53871
|
+
const existing = process.env.GIT_CEILING_DIRECTORIES;
|
|
53872
|
+
process.env.GIT_CEILING_DIRECTORIES = existing ? `${existing}:${basePath}` : basePath;
|
|
52683
53873
|
} catch {
|
|
52684
53874
|
}
|
|
52685
53875
|
logger.info(`[Workspace] Initialized workspace: ${info.workspacePath}`);
|
|
@@ -52714,1264 +53904,6 @@ var init_build_engine_context = __esm({
|
|
|
52714
53904
|
}
|
|
52715
53905
|
});
|
|
52716
53906
|
|
|
52717
|
-
// src/policy/default-engine.ts
|
|
52718
|
-
var DefaultPolicyEngine;
|
|
52719
|
-
var init_default_engine = __esm({
|
|
52720
|
-
"src/policy/default-engine.ts"() {
|
|
52721
|
-
"use strict";
|
|
52722
|
-
DefaultPolicyEngine = class {
|
|
52723
|
-
async initialize(_config) {
|
|
52724
|
-
}
|
|
52725
|
-
async evaluateCheckExecution(_checkId, _checkConfig) {
|
|
52726
|
-
return { allowed: true };
|
|
52727
|
-
}
|
|
52728
|
-
async evaluateToolInvocation(_serverName, _methodName, _transport) {
|
|
52729
|
-
return { allowed: true };
|
|
52730
|
-
}
|
|
52731
|
-
async evaluateCapabilities(_checkId, _capabilities) {
|
|
52732
|
-
return { allowed: true };
|
|
52733
|
-
}
|
|
52734
|
-
async shutdown() {
|
|
52735
|
-
}
|
|
52736
|
-
};
|
|
52737
|
-
}
|
|
52738
|
-
});
|
|
52739
|
-
|
|
52740
|
-
// src/enterprise/license/validator.ts
|
|
52741
|
-
var validator_exports = {};
|
|
52742
|
-
__export(validator_exports, {
|
|
52743
|
-
LicenseValidator: () => LicenseValidator
|
|
52744
|
-
});
|
|
52745
|
-
var crypto2, fs20, path24, LicenseValidator;
|
|
52746
|
-
var init_validator = __esm({
|
|
52747
|
-
"src/enterprise/license/validator.ts"() {
|
|
52748
|
-
"use strict";
|
|
52749
|
-
crypto2 = __toESM(require("crypto"));
|
|
52750
|
-
fs20 = __toESM(require("fs"));
|
|
52751
|
-
path24 = __toESM(require("path"));
|
|
52752
|
-
LicenseValidator = class _LicenseValidator {
|
|
52753
|
-
/** Ed25519 public key for license verification (PEM format). */
|
|
52754
|
-
static PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n-----END PUBLIC KEY-----\n";
|
|
52755
|
-
cache = null;
|
|
52756
|
-
static CACHE_TTL = 5 * 60 * 1e3;
|
|
52757
|
-
// 5 minutes
|
|
52758
|
-
static GRACE_PERIOD = 72 * 3600 * 1e3;
|
|
52759
|
-
// 72 hours after expiry
|
|
52760
|
-
/**
|
|
52761
|
-
* Load and validate license from environment or file.
|
|
52762
|
-
*
|
|
52763
|
-
* Resolution order:
|
|
52764
|
-
* 1. VISOR_LICENSE env var (JWT string)
|
|
52765
|
-
* 2. VISOR_LICENSE_FILE env var (path to file)
|
|
52766
|
-
* 3. .visor-license in project root (cwd)
|
|
52767
|
-
* 4. .visor-license in ~/.config/visor/
|
|
52768
|
-
*/
|
|
52769
|
-
async loadAndValidate() {
|
|
52770
|
-
if (this.cache && Date.now() - this.cache.validatedAt < _LicenseValidator.CACHE_TTL) {
|
|
52771
|
-
return this.cache.payload;
|
|
52772
|
-
}
|
|
52773
|
-
const token = this.resolveToken();
|
|
52774
|
-
if (!token) return null;
|
|
52775
|
-
const payload = this.verifyAndDecode(token);
|
|
52776
|
-
if (!payload) return null;
|
|
52777
|
-
this.cache = { payload, validatedAt: Date.now() };
|
|
52778
|
-
return payload;
|
|
52779
|
-
}
|
|
52780
|
-
/** Check if a specific feature is licensed */
|
|
52781
|
-
hasFeature(feature) {
|
|
52782
|
-
if (!this.cache) return false;
|
|
52783
|
-
return this.cache.payload.features.includes(feature);
|
|
52784
|
-
}
|
|
52785
|
-
/** Check if license is valid (with grace period) */
|
|
52786
|
-
isValid() {
|
|
52787
|
-
if (!this.cache) return false;
|
|
52788
|
-
const now = Date.now();
|
|
52789
|
-
const expiryMs = this.cache.payload.exp * 1e3;
|
|
52790
|
-
return now < expiryMs + _LicenseValidator.GRACE_PERIOD;
|
|
52791
|
-
}
|
|
52792
|
-
/** Check if the license is within its grace period (expired but still valid) */
|
|
52793
|
-
isInGracePeriod() {
|
|
52794
|
-
if (!this.cache) return false;
|
|
52795
|
-
const now = Date.now();
|
|
52796
|
-
const expiryMs = this.cache.payload.exp * 1e3;
|
|
52797
|
-
return now >= expiryMs && now < expiryMs + _LicenseValidator.GRACE_PERIOD;
|
|
52798
|
-
}
|
|
52799
|
-
/**
 * Locate the raw license token, checking sources in priority order:
 * 1. VISOR_LICENSE env var (token inline)
 * 2. VISOR_LICENSE_FILE env var (path, restricted to safe prefixes)
 * 3. `.visor-license` in the current working directory
 * 4. `~/.config/visor/.visor-license`
 * Returns the trimmed token string, or null if none is found or the
 * file path fails the safety check.
 */
resolveToken() {
  if (process.env.VISOR_LICENSE) {
    return process.env.VISOR_LICENSE.trim();
  }
  if (process.env.VISOR_LICENSE_FILE) {
    const resolved = path24.resolve(process.env.VISOR_LICENSE_FILE);
    const home2 = process.env.HOME || process.env.USERPROFILE || "";
    // Only allow license files under the working directory or ~/.config/visor.
    const allowedPrefixes = [path24.normalize(process.cwd())];
    if (home2) allowedPrefixes.push(path24.normalize(path24.join(home2, ".config", "visor")));
    let realPath;
    try {
      // Resolve symlinks BEFORE the prefix check so a link cannot
      // escape the allowed directories. Missing file -> null.
      realPath = fs20.realpathSync(resolved);
    } catch {
      return null;
    }
    const isSafe = allowedPrefixes.some(
      (prefix) => realPath === prefix || realPath.startsWith(prefix + path24.sep)
    );
    if (!isSafe) return null;
    return this.readFile(realPath);
  }
  // No env vars set: fall back to conventional file locations.
  const cwdPath = path24.join(process.cwd(), ".visor-license");
  const cwdToken = this.readFile(cwdPath);
  if (cwdToken) return cwdToken;
  const home = process.env.HOME || process.env.USERPROFILE || "";
  if (home) {
    const configPath = path24.join(home, ".config", "visor", ".visor-license");
    const configToken = this.readFile(configPath);
    if (configToken) return configToken;
  }
  return null;
}
|
|
52831
|
-
readFile(filePath) {
|
|
52832
|
-
try {
|
|
52833
|
-
return fs20.readFileSync(filePath, "utf-8").trim();
|
|
52834
|
-
} catch {
|
|
52835
|
-
return null;
|
|
52836
|
-
}
|
|
52837
|
-
}
|
|
52838
|
-
/**
 * Verify an EdDSA(Ed25519)-signed JWT license token and return its
 * decoded payload, or null on any structural, signature, schema, or
 * expiry failure. Never throws: all errors collapse to null.
 */
verifyAndDecode(token) {
  try {
    const parts = token.split(".");
    if (parts.length !== 3) return null;
    const [headerB64, payloadB64, signatureB64] = parts;
    const header = JSON.parse(Buffer.from(headerB64, "base64url").toString());
    // Pin the algorithm to EdDSA to prevent alg-substitution attacks.
    if (header.alg !== "EdDSA") return null;
    const data = `${headerB64}.${payloadB64}`;
    const signature = Buffer.from(signatureB64, "base64url");
    const publicKey = crypto2.createPublicKey(_LicenseValidator.PUBLIC_KEY);
    // Defense in depth: the embedded key must actually be ed25519.
    if (publicKey.asymmetricKeyType !== "ed25519") {
      return null;
    }
    // Ed25519 verify takes a null digest algorithm in Node's crypto API.
    const isValid = crypto2.verify(null, Buffer.from(data), publicKey, signature);
    if (!isValid) return null;
    const payload = JSON.parse(Buffer.from(payloadB64, "base64url").toString());
    // Schema check: required claims are org, features[], exp, iat, sub.
    if (!payload.org || !Array.isArray(payload.features) || typeof payload.exp !== "number" || typeof payload.iat !== "number" || !payload.sub) {
      return null;
    }
    const now = Date.now();
    const expiryMs = payload.exp * 1e3;
    // Reject only once the grace period past `exp` has fully elapsed.
    if (now >= expiryMs + _LicenseValidator.GRACE_PERIOD) {
      return null;
    }
    return payload;
  } catch {
    return null;
  }
}
|
|
52867
|
-
};
|
|
52868
|
-
}
|
|
52869
|
-
});
|
|
52870
|
-
|
|
52871
|
-
// src/enterprise/policy/opa-compiler.ts
// Resolves OPA policy inputs (.wasm / directories / .rego) to WASM bytes,
// auto-compiling .rego via the `opa` CLI with a content-hash disk cache.
var fs21, path25, os2, crypto3, import_child_process8, OpaCompiler;
var init_opa_compiler = __esm({
  "src/enterprise/policy/opa-compiler.ts"() {
    "use strict";
    fs21 = __toESM(require("fs"));
    path25 = __toESM(require("path"));
    os2 = __toESM(require("os"));
    crypto3 = __toESM(require("crypto"));
    import_child_process8 = require("child_process");
    OpaCompiler = class _OpaCompiler {
      // Shared cache directory for compiled policy bundles (under the OS tmpdir).
      static CACHE_DIR = path25.join(os2.tmpdir(), "visor-opa-cache");
      /**
       * Resolve the input paths to WASM bytes.
       *
       * Strategy:
       * 1. If any path is a .wasm file, read it directly
       * 2. If a directory contains policy.wasm, read it
       * 3. Otherwise, collect all .rego files and auto-compile via `opa build`
       *
       * @param paths - list of file/directory paths to inspect
       * @returns Buffer of WASM bytes
       * @throws if a path contains traversal sequences or nothing usable is found
       */
      async resolveWasmBytes(paths) {
        const regoFiles = [];
        for (const p of paths) {
          const resolved = path25.resolve(p);
          // Reject paths that still contain ".." after normalization.
          if (path25.normalize(resolved).includes("..")) {
            throw new Error(`Policy path contains traversal sequences: ${p}`);
          }
          // Case 1: a pre-compiled .wasm file wins immediately.
          if (resolved.endsWith(".wasm") && fs21.existsSync(resolved)) {
            return fs21.readFileSync(resolved);
          }
          // Missing paths are silently skipped rather than fatal.
          if (!fs21.existsSync(resolved)) continue;
          const stat2 = fs21.statSync(resolved);
          if (stat2.isDirectory()) {
            // Case 2: directory containing a prebuilt policy.wasm.
            const wasmCandidate = path25.join(resolved, "policy.wasm");
            if (fs21.existsSync(wasmCandidate)) {
              return fs21.readFileSync(wasmCandidate);
            }
            // Otherwise collect the directory's .rego files (non-recursive).
            const files = fs21.readdirSync(resolved);
            for (const f of files) {
              if (f.endsWith(".rego")) {
                regoFiles.push(path25.join(resolved, f));
              }
            }
          } else if (resolved.endsWith(".rego")) {
            regoFiles.push(resolved);
          }
        }
        if (regoFiles.length === 0) {
          throw new Error(
            `OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(", ")}`
          );
        }
        // Case 3: compile the collected .rego sources.
        return this.compileRego(regoFiles);
      }
      /**
       * Auto-compile .rego files to a WASM bundle using the `opa` CLI.
       *
       * Caches the compiled bundle based on a content hash of all input .rego files
       * so subsequent runs skip compilation if policies haven't changed.
       *
       * @param regoFiles - absolute paths of .rego sources (NOTE: sorted in place)
       * @returns Buffer of the extracted policy.wasm
       * @throws if `opa` is missing, the build fails, or extraction fails
       */
      compileRego(regoFiles) {
        try {
          // Probe for the OPA CLI before doing any work.
          (0, import_child_process8.execFileSync)("opa", ["version"], { stdio: "pipe" });
        } catch {
          throw new Error(
            "OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\nOr pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz " + regoFiles.join(" ")
          );
        }
        // Cache key: sha256 over each file's contents and its path, in sorted order.
        const hash = crypto3.createHash("sha256");
        for (const f of regoFiles.sort()) {
          hash.update(fs21.readFileSync(f));
          hash.update(f);
        }
        const cacheKey = hash.digest("hex").slice(0, 16);
        const cacheDir = _OpaCompiler.CACHE_DIR;
        const cachedWasm = path25.join(cacheDir, `${cacheKey}.wasm`);
        // Cache hit: reuse the previously extracted wasm.
        if (fs21.existsSync(cachedWasm)) {
          return fs21.readFileSync(cachedWasm);
        }
        fs21.mkdirSync(cacheDir, { recursive: true });
        const bundleTar = path25.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
        try {
          const args = [
            "build",
            "-t",
            "wasm",
            "-e",
            "visor",
            // entrypoint: the visor package tree
            "-o",
            bundleTar,
            ...regoFiles
          ];
          (0, import_child_process8.execFileSync)("opa", args, {
            stdio: "pipe",
            timeout: 3e4
          });
        } catch (err) {
          const stderr = err?.stderr?.toString() || "";
          throw new Error(
            `Failed to compile .rego files to WASM:
${stderr}
Ensure your .rego files are valid and the \`opa\` CLI is installed.`
          );
        }
        // Extract policy.wasm from the tarball. GNU tar stores the member as
        // "/policy.wasm", BSD tar as "policy.wasm" — try both spellings.
        try {
          (0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "/policy.wasm"], {
            stdio: "pipe"
          });
          const extractedWasm = path25.join(cacheDir, "policy.wasm");
          if (fs21.existsSync(extractedWasm)) {
            fs21.renameSync(extractedWasm, cachedWasm);
          }
        } catch {
          try {
            (0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "policy.wasm"], {
              stdio: "pipe"
            });
            const extractedWasm = path25.join(cacheDir, "policy.wasm");
            if (fs21.existsSync(extractedWasm)) {
              fs21.renameSync(extractedWasm, cachedWasm);
            }
          } catch (err2) {
            throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
          }
        }
        // Best-effort cleanup of the intermediate tarball.
        try {
          fs21.unlinkSync(bundleTar);
        } catch {
        }
        if (!fs21.existsSync(cachedWasm)) {
          throw new Error("OPA build succeeded but policy.wasm was not found in the bundle");
        }
        return fs21.readFileSync(cachedWasm);
      }
    };
  }
});
|
|
53009
|
-
|
|
53010
|
-
// src/enterprise/policy/opa-wasm-evaluator.ts
// Evaluates OPA policies in-process via @open-policy-agent/opa-wasm
// (an optional runtime dependency, loaded lazily at initialize time).
var fs22, path26, OpaWasmEvaluator;
var init_opa_wasm_evaluator = __esm({
  "src/enterprise/policy/opa-wasm-evaluator.ts"() {
    "use strict";
    fs22 = __toESM(require("fs"));
    path26 = __toESM(require("path"));
    init_opa_compiler();
    OpaWasmEvaluator = class {
      // Loaded WASM policy instance; null until initialize() succeeds.
      policy = null;
      // External data document passed to the policy via setData().
      dataDocument = {};
      compiler = new OpaCompiler();
      /**
       * Compile/resolve the policy sources to WASM and load them.
       * @param rulesPath - a path or array of paths (.wasm, directory, or .rego)
       * @throws if the opa-wasm package is not installed or loading fails
       */
      async initialize(rulesPath) {
        const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
        const wasmBytes = await this.compiler.resolveWasmBytes(paths);
        try {
          // Use createRequire so the optional dependency resolves relative to
          // this bundled file rather than the bundler's static graph.
          const { createRequire } = require("module");
          const runtimeRequire = createRequire(__filename);
          const opaWasm = runtimeRequire("@open-policy-agent/opa-wasm");
          // The package may expose loadPolicy directly or on a default export.
          const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
          if (!loadPolicy) {
            throw new Error("loadPolicy not found in @open-policy-agent/opa-wasm");
          }
          this.policy = await loadPolicy(wasmBytes);
        } catch (err) {
          if (err?.code === "MODULE_NOT_FOUND" || err?.code === "ERR_MODULE_NOT_FOUND") {
            throw new Error(
              "OPA WASM evaluator requires @open-policy-agent/opa-wasm. Install it with: npm install @open-policy-agent/opa-wasm"
            );
          }
          throw err;
        }
      }
      /**
       * Load external data from a JSON file to use as the OPA data document.
       * The loaded data will be passed to `policy.setData()` during evaluation,
       * making it available in Rego via `data.<key>`.
       * @throws on traversal paths, missing file, >10MB file, invalid JSON,
       *         or a top-level value that is not a plain object
       */
      loadData(dataPath) {
        const resolved = path26.resolve(dataPath);
        if (path26.normalize(resolved).includes("..")) {
          throw new Error(`Data path contains traversal sequences: ${dataPath}`);
        }
        if (!fs22.existsSync(resolved)) {
          throw new Error(`OPA data file not found: ${resolved}`);
        }
        const stat2 = fs22.statSync(resolved);
        // Hard 10MB limit guards against accidentally loading huge files.
        if (stat2.size > 10 * 1024 * 1024) {
          throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat2.size} bytes)`);
        }
        const raw = fs22.readFileSync(resolved, "utf-8");
        try {
          const parsed = JSON.parse(raw);
          if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
            throw new Error("OPA data file must contain a JSON object (not an array or primitive)");
          }
          this.dataDocument = parsed;
        } catch (err) {
          // Re-throw our own shape error verbatim; wrap JSON parse errors.
          // NOTE(review): this distinguishes errors by message prefix — fragile
          // if the message text ever changes.
          if (err.message.startsWith("OPA data file must")) {
            throw err;
          }
          throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
        }
      }
      /**
       * Evaluate the loaded policy against an input document.
       * Returns the first result of the OPA result set, or undefined when
       * the result set is empty.
       */
      async evaluate(input) {
        if (!this.policy) {
          throw new Error("OPA WASM evaluator not initialized");
        }
        // setData is applied on every call so loadData() after initialize() works.
        this.policy.setData(this.dataDocument);
        const resultSet = this.policy.evaluate(input);
        if (Array.isArray(resultSet) && resultSet.length > 0) {
          return resultSet[0].result;
        }
        return void 0;
      }
      /**
       * Release the WASM policy. Tries close() then free() since different
       * opa-wasm versions expose different disposal methods; errors ignored.
       */
      async shutdown() {
        if (this.policy) {
          if (typeof this.policy.close === "function") {
            try {
              this.policy.close();
            } catch {
            }
          } else if (typeof this.policy.free === "function") {
            try {
              this.policy.free();
            } catch {
            }
          }
        }
        this.policy = null;
      }
    };
  }
});
|
|
53104
|
-
|
|
53105
|
-
// src/enterprise/policy/opa-http-evaluator.ts
// Evaluates policies against a remote OPA server via its REST Data API,
// with SSRF guards on the configured base URL.
var OpaHttpEvaluator;
var init_opa_http_evaluator = __esm({
  "src/enterprise/policy/opa-http-evaluator.ts"() {
    "use strict";
    OpaHttpEvaluator = class {
      // Base OPA server URL with trailing slashes stripped.
      baseUrl;
      // Per-request timeout in milliseconds.
      timeout;
      /**
       * @param baseUrl - OPA server URL (http/https only, validated for SSRF)
       * @param timeout - request timeout in ms (default 5000)
       * @throws on invalid URL, non-http(s) protocol, or blocked hostname
       */
      constructor(baseUrl, timeout = 5e3) {
        let parsed;
        try {
          parsed = new URL(baseUrl);
        } catch {
          throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
        }
        if (!["http:", "https:"].includes(parsed.protocol)) {
          throw new Error(
            `OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`
          );
        }
        const hostname = parsed.hostname;
        if (this.isBlockedHostname(hostname)) {
          throw new Error(
            `OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`
          );
        }
        this.baseUrl = baseUrl.replace(/\/+$/, "");
        this.timeout = timeout;
      }
      /**
       * Check if a hostname is blocked due to SSRF concerns.
       *
       * Blocks:
       * - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
       * - Link-local addresses (169.254.x.x)
       * - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
       * - IPv6 unique local addresses (fd00::/8)
       * - Cloud metadata services (*.internal)
       *
       * NOTE(review): this is a literal-hostname check only — it does not
       * resolve DNS, so a public name pointing at a private IP is not caught.
       */
      isBlockedHostname(hostname) {
        if (!hostname) return true;
        // Strip IPv6 brackets and lowercase for comparison.
        const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, "");
        if (normalized === "metadata.google.internal" || normalized.endsWith(".internal")) {
          return true;
        }
        if (normalized === "localhost" || normalized === "localhost.localdomain") {
          return true;
        }
        if (normalized === "::1" || normalized === "0:0:0:0:0:0:0:1") {
          return true;
        }
        const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
        const ipv4Match = normalized.match(ipv4Pattern);
        if (ipv4Match) {
          const octets = ipv4Match.slice(1, 5).map(Number);
          // An octet > 255 means this isn't a real IPv4 literal; treat it
          // as an ordinary hostname (not blocked) — DNS will fail anyway.
          if (octets.some((octet) => octet > 255)) {
            return false;
          }
          const [a, b] = octets;
          if (a === 127) {
            return true;
          }
          if (a === 0) {
            return true;
          }
          if (a === 169 && b === 254) {
            return true;
          }
          if (a === 10) {
            return true;
          }
          if (a === 172 && b >= 16 && b <= 31) {
            return true;
          }
          if (a === 192 && b === 168) {
            return true;
          }
        }
        // IPv6 unique local (fc00::/7) — prefix match on the string form.
        if (normalized.startsWith("fd") || normalized.startsWith("fc")) {
          return true;
        }
        // IPv6 link-local.
        if (normalized.startsWith("fe80:")) {
          return true;
        }
        return false;
      }
      /**
       * Evaluate a policy rule against an input document via OPA REST API.
       *
       * @param input - The input document to evaluate
       * @param rulePath - OPA rule path (e.g., 'visor/check/execute')
       * @returns The result object from OPA, or undefined on error
       * @throws on non-2xx HTTP status, unparsable JSON body, or timeout abort
       */
      async evaluate(input, rulePath) {
        // URL-encode each path segment individually, preserving the slashes.
        const encodedPath = rulePath.split("/").map((s) => encodeURIComponent(s)).join("/");
        const url = `${this.baseUrl}/v1/data/${encodedPath}`;
        // Abort the fetch after this.timeout ms.
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), this.timeout);
        try {
          const response = await fetch(url, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ input }),
            signal: controller.signal
          });
          if (!response.ok) {
            throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
          }
          let body;
          try {
            body = await response.json();
          } catch (jsonErr) {
            throw new Error(
              `OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`
            );
          }
          return body?.result;
        } finally {
          clearTimeout(timer);
        }
      }
      // No persistent resources to release for the HTTP transport.
      async shutdown() {
      }
    };
  }
});
|
|
53231
|
-
|
|
53232
|
-
// src/enterprise/policy/policy-input-builder.ts
// Builds the `input` documents sent to the OPA policy engine for each
// decision scope (check execution, tool invocation, capability resolve).
var PolicyInputBuilder;
var init_policy_input_builder = __esm({
  "src/enterprise/policy/policy-input-builder.ts"() {
    "use strict";
    PolicyInputBuilder = class {
      // Role definitions from the policy config (name -> matching criteria).
      roles;
      // Identity of the acting user (GitHub/Slack/local).
      actor;
      // Repository context (owner, name, branches, event).
      repository;
      // Pull request context (number, etc.).
      pullRequest;
      constructor(policyConfig, actor, repository, pullRequest) {
        this.roles = policyConfig.roles || {};
        this.actor = actor;
        this.repository = repository;
        this.pullRequest = pullRequest;
      }
      /**
       * Resolve which roles apply to the current actor.
       * A role matches when ANY identity criterion matches (author association,
       * GitHub login, Slack user id, or email, checked in that order) AND,
       * if slack_channels is non-empty, the actor's channel is listed.
       * @returns array of matching role names
       */
      resolveRoles() {
        const matched = [];
        for (const [roleName, roleConfig] of Object.entries(this.roles)) {
          let identityMatch = false;
          if (roleConfig.author_association && this.actor.authorAssociation && roleConfig.author_association.includes(this.actor.authorAssociation)) {
            identityMatch = true;
          }
          if (!identityMatch && roleConfig.users && this.actor.login && roleConfig.users.includes(this.actor.login)) {
            identityMatch = true;
          }
          if (!identityMatch && roleConfig.slack_users && this.actor.slack?.userId && roleConfig.slack_users.includes(this.actor.slack.userId)) {
            identityMatch = true;
          }
          if (!identityMatch && roleConfig.emails && this.actor.slack?.email) {
            // Email comparison is case-insensitive.
            const actorEmail = this.actor.slack.email.toLowerCase();
            if (roleConfig.emails.some((e) => e.toLowerCase() === actorEmail)) {
              identityMatch = true;
            }
          }
          if (!identityMatch) continue;
          // Channel restriction is an additional AND condition on top of identity.
          if (roleConfig.slack_channels && roleConfig.slack_channels.length > 0) {
            if (!this.actor.slack?.channelId || !roleConfig.slack_channels.includes(this.actor.slack.channelId)) {
              continue;
            }
          }
          matched.push(roleName);
        }
        return matched;
      }
      /** Build the actor section shared by all policy inputs. */
      buildActor() {
        return {
          authorAssociation: this.actor.authorAssociation,
          login: this.actor.login,
          roles: this.resolveRoles(),
          isLocalMode: this.actor.isLocalMode,
          // Only include the slack sub-object when present.
          ...this.actor.slack && { slack: this.actor.slack }
        };
      }
      /** Policy input for the `check.execute` decision scope. */
      forCheckExecution(check) {
        return {
          scope: "check.execute",
          check: {
            id: check.id,
            type: check.type,
            group: check.group,
            tags: check.tags,
            criticality: check.criticality,
            sandbox: check.sandbox,
            policy: check.policy
          },
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      /** Policy input for the `tool.invoke` decision scope. */
      forToolInvocation(serverName, methodName, transport) {
        return {
          scope: "tool.invoke",
          tool: { serverName, methodName, transport },
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
      /** Policy input for the `capability.resolve` decision scope. */
      forCapabilityResolve(checkId, capabilities) {
        return {
          scope: "capability.resolve",
          check: { id: checkId, type: "ai" },
          capability: capabilities,
          actor: this.buildActor(),
          repository: this.repository,
          pullRequest: this.pullRequest
        };
      }
    };
  }
});
|
|
53326
|
-
|
|
53327
|
-
// src/enterprise/policy/opa-policy-engine.ts
// Orchestrates policy evaluation: picks a local (WASM) or remote (HTTP)
// evaluator, builds inputs, applies timeouts, and maps results to
// allow/deny/warn decisions according to the configured fallback.
var opa_policy_engine_exports = {};
__export(opa_policy_engine_exports, {
  OpaPolicyEngine: () => OpaPolicyEngine
});
var OpaPolicyEngine;
var init_opa_policy_engine = __esm({
  "src/enterprise/policy/opa-policy-engine.ts"() {
    "use strict";
    init_opa_wasm_evaluator();
    init_opa_http_evaluator();
    init_policy_input_builder();
    OpaPolicyEngine = class {
      // Active evaluator (WASM or HTTP); null means "no engine = allow all".
      evaluator = null;
      // Behavior on evaluation failure/no-result: "deny" | "allow" | "warn".
      fallback;
      // Evaluation timeout in ms.
      timeout;
      config;
      inputBuilder = null;
      logger = null;
      constructor(config) {
        this.config = config;
        this.fallback = config.fallback || "deny";
        this.timeout = config.timeout || 5e3;
      }
      /**
       * Set up the evaluator and the policy input context from config and
       * GitHub Actions environment variables.
       * @throws if local mode lacks `rules` or remote mode lacks `url`
       */
      async initialize(config) {
        try {
          // Lazily wire the logger; tolerate environments where it's absent.
          this.logger = (init_logger(), __toCommonJS(logger_exports)).logger;
        } catch {
        }
        const actor = {
          authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
          login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
          // Local mode = not running inside GitHub Actions.
          isLocalMode: !process.env.GITHUB_ACTIONS
        };
        const repo = {
          owner: process.env.GITHUB_REPOSITORY_OWNER,
          name: process.env.GITHUB_REPOSITORY?.split("/")[1],
          branch: process.env.GITHUB_HEAD_REF,
          baseBranch: process.env.GITHUB_BASE_REF,
          event: process.env.GITHUB_EVENT_NAME
        };
        const prNum = process.env.GITHUB_PR_NUMBER ? parseInt(process.env.GITHUB_PR_NUMBER, 10) : void 0;
        const pullRequest = {
          // Guard against NaN from an unparsable GITHUB_PR_NUMBER.
          number: prNum !== void 0 && Number.isFinite(prNum) ? prNum : void 0
        };
        this.inputBuilder = new PolicyInputBuilder(config, actor, repo, pullRequest);
        if (config.engine === "local") {
          if (!config.rules) {
            throw new Error("OPA local mode requires `policy.rules` path to .wasm or .rego files");
          }
          const wasm = new OpaWasmEvaluator();
          await wasm.initialize(config.rules);
          if (config.data) {
            wasm.loadData(config.data);
          }
          this.evaluator = wasm;
        } else if (config.engine === "remote") {
          if (!config.url) {
            throw new Error("OPA remote mode requires `policy.url` pointing to OPA server");
          }
          this.evaluator = new OpaHttpEvaluator(config.url, this.timeout);
        } else {
          // No/unknown engine: policy checks become no-ops (allow).
          this.evaluator = null;
        }
      }
      /**
       * Update actor/repo/PR context (e.g., after PR info becomes available).
       * Called by the enterprise loader when engine context is enriched.
       */
      setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new PolicyInputBuilder(this.config, actor, repo, pullRequest);
      }
      /** Decide whether a check may execute; allow when no engine is configured. */
      async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const cfg = checkConfig && typeof checkConfig === "object" ? checkConfig : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
          id: checkId,
          type: cfg.type || "ai",
          group: cfg.group,
          tags: cfg.tags,
          criticality: cfg.criticality,
          sandbox: cfg.sandbox,
          policy: policyOverride
        });
        return this.doEvaluate(input, this.resolveRulePath("check.execute", policyOverride?.rule));
      }
      /** Decide whether a tool/method may be invoked. */
      async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, "visor/tool/invoke");
      }
      /** Decide which capabilities a check may resolve. */
      async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder) return { allowed: true };
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, "visor/capability/resolve");
      }
      /** Dispose the evaluator and clear all state. */
      async shutdown() {
        if (this.evaluator && "shutdown" in this.evaluator) {
          await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
      }
      /**
       * Map a decision scope (or per-check override) to an OPA rule path,
       * ensuring the `visor/` package prefix.
       */
      resolveRulePath(defaultScope, override) {
        if (override) {
          return override.startsWith("visor/") ? override : `visor/${override}`;
        }
        return `visor/${defaultScope.replace(/\./g, "/")}`;
      }
      /**
       * Run one evaluation with a timeout race, then apply fallback semantics:
       * - fallback "warn" converts denials into allowed-with-warning
       * - any failure (including timeout) yields allow/deny per fallback
       */
      async doEvaluate(input, rulePath) {
        try {
          this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
          let timer;
          const timeoutPromise = new Promise((_resolve, reject) => {
            timer = setTimeout(() => reject(new Error("policy evaluation timeout")), this.timeout);
          });
          try {
            const result = await Promise.race([this.rawEvaluate(input, rulePath), timeoutPromise]);
            const decision = this.parseDecision(result);
            if (!decision.allowed && this.fallback === "warn") {
              decision.allowed = true;
              decision.warn = true;
              decision.reason = `audit: ${decision.reason || "policy denied"}`;
            }
            this.logger?.debug(
              `[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || "none"}`
            );
            return decision;
          } finally {
            // Always clear the race timer so the process can exit promptly.
            if (timer) clearTimeout(timer);
          }
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: `policy evaluation failed, fallback=${this.fallback}`
          };
        }
      }
      /**
       * Dispatch to the active evaluator. The WASM evaluator returns the whole
       * package tree, so its result is navigated down to the rule; the HTTP
       * evaluator queries the rule path directly.
       */
      async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof OpaWasmEvaluator) {
          const result = await this.evaluator.evaluate(input);
          return this.navigateWasmResult(result, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
      }
      /**
       * Navigate nested OPA WASM result tree to reach the specific rule's output.
       * The WASM entrypoint `-e visor` means the result root IS the visor package,
       * so we strip the `visor/` prefix and walk the remaining segments.
       */
      navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== "object") return result;
        const segments = rulePath.replace(/^visor\//, "").split("/");
        let current = result;
        for (const seg of segments) {
          if (current && typeof current === "object" && seg in current) {
            current = current[seg];
          } else {
            // Missing segment: no result for this rule.
            return void 0;
          }
        }
        return current;
      }
      /**
       * Convert a raw rule result into a decision object.
       * No result at all is treated per the fallback; otherwise anything
       * except `allowed: false` counts as allowed.
       */
      parseDecision(result) {
        if (result === void 0 || result === null) {
          return {
            allowed: this.fallback === "allow" || this.fallback === "warn",
            warn: this.fallback === "warn" ? true : void 0,
            reason: this.fallback === "warn" ? "audit: no policy result" : "no policy result"
          };
        }
        const allowed = result.allowed !== false;
        const decision = {
          allowed,
          reason: result.reason
        };
        if (result.capabilities) {
          decision.capabilities = result.capabilities;
        }
        return decision;
      }
    };
  }
});
|
|
53515
|
-
|
|
53516
|
-
// src/enterprise/scheduler/knex-store.ts
// Lazily-initialized export surface for the knex-backed schedule store.
var knex_store_exports = {};
__export(knex_store_exports, {
  KnexStoreBackend: () => KnexStoreBackend
});
|
|
53521
|
-
/**
 * Normalize a DB-returned numeric column to a JS number.
 * null/undefined become undefined; strings (e.g. pg bigints) are
 * parsed base-10; anything else is returned unchanged.
 */
function toNum(val) {
  if (val == null) return void 0;
  if (typeof val === "string") {
    return parseInt(val, 10);
  }
  return val;
}
|
|
53525
|
-
/**
 * Parse a JSON column value, returning undefined instead of throwing.
 * Falsy inputs (null, undefined, empty string) also yield undefined.
 */
function safeJsonParse2(value) {
  if (!value) {
    return void 0;
  }
  let parsed;
  try {
    parsed = JSON.parse(value);
  } catch {
    return void 0;
  }
  return parsed;
}
|
|
53533
|
-
/**
 * Map a snake_case schedule DB row to the camelCase in-memory shape.
 * Nullable text columns become undefined; numeric columns go through
 * toNum(); JSON columns are parsed leniently via safeJsonParse2().
 */
function fromDbRow2(row) {
  const {
    id,
    creator_id,
    creator_context,
    creator_name,
    timezone,
    schedule_expr,
    run_at,
    is_recurring,
    original_expression,
    workflow,
    workflow_inputs,
    output_context,
    status,
    created_at,
    last_run_at,
    next_run_at,
    run_count,
    failure_count,
    last_error,
    previous_response
  } = row;
  return {
    id,
    creatorId: creator_id,
    creatorContext: creator_context ?? void 0,
    creatorName: creator_name ?? void 0,
    timezone,
    schedule: schedule_expr,
    runAt: toNum(run_at),
    // Booleans may arrive as 1/0 from SQLite/MySQL.
    isRecurring: is_recurring === true || is_recurring === 1,
    originalExpression: original_expression,
    workflow: workflow ?? void 0,
    workflowInputs: safeJsonParse2(workflow_inputs),
    outputContext: safeJsonParse2(output_context),
    status,
    createdAt: toNum(created_at),
    lastRunAt: toNum(last_run_at),
    nextRunAt: toNum(next_run_at),
    runCount: run_count,
    failureCount: failure_count,
    lastError: last_error ?? void 0,
    previousResponse: previous_response ?? void 0
  };
}
|
|
53557
|
-
/**
 * Map an in-memory schedule object to a snake_case DB insert row.
 * Missing optional fields are stored as NULL; workflow inputs and
 * output context are serialized to JSON text when truthy.
 */
function toInsertRow(schedule) {
  const jsonOrNull = (obj) => (obj ? JSON.stringify(obj) : null);
  return {
    id: schedule.id,
    creator_id: schedule.creatorId,
    creator_context: schedule.creatorContext ?? null,
    creator_name: schedule.creatorName ?? null,
    timezone: schedule.timezone,
    schedule_expr: schedule.schedule,
    run_at: schedule.runAt ?? null,
    is_recurring: schedule.isRecurring,
    original_expression: schedule.originalExpression,
    workflow: schedule.workflow ?? null,
    workflow_inputs: jsonOrNull(schedule.workflowInputs),
    output_context: jsonOrNull(schedule.outputContext),
    status: schedule.status,
    created_at: schedule.createdAt,
    last_run_at: schedule.lastRunAt ?? null,
    next_run_at: schedule.nextRunAt ?? null,
    run_count: schedule.runCount,
    failure_count: schedule.failureCount,
    last_error: schedule.lastError ?? null,
    previous_response: schedule.previousResponse ?? null
  };
}
|
|
53581
|
-
// Module-level slots for the enterprise knex-backed schedule store; populated
// lazily by init_knex_store() on first use (bundler __esm pattern).
var fs23, path27, import_uuid2, KnexStoreBackend;
var init_knex_store = __esm({
  "src/enterprise/scheduler/knex-store.ts"() {
    "use strict";
    fs23 = __toESM(require("fs"));
    path27 = __toESM(require("path"));
    import_uuid2 = require("uuid");
    init_logger();
    // Schedule store backend that persists schedules and distributed-lock rows
    // in a SQL database (postgresql / mysql / mssql) through the `knex` query
    // builder, which is loaded at runtime and is an optional dependency.
    KnexStoreBackend = class {
      // Lazily-created knex instance; null until initialize() succeeds.
      knex = null;
      driver;
      connection;
      constructor(driver, storageConfig, _haConfig) {
        this.driver = driver;
        this.connection = storageConfig.connection || {};
      }
      // Load knex at runtime, build the driver-specific connection config,
      // create the pool, and ensure the schema exists.
      // Throws a descriptive error when `knex` is not installed.
      async initialize() {
        const { createRequire } = require("module");
        const runtimeRequire = createRequire(__filename);
        let knexFactory;
        try {
          knexFactory = runtimeRequire("knex");
        } catch (err) {
          const code = err?.code;
          if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
            throw new Error(
              "knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
            );
          }
          // Any other load failure (e.g. broken install) propagates as-is.
          throw err;
        }
        // Map our driver names onto the knex client adapters.
        const clientMap = {
          postgresql: "pg",
          mysql: "mysql2",
          mssql: "tedious"
        };
        const client = clientMap[this.driver];
        let connection;
        // A raw connection string takes precedence over discrete fields.
        if (this.connection.connection_string) {
          connection = this.connection.connection_string;
        } else if (this.driver === "mssql") {
          connection = this.buildMssqlConnection();
        } else {
          connection = this.buildStandardConnection();
        }
        this.knex = knexFactory({
          client,
          connection,
          pool: {
            min: this.connection.pool?.min ?? 0,
            max: this.connection.pool?.max ?? 10
          }
        });
        await this.migrateSchema();
        logger.info(`[KnexStore] Initialized (${this.driver})`);
      }
      // Connection object for pg/mysql2 clients (host/port/database/user/ssl).
      buildStandardConnection() {
        return {
          host: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          ssl: this.resolveSslConfig()
        };
      }
      // Connection object for the tedious (MSSQL) client, which expresses TLS
      // via options.encrypt / options.trustServerCertificate instead of `ssl`.
      buildMssqlConnection() {
        const ssl = this.connection.ssl;
        const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
        return {
          server: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          options: {
            encrypt: sslEnabled,
            // Trust self-signed certs only when explicitly configured (or when
            // encryption is off entirely).
            trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
          }
        };
      }
      // Translate the config's ssl section into the pg/mysql2 `ssl` option:
      // false (disabled), { rejectUnauthorized } with optional inlined
      // ca/cert/key PEM contents read from disk.
      resolveSslConfig() {
        const ssl = this.connection.ssl;
        if (ssl === false || ssl === void 0) return false;
        if (ssl === true) return { rejectUnauthorized: true };
        if (ssl.enabled === false) return false;
        const result = {
          rejectUnauthorized: ssl.reject_unauthorized !== false
        };
        if (ssl.ca) {
          const caPath = this.validateSslPath(ssl.ca, "CA certificate");
          result.ca = fs23.readFileSync(caPath, "utf8");
        }
        if (ssl.cert) {
          const certPath = this.validateSslPath(ssl.cert, "client certificate");
          result.cert = fs23.readFileSync(certPath, "utf8");
        }
        if (ssl.key) {
          const keyPath = this.validateSslPath(ssl.key, "client key");
          result.key = fs23.readFileSync(keyPath, "utf8");
        }
        return result;
      }
      // Resolve an SSL material path and verify it exists before reading it.
      // NOTE(review): path.resolve already returns a normalized path, so the
      // `resolved !== normalize(resolved)` guard appears to always pass —
      // confirm whether a stronger traversal check was intended here.
      validateSslPath(filePath, label) {
        const resolved = path27.resolve(filePath);
        if (resolved !== path27.normalize(resolved)) {
          throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
        }
        if (!fs23.existsSync(resolved)) {
          throw new Error(`SSL ${label} not found: ${filePath}`);
        }
        return resolved;
      }
      // Tear down the connection pool; safe to call when never initialized.
      async shutdown() {
        if (this.knex) {
          await this.knex.destroy();
          this.knex = null;
        }
      }
      // Create the `schedules` and `scheduler_locks` tables if absent.
      // Idempotent: existing tables are left untouched (no column migration).
      async migrateSchema() {
        const knex = this.getKnex();
        const exists = await knex.schema.hasTable("schedules");
        if (!exists) {
          await knex.schema.createTable("schedules", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.string("timezone", 64).notNullable().defaultTo("UTC");
            table.string("schedule_expr", 255);
            table.bigInteger("run_at");
            table.boolean("is_recurring").notNullable();
            table.text("original_expression");
            table.string("workflow", 255);
            table.text("workflow_inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().index();
            table.bigInteger("created_at").notNullable();
            table.bigInteger("last_run_at");
            table.bigInteger("next_run_at");
            table.integer("run_count").notNullable().defaultTo(0);
            table.integer("failure_count").notNullable().defaultTo(0);
            table.text("last_error");
            table.text("previous_response");
            // Composite index serving the getDueSchedules() polling query.
            table.index(["status", "next_run_at"]);
          });
        }
        const locksExist = await knex.schema.hasTable("scheduler_locks");
        if (!locksExist) {
          await knex.schema.createTable("scheduler_locks", (table) => {
            table.string("lock_id", 255).primary();
            table.string("node_id", 255).notNullable();
            table.string("lock_token", 36).notNullable();
            table.bigInteger("acquired_at").notNullable();
            table.bigInteger("expires_at").notNullable();
          });
        }
      }
      // Guarded accessor: all query methods go through this so misuse before
      // initialize() fails loudly instead of dereferencing null.
      getKnex() {
        if (!this.knex) {
          throw new Error("[KnexStore] Not initialized. Call initialize() first.");
        }
        return this.knex;
      }
      // --- CRUD ---
      // Insert a new schedule; server assigns id/createdAt/counters/status,
      // overriding anything the caller passed for those fields.
      async create(schedule) {
        const knex = this.getKnex();
        const newSchedule = {
          ...schedule,
          id: (0, import_uuid2.v4)(),
          createdAt: Date.now(),
          runCount: 0,
          failureCount: 0,
          status: "active"
        };
        await knex("schedules").insert(toInsertRow(newSchedule));
        logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
        return newSchedule;
      }
      // Insert a pre-built schedule verbatim, skipping silently if the id
      // already exists (used for migration/import flows).
      async importSchedule(schedule) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", schedule.id).first();
        if (existing) return;
        await knex("schedules").insert(toInsertRow(schedule));
      }
      // Fetch one schedule by id; undefined when not found.
      async get(id) {
        const knex = this.getKnex();
        const row = await knex("schedules").where("id", id).first();
        return row ? fromDbRow2(row) : void 0;
      }
      // Read-modify-write partial update; the id is pinned to the stored value
      // so a patch can never re-key the row. Returns undefined when missing.
      async update(id, patch) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", id).first();
        if (!existing) return void 0;
        const current = fromDbRow2(existing);
        const updated = { ...current, ...patch, id: current.id };
        const row = toInsertRow(updated);
        // Never include the primary key in the UPDATE payload.
        delete row.id;
        await knex("schedules").where("id", id).update(row);
        return updated;
      }
      // Delete by id; true when a row was actually removed.
      async delete(id) {
        const knex = this.getKnex();
        const deleted = await knex("schedules").where("id", id).del();
        if (deleted > 0) {
          logger.info(`[KnexStore] Deleted schedule ${id}`);
          return true;
        }
        return false;
      }
      // --- Queries ---
      async getByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("creator_id", creatorId);
        return rows.map((r) => fromDbRow2(r));
      }
      async getActiveSchedules() {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("status", "active");
        return rows.map((r) => fromDbRow2(r));
      }
      // Active schedules whose trigger time has arrived: one-shot rows use
      // run_at, recurring rows use next_run_at. MSSQL has no native boolean,
      // so booleans are compared as 0/1 there.
      async getDueSchedules(now) {
        const ts = now ?? Date.now();
        const knex = this.getKnex();
        const bFalse = this.driver === "mssql" ? 0 : false;
        const bTrue = this.driver === "mssql" ? 1 : true;
        const rows = await knex("schedules").where("status", "active").andWhere(function() {
          this.where(function() {
            this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
          }).orWhere(function() {
            this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
          });
        });
        return rows.map((r) => fromDbRow2(r));
      }
      // Case-insensitive substring match on the workflow name for one creator's
      // active schedules. LIKE wildcards in the needle are escaped first.
      async findByWorkflow(creatorId, workflowName) {
        const knex = this.getKnex();
        const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
        const pattern = `%${escaped}%`;
        const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
        return rows.map((r) => fromDbRow2(r));
      }
      async getAll() {
        const knex = this.getKnex();
        const rows = await knex("schedules");
        return rows.map((r) => fromDbRow2(r));
      }
      // Aggregate counts by status and recurrence in a single SELECT.
      // Interpolated values are only the fixed literals "0"/"1"/"true"/"false"
      // chosen by driver, so the raw SQL takes no external input.
      async getStats() {
        const knex = this.getKnex();
        const boolTrue = this.driver === "mssql" ? "1" : "true";
        const boolFalse = this.driver === "mssql" ? "0" : "false";
        const result = await knex("schedules").select(
          knex.raw("COUNT(*) as total"),
          knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
          knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
          knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
          knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
        ).first();
        return {
          total: Number(result.total) || 0,
          active: Number(result.active) || 0,
          paused: Number(result.paused) || 0,
          completed: Number(result.completed) || 0,
          failed: Number(result.failed) || 0,
          recurring: Number(result.recurring) || 0,
          oneTime: Number(result.one_time) || 0
        };
      }
      // Enforce optional global / per-user / per-user-recurring schedule caps;
      // throws with a user-facing message when a configured limit is reached.
      async validateLimits(creatorId, isRecurring, limits) {
        const knex = this.getKnex();
        if (limits.maxGlobal) {
          const result = await knex("schedules").count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxGlobal) {
            throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
          }
        }
        if (limits.maxPerUser) {
          const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxPerUser) {
            throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
          }
        }
        if (isRecurring && limits.maxRecurringPerUser) {
          const bTrue = this.driver === "mssql" ? 1 : true;
          const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
            throw new Error(
              `You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
            );
          }
        }
      }
      // --- HA Distributed Locking (via scheduler_locks table) ---
      // Two-step acquire: first try to steal an expired lock row via a guarded
      // UPDATE, otherwise INSERT a fresh row and let the primary-key conflict
      // signal "held by someone else" (caught and reported as null).
      async tryAcquireLock(lockId, nodeId, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const token = (0, import_uuid2.v4)();
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
          node_id: nodeId,
          lock_token: token,
          acquired_at: now,
          expires_at: expiresAt
        });
        if (updated > 0) return token;
        try {
          await knex("scheduler_locks").insert({
            lock_id: lockId,
            node_id: nodeId,
            lock_token: token,
            acquired_at: now,
            expires_at: expiresAt
          });
          return token;
        } catch {
          // Insert conflict: lock is currently held by another node.
          return null;
        }
      }
      // Release only if the caller still owns the lock (token must match).
      async releaseLock(lockId, lockToken) {
        const knex = this.getKnex();
        await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
      }
      // Extend the TTL of a held lock; false when the token no longer matches
      // (lock was lost/stolen) so the caller can stop assuming ownership.
      async renewLock(lockId, lockToken, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
        return updated > 0;
      }
      // No-op: writes go straight to the database, nothing is buffered.
      async flush() {
      }
    };
  }
});
|
|
53917
|
-
|
|
53918
|
-
// src/enterprise/loader.ts
// Namespace object for the enterprise loader module; populated via the
// bundler's __export helper with lazy getters for the two entry points below.
var loader_exports = {};
__export(loader_exports, {
  loadEnterprisePolicyEngine: () => loadEnterprisePolicyEngine,
  loadEnterpriseStoreBackend: () => loadEnterpriseStoreBackend
});
|
|
53924
|
-
/**
 * Resolve the policy engine to use for this run.
 *
 * Returns the enterprise OPA engine only when a valid license carrying the
 * "policy" feature is present; in every other situation — no license, missing
 * feature, or any failure while loading/initializing the enterprise modules —
 * it degrades to the built-in DefaultPolicyEngine rather than failing the run.
 *
 * @param config policy configuration passed through to the OPA engine.
 */
async function loadEnterprisePolicyEngine(config) {
  try {
    const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
    const licenseCheck = new LicenseValidator2();
    const activeLicense = await licenseCheck.loadAndValidate();
    // Not entitled: silently fall back to the default engine.
    const entitled = Boolean(activeLicense) && licenseCheck.hasFeature("policy");
    if (!entitled) {
      return new DefaultPolicyEngine();
    }
    // Expired-but-within-grace licenses still work, with a renewal nag.
    if (licenseCheck.isInGracePeriod()) {
      console.warn(
        "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
      );
    }
    const { OpaPolicyEngine: OpaPolicyEngine2 } = await Promise.resolve().then(() => (init_opa_policy_engine(), opa_policy_engine_exports));
    const opaEngine = new OpaPolicyEngine2(config);
    await opaEngine.initialize(config);
    return opaEngine;
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    try {
      // Best-effort warning; logging must never block the fallback path.
      const { logger: logger2 } = (init_logger(), __toCommonJS(logger_exports));
      logger2.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
    } catch {
    }
    return new DefaultPolicyEngine();
  }
}
|
|
53951
|
-
/**
 * Construct the SQL-backed schedule store, gated behind the enterprise
 * 'scheduler-sql' license feature.
 *
 * Unlike the policy-engine loader, this has no silent fallback: a missing or
 * insufficient license throws so the caller surfaces a clear configuration
 * error (the free tier should use the sqlite driver instead).
 *
 * @param driver        one of postgresql | mysql | mssql.
 * @param storageConfig storage section of the scheduler config.
 * @param haConfig      optional high-availability settings.
 * @throws Error when no valid license with 'scheduler-sql' is available.
 */
async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
  const { LicenseValidator: LicenseValidator2 } = await Promise.resolve().then(() => (init_validator(), validator_exports));
  const licenseCheck = new LicenseValidator2();
  const activeLicense = await licenseCheck.loadAndValidate();
  const entitled = Boolean(activeLicense) && licenseCheck.hasFeature("scheduler-sql");
  if (!entitled) {
    throw new Error(
      `The ${driver} schedule storage driver requires a Visor Enterprise license with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`
    );
  }
  // Expired-but-within-grace licenses still work, with a renewal nag.
  if (licenseCheck.isInGracePeriod()) {
    console.warn(
      "[visor:enterprise] License has expired but is within the 72-hour grace period. Please renew your license."
    );
  }
  const { KnexStoreBackend: KnexStoreBackend2 } = await Promise.resolve().then(() => (init_knex_store(), knex_store_exports));
  return new KnexStoreBackend2(driver, storageConfig, haConfig);
}
|
|
53968
|
-
// Lazy module initializer for src/enterprise/loader.ts (bundler __esm
// pattern): runs once on first init_loader() call and pulls in the default
// policy engine module that loadEnterprisePolicyEngine falls back to.
var init_loader = __esm({
  "src/enterprise/loader.ts"() {
    "use strict";
    init_default_engine();
  }
});
|
|
53974
|
-
|
|
53975
53907
|
// src/event-bus/event-bus.ts
|
|
53976
53908
|
var event_bus_exports = {};
|
|
53977
53909
|
__export(event_bus_exports, {
|
|
@@ -54878,8 +54810,8 @@ ${content}
|
|
|
54878
54810
|
* Sleep utility
|
|
54879
54811
|
*/
|
|
54880
54812
|
sleep(ms) {
|
|
54881
|
-
return new Promise((
|
|
54882
|
-
const t = setTimeout(
|
|
54813
|
+
return new Promise((resolve14) => {
|
|
54814
|
+
const t = setTimeout(resolve14, ms);
|
|
54883
54815
|
if (typeof t.unref === "function") {
|
|
54884
54816
|
try {
|
|
54885
54817
|
t.unref();
|
|
@@ -54956,6 +54888,7 @@ var init_github_frontend = __esm({
|
|
|
54956
54888
|
// Minimum delay between updates (public for testing)
|
|
54957
54889
|
// Cache of created GitHub comment IDs per group to handle API eventual consistency
|
|
54958
54890
|
createdCommentGithubIds = /* @__PURE__ */ new Map();
|
|
54891
|
+
_stopped = false;
|
|
54959
54892
|
start(ctx) {
|
|
54960
54893
|
const log2 = ctx.logger;
|
|
54961
54894
|
const bus = ctx.eventBus;
|
|
@@ -55100,9 +55033,19 @@ var init_github_frontend = __esm({
|
|
|
55100
55033
|
})
|
|
55101
55034
|
);
|
|
55102
55035
|
}
|
|
55103
|
-
stop() {
|
|
55036
|
+
async stop() {
|
|
55037
|
+
this._stopped = true;
|
|
55104
55038
|
for (const s of this.subs) s.unsubscribe();
|
|
55105
55039
|
this.subs = [];
|
|
55040
|
+
if (this._timer) {
|
|
55041
|
+
clearTimeout(this._timer);
|
|
55042
|
+
this._timer = null;
|
|
55043
|
+
}
|
|
55044
|
+
this._pendingIds.clear();
|
|
55045
|
+
const pending = Array.from(this.updateLocks.values());
|
|
55046
|
+
if (pending.length > 0) {
|
|
55047
|
+
await Promise.allSettled(pending);
|
|
55048
|
+
}
|
|
55106
55049
|
}
|
|
55107
55050
|
async buildFullBody(ctx, group) {
|
|
55108
55051
|
const header = this.renderThreadHeader(ctx, group);
|
|
@@ -55153,8 +55096,8 @@ ${end}`);
|
|
|
55153
55096
|
async updateGroupedComment(ctx, comments, group, changedIds) {
|
|
55154
55097
|
const existingLock = this.updateLocks.get(group);
|
|
55155
55098
|
let resolveLock;
|
|
55156
|
-
const ourLock = new Promise((
|
|
55157
|
-
resolveLock =
|
|
55099
|
+
const ourLock = new Promise((resolve14) => {
|
|
55100
|
+
resolveLock = resolve14;
|
|
55158
55101
|
});
|
|
55159
55102
|
this.updateLocks.set(group, ourLock);
|
|
55160
55103
|
try {
|
|
@@ -55180,6 +55123,7 @@ ${end}`);
|
|
|
55180
55123
|
*/
|
|
55181
55124
|
async performGroupedCommentUpdate(ctx, comments, group, changedIds) {
|
|
55182
55125
|
try {
|
|
55126
|
+
if (this._stopped) return;
|
|
55183
55127
|
if (!ctx.run.repo || !ctx.run.pr) return;
|
|
55184
55128
|
const config = ctx.config;
|
|
55185
55129
|
const prCommentEnabled = config?.output?.pr_comment?.enabled !== false;
|
|
@@ -55466,7 +55410,7 @@ ${blocks}
|
|
|
55466
55410
|
* Sleep utility for enforcing delays
|
|
55467
55411
|
*/
|
|
55468
55412
|
sleep(ms) {
|
|
55469
|
-
return new Promise((
|
|
55413
|
+
return new Promise((resolve14) => setTimeout(resolve14, ms));
|
|
55470
55414
|
}
|
|
55471
55415
|
};
|
|
55472
55416
|
}
|
|
@@ -56737,15 +56681,15 @@ function serializeRunState(state) {
|
|
|
56737
56681
|
])
|
|
56738
56682
|
};
|
|
56739
56683
|
}
|
|
56740
|
-
var
|
|
56684
|
+
var path26, fs22, StateMachineExecutionEngine;
|
|
56741
56685
|
var init_state_machine_execution_engine = __esm({
|
|
56742
56686
|
"src/state-machine-execution-engine.ts"() {
|
|
56743
56687
|
"use strict";
|
|
56744
56688
|
init_runner();
|
|
56745
56689
|
init_logger();
|
|
56746
56690
|
init_sandbox_manager();
|
|
56747
|
-
|
|
56748
|
-
|
|
56691
|
+
path26 = __toESM(require("path"));
|
|
56692
|
+
fs22 = __toESM(require("fs"));
|
|
56749
56693
|
StateMachineExecutionEngine = class _StateMachineExecutionEngine {
|
|
56750
56694
|
workingDirectory;
|
|
56751
56695
|
executionContext;
|
|
@@ -56977,8 +56921,8 @@ var init_state_machine_execution_engine = __esm({
|
|
|
56977
56921
|
logger.debug(
|
|
56978
56922
|
`[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`
|
|
56979
56923
|
);
|
|
56980
|
-
const { loadEnterprisePolicyEngine
|
|
56981
|
-
context2.policyEngine = await
|
|
56924
|
+
const { loadEnterprisePolicyEngine } = await import("./enterprise/loader");
|
|
56925
|
+
context2.policyEngine = await loadEnterprisePolicyEngine(configWithTagFilter.policy);
|
|
56982
56926
|
logger.debug(
|
|
56983
56927
|
`[PolicyEngine] Initialized: ${context2.policyEngine?.constructor?.name || "unknown"}`
|
|
56984
56928
|
);
|
|
@@ -57130,9 +57074,9 @@ var init_state_machine_execution_engine = __esm({
|
|
|
57130
57074
|
}
|
|
57131
57075
|
const checkId = String(ev?.checkId || "unknown");
|
|
57132
57076
|
const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
|
|
57133
|
-
const baseDir = process.env.VISOR_SNAPSHOT_DIR ||
|
|
57134
|
-
|
|
57135
|
-
const filePath =
|
|
57077
|
+
const baseDir = process.env.VISOR_SNAPSHOT_DIR || path26.resolve(process.cwd(), ".visor", "snapshots");
|
|
57078
|
+
fs22.mkdirSync(baseDir, { recursive: true });
|
|
57079
|
+
const filePath = path26.join(baseDir, `${threadKey}-${checkId}.json`);
|
|
57136
57080
|
await this.saveSnapshotToFile(filePath);
|
|
57137
57081
|
logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
|
|
57138
57082
|
try {
|
|
@@ -57273,7 +57217,7 @@ var init_state_machine_execution_engine = __esm({
|
|
|
57273
57217
|
* Does not include secrets. Intended for debugging and future resume support.
|
|
57274
57218
|
*/
|
|
57275
57219
|
async saveSnapshotToFile(filePath) {
|
|
57276
|
-
const
|
|
57220
|
+
const fs23 = await import("fs/promises");
|
|
57277
57221
|
const ctx = this._lastContext;
|
|
57278
57222
|
const runner = this._lastRunner;
|
|
57279
57223
|
if (!ctx || !runner) {
|
|
@@ -57293,14 +57237,14 @@ var init_state_machine_execution_engine = __esm({
|
|
|
57293
57237
|
journal: entries,
|
|
57294
57238
|
requestedChecks: ctx.requestedChecks || []
|
|
57295
57239
|
};
|
|
57296
|
-
await
|
|
57240
|
+
await fs23.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
|
|
57297
57241
|
}
|
|
57298
57242
|
/**
|
|
57299
57243
|
* Load a snapshot JSON from file and return it. Resume support can build on this.
|
|
57300
57244
|
*/
|
|
57301
57245
|
async loadSnapshotFromFile(filePath) {
|
|
57302
|
-
const
|
|
57303
|
-
const raw = await
|
|
57246
|
+
const fs23 = await import("fs/promises");
|
|
57247
|
+
const raw = await fs23.readFile(filePath, "utf8");
|
|
57304
57248
|
return JSON.parse(raw);
|
|
57305
57249
|
}
|
|
57306
57250
|
/**
|