@probelabs/visor 0.1.147-ee → 0.1.148-ee
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/frontends/github-frontend.d.ts +2 -1
- package/dist/frontends/github-frontend.d.ts.map +1 -1
- package/dist/index.js +726 -113
- package/dist/providers/ai-check-provider.d.ts.map +1 -1
- package/dist/scheduler/schedule-tool.d.ts.map +1 -1
- package/dist/scheduler/scheduler.d.ts +5 -0
- package/dist/scheduler/scheduler.d.ts.map +1 -1
- package/dist/sdk/{check-provider-registry-LBYIKFYM.mjs → check-provider-registry-AMYY2ZJY.mjs} +5 -6
- package/dist/sdk/{check-provider-registry-SCPM6DIT.mjs → check-provider-registry-DVQDGTOE.mjs} +5 -6
- package/dist/sdk/{chunk-4F5UVWAN.mjs → chunk-62TNF5PJ.mjs} +2 -2
- package/dist/sdk/{chunk-4F5UVWAN.mjs.map → chunk-62TNF5PJ.mjs.map} +1 -1
- package/dist/sdk/{chunk-PNZH3JSI.mjs → chunk-75Q63UNX.mjs} +2742 -276
- package/dist/sdk/chunk-75Q63UNX.mjs.map +1 -0
- package/dist/sdk/{chunk-FBJ7MC7R.mjs → chunk-CISJ6DJW.mjs} +3 -3
- package/dist/sdk/{chunk-EWGX7LI7.mjs → chunk-H4AYMOAT.mjs} +2742 -276
- package/dist/sdk/chunk-H4AYMOAT.mjs.map +1 -0
- package/dist/sdk/{chunk-V2QW6ECX.mjs → chunk-RJLJUTSU.mjs} +2 -2
- package/dist/sdk/{failure-condition-evaluator-FHNZL2US.mjs → failure-condition-evaluator-IVCTD4BZ.mjs} +3 -3
- package/dist/sdk/{github-frontend-47EU2HBY.mjs → github-frontend-DFT5G32K.mjs} +16 -4
- package/dist/sdk/github-frontend-DFT5G32K.mjs.map +1 -0
- package/dist/sdk/{host-GVR4UGZ3.mjs → host-H7IX4GBK.mjs} +2 -2
- package/dist/sdk/{host-KGN5OIAM.mjs → host-NZXGBBJI.mjs} +2 -2
- package/dist/sdk/{routing-CZ36LVVS.mjs → routing-LU5PAREW.mjs} +4 -4
- package/dist/sdk/schedule-tool-4JMWZCCK.mjs +35 -0
- package/dist/sdk/schedule-tool-CONR4VW3.mjs +35 -0
- package/dist/sdk/{schedule-tool-handler-KFYNV7HL.mjs → schedule-tool-handler-AXMR7NBI.mjs} +5 -6
- package/dist/sdk/{schedule-tool-handler-E7XHMU5G.mjs → schedule-tool-handler-YUC6CAXX.mjs} +5 -6
- package/dist/sdk/sdk.js +1608 -406
- package/dist/sdk/sdk.js.map +1 -1
- package/dist/sdk/sdk.mjs +4 -5
- package/dist/sdk/sdk.mjs.map +1 -1
- package/dist/sdk/{trace-helpers-EHDZ42HH.mjs → trace-helpers-6ROJR7N3.mjs} +2 -2
- package/dist/sdk/{workflow-check-provider-5453TW65.mjs → workflow-check-provider-DYSO3PML.mjs} +5 -6
- package/dist/sdk/{workflow-check-provider-BSUSPFOF.mjs → workflow-check-provider-MMB7L3YG.mjs} +5 -6
- package/dist/sdk/workflow-check-provider-MMB7L3YG.mjs.map +1 -0
- package/dist/state-machine/context/build-engine-context.d.ts.map +1 -1
- package/dist/utils/tool-resolver.d.ts.map +1 -1
- package/dist/utils/workspace-manager.d.ts +31 -8
- package/dist/utils/workspace-manager.d.ts.map +1 -1
- package/dist/utils/worktree-manager.d.ts +6 -0
- package/dist/utils/worktree-manager.d.ts.map +1 -1
- package/package.json +2 -2
- package/dist/sdk/chunk-EWGX7LI7.mjs.map +0 -1
- package/dist/sdk/chunk-PNZH3JSI.mjs.map +0 -1
- package/dist/sdk/chunk-XKCER23W.mjs +0 -1490
- package/dist/sdk/chunk-XKCER23W.mjs.map +0 -1
- package/dist/sdk/github-frontend-47EU2HBY.mjs.map +0 -1
- package/dist/sdk/schedule-tool-2COUUTF7.mjs +0 -18
- /package/dist/sdk/{check-provider-registry-LBYIKFYM.mjs.map → check-provider-registry-AMYY2ZJY.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-SCPM6DIT.mjs.map → check-provider-registry-DVQDGTOE.mjs.map} +0 -0
- /package/dist/sdk/{chunk-FBJ7MC7R.mjs.map → chunk-CISJ6DJW.mjs.map} +0 -0
- /package/dist/sdk/{chunk-V2QW6ECX.mjs.map → chunk-RJLJUTSU.mjs.map} +0 -0
- /package/dist/sdk/{failure-condition-evaluator-FHNZL2US.mjs.map → failure-condition-evaluator-IVCTD4BZ.mjs.map} +0 -0
- /package/dist/sdk/{host-GVR4UGZ3.mjs.map → host-H7IX4GBK.mjs.map} +0 -0
- /package/dist/sdk/{host-KGN5OIAM.mjs.map → host-NZXGBBJI.mjs.map} +0 -0
- /package/dist/sdk/{routing-CZ36LVVS.mjs.map → routing-LU5PAREW.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-2COUUTF7.mjs.map → schedule-tool-4JMWZCCK.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-E7XHMU5G.mjs.map → schedule-tool-CONR4VW3.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-KFYNV7HL.mjs.map → schedule-tool-handler-AXMR7NBI.mjs.map} +0 -0
- /package/dist/sdk/{trace-helpers-EHDZ42HH.mjs.map → schedule-tool-handler-YUC6CAXX.mjs.map} +0 -0
- /package/dist/sdk/{workflow-check-provider-5453TW65.mjs.map → trace-helpers-6ROJR7N3.mjs.map} +0 -0
- /package/dist/sdk/{workflow-check-provider-BSUSPFOF.mjs.map → workflow-check-provider-DYSO3PML.mjs.map} +0 -0
package/dist/sdk/sdk.js
CHANGED
|
@@ -760,7 +760,7 @@ var require_package = __commonJS({
|
|
|
760
760
|
"@opentelemetry/sdk-node": "^0.203.0",
|
|
761
761
|
"@opentelemetry/sdk-trace-base": "^1.30.1",
|
|
762
762
|
"@opentelemetry/semantic-conventions": "^1.30.1",
|
|
763
|
-
"@probelabs/probe": "^0.6.0-
|
|
763
|
+
"@probelabs/probe": "^0.6.0-rc264",
|
|
764
764
|
"@types/commander": "^2.12.0",
|
|
765
765
|
"@types/uuid": "^10.0.0",
|
|
766
766
|
acorn: "^8.16.0",
|
|
@@ -864,11 +864,11 @@ function getTracer() {
|
|
|
864
864
|
}
|
|
865
865
|
async function withActiveSpan(name, attrs, fn) {
|
|
866
866
|
const tracer = getTracer();
|
|
867
|
-
return await new Promise((
|
|
867
|
+
return await new Promise((resolve18, reject) => {
|
|
868
868
|
const callback = async (span) => {
|
|
869
869
|
try {
|
|
870
870
|
const res = await fn(span);
|
|
871
|
-
|
|
871
|
+
resolve18(res);
|
|
872
872
|
} catch (err) {
|
|
873
873
|
try {
|
|
874
874
|
if (err instanceof Error) span.recordException(err);
|
|
@@ -945,19 +945,19 @@ function __getOrCreateNdjsonPath() {
|
|
|
945
945
|
try {
|
|
946
946
|
if (process.env.VISOR_TELEMETRY_SINK && process.env.VISOR_TELEMETRY_SINK !== "file")
|
|
947
947
|
return null;
|
|
948
|
-
const
|
|
949
|
-
const
|
|
948
|
+
const path31 = require("path");
|
|
949
|
+
const fs27 = require("fs");
|
|
950
950
|
if (process.env.VISOR_FALLBACK_TRACE_FILE) {
|
|
951
951
|
__ndjsonPath = process.env.VISOR_FALLBACK_TRACE_FILE;
|
|
952
|
-
const dir =
|
|
953
|
-
if (!
|
|
952
|
+
const dir = path31.dirname(__ndjsonPath);
|
|
953
|
+
if (!fs27.existsSync(dir)) fs27.mkdirSync(dir, { recursive: true });
|
|
954
954
|
return __ndjsonPath;
|
|
955
955
|
}
|
|
956
|
-
const outDir = process.env.VISOR_TRACE_DIR ||
|
|
957
|
-
if (!
|
|
956
|
+
const outDir = process.env.VISOR_TRACE_DIR || path31.join(process.cwd(), "output", "traces");
|
|
957
|
+
if (!fs27.existsSync(outDir)) fs27.mkdirSync(outDir, { recursive: true });
|
|
958
958
|
if (!__ndjsonPath) {
|
|
959
959
|
const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
960
|
-
__ndjsonPath =
|
|
960
|
+
__ndjsonPath = path31.join(outDir, `${ts}.ndjson`);
|
|
961
961
|
}
|
|
962
962
|
return __ndjsonPath;
|
|
963
963
|
} catch {
|
|
@@ -966,11 +966,11 @@ function __getOrCreateNdjsonPath() {
|
|
|
966
966
|
}
|
|
967
967
|
function _appendRunMarker() {
|
|
968
968
|
try {
|
|
969
|
-
const
|
|
969
|
+
const fs27 = require("fs");
|
|
970
970
|
const p = __getOrCreateNdjsonPath();
|
|
971
971
|
if (!p) return;
|
|
972
972
|
const line = { name: "visor.run", attributes: { started: true } };
|
|
973
|
-
|
|
973
|
+
fs27.appendFileSync(p, JSON.stringify(line) + "\n", "utf8");
|
|
974
974
|
} catch {
|
|
975
975
|
}
|
|
976
976
|
}
|
|
@@ -3734,9 +3734,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
3734
3734
|
});
|
|
3735
3735
|
liquid.registerFilter("get", (obj, pathExpr) => {
|
|
3736
3736
|
if (obj == null) return void 0;
|
|
3737
|
-
const
|
|
3738
|
-
if (!
|
|
3739
|
-
const parts =
|
|
3737
|
+
const path31 = typeof pathExpr === "string" ? pathExpr : String(pathExpr || "");
|
|
3738
|
+
if (!path31) return obj;
|
|
3739
|
+
const parts = path31.split(".");
|
|
3740
3740
|
let cur = obj;
|
|
3741
3741
|
for (const p of parts) {
|
|
3742
3742
|
if (cur == null) return void 0;
|
|
@@ -3855,9 +3855,9 @@ function configureLiquidWithExtensions(liquid) {
|
|
|
3855
3855
|
}
|
|
3856
3856
|
}
|
|
3857
3857
|
const defaultRole = typeof rolesCfg.default === "string" && rolesCfg.default.trim() ? rolesCfg.default.trim() : void 0;
|
|
3858
|
-
const getNested = (obj,
|
|
3859
|
-
if (!obj || !
|
|
3860
|
-
const parts =
|
|
3858
|
+
const getNested = (obj, path31) => {
|
|
3859
|
+
if (!obj || !path31) return void 0;
|
|
3860
|
+
const parts = path31.split(".");
|
|
3861
3861
|
let cur = obj;
|
|
3862
3862
|
for (const p of parts) {
|
|
3863
3863
|
if (cur == null) return void 0;
|
|
@@ -6409,8 +6409,8 @@ var init_dependency_gating = __esm({
|
|
|
6409
6409
|
async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
6410
6410
|
try {
|
|
6411
6411
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
6412
|
-
const
|
|
6413
|
-
const
|
|
6412
|
+
const fs27 = await import("fs/promises");
|
|
6413
|
+
const path31 = await import("path");
|
|
6414
6414
|
const schemaRaw = checkConfig.schema || "plain";
|
|
6415
6415
|
const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
|
|
6416
6416
|
let templateContent;
|
|
@@ -6418,24 +6418,24 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
|
6418
6418
|
templateContent = String(checkConfig.template.content);
|
|
6419
6419
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
6420
6420
|
const file = String(checkConfig.template.file);
|
|
6421
|
-
const resolved =
|
|
6422
|
-
templateContent = await
|
|
6421
|
+
const resolved = path31.resolve(process.cwd(), file);
|
|
6422
|
+
templateContent = await fs27.readFile(resolved, "utf-8");
|
|
6423
6423
|
} else if (schema && schema !== "plain") {
|
|
6424
6424
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
6425
6425
|
if (sanitized) {
|
|
6426
6426
|
const candidatePaths = [
|
|
6427
|
-
|
|
6427
|
+
path31.join(__dirname, "output", sanitized, "template.liquid"),
|
|
6428
6428
|
// bundled: dist/output/
|
|
6429
|
-
|
|
6429
|
+
path31.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
6430
6430
|
// source: output/
|
|
6431
|
-
|
|
6431
|
+
path31.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
6432
6432
|
// fallback: cwd/output/
|
|
6433
|
-
|
|
6433
|
+
path31.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
6434
6434
|
// fallback: cwd/dist/output/
|
|
6435
6435
|
];
|
|
6436
6436
|
for (const p of candidatePaths) {
|
|
6437
6437
|
try {
|
|
6438
|
-
templateContent = await
|
|
6438
|
+
templateContent = await fs27.readFile(p, "utf-8");
|
|
6439
6439
|
if (templateContent) break;
|
|
6440
6440
|
} catch {
|
|
6441
6441
|
}
|
|
@@ -6840,7 +6840,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
6840
6840
|
}
|
|
6841
6841
|
try {
|
|
6842
6842
|
const originalProbePath = process.env.PROBE_PATH;
|
|
6843
|
-
const
|
|
6843
|
+
const fs27 = require("fs");
|
|
6844
6844
|
const possiblePaths = [
|
|
6845
6845
|
// Relative to current working directory (most common in production)
|
|
6846
6846
|
path6.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
|
|
@@ -6851,7 +6851,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
6851
6851
|
];
|
|
6852
6852
|
let probeBinaryPath;
|
|
6853
6853
|
for (const candidatePath of possiblePaths) {
|
|
6854
|
-
if (
|
|
6854
|
+
if (fs27.existsSync(candidatePath)) {
|
|
6855
6855
|
probeBinaryPath = candidatePath;
|
|
6856
6856
|
break;
|
|
6857
6857
|
}
|
|
@@ -6972,7 +6972,7 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
6972
6972
|
if (chromiumPath) {
|
|
6973
6973
|
env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;
|
|
6974
6974
|
}
|
|
6975
|
-
const result = await new Promise((
|
|
6975
|
+
const result = await new Promise((resolve18) => {
|
|
6976
6976
|
const proc = (0, import_child_process.spawn)(
|
|
6977
6977
|
"npx",
|
|
6978
6978
|
[
|
|
@@ -7002,13 +7002,13 @@ async function renderMermaidToPng(mermaidCode) {
|
|
|
7002
7002
|
});
|
|
7003
7003
|
proc.on("close", (code) => {
|
|
7004
7004
|
if (code === 0) {
|
|
7005
|
-
|
|
7005
|
+
resolve18({ success: true });
|
|
7006
7006
|
} else {
|
|
7007
|
-
|
|
7007
|
+
resolve18({ success: false, error: stderr || `Exit code ${code}` });
|
|
7008
7008
|
}
|
|
7009
7009
|
});
|
|
7010
7010
|
proc.on("error", (err) => {
|
|
7011
|
-
|
|
7011
|
+
resolve18({ success: false, error: err.message });
|
|
7012
7012
|
});
|
|
7013
7013
|
});
|
|
7014
7014
|
if (!result.success) {
|
|
@@ -8153,8 +8153,8 @@ ${schemaString}`);
|
|
|
8153
8153
|
}
|
|
8154
8154
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8155
8155
|
try {
|
|
8156
|
-
const
|
|
8157
|
-
const
|
|
8156
|
+
const fs27 = require("fs");
|
|
8157
|
+
const path31 = require("path");
|
|
8158
8158
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8159
8159
|
const provider = this.config.provider || "auto";
|
|
8160
8160
|
const model = this.config.model || "default";
|
|
@@ -8268,20 +8268,20 @@ ${"=".repeat(60)}
|
|
|
8268
8268
|
`;
|
|
8269
8269
|
readableVersion += `${"=".repeat(60)}
|
|
8270
8270
|
`;
|
|
8271
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8272
|
-
if (!
|
|
8273
|
-
|
|
8271
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
8272
|
+
if (!fs27.existsSync(debugArtifactsDir)) {
|
|
8273
|
+
fs27.mkdirSync(debugArtifactsDir, { recursive: true });
|
|
8274
8274
|
}
|
|
8275
|
-
const debugFile =
|
|
8275
|
+
const debugFile = path31.join(
|
|
8276
8276
|
debugArtifactsDir,
|
|
8277
8277
|
`prompt-${_checkName || "unknown"}-${timestamp}.json`
|
|
8278
8278
|
);
|
|
8279
|
-
|
|
8280
|
-
const readableFile =
|
|
8279
|
+
fs27.writeFileSync(debugFile, debugJson, "utf-8");
|
|
8280
|
+
const readableFile = path31.join(
|
|
8281
8281
|
debugArtifactsDir,
|
|
8282
8282
|
`prompt-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8283
8283
|
);
|
|
8284
|
-
|
|
8284
|
+
fs27.writeFileSync(readableFile, readableVersion, "utf-8");
|
|
8285
8285
|
log(`
|
|
8286
8286
|
\u{1F4BE} Full debug info saved to:`);
|
|
8287
8287
|
log(` JSON: ${debugFile}`);
|
|
@@ -8314,8 +8314,8 @@ ${"=".repeat(60)}
|
|
|
8314
8314
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
8315
8315
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8316
8316
|
try {
|
|
8317
|
-
const
|
|
8318
|
-
const
|
|
8317
|
+
const fs27 = require("fs");
|
|
8318
|
+
const path31 = require("path");
|
|
8319
8319
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8320
8320
|
const agentAny2 = agent;
|
|
8321
8321
|
let fullHistory = [];
|
|
@@ -8326,8 +8326,8 @@ ${"=".repeat(60)}
|
|
|
8326
8326
|
} else if (agentAny2._messages) {
|
|
8327
8327
|
fullHistory = agentAny2._messages;
|
|
8328
8328
|
}
|
|
8329
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8330
|
-
const sessionBase =
|
|
8329
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
8330
|
+
const sessionBase = path31.join(
|
|
8331
8331
|
debugArtifactsDir,
|
|
8332
8332
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
8333
8333
|
);
|
|
@@ -8339,7 +8339,7 @@ ${"=".repeat(60)}
|
|
|
8339
8339
|
schema: effectiveSchema,
|
|
8340
8340
|
totalMessages: fullHistory.length
|
|
8341
8341
|
};
|
|
8342
|
-
|
|
8342
|
+
fs27.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
8343
8343
|
let readable = `=============================================================
|
|
8344
8344
|
`;
|
|
8345
8345
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -8366,7 +8366,7 @@ ${"=".repeat(60)}
|
|
|
8366
8366
|
`;
|
|
8367
8367
|
readable += content + "\n";
|
|
8368
8368
|
});
|
|
8369
|
-
|
|
8369
|
+
fs27.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
8370
8370
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
8371
8371
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
8372
8372
|
} catch (error) {
|
|
@@ -8375,11 +8375,11 @@ ${"=".repeat(60)}
|
|
|
8375
8375
|
}
|
|
8376
8376
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8377
8377
|
try {
|
|
8378
|
-
const
|
|
8379
|
-
const
|
|
8378
|
+
const fs27 = require("fs");
|
|
8379
|
+
const path31 = require("path");
|
|
8380
8380
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8381
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8382
|
-
const responseFile =
|
|
8381
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
8382
|
+
const responseFile = path31.join(
|
|
8383
8383
|
debugArtifactsDir,
|
|
8384
8384
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8385
8385
|
);
|
|
@@ -8412,7 +8412,7 @@ ${"=".repeat(60)}
|
|
|
8412
8412
|
`;
|
|
8413
8413
|
responseContent += `${"=".repeat(60)}
|
|
8414
8414
|
`;
|
|
8415
|
-
|
|
8415
|
+
fs27.writeFileSync(responseFile, responseContent, "utf-8");
|
|
8416
8416
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
8417
8417
|
} catch (error) {
|
|
8418
8418
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -8428,9 +8428,9 @@ ${"=".repeat(60)}
|
|
|
8428
8428
|
await agentAny._telemetryConfig.shutdown();
|
|
8429
8429
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
|
|
8430
8430
|
if (process.env.GITHUB_ACTIONS) {
|
|
8431
|
-
const
|
|
8432
|
-
if (
|
|
8433
|
-
const stats =
|
|
8431
|
+
const fs27 = require("fs");
|
|
8432
|
+
if (fs27.existsSync(agentAny._traceFilePath)) {
|
|
8433
|
+
const stats = fs27.statSync(agentAny._traceFilePath);
|
|
8434
8434
|
console.log(
|
|
8435
8435
|
`::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
|
|
8436
8436
|
);
|
|
@@ -8637,8 +8637,8 @@ ${schemaString}`);
|
|
|
8637
8637
|
const model = this.config.model || "default";
|
|
8638
8638
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8639
8639
|
try {
|
|
8640
|
-
const
|
|
8641
|
-
const
|
|
8640
|
+
const fs27 = require("fs");
|
|
8641
|
+
const path31 = require("path");
|
|
8642
8642
|
const os3 = require("os");
|
|
8643
8643
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8644
8644
|
const debugData = {
|
|
@@ -8712,18 +8712,18 @@ ${"=".repeat(60)}
|
|
|
8712
8712
|
readableVersion += `${"=".repeat(60)}
|
|
8713
8713
|
`;
|
|
8714
8714
|
const tempDir = os3.tmpdir();
|
|
8715
|
-
const promptFile =
|
|
8716
|
-
|
|
8715
|
+
const promptFile = path31.join(tempDir, `visor-prompt-${timestamp}.txt`);
|
|
8716
|
+
fs27.writeFileSync(promptFile, prompt, "utf-8");
|
|
8717
8717
|
log(`
|
|
8718
8718
|
\u{1F4BE} Prompt saved to: ${promptFile}`);
|
|
8719
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8719
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
8720
8720
|
try {
|
|
8721
|
-
const base =
|
|
8721
|
+
const base = path31.join(
|
|
8722
8722
|
debugArtifactsDir,
|
|
8723
8723
|
`prompt-${_checkName || "unknown"}-${timestamp}`
|
|
8724
8724
|
);
|
|
8725
|
-
|
|
8726
|
-
|
|
8725
|
+
fs27.writeFileSync(base + ".json", debugJson, "utf-8");
|
|
8726
|
+
fs27.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
|
|
8727
8727
|
log(`
|
|
8728
8728
|
\u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
|
|
8729
8729
|
} catch {
|
|
@@ -8768,8 +8768,8 @@ $ ${cliCommand}
|
|
|
8768
8768
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
8769
8769
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8770
8770
|
try {
|
|
8771
|
-
const
|
|
8772
|
-
const
|
|
8771
|
+
const fs27 = require("fs");
|
|
8772
|
+
const path31 = require("path");
|
|
8773
8773
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8774
8774
|
const agentAny = agent;
|
|
8775
8775
|
let fullHistory = [];
|
|
@@ -8780,8 +8780,8 @@ $ ${cliCommand}
|
|
|
8780
8780
|
} else if (agentAny._messages) {
|
|
8781
8781
|
fullHistory = agentAny._messages;
|
|
8782
8782
|
}
|
|
8783
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8784
|
-
const sessionBase =
|
|
8783
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
8784
|
+
const sessionBase = path31.join(
|
|
8785
8785
|
debugArtifactsDir,
|
|
8786
8786
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
8787
8787
|
);
|
|
@@ -8793,7 +8793,7 @@ $ ${cliCommand}
|
|
|
8793
8793
|
schema: effectiveSchema,
|
|
8794
8794
|
totalMessages: fullHistory.length
|
|
8795
8795
|
};
|
|
8796
|
-
|
|
8796
|
+
fs27.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
8797
8797
|
let readable = `=============================================================
|
|
8798
8798
|
`;
|
|
8799
8799
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -8820,7 +8820,7 @@ ${"=".repeat(60)}
|
|
|
8820
8820
|
`;
|
|
8821
8821
|
readable += content + "\n";
|
|
8822
8822
|
});
|
|
8823
|
-
|
|
8823
|
+
fs27.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
8824
8824
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
8825
8825
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
8826
8826
|
} catch (error) {
|
|
@@ -8829,11 +8829,11 @@ ${"=".repeat(60)}
|
|
|
8829
8829
|
}
|
|
8830
8830
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
8831
8831
|
try {
|
|
8832
|
-
const
|
|
8833
|
-
const
|
|
8832
|
+
const fs27 = require("fs");
|
|
8833
|
+
const path31 = require("path");
|
|
8834
8834
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
8835
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
8836
|
-
const responseFile =
|
|
8835
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path31.join(process.cwd(), "debug-artifacts");
|
|
8836
|
+
const responseFile = path31.join(
|
|
8837
8837
|
debugArtifactsDir,
|
|
8838
8838
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
8839
8839
|
);
|
|
@@ -8866,7 +8866,7 @@ ${"=".repeat(60)}
|
|
|
8866
8866
|
`;
|
|
8867
8867
|
responseContent += `${"=".repeat(60)}
|
|
8868
8868
|
`;
|
|
8869
|
-
|
|
8869
|
+
fs27.writeFileSync(responseFile, responseContent, "utf-8");
|
|
8870
8870
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
8871
8871
|
} catch (error) {
|
|
8872
8872
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -8884,9 +8884,9 @@ ${"=".repeat(60)}
|
|
|
8884
8884
|
await telemetry.shutdown();
|
|
8885
8885
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
|
|
8886
8886
|
if (process.env.GITHUB_ACTIONS) {
|
|
8887
|
-
const
|
|
8888
|
-
if (
|
|
8889
|
-
const stats =
|
|
8887
|
+
const fs27 = require("fs");
|
|
8888
|
+
if (fs27.existsSync(traceFilePath)) {
|
|
8889
|
+
const stats = fs27.statSync(traceFilePath);
|
|
8890
8890
|
console.log(
|
|
8891
8891
|
`::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
|
|
8892
8892
|
);
|
|
@@ -8924,8 +8924,8 @@ ${"=".repeat(60)}
|
|
|
8924
8924
|
* Load schema content from schema files or inline definitions
|
|
8925
8925
|
*/
|
|
8926
8926
|
async loadSchemaContent(schema) {
|
|
8927
|
-
const
|
|
8928
|
-
const
|
|
8927
|
+
const fs27 = require("fs").promises;
|
|
8928
|
+
const path31 = require("path");
|
|
8929
8929
|
if (typeof schema === "object" && schema !== null) {
|
|
8930
8930
|
log("\u{1F4CB} Using inline schema object from configuration");
|
|
8931
8931
|
return JSON.stringify(schema);
|
|
@@ -8938,14 +8938,14 @@ ${"=".repeat(60)}
|
|
|
8938
8938
|
}
|
|
8939
8939
|
} catch {
|
|
8940
8940
|
}
|
|
8941
|
-
if ((schema.startsWith("./") || schema.includes(".json")) && !
|
|
8941
|
+
if ((schema.startsWith("./") || schema.includes(".json")) && !path31.isAbsolute(schema)) {
|
|
8942
8942
|
if (schema.includes("..") || schema.includes("\0")) {
|
|
8943
8943
|
throw new Error("Invalid schema path: path traversal not allowed");
|
|
8944
8944
|
}
|
|
8945
8945
|
try {
|
|
8946
|
-
const schemaPath =
|
|
8946
|
+
const schemaPath = path31.resolve(process.cwd(), schema);
|
|
8947
8947
|
log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
|
|
8948
|
-
const schemaContent = await
|
|
8948
|
+
const schemaContent = await fs27.readFile(schemaPath, "utf-8");
|
|
8949
8949
|
return schemaContent.trim();
|
|
8950
8950
|
} catch (error) {
|
|
8951
8951
|
throw new Error(
|
|
@@ -8959,22 +8959,22 @@ ${"=".repeat(60)}
|
|
|
8959
8959
|
}
|
|
8960
8960
|
const candidatePaths = [
|
|
8961
8961
|
// GitHub Action bundle location
|
|
8962
|
-
|
|
8962
|
+
path31.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
|
|
8963
8963
|
// Historical fallback when src/output was inadvertently bundled as output1/
|
|
8964
|
-
|
|
8964
|
+
path31.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
|
|
8965
8965
|
// Local dev (repo root)
|
|
8966
|
-
|
|
8966
|
+
path31.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
|
|
8967
8967
|
];
|
|
8968
8968
|
for (const schemaPath of candidatePaths) {
|
|
8969
8969
|
try {
|
|
8970
|
-
const schemaContent = await
|
|
8970
|
+
const schemaContent = await fs27.readFile(schemaPath, "utf-8");
|
|
8971
8971
|
return schemaContent.trim();
|
|
8972
8972
|
} catch {
|
|
8973
8973
|
}
|
|
8974
8974
|
}
|
|
8975
|
-
const distPath =
|
|
8976
|
-
const distAltPath =
|
|
8977
|
-
const cwdPath =
|
|
8975
|
+
const distPath = path31.join(__dirname, "output", sanitizedSchemaName, "schema.json");
|
|
8976
|
+
const distAltPath = path31.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
|
|
8977
|
+
const cwdPath = path31.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
|
|
8978
8978
|
throw new Error(
|
|
8979
8979
|
`Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
|
|
8980
8980
|
);
|
|
@@ -9219,7 +9219,7 @@ ${"=".repeat(60)}
|
|
|
9219
9219
|
* Generate mock response for testing
|
|
9220
9220
|
*/
|
|
9221
9221
|
async generateMockResponse(_prompt, _checkName, _schema) {
|
|
9222
|
-
await new Promise((
|
|
9222
|
+
await new Promise((resolve18) => setTimeout(resolve18, 500));
|
|
9223
9223
|
const name = (_checkName || "").toLowerCase();
|
|
9224
9224
|
if (name.includes("extract-facts")) {
|
|
9225
9225
|
const arr = Array.from({ length: 6 }, (_, i) => ({
|
|
@@ -9580,7 +9580,7 @@ var init_command_executor = __esm({
|
|
|
9580
9580
|
* Execute command with stdin input
|
|
9581
9581
|
*/
|
|
9582
9582
|
executeWithStdin(command, options) {
|
|
9583
|
-
return new Promise((
|
|
9583
|
+
return new Promise((resolve18, reject) => {
|
|
9584
9584
|
const childProcess = (0, import_child_process2.exec)(
|
|
9585
9585
|
command,
|
|
9586
9586
|
{
|
|
@@ -9592,7 +9592,7 @@ var init_command_executor = __esm({
|
|
|
9592
9592
|
if (error && error.killed && (error.code === "ETIMEDOUT" || error.signal === "SIGTERM")) {
|
|
9593
9593
|
reject(new Error(`Command timed out after ${options.timeout || 3e4}ms`));
|
|
9594
9594
|
} else {
|
|
9595
|
-
|
|
9595
|
+
resolve18({
|
|
9596
9596
|
stdout: stdout || "",
|
|
9597
9597
|
stderr: stderr || "",
|
|
9598
9598
|
exitCode: error ? error.code || 1 : 0
|
|
@@ -17422,17 +17422,17 @@ var init_workflow_check_provider = __esm({
|
|
|
17422
17422
|
* so it can be executed by the state machine as a nested workflow.
|
|
17423
17423
|
*/
|
|
17424
17424
|
async loadWorkflowFromConfigPath(sourcePath, baseDir) {
|
|
17425
|
-
const
|
|
17426
|
-
const
|
|
17425
|
+
const path31 = require("path");
|
|
17426
|
+
const fs27 = require("fs");
|
|
17427
17427
|
const yaml5 = require("js-yaml");
|
|
17428
|
-
const resolved =
|
|
17429
|
-
if (!
|
|
17428
|
+
const resolved = path31.isAbsolute(sourcePath) ? sourcePath : path31.resolve(baseDir, sourcePath);
|
|
17429
|
+
if (!fs27.existsSync(resolved)) {
|
|
17430
17430
|
throw new Error(`Workflow config not found at: ${resolved}`);
|
|
17431
17431
|
}
|
|
17432
|
-
const rawContent =
|
|
17432
|
+
const rawContent = fs27.readFileSync(resolved, "utf8");
|
|
17433
17433
|
const rawData = yaml5.load(rawContent);
|
|
17434
17434
|
if (rawData.imports && Array.isArray(rawData.imports)) {
|
|
17435
|
-
const configDir =
|
|
17435
|
+
const configDir = path31.dirname(resolved);
|
|
17436
17436
|
for (const source of rawData.imports) {
|
|
17437
17437
|
const results = await this.registry.import(source, {
|
|
17438
17438
|
basePath: configDir,
|
|
@@ -17462,8 +17462,8 @@ ${errors}`);
|
|
|
17462
17462
|
if (!steps || Object.keys(steps).length === 0) {
|
|
17463
17463
|
throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
|
|
17464
17464
|
}
|
|
17465
|
-
const id =
|
|
17466
|
-
const name = loaded.name || `Workflow from ${
|
|
17465
|
+
const id = path31.basename(resolved).replace(/\.(ya?ml)$/i, "");
|
|
17466
|
+
const name = loaded.name || `Workflow from ${path31.basename(resolved)}`;
|
|
17467
17467
|
const workflowDef = {
|
|
17468
17468
|
id,
|
|
17469
17469
|
name,
|
|
@@ -18457,6 +18457,986 @@ var init_schedule_parser = __esm({
|
|
|
18457
18457
|
}
|
|
18458
18458
|
});
|
|
18459
18459
|
|
|
18460
|
+
// src/github-auth.ts
|
|
18461
|
+
var github_auth_exports = {};
|
|
18462
|
+
__export(github_auth_exports, {
|
|
18463
|
+
_testGetCachedToken: () => _testGetCachedToken,
|
|
18464
|
+
_testSetCachedToken: () => _testSetCachedToken,
|
|
18465
|
+
createAuthenticatedOctokit: () => createAuthenticatedOctokit,
|
|
18466
|
+
injectGitHubCredentials: () => injectGitHubCredentials,
|
|
18467
|
+
markTokenFresh: () => markTokenFresh,
|
|
18468
|
+
refreshGitHubCredentials: () => refreshGitHubCredentials,
|
|
18469
|
+
resolveAuthFromEnvironment: () => resolveAuthFromEnvironment,
|
|
18470
|
+
resolvePrivateKey: () => resolvePrivateKey,
|
|
18471
|
+
startTokenRefreshTimer: () => startTokenRefreshTimer,
|
|
18472
|
+
stopTokenRefreshTimer: () => stopTokenRefreshTimer
|
|
18473
|
+
});
|
|
18474
|
+
/**
 * Build an authenticated Octokit client from the supplied credentials.
 *
 * Preference order:
 *   1. GitHub App (appId + privateKey): authenticates as an installation,
 *      auto-detecting the installation id from owner/repo when not provided.
 *   2. Plain token auth.
 * Returns undefined when neither credential set is usable.
 *
 * @param options - { token?, appId?, privateKey?, installationId?, owner?, repo? }
 * @returns {{ octokit, authType: "github-app"|"token", token }} or undefined
 * @throws Error when installationId is not a positive integer, or when App
 *         auth is requested but no installation id can be determined.
 */
async function createAuthenticatedOctokit(options) {
  const { token, appId, installationId, owner, repo } = options;
  // privateKey may be inline PEM text or a filesystem path; normalize it.
  const privateKey = options.privateKey ? resolvePrivateKey(options.privateKey) : void 0;
  if (appId && privateKey) {
    // Loaded lazily so @octokit/auth-app is only required for App auth.
    const { createAppAuth } = await import("@octokit/auth-app");
    let finalInstallationId;
    if (installationId) {
      finalInstallationId = parseInt(installationId, 10);
      if (isNaN(finalInstallationId) || finalInstallationId <= 0) {
        throw new Error("Invalid installation-id. It must be a positive integer.");
      }
    }
    if (!finalInstallationId && owner && repo) {
      // No explicit id: authenticate as the App itself and look up the
      // installation attached to this repository.
      const appOctokit = new import_rest.Octokit({
        authStrategy: createAppAuth,
        auth: { appId, privateKey }
      });
      try {
        const { data: installation } = await appOctokit.rest.apps.getRepoInstallation({
          owner,
          repo
        });
        finalInstallationId = installation.id;
      } catch {
        throw new Error(
          "GitHub App installation ID could not be auto-detected. Provide --github-installation-id or ensure the app is installed on the repository."
        );
      }
    }
    if (!finalInstallationId) {
      throw new Error(
        "GitHub App installation ID is required. Provide --github-installation-id or set owner/repo for auto-detection."
      );
    }
    const octokit = new import_rest.Octokit({
      authStrategy: createAppAuth,
      auth: {
        appId,
        privateKey,
        installationId: finalInstallationId
      }
    });
    // Eagerly mint an installation token so callers can hand it to child
    // processes (git, gh) in addition to using the Octokit client directly.
    const authResult = await octokit.auth({ type: "installation" });
    return {
      octokit,
      authType: "github-app",
      token: authResult.token
    };
  }
  if (token) {
    return {
      octokit: new import_rest.Octokit({ auth: token }),
      authType: "token",
      token
    };
  }
  // No credentials at all: caller decides whether to proceed unauthenticated.
  return void 0;
}
|
|
18532
|
+
/**
 * Gather GitHub credentials from the conventional environment variables
 * (GITHUB_TOKEN/GH_TOKEN, GITHUB_APP_*, GITHUB_REPOSITORY*). Unset variables
 * simply come back as undefined; no validation happens here.
 */
function resolveAuthFromEnvironment() {
  const env = process.env;
  // GITHUB_REPOSITORY is "owner/repo"; split once and reuse both halves.
  const repoSlug = env.GITHUB_REPOSITORY;
  const slugParts = repoSlug == null ? void 0 : repoSlug.split("/");
  return {
    token: env.GITHUB_TOKEN || env.GH_TOKEN,
    appId: env.GITHUB_APP_ID,
    privateKey: env.GITHUB_APP_PRIVATE_KEY,
    installationId: env.GITHUB_APP_INSTALLATION_ID,
    owner: env.GITHUB_REPOSITORY_OWNER || slugParts?.[0],
    repo: slugParts?.[1]
  };
}
|
|
18542
|
+
/**
 * Resolve a private key that may be given inline (PEM text) or as a file path.
 * Inline PEM content (contains "-----BEGIN") is returned unchanged; otherwise
 * the value is treated as a path and read from disk when such a file exists.
 * Falls back to returning the raw input when no file is found.
 */
function resolvePrivateKey(keyOrPath) {
  const looksLikePem = keyOrPath.includes("-----BEGIN");
  if (looksLikePem) {
    return keyOrPath;
  }
  const candidatePath = path16.resolve(keyOrPath);
  return fs13.existsSync(candidatePath) ? fs13.readFileSync(candidatePath, "utf8") : keyOrPath;
}
|
|
18552
|
+
/**
 * Inject a GitHub token into this process's environment so child processes
 * (git, gh CLI) can authenticate with it. Besides GITHUB_TOKEN/GH_TOKEN, it
 * appends two GIT_CONFIG_* url.insteadOf rewrites so both HTTPS and SSH
 * GitHub remotes are redirected through the token-authenticated URL.
 * Safe to call repeatedly (e.g. on token refresh): our own two entries are
 * overwritten in place rather than accumulating.
 */
function injectGitHubCredentials(token) {
  process.env.GITHUB_TOKEN = token;
  process.env.GH_TOKEN = token;
  const currentCount = parseInt(process.env.GIT_CONFIG_COUNT || "0", 10);
  let base;
  if (_authBase === void 0) {
    // First injection: append after any pre-existing GIT_CONFIG_* entries.
    base = currentCount;
  } else if (_lastWrittenCount !== void 0 && currentCount !== _lastWrittenCount) {
    // GIT_CONFIG_COUNT changed since our last write (someone else added
    // entries): append again rather than clobbering theirs.
    base = currentCount;
  } else {
    // Re-injection: overwrite the two entries we wrote last time.
    base = _authBase;
  }
  _authBase = base;
  const authUrl = `https://x-access-token:${token}@github.com/`;
  process.env[`GIT_CONFIG_KEY_${base}`] = `url.${authUrl}.insteadOf`;
  process.env[`GIT_CONFIG_VALUE_${base}`] = "https://github.com/";
  process.env[`GIT_CONFIG_KEY_${base + 1}`] = `url.${authUrl}.insteadOf`;
  process.env[`GIT_CONFIG_VALUE_${base + 1}`] = "git@github.com:";
  const newCount = base + 2;
  process.env.GIT_CONFIG_COUNT = String(newCount);
  // Remember what we wrote so the next call can detect external changes.
  _lastWrittenCount = newCount;
}
|
|
18574
|
+
/**
 * Record the token currently in the environment as freshly generated,
 * resetting the staleness clock so refreshGitHubCredentials() will not
 * immediately re-mint it. No-op when neither GITHUB_TOKEN nor GH_TOKEN is set.
 */
function markTokenFresh() {
  const envToken = process.env.GITHUB_TOKEN || process.env.GH_TOKEN;
  if (!envToken) return;
  _cachedAppToken = { token: envToken, generatedAt: Date.now() };
}
|
|
18580
|
+
/**
 * Re-mint the GitHub App installation token when the cached one is stale
 * (older than TOKEN_REFRESH_MS) and re-inject it into the environment.
 * No-op unless GITHUB_APP_ID and GITHUB_APP_PRIVATE_KEY are both set, or when
 * the cached token is still fresh. Failures are logged as warnings rather
 * than thrown, so callers keep running with the previous token.
 */
async function refreshGitHubCredentials() {
  const appId = process.env.GITHUB_APP_ID;
  const privateKey = process.env.GITHUB_APP_PRIVATE_KEY;
  if (!appId || !privateKey) return;
  const now = Date.now();
  const stillFresh = _cachedAppToken !== void 0 && now - _cachedAppToken.generatedAt < TOKEN_REFRESH_MS;
  if (stillFresh) {
    return;
  }
  try {
    const envAuth = resolveAuthFromEnvironment();
    const authResult = await createAuthenticatedOctokit(envAuth);
    if (authResult?.authType === "github-app") {
      injectGitHubCredentials(authResult.token);
      _cachedAppToken = { token: authResult.token, generatedAt: now };
      logger.debug("[github-auth] Refreshed GitHub App installation token");
    }
  } catch (err) {
    const age = _cachedAppToken ? `${Math.round((now - _cachedAppToken.generatedAt) / 6e4)}min old` : "no cached token";
    const reason = err instanceof Error ? err.message : String(err);
    logger.warn(
      `[github-auth] Failed to refresh GitHub App token (${age}): ${reason}. Child processes may fail with authentication errors.`
    );
  }
}
|
|
18603
|
+
/**
 * Start a background interval that periodically refreshes the GitHub App
 * installation token (every TIMER_INTERVAL_MS). Idempotent: a second call is
 * a no-op while a timer is active, and nothing starts unless GITHUB_APP_ID
 * and GITHUB_APP_PRIVATE_KEY are both present. The interval is unref()'d so
 * it never keeps the process alive on its own.
 */
function startTokenRefreshTimer() {
  if (_refreshTimer) return;
  const appId = process.env.GITHUB_APP_ID;
  const privateKey = process.env.GITHUB_APP_PRIVATE_KEY;
  if (!appId || !privateKey) return;
  const onTick = () => {
    refreshGitHubCredentials().catch((err) => {
      const reason = err instanceof Error ? err.message : String(err);
      logger.warn(`[github-auth] Background token refresh failed: ${reason}`);
    });
  };
  _refreshTimer = setInterval(onTick, TIMER_INTERVAL_MS);
  _refreshTimer.unref();
  logger.debug("[github-auth] Background token refresh timer started (every 30 min)");
}
|
|
18618
|
+
/**
 * Cancel the background refresh interval started by startTokenRefreshTimer().
 * Safe to call when no timer is running.
 */
function stopTokenRefreshTimer() {
  if (!_refreshTimer) return;
  clearInterval(_refreshTimer);
  _refreshTimer = void 0;
  logger.debug("[github-auth] Background token refresh timer stopped");
}
|
|
18625
|
+
/**
 * Test hook: overwrite the cached App token, or clear it when token is falsy.
 * generatedAt defaults to now.
 */
function _testSetCachedToken(token, generatedAt) {
  _cachedAppToken = token ? { token, generatedAt: generatedAt ?? Date.now() } : void 0;
}
|
|
18632
|
+
// Test hook: expose the cached App token ({ token, generatedAt } or undefined).
function _testGetCachedToken() {
  return _cachedAppToken;
}
|
|
18635
|
+
// Module-scope state for github-auth: required modules, GIT_CONFIG bookkeeping
// (_authBase/_lastWrittenCount), the cached installation token, and timer state.
var import_rest, fs13, path16, _authBase, _lastWrittenCount, _cachedAppToken, TOKEN_REFRESH_MS, _refreshTimer, TIMER_INTERVAL_MS;
// Bundler lazy-init wrapper: runs once, on first use of the module's exports.
var init_github_auth = __esm({
  "src/github-auth.ts"() {
    "use strict";
    import_rest = require("@octokit/rest");
    fs13 = __toESM(require("fs"));
    path16 = __toESM(require("path"));
    init_logger();
    // A cached token is treated as stale after 45 minutes.
    TOKEN_REFRESH_MS = 45 * 60 * 1e3;
    // The background refresh timer fires every 30 minutes.
    TIMER_INTERVAL_MS = 30 * 60 * 1e3;
  }
});
|
|
18647
|
+
|
|
18648
|
+
// src/scheduler/scheduler.ts
|
|
18649
|
+
/**
 * Lazily create and return the process-wide Scheduler singleton.
 * The instance is only constructed when a visorConfig is supplied; subsequent
 * calls return the existing instance (their arguments are ignored). Returns
 * undefined when no instance exists yet and no visorConfig was given.
 */
function getScheduler(visorConfig, config) {
  if (schedulerInstance) {
    return schedulerInstance;
  }
  if (visorConfig) {
    schedulerInstance = new Scheduler(visorConfig, config);
  }
  return schedulerInstance;
}
|
|
18655
|
+
var import_node_cron, Scheduler, schedulerInstance;
|
|
18656
|
+
var init_scheduler = __esm({
|
|
18657
|
+
"src/scheduler/scheduler.ts"() {
|
|
18658
|
+
"use strict";
|
|
18659
|
+
import_node_cron = __toESM(require("node-cron"));
|
|
18660
|
+
init_schedule_store();
|
|
18661
|
+
init_schedule_parser();
|
|
18662
|
+
init_logger();
|
|
18663
|
+
init_state_machine_execution_engine();
|
|
18664
|
+
Scheduler = class {
|
|
18665
|
+
store;
|
|
18666
|
+
visorConfig;
|
|
18667
|
+
checkIntervalMs;
|
|
18668
|
+
defaultTimezone;
|
|
18669
|
+
checkInterval = null;
|
|
18670
|
+
cronJobs = /* @__PURE__ */ new Map();
|
|
18671
|
+
oneTimeTimeouts = /* @__PURE__ */ new Map();
|
|
18672
|
+
running = false;
|
|
18673
|
+
engine;
|
|
18674
|
+
outputAdapters = /* @__PURE__ */ new Map();
|
|
18675
|
+
executionContext = {};
|
|
18676
|
+
contextEnricher;
|
|
18677
|
+
// HA fields
|
|
18678
|
+
haConfig;
|
|
18679
|
+
nodeId;
|
|
18680
|
+
heartbeatInterval = null;
|
|
18681
|
+
heldLocks = /* @__PURE__ */ new Map();
|
|
18682
|
+
// scheduleId → lockToken
|
|
18683
|
+
constructor(visorConfig, config) {
|
|
18684
|
+
this.visorConfig = visorConfig;
|
|
18685
|
+
this.checkIntervalMs = config?.checkIntervalMs ?? 6e4;
|
|
18686
|
+
this.defaultTimezone = config?.defaultTimezone ?? "UTC";
|
|
18687
|
+
this.haConfig = config?.ha;
|
|
18688
|
+
this.nodeId = config?.ha?.node_id || `${require("os").hostname()}-${process.pid}`;
|
|
18689
|
+
const storeConfig = {
|
|
18690
|
+
path: config?.storagePath,
|
|
18691
|
+
storage: config?.storage,
|
|
18692
|
+
ha: config?.ha
|
|
18693
|
+
};
|
|
18694
|
+
this.store = ScheduleStore.getInstance(storeConfig, config?.limits);
|
|
18695
|
+
}
|
|
18696
|
+
/**
|
|
18697
|
+
* Set the execution engine (called after construction to avoid circular deps)
|
|
18698
|
+
*/
|
|
18699
|
+
setEngine(engine) {
|
|
18700
|
+
this.engine = engine;
|
|
18701
|
+
}
|
|
18702
|
+
/**
|
|
18703
|
+
* Set the execution context (e.g., Slack client) for workflow executions
|
|
18704
|
+
*/
|
|
18705
|
+
setExecutionContext(context2) {
|
|
18706
|
+
this.executionContext = { ...this.executionContext, ...context2 };
|
|
18707
|
+
}
|
|
18708
|
+
/**
|
|
18709
|
+
* Register an output adapter for a specific type
|
|
18710
|
+
*/
|
|
18711
|
+
registerOutputAdapter(adapter) {
|
|
18712
|
+
this.outputAdapters.set(adapter.type, adapter);
|
|
18713
|
+
logger.debug(`[Scheduler] Registered output adapter: ${adapter.type}`);
|
|
18714
|
+
}
|
|
18715
|
+
/**
|
|
18716
|
+
* Register a context enricher for frontend-specific functionality
|
|
18717
|
+
* This allows frontends to inject thread history, prompt state, etc.
|
|
18718
|
+
*/
|
|
18719
|
+
registerContextEnricher(enricher) {
|
|
18720
|
+
this.contextEnricher = enricher;
|
|
18721
|
+
logger.debug("[Scheduler] Registered context enricher");
|
|
18722
|
+
}
|
|
18723
|
+
/**
|
|
18724
|
+
* Get the schedule store instance
|
|
18725
|
+
*/
|
|
18726
|
+
getStore() {
|
|
18727
|
+
return this.store;
|
|
18728
|
+
}
|
|
18729
|
+
/**
|
|
18730
|
+
* Cancel a schedule's in-memory job (cron or timeout).
|
|
18731
|
+
* Called after deleting from DB to ensure the job doesn't fire again.
|
|
18732
|
+
*/
|
|
18733
|
+
cancelSchedule(scheduleId) {
|
|
18734
|
+
const cronJob = this.cronJobs.get(scheduleId);
|
|
18735
|
+
if (cronJob) {
|
|
18736
|
+
cronJob.stop();
|
|
18737
|
+
this.cronJobs.delete(scheduleId);
|
|
18738
|
+
logger.debug(`[Scheduler] Cancelled cron job for schedule ${scheduleId}`);
|
|
18739
|
+
return;
|
|
18740
|
+
}
|
|
18741
|
+
const timeout = this.oneTimeTimeouts.get(scheduleId);
|
|
18742
|
+
if (timeout) {
|
|
18743
|
+
clearTimeout(timeout);
|
|
18744
|
+
this.oneTimeTimeouts.delete(scheduleId);
|
|
18745
|
+
logger.debug(`[Scheduler] Cancelled timeout for schedule ${scheduleId}`);
|
|
18746
|
+
}
|
|
18747
|
+
}
|
|
18748
|
+
/**
|
|
18749
|
+
* Start the scheduler
|
|
18750
|
+
*/
|
|
18751
|
+
async start() {
|
|
18752
|
+
if (this.running) {
|
|
18753
|
+
logger.warn("[Scheduler] Already running");
|
|
18754
|
+
return;
|
|
18755
|
+
}
|
|
18756
|
+
await this.store.initialize();
|
|
18757
|
+
try {
|
|
18758
|
+
await this.loadStaticCronJobs();
|
|
18759
|
+
} catch (err) {
|
|
18760
|
+
logger.error(
|
|
18761
|
+
`[Scheduler] Failed to load static cron jobs: ${err instanceof Error ? err.message : err}`
|
|
18762
|
+
);
|
|
18763
|
+
}
|
|
18764
|
+
try {
|
|
18765
|
+
await this.restoreSchedules();
|
|
18766
|
+
} catch (err) {
|
|
18767
|
+
logger.error(
|
|
18768
|
+
`[Scheduler] Failed to restore schedules: ${err instanceof Error ? err.message : err}`
|
|
18769
|
+
);
|
|
18770
|
+
}
|
|
18771
|
+
this.checkInterval = setInterval(() => {
|
|
18772
|
+
this.checkDueSchedules().catch((error) => {
|
|
18773
|
+
logger.error(
|
|
18774
|
+
`[Scheduler] Error checking due schedules: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
18775
|
+
);
|
|
18776
|
+
});
|
|
18777
|
+
}, this.checkIntervalMs);
|
|
18778
|
+
if (this.haConfig?.enabled) {
|
|
18779
|
+
this.startHeartbeat();
|
|
18780
|
+
}
|
|
18781
|
+
this.running = true;
|
|
18782
|
+
logger.info("[Scheduler] Started");
|
|
18783
|
+
}
|
|
18784
|
+
/**
|
|
18785
|
+
* Load and schedule static cron jobs from visor config
|
|
18786
|
+
* These are defined in scheduler.cron section and always run regardless of permissions
|
|
18787
|
+
*/
|
|
18788
|
+
async loadStaticCronJobs() {
|
|
18789
|
+
const schedulerCfg = this.visorConfig.scheduler;
|
|
18790
|
+
if (!schedulerCfg?.cron) {
|
|
18791
|
+
return;
|
|
18792
|
+
}
|
|
18793
|
+
const cronJobs = schedulerCfg.cron;
|
|
18794
|
+
let loadedCount = 0;
|
|
18795
|
+
for (const [jobId, job] of Object.entries(cronJobs)) {
|
|
18796
|
+
if (job.enabled === false) {
|
|
18797
|
+
logger.debug(`[Scheduler] Static cron job '${jobId}' is disabled, skipping`);
|
|
18798
|
+
continue;
|
|
18799
|
+
}
|
|
18800
|
+
try {
|
|
18801
|
+
await this.scheduleStaticCronJob(jobId, job);
|
|
18802
|
+
loadedCount++;
|
|
18803
|
+
} catch (error) {
|
|
18804
|
+
logger.error(
|
|
18805
|
+
`[Scheduler] Failed to load static cron job '${jobId}': ${error instanceof Error ? error.message : "Unknown error"}`
|
|
18806
|
+
);
|
|
18807
|
+
}
|
|
18808
|
+
}
|
|
18809
|
+
if (loadedCount > 0) {
|
|
18810
|
+
logger.info(`[Scheduler] Loaded ${loadedCount} static cron job(s) from config`);
|
|
18811
|
+
}
|
|
18812
|
+
}
|
|
18813
|
+
/**
|
|
18814
|
+
* Schedule a static cron job from config
|
|
18815
|
+
*/
|
|
18816
|
+
async scheduleStaticCronJob(jobId, job) {
|
|
18817
|
+
if (!import_node_cron.default.validate(job.schedule)) {
|
|
18818
|
+
throw new Error(`Invalid cron expression: ${job.schedule}`);
|
|
18819
|
+
}
|
|
18820
|
+
const allChecks = Object.keys(this.visorConfig.checks || {});
|
|
18821
|
+
if (!allChecks.includes(job.workflow)) {
|
|
18822
|
+
throw new Error(`Workflow "${job.workflow}" not found in configuration`);
|
|
18823
|
+
}
|
|
18824
|
+
const internalId = `__static_cron__:${jobId}`;
|
|
18825
|
+
const cronJob = import_node_cron.default.schedule(
|
|
18826
|
+
job.schedule,
|
|
18827
|
+
async () => {
|
|
18828
|
+
logger.info(`[Scheduler] Executing static cron job '${jobId}': workflow="${job.workflow}"`);
|
|
18829
|
+
await this.executeStaticCronJob(jobId, job);
|
|
18830
|
+
},
|
|
18831
|
+
{
|
|
18832
|
+
scheduled: true,
|
|
18833
|
+
timezone: job.timezone || this.defaultTimezone
|
|
18834
|
+
}
|
|
18835
|
+
);
|
|
18836
|
+
this.cronJobs.set(internalId, cronJob);
|
|
18837
|
+
try {
|
|
18838
|
+
const nextRun = getNextRunTime(job.schedule, job.timezone || this.defaultTimezone);
|
|
18839
|
+
const description = job.description ? ` (${job.description})` : "";
|
|
18840
|
+
logger.debug(
|
|
18841
|
+
`[Scheduler] Scheduled static cron job '${jobId}'${description}: ${job.schedule} \u2192 ${job.workflow}, next run: ${nextRun.toISOString()}`
|
|
18842
|
+
);
|
|
18843
|
+
} catch {
|
|
18844
|
+
}
|
|
18845
|
+
}
|
|
18846
|
+
/**
|
|
18847
|
+
* Execute a static cron job
|
|
18848
|
+
*/
|
|
18849
|
+
async executeStaticCronJob(jobId, job) {
|
|
18850
|
+
if (this.haConfig?.enabled) {
|
|
18851
|
+
const ttl = this.haConfig.lock_ttl ?? 60;
|
|
18852
|
+
const backend = this.store.getBackend();
|
|
18853
|
+
const lockId = `__static_cron__:${jobId}`;
|
|
18854
|
+
const lockToken = await backend.tryAcquireLock(lockId, this.nodeId, ttl);
|
|
18855
|
+
if (!lockToken) {
|
|
18856
|
+
logger.debug(`[Scheduler] Static cron job '${jobId}' locked by another node, skipping`);
|
|
18857
|
+
return;
|
|
18858
|
+
}
|
|
18859
|
+
this.heldLocks.set(lockId, lockToken);
|
|
18860
|
+
try {
|
|
18861
|
+
await this.doExecuteStaticCronJob(jobId, job);
|
|
18862
|
+
} finally {
|
|
18863
|
+
await backend.releaseLock(lockId, lockToken);
|
|
18864
|
+
this.heldLocks.delete(lockId);
|
|
18865
|
+
}
|
|
18866
|
+
} else {
|
|
18867
|
+
await this.doExecuteStaticCronJob(jobId, job);
|
|
18868
|
+
}
|
|
18869
|
+
}
|
|
18870
|
+
/**
|
|
18871
|
+
* Internal: execute a static cron job (after lock is held in HA mode)
|
|
18872
|
+
*/
|
|
18873
|
+
async doExecuteStaticCronJob(jobId, job) {
|
|
18874
|
+
const startTime = Date.now();
|
|
18875
|
+
let result;
|
|
18876
|
+
const syntheticSchedule = {
|
|
18877
|
+
id: `__static_cron__:${jobId}`,
|
|
18878
|
+
creatorId: "system",
|
|
18879
|
+
creatorContext: "config",
|
|
18880
|
+
timezone: job.timezone || this.defaultTimezone,
|
|
18881
|
+
schedule: job.schedule,
|
|
18882
|
+
isRecurring: true,
|
|
18883
|
+
originalExpression: job.schedule,
|
|
18884
|
+
workflow: job.workflow,
|
|
18885
|
+
workflowInputs: job.inputs,
|
|
18886
|
+
outputContext: job.output ? {
|
|
18887
|
+
type: job.output.type,
|
|
18888
|
+
target: job.output.target,
|
|
18889
|
+
threadId: job.output.thread_id
|
|
18890
|
+
} : void 0,
|
|
18891
|
+
status: "active",
|
|
18892
|
+
createdAt: 0,
|
|
18893
|
+
runCount: 0,
|
|
18894
|
+
failureCount: 0
|
|
18895
|
+
};
|
|
18896
|
+
try {
|
|
18897
|
+
const output = await this.executeWorkflow(syntheticSchedule);
|
|
18898
|
+
result = {
|
|
18899
|
+
success: true,
|
|
18900
|
+
output,
|
|
18901
|
+
executionTimeMs: Date.now() - startTime
|
|
18902
|
+
};
|
|
18903
|
+
logger.info(
|
|
18904
|
+
`[Scheduler] Static cron job '${jobId}' completed in ${result.executionTimeMs}ms`
|
|
18905
|
+
);
|
|
18906
|
+
} catch (error) {
|
|
18907
|
+
const errorMsg = error instanceof Error ? error.message : "Unknown error";
|
|
18908
|
+
result = {
|
|
18909
|
+
success: false,
|
|
18910
|
+
error: errorMsg,
|
|
18911
|
+
executionTimeMs: Date.now() - startTime
|
|
18912
|
+
};
|
|
18913
|
+
logger.error(`[Scheduler] Static cron job '${jobId}' failed: ${errorMsg}`);
|
|
18914
|
+
}
|
|
18915
|
+
await this.sendResult(syntheticSchedule, result);
|
|
18916
|
+
}
|
|
18917
|
+
/**
|
|
18918
|
+
* Stop the scheduler
|
|
18919
|
+
*/
|
|
18920
|
+
async stop() {
|
|
18921
|
+
if (!this.running) {
|
|
18922
|
+
return;
|
|
18923
|
+
}
|
|
18924
|
+
this.stopHeartbeat();
|
|
18925
|
+
if (this.heldLocks.size > 0) {
|
|
18926
|
+
const backend = this.store.getBackend();
|
|
18927
|
+
for (const [scheduleId, lockToken] of this.heldLocks.entries()) {
|
|
18928
|
+
await backend.releaseLock(scheduleId, lockToken).catch(() => {
|
|
18929
|
+
});
|
|
18930
|
+
}
|
|
18931
|
+
this.heldLocks.clear();
|
|
18932
|
+
}
|
|
18933
|
+
if (this.checkInterval) {
|
|
18934
|
+
clearInterval(this.checkInterval);
|
|
18935
|
+
this.checkInterval = null;
|
|
18936
|
+
}
|
|
18937
|
+
for (const [id, job] of this.cronJobs.entries()) {
|
|
18938
|
+
job.stop();
|
|
18939
|
+
logger.debug(`[Scheduler] Stopped cron job for schedule ${id}`);
|
|
18940
|
+
}
|
|
18941
|
+
this.cronJobs.clear();
|
|
18942
|
+
for (const [id, timeout] of this.oneTimeTimeouts.entries()) {
|
|
18943
|
+
clearTimeout(timeout);
|
|
18944
|
+
logger.debug(`[Scheduler] Cleared timeout for schedule ${id}`);
|
|
18945
|
+
}
|
|
18946
|
+
this.oneTimeTimeouts.clear();
|
|
18947
|
+
await this.store.flush();
|
|
18948
|
+
this.running = false;
|
|
18949
|
+
logger.info("[Scheduler] Stopped");
|
|
18950
|
+
}
|
|
18951
|
+
/**
|
|
18952
|
+
* Start the heartbeat timer for renewing HA locks
|
|
18953
|
+
*/
|
|
18954
|
+
startHeartbeat() {
|
|
18955
|
+
if (this.heartbeatInterval) return;
|
|
18956
|
+
const intervalMs = (this.haConfig?.heartbeat_interval ?? 15) * 1e3;
|
|
18957
|
+
this.heartbeatInterval = setInterval(async () => {
|
|
18958
|
+
const backend = this.store.getBackend();
|
|
18959
|
+
const ttl = this.haConfig?.lock_ttl ?? 60;
|
|
18960
|
+
for (const [scheduleId, lockToken] of this.heldLocks.entries()) {
|
|
18961
|
+
const renewed = await backend.renewLock(scheduleId, lockToken, ttl);
|
|
18962
|
+
if (!renewed) {
|
|
18963
|
+
logger.warn(`[Scheduler] Failed to renew lock for schedule ${scheduleId}, lock lost`);
|
|
18964
|
+
this.heldLocks.delete(scheduleId);
|
|
18965
|
+
}
|
|
18966
|
+
}
|
|
18967
|
+
}, intervalMs);
|
|
18968
|
+
logger.debug(`[Scheduler] Heartbeat started (interval: ${intervalMs}ms)`);
|
|
18969
|
+
}
|
|
18970
|
+
/**
|
|
18971
|
+
* Stop the heartbeat timer
|
|
18972
|
+
*/
|
|
18973
|
+
stopHeartbeat() {
|
|
18974
|
+
if (this.heartbeatInterval) {
|
|
18975
|
+
clearInterval(this.heartbeatInterval);
|
|
18976
|
+
this.heartbeatInterval = null;
|
|
18977
|
+
}
|
|
18978
|
+
}
|
|
18979
|
+
/**
|
|
18980
|
+
* Restore schedules from persistent storage
|
|
18981
|
+
*/
|
|
18982
|
+
async restoreSchedules() {
|
|
18983
|
+
const activeSchedules = await this.store.getActiveSchedulesAsync();
|
|
18984
|
+
logger.info(`[Scheduler] Restoring ${activeSchedules.length} active schedules`);
|
|
18985
|
+
for (const schedule of activeSchedules) {
|
|
18986
|
+
try {
|
|
18987
|
+
await this.scheduleExecution(schedule);
|
|
18988
|
+
} catch (error) {
|
|
18989
|
+
logger.error(
|
|
18990
|
+
`[Scheduler] Failed to restore schedule ${schedule.id}: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
18991
|
+
);
|
|
18992
|
+
}
|
|
18993
|
+
}
|
|
18994
|
+
}
|
|
18995
|
+
/**
|
|
18996
|
+
* Schedule a workflow for execution
|
|
18997
|
+
*/
|
|
18998
|
+
async scheduleExecution(schedule) {
|
|
18999
|
+
if (schedule.isRecurring) {
|
|
19000
|
+
await this.scheduleRecurring(schedule);
|
|
19001
|
+
} else {
|
|
19002
|
+
await this.scheduleOneTime(schedule);
|
|
19003
|
+
}
|
|
19004
|
+
}
|
|
19005
|
+
/**
|
|
19006
|
+
* Schedule a recurring workflow using cron
|
|
19007
|
+
*/
|
|
19008
|
+
async scheduleRecurring(schedule) {
|
|
19009
|
+
if (!import_node_cron.default.validate(schedule.schedule)) {
|
|
19010
|
+
logger.error(
|
|
19011
|
+
`[Scheduler] Invalid cron expression for schedule ${schedule.id}: ${schedule.schedule}`
|
|
19012
|
+
);
|
|
19013
|
+
const existingJob2 = this.cronJobs.get(schedule.id);
|
|
19014
|
+
if (existingJob2) {
|
|
19015
|
+
existingJob2.stop();
|
|
19016
|
+
this.cronJobs.delete(schedule.id);
|
|
19017
|
+
}
|
|
19018
|
+
await this.store.updateAsync(schedule.id, {
|
|
19019
|
+
status: "failed",
|
|
19020
|
+
lastError: "Invalid cron expression"
|
|
19021
|
+
});
|
|
19022
|
+
return;
|
|
19023
|
+
}
|
|
19024
|
+
const existingJob = this.cronJobs.get(schedule.id);
|
|
19025
|
+
if (existingJob) {
|
|
19026
|
+
existingJob.stop();
|
|
19027
|
+
}
|
|
19028
|
+
const job = import_node_cron.default.schedule(
|
|
19029
|
+
schedule.schedule,
|
|
19030
|
+
async () => {
|
|
19031
|
+
await this.executeSchedule(schedule);
|
|
19032
|
+
},
|
|
19033
|
+
{
|
|
19034
|
+
scheduled: true,
|
|
19035
|
+
timezone: schedule.timezone || this.defaultTimezone
|
|
19036
|
+
}
|
|
19037
|
+
);
|
|
19038
|
+
this.cronJobs.set(schedule.id, job);
|
|
19039
|
+
try {
|
|
19040
|
+
const nextRun = getNextRunTime(schedule.schedule, schedule.timezone);
|
|
19041
|
+
await this.store.updateAsync(schedule.id, { nextRunAt: nextRun.getTime() });
|
|
19042
|
+
} catch (error) {
|
|
19043
|
+
logger.warn(
|
|
19044
|
+
`[Scheduler] Could not compute next run time for ${schedule.id}: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
19045
|
+
);
|
|
19046
|
+
}
|
|
19047
|
+
logger.debug(`[Scheduler] Scheduled recurring execution ${schedule.id}: ${schedule.schedule}`);
|
|
19048
|
+
}
|
|
19049
|
+
/**
|
|
19050
|
+
* Schedule a one-time workflow using setTimeout
|
|
19051
|
+
*/
|
|
19052
|
+
async scheduleOneTime(schedule) {
|
|
19053
|
+
const existingTimeout = this.oneTimeTimeouts.get(schedule.id);
|
|
19054
|
+
if (existingTimeout) {
|
|
19055
|
+
clearTimeout(existingTimeout);
|
|
19056
|
+
}
|
|
19057
|
+
if (!schedule.runAt) {
|
|
19058
|
+
logger.error(`[Scheduler] One-time schedule ${schedule.id} has no runAt time`);
|
|
19059
|
+
return;
|
|
19060
|
+
}
|
|
19061
|
+
const delayMs = schedule.runAt - Date.now();
|
|
19062
|
+
if (delayMs <= 0) {
|
|
19063
|
+
await this.executeSchedule(schedule);
|
|
19064
|
+
return;
|
|
19065
|
+
}
|
|
19066
|
+
const timeout = setTimeout(async () => {
|
|
19067
|
+
this.oneTimeTimeouts.delete(schedule.id);
|
|
19068
|
+
await this.executeSchedule(schedule);
|
|
19069
|
+
}, delayMs);
|
|
19070
|
+
this.oneTimeTimeouts.set(schedule.id, timeout);
|
|
19071
|
+
logger.debug(
|
|
19072
|
+
`[Scheduler] Scheduled one-time execution ${schedule.id} for ${new Date(schedule.runAt).toISOString()}`
|
|
19073
|
+
);
|
|
19074
|
+
}
|
|
19075
|
+
/**
|
|
19076
|
+
* Check for and execute due schedules
|
|
19077
|
+
*/
|
|
19078
|
+
async checkDueSchedules() {
|
|
19079
|
+
const dueSchedules = await this.store.getDueSchedulesAsync();
|
|
19080
|
+
for (const schedule of dueSchedules) {
|
|
19081
|
+
if (this.cronJobs.has(schedule.id) || this.oneTimeTimeouts.has(schedule.id)) {
|
|
19082
|
+
continue;
|
|
19083
|
+
}
|
|
19084
|
+
if (this.haConfig?.enabled) {
|
|
19085
|
+
const ttl = this.haConfig.lock_ttl ?? 60;
|
|
19086
|
+
const backend = this.store.getBackend();
|
|
19087
|
+
const lockToken = await backend.tryAcquireLock(schedule.id, this.nodeId, ttl);
|
|
19088
|
+
if (!lockToken) {
|
|
19089
|
+
logger.debug(`[Scheduler] Schedule ${schedule.id} locked by another node, skipping`);
|
|
19090
|
+
continue;
|
|
19091
|
+
}
|
|
19092
|
+
this.heldLocks.set(schedule.id, lockToken);
|
|
19093
|
+
try {
|
|
19094
|
+
await this.executeSchedule(schedule);
|
|
19095
|
+
} finally {
|
|
19096
|
+
await backend.releaseLock(schedule.id, lockToken);
|
|
19097
|
+
this.heldLocks.delete(schedule.id);
|
|
19098
|
+
}
|
|
19099
|
+
} else {
|
|
19100
|
+
await this.executeSchedule(schedule);
|
|
19101
|
+
}
|
|
19102
|
+
}
|
|
19103
|
+
}
|
|
19104
|
+
/**
|
|
19105
|
+
* Execute a scheduled workflow
|
|
19106
|
+
*/
|
|
19107
|
+
async executeSchedule(schedule) {
|
|
19108
|
+
try {
|
|
19109
|
+
const fresh = await this.store.getAsync(schedule.id);
|
|
19110
|
+
if (!fresh || fresh.status !== "active") {
|
|
19111
|
+
logger.info(
|
|
19112
|
+
`[Scheduler] Schedule ${schedule.id} is no longer active (${fresh ? fresh.status : "deleted"}), skipping execution`
|
|
19113
|
+
);
|
|
19114
|
+
this.cancelSchedule(schedule.id);
|
|
19115
|
+
return;
|
|
19116
|
+
}
|
|
19117
|
+
} catch {
|
|
19118
|
+
logger.warn(
|
|
19119
|
+
`[Scheduler] Could not verify schedule ${schedule.id} freshness, proceeding with execution`
|
|
19120
|
+
);
|
|
19121
|
+
}
|
|
19122
|
+
const description = schedule.workflow || "reminder";
|
|
19123
|
+
logger.info(`[Scheduler] Executing schedule ${schedule.id}: ${description}`);
|
|
19124
|
+
const startTime = Date.now();
|
|
19125
|
+
let result;
|
|
19126
|
+
try {
|
|
19127
|
+
const output = await this.executeWorkflow(schedule);
|
|
19128
|
+
result = {
|
|
19129
|
+
success: true,
|
|
19130
|
+
output,
|
|
19131
|
+
executionTimeMs: Date.now() - startTime
|
|
19132
|
+
};
|
|
19133
|
+
const now = Date.now();
|
|
19134
|
+
await this.store.updateAsync(schedule.id, {
|
|
19135
|
+
lastRunAt: now,
|
|
19136
|
+
runCount: schedule.runCount + 1,
|
|
19137
|
+
failureCount: 0,
|
|
19138
|
+
// Reset on success
|
|
19139
|
+
lastError: void 0
|
|
19140
|
+
});
|
|
19141
|
+
if (!schedule.isRecurring) {
|
|
19142
|
+
await this.store.updateAsync(schedule.id, { status: "completed" });
|
|
19143
|
+
await this.store.deleteAsync(schedule.id);
|
|
19144
|
+
logger.info(`[Scheduler] One-time schedule ${schedule.id} completed and removed`);
|
|
19145
|
+
} else {
|
|
19146
|
+
try {
|
|
19147
|
+
const nextRun = getNextRunTime(schedule.schedule, schedule.timezone);
|
|
19148
|
+
await this.store.updateAsync(schedule.id, { nextRunAt: nextRun.getTime() });
|
|
19149
|
+
} catch (err) {
|
|
19150
|
+
logger.warn(
|
|
19151
|
+
`[Scheduler] Failed to compute next run time for ${schedule.id}, pausing schedule: ${err instanceof Error ? err.message : err}`
|
|
19152
|
+
);
|
|
19153
|
+
await this.store.updateAsync(schedule.id, {
|
|
19154
|
+
status: "paused",
|
|
19155
|
+
lastError: `Failed to compute next run time: ${err instanceof Error ? err.message : err}`
|
|
19156
|
+
});
|
|
19157
|
+
}
|
|
19158
|
+
}
|
|
19159
|
+
} catch (error) {
|
|
19160
|
+
const errorMsg = error instanceof Error ? error.message : "Unknown error";
|
|
19161
|
+
result = {
|
|
19162
|
+
success: false,
|
|
19163
|
+
error: errorMsg,
|
|
19164
|
+
executionTimeMs: Date.now() - startTime
|
|
19165
|
+
};
|
|
19166
|
+
await this.handleScheduleFailure(schedule, error);
|
|
19167
|
+
}
|
|
19168
|
+
await this.sendResult(schedule, result);
|
|
19169
|
+
}
|
|
19170
|
+
/**
|
|
19171
|
+
* Helper to prepare execution environment - reduces duplication between workflow and reminder execution
|
|
19172
|
+
*/
|
|
19173
|
+
prepareExecution(schedule, cliMessage) {
|
|
19174
|
+
const config = JSON.parse(JSON.stringify(this.visorConfig));
|
|
19175
|
+
const fronts = Array.isArray(config.frontends) ? config.frontends : [];
|
|
19176
|
+
const hasSlackFrontend = fronts.some((f) => f && f.name === "slack");
|
|
19177
|
+
if (!hasSlackFrontend && (cliMessage || schedule.outputContext?.type === "slack")) {
|
|
19178
|
+
fronts.push({ name: "slack" });
|
|
19179
|
+
}
|
|
19180
|
+
config.frontends = fronts;
|
|
19181
|
+
const engine = new StateMachineExecutionEngine();
|
|
19182
|
+
const responseRef = {};
|
|
19183
|
+
const responseCapture = (text) => {
|
|
19184
|
+
responseRef.captured = text;
|
|
19185
|
+
logger.debug(
|
|
19186
|
+
`[Scheduler] Captured AI response for schedule ${schedule.id} (${text.length} chars)`
|
|
19187
|
+
);
|
|
19188
|
+
};
|
|
19189
|
+
engine.setExecutionContext({
|
|
19190
|
+
...this.executionContext,
|
|
19191
|
+
cliMessage,
|
|
19192
|
+
responseCapture
|
|
19193
|
+
});
|
|
19194
|
+
return { engine, config, responseRef };
|
|
19195
|
+
}
|
|
19196
|
+
/**
|
|
19197
|
+
* Execute the workflow for a schedule
|
|
19198
|
+
*/
|
|
19199
|
+
async executeWorkflow(schedule) {
|
|
19200
|
+
if (!schedule.workflow) {
|
|
19201
|
+
return this.executeSimpleReminder(schedule);
|
|
19202
|
+
}
|
|
19203
|
+
if (!this.engine) {
|
|
19204
|
+
logger.warn("[Scheduler] No execution engine set, skipping workflow execution");
|
|
19205
|
+
return { message: "No execution engine configured" };
|
|
19206
|
+
}
|
|
19207
|
+
const allChecks = Object.keys(this.visorConfig.checks || {});
|
|
19208
|
+
if (!allChecks.includes(schedule.workflow)) {
|
|
19209
|
+
throw new Error(`Workflow "${schedule.workflow}" not found in configuration`);
|
|
19210
|
+
}
|
|
19211
|
+
const syntheticPayload = {
|
|
19212
|
+
event: {
|
|
19213
|
+
type: "schedule_triggered",
|
|
19214
|
+
schedule_id: schedule.id,
|
|
19215
|
+
workflow: schedule.workflow,
|
|
19216
|
+
creator_id: schedule.creatorId,
|
|
19217
|
+
creator_context: schedule.creatorContext,
|
|
19218
|
+
timestamp: Date.now()
|
|
19219
|
+
},
|
|
19220
|
+
schedule: {
|
|
19221
|
+
id: schedule.id,
|
|
19222
|
+
workflow: schedule.workflow,
|
|
19223
|
+
workflowInputs: schedule.workflowInputs,
|
|
19224
|
+
isRecurring: schedule.isRecurring,
|
|
19225
|
+
outputContext: schedule.outputContext
|
|
19226
|
+
}
|
|
19227
|
+
};
|
|
19228
|
+
const webhookData = /* @__PURE__ */ new Map();
|
|
19229
|
+
const endpoint = "/scheduler/trigger";
|
|
19230
|
+
webhookData.set(endpoint, syntheticPayload);
|
|
19231
|
+
try {
|
|
19232
|
+
const { refreshGitHubCredentials: refreshGitHubCredentials2 } = await Promise.resolve().then(() => (init_github_auth(), github_auth_exports));
|
|
19233
|
+
await refreshGitHubCredentials2();
|
|
19234
|
+
} catch {
|
|
19235
|
+
}
|
|
19236
|
+
const { engine: runEngine, config: cfgForRun } = this.prepareExecution(schedule);
|
|
19237
|
+
await runEngine.executeChecks({
|
|
19238
|
+
checks: [schedule.workflow],
|
|
19239
|
+
showDetails: true,
|
|
19240
|
+
outputFormat: "json",
|
|
19241
|
+
config: cfgForRun,
|
|
19242
|
+
webhookContext: { webhookData, eventType: "schedule" },
|
|
19243
|
+
debug: process.env.VISOR_DEBUG === "true",
|
|
19244
|
+
inputs: schedule.workflowInputs
|
|
19245
|
+
});
|
|
19246
|
+
return { message: "Workflow completed", workflow: schedule.workflow };
|
|
19247
|
+
}
|
|
19248
|
+
/**
|
|
19249
|
+
* Execute a simple reminder by running it through the visor pipeline
|
|
19250
|
+
* Treats the reminder text as if the user sent it as a message
|
|
19251
|
+
*/
|
|
19252
|
+
async executeSimpleReminder(schedule) {
|
|
19253
|
+
const reminderText = schedule.workflowInputs?.text;
|
|
19254
|
+
if (!reminderText) {
|
|
19255
|
+
return { message: "Reminder!", type: "simple_reminder" };
|
|
19256
|
+
}
|
|
19257
|
+
const allChecks = Object.keys(this.visorConfig.checks || {});
|
|
19258
|
+
if (allChecks.length === 0) {
|
|
19259
|
+
logger.warn("[Scheduler] No checks configured, returning reminder text directly");
|
|
19260
|
+
return { message: reminderText, type: "simple_reminder" };
|
|
19261
|
+
}
|
|
19262
|
+
logger.info(`[Scheduler] Running reminder through visor pipeline (${allChecks.length} checks)`);
|
|
19263
|
+
const channel = schedule.outputContext?.target || "";
|
|
19264
|
+
const threadId = schedule.outputContext?.threadId;
|
|
19265
|
+
let threadMessages = [];
|
|
19266
|
+
let additionalPayload = {};
|
|
19267
|
+
if (this.contextEnricher?.enrichContext) {
|
|
19268
|
+
try {
|
|
19269
|
+
const enriched = await this.contextEnricher.enrichContext(schedule);
|
|
19270
|
+
threadMessages = enriched.threadMessages || [];
|
|
19271
|
+
additionalPayload = enriched.additionalPayload || {};
|
|
19272
|
+
if (threadMessages.length > 0) {
|
|
19273
|
+
logger.debug(
|
|
19274
|
+
`[Scheduler] Context enricher provided ${threadMessages.length} thread messages`
|
|
19275
|
+
);
|
|
19276
|
+
}
|
|
19277
|
+
} catch (error) {
|
|
19278
|
+
logger.warn(
|
|
19279
|
+
`[Scheduler] Context enrichment failed: ${error instanceof Error ? error.message : error}`
|
|
19280
|
+
);
|
|
19281
|
+
}
|
|
19282
|
+
}
|
|
19283
|
+
let contextualReminderText = reminderText;
|
|
19284
|
+
if (schedule.isRecurring && schedule.previousResponse) {
|
|
19285
|
+
contextualReminderText = `${reminderText}
|
|
19286
|
+
|
|
19287
|
+
---
|
|
19288
|
+
**Previous Response (for context):**
|
|
19289
|
+
${schedule.previousResponse}
|
|
19290
|
+
---
|
|
19291
|
+
|
|
19292
|
+
Please provide an updated response based on the reminder above. You may reference or build upon the previous response if relevant.`;
|
|
19293
|
+
}
|
|
19294
|
+
const conversationData = {
|
|
19295
|
+
current: {
|
|
19296
|
+
user: schedule.creatorName || schedule.creatorId,
|
|
19297
|
+
text: contextualReminderText
|
|
19298
|
+
},
|
|
19299
|
+
messages: threadMessages.length > 0 ? [
|
|
19300
|
+
...threadMessages,
|
|
19301
|
+
{ user: schedule.creatorName || schedule.creatorId, text: contextualReminderText }
|
|
19302
|
+
] : [{ user: schedule.creatorName || schedule.creatorId, text: contextualReminderText }]
|
|
19303
|
+
};
|
|
19304
|
+
const syntheticPayload = {
|
|
19305
|
+
event: {
|
|
19306
|
+
type: "message",
|
|
19307
|
+
subtype: "scheduled_reminder",
|
|
19308
|
+
text: contextualReminderText,
|
|
19309
|
+
user: schedule.creatorId,
|
|
19310
|
+
channel,
|
|
19311
|
+
ts: String(Date.now() / 1e3),
|
|
19312
|
+
thread_ts: threadId
|
|
19313
|
+
},
|
|
19314
|
+
// Include both for compatibility (slack_conversation for existing checks, conversation for generic)
|
|
19315
|
+
slack_conversation: conversationData,
|
|
19316
|
+
conversation: conversationData,
|
|
19317
|
+
// Include schedule context for any checks that need it
|
|
19318
|
+
schedule: {
|
|
19319
|
+
id: schedule.id,
|
|
19320
|
+
isReminder: true,
|
|
19321
|
+
creatorId: schedule.creatorId,
|
|
19322
|
+
creatorContext: schedule.creatorContext,
|
|
19323
|
+
previousResponse: schedule.previousResponse
|
|
19324
|
+
},
|
|
19325
|
+
// Merge any additional frontend-specific payload
|
|
19326
|
+
...additionalPayload
|
|
19327
|
+
};
|
|
19328
|
+
const endpoint = this.contextEnricher?.getWebhookEndpoint?.() || this.visorConfig.slack?.endpoint || "/bots/slack/support";
|
|
19329
|
+
const webhookData = /* @__PURE__ */ new Map();
|
|
19330
|
+
webhookData.set(endpoint, syntheticPayload);
|
|
19331
|
+
if (this.contextEnricher?.prepareExecution) {
|
|
19332
|
+
try {
|
|
19333
|
+
await this.contextEnricher.prepareExecution(schedule, reminderText);
|
|
19334
|
+
} catch (error) {
|
|
19335
|
+
logger.warn(
|
|
19336
|
+
`[Scheduler] Execution preparation failed: ${error instanceof Error ? error.message : error}`
|
|
19337
|
+
);
|
|
19338
|
+
}
|
|
19339
|
+
}
|
|
19340
|
+
try {
|
|
19341
|
+
const { refreshGitHubCredentials: refreshGitHubCredentials2 } = await Promise.resolve().then(() => (init_github_auth(), github_auth_exports));
|
|
19342
|
+
await refreshGitHubCredentials2();
|
|
19343
|
+
} catch {
|
|
19344
|
+
}
|
|
19345
|
+
const {
|
|
19346
|
+
engine: runEngine,
|
|
19347
|
+
config: cfgForRun,
|
|
19348
|
+
responseRef
|
|
19349
|
+
} = this.prepareExecution(schedule, reminderText);
|
|
19350
|
+
try {
|
|
19351
|
+
await runEngine.executeChecks({
|
|
19352
|
+
checks: allChecks,
|
|
19353
|
+
showDetails: true,
|
|
19354
|
+
outputFormat: "json",
|
|
19355
|
+
config: cfgForRun,
|
|
19356
|
+
webhookContext: { webhookData, eventType: "schedule" },
|
|
19357
|
+
debug: process.env.VISOR_DEBUG === "true"
|
|
19358
|
+
});
|
|
19359
|
+
if (schedule.isRecurring && responseRef.captured) {
|
|
19360
|
+
await this.store.updateAsync(schedule.id, { previousResponse: responseRef.captured });
|
|
19361
|
+
logger.info(
|
|
19362
|
+
`[Scheduler] Saved previousResponse for recurring schedule ${schedule.id} (${responseRef.captured.length} chars)`
|
|
19363
|
+
);
|
|
19364
|
+
}
|
|
19365
|
+
return {
|
|
19366
|
+
message: "Reminder processed through pipeline",
|
|
19367
|
+
type: "pipeline_executed",
|
|
19368
|
+
reminderText,
|
|
19369
|
+
capturedResponse: responseRef.captured
|
|
19370
|
+
};
|
|
19371
|
+
} catch (error) {
|
|
19372
|
+
logger.error(
|
|
19373
|
+
`[Scheduler] Failed to run reminder through pipeline: ${error instanceof Error ? error.message : error}`
|
|
19374
|
+
);
|
|
19375
|
+
return { message: reminderText, type: "simple_reminder" };
|
|
19376
|
+
}
|
|
19377
|
+
}
|
|
19378
|
+
/**
|
|
19379
|
+
* Handle schedule execution failure
|
|
19380
|
+
*/
|
|
19381
|
+
async handleScheduleFailure(schedule, error) {
|
|
19382
|
+
const errorMsg = error instanceof Error ? error.message : "Unknown error";
|
|
19383
|
+
logger.error(`[Scheduler] Schedule ${schedule.id} failed: ${errorMsg}`);
|
|
19384
|
+
const newFailureCount = schedule.failureCount + 1;
|
|
19385
|
+
await this.store.updateAsync(schedule.id, {
|
|
19386
|
+
failureCount: newFailureCount,
|
|
19387
|
+
lastError: errorMsg
|
|
19388
|
+
});
|
|
19389
|
+
if (newFailureCount >= 3) {
|
|
19390
|
+
await this.store.updateAsync(schedule.id, { status: "failed" });
|
|
19391
|
+
const job = this.cronJobs.get(schedule.id);
|
|
19392
|
+
if (job) {
|
|
19393
|
+
job.stop();
|
|
19394
|
+
this.cronJobs.delete(schedule.id);
|
|
19395
|
+
}
|
|
19396
|
+
logger.warn(`[Scheduler] Schedule ${schedule.id} paused after 3 consecutive failures`);
|
|
19397
|
+
}
|
|
19398
|
+
}
|
|
19399
|
+
/**
|
|
19400
|
+
* Send execution result to the appropriate output adapter
|
|
19401
|
+
*/
|
|
19402
|
+
async sendResult(schedule, result) {
|
|
19403
|
+
const outputType = schedule.outputContext?.type || "none";
|
|
19404
|
+
const adapter = this.outputAdapters.get(outputType);
|
|
19405
|
+
if (!adapter) {
|
|
19406
|
+
if (outputType !== "none") {
|
|
19407
|
+
logger.warn(`[Scheduler] No output adapter registered for type: ${outputType}`);
|
|
19408
|
+
}
|
|
19409
|
+
return;
|
|
19410
|
+
}
|
|
19411
|
+
try {
|
|
19412
|
+
await adapter.sendResult(schedule, result);
|
|
19413
|
+
} catch (error) {
|
|
19414
|
+
logger.error(
|
|
19415
|
+
`[Scheduler] Failed to send result via ${outputType} adapter: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
19416
|
+
);
|
|
19417
|
+
}
|
|
19418
|
+
}
|
|
19419
|
+
/**
|
|
19420
|
+
* Check if scheduler is running
|
|
19421
|
+
*/
|
|
19422
|
+
isRunning() {
|
|
19423
|
+
return this.running;
|
|
19424
|
+
}
|
|
19425
|
+
/**
|
|
19426
|
+
* Get scheduler stats
|
|
19427
|
+
*/
|
|
19428
|
+
async getStats() {
|
|
19429
|
+
return {
|
|
19430
|
+
running: this.running,
|
|
19431
|
+
activeCronJobs: this.cronJobs.size,
|
|
19432
|
+
pendingOneTimeSchedules: this.oneTimeTimeouts.size,
|
|
19433
|
+
storeStats: await this.store.getStatsAsync()
|
|
19434
|
+
};
|
|
19435
|
+
}
|
|
19436
|
+
};
|
|
19437
|
+
}
|
|
19438
|
+
});
|
|
19439
|
+
|
|
18460
19440
|
// src/scheduler/schedule-tool.ts
|
|
18461
19441
|
var schedule_tool_exports = {};
|
|
18462
19442
|
__export(schedule_tool_exports, {
|
|
@@ -18815,6 +19795,10 @@ async function handleCancel(args, context2, store) {
|
|
|
18815
19795
|
};
|
|
18816
19796
|
}
|
|
18817
19797
|
await store.deleteAsync(schedule.id);
|
|
19798
|
+
const scheduler = getScheduler();
|
|
19799
|
+
if (scheduler) {
|
|
19800
|
+
scheduler.cancelSchedule(schedule.id);
|
|
19801
|
+
}
|
|
18818
19802
|
logger.info(`[ScheduleTool] Cancelled schedule ${schedule.id} for user ${context2.userId}`);
|
|
18819
19803
|
return {
|
|
18820
19804
|
success: true,
|
|
@@ -19120,23 +20104,11 @@ var init_schedule_tool = __esm({
|
|
|
19120
20104
|
"use strict";
|
|
19121
20105
|
init_schedule_store();
|
|
19122
20106
|
init_schedule_parser();
|
|
20107
|
+
init_scheduler();
|
|
19123
20108
|
init_logger();
|
|
19124
20109
|
}
|
|
19125
20110
|
});
|
|
19126
20111
|
|
|
19127
|
-
// src/scheduler/scheduler.ts
|
|
19128
|
-
var import_node_cron;
|
|
19129
|
-
var init_scheduler = __esm({
|
|
19130
|
-
"src/scheduler/scheduler.ts"() {
|
|
19131
|
-
"use strict";
|
|
19132
|
-
import_node_cron = __toESM(require("node-cron"));
|
|
19133
|
-
init_schedule_store();
|
|
19134
|
-
init_schedule_parser();
|
|
19135
|
-
init_logger();
|
|
19136
|
-
init_state_machine_execution_engine();
|
|
19137
|
-
}
|
|
19138
|
-
});
|
|
19139
|
-
|
|
19140
20112
|
// src/scheduler/cli-handler.ts
|
|
19141
20113
|
var init_cli_handler = __esm({
|
|
19142
20114
|
"src/scheduler/cli-handler.ts"() {
|
|
@@ -19368,7 +20340,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19368
20340
|
* Returns the actual bound port number
|
|
19369
20341
|
*/
|
|
19370
20342
|
async start() {
|
|
19371
|
-
return new Promise((
|
|
20343
|
+
return new Promise((resolve18, reject) => {
|
|
19372
20344
|
try {
|
|
19373
20345
|
this.server = import_http.default.createServer((req, res) => {
|
|
19374
20346
|
this.handleRequest(req, res).catch((error) => {
|
|
@@ -19402,7 +20374,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19402
20374
|
);
|
|
19403
20375
|
}
|
|
19404
20376
|
this.startKeepalive();
|
|
19405
|
-
|
|
20377
|
+
resolve18(this.port);
|
|
19406
20378
|
});
|
|
19407
20379
|
} catch (error) {
|
|
19408
20380
|
reject(error);
|
|
@@ -19465,7 +20437,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19465
20437
|
logger.debug(
|
|
19466
20438
|
`[CustomToolsSSEServer:${this.sessionId}] Grace period before stop: ${waitMs}ms (activeToolCalls=${this.activeToolCalls})`
|
|
19467
20439
|
);
|
|
19468
|
-
await new Promise((
|
|
20440
|
+
await new Promise((resolve18) => setTimeout(resolve18, waitMs));
|
|
19469
20441
|
}
|
|
19470
20442
|
}
|
|
19471
20443
|
if (this.activeToolCalls > 0) {
|
|
@@ -19474,7 +20446,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19474
20446
|
`[CustomToolsSSEServer:${this.sessionId}] Waiting for ${this.activeToolCalls} active tool call(s) before stop`
|
|
19475
20447
|
);
|
|
19476
20448
|
while (this.activeToolCalls > 0 && Date.now() - startedAt < effectiveDrainTimeoutMs) {
|
|
19477
|
-
await new Promise((
|
|
20449
|
+
await new Promise((resolve18) => setTimeout(resolve18, 250));
|
|
19478
20450
|
}
|
|
19479
20451
|
if (this.activeToolCalls > 0) {
|
|
19480
20452
|
logger.warn(
|
|
@@ -19499,21 +20471,21 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19499
20471
|
}
|
|
19500
20472
|
this.connections.clear();
|
|
19501
20473
|
if (this.server) {
|
|
19502
|
-
await new Promise((
|
|
20474
|
+
await new Promise((resolve18, reject) => {
|
|
19503
20475
|
const timeout = setTimeout(() => {
|
|
19504
20476
|
if (this.debug) {
|
|
19505
20477
|
logger.debug(
|
|
19506
20478
|
`[CustomToolsSSEServer:${this.sessionId}] Force closing server after timeout`
|
|
19507
20479
|
);
|
|
19508
20480
|
}
|
|
19509
|
-
this.server?.close(() =>
|
|
20481
|
+
this.server?.close(() => resolve18());
|
|
19510
20482
|
}, 5e3);
|
|
19511
20483
|
this.server.close((error) => {
|
|
19512
20484
|
clearTimeout(timeout);
|
|
19513
20485
|
if (error) {
|
|
19514
20486
|
reject(error);
|
|
19515
20487
|
} else {
|
|
19516
|
-
|
|
20488
|
+
resolve18();
|
|
19517
20489
|
}
|
|
19518
20490
|
});
|
|
19519
20491
|
});
|
|
@@ -19939,7 +20911,7 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
19939
20911
|
logger.warn(
|
|
19940
20912
|
`[CustomToolsSSEServer:${this.sessionId}] Tool ${toolName} failed (attempt ${attempt + 1}/${retryCount + 1}): ${errorMsg}. Retrying in ${delay}ms`
|
|
19941
20913
|
);
|
|
19942
|
-
await new Promise((
|
|
20914
|
+
await new Promise((resolve18) => setTimeout(resolve18, delay));
|
|
19943
20915
|
attempt++;
|
|
19944
20916
|
}
|
|
19945
20917
|
}
|
|
@@ -20017,6 +20989,13 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
20017
20989
|
// src/utils/tool-resolver.ts
|
|
20018
20990
|
function resolveTools(toolItems, globalTools, logPrefix = "[ToolResolver]") {
|
|
20019
20991
|
const tools = /* @__PURE__ */ new Map();
|
|
20992
|
+
const registry = WorkflowRegistry.getInstance();
|
|
20993
|
+
const registeredWorkflows = registry.list().map((w) => w.id);
|
|
20994
|
+
if (toolItems.some((item) => typeof item !== "string" && isWorkflowToolReference(item))) {
|
|
20995
|
+
logger.info(
|
|
20996
|
+
`${logPrefix} Resolving ${toolItems.length} tool items. WorkflowRegistry has ${registeredWorkflows.length} workflows: [${registeredWorkflows.join(", ")}]`
|
|
20997
|
+
);
|
|
20998
|
+
}
|
|
20020
20999
|
for (const item of toolItems) {
|
|
20021
21000
|
const workflowTool = resolveWorkflowToolFromItem(item);
|
|
20022
21001
|
if (workflowTool) {
|
|
@@ -20033,7 +21012,9 @@ function resolveTools(toolItems, globalTools, logPrefix = "[ToolResolver]") {
|
|
|
20033
21012
|
}
|
|
20034
21013
|
logger.warn(`${logPrefix} Tool '${item}' not found in global tools or workflow registry`);
|
|
20035
21014
|
} else if (isWorkflowToolReference(item)) {
|
|
20036
|
-
logger.warn(
|
|
21015
|
+
logger.warn(
|
|
21016
|
+
`${logPrefix} Workflow '${item.workflow}' referenced but not found in registry. Available: [${registeredWorkflows.join(", ")}]`
|
|
21017
|
+
);
|
|
20037
21018
|
}
|
|
20038
21019
|
}
|
|
20039
21020
|
if (tools.size === 0 && toolItems.length > 0 && !globalTools) {
|
|
@@ -20047,6 +21028,7 @@ var init_tool_resolver = __esm({
|
|
|
20047
21028
|
"src/utils/tool-resolver.ts"() {
|
|
20048
21029
|
"use strict";
|
|
20049
21030
|
init_workflow_tool_executor();
|
|
21031
|
+
init_workflow_registry();
|
|
20050
21032
|
init_logger();
|
|
20051
21033
|
}
|
|
20052
21034
|
});
|
|
@@ -20242,9 +21224,9 @@ var init_ai_check_provider = __esm({
|
|
|
20242
21224
|
} else {
|
|
20243
21225
|
resolvedPath = import_path7.default.resolve(process.cwd(), str);
|
|
20244
21226
|
}
|
|
20245
|
-
const
|
|
21227
|
+
const fs27 = require("fs").promises;
|
|
20246
21228
|
try {
|
|
20247
|
-
const stat2 = await
|
|
21229
|
+
const stat2 = await fs27.stat(resolvedPath);
|
|
20248
21230
|
return stat2.isFile();
|
|
20249
21231
|
} catch {
|
|
20250
21232
|
return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
|
|
@@ -20850,10 +21832,41 @@ ${preview}`);
|
|
|
20850
21832
|
if (Object.keys(dynamicServers).length > 0) {
|
|
20851
21833
|
Object.assign(mcpServers, dynamicServers);
|
|
20852
21834
|
}
|
|
21835
|
+
try {
|
|
21836
|
+
const span = trace.getSpan(context.active());
|
|
21837
|
+
if (span) {
|
|
21838
|
+
span.addEvent("tool_setup.mcp_servers_js", {
|
|
21839
|
+
"tool_setup.server_count": Object.keys(dynamicServers).length,
|
|
21840
|
+
"tool_setup.server_names": Object.keys(dynamicServers).join(","),
|
|
21841
|
+
"tool_setup.workflow_entries": Object.entries(dynamicServers).filter(([, cfg]) => cfg?.workflow).map(([name, cfg]) => `${name}\u2192${cfg.workflow}`).join(",")
|
|
21842
|
+
});
|
|
21843
|
+
}
|
|
21844
|
+
} catch {
|
|
21845
|
+
}
|
|
20853
21846
|
} catch (error) {
|
|
20854
|
-
|
|
20855
|
-
|
|
20856
|
-
|
|
21847
|
+
const errMsg = error instanceof Error ? error.message : "Unknown error";
|
|
21848
|
+
logger.error(`[AICheckProvider] Failed to evaluate ai_mcp_servers_js: ${errMsg}`);
|
|
21849
|
+
try {
|
|
21850
|
+
const span = trace.getSpan(context.active());
|
|
21851
|
+
if (span) {
|
|
21852
|
+
span.addEvent("tool_setup.mcp_servers_js_error", {
|
|
21853
|
+
"tool_setup.error": errMsg
|
|
21854
|
+
});
|
|
21855
|
+
}
|
|
21856
|
+
} catch {
|
|
21857
|
+
}
|
|
21858
|
+
}
|
|
21859
|
+
} else if (mcpServersJsExpr && !_dependencyResults) {
|
|
21860
|
+
try {
|
|
21861
|
+
const span = trace.getSpan(context.active());
|
|
21862
|
+
if (span) {
|
|
21863
|
+
span.addEvent("tool_setup.mcp_servers_js_skipped", {
|
|
21864
|
+
"tool_setup.reason": "no_dependency_results",
|
|
21865
|
+
"tool_setup.has_expr": true,
|
|
21866
|
+
"tool_setup.has_deps": false
|
|
21867
|
+
});
|
|
21868
|
+
}
|
|
21869
|
+
} catch {
|
|
20857
21870
|
}
|
|
20858
21871
|
}
|
|
20859
21872
|
for (const serverConfig of Object.values(mcpServers)) {
|
|
@@ -20991,6 +22004,27 @@ ${preview}`);
|
|
|
20991
22004
|
}
|
|
20992
22005
|
try {
|
|
20993
22006
|
const customTools = this.loadCustomTools(customToolsToLoad, config);
|
|
22007
|
+
try {
|
|
22008
|
+
const span = trace.getSpan(context.active());
|
|
22009
|
+
if (span) {
|
|
22010
|
+
const requestedNames = customToolsToLoad.map(
|
|
22011
|
+
(item) => typeof item === "string" ? item : `${item.name || item.workflow}(wf:${item.workflow})`
|
|
22012
|
+
);
|
|
22013
|
+
span.addEvent("tool_setup.resolution", {
|
|
22014
|
+
"tool_setup.requested_count": customToolsToLoad.length,
|
|
22015
|
+
"tool_setup.requested_names": requestedNames.join(","),
|
|
22016
|
+
"tool_setup.resolved_count": customTools.size,
|
|
22017
|
+
"tool_setup.resolved_names": Array.from(customTools.keys()).join(","),
|
|
22018
|
+
"tool_setup.missing_count": customToolsToLoad.length - customTools.size
|
|
22019
|
+
});
|
|
22020
|
+
}
|
|
22021
|
+
} catch {
|
|
22022
|
+
}
|
|
22023
|
+
if (customToolsToLoad.length > 0 && customTools.size === 0) {
|
|
22024
|
+
logger.warn(
|
|
22025
|
+
`[AICheckProvider] All ${customToolsToLoad.length} custom tools failed to resolve! Requested: ${customToolsToLoad.map((item) => typeof item === "string" ? item : item.workflow).join(", ")}. AI will have no workflow tools available.`
|
|
22026
|
+
);
|
|
22027
|
+
}
|
|
20994
22028
|
if (scheduleToolEnabled) {
|
|
20995
22029
|
const scheduleTool = getScheduleToolDefinition();
|
|
20996
22030
|
customTools.set(scheduleTool.name, scheduleTool);
|
|
@@ -21028,11 +22062,35 @@ ${preview}`);
|
|
|
21028
22062
|
};
|
|
21029
22063
|
}
|
|
21030
22064
|
} catch (error) {
|
|
22065
|
+
const errMsg = error instanceof Error ? error.message : "Unknown error";
|
|
21031
22066
|
logger.error(
|
|
21032
|
-
`[AICheckProvider] Failed to start custom tools SSE server '${customToolsServerName}': ${
|
|
22067
|
+
`[AICheckProvider] Failed to start custom tools SSE server '${customToolsServerName}': ${errMsg}`
|
|
21033
22068
|
);
|
|
22069
|
+
try {
|
|
22070
|
+
const span = trace.getSpan(context.active());
|
|
22071
|
+
if (span) {
|
|
22072
|
+
span.addEvent("tool_setup.sse_server_error", {
|
|
22073
|
+
"tool_setup.error": errMsg,
|
|
22074
|
+
"tool_setup.server_name": customToolsServerName || ""
|
|
22075
|
+
});
|
|
22076
|
+
}
|
|
22077
|
+
} catch {
|
|
22078
|
+
}
|
|
21034
22079
|
}
|
|
21035
22080
|
}
|
|
22081
|
+
try {
|
|
22082
|
+
const span = trace.getSpan(context.active());
|
|
22083
|
+
if (span) {
|
|
22084
|
+
const finalServerNames = Object.keys(mcpServers);
|
|
22085
|
+
span.addEvent("tool_setup.final", {
|
|
22086
|
+
"tool_setup.final_server_count": finalServerNames.length,
|
|
22087
|
+
"tool_setup.final_server_names": finalServerNames.join(","),
|
|
22088
|
+
"tool_setup.has_custom_tools_server": !!customToolsServer,
|
|
22089
|
+
"tool_setup.tools_disabled": !!config.ai?.disableTools
|
|
22090
|
+
});
|
|
22091
|
+
}
|
|
22092
|
+
} catch {
|
|
22093
|
+
}
|
|
21036
22094
|
if (Object.keys(mcpServers).length > 0 && !config.ai?.disableTools) {
|
|
21037
22095
|
aiConfig.mcpServers = mcpServers;
|
|
21038
22096
|
} else if (config.ai?.disableTools) {
|
|
@@ -22231,7 +23289,7 @@ var init_template_context = __esm({
|
|
|
22231
23289
|
});
|
|
22232
23290
|
|
|
22233
23291
|
// src/providers/http-client-provider.ts
|
|
22234
|
-
var
|
|
23292
|
+
var fs15, path18, HttpClientProvider;
|
|
22235
23293
|
var init_http_client_provider = __esm({
|
|
22236
23294
|
"src/providers/http-client-provider.ts"() {
|
|
22237
23295
|
"use strict";
|
|
@@ -22241,8 +23299,8 @@ var init_http_client_provider = __esm({
|
|
|
22241
23299
|
init_sandbox();
|
|
22242
23300
|
init_template_context();
|
|
22243
23301
|
init_logger();
|
|
22244
|
-
|
|
22245
|
-
|
|
23302
|
+
fs15 = __toESM(require("fs"));
|
|
23303
|
+
path18 = __toESM(require("path"));
|
|
22246
23304
|
HttpClientProvider = class extends CheckProvider {
|
|
22247
23305
|
liquid;
|
|
22248
23306
|
sandbox;
|
|
@@ -22337,14 +23395,14 @@ var init_http_client_provider = __esm({
|
|
|
22337
23395
|
const parentContext = context2?._parentContext;
|
|
22338
23396
|
const workingDirectory = parentContext?.workingDirectory;
|
|
22339
23397
|
const workspaceEnabled = parentContext?.workspace?.isEnabled?.();
|
|
22340
|
-
if (workspaceEnabled && workingDirectory && !
|
|
22341
|
-
resolvedOutputFile =
|
|
23398
|
+
if (workspaceEnabled && workingDirectory && !path18.isAbsolute(resolvedOutputFile)) {
|
|
23399
|
+
resolvedOutputFile = path18.join(workingDirectory, resolvedOutputFile);
|
|
22342
23400
|
logger.debug(
|
|
22343
23401
|
`[http_client] Resolved relative output_file to workspace: ${resolvedOutputFile}`
|
|
22344
23402
|
);
|
|
22345
23403
|
}
|
|
22346
|
-
if (skipIfExists &&
|
|
22347
|
-
const stats =
|
|
23404
|
+
if (skipIfExists && fs15.existsSync(resolvedOutputFile)) {
|
|
23405
|
+
const stats = fs15.statSync(resolvedOutputFile);
|
|
22348
23406
|
logger.verbose(`[http_client] File cached: ${resolvedOutputFile} (${stats.size} bytes)`);
|
|
22349
23407
|
return {
|
|
22350
23408
|
issues: [],
|
|
@@ -22555,13 +23613,13 @@ var init_http_client_provider = __esm({
|
|
|
22555
23613
|
]
|
|
22556
23614
|
};
|
|
22557
23615
|
}
|
|
22558
|
-
const parentDir =
|
|
22559
|
-
if (parentDir && !
|
|
22560
|
-
|
|
23616
|
+
const parentDir = path18.dirname(outputFile);
|
|
23617
|
+
if (parentDir && !fs15.existsSync(parentDir)) {
|
|
23618
|
+
fs15.mkdirSync(parentDir, { recursive: true });
|
|
22561
23619
|
}
|
|
22562
23620
|
const arrayBuffer = await response.arrayBuffer();
|
|
22563
23621
|
const buffer = Buffer.from(arrayBuffer);
|
|
22564
|
-
|
|
23622
|
+
fs15.writeFileSync(outputFile, buffer);
|
|
22565
23623
|
const contentType = response.headers.get("content-type") || "application/octet-stream";
|
|
22566
23624
|
logger.verbose(`[http_client] Downloaded: ${outputFile} (${buffer.length} bytes)`);
|
|
22567
23625
|
return {
|
|
@@ -26096,14 +27154,14 @@ var require_util = __commonJS({
|
|
|
26096
27154
|
}
|
|
26097
27155
|
const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
|
|
26098
27156
|
let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
|
|
26099
|
-
let
|
|
27157
|
+
let path31 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
|
|
26100
27158
|
if (origin.endsWith("/")) {
|
|
26101
27159
|
origin = origin.substring(0, origin.length - 1);
|
|
26102
27160
|
}
|
|
26103
|
-
if (
|
|
26104
|
-
|
|
27161
|
+
if (path31 && !path31.startsWith("/")) {
|
|
27162
|
+
path31 = `/${path31}`;
|
|
26105
27163
|
}
|
|
26106
|
-
url = new URL(origin +
|
|
27164
|
+
url = new URL(origin + path31);
|
|
26107
27165
|
}
|
|
26108
27166
|
return url;
|
|
26109
27167
|
}
|
|
@@ -27717,20 +28775,20 @@ var require_parseParams = __commonJS({
|
|
|
27717
28775
|
var require_basename = __commonJS({
|
|
27718
28776
|
"node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
|
|
27719
28777
|
"use strict";
|
|
27720
|
-
module2.exports = function basename4(
|
|
27721
|
-
if (typeof
|
|
28778
|
+
module2.exports = function basename4(path31) {
|
|
28779
|
+
if (typeof path31 !== "string") {
|
|
27722
28780
|
return "";
|
|
27723
28781
|
}
|
|
27724
|
-
for (var i =
|
|
27725
|
-
switch (
|
|
28782
|
+
for (var i = path31.length - 1; i >= 0; --i) {
|
|
28783
|
+
switch (path31.charCodeAt(i)) {
|
|
27726
28784
|
case 47:
|
|
27727
28785
|
// '/'
|
|
27728
28786
|
case 92:
|
|
27729
|
-
|
|
27730
|
-
return
|
|
28787
|
+
path31 = path31.slice(i + 1);
|
|
28788
|
+
return path31 === ".." || path31 === "." ? "" : path31;
|
|
27731
28789
|
}
|
|
27732
28790
|
}
|
|
27733
|
-
return
|
|
28791
|
+
return path31 === ".." || path31 === "." ? "" : path31;
|
|
27734
28792
|
};
|
|
27735
28793
|
}
|
|
27736
28794
|
});
|
|
@@ -29123,8 +30181,8 @@ var require_util2 = __commonJS({
|
|
|
29123
30181
|
function createDeferredPromise() {
|
|
29124
30182
|
let res;
|
|
29125
30183
|
let rej;
|
|
29126
|
-
const promise = new Promise((
|
|
29127
|
-
res =
|
|
30184
|
+
const promise = new Promise((resolve18, reject) => {
|
|
30185
|
+
res = resolve18;
|
|
29128
30186
|
rej = reject;
|
|
29129
30187
|
});
|
|
29130
30188
|
return { promise, resolve: res, reject: rej };
|
|
@@ -30629,8 +31687,8 @@ Content-Type: ${value.type || "application/octet-stream"}\r
|
|
|
30629
31687
|
});
|
|
30630
31688
|
}
|
|
30631
31689
|
});
|
|
30632
|
-
const busboyResolve = new Promise((
|
|
30633
|
-
busboy.on("finish",
|
|
31690
|
+
const busboyResolve = new Promise((resolve18, reject) => {
|
|
31691
|
+
busboy.on("finish", resolve18);
|
|
30634
31692
|
busboy.on("error", (err) => reject(new TypeError(err)));
|
|
30635
31693
|
});
|
|
30636
31694
|
if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk);
|
|
@@ -30761,7 +31819,7 @@ var require_request = __commonJS({
|
|
|
30761
31819
|
}
|
|
30762
31820
|
var Request = class _Request {
|
|
30763
31821
|
constructor(origin, {
|
|
30764
|
-
path:
|
|
31822
|
+
path: path31,
|
|
30765
31823
|
method,
|
|
30766
31824
|
body,
|
|
30767
31825
|
headers,
|
|
@@ -30775,11 +31833,11 @@ var require_request = __commonJS({
|
|
|
30775
31833
|
throwOnError,
|
|
30776
31834
|
expectContinue
|
|
30777
31835
|
}, handler) {
|
|
30778
|
-
if (typeof
|
|
31836
|
+
if (typeof path31 !== "string") {
|
|
30779
31837
|
throw new InvalidArgumentError("path must be a string");
|
|
30780
|
-
} else if (
|
|
31838
|
+
} else if (path31[0] !== "/" && !(path31.startsWith("http://") || path31.startsWith("https://")) && method !== "CONNECT") {
|
|
30781
31839
|
throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
|
|
30782
|
-
} else if (invalidPathRegex.exec(
|
|
31840
|
+
} else if (invalidPathRegex.exec(path31) !== null) {
|
|
30783
31841
|
throw new InvalidArgumentError("invalid request path");
|
|
30784
31842
|
}
|
|
30785
31843
|
if (typeof method !== "string") {
|
|
@@ -30842,7 +31900,7 @@ var require_request = __commonJS({
|
|
|
30842
31900
|
this.completed = false;
|
|
30843
31901
|
this.aborted = false;
|
|
30844
31902
|
this.upgrade = upgrade || null;
|
|
30845
|
-
this.path = query ? util.buildURL(
|
|
31903
|
+
this.path = query ? util.buildURL(path31, query) : path31;
|
|
30846
31904
|
this.origin = origin;
|
|
30847
31905
|
this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
|
|
30848
31906
|
this.blocking = blocking == null ? false : blocking;
|
|
@@ -31164,9 +32222,9 @@ var require_dispatcher_base = __commonJS({
|
|
|
31164
32222
|
}
|
|
31165
32223
|
close(callback) {
|
|
31166
32224
|
if (callback === void 0) {
|
|
31167
|
-
return new Promise((
|
|
32225
|
+
return new Promise((resolve18, reject) => {
|
|
31168
32226
|
this.close((err, data) => {
|
|
31169
|
-
return err ? reject(err) :
|
|
32227
|
+
return err ? reject(err) : resolve18(data);
|
|
31170
32228
|
});
|
|
31171
32229
|
});
|
|
31172
32230
|
}
|
|
@@ -31204,12 +32262,12 @@ var require_dispatcher_base = __commonJS({
|
|
|
31204
32262
|
err = null;
|
|
31205
32263
|
}
|
|
31206
32264
|
if (callback === void 0) {
|
|
31207
|
-
return new Promise((
|
|
32265
|
+
return new Promise((resolve18, reject) => {
|
|
31208
32266
|
this.destroy(err, (err2, data) => {
|
|
31209
32267
|
return err2 ? (
|
|
31210
32268
|
/* istanbul ignore next: should never error */
|
|
31211
32269
|
reject(err2)
|
|
31212
|
-
) :
|
|
32270
|
+
) : resolve18(data);
|
|
31213
32271
|
});
|
|
31214
32272
|
});
|
|
31215
32273
|
}
|
|
@@ -31850,9 +32908,9 @@ var require_RedirectHandler = __commonJS({
|
|
|
31850
32908
|
return this.handler.onHeaders(statusCode, headers, resume, statusText);
|
|
31851
32909
|
}
|
|
31852
32910
|
const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
|
|
31853
|
-
const
|
|
32911
|
+
const path31 = search ? `${pathname}${search}` : pathname;
|
|
31854
32912
|
this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
|
|
31855
|
-
this.opts.path =
|
|
32913
|
+
this.opts.path = path31;
|
|
31856
32914
|
this.opts.origin = origin;
|
|
31857
32915
|
this.opts.maxRedirections = 0;
|
|
31858
32916
|
this.opts.query = null;
|
|
@@ -32271,16 +33329,16 @@ var require_client = __commonJS({
|
|
|
32271
33329
|
return this[kNeedDrain] < 2;
|
|
32272
33330
|
}
|
|
32273
33331
|
async [kClose]() {
|
|
32274
|
-
return new Promise((
|
|
33332
|
+
return new Promise((resolve18) => {
|
|
32275
33333
|
if (!this[kSize]) {
|
|
32276
|
-
|
|
33334
|
+
resolve18(null);
|
|
32277
33335
|
} else {
|
|
32278
|
-
this[kClosedResolve] =
|
|
33336
|
+
this[kClosedResolve] = resolve18;
|
|
32279
33337
|
}
|
|
32280
33338
|
});
|
|
32281
33339
|
}
|
|
32282
33340
|
async [kDestroy](err) {
|
|
32283
|
-
return new Promise((
|
|
33341
|
+
return new Promise((resolve18) => {
|
|
32284
33342
|
const requests = this[kQueue].splice(this[kPendingIdx]);
|
|
32285
33343
|
for (let i = 0; i < requests.length; i++) {
|
|
32286
33344
|
const request = requests[i];
|
|
@@ -32291,7 +33349,7 @@ var require_client = __commonJS({
|
|
|
32291
33349
|
this[kClosedResolve]();
|
|
32292
33350
|
this[kClosedResolve] = null;
|
|
32293
33351
|
}
|
|
32294
|
-
|
|
33352
|
+
resolve18();
|
|
32295
33353
|
};
|
|
32296
33354
|
if (this[kHTTP2Session] != null) {
|
|
32297
33355
|
util.destroy(this[kHTTP2Session], err);
|
|
@@ -32871,7 +33929,7 @@ var require_client = __commonJS({
|
|
|
32871
33929
|
});
|
|
32872
33930
|
}
|
|
32873
33931
|
try {
|
|
32874
|
-
const socket = await new Promise((
|
|
33932
|
+
const socket = await new Promise((resolve18, reject) => {
|
|
32875
33933
|
client[kConnector]({
|
|
32876
33934
|
host,
|
|
32877
33935
|
hostname,
|
|
@@ -32883,7 +33941,7 @@ var require_client = __commonJS({
|
|
|
32883
33941
|
if (err) {
|
|
32884
33942
|
reject(err);
|
|
32885
33943
|
} else {
|
|
32886
|
-
|
|
33944
|
+
resolve18(socket2);
|
|
32887
33945
|
}
|
|
32888
33946
|
});
|
|
32889
33947
|
});
|
|
@@ -33094,7 +34152,7 @@ var require_client = __commonJS({
|
|
|
33094
34152
|
writeH2(client, client[kHTTP2Session], request);
|
|
33095
34153
|
return;
|
|
33096
34154
|
}
|
|
33097
|
-
const { body, method, path:
|
|
34155
|
+
const { body, method, path: path31, host, upgrade, headers, blocking, reset } = request;
|
|
33098
34156
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
33099
34157
|
if (body && typeof body.read === "function") {
|
|
33100
34158
|
body.read(0);
|
|
@@ -33144,7 +34202,7 @@ var require_client = __commonJS({
|
|
|
33144
34202
|
if (blocking) {
|
|
33145
34203
|
socket[kBlocking] = true;
|
|
33146
34204
|
}
|
|
33147
|
-
let header = `${method} ${
|
|
34205
|
+
let header = `${method} ${path31} HTTP/1.1\r
|
|
33148
34206
|
`;
|
|
33149
34207
|
if (typeof host === "string") {
|
|
33150
34208
|
header += `host: ${host}\r
|
|
@@ -33207,7 +34265,7 @@ upgrade: ${upgrade}\r
|
|
|
33207
34265
|
return true;
|
|
33208
34266
|
}
|
|
33209
34267
|
function writeH2(client, session, request) {
|
|
33210
|
-
const { body, method, path:
|
|
34268
|
+
const { body, method, path: path31, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
|
|
33211
34269
|
let headers;
|
|
33212
34270
|
if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim());
|
|
33213
34271
|
else headers = reqHeaders;
|
|
@@ -33250,7 +34308,7 @@ upgrade: ${upgrade}\r
|
|
|
33250
34308
|
});
|
|
33251
34309
|
return true;
|
|
33252
34310
|
}
|
|
33253
|
-
headers[HTTP2_HEADER_PATH] =
|
|
34311
|
+
headers[HTTP2_HEADER_PATH] = path31;
|
|
33254
34312
|
headers[HTTP2_HEADER_SCHEME] = "https";
|
|
33255
34313
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
33256
34314
|
if (body && typeof body.read === "function") {
|
|
@@ -33507,12 +34565,12 @@ upgrade: ${upgrade}\r
|
|
|
33507
34565
|
cb();
|
|
33508
34566
|
}
|
|
33509
34567
|
}
|
|
33510
|
-
const waitForDrain = () => new Promise((
|
|
34568
|
+
const waitForDrain = () => new Promise((resolve18, reject) => {
|
|
33511
34569
|
assert(callback === null);
|
|
33512
34570
|
if (socket[kError]) {
|
|
33513
34571
|
reject(socket[kError]);
|
|
33514
34572
|
} else {
|
|
33515
|
-
callback =
|
|
34573
|
+
callback = resolve18;
|
|
33516
34574
|
}
|
|
33517
34575
|
});
|
|
33518
34576
|
if (client[kHTTPConnVersion] === "h2") {
|
|
@@ -33858,8 +34916,8 @@ var require_pool_base = __commonJS({
|
|
|
33858
34916
|
if (this[kQueue].isEmpty()) {
|
|
33859
34917
|
return Promise.all(this[kClients].map((c) => c.close()));
|
|
33860
34918
|
} else {
|
|
33861
|
-
return new Promise((
|
|
33862
|
-
this[kClosedResolve] =
|
|
34919
|
+
return new Promise((resolve18) => {
|
|
34920
|
+
this[kClosedResolve] = resolve18;
|
|
33863
34921
|
});
|
|
33864
34922
|
}
|
|
33865
34923
|
}
|
|
@@ -34437,7 +35495,7 @@ var require_readable = __commonJS({
|
|
|
34437
35495
|
if (this.closed) {
|
|
34438
35496
|
return Promise.resolve(null);
|
|
34439
35497
|
}
|
|
34440
|
-
return new Promise((
|
|
35498
|
+
return new Promise((resolve18, reject) => {
|
|
34441
35499
|
const signalListenerCleanup = signal ? util.addAbortListener(signal, () => {
|
|
34442
35500
|
this.destroy();
|
|
34443
35501
|
}) : noop;
|
|
@@ -34446,7 +35504,7 @@ var require_readable = __commonJS({
|
|
|
34446
35504
|
if (signal && signal.aborted) {
|
|
34447
35505
|
reject(signal.reason || Object.assign(new Error("The operation was aborted"), { name: "AbortError" }));
|
|
34448
35506
|
} else {
|
|
34449
|
-
|
|
35507
|
+
resolve18(null);
|
|
34450
35508
|
}
|
|
34451
35509
|
}).on("error", noop).on("data", function(chunk) {
|
|
34452
35510
|
limit -= chunk.length;
|
|
@@ -34468,11 +35526,11 @@ var require_readable = __commonJS({
|
|
|
34468
35526
|
throw new TypeError("unusable");
|
|
34469
35527
|
}
|
|
34470
35528
|
assert(!stream[kConsume]);
|
|
34471
|
-
return new Promise((
|
|
35529
|
+
return new Promise((resolve18, reject) => {
|
|
34472
35530
|
stream[kConsume] = {
|
|
34473
35531
|
type,
|
|
34474
35532
|
stream,
|
|
34475
|
-
resolve:
|
|
35533
|
+
resolve: resolve18,
|
|
34476
35534
|
reject,
|
|
34477
35535
|
length: 0,
|
|
34478
35536
|
body: []
|
|
@@ -34507,12 +35565,12 @@ var require_readable = __commonJS({
|
|
|
34507
35565
|
}
|
|
34508
35566
|
}
|
|
34509
35567
|
function consumeEnd(consume2) {
|
|
34510
|
-
const { type, body, resolve:
|
|
35568
|
+
const { type, body, resolve: resolve18, stream, length } = consume2;
|
|
34511
35569
|
try {
|
|
34512
35570
|
if (type === "text") {
|
|
34513
|
-
|
|
35571
|
+
resolve18(toUSVString(Buffer.concat(body)));
|
|
34514
35572
|
} else if (type === "json") {
|
|
34515
|
-
|
|
35573
|
+
resolve18(JSON.parse(Buffer.concat(body)));
|
|
34516
35574
|
} else if (type === "arrayBuffer") {
|
|
34517
35575
|
const dst = new Uint8Array(length);
|
|
34518
35576
|
let pos = 0;
|
|
@@ -34520,12 +35578,12 @@ var require_readable = __commonJS({
|
|
|
34520
35578
|
dst.set(buf, pos);
|
|
34521
35579
|
pos += buf.byteLength;
|
|
34522
35580
|
}
|
|
34523
|
-
|
|
35581
|
+
resolve18(dst.buffer);
|
|
34524
35582
|
} else if (type === "blob") {
|
|
34525
35583
|
if (!Blob2) {
|
|
34526
35584
|
Blob2 = require("buffer").Blob;
|
|
34527
35585
|
}
|
|
34528
|
-
|
|
35586
|
+
resolve18(new Blob2(body, { type: stream[kContentType] }));
|
|
34529
35587
|
}
|
|
34530
35588
|
consumeFinish(consume2);
|
|
34531
35589
|
} catch (err) {
|
|
@@ -34782,9 +35840,9 @@ var require_api_request = __commonJS({
|
|
|
34782
35840
|
};
|
|
34783
35841
|
function request(opts, callback) {
|
|
34784
35842
|
if (callback === void 0) {
|
|
34785
|
-
return new Promise((
|
|
35843
|
+
return new Promise((resolve18, reject) => {
|
|
34786
35844
|
request.call(this, opts, (err, data) => {
|
|
34787
|
-
return err ? reject(err) :
|
|
35845
|
+
return err ? reject(err) : resolve18(data);
|
|
34788
35846
|
});
|
|
34789
35847
|
});
|
|
34790
35848
|
}
|
|
@@ -34957,9 +36015,9 @@ var require_api_stream = __commonJS({
|
|
|
34957
36015
|
};
|
|
34958
36016
|
function stream(opts, factory, callback) {
|
|
34959
36017
|
if (callback === void 0) {
|
|
34960
|
-
return new Promise((
|
|
36018
|
+
return new Promise((resolve18, reject) => {
|
|
34961
36019
|
stream.call(this, opts, factory, (err, data) => {
|
|
34962
|
-
return err ? reject(err) :
|
|
36020
|
+
return err ? reject(err) : resolve18(data);
|
|
34963
36021
|
});
|
|
34964
36022
|
});
|
|
34965
36023
|
}
|
|
@@ -35240,9 +36298,9 @@ var require_api_upgrade = __commonJS({
|
|
|
35240
36298
|
};
|
|
35241
36299
|
function upgrade(opts, callback) {
|
|
35242
36300
|
if (callback === void 0) {
|
|
35243
|
-
return new Promise((
|
|
36301
|
+
return new Promise((resolve18, reject) => {
|
|
35244
36302
|
upgrade.call(this, opts, (err, data) => {
|
|
35245
|
-
return err ? reject(err) :
|
|
36303
|
+
return err ? reject(err) : resolve18(data);
|
|
35246
36304
|
});
|
|
35247
36305
|
});
|
|
35248
36306
|
}
|
|
@@ -35331,9 +36389,9 @@ var require_api_connect = __commonJS({
|
|
|
35331
36389
|
};
|
|
35332
36390
|
function connect(opts, callback) {
|
|
35333
36391
|
if (callback === void 0) {
|
|
35334
|
-
return new Promise((
|
|
36392
|
+
return new Promise((resolve18, reject) => {
|
|
35335
36393
|
connect.call(this, opts, (err, data) => {
|
|
35336
|
-
return err ? reject(err) :
|
|
36394
|
+
return err ? reject(err) : resolve18(data);
|
|
35337
36395
|
});
|
|
35338
36396
|
});
|
|
35339
36397
|
}
|
|
@@ -35493,20 +36551,20 @@ var require_mock_utils = __commonJS({
|
|
|
35493
36551
|
}
|
|
35494
36552
|
return true;
|
|
35495
36553
|
}
|
|
35496
|
-
function safeUrl(
|
|
35497
|
-
if (typeof
|
|
35498
|
-
return
|
|
36554
|
+
function safeUrl(path31) {
|
|
36555
|
+
if (typeof path31 !== "string") {
|
|
36556
|
+
return path31;
|
|
35499
36557
|
}
|
|
35500
|
-
const pathSegments =
|
|
36558
|
+
const pathSegments = path31.split("?");
|
|
35501
36559
|
if (pathSegments.length !== 2) {
|
|
35502
|
-
return
|
|
36560
|
+
return path31;
|
|
35503
36561
|
}
|
|
35504
36562
|
const qp = new URLSearchParams(pathSegments.pop());
|
|
35505
36563
|
qp.sort();
|
|
35506
36564
|
return [...pathSegments, qp.toString()].join("?");
|
|
35507
36565
|
}
|
|
35508
|
-
function matchKey(mockDispatch2, { path:
|
|
35509
|
-
const pathMatch = matchValue(mockDispatch2.path,
|
|
36566
|
+
function matchKey(mockDispatch2, { path: path31, method, body, headers }) {
|
|
36567
|
+
const pathMatch = matchValue(mockDispatch2.path, path31);
|
|
35510
36568
|
const methodMatch = matchValue(mockDispatch2.method, method);
|
|
35511
36569
|
const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
|
|
35512
36570
|
const headersMatch = matchHeaders(mockDispatch2, headers);
|
|
@@ -35524,7 +36582,7 @@ var require_mock_utils = __commonJS({
|
|
|
35524
36582
|
function getMockDispatch(mockDispatches, key) {
|
|
35525
36583
|
const basePath = key.query ? buildURL(key.path, key.query) : key.path;
|
|
35526
36584
|
const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
|
|
35527
|
-
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path:
|
|
36585
|
+
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path31 }) => matchValue(safeUrl(path31), resolvedPath));
|
|
35528
36586
|
if (matchedMockDispatches.length === 0) {
|
|
35529
36587
|
throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
|
|
35530
36588
|
}
|
|
@@ -35561,9 +36619,9 @@ var require_mock_utils = __commonJS({
|
|
|
35561
36619
|
}
|
|
35562
36620
|
}
|
|
35563
36621
|
function buildKey(opts) {
|
|
35564
|
-
const { path:
|
|
36622
|
+
const { path: path31, method, body, headers, query } = opts;
|
|
35565
36623
|
return {
|
|
35566
|
-
path:
|
|
36624
|
+
path: path31,
|
|
35567
36625
|
method,
|
|
35568
36626
|
body,
|
|
35569
36627
|
headers,
|
|
@@ -36012,10 +37070,10 @@ var require_pending_interceptors_formatter = __commonJS({
|
|
|
36012
37070
|
}
|
|
36013
37071
|
format(pendingInterceptors) {
|
|
36014
37072
|
const withPrettyHeaders = pendingInterceptors.map(
|
|
36015
|
-
({ method, path:
|
|
37073
|
+
({ method, path: path31, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
|
|
36016
37074
|
Method: method,
|
|
36017
37075
|
Origin: origin,
|
|
36018
|
-
Path:
|
|
37076
|
+
Path: path31,
|
|
36019
37077
|
"Status code": statusCode,
|
|
36020
37078
|
Persistent: persist ? "\u2705" : "\u274C",
|
|
36021
37079
|
Invocations: timesInvoked,
|
|
@@ -38956,7 +40014,7 @@ var require_fetch = __commonJS({
|
|
|
38956
40014
|
async function dispatch({ body }) {
|
|
38957
40015
|
const url = requestCurrentURL(request);
|
|
38958
40016
|
const agent = fetchParams.controller.dispatcher;
|
|
38959
|
-
return new Promise((
|
|
40017
|
+
return new Promise((resolve18, reject) => agent.dispatch(
|
|
38960
40018
|
{
|
|
38961
40019
|
path: url.pathname + url.search,
|
|
38962
40020
|
origin: url.origin,
|
|
@@ -39032,7 +40090,7 @@ var require_fetch = __commonJS({
|
|
|
39032
40090
|
}
|
|
39033
40091
|
}
|
|
39034
40092
|
}
|
|
39035
|
-
|
|
40093
|
+
resolve18({
|
|
39036
40094
|
status,
|
|
39037
40095
|
statusText,
|
|
39038
40096
|
headersList: headers[kHeadersList],
|
|
@@ -39075,7 +40133,7 @@ var require_fetch = __commonJS({
|
|
|
39075
40133
|
const val = headersList[n + 1].toString("latin1");
|
|
39076
40134
|
headers[kHeadersList].append(key, val);
|
|
39077
40135
|
}
|
|
39078
|
-
|
|
40136
|
+
resolve18({
|
|
39079
40137
|
status,
|
|
39080
40138
|
statusText: STATUS_CODES[status],
|
|
39081
40139
|
headersList: headers[kHeadersList],
|
|
@@ -40636,8 +41694,8 @@ var require_util6 = __commonJS({
|
|
|
40636
41694
|
}
|
|
40637
41695
|
}
|
|
40638
41696
|
}
|
|
40639
|
-
function validateCookiePath(
|
|
40640
|
-
for (const char of
|
|
41697
|
+
function validateCookiePath(path31) {
|
|
41698
|
+
for (const char of path31) {
|
|
40641
41699
|
const code = char.charCodeAt(0);
|
|
40642
41700
|
if (code < 33 || char === ";") {
|
|
40643
41701
|
throw new Error("Invalid cookie path");
|
|
@@ -42317,11 +43375,11 @@ var require_undici = __commonJS({
|
|
|
42317
43375
|
if (typeof opts.path !== "string") {
|
|
42318
43376
|
throw new InvalidArgumentError("invalid opts.path");
|
|
42319
43377
|
}
|
|
42320
|
-
let
|
|
43378
|
+
let path31 = opts.path;
|
|
42321
43379
|
if (!opts.path.startsWith("/")) {
|
|
42322
|
-
|
|
43380
|
+
path31 = `/${path31}`;
|
|
42323
43381
|
}
|
|
42324
|
-
url = new URL(util.parseOrigin(url).origin +
|
|
43382
|
+
url = new URL(util.parseOrigin(url).origin + path31);
|
|
42325
43383
|
} else {
|
|
42326
43384
|
if (!opts) {
|
|
42327
43385
|
opts = typeof url === "object" ? url : {};
|
|
@@ -42870,7 +43928,7 @@ var init_mcp_check_provider = __esm({
|
|
|
42870
43928
|
logger.warn(
|
|
42871
43929
|
`MCP ${transportName} failed (attempt ${attempt + 1}/${maxRetries + 1}), retrying in ${delay}ms: ${error instanceof Error ? error.message : String(error)}`
|
|
42872
43930
|
);
|
|
42873
|
-
await new Promise((
|
|
43931
|
+
await new Promise((resolve18) => setTimeout(resolve18, delay));
|
|
42874
43932
|
attempt += 1;
|
|
42875
43933
|
} finally {
|
|
42876
43934
|
try {
|
|
@@ -43152,7 +44210,7 @@ async function acquirePromptLock() {
|
|
|
43152
44210
|
activePrompt = true;
|
|
43153
44211
|
return;
|
|
43154
44212
|
}
|
|
43155
|
-
await new Promise((
|
|
44213
|
+
await new Promise((resolve18) => waiters.push(resolve18));
|
|
43156
44214
|
activePrompt = true;
|
|
43157
44215
|
}
|
|
43158
44216
|
function releasePromptLock() {
|
|
@@ -43162,7 +44220,7 @@ function releasePromptLock() {
|
|
|
43162
44220
|
}
|
|
43163
44221
|
async function interactivePrompt(options) {
|
|
43164
44222
|
await acquirePromptLock();
|
|
43165
|
-
return new Promise((
|
|
44223
|
+
return new Promise((resolve18, reject) => {
|
|
43166
44224
|
const dbg = process.env.VISOR_DEBUG === "true";
|
|
43167
44225
|
try {
|
|
43168
44226
|
if (dbg) {
|
|
@@ -43249,12 +44307,12 @@ async function interactivePrompt(options) {
|
|
|
43249
44307
|
};
|
|
43250
44308
|
const finish = (value) => {
|
|
43251
44309
|
cleanup();
|
|
43252
|
-
|
|
44310
|
+
resolve18(value);
|
|
43253
44311
|
};
|
|
43254
44312
|
if (options.timeout && options.timeout > 0) {
|
|
43255
44313
|
timeoutId = setTimeout(() => {
|
|
43256
44314
|
cleanup();
|
|
43257
|
-
if (defaultValue !== void 0) return
|
|
44315
|
+
if (defaultValue !== void 0) return resolve18(defaultValue);
|
|
43258
44316
|
return reject(new Error("Input timeout"));
|
|
43259
44317
|
}, options.timeout);
|
|
43260
44318
|
}
|
|
@@ -43386,7 +44444,7 @@ async function interactivePrompt(options) {
|
|
|
43386
44444
|
});
|
|
43387
44445
|
}
|
|
43388
44446
|
async function simplePrompt(prompt) {
|
|
43389
|
-
return new Promise((
|
|
44447
|
+
return new Promise((resolve18) => {
|
|
43390
44448
|
const rl = readline.createInterface({
|
|
43391
44449
|
input: process.stdin,
|
|
43392
44450
|
output: process.stdout
|
|
@@ -43402,7 +44460,7 @@ async function simplePrompt(prompt) {
|
|
|
43402
44460
|
rl.question(`${prompt}
|
|
43403
44461
|
> `, (answer) => {
|
|
43404
44462
|
rl.close();
|
|
43405
|
-
|
|
44463
|
+
resolve18(answer.trim());
|
|
43406
44464
|
});
|
|
43407
44465
|
});
|
|
43408
44466
|
}
|
|
@@ -43570,7 +44628,7 @@ function isStdinAvailable() {
|
|
|
43570
44628
|
return !process.stdin.isTTY;
|
|
43571
44629
|
}
|
|
43572
44630
|
async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
43573
|
-
return new Promise((
|
|
44631
|
+
return new Promise((resolve18, reject) => {
|
|
43574
44632
|
let data = "";
|
|
43575
44633
|
let timeoutId;
|
|
43576
44634
|
if (timeout) {
|
|
@@ -43597,7 +44655,7 @@ async function readStdin(timeout, maxSize = 1024 * 1024) {
|
|
|
43597
44655
|
};
|
|
43598
44656
|
const onEnd = () => {
|
|
43599
44657
|
cleanup();
|
|
43600
|
-
|
|
44658
|
+
resolve18(data.trim());
|
|
43601
44659
|
};
|
|
43602
44660
|
const onError = (err) => {
|
|
43603
44661
|
cleanup();
|
|
@@ -43627,7 +44685,7 @@ var init_stdin_reader = __esm({
|
|
|
43627
44685
|
});
|
|
43628
44686
|
|
|
43629
44687
|
// src/providers/human-input-check-provider.ts
|
|
43630
|
-
var
|
|
44688
|
+
var fs17, path20, HumanInputCheckProvider;
|
|
43631
44689
|
var init_human_input_check_provider = __esm({
|
|
43632
44690
|
"src/providers/human-input-check-provider.ts"() {
|
|
43633
44691
|
"use strict";
|
|
@@ -43636,8 +44694,8 @@ var init_human_input_check_provider = __esm({
|
|
|
43636
44694
|
init_prompt_state();
|
|
43637
44695
|
init_liquid_extensions();
|
|
43638
44696
|
init_stdin_reader();
|
|
43639
|
-
|
|
43640
|
-
|
|
44697
|
+
fs17 = __toESM(require("fs"));
|
|
44698
|
+
path20 = __toESM(require("path"));
|
|
43641
44699
|
HumanInputCheckProvider = class _HumanInputCheckProvider extends CheckProvider {
|
|
43642
44700
|
liquid;
|
|
43643
44701
|
/**
|
|
@@ -43811,19 +44869,19 @@ var init_human_input_check_provider = __esm({
|
|
|
43811
44869
|
*/
|
|
43812
44870
|
async tryReadFile(filePath) {
|
|
43813
44871
|
try {
|
|
43814
|
-
const absolutePath =
|
|
43815
|
-
const normalizedPath =
|
|
44872
|
+
const absolutePath = path20.isAbsolute(filePath) ? filePath : path20.resolve(process.cwd(), filePath);
|
|
44873
|
+
const normalizedPath = path20.normalize(absolutePath);
|
|
43816
44874
|
const cwd = process.cwd();
|
|
43817
|
-
if (!normalizedPath.startsWith(cwd +
|
|
44875
|
+
if (!normalizedPath.startsWith(cwd + path20.sep) && normalizedPath !== cwd) {
|
|
43818
44876
|
return null;
|
|
43819
44877
|
}
|
|
43820
44878
|
try {
|
|
43821
|
-
await
|
|
43822
|
-
const stats = await
|
|
44879
|
+
await fs17.promises.access(normalizedPath, fs17.constants.R_OK);
|
|
44880
|
+
const stats = await fs17.promises.stat(normalizedPath);
|
|
43823
44881
|
if (!stats.isFile()) {
|
|
43824
44882
|
return null;
|
|
43825
44883
|
}
|
|
43826
|
-
const content = await
|
|
44884
|
+
const content = await fs17.promises.readFile(normalizedPath, "utf-8");
|
|
43827
44885
|
return content.trim();
|
|
43828
44886
|
} catch {
|
|
43829
44887
|
return null;
|
|
@@ -44969,13 +46027,13 @@ var init_script_check_provider = __esm({
|
|
|
44969
46027
|
});
|
|
44970
46028
|
|
|
44971
46029
|
// src/utils/worktree-manager.ts
|
|
44972
|
-
var
|
|
46030
|
+
var fs18, fsp, path21, crypto, WorktreeManager, worktreeManager;
|
|
44973
46031
|
var init_worktree_manager = __esm({
|
|
44974
46032
|
"src/utils/worktree-manager.ts"() {
|
|
44975
46033
|
"use strict";
|
|
44976
|
-
|
|
46034
|
+
fs18 = __toESM(require("fs"));
|
|
44977
46035
|
fsp = __toESM(require("fs/promises"));
|
|
44978
|
-
|
|
46036
|
+
path21 = __toESM(require("path"));
|
|
44979
46037
|
crypto = __toESM(require("crypto"));
|
|
44980
46038
|
init_command_executor();
|
|
44981
46039
|
init_logger();
|
|
@@ -44991,7 +46049,7 @@ var init_worktree_manager = __esm({
|
|
|
44991
46049
|
} catch {
|
|
44992
46050
|
cwd = "/tmp";
|
|
44993
46051
|
}
|
|
44994
|
-
const defaultBasePath = process.env.VISOR_WORKTREE_PATH ||
|
|
46052
|
+
const defaultBasePath = process.env.VISOR_WORKTREE_PATH || path21.join(cwd, ".visor", "worktrees");
|
|
44995
46053
|
this.config = {
|
|
44996
46054
|
enabled: true,
|
|
44997
46055
|
base_path: defaultBasePath,
|
|
@@ -45028,20 +46086,20 @@ var init_worktree_manager = __esm({
|
|
|
45028
46086
|
}
|
|
45029
46087
|
const reposDir = this.getReposDir();
|
|
45030
46088
|
const worktreesDir = this.getWorktreesDir();
|
|
45031
|
-
if (!
|
|
45032
|
-
|
|
46089
|
+
if (!fs18.existsSync(reposDir)) {
|
|
46090
|
+
fs18.mkdirSync(reposDir, { recursive: true });
|
|
45033
46091
|
logger.debug(`Created repos directory: ${reposDir}`);
|
|
45034
46092
|
}
|
|
45035
|
-
if (!
|
|
45036
|
-
|
|
46093
|
+
if (!fs18.existsSync(worktreesDir)) {
|
|
46094
|
+
fs18.mkdirSync(worktreesDir, { recursive: true });
|
|
45037
46095
|
logger.debug(`Created worktrees directory: ${worktreesDir}`);
|
|
45038
46096
|
}
|
|
45039
46097
|
}
|
|
45040
46098
|
getReposDir() {
|
|
45041
|
-
return
|
|
46099
|
+
return path21.join(this.config.base_path, "repos");
|
|
45042
46100
|
}
|
|
45043
46101
|
getWorktreesDir() {
|
|
45044
|
-
return
|
|
46102
|
+
return path21.join(this.config.base_path, "worktrees");
|
|
45045
46103
|
}
|
|
45046
46104
|
/**
|
|
45047
46105
|
* Generate a deterministic worktree ID based on repository and ref.
|
|
@@ -45059,8 +46117,8 @@ var init_worktree_manager = __esm({
|
|
|
45059
46117
|
async getOrCreateBareRepo(repository, repoUrl, _token, fetchDepth, cloneTimeoutMs) {
|
|
45060
46118
|
const reposDir = this.getReposDir();
|
|
45061
46119
|
const repoName = repository.replace(/\//g, "-");
|
|
45062
|
-
const bareRepoPath =
|
|
45063
|
-
if (
|
|
46120
|
+
const bareRepoPath = path21.join(reposDir, `${repoName}.git`);
|
|
46121
|
+
if (fs18.existsSync(bareRepoPath)) {
|
|
45064
46122
|
logger.debug(`Bare repository already exists: ${bareRepoPath}`);
|
|
45065
46123
|
const verifyResult = await this.verifyBareRepoRemote(bareRepoPath, repoUrl);
|
|
45066
46124
|
if (verifyResult === "timeout") {
|
|
@@ -45207,11 +46265,11 @@ var init_worktree_manager = __esm({
|
|
|
45207
46265
|
options.cloneTimeoutMs
|
|
45208
46266
|
);
|
|
45209
46267
|
const worktreeId = this.generateWorktreeId(repository, ref);
|
|
45210
|
-
let worktreePath = options.workingDirectory ||
|
|
46268
|
+
let worktreePath = options.workingDirectory || path21.join(this.getWorktreesDir(), worktreeId);
|
|
45211
46269
|
if (options.workingDirectory) {
|
|
45212
46270
|
worktreePath = this.validatePath(options.workingDirectory);
|
|
45213
46271
|
}
|
|
45214
|
-
if (
|
|
46272
|
+
if (fs18.existsSync(worktreePath)) {
|
|
45215
46273
|
logger.debug(`Worktree already exists: ${worktreePath}`);
|
|
45216
46274
|
const metadata2 = await this.loadMetadata(worktreePath);
|
|
45217
46275
|
if (metadata2) {
|
|
@@ -45452,31 +46510,48 @@ var init_worktree_manager = __esm({
|
|
|
45452
46510
|
const result = await this.executeGitCommand(removeCmd, { timeout: 3e4 });
|
|
45453
46511
|
if (result.exitCode !== 0) {
|
|
45454
46512
|
logger.warn(`Failed to remove worktree via git: ${result.stderr}`);
|
|
45455
|
-
if (
|
|
46513
|
+
if (fs18.existsSync(worktree_path)) {
|
|
45456
46514
|
logger.debug(`Manually removing worktree directory`);
|
|
45457
|
-
|
|
46515
|
+
fs18.rmSync(worktree_path, { recursive: true, force: true });
|
|
46516
|
+
}
|
|
46517
|
+
}
|
|
46518
|
+
const metadataPath = this.getMetadataPath(worktree_path);
|
|
46519
|
+
try {
|
|
46520
|
+
if (fs18.existsSync(metadataPath)) {
|
|
46521
|
+
fs18.unlinkSync(metadataPath);
|
|
45458
46522
|
}
|
|
46523
|
+
} catch {
|
|
45459
46524
|
}
|
|
45460
46525
|
this.activeWorktrees.delete(worktreeId);
|
|
45461
46526
|
logger.info(`Successfully removed worktree: ${worktreeId}`);
|
|
45462
46527
|
}
|
|
46528
|
+
/**
|
|
46529
|
+
* Get the metadata file path for a worktree.
|
|
46530
|
+
* Stored as a sibling file OUTSIDE the worktree to avoid being committed
|
|
46531
|
+
* when agents run `git add .` inside the checked-out repo.
|
|
46532
|
+
*/
|
|
46533
|
+
getMetadataPath(worktreePath) {
|
|
46534
|
+
return worktreePath.replace(/\/?$/, "") + ".metadata.json";
|
|
46535
|
+
}
|
|
45463
46536
|
/**
|
|
45464
46537
|
* Save worktree metadata
|
|
45465
46538
|
*/
|
|
45466
46539
|
async saveMetadata(worktreePath, metadata) {
|
|
45467
|
-
const metadataPath =
|
|
45468
|
-
|
|
46540
|
+
const metadataPath = this.getMetadataPath(worktreePath);
|
|
46541
|
+
fs18.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2), "utf8");
|
|
45469
46542
|
}
|
|
45470
46543
|
/**
|
|
45471
46544
|
* Load worktree metadata
|
|
45472
46545
|
*/
|
|
45473
46546
|
async loadMetadata(worktreePath) {
|
|
45474
|
-
const metadataPath =
|
|
45475
|
-
|
|
46547
|
+
const metadataPath = this.getMetadataPath(worktreePath);
|
|
46548
|
+
const legacyPath = path21.join(worktreePath, ".visor-metadata.json");
|
|
46549
|
+
const pathToRead = fs18.existsSync(metadataPath) ? metadataPath : fs18.existsSync(legacyPath) ? legacyPath : null;
|
|
46550
|
+
if (!pathToRead) {
|
|
45476
46551
|
return null;
|
|
45477
46552
|
}
|
|
45478
46553
|
try {
|
|
45479
|
-
const content =
|
|
46554
|
+
const content = fs18.readFileSync(pathToRead, "utf8");
|
|
45480
46555
|
return JSON.parse(content);
|
|
45481
46556
|
} catch (error) {
|
|
45482
46557
|
logger.warn(`Failed to load metadata: ${error}`);
|
|
@@ -45488,14 +46563,14 @@ var init_worktree_manager = __esm({
|
|
|
45488
46563
|
*/
|
|
45489
46564
|
async listWorktrees() {
|
|
45490
46565
|
const worktreesDir = this.getWorktreesDir();
|
|
45491
|
-
if (!
|
|
46566
|
+
if (!fs18.existsSync(worktreesDir)) {
|
|
45492
46567
|
return [];
|
|
45493
46568
|
}
|
|
45494
|
-
const entries =
|
|
46569
|
+
const entries = fs18.readdirSync(worktreesDir, { withFileTypes: true });
|
|
45495
46570
|
const worktrees = [];
|
|
45496
46571
|
for (const entry of entries) {
|
|
45497
46572
|
if (!entry.isDirectory()) continue;
|
|
45498
|
-
const worktreePath =
|
|
46573
|
+
const worktreePath = path21.join(worktreesDir, entry.name);
|
|
45499
46574
|
const metadata = await this.loadMetadata(worktreePath);
|
|
45500
46575
|
if (metadata) {
|
|
45501
46576
|
worktrees.push({
|
|
@@ -45627,8 +46702,8 @@ var init_worktree_manager = __esm({
|
|
|
45627
46702
|
* Validate path to prevent directory traversal
|
|
45628
46703
|
*/
|
|
45629
46704
|
validatePath(userPath) {
|
|
45630
|
-
const resolvedPath =
|
|
45631
|
-
if (!
|
|
46705
|
+
const resolvedPath = path21.resolve(userPath);
|
|
46706
|
+
if (!path21.isAbsolute(resolvedPath)) {
|
|
45632
46707
|
throw new Error("Path must be absolute");
|
|
45633
46708
|
}
|
|
45634
46709
|
const sensitivePatterns = [
|
|
@@ -47657,23 +48732,23 @@ __export(renderer_schema_exports, {
|
|
|
47657
48732
|
});
|
|
47658
48733
|
async function loadRendererSchema(name) {
|
|
47659
48734
|
try {
|
|
47660
|
-
const
|
|
47661
|
-
const
|
|
48735
|
+
const fs27 = await import("fs/promises");
|
|
48736
|
+
const path31 = await import("path");
|
|
47662
48737
|
const sanitized = String(name).replace(/[^a-zA-Z0-9-]/g, "");
|
|
47663
48738
|
if (!sanitized) return void 0;
|
|
47664
48739
|
const candidates = [
|
|
47665
48740
|
// When bundled with ncc, __dirname is dist/ and output/ is at dist/output/
|
|
47666
|
-
|
|
48741
|
+
path31.join(__dirname, "output", sanitized, "schema.json"),
|
|
47667
48742
|
// When running from source, __dirname is src/state-machine/dispatch/ and output/ is at output/
|
|
47668
|
-
|
|
48743
|
+
path31.join(__dirname, "..", "..", "output", sanitized, "schema.json"),
|
|
47669
48744
|
// When running from a checkout with output/ folder copied to CWD
|
|
47670
|
-
|
|
48745
|
+
path31.join(process.cwd(), "output", sanitized, "schema.json"),
|
|
47671
48746
|
// Fallback: cwd/dist/output/
|
|
47672
|
-
|
|
48747
|
+
path31.join(process.cwd(), "dist", "output", sanitized, "schema.json")
|
|
47673
48748
|
];
|
|
47674
48749
|
for (const p of candidates) {
|
|
47675
48750
|
try {
|
|
47676
|
-
const raw = await
|
|
48751
|
+
const raw = await fs27.readFile(p, "utf-8");
|
|
47677
48752
|
return JSON.parse(raw);
|
|
47678
48753
|
} catch {
|
|
47679
48754
|
}
|
|
@@ -50092,8 +51167,8 @@ function updateStats2(results, state, isForEachIteration = false) {
|
|
|
50092
51167
|
async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
50093
51168
|
try {
|
|
50094
51169
|
const { createExtendedLiquid: createExtendedLiquid2 } = await Promise.resolve().then(() => (init_liquid_extensions(), liquid_extensions_exports));
|
|
50095
|
-
const
|
|
50096
|
-
const
|
|
51170
|
+
const fs27 = await import("fs/promises");
|
|
51171
|
+
const path31 = await import("path");
|
|
50097
51172
|
const schemaRaw = checkConfig.schema || "plain";
|
|
50098
51173
|
const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
|
|
50099
51174
|
let templateContent;
|
|
@@ -50102,27 +51177,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
|
50102
51177
|
logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
|
|
50103
51178
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
50104
51179
|
const file = String(checkConfig.template.file);
|
|
50105
|
-
const resolved =
|
|
50106
|
-
templateContent = await
|
|
51180
|
+
const resolved = path31.resolve(process.cwd(), file);
|
|
51181
|
+
templateContent = await fs27.readFile(resolved, "utf-8");
|
|
50107
51182
|
logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
|
|
50108
51183
|
} else if (schema && schema !== "plain") {
|
|
50109
51184
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
50110
51185
|
if (sanitized) {
|
|
50111
51186
|
const candidatePaths = [
|
|
50112
|
-
|
|
51187
|
+
path31.join(__dirname, "output", sanitized, "template.liquid"),
|
|
50113
51188
|
// bundled: dist/output/
|
|
50114
|
-
|
|
51189
|
+
path31.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
50115
51190
|
// source (from state-machine/states)
|
|
50116
|
-
|
|
51191
|
+
path31.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
|
|
50117
51192
|
// source (alternate)
|
|
50118
|
-
|
|
51193
|
+
path31.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
50119
51194
|
// fallback: cwd/output/
|
|
50120
|
-
|
|
51195
|
+
path31.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
50121
51196
|
// fallback: cwd/dist/output/
|
|
50122
51197
|
];
|
|
50123
51198
|
for (const p of candidatePaths) {
|
|
50124
51199
|
try {
|
|
50125
|
-
templateContent = await
|
|
51200
|
+
templateContent = await fs27.readFile(p, "utf-8");
|
|
50126
51201
|
if (templateContent) {
|
|
50127
51202
|
logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
|
|
50128
51203
|
break;
|
|
@@ -51645,13 +52720,13 @@ var init_sandbox_manager = __esm({
|
|
|
51645
52720
|
});
|
|
51646
52721
|
|
|
51647
52722
|
// src/utils/file-exclusion.ts
|
|
51648
|
-
var import_ignore,
|
|
52723
|
+
var import_ignore, fs19, path22, DEFAULT_EXCLUSION_PATTERNS, FileExclusionHelper;
|
|
51649
52724
|
var init_file_exclusion = __esm({
|
|
51650
52725
|
"src/utils/file-exclusion.ts"() {
|
|
51651
52726
|
"use strict";
|
|
51652
52727
|
import_ignore = __toESM(require("ignore"));
|
|
51653
|
-
|
|
51654
|
-
|
|
52728
|
+
fs19 = __toESM(require("fs"));
|
|
52729
|
+
path22 = __toESM(require("path"));
|
|
51655
52730
|
DEFAULT_EXCLUSION_PATTERNS = [
|
|
51656
52731
|
"dist/",
|
|
51657
52732
|
"build/",
|
|
@@ -51670,7 +52745,7 @@ var init_file_exclusion = __esm({
|
|
|
51670
52745
|
* @param additionalPatterns - Additional patterns to include (optional, defaults to common build artifacts)
|
|
51671
52746
|
*/
|
|
51672
52747
|
constructor(workingDirectory = process.cwd(), additionalPatterns = DEFAULT_EXCLUSION_PATTERNS) {
|
|
51673
|
-
const normalizedPath =
|
|
52748
|
+
const normalizedPath = path22.resolve(workingDirectory);
|
|
51674
52749
|
if (normalizedPath.includes("\0")) {
|
|
51675
52750
|
throw new Error("Invalid workingDirectory: contains null bytes");
|
|
51676
52751
|
}
|
|
@@ -51682,11 +52757,11 @@ var init_file_exclusion = __esm({
|
|
|
51682
52757
|
* @param additionalPatterns - Additional patterns to add to gitignore rules
|
|
51683
52758
|
*/
|
|
51684
52759
|
loadGitignore(additionalPatterns) {
|
|
51685
|
-
const gitignorePath =
|
|
51686
|
-
const resolvedWorkingDir =
|
|
52760
|
+
const gitignorePath = path22.resolve(this.workingDirectory, ".gitignore");
|
|
52761
|
+
const resolvedWorkingDir = path22.resolve(this.workingDirectory);
|
|
51687
52762
|
try {
|
|
51688
|
-
const relativePath =
|
|
51689
|
-
if (relativePath.startsWith("..") ||
|
|
52763
|
+
const relativePath = path22.relative(resolvedWorkingDir, gitignorePath);
|
|
52764
|
+
if (relativePath.startsWith("..") || path22.isAbsolute(relativePath)) {
|
|
51690
52765
|
throw new Error("Invalid gitignore path: path traversal detected");
|
|
51691
52766
|
}
|
|
51692
52767
|
if (relativePath !== ".gitignore") {
|
|
@@ -51696,8 +52771,8 @@ var init_file_exclusion = __esm({
|
|
|
51696
52771
|
if (additionalPatterns && additionalPatterns.length > 0) {
|
|
51697
52772
|
this.gitignore.add(additionalPatterns);
|
|
51698
52773
|
}
|
|
51699
|
-
if (
|
|
51700
|
-
const rawContent =
|
|
52774
|
+
if (fs19.existsSync(gitignorePath)) {
|
|
52775
|
+
const rawContent = fs19.readFileSync(gitignorePath, "utf8");
|
|
51701
52776
|
const gitignoreContent = rawContent.replace(/[\r\n]+/g, "\n").replace(/[\x00-\x09\x0B-\x1F\x7F]/g, "").split("\n").filter((line) => line.length < 1e3).join("\n").trim();
|
|
51702
52777
|
this.gitignore.add(gitignoreContent);
|
|
51703
52778
|
if (process.env.VISOR_DEBUG === "true") {
|
|
@@ -51729,13 +52804,13 @@ var git_repository_analyzer_exports = {};
|
|
|
51729
52804
|
__export(git_repository_analyzer_exports, {
|
|
51730
52805
|
GitRepositoryAnalyzer: () => GitRepositoryAnalyzer
|
|
51731
52806
|
});
|
|
51732
|
-
var import_simple_git2,
|
|
52807
|
+
var import_simple_git2, path23, fs20, MAX_PATCH_SIZE, GitRepositoryAnalyzer;
|
|
51733
52808
|
var init_git_repository_analyzer = __esm({
|
|
51734
52809
|
"src/git-repository-analyzer.ts"() {
|
|
51735
52810
|
"use strict";
|
|
51736
52811
|
import_simple_git2 = require("simple-git");
|
|
51737
|
-
|
|
51738
|
-
|
|
52812
|
+
path23 = __toESM(require("path"));
|
|
52813
|
+
fs20 = __toESM(require("fs"));
|
|
51739
52814
|
init_file_exclusion();
|
|
51740
52815
|
MAX_PATCH_SIZE = 50 * 1024;
|
|
51741
52816
|
GitRepositoryAnalyzer = class {
|
|
@@ -51924,7 +52999,7 @@ ${file.patch}`).join("\n\n");
|
|
|
51924
52999
|
console.error(`\u23ED\uFE0F Skipping excluded file: ${file}`);
|
|
51925
53000
|
continue;
|
|
51926
53001
|
}
|
|
51927
|
-
const filePath =
|
|
53002
|
+
const filePath = path23.join(this.cwd, file);
|
|
51928
53003
|
const fileChange = await this.analyzeFileChange(file, status2, filePath, includeContext);
|
|
51929
53004
|
changes.push(fileChange);
|
|
51930
53005
|
}
|
|
@@ -52000,7 +53075,7 @@ ${file.patch}`).join("\n\n");
|
|
|
52000
53075
|
let content;
|
|
52001
53076
|
let truncated = false;
|
|
52002
53077
|
try {
|
|
52003
|
-
if (includeContext && status !== "added" &&
|
|
53078
|
+
if (includeContext && status !== "added" && fs20.existsSync(filePath)) {
|
|
52004
53079
|
const diff = await this.git.diff(["--", filename]).catch(() => "");
|
|
52005
53080
|
if (diff) {
|
|
52006
53081
|
const result = this.truncatePatch(diff, filename);
|
|
@@ -52010,7 +53085,7 @@ ${file.patch}`).join("\n\n");
|
|
|
52010
53085
|
additions = lines.filter((line) => line.startsWith("+")).length;
|
|
52011
53086
|
deletions = lines.filter((line) => line.startsWith("-")).length;
|
|
52012
53087
|
}
|
|
52013
|
-
} else if (status !== "added" &&
|
|
53088
|
+
} else if (status !== "added" && fs20.existsSync(filePath)) {
|
|
52014
53089
|
const diff = await this.git.diff(["--", filename]).catch(() => "");
|
|
52015
53090
|
if (diff) {
|
|
52016
53091
|
const lines = diff.split("\n");
|
|
@@ -52018,17 +53093,17 @@ ${file.patch}`).join("\n\n");
|
|
|
52018
53093
|
deletions = lines.filter((line) => line.startsWith("-")).length;
|
|
52019
53094
|
}
|
|
52020
53095
|
}
|
|
52021
|
-
if (status === "added" &&
|
|
53096
|
+
if (status === "added" && fs20.existsSync(filePath)) {
|
|
52022
53097
|
try {
|
|
52023
|
-
const stats =
|
|
53098
|
+
const stats = fs20.statSync(filePath);
|
|
52024
53099
|
if (stats.isFile() && stats.size < 1024 * 1024) {
|
|
52025
53100
|
if (includeContext) {
|
|
52026
|
-
content =
|
|
53101
|
+
content = fs20.readFileSync(filePath, "utf8");
|
|
52027
53102
|
const result = this.truncatePatch(content, filename);
|
|
52028
53103
|
patch = result.patch;
|
|
52029
53104
|
truncated = result.truncated;
|
|
52030
53105
|
}
|
|
52031
|
-
const fileContent = includeContext ? content :
|
|
53106
|
+
const fileContent = includeContext ? content : fs20.readFileSync(filePath, "utf8");
|
|
52032
53107
|
additions = fileContent.split("\n").length;
|
|
52033
53108
|
}
|
|
52034
53109
|
} catch {
|
|
@@ -52119,12 +53194,12 @@ function shellEscape(str) {
|
|
|
52119
53194
|
function sanitizePathComponent(name) {
|
|
52120
53195
|
return name.replace(/\.\./g, "").replace(/[\/\\]/g, "-").replace(/^\.+/, "").trim() || "unnamed";
|
|
52121
53196
|
}
|
|
52122
|
-
var fsp2,
|
|
53197
|
+
var fsp2, path24, WorkspaceManager;
|
|
52123
53198
|
var init_workspace_manager = __esm({
|
|
52124
53199
|
"src/utils/workspace-manager.ts"() {
|
|
52125
53200
|
"use strict";
|
|
52126
53201
|
fsp2 = __toESM(require("fs/promises"));
|
|
52127
|
-
|
|
53202
|
+
path24 = __toESM(require("path"));
|
|
52128
53203
|
init_command_executor();
|
|
52129
53204
|
init_logger();
|
|
52130
53205
|
WorkspaceManager = class _WorkspaceManager {
|
|
@@ -52158,7 +53233,7 @@ var init_workspace_manager = __esm({
|
|
|
52158
53233
|
};
|
|
52159
53234
|
this.basePath = this.config.basePath;
|
|
52160
53235
|
const workspaceDirName = sanitizePathComponent(this.config.name || this.sessionId);
|
|
52161
|
-
this.workspacePath =
|
|
53236
|
+
this.workspacePath = path24.join(this.basePath, workspaceDirName);
|
|
52162
53237
|
}
|
|
52163
53238
|
/**
|
|
52164
53239
|
* Get or create a WorkspaceManager instance for a session
|
|
@@ -52205,8 +53280,8 @@ var init_workspace_manager = __esm({
|
|
|
52205
53280
|
);
|
|
52206
53281
|
if (this.cleanupRequested && this.activeOperations === 0) {
|
|
52207
53282
|
logger.debug(`[Workspace] All references released, proceeding with deferred cleanup`);
|
|
52208
|
-
for (const
|
|
52209
|
-
|
|
53283
|
+
for (const resolve18 of this.cleanupResolvers) {
|
|
53284
|
+
resolve18();
|
|
52210
53285
|
}
|
|
52211
53286
|
this.cleanupResolvers = [];
|
|
52212
53287
|
}
|
|
@@ -52253,7 +53328,7 @@ var init_workspace_manager = __esm({
|
|
|
52253
53328
|
configuredMainProjectName || this.extractProjectName(this.originalPath)
|
|
52254
53329
|
);
|
|
52255
53330
|
this.usedNames.add(mainProjectName);
|
|
52256
|
-
const mainProjectPath =
|
|
53331
|
+
const mainProjectPath = path24.join(this.workspacePath, mainProjectName);
|
|
52257
53332
|
const isGitRepo = await this.isGitRepository(this.originalPath);
|
|
52258
53333
|
if (isGitRepo) {
|
|
52259
53334
|
const exists = await this.pathExists(mainProjectPath);
|
|
@@ -52271,6 +53346,8 @@ var init_workspace_manager = __esm({
|
|
|
52271
53346
|
} catch {
|
|
52272
53347
|
}
|
|
52273
53348
|
await this.createMainProjectWorktree(mainProjectPath);
|
|
53349
|
+
} else {
|
|
53350
|
+
await this.refreshWorktreeToUpstream(mainProjectPath);
|
|
52274
53351
|
}
|
|
52275
53352
|
} else {
|
|
52276
53353
|
await this.createMainProjectWorktree(mainProjectPath);
|
|
@@ -52315,7 +53392,7 @@ var init_workspace_manager = __esm({
|
|
|
52315
53392
|
let projectName = sanitizePathComponent(description || this.extractRepoName(repository));
|
|
52316
53393
|
projectName = this.getUniqueName(projectName);
|
|
52317
53394
|
this.usedNames.add(projectName);
|
|
52318
|
-
const workspacePath =
|
|
53395
|
+
const workspacePath = path24.join(this.workspacePath, projectName);
|
|
52319
53396
|
await fsp2.rm(workspacePath, { recursive: true, force: true });
|
|
52320
53397
|
try {
|
|
52321
53398
|
await fsp2.symlink(worktreePath, workspacePath);
|
|
@@ -52361,19 +53438,19 @@ var init_workspace_manager = __esm({
|
|
|
52361
53438
|
);
|
|
52362
53439
|
this.cleanupRequested = true;
|
|
52363
53440
|
await Promise.race([
|
|
52364
|
-
new Promise((
|
|
53441
|
+
new Promise((resolve18) => {
|
|
52365
53442
|
if (this.activeOperations === 0) {
|
|
52366
|
-
|
|
53443
|
+
resolve18();
|
|
52367
53444
|
} else {
|
|
52368
|
-
this.cleanupResolvers.push(
|
|
53445
|
+
this.cleanupResolvers.push(resolve18);
|
|
52369
53446
|
}
|
|
52370
53447
|
}),
|
|
52371
|
-
new Promise((
|
|
53448
|
+
new Promise((resolve18) => {
|
|
52372
53449
|
setTimeout(() => {
|
|
52373
53450
|
logger.warn(
|
|
52374
53451
|
`[Workspace] Cleanup timeout after ${timeout}ms, proceeding anyway (${this.activeOperations} operations still active)`
|
|
52375
53452
|
);
|
|
52376
|
-
|
|
53453
|
+
resolve18();
|
|
52377
53454
|
}, timeout);
|
|
52378
53455
|
})
|
|
52379
53456
|
]);
|
|
@@ -52425,7 +53502,7 @@ var init_workspace_manager = __esm({
|
|
|
52425
53502
|
const now = Date.now();
|
|
52426
53503
|
for (const entry of entries) {
|
|
52427
53504
|
if (!entry.isDirectory()) continue;
|
|
52428
|
-
const dirPath =
|
|
53505
|
+
const dirPath = path24.join(basePath, entry.name);
|
|
52429
53506
|
try {
|
|
52430
53507
|
const stat2 = await fsp2.stat(dirPath);
|
|
52431
53508
|
if (now - stat2.mtimeMs > maxAgeMs) {
|
|
@@ -52433,8 +53510,8 @@ var init_workspace_manager = __esm({
|
|
|
52433
53510
|
const subdirs = await fsp2.readdir(dirPath, { withFileTypes: true });
|
|
52434
53511
|
for (const sub of subdirs) {
|
|
52435
53512
|
if (!sub.isDirectory()) continue;
|
|
52436
|
-
const subPath =
|
|
52437
|
-
const gitFilePath =
|
|
53513
|
+
const subPath = path24.join(dirPath, sub.name);
|
|
53514
|
+
const gitFilePath = path24.join(subPath, ".git");
|
|
52438
53515
|
try {
|
|
52439
53516
|
const gitContent = await fsp2.readFile(gitFilePath, "utf-8");
|
|
52440
53517
|
const match = gitContent.match(/gitdir:\s*(.+)/);
|
|
@@ -52466,33 +53543,143 @@ var init_workspace_manager = __esm({
|
|
|
52466
53543
|
return cleaned;
|
|
52467
53544
|
}
|
|
52468
53545
|
/**
|
|
52469
|
-
*
|
|
52470
|
-
*
|
|
52471
|
-
*
|
|
52472
|
-
*
|
|
52473
|
-
*
|
|
52474
|
-
*
|
|
52475
|
-
*
|
|
52476
|
-
* full clone/bare-repo/fetch/worktree pipeline.
|
|
53546
|
+
* visor-disable: architecture - The helpers below (resolveUpstreamRef,
|
|
53547
|
+
* fetchAndResolveUpstream, resetAndCleanWorktree, refreshWorktreeToUpstream)
|
|
53548
|
+
* are NOT duplicates of WorktreeManager's fetchRef/getCommitShaForRef/cleanWorktree.
|
|
53549
|
+
* WorktreeManager operates on BARE repo caches cloned from remote URLs, while
|
|
53550
|
+
* WorkspaceManager operates on the LOCAL working repo the user already has checked out.
|
|
53551
|
+
* The git commands differ (e.g. `fetch origin --prune` vs `fetch origin <ref>:<ref>`)
|
|
53552
|
+
* and sharing code would require adding a "local mode" to WorktreeManager for no benefit.
|
|
52477
53553
|
*/
|
|
52478
|
-
|
|
52479
|
-
|
|
53554
|
+
/**
|
|
53555
|
+
* Resolve the upstream default branch ref.
|
|
53556
|
+
* Tries origin/HEAD (symbolic), then origin/main, then origin/master.
|
|
53557
|
+
* Falls back to local HEAD if no remote is configured.
|
|
53558
|
+
*/
|
|
53559
|
+
async resolveUpstreamRef() {
|
|
53560
|
+
const esc = shellEscape(this.originalPath);
|
|
53561
|
+
const symbolicResult = await commandExecutor.execute(
|
|
53562
|
+
`git -C ${esc} symbolic-ref refs/remotes/origin/HEAD 2>/dev/null`,
|
|
53563
|
+
{ timeout: 1e4 }
|
|
53564
|
+
);
|
|
53565
|
+
if (symbolicResult.exitCode === 0 && symbolicResult.stdout.trim()) {
|
|
53566
|
+
const ref = symbolicResult.stdout.trim().replace("refs/remotes/", "");
|
|
53567
|
+
logger.debug(`[Workspace] Resolved upstream default branch via origin/HEAD: ${ref}`);
|
|
53568
|
+
return ref;
|
|
53569
|
+
}
|
|
53570
|
+
const mainResult = await commandExecutor.execute(
|
|
53571
|
+
`git -C ${esc} rev-parse --verify origin/main 2>/dev/null`,
|
|
53572
|
+
{ timeout: 1e4 }
|
|
53573
|
+
);
|
|
53574
|
+
if (mainResult.exitCode === 0) {
|
|
53575
|
+
logger.debug(`[Workspace] Using origin/main as upstream ref`);
|
|
53576
|
+
return "origin/main";
|
|
53577
|
+
}
|
|
53578
|
+
const masterResult = await commandExecutor.execute(
|
|
53579
|
+
`git -C ${esc} rev-parse --verify origin/master 2>/dev/null`,
|
|
53580
|
+
{ timeout: 1e4 }
|
|
53581
|
+
);
|
|
53582
|
+
if (masterResult.exitCode === 0) {
|
|
53583
|
+
logger.debug(`[Workspace] Using origin/master as upstream ref`);
|
|
53584
|
+
return "origin/master";
|
|
53585
|
+
}
|
|
53586
|
+
logger.warn(`[Workspace] No upstream remote found, falling back to local HEAD`);
|
|
53587
|
+
return "HEAD";
|
|
53588
|
+
}
|
|
53589
|
+
/**
|
|
53590
|
+
* Fetch latest from origin, resolve the upstream default branch, and return
|
|
53591
|
+
* both the ref name and the resolved commit SHA.
|
|
53592
|
+
*/
|
|
53593
|
+
async fetchAndResolveUpstream() {
|
|
53594
|
+
logger.debug(`[Workspace] Fetching latest from origin`);
|
|
53595
|
+
const fetchResult = await commandExecutor.execute(
|
|
53596
|
+
`git -C ${shellEscape(this.originalPath)} fetch origin --prune 2>&1`,
|
|
53597
|
+
{ timeout: 12e4 }
|
|
53598
|
+
);
|
|
53599
|
+
if (fetchResult.exitCode !== 0) {
|
|
53600
|
+
logger.warn(`[Workspace] fetch origin failed (will use cached refs): ${fetchResult.stderr}`);
|
|
53601
|
+
}
|
|
53602
|
+
const upstreamRef = await this.resolveUpstreamRef();
|
|
53603
|
+
const shaResult = await commandExecutor.execute(
|
|
53604
|
+
`git -C ${shellEscape(this.originalPath)} rev-parse ${shellEscape(upstreamRef)}`,
|
|
53605
|
+
{ timeout: 1e4 }
|
|
53606
|
+
);
|
|
53607
|
+
if (shaResult.exitCode === 0) {
|
|
53608
|
+
return { upstreamRef, targetSha: shaResult.stdout.trim() };
|
|
53609
|
+
}
|
|
53610
|
+
logger.warn(
|
|
53611
|
+
`[Workspace] Could not resolve ${upstreamRef} (${shaResult.stderr.trim()}), falling back to HEAD`
|
|
53612
|
+
);
|
|
52480
53613
|
const headResult = await commandExecutor.execute(
|
|
52481
53614
|
`git -C ${shellEscape(this.originalPath)} rev-parse HEAD`,
|
|
52482
|
-
{
|
|
52483
|
-
timeout: 1e4
|
|
52484
|
-
}
|
|
53615
|
+
{ timeout: 1e4 }
|
|
52485
53616
|
);
|
|
52486
53617
|
if (headResult.exitCode !== 0) {
|
|
52487
|
-
throw new Error(`
|
|
53618
|
+
throw new Error(`Repository has no commits \u2014 cannot create worktree: ${headResult.stderr}`);
|
|
52488
53619
|
}
|
|
52489
|
-
|
|
52490
|
-
|
|
53620
|
+
return { upstreamRef: "HEAD", targetSha: headResult.stdout.trim() };
|
|
53621
|
+
}
|
|
53622
|
+
/**
|
|
53623
|
+
* Reset a worktree to a specific commit and clean all modifications.
|
|
53624
|
+
*/
|
|
53625
|
+
async resetAndCleanWorktree(worktreePath, targetSha) {
|
|
53626
|
+
const escapedPath = shellEscape(worktreePath);
|
|
53627
|
+
const escapedSha = shellEscape(targetSha);
|
|
53628
|
+
const resetResult = await commandExecutor.execute(
|
|
53629
|
+
`git -C ${escapedPath} reset --hard ${escapedSha}`,
|
|
53630
|
+
{ timeout: 1e4 }
|
|
53631
|
+
);
|
|
53632
|
+
if (resetResult.exitCode !== 0) {
|
|
53633
|
+
logger.warn(`[Workspace] reset --hard failed: ${resetResult.stderr}`);
|
|
53634
|
+
}
|
|
53635
|
+
const cleanResult = await commandExecutor.execute(`git -C ${escapedPath} clean -fdx`, {
|
|
53636
|
+
timeout: 3e4
|
|
53637
|
+
});
|
|
53638
|
+
if (cleanResult.exitCode !== 0) {
|
|
53639
|
+
logger.warn(`[Workspace] clean -fdx failed: ${cleanResult.stderr}`);
|
|
53640
|
+
}
|
|
53641
|
+
}
|
|
53642
|
+
/**
|
|
53643
|
+
* Refresh an existing worktree to the latest upstream default branch
|
|
53644
|
+
* and ensure it has no modified or untracked files.
|
|
53645
|
+
*/
|
|
53646
|
+
async refreshWorktreeToUpstream(worktreePath) {
|
|
53647
|
+
logger.info(`[Workspace] Refreshing worktree to latest upstream: ${worktreePath}`);
|
|
53648
|
+
try {
|
|
53649
|
+
const { upstreamRef, targetSha } = await this.fetchAndResolveUpstream();
|
|
53650
|
+
const checkoutResult = await commandExecutor.execute(
|
|
53651
|
+
`git -C ${shellEscape(worktreePath)} checkout --detach ${shellEscape(targetSha)}`,
|
|
53652
|
+
{ timeout: 3e4 }
|
|
53653
|
+
);
|
|
53654
|
+
if (checkoutResult.exitCode !== 0) {
|
|
53655
|
+
logger.warn(
|
|
53656
|
+
`[Workspace] checkout --detach failed (worktree stays at current commit): ${checkoutResult.stderr}`
|
|
53657
|
+
);
|
|
53658
|
+
await this.resetAndCleanWorktree(worktreePath, "HEAD");
|
|
53659
|
+
return;
|
|
53660
|
+
}
|
|
53661
|
+
await this.resetAndCleanWorktree(worktreePath, targetSha);
|
|
53662
|
+
logger.info(`[Workspace] Worktree updated to ${upstreamRef} (${targetSha.slice(0, 8)})`);
|
|
53663
|
+
} catch (error) {
|
|
53664
|
+
logger.warn(`[Workspace] Failed to refresh worktree (continuing with stale state): ${error}`);
|
|
53665
|
+
}
|
|
53666
|
+
}
|
|
53667
|
+
/**
|
|
53668
|
+
* Create worktree for the main project.
|
|
53669
|
+
* See visor-disable comment above resolveUpstreamRef for why this doesn't use WorktreeManager.
|
|
53670
|
+
*/
|
|
53671
|
+
async createMainProjectWorktree(targetPath) {
|
|
53672
|
+
logger.debug(`Creating main project worktree: ${targetPath}`);
|
|
53673
|
+
const { upstreamRef, targetSha } = await this.fetchAndResolveUpstream();
|
|
53674
|
+
const createCmd = `git -C ${shellEscape(this.originalPath)} worktree add --detach ${shellEscape(targetPath)} ${shellEscape(targetSha)}`;
|
|
52491
53675
|
const result = await commandExecutor.execute(createCmd, { timeout: 6e4 });
|
|
52492
53676
|
if (result.exitCode !== 0) {
|
|
52493
53677
|
throw new Error(`Failed to create main project worktree: ${result.stderr}`);
|
|
52494
53678
|
}
|
|
52495
|
-
|
|
53679
|
+
await this.resetAndCleanWorktree(targetPath, targetSha);
|
|
53680
|
+
logger.info(
|
|
53681
|
+
`Created main project worktree at ${targetPath} (${upstreamRef} -> ${targetSha.slice(0, 8)})`
|
|
53682
|
+
);
|
|
52496
53683
|
}
|
|
52497
53684
|
/**
|
|
52498
53685
|
* Remove main project worktree
|
|
@@ -52525,7 +53712,7 @@ var init_workspace_manager = __esm({
|
|
|
52525
53712
|
* Extract project name from path
|
|
52526
53713
|
*/
|
|
52527
53714
|
extractProjectName(dirPath) {
|
|
52528
|
-
return
|
|
53715
|
+
return path24.basename(dirPath);
|
|
52529
53716
|
}
|
|
52530
53717
|
/**
|
|
52531
53718
|
* Extract repository name from owner/repo format
|
|
@@ -52680,6 +53867,9 @@ async function initializeWorkspace(context2) {
|
|
|
52680
53867
|
process.env.VISOR_WORKSPACE_MAIN_PROJECT = info.mainProjectPath;
|
|
52681
53868
|
process.env.VISOR_WORKSPACE_MAIN_PROJECT_NAME = info.mainProjectName;
|
|
52682
53869
|
process.env.VISOR_ORIGINAL_WORKDIR = originalPath;
|
|
53870
|
+
const basePath = workspaceConfig?.base_path || process.env.VISOR_WORKSPACE_PATH || "/tmp/visor-workspaces";
|
|
53871
|
+
const existing = process.env.GIT_CEILING_DIRECTORIES;
|
|
53872
|
+
process.env.GIT_CEILING_DIRECTORIES = existing ? `${existing}:${basePath}` : basePath;
|
|
52683
53873
|
} catch {
|
|
52684
53874
|
}
|
|
52685
53875
|
logger.info(`[Workspace] Initialized workspace: ${info.workspacePath}`);
|
|
@@ -52742,13 +53932,13 @@ var validator_exports = {};
|
|
|
52742
53932
|
__export(validator_exports, {
|
|
52743
53933
|
LicenseValidator: () => LicenseValidator
|
|
52744
53934
|
});
|
|
52745
|
-
var crypto2,
|
|
53935
|
+
var crypto2, fs21, path25, LicenseValidator;
|
|
52746
53936
|
var init_validator = __esm({
|
|
52747
53937
|
"src/enterprise/license/validator.ts"() {
|
|
52748
53938
|
"use strict";
|
|
52749
53939
|
crypto2 = __toESM(require("crypto"));
|
|
52750
|
-
|
|
52751
|
-
|
|
53940
|
+
fs21 = __toESM(require("fs"));
|
|
53941
|
+
path25 = __toESM(require("path"));
|
|
52752
53942
|
LicenseValidator = class _LicenseValidator {
|
|
52753
53943
|
/** Ed25519 public key for license verification (PEM format). */
|
|
52754
53944
|
static PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n-----END PUBLIC KEY-----\n";
|
|
@@ -52801,28 +53991,28 @@ var init_validator = __esm({
|
|
|
52801
53991
|
return process.env.VISOR_LICENSE.trim();
|
|
52802
53992
|
}
|
|
52803
53993
|
if (process.env.VISOR_LICENSE_FILE) {
|
|
52804
|
-
const resolved =
|
|
53994
|
+
const resolved = path25.resolve(process.env.VISOR_LICENSE_FILE);
|
|
52805
53995
|
const home2 = process.env.HOME || process.env.USERPROFILE || "";
|
|
52806
|
-
const allowedPrefixes = [
|
|
52807
|
-
if (home2) allowedPrefixes.push(
|
|
53996
|
+
const allowedPrefixes = [path25.normalize(process.cwd())];
|
|
53997
|
+
if (home2) allowedPrefixes.push(path25.normalize(path25.join(home2, ".config", "visor")));
|
|
52808
53998
|
let realPath;
|
|
52809
53999
|
try {
|
|
52810
|
-
realPath =
|
|
54000
|
+
realPath = fs21.realpathSync(resolved);
|
|
52811
54001
|
} catch {
|
|
52812
54002
|
return null;
|
|
52813
54003
|
}
|
|
52814
54004
|
const isSafe = allowedPrefixes.some(
|
|
52815
|
-
(prefix) => realPath === prefix || realPath.startsWith(prefix +
|
|
54005
|
+
(prefix) => realPath === prefix || realPath.startsWith(prefix + path25.sep)
|
|
52816
54006
|
);
|
|
52817
54007
|
if (!isSafe) return null;
|
|
52818
54008
|
return this.readFile(realPath);
|
|
52819
54009
|
}
|
|
52820
|
-
const cwdPath =
|
|
54010
|
+
const cwdPath = path25.join(process.cwd(), ".visor-license");
|
|
52821
54011
|
const cwdToken = this.readFile(cwdPath);
|
|
52822
54012
|
if (cwdToken) return cwdToken;
|
|
52823
54013
|
const home = process.env.HOME || process.env.USERPROFILE || "";
|
|
52824
54014
|
if (home) {
|
|
52825
|
-
const configPath =
|
|
54015
|
+
const configPath = path25.join(home, ".config", "visor", ".visor-license");
|
|
52826
54016
|
const configToken = this.readFile(configPath);
|
|
52827
54017
|
if (configToken) return configToken;
|
|
52828
54018
|
}
|
|
@@ -52830,7 +54020,7 @@ var init_validator = __esm({
|
|
|
52830
54020
|
}
|
|
52831
54021
|
readFile(filePath) {
|
|
52832
54022
|
try {
|
|
52833
|
-
return
|
|
54023
|
+
return fs21.readFileSync(filePath, "utf-8").trim();
|
|
52834
54024
|
} catch {
|
|
52835
54025
|
return null;
|
|
52836
54026
|
}
|
|
@@ -52869,17 +54059,17 @@ var init_validator = __esm({
|
|
|
52869
54059
|
});
|
|
52870
54060
|
|
|
52871
54061
|
// src/enterprise/policy/opa-compiler.ts
|
|
52872
|
-
var
|
|
54062
|
+
var fs22, path26, os2, crypto3, import_child_process8, OpaCompiler;
|
|
52873
54063
|
var init_opa_compiler = __esm({
|
|
52874
54064
|
"src/enterprise/policy/opa-compiler.ts"() {
|
|
52875
54065
|
"use strict";
|
|
52876
|
-
|
|
52877
|
-
|
|
54066
|
+
fs22 = __toESM(require("fs"));
|
|
54067
|
+
path26 = __toESM(require("path"));
|
|
52878
54068
|
os2 = __toESM(require("os"));
|
|
52879
54069
|
crypto3 = __toESM(require("crypto"));
|
|
52880
54070
|
import_child_process8 = require("child_process");
|
|
52881
54071
|
OpaCompiler = class _OpaCompiler {
|
|
52882
|
-
static CACHE_DIR =
|
|
54072
|
+
static CACHE_DIR = path26.join(os2.tmpdir(), "visor-opa-cache");
|
|
52883
54073
|
/**
|
|
52884
54074
|
* Resolve the input paths to WASM bytes.
|
|
52885
54075
|
*
|
|
@@ -52891,24 +54081,24 @@ var init_opa_compiler = __esm({
|
|
|
52891
54081
|
async resolveWasmBytes(paths) {
|
|
52892
54082
|
const regoFiles = [];
|
|
52893
54083
|
for (const p of paths) {
|
|
52894
|
-
const resolved =
|
|
52895
|
-
if (
|
|
54084
|
+
const resolved = path26.resolve(p);
|
|
54085
|
+
if (path26.normalize(resolved).includes("..")) {
|
|
52896
54086
|
throw new Error(`Policy path contains traversal sequences: ${p}`);
|
|
52897
54087
|
}
|
|
52898
|
-
if (resolved.endsWith(".wasm") &&
|
|
52899
|
-
return
|
|
54088
|
+
if (resolved.endsWith(".wasm") && fs22.existsSync(resolved)) {
|
|
54089
|
+
return fs22.readFileSync(resolved);
|
|
52900
54090
|
}
|
|
52901
|
-
if (!
|
|
52902
|
-
const stat2 =
|
|
54091
|
+
if (!fs22.existsSync(resolved)) continue;
|
|
54092
|
+
const stat2 = fs22.statSync(resolved);
|
|
52903
54093
|
if (stat2.isDirectory()) {
|
|
52904
|
-
const wasmCandidate =
|
|
52905
|
-
if (
|
|
52906
|
-
return
|
|
54094
|
+
const wasmCandidate = path26.join(resolved, "policy.wasm");
|
|
54095
|
+
if (fs22.existsSync(wasmCandidate)) {
|
|
54096
|
+
return fs22.readFileSync(wasmCandidate);
|
|
52907
54097
|
}
|
|
52908
|
-
const files =
|
|
54098
|
+
const files = fs22.readdirSync(resolved);
|
|
52909
54099
|
for (const f of files) {
|
|
52910
54100
|
if (f.endsWith(".rego")) {
|
|
52911
|
-
regoFiles.push(
|
|
54101
|
+
regoFiles.push(path26.join(resolved, f));
|
|
52912
54102
|
}
|
|
52913
54103
|
}
|
|
52914
54104
|
} else if (resolved.endsWith(".rego")) {
|
|
@@ -52938,17 +54128,17 @@ var init_opa_compiler = __esm({
|
|
|
52938
54128
|
}
|
|
52939
54129
|
const hash = crypto3.createHash("sha256");
|
|
52940
54130
|
for (const f of regoFiles.sort()) {
|
|
52941
|
-
hash.update(
|
|
54131
|
+
hash.update(fs22.readFileSync(f));
|
|
52942
54132
|
hash.update(f);
|
|
52943
54133
|
}
|
|
52944
54134
|
const cacheKey = hash.digest("hex").slice(0, 16);
|
|
52945
54135
|
const cacheDir = _OpaCompiler.CACHE_DIR;
|
|
52946
|
-
const cachedWasm =
|
|
52947
|
-
if (
|
|
52948
|
-
return
|
|
54136
|
+
const cachedWasm = path26.join(cacheDir, `${cacheKey}.wasm`);
|
|
54137
|
+
if (fs22.existsSync(cachedWasm)) {
|
|
54138
|
+
return fs22.readFileSync(cachedWasm);
|
|
52949
54139
|
}
|
|
52950
|
-
|
|
52951
|
-
const bundleTar =
|
|
54140
|
+
fs22.mkdirSync(cacheDir, { recursive: true });
|
|
54141
|
+
const bundleTar = path26.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
|
|
52952
54142
|
try {
|
|
52953
54143
|
const args = [
|
|
52954
54144
|
"build",
|
|
@@ -52977,43 +54167,43 @@ Ensure your .rego files are valid and the \`opa\` CLI is installed.`
|
|
|
52977
54167
|
(0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "/policy.wasm"], {
|
|
52978
54168
|
stdio: "pipe"
|
|
52979
54169
|
});
|
|
52980
|
-
const extractedWasm =
|
|
52981
|
-
if (
|
|
52982
|
-
|
|
54170
|
+
const extractedWasm = path26.join(cacheDir, "policy.wasm");
|
|
54171
|
+
if (fs22.existsSync(extractedWasm)) {
|
|
54172
|
+
fs22.renameSync(extractedWasm, cachedWasm);
|
|
52983
54173
|
}
|
|
52984
54174
|
} catch {
|
|
52985
54175
|
try {
|
|
52986
54176
|
(0, import_child_process8.execFileSync)("tar", ["-xzf", bundleTar, "-C", cacheDir, "policy.wasm"], {
|
|
52987
54177
|
stdio: "pipe"
|
|
52988
54178
|
});
|
|
52989
|
-
const extractedWasm =
|
|
52990
|
-
if (
|
|
52991
|
-
|
|
54179
|
+
const extractedWasm = path26.join(cacheDir, "policy.wasm");
|
|
54180
|
+
if (fs22.existsSync(extractedWasm)) {
|
|
54181
|
+
fs22.renameSync(extractedWasm, cachedWasm);
|
|
52992
54182
|
}
|
|
52993
54183
|
} catch (err2) {
|
|
52994
54184
|
throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
|
|
52995
54185
|
}
|
|
52996
54186
|
}
|
|
52997
54187
|
try {
|
|
52998
|
-
|
|
54188
|
+
fs22.unlinkSync(bundleTar);
|
|
52999
54189
|
} catch {
|
|
53000
54190
|
}
|
|
53001
|
-
if (!
|
|
54191
|
+
if (!fs22.existsSync(cachedWasm)) {
|
|
53002
54192
|
throw new Error("OPA build succeeded but policy.wasm was not found in the bundle");
|
|
53003
54193
|
}
|
|
53004
|
-
return
|
|
54194
|
+
return fs22.readFileSync(cachedWasm);
|
|
53005
54195
|
}
|
|
53006
54196
|
};
|
|
53007
54197
|
}
|
|
53008
54198
|
});
|
|
53009
54199
|
|
|
53010
54200
|
// src/enterprise/policy/opa-wasm-evaluator.ts
|
|
53011
|
-
var
|
|
54201
|
+
var fs23, path27, OpaWasmEvaluator;
|
|
53012
54202
|
var init_opa_wasm_evaluator = __esm({
|
|
53013
54203
|
"src/enterprise/policy/opa-wasm-evaluator.ts"() {
|
|
53014
54204
|
"use strict";
|
|
53015
|
-
|
|
53016
|
-
|
|
54205
|
+
fs23 = __toESM(require("fs"));
|
|
54206
|
+
path27 = __toESM(require("path"));
|
|
53017
54207
|
init_opa_compiler();
|
|
53018
54208
|
OpaWasmEvaluator = class {
|
|
53019
54209
|
policy = null;
|
|
@@ -53046,18 +54236,18 @@ var init_opa_wasm_evaluator = __esm({
|
|
|
53046
54236
|
* making it available in Rego via `data.<key>`.
|
|
53047
54237
|
*/
|
|
53048
54238
|
loadData(dataPath) {
|
|
53049
|
-
const resolved =
|
|
53050
|
-
if (
|
|
54239
|
+
const resolved = path27.resolve(dataPath);
|
|
54240
|
+
if (path27.normalize(resolved).includes("..")) {
|
|
53051
54241
|
throw new Error(`Data path contains traversal sequences: ${dataPath}`);
|
|
53052
54242
|
}
|
|
53053
|
-
if (!
|
|
54243
|
+
if (!fs23.existsSync(resolved)) {
|
|
53054
54244
|
throw new Error(`OPA data file not found: ${resolved}`);
|
|
53055
54245
|
}
|
|
53056
|
-
const stat2 =
|
|
54246
|
+
const stat2 = fs23.statSync(resolved);
|
|
53057
54247
|
if (stat2.size > 10 * 1024 * 1024) {
|
|
53058
54248
|
throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat2.size} bytes)`);
|
|
53059
54249
|
}
|
|
53060
|
-
const raw =
|
|
54250
|
+
const raw = fs23.readFileSync(resolved, "utf-8");
|
|
53061
54251
|
try {
|
|
53062
54252
|
const parsed = JSON.parse(raw);
|
|
53063
54253
|
if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
|
|
@@ -53578,12 +54768,12 @@ function toInsertRow(schedule) {
|
|
|
53578
54768
|
previous_response: schedule.previousResponse ?? null
|
|
53579
54769
|
};
|
|
53580
54770
|
}
|
|
53581
|
-
var
|
|
54771
|
+
var fs24, path28, import_uuid2, KnexStoreBackend;
|
|
53582
54772
|
var init_knex_store = __esm({
|
|
53583
54773
|
"src/enterprise/scheduler/knex-store.ts"() {
|
|
53584
54774
|
"use strict";
|
|
53585
|
-
|
|
53586
|
-
|
|
54775
|
+
fs24 = __toESM(require("fs"));
|
|
54776
|
+
path28 = __toESM(require("path"));
|
|
53587
54777
|
import_uuid2 = require("uuid");
|
|
53588
54778
|
init_logger();
|
|
53589
54779
|
KnexStoreBackend = class {
|
|
@@ -53669,24 +54859,24 @@ var init_knex_store = __esm({
|
|
|
53669
54859
|
};
|
|
53670
54860
|
if (ssl.ca) {
|
|
53671
54861
|
const caPath = this.validateSslPath(ssl.ca, "CA certificate");
|
|
53672
|
-
result.ca =
|
|
54862
|
+
result.ca = fs24.readFileSync(caPath, "utf8");
|
|
53673
54863
|
}
|
|
53674
54864
|
if (ssl.cert) {
|
|
53675
54865
|
const certPath = this.validateSslPath(ssl.cert, "client certificate");
|
|
53676
|
-
result.cert =
|
|
54866
|
+
result.cert = fs24.readFileSync(certPath, "utf8");
|
|
53677
54867
|
}
|
|
53678
54868
|
if (ssl.key) {
|
|
53679
54869
|
const keyPath = this.validateSslPath(ssl.key, "client key");
|
|
53680
|
-
result.key =
|
|
54870
|
+
result.key = fs24.readFileSync(keyPath, "utf8");
|
|
53681
54871
|
}
|
|
53682
54872
|
return result;
|
|
53683
54873
|
}
|
|
53684
54874
|
validateSslPath(filePath, label) {
|
|
53685
|
-
const resolved =
|
|
53686
|
-
if (resolved !==
|
|
54875
|
+
const resolved = path28.resolve(filePath);
|
|
54876
|
+
if (resolved !== path28.normalize(resolved)) {
|
|
53687
54877
|
throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
|
|
53688
54878
|
}
|
|
53689
|
-
if (!
|
|
54879
|
+
if (!fs24.existsSync(resolved)) {
|
|
53690
54880
|
throw new Error(`SSL ${label} not found: ${filePath}`);
|
|
53691
54881
|
}
|
|
53692
54882
|
return resolved;
|
|
@@ -54878,8 +56068,8 @@ ${content}
|
|
|
54878
56068
|
* Sleep utility
|
|
54879
56069
|
*/
|
|
54880
56070
|
sleep(ms) {
|
|
54881
|
-
return new Promise((
|
|
54882
|
-
const t = setTimeout(
|
|
56071
|
+
return new Promise((resolve18) => {
|
|
56072
|
+
const t = setTimeout(resolve18, ms);
|
|
54883
56073
|
if (typeof t.unref === "function") {
|
|
54884
56074
|
try {
|
|
54885
56075
|
t.unref();
|
|
@@ -54956,6 +56146,7 @@ var init_github_frontend = __esm({
|
|
|
54956
56146
|
// Minimum delay between updates (public for testing)
|
|
54957
56147
|
// Cache of created GitHub comment IDs per group to handle API eventual consistency
|
|
54958
56148
|
createdCommentGithubIds = /* @__PURE__ */ new Map();
|
|
56149
|
+
_stopped = false;
|
|
54959
56150
|
start(ctx) {
|
|
54960
56151
|
const log2 = ctx.logger;
|
|
54961
56152
|
const bus = ctx.eventBus;
|
|
@@ -55100,9 +56291,19 @@ var init_github_frontend = __esm({
|
|
|
55100
56291
|
})
|
|
55101
56292
|
);
|
|
55102
56293
|
}
|
|
55103
|
-
stop() {
|
|
56294
|
+
async stop() {
|
|
56295
|
+
this._stopped = true;
|
|
55104
56296
|
for (const s of this.subs) s.unsubscribe();
|
|
55105
56297
|
this.subs = [];
|
|
56298
|
+
if (this._timer) {
|
|
56299
|
+
clearTimeout(this._timer);
|
|
56300
|
+
this._timer = null;
|
|
56301
|
+
}
|
|
56302
|
+
this._pendingIds.clear();
|
|
56303
|
+
const pending = Array.from(this.updateLocks.values());
|
|
56304
|
+
if (pending.length > 0) {
|
|
56305
|
+
await Promise.allSettled(pending);
|
|
56306
|
+
}
|
|
55106
56307
|
}
|
|
55107
56308
|
async buildFullBody(ctx, group) {
|
|
55108
56309
|
const header = this.renderThreadHeader(ctx, group);
|
|
@@ -55153,8 +56354,8 @@ ${end}`);
|
|
|
55153
56354
|
async updateGroupedComment(ctx, comments, group, changedIds) {
|
|
55154
56355
|
const existingLock = this.updateLocks.get(group);
|
|
55155
56356
|
let resolveLock;
|
|
55156
|
-
const ourLock = new Promise((
|
|
55157
|
-
resolveLock =
|
|
56357
|
+
const ourLock = new Promise((resolve18) => {
|
|
56358
|
+
resolveLock = resolve18;
|
|
55158
56359
|
});
|
|
55159
56360
|
this.updateLocks.set(group, ourLock);
|
|
55160
56361
|
try {
|
|
@@ -55180,6 +56381,7 @@ ${end}`);
|
|
|
55180
56381
|
*/
|
|
55181
56382
|
async performGroupedCommentUpdate(ctx, comments, group, changedIds) {
|
|
55182
56383
|
try {
|
|
56384
|
+
if (this._stopped) return;
|
|
55183
56385
|
if (!ctx.run.repo || !ctx.run.pr) return;
|
|
55184
56386
|
const config = ctx.config;
|
|
55185
56387
|
const prCommentEnabled = config?.output?.pr_comment?.enabled !== false;
|
|
@@ -55466,7 +56668,7 @@ ${blocks}
|
|
|
55466
56668
|
* Sleep utility for enforcing delays
|
|
55467
56669
|
*/
|
|
55468
56670
|
sleep(ms) {
|
|
55469
|
-
return new Promise((
|
|
56671
|
+
return new Promise((resolve18) => setTimeout(resolve18, ms));
|
|
55470
56672
|
}
|
|
55471
56673
|
};
|
|
55472
56674
|
}
|
|
@@ -56737,15 +57939,15 @@ function serializeRunState(state) {
|
|
|
56737
57939
|
])
|
|
56738
57940
|
};
|
|
56739
57941
|
}
|
|
56740
|
-
var
|
|
57942
|
+
var path30, fs26, StateMachineExecutionEngine;
|
|
56741
57943
|
var init_state_machine_execution_engine = __esm({
|
|
56742
57944
|
"src/state-machine-execution-engine.ts"() {
|
|
56743
57945
|
"use strict";
|
|
56744
57946
|
init_runner();
|
|
56745
57947
|
init_logger();
|
|
56746
57948
|
init_sandbox_manager();
|
|
56747
|
-
|
|
56748
|
-
|
|
57949
|
+
path30 = __toESM(require("path"));
|
|
57950
|
+
fs26 = __toESM(require("fs"));
|
|
56749
57951
|
StateMachineExecutionEngine = class _StateMachineExecutionEngine {
|
|
56750
57952
|
workingDirectory;
|
|
56751
57953
|
executionContext;
|
|
@@ -57130,9 +58332,9 @@ var init_state_machine_execution_engine = __esm({
|
|
|
57130
58332
|
}
|
|
57131
58333
|
const checkId = String(ev?.checkId || "unknown");
|
|
57132
58334
|
const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
|
|
57133
|
-
const baseDir = process.env.VISOR_SNAPSHOT_DIR ||
|
|
57134
|
-
|
|
57135
|
-
const filePath =
|
|
58335
|
+
const baseDir = process.env.VISOR_SNAPSHOT_DIR || path30.resolve(process.cwd(), ".visor", "snapshots");
|
|
58336
|
+
fs26.mkdirSync(baseDir, { recursive: true });
|
|
58337
|
+
const filePath = path30.join(baseDir, `${threadKey}-${checkId}.json`);
|
|
57136
58338
|
await this.saveSnapshotToFile(filePath);
|
|
57137
58339
|
logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
|
|
57138
58340
|
try {
|
|
@@ -57273,7 +58475,7 @@ var init_state_machine_execution_engine = __esm({
|
|
|
57273
58475
|
* Does not include secrets. Intended for debugging and future resume support.
|
|
57274
58476
|
*/
|
|
57275
58477
|
async saveSnapshotToFile(filePath) {
|
|
57276
|
-
const
|
|
58478
|
+
const fs27 = await import("fs/promises");
|
|
57277
58479
|
const ctx = this._lastContext;
|
|
57278
58480
|
const runner = this._lastRunner;
|
|
57279
58481
|
if (!ctx || !runner) {
|
|
@@ -57293,14 +58495,14 @@ var init_state_machine_execution_engine = __esm({
|
|
|
57293
58495
|
journal: entries,
|
|
57294
58496
|
requestedChecks: ctx.requestedChecks || []
|
|
57295
58497
|
};
|
|
57296
|
-
await
|
|
58498
|
+
await fs27.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
|
|
57297
58499
|
}
|
|
57298
58500
|
/**
|
|
57299
58501
|
* Load a snapshot JSON from file and return it. Resume support can build on this.
|
|
57300
58502
|
*/
|
|
57301
58503
|
async loadSnapshotFromFile(filePath) {
|
|
57302
|
-
const
|
|
57303
|
-
const raw = await
|
|
58504
|
+
const fs27 = await import("fs/promises");
|
|
58505
|
+
const raw = await fs27.readFile(filePath, "utf8");
|
|
57304
58506
|
return JSON.parse(raw);
|
|
57305
58507
|
}
|
|
57306
58508
|
/**
|