@probelabs/visor 0.1.147-ee → 0.1.148-ee
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/frontends/github-frontend.d.ts +2 -1
- package/dist/frontends/github-frontend.d.ts.map +1 -1
- package/dist/index.js +726 -113
- package/dist/providers/ai-check-provider.d.ts.map +1 -1
- package/dist/scheduler/schedule-tool.d.ts.map +1 -1
- package/dist/scheduler/scheduler.d.ts +5 -0
- package/dist/scheduler/scheduler.d.ts.map +1 -1
- package/dist/sdk/{check-provider-registry-LBYIKFYM.mjs → check-provider-registry-AMYY2ZJY.mjs} +5 -6
- package/dist/sdk/{check-provider-registry-SCPM6DIT.mjs → check-provider-registry-DVQDGTOE.mjs} +5 -6
- package/dist/sdk/{chunk-4F5UVWAN.mjs → chunk-62TNF5PJ.mjs} +2 -2
- package/dist/sdk/{chunk-4F5UVWAN.mjs.map → chunk-62TNF5PJ.mjs.map} +1 -1
- package/dist/sdk/{chunk-PNZH3JSI.mjs → chunk-75Q63UNX.mjs} +2742 -276
- package/dist/sdk/chunk-75Q63UNX.mjs.map +1 -0
- package/dist/sdk/{chunk-FBJ7MC7R.mjs → chunk-CISJ6DJW.mjs} +3 -3
- package/dist/sdk/{chunk-EWGX7LI7.mjs → chunk-H4AYMOAT.mjs} +2742 -276
- package/dist/sdk/chunk-H4AYMOAT.mjs.map +1 -0
- package/dist/sdk/{chunk-V2QW6ECX.mjs → chunk-RJLJUTSU.mjs} +2 -2
- package/dist/sdk/{failure-condition-evaluator-FHNZL2US.mjs → failure-condition-evaluator-IVCTD4BZ.mjs} +3 -3
- package/dist/sdk/{github-frontend-47EU2HBY.mjs → github-frontend-DFT5G32K.mjs} +16 -4
- package/dist/sdk/github-frontend-DFT5G32K.mjs.map +1 -0
- package/dist/sdk/{host-GVR4UGZ3.mjs → host-H7IX4GBK.mjs} +2 -2
- package/dist/sdk/{host-KGN5OIAM.mjs → host-NZXGBBJI.mjs} +2 -2
- package/dist/sdk/{routing-CZ36LVVS.mjs → routing-LU5PAREW.mjs} +4 -4
- package/dist/sdk/schedule-tool-4JMWZCCK.mjs +35 -0
- package/dist/sdk/schedule-tool-CONR4VW3.mjs +35 -0
- package/dist/sdk/{schedule-tool-handler-KFYNV7HL.mjs → schedule-tool-handler-AXMR7NBI.mjs} +5 -6
- package/dist/sdk/{schedule-tool-handler-E7XHMU5G.mjs → schedule-tool-handler-YUC6CAXX.mjs} +5 -6
- package/dist/sdk/sdk.js +1608 -406
- package/dist/sdk/sdk.js.map +1 -1
- package/dist/sdk/sdk.mjs +4 -5
- package/dist/sdk/sdk.mjs.map +1 -1
- package/dist/sdk/{trace-helpers-EHDZ42HH.mjs → trace-helpers-6ROJR7N3.mjs} +2 -2
- package/dist/sdk/{workflow-check-provider-5453TW65.mjs → workflow-check-provider-DYSO3PML.mjs} +5 -6
- package/dist/sdk/{workflow-check-provider-BSUSPFOF.mjs → workflow-check-provider-MMB7L3YG.mjs} +5 -6
- package/dist/sdk/workflow-check-provider-MMB7L3YG.mjs.map +1 -0
- package/dist/state-machine/context/build-engine-context.d.ts.map +1 -1
- package/dist/utils/tool-resolver.d.ts.map +1 -1
- package/dist/utils/workspace-manager.d.ts +31 -8
- package/dist/utils/workspace-manager.d.ts.map +1 -1
- package/dist/utils/worktree-manager.d.ts +6 -0
- package/dist/utils/worktree-manager.d.ts.map +1 -1
- package/package.json +2 -2
- package/dist/sdk/chunk-EWGX7LI7.mjs.map +0 -1
- package/dist/sdk/chunk-PNZH3JSI.mjs.map +0 -1
- package/dist/sdk/chunk-XKCER23W.mjs +0 -1490
- package/dist/sdk/chunk-XKCER23W.mjs.map +0 -1
- package/dist/sdk/github-frontend-47EU2HBY.mjs.map +0 -1
- package/dist/sdk/schedule-tool-2COUUTF7.mjs +0 -18
- /package/dist/sdk/{check-provider-registry-LBYIKFYM.mjs.map → check-provider-registry-AMYY2ZJY.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-SCPM6DIT.mjs.map → check-provider-registry-DVQDGTOE.mjs.map} +0 -0
- /package/dist/sdk/{chunk-FBJ7MC7R.mjs.map → chunk-CISJ6DJW.mjs.map} +0 -0
- /package/dist/sdk/{chunk-V2QW6ECX.mjs.map → chunk-RJLJUTSU.mjs.map} +0 -0
- /package/dist/sdk/{failure-condition-evaluator-FHNZL2US.mjs.map → failure-condition-evaluator-IVCTD4BZ.mjs.map} +0 -0
- /package/dist/sdk/{host-GVR4UGZ3.mjs.map → host-H7IX4GBK.mjs.map} +0 -0
- /package/dist/sdk/{host-KGN5OIAM.mjs.map → host-NZXGBBJI.mjs.map} +0 -0
- /package/dist/sdk/{routing-CZ36LVVS.mjs.map → routing-LU5PAREW.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-2COUUTF7.mjs.map → schedule-tool-4JMWZCCK.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-E7XHMU5G.mjs.map → schedule-tool-CONR4VW3.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-KFYNV7HL.mjs.map → schedule-tool-handler-AXMR7NBI.mjs.map} +0 -0
- /package/dist/sdk/{trace-helpers-EHDZ42HH.mjs.map → schedule-tool-handler-YUC6CAXX.mjs.map} +0 -0
- /package/dist/sdk/{workflow-check-provider-5453TW65.mjs.map → trace-helpers-6ROJR7N3.mjs.map} +0 -0
- /package/dist/sdk/{workflow-check-provider-BSUSPFOF.mjs.map → workflow-check-provider-DYSO3PML.mjs.map} +0 -0
|
@@ -34,17 +34,6 @@ import {
|
|
|
34
34
|
config_exports,
|
|
35
35
|
init_config
|
|
36
36
|
} from "./chunk-XNTBSV6M.mjs";
|
|
37
|
-
import {
|
|
38
|
-
ScheduleStore,
|
|
39
|
-
buildScheduleToolContext,
|
|
40
|
-
getScheduleToolDefinition,
|
|
41
|
-
handleScheduleAction,
|
|
42
|
-
init_schedule_parser,
|
|
43
|
-
init_schedule_store,
|
|
44
|
-
init_schedule_tool,
|
|
45
|
-
init_store,
|
|
46
|
-
isScheduleTool
|
|
47
|
-
} from "./chunk-XKCER23W.mjs";
|
|
48
37
|
import {
|
|
49
38
|
ExecutionJournal,
|
|
50
39
|
checkLoopBudget,
|
|
@@ -53,11 +42,11 @@ import {
|
|
|
53
42
|
init_routing,
|
|
54
43
|
init_snapshot_store,
|
|
55
44
|
snapshot_store_exports
|
|
56
|
-
} from "./chunk-
|
|
45
|
+
} from "./chunk-CISJ6DJW.mjs";
|
|
57
46
|
import {
|
|
58
47
|
FailureConditionEvaluator,
|
|
59
48
|
init_failure_condition_evaluator
|
|
60
|
-
} from "./chunk-
|
|
49
|
+
} from "./chunk-RJLJUTSU.mjs";
|
|
61
50
|
import {
|
|
62
51
|
addEvent,
|
|
63
52
|
emitNdjsonFallback,
|
|
@@ -68,7 +57,7 @@ import {
|
|
|
68
57
|
setSpanAttributes,
|
|
69
58
|
trace_helpers_exports,
|
|
70
59
|
withActiveSpan
|
|
71
|
-
} from "./chunk-
|
|
60
|
+
} from "./chunk-62TNF5PJ.mjs";
|
|
72
61
|
import {
|
|
73
62
|
addDiagramBlock,
|
|
74
63
|
init_metrics
|
|
@@ -133,7 +122,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
133
122
|
}
|
|
134
123
|
try {
|
|
135
124
|
const originalProbePath = process.env.PROBE_PATH;
|
|
136
|
-
const
|
|
125
|
+
const fs12 = __require("fs");
|
|
137
126
|
const possiblePaths = [
|
|
138
127
|
// Relative to current working directory (most common in production)
|
|
139
128
|
path.join(process.cwd(), "node_modules/@probelabs/probe/bin/probe-binary"),
|
|
@@ -144,7 +133,7 @@ async function processDiffWithOutline(diffContent) {
|
|
|
144
133
|
];
|
|
145
134
|
let probeBinaryPath;
|
|
146
135
|
for (const candidatePath of possiblePaths) {
|
|
147
|
-
if (
|
|
136
|
+
if (fs12.existsSync(candidatePath)) {
|
|
148
137
|
probeBinaryPath = candidatePath;
|
|
149
138
|
break;
|
|
150
139
|
}
|
|
@@ -1228,8 +1217,8 @@ ${schemaString}`);
|
|
|
1228
1217
|
}
|
|
1229
1218
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
1230
1219
|
try {
|
|
1231
|
-
const
|
|
1232
|
-
const
|
|
1220
|
+
const fs12 = __require("fs");
|
|
1221
|
+
const path15 = __require("path");
|
|
1233
1222
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
1234
1223
|
const provider = this.config.provider || "auto";
|
|
1235
1224
|
const model = this.config.model || "default";
|
|
@@ -1343,20 +1332,20 @@ ${"=".repeat(60)}
|
|
|
1343
1332
|
`;
|
|
1344
1333
|
readableVersion += `${"=".repeat(60)}
|
|
1345
1334
|
`;
|
|
1346
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
1347
|
-
if (!
|
|
1348
|
-
|
|
1335
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path15.join(process.cwd(), "debug-artifacts");
|
|
1336
|
+
if (!fs12.existsSync(debugArtifactsDir)) {
|
|
1337
|
+
fs12.mkdirSync(debugArtifactsDir, { recursive: true });
|
|
1349
1338
|
}
|
|
1350
|
-
const debugFile =
|
|
1339
|
+
const debugFile = path15.join(
|
|
1351
1340
|
debugArtifactsDir,
|
|
1352
1341
|
`prompt-${_checkName || "unknown"}-${timestamp}.json`
|
|
1353
1342
|
);
|
|
1354
|
-
|
|
1355
|
-
const readableFile =
|
|
1343
|
+
fs12.writeFileSync(debugFile, debugJson, "utf-8");
|
|
1344
|
+
const readableFile = path15.join(
|
|
1356
1345
|
debugArtifactsDir,
|
|
1357
1346
|
`prompt-${_checkName || "unknown"}-${timestamp}.txt`
|
|
1358
1347
|
);
|
|
1359
|
-
|
|
1348
|
+
fs12.writeFileSync(readableFile, readableVersion, "utf-8");
|
|
1360
1349
|
log(`
|
|
1361
1350
|
\u{1F4BE} Full debug info saved to:`);
|
|
1362
1351
|
log(` JSON: ${debugFile}`);
|
|
@@ -1389,8 +1378,8 @@ ${"=".repeat(60)}
|
|
|
1389
1378
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
1390
1379
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
1391
1380
|
try {
|
|
1392
|
-
const
|
|
1393
|
-
const
|
|
1381
|
+
const fs12 = __require("fs");
|
|
1382
|
+
const path15 = __require("path");
|
|
1394
1383
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
1395
1384
|
const agentAny2 = agent;
|
|
1396
1385
|
let fullHistory = [];
|
|
@@ -1401,8 +1390,8 @@ ${"=".repeat(60)}
|
|
|
1401
1390
|
} else if (agentAny2._messages) {
|
|
1402
1391
|
fullHistory = agentAny2._messages;
|
|
1403
1392
|
}
|
|
1404
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
1405
|
-
const sessionBase =
|
|
1393
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path15.join(process.cwd(), "debug-artifacts");
|
|
1394
|
+
const sessionBase = path15.join(
|
|
1406
1395
|
debugArtifactsDir,
|
|
1407
1396
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
1408
1397
|
);
|
|
@@ -1414,7 +1403,7 @@ ${"=".repeat(60)}
|
|
|
1414
1403
|
schema: effectiveSchema,
|
|
1415
1404
|
totalMessages: fullHistory.length
|
|
1416
1405
|
};
|
|
1417
|
-
|
|
1406
|
+
fs12.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
1418
1407
|
let readable = `=============================================================
|
|
1419
1408
|
`;
|
|
1420
1409
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -1441,7 +1430,7 @@ ${"=".repeat(60)}
|
|
|
1441
1430
|
`;
|
|
1442
1431
|
readable += content + "\n";
|
|
1443
1432
|
});
|
|
1444
|
-
|
|
1433
|
+
fs12.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
1445
1434
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
1446
1435
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
1447
1436
|
} catch (error) {
|
|
@@ -1450,11 +1439,11 @@ ${"=".repeat(60)}
|
|
|
1450
1439
|
}
|
|
1451
1440
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
1452
1441
|
try {
|
|
1453
|
-
const
|
|
1454
|
-
const
|
|
1442
|
+
const fs12 = __require("fs");
|
|
1443
|
+
const path15 = __require("path");
|
|
1455
1444
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
1456
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
1457
|
-
const responseFile =
|
|
1445
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path15.join(process.cwd(), "debug-artifacts");
|
|
1446
|
+
const responseFile = path15.join(
|
|
1458
1447
|
debugArtifactsDir,
|
|
1459
1448
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
1460
1449
|
);
|
|
@@ -1487,7 +1476,7 @@ ${"=".repeat(60)}
|
|
|
1487
1476
|
`;
|
|
1488
1477
|
responseContent += `${"=".repeat(60)}
|
|
1489
1478
|
`;
|
|
1490
|
-
|
|
1479
|
+
fs12.writeFileSync(responseFile, responseContent, "utf-8");
|
|
1491
1480
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
1492
1481
|
} catch (error) {
|
|
1493
1482
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -1503,9 +1492,9 @@ ${"=".repeat(60)}
|
|
|
1503
1492
|
await agentAny._telemetryConfig.shutdown();
|
|
1504
1493
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${agentAny._traceFilePath}`);
|
|
1505
1494
|
if (process.env.GITHUB_ACTIONS) {
|
|
1506
|
-
const
|
|
1507
|
-
if (
|
|
1508
|
-
const stats =
|
|
1495
|
+
const fs12 = __require("fs");
|
|
1496
|
+
if (fs12.existsSync(agentAny._traceFilePath)) {
|
|
1497
|
+
const stats = fs12.statSync(agentAny._traceFilePath);
|
|
1509
1498
|
console.log(
|
|
1510
1499
|
`::notice title=AI Trace Saved::${agentAny._traceFilePath} (${stats.size} bytes)`
|
|
1511
1500
|
);
|
|
@@ -1712,8 +1701,8 @@ ${schemaString}`);
|
|
|
1712
1701
|
const model = this.config.model || "default";
|
|
1713
1702
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
1714
1703
|
try {
|
|
1715
|
-
const
|
|
1716
|
-
const
|
|
1704
|
+
const fs12 = __require("fs");
|
|
1705
|
+
const path15 = __require("path");
|
|
1717
1706
|
const os = __require("os");
|
|
1718
1707
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
1719
1708
|
const debugData = {
|
|
@@ -1787,18 +1776,18 @@ ${"=".repeat(60)}
|
|
|
1787
1776
|
readableVersion += `${"=".repeat(60)}
|
|
1788
1777
|
`;
|
|
1789
1778
|
const tempDir = os.tmpdir();
|
|
1790
|
-
const promptFile =
|
|
1791
|
-
|
|
1779
|
+
const promptFile = path15.join(tempDir, `visor-prompt-${timestamp}.txt`);
|
|
1780
|
+
fs12.writeFileSync(promptFile, prompt, "utf-8");
|
|
1792
1781
|
log(`
|
|
1793
1782
|
\u{1F4BE} Prompt saved to: ${promptFile}`);
|
|
1794
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
1783
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path15.join(process.cwd(), "debug-artifacts");
|
|
1795
1784
|
try {
|
|
1796
|
-
const base =
|
|
1785
|
+
const base = path15.join(
|
|
1797
1786
|
debugArtifactsDir,
|
|
1798
1787
|
`prompt-${_checkName || "unknown"}-${timestamp}`
|
|
1799
1788
|
);
|
|
1800
|
-
|
|
1801
|
-
|
|
1789
|
+
fs12.writeFileSync(base + ".json", debugJson, "utf-8");
|
|
1790
|
+
fs12.writeFileSync(base + ".summary.txt", readableVersion, "utf-8");
|
|
1802
1791
|
log(`
|
|
1803
1792
|
\u{1F4BE} Full debug info saved to directory: ${debugArtifactsDir}`);
|
|
1804
1793
|
} catch {
|
|
@@ -1843,8 +1832,8 @@ $ ${cliCommand}
|
|
|
1843
1832
|
log(`\u{1F4E4} Response length: ${response.length} characters`);
|
|
1844
1833
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
1845
1834
|
try {
|
|
1846
|
-
const
|
|
1847
|
-
const
|
|
1835
|
+
const fs12 = __require("fs");
|
|
1836
|
+
const path15 = __require("path");
|
|
1848
1837
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
1849
1838
|
const agentAny = agent;
|
|
1850
1839
|
let fullHistory = [];
|
|
@@ -1855,8 +1844,8 @@ $ ${cliCommand}
|
|
|
1855
1844
|
} else if (agentAny._messages) {
|
|
1856
1845
|
fullHistory = agentAny._messages;
|
|
1857
1846
|
}
|
|
1858
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
1859
|
-
const sessionBase =
|
|
1847
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path15.join(process.cwd(), "debug-artifacts");
|
|
1848
|
+
const sessionBase = path15.join(
|
|
1860
1849
|
debugArtifactsDir,
|
|
1861
1850
|
`session-${_checkName || "unknown"}-${timestamp}`
|
|
1862
1851
|
);
|
|
@@ -1868,7 +1857,7 @@ $ ${cliCommand}
|
|
|
1868
1857
|
schema: effectiveSchema,
|
|
1869
1858
|
totalMessages: fullHistory.length
|
|
1870
1859
|
};
|
|
1871
|
-
|
|
1860
|
+
fs12.writeFileSync(sessionBase + ".json", JSON.stringify(sessionData, null, 2), "utf-8");
|
|
1872
1861
|
let readable = `=============================================================
|
|
1873
1862
|
`;
|
|
1874
1863
|
readable += `COMPLETE AI SESSION HISTORY (AFTER RESPONSE)
|
|
@@ -1895,7 +1884,7 @@ ${"=".repeat(60)}
|
|
|
1895
1884
|
`;
|
|
1896
1885
|
readable += content + "\n";
|
|
1897
1886
|
});
|
|
1898
|
-
|
|
1887
|
+
fs12.writeFileSync(sessionBase + ".summary.txt", readable, "utf-8");
|
|
1899
1888
|
log(`\u{1F4BE} Complete session history saved:`);
|
|
1900
1889
|
log(` - Contains ALL ${fullHistory.length} messages (prompts + responses)`);
|
|
1901
1890
|
} catch (error) {
|
|
@@ -1904,11 +1893,11 @@ ${"=".repeat(60)}
|
|
|
1904
1893
|
}
|
|
1905
1894
|
if (process.env.VISOR_DEBUG_AI_SESSIONS === "true") {
|
|
1906
1895
|
try {
|
|
1907
|
-
const
|
|
1908
|
-
const
|
|
1896
|
+
const fs12 = __require("fs");
|
|
1897
|
+
const path15 = __require("path");
|
|
1909
1898
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
1910
|
-
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS ||
|
|
1911
|
-
const responseFile =
|
|
1899
|
+
const debugArtifactsDir = process.env.VISOR_DEBUG_ARTIFACTS || path15.join(process.cwd(), "debug-artifacts");
|
|
1900
|
+
const responseFile = path15.join(
|
|
1912
1901
|
debugArtifactsDir,
|
|
1913
1902
|
`response-${_checkName || "unknown"}-${timestamp}.txt`
|
|
1914
1903
|
);
|
|
@@ -1941,7 +1930,7 @@ ${"=".repeat(60)}
|
|
|
1941
1930
|
`;
|
|
1942
1931
|
responseContent += `${"=".repeat(60)}
|
|
1943
1932
|
`;
|
|
1944
|
-
|
|
1933
|
+
fs12.writeFileSync(responseFile, responseContent, "utf-8");
|
|
1945
1934
|
log(`\u{1F4BE} Response saved to: ${responseFile}`);
|
|
1946
1935
|
} catch (error) {
|
|
1947
1936
|
log(`\u26A0\uFE0F Could not save response file: ${error}`);
|
|
@@ -1959,9 +1948,9 @@ ${"=".repeat(60)}
|
|
|
1959
1948
|
await telemetry.shutdown();
|
|
1960
1949
|
log(`\u{1F4CA} OpenTelemetry trace saved to: ${traceFilePath}`);
|
|
1961
1950
|
if (process.env.GITHUB_ACTIONS) {
|
|
1962
|
-
const
|
|
1963
|
-
if (
|
|
1964
|
-
const stats =
|
|
1951
|
+
const fs12 = __require("fs");
|
|
1952
|
+
if (fs12.existsSync(traceFilePath)) {
|
|
1953
|
+
const stats = fs12.statSync(traceFilePath);
|
|
1965
1954
|
console.log(
|
|
1966
1955
|
`::notice title=AI Trace Saved::OpenTelemetry trace file size: ${stats.size} bytes`
|
|
1967
1956
|
);
|
|
@@ -1999,8 +1988,8 @@ ${"=".repeat(60)}
|
|
|
1999
1988
|
* Load schema content from schema files or inline definitions
|
|
2000
1989
|
*/
|
|
2001
1990
|
async loadSchemaContent(schema) {
|
|
2002
|
-
const
|
|
2003
|
-
const
|
|
1991
|
+
const fs12 = __require("fs").promises;
|
|
1992
|
+
const path15 = __require("path");
|
|
2004
1993
|
if (typeof schema === "object" && schema !== null) {
|
|
2005
1994
|
log("\u{1F4CB} Using inline schema object from configuration");
|
|
2006
1995
|
return JSON.stringify(schema);
|
|
@@ -2013,14 +2002,14 @@ ${"=".repeat(60)}
|
|
|
2013
2002
|
}
|
|
2014
2003
|
} catch {
|
|
2015
2004
|
}
|
|
2016
|
-
if ((schema.startsWith("./") || schema.includes(".json")) && !
|
|
2005
|
+
if ((schema.startsWith("./") || schema.includes(".json")) && !path15.isAbsolute(schema)) {
|
|
2017
2006
|
if (schema.includes("..") || schema.includes("\0")) {
|
|
2018
2007
|
throw new Error("Invalid schema path: path traversal not allowed");
|
|
2019
2008
|
}
|
|
2020
2009
|
try {
|
|
2021
|
-
const schemaPath =
|
|
2010
|
+
const schemaPath = path15.resolve(process.cwd(), schema);
|
|
2022
2011
|
log(`\u{1F4CB} Loading custom schema from file: ${schemaPath}`);
|
|
2023
|
-
const schemaContent = await
|
|
2012
|
+
const schemaContent = await fs12.readFile(schemaPath, "utf-8");
|
|
2024
2013
|
return schemaContent.trim();
|
|
2025
2014
|
} catch (error) {
|
|
2026
2015
|
throw new Error(
|
|
@@ -2034,22 +2023,22 @@ ${"=".repeat(60)}
|
|
|
2034
2023
|
}
|
|
2035
2024
|
const candidatePaths = [
|
|
2036
2025
|
// GitHub Action bundle location
|
|
2037
|
-
|
|
2026
|
+
path15.join(__dirname, "output", sanitizedSchemaName, "schema.json"),
|
|
2038
2027
|
// Historical fallback when src/output was inadvertently bundled as output1/
|
|
2039
|
-
|
|
2028
|
+
path15.join(__dirname, "output1", sanitizedSchemaName, "schema.json"),
|
|
2040
2029
|
// Local dev (repo root)
|
|
2041
|
-
|
|
2030
|
+
path15.join(process.cwd(), "output", sanitizedSchemaName, "schema.json")
|
|
2042
2031
|
];
|
|
2043
2032
|
for (const schemaPath of candidatePaths) {
|
|
2044
2033
|
try {
|
|
2045
|
-
const schemaContent = await
|
|
2034
|
+
const schemaContent = await fs12.readFile(schemaPath, "utf-8");
|
|
2046
2035
|
return schemaContent.trim();
|
|
2047
2036
|
} catch {
|
|
2048
2037
|
}
|
|
2049
2038
|
}
|
|
2050
|
-
const distPath =
|
|
2051
|
-
const distAltPath =
|
|
2052
|
-
const cwdPath =
|
|
2039
|
+
const distPath = path15.join(__dirname, "output", sanitizedSchemaName, "schema.json");
|
|
2040
|
+
const distAltPath = path15.join(__dirname, "output1", sanitizedSchemaName, "schema.json");
|
|
2041
|
+
const cwdPath = path15.join(process.cwd(), "output", sanitizedSchemaName, "schema.json");
|
|
2053
2042
|
throw new Error(
|
|
2054
2043
|
`Failed to load schema '${sanitizedSchemaName}'. Tried: ${distPath}, ${distAltPath}, and ${cwdPath}. Ensure build copies 'output/' into dist (build:cli), or provide a custom schema file/path.`
|
|
2055
2044
|
);
|
|
@@ -3953,7 +3942,7 @@ async function executeWorkflowAsTool(workflowId, args, context2, argsOverrides)
|
|
|
3953
3942
|
...args,
|
|
3954
3943
|
...argsOverrides
|
|
3955
3944
|
};
|
|
3956
|
-
const { WorkflowCheckProvider: WorkflowCheckProvider2 } = await import("./workflow-check-provider-
|
|
3945
|
+
const { WorkflowCheckProvider: WorkflowCheckProvider2 } = await import("./workflow-check-provider-DYSO3PML.mjs");
|
|
3957
3946
|
const provider = new WorkflowCheckProvider2();
|
|
3958
3947
|
const checkConfig = {
|
|
3959
3948
|
type: "workflow",
|
|
@@ -4021,6 +4010,816 @@ var init_workflow_tool_executor = __esm({
|
|
|
4021
4010
|
}
|
|
4022
4011
|
});
|
|
4023
4012
|
|
|
4013
|
+
// src/scheduler/store/sqlite-store.ts
|
|
4014
|
+
import path4 from "path";
|
|
4015
|
+
import fs3 from "fs";
|
|
4016
|
+
import { v4 as uuidv4 } from "uuid";
|
|
4017
|
+
function toDbRow(schedule) {
|
|
4018
|
+
return {
|
|
4019
|
+
id: schedule.id,
|
|
4020
|
+
creator_id: schedule.creatorId,
|
|
4021
|
+
creator_context: schedule.creatorContext ?? null,
|
|
4022
|
+
creator_name: schedule.creatorName ?? null,
|
|
4023
|
+
timezone: schedule.timezone,
|
|
4024
|
+
schedule_expr: schedule.schedule,
|
|
4025
|
+
run_at: schedule.runAt ?? null,
|
|
4026
|
+
is_recurring: schedule.isRecurring ? 1 : 0,
|
|
4027
|
+
original_expression: schedule.originalExpression,
|
|
4028
|
+
workflow: schedule.workflow ?? null,
|
|
4029
|
+
workflow_inputs: schedule.workflowInputs ? JSON.stringify(schedule.workflowInputs) : null,
|
|
4030
|
+
output_context: schedule.outputContext ? JSON.stringify(schedule.outputContext) : null,
|
|
4031
|
+
status: schedule.status,
|
|
4032
|
+
created_at: schedule.createdAt,
|
|
4033
|
+
last_run_at: schedule.lastRunAt ?? null,
|
|
4034
|
+
next_run_at: schedule.nextRunAt ?? null,
|
|
4035
|
+
run_count: schedule.runCount,
|
|
4036
|
+
failure_count: schedule.failureCount,
|
|
4037
|
+
last_error: schedule.lastError ?? null,
|
|
4038
|
+
previous_response: schedule.previousResponse ?? null
|
|
4039
|
+
};
|
|
4040
|
+
}
|
|
4041
|
+
function safeJsonParse(value) {
|
|
4042
|
+
if (!value) return void 0;
|
|
4043
|
+
try {
|
|
4044
|
+
return JSON.parse(value);
|
|
4045
|
+
} catch {
|
|
4046
|
+
return void 0;
|
|
4047
|
+
}
|
|
4048
|
+
}
|
|
4049
|
+
function fromDbRow(row) {
|
|
4050
|
+
return {
|
|
4051
|
+
id: row.id,
|
|
4052
|
+
creatorId: row.creator_id,
|
|
4053
|
+
creatorContext: row.creator_context ?? void 0,
|
|
4054
|
+
creatorName: row.creator_name ?? void 0,
|
|
4055
|
+
timezone: row.timezone,
|
|
4056
|
+
schedule: row.schedule_expr,
|
|
4057
|
+
runAt: row.run_at ?? void 0,
|
|
4058
|
+
isRecurring: row.is_recurring === 1,
|
|
4059
|
+
originalExpression: row.original_expression,
|
|
4060
|
+
workflow: row.workflow ?? void 0,
|
|
4061
|
+
workflowInputs: safeJsonParse(row.workflow_inputs),
|
|
4062
|
+
outputContext: safeJsonParse(row.output_context),
|
|
4063
|
+
status: row.status,
|
|
4064
|
+
createdAt: row.created_at,
|
|
4065
|
+
lastRunAt: row.last_run_at ?? void 0,
|
|
4066
|
+
nextRunAt: row.next_run_at ?? void 0,
|
|
4067
|
+
runCount: row.run_count,
|
|
4068
|
+
failureCount: row.failure_count,
|
|
4069
|
+
lastError: row.last_error ?? void 0,
|
|
4070
|
+
previousResponse: row.previous_response ?? void 0
|
|
4071
|
+
};
|
|
4072
|
+
}
|
|
4073
|
+
var SqliteStoreBackend;
|
|
4074
|
+
var init_sqlite_store = __esm({
|
|
4075
|
+
"src/scheduler/store/sqlite-store.ts"() {
|
|
4076
|
+
"use strict";
|
|
4077
|
+
init_logger();
|
|
4078
|
+
SqliteStoreBackend = class {
|
|
4079
|
+
db = null;
|
|
4080
|
+
dbPath;
|
|
4081
|
+
// In-memory locks (single-node only; SQLite doesn't support distributed locking)
|
|
4082
|
+
locks = /* @__PURE__ */ new Map();
|
|
4083
|
+
constructor(filename) {
|
|
4084
|
+
this.dbPath = filename || ".visor/schedules.db";
|
|
4085
|
+
}
|
|
4086
|
+
async initialize() {
|
|
4087
|
+
const resolvedPath = path4.resolve(process.cwd(), this.dbPath);
|
|
4088
|
+
const dir = path4.dirname(resolvedPath);
|
|
4089
|
+
fs3.mkdirSync(dir, { recursive: true });
|
|
4090
|
+
const { createRequire } = __require("module");
|
|
4091
|
+
const runtimeRequire = createRequire(__filename);
|
|
4092
|
+
let Database;
|
|
4093
|
+
try {
|
|
4094
|
+
Database = runtimeRequire("better-sqlite3");
|
|
4095
|
+
} catch (err) {
|
|
4096
|
+
const code = err?.code;
|
|
4097
|
+
if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
|
|
4098
|
+
throw new Error(
|
|
4099
|
+
"better-sqlite3 is required for SQLite schedule storage. Install it with: npm install better-sqlite3"
|
|
4100
|
+
);
|
|
4101
|
+
}
|
|
4102
|
+
throw err;
|
|
4103
|
+
}
|
|
4104
|
+
this.db = new Database(resolvedPath);
|
|
4105
|
+
this.db.pragma("journal_mode = WAL");
|
|
4106
|
+
this.migrateSchema();
|
|
4107
|
+
logger.info(`[SqliteStore] Initialized at ${this.dbPath}`);
|
|
4108
|
+
}
|
|
4109
|
+
async shutdown() {
|
|
4110
|
+
if (this.db) {
|
|
4111
|
+
this.db.close();
|
|
4112
|
+
this.db = null;
|
|
4113
|
+
}
|
|
4114
|
+
this.locks.clear();
|
|
4115
|
+
}
|
|
4116
|
+
// --- Schema Migration ---
|
|
4117
|
+
migrateSchema() {
|
|
4118
|
+
const db = this.getDb();
|
|
4119
|
+
db.exec(`
|
|
4120
|
+
CREATE TABLE IF NOT EXISTS schedules (
|
|
4121
|
+
id VARCHAR(36) PRIMARY KEY,
|
|
4122
|
+
creator_id VARCHAR(255) NOT NULL,
|
|
4123
|
+
creator_context VARCHAR(255),
|
|
4124
|
+
creator_name VARCHAR(255),
|
|
4125
|
+
timezone VARCHAR(64) NOT NULL DEFAULT 'UTC',
|
|
4126
|
+
schedule_expr VARCHAR(255),
|
|
4127
|
+
run_at BIGINT,
|
|
4128
|
+
is_recurring BOOLEAN NOT NULL,
|
|
4129
|
+
original_expression TEXT,
|
|
4130
|
+
workflow VARCHAR(255),
|
|
4131
|
+
workflow_inputs TEXT,
|
|
4132
|
+
output_context TEXT,
|
|
4133
|
+
status VARCHAR(20) NOT NULL,
|
|
4134
|
+
created_at BIGINT NOT NULL,
|
|
4135
|
+
last_run_at BIGINT,
|
|
4136
|
+
next_run_at BIGINT,
|
|
4137
|
+
run_count INTEGER NOT NULL DEFAULT 0,
|
|
4138
|
+
failure_count INTEGER NOT NULL DEFAULT 0,
|
|
4139
|
+
last_error TEXT,
|
|
4140
|
+
previous_response TEXT,
|
|
4141
|
+
claimed_by VARCHAR(255),
|
|
4142
|
+
claimed_at BIGINT,
|
|
4143
|
+
lock_token VARCHAR(36)
|
|
4144
|
+
);
|
|
4145
|
+
|
|
4146
|
+
CREATE INDEX IF NOT EXISTS idx_schedules_creator_id
|
|
4147
|
+
ON schedules(creator_id);
|
|
4148
|
+
|
|
4149
|
+
CREATE INDEX IF NOT EXISTS idx_schedules_status
|
|
4150
|
+
ON schedules(status);
|
|
4151
|
+
|
|
4152
|
+
CREATE INDEX IF NOT EXISTS idx_schedules_status_next_run
|
|
4153
|
+
ON schedules(status, next_run_at);
|
|
4154
|
+
|
|
4155
|
+
CREATE TABLE IF NOT EXISTS scheduler_locks (
|
|
4156
|
+
lock_id VARCHAR(255) PRIMARY KEY,
|
|
4157
|
+
node_id VARCHAR(255) NOT NULL,
|
|
4158
|
+
lock_token VARCHAR(36) NOT NULL,
|
|
4159
|
+
acquired_at BIGINT NOT NULL,
|
|
4160
|
+
expires_at BIGINT NOT NULL
|
|
4161
|
+
);
|
|
4162
|
+
`);
|
|
4163
|
+
}
|
|
4164
|
+
// --- Helpers ---
|
|
4165
|
+
getDb() {
|
|
4166
|
+
if (!this.db) {
|
|
4167
|
+
throw new Error("[SqliteStore] Database not initialized. Call initialize() first.");
|
|
4168
|
+
}
|
|
4169
|
+
return this.db;
|
|
4170
|
+
}
|
|
4171
|
+
// --- CRUD ---
|
|
4172
|
+
async create(schedule) {
|
|
4173
|
+
const db = this.getDb();
|
|
4174
|
+
const newSchedule = {
|
|
4175
|
+
...schedule,
|
|
4176
|
+
id: uuidv4(),
|
|
4177
|
+
createdAt: Date.now(),
|
|
4178
|
+
runCount: 0,
|
|
4179
|
+
failureCount: 0,
|
|
4180
|
+
status: "active"
|
|
4181
|
+
};
|
|
4182
|
+
const row = toDbRow(newSchedule);
|
|
4183
|
+
db.prepare(
|
|
4184
|
+
`
|
|
4185
|
+
INSERT INTO schedules (
|
|
4186
|
+
id, creator_id, creator_context, creator_name, timezone,
|
|
4187
|
+
schedule_expr, run_at, is_recurring, original_expression,
|
|
4188
|
+
workflow, workflow_inputs, output_context,
|
|
4189
|
+
status, created_at, last_run_at, next_run_at,
|
|
4190
|
+
run_count, failure_count, last_error, previous_response
|
|
4191
|
+
) VALUES (
|
|
4192
|
+
?, ?, ?, ?, ?,
|
|
4193
|
+
?, ?, ?, ?,
|
|
4194
|
+
?, ?, ?,
|
|
4195
|
+
?, ?, ?, ?,
|
|
4196
|
+
?, ?, ?, ?
|
|
4197
|
+
)
|
|
4198
|
+
`
|
|
4199
|
+
).run(
|
|
4200
|
+
row.id,
|
|
4201
|
+
row.creator_id,
|
|
4202
|
+
row.creator_context,
|
|
4203
|
+
row.creator_name,
|
|
4204
|
+
row.timezone,
|
|
4205
|
+
row.schedule_expr,
|
|
4206
|
+
row.run_at,
|
|
4207
|
+
row.is_recurring,
|
|
4208
|
+
row.original_expression,
|
|
4209
|
+
row.workflow,
|
|
4210
|
+
row.workflow_inputs,
|
|
4211
|
+
row.output_context,
|
|
4212
|
+
row.status,
|
|
4213
|
+
row.created_at,
|
|
4214
|
+
row.last_run_at,
|
|
4215
|
+
row.next_run_at,
|
|
4216
|
+
row.run_count,
|
|
4217
|
+
row.failure_count,
|
|
4218
|
+
row.last_error,
|
|
4219
|
+
row.previous_response
|
|
4220
|
+
);
|
|
4221
|
+
logger.info(
|
|
4222
|
+
`[SqliteStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`
|
|
4223
|
+
);
|
|
4224
|
+
return newSchedule;
|
|
4225
|
+
}
|
|
4226
|
+
async importSchedule(schedule) {
|
|
4227
|
+
const db = this.getDb();
|
|
4228
|
+
const row = toDbRow(schedule);
|
|
4229
|
+
db.prepare(
|
|
4230
|
+
`
|
|
4231
|
+
INSERT OR IGNORE INTO schedules (
|
|
4232
|
+
id, creator_id, creator_context, creator_name, timezone,
|
|
4233
|
+
schedule_expr, run_at, is_recurring, original_expression,
|
|
4234
|
+
workflow, workflow_inputs, output_context,
|
|
4235
|
+
status, created_at, last_run_at, next_run_at,
|
|
4236
|
+
run_count, failure_count, last_error, previous_response
|
|
4237
|
+
) VALUES (
|
|
4238
|
+
?, ?, ?, ?, ?,
|
|
4239
|
+
?, ?, ?, ?,
|
|
4240
|
+
?, ?, ?,
|
|
4241
|
+
?, ?, ?, ?,
|
|
4242
|
+
?, ?, ?, ?
|
|
4243
|
+
)
|
|
4244
|
+
`
|
|
4245
|
+
).run(
|
|
4246
|
+
row.id,
|
|
4247
|
+
row.creator_id,
|
|
4248
|
+
row.creator_context,
|
|
4249
|
+
row.creator_name,
|
|
4250
|
+
row.timezone,
|
|
4251
|
+
row.schedule_expr,
|
|
4252
|
+
row.run_at,
|
|
4253
|
+
row.is_recurring,
|
|
4254
|
+
row.original_expression,
|
|
4255
|
+
row.workflow,
|
|
4256
|
+
row.workflow_inputs,
|
|
4257
|
+
row.output_context,
|
|
4258
|
+
row.status,
|
|
4259
|
+
row.created_at,
|
|
4260
|
+
row.last_run_at,
|
|
4261
|
+
row.next_run_at,
|
|
4262
|
+
row.run_count,
|
|
4263
|
+
row.failure_count,
|
|
4264
|
+
row.last_error,
|
|
4265
|
+
row.previous_response
|
|
4266
|
+
);
|
|
4267
|
+
}
|
|
4268
|
+
async get(id) {
|
|
4269
|
+
const db = this.getDb();
|
|
4270
|
+
const row = db.prepare("SELECT * FROM schedules WHERE id = ?").get(id);
|
|
4271
|
+
return row ? fromDbRow(row) : void 0;
|
|
4272
|
+
}
|
|
4273
|
+
async update(id, patch) {
|
|
4274
|
+
const db = this.getDb();
|
|
4275
|
+
const existing = db.prepare("SELECT * FROM schedules WHERE id = ?").get(id);
|
|
4276
|
+
if (!existing) return void 0;
|
|
4277
|
+
const current = fromDbRow(existing);
|
|
4278
|
+
const updated = { ...current, ...patch, id: current.id };
|
|
4279
|
+
const row = toDbRow(updated);
|
|
4280
|
+
db.prepare(
|
|
4281
|
+
`
|
|
4282
|
+
UPDATE schedules SET
|
|
4283
|
+
creator_id = ?, creator_context = ?, creator_name = ?, timezone = ?,
|
|
4284
|
+
schedule_expr = ?, run_at = ?, is_recurring = ?, original_expression = ?,
|
|
4285
|
+
workflow = ?, workflow_inputs = ?, output_context = ?,
|
|
4286
|
+
status = ?, last_run_at = ?, next_run_at = ?,
|
|
4287
|
+
run_count = ?, failure_count = ?, last_error = ?, previous_response = ?
|
|
4288
|
+
WHERE id = ?
|
|
4289
|
+
`
|
|
4290
|
+
).run(
|
|
4291
|
+
row.creator_id,
|
|
4292
|
+
row.creator_context,
|
|
4293
|
+
row.creator_name,
|
|
4294
|
+
row.timezone,
|
|
4295
|
+
row.schedule_expr,
|
|
4296
|
+
row.run_at,
|
|
4297
|
+
row.is_recurring,
|
|
4298
|
+
row.original_expression,
|
|
4299
|
+
row.workflow,
|
|
4300
|
+
row.workflow_inputs,
|
|
4301
|
+
row.output_context,
|
|
4302
|
+
row.status,
|
|
4303
|
+
row.last_run_at,
|
|
4304
|
+
row.next_run_at,
|
|
4305
|
+
row.run_count,
|
|
4306
|
+
row.failure_count,
|
|
4307
|
+
row.last_error,
|
|
4308
|
+
row.previous_response,
|
|
4309
|
+
row.id
|
|
4310
|
+
);
|
|
4311
|
+
return updated;
|
|
4312
|
+
}
|
|
4313
|
+
async delete(id) {
|
|
4314
|
+
const db = this.getDb();
|
|
4315
|
+
const result = db.prepare("DELETE FROM schedules WHERE id = ?").run(id);
|
|
4316
|
+
if (result.changes > 0) {
|
|
4317
|
+
logger.info(`[SqliteStore] Deleted schedule ${id}`);
|
|
4318
|
+
return true;
|
|
4319
|
+
}
|
|
4320
|
+
return false;
|
|
4321
|
+
}
|
|
4322
|
+
// --- Queries ---
|
|
4323
|
+
async getByCreator(creatorId) {
|
|
4324
|
+
const db = this.getDb();
|
|
4325
|
+
const rows = db.prepare("SELECT * FROM schedules WHERE creator_id = ?").all(creatorId);
|
|
4326
|
+
return rows.map(fromDbRow);
|
|
4327
|
+
}
|
|
4328
|
+
async getActiveSchedules() {
|
|
4329
|
+
const db = this.getDb();
|
|
4330
|
+
const rows = db.prepare("SELECT * FROM schedules WHERE status = 'active'").all();
|
|
4331
|
+
return rows.map(fromDbRow);
|
|
4332
|
+
}
|
|
4333
|
+
async getDueSchedules(now) {
|
|
4334
|
+
const ts = now ?? Date.now();
|
|
4335
|
+
const db = this.getDb();
|
|
4336
|
+
const rows = db.prepare(
|
|
4337
|
+
`SELECT * FROM schedules
|
|
4338
|
+
WHERE status = 'active'
|
|
4339
|
+
AND (
|
|
4340
|
+
(is_recurring = 0 AND run_at IS NOT NULL AND run_at <= ?)
|
|
4341
|
+
OR
|
|
4342
|
+
(is_recurring = 1 AND next_run_at IS NOT NULL AND next_run_at <= ?)
|
|
4343
|
+
)`
|
|
4344
|
+
).all(ts, ts);
|
|
4345
|
+
return rows.map(fromDbRow);
|
|
4346
|
+
}
|
|
4347
|
+
async findByWorkflow(creatorId, workflowName) {
|
|
4348
|
+
const db = this.getDb();
|
|
4349
|
+
const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
|
|
4350
|
+
const pattern = `%${escaped}%`;
|
|
4351
|
+
const rows = db.prepare(
|
|
4352
|
+
`SELECT * FROM schedules
|
|
4353
|
+
WHERE creator_id = ? AND status = 'active'
|
|
4354
|
+
AND LOWER(workflow) LIKE ? ESCAPE '\\'`
|
|
4355
|
+
).all(creatorId, pattern);
|
|
4356
|
+
return rows.map(fromDbRow);
|
|
4357
|
+
}
|
|
4358
|
+
async getAll() {
|
|
4359
|
+
const db = this.getDb();
|
|
4360
|
+
const rows = db.prepare("SELECT * FROM schedules").all();
|
|
4361
|
+
return rows.map(fromDbRow);
|
|
4362
|
+
}
|
|
4363
|
+
async getStats() {
|
|
4364
|
+
const db = this.getDb();
|
|
4365
|
+
const row = db.prepare(
|
|
4366
|
+
`SELECT
|
|
4367
|
+
COUNT(*) as total,
|
|
4368
|
+
SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active,
|
|
4369
|
+
SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused,
|
|
4370
|
+
SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed,
|
|
4371
|
+
SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed,
|
|
4372
|
+
SUM(CASE WHEN is_recurring = 1 THEN 1 ELSE 0 END) as recurring,
|
|
4373
|
+
SUM(CASE WHEN is_recurring = 0 THEN 1 ELSE 0 END) as one_time
|
|
4374
|
+
FROM schedules`
|
|
4375
|
+
).get();
|
|
4376
|
+
return {
|
|
4377
|
+
total: row.total,
|
|
4378
|
+
active: row.active,
|
|
4379
|
+
paused: row.paused,
|
|
4380
|
+
completed: row.completed,
|
|
4381
|
+
failed: row.failed,
|
|
4382
|
+
recurring: row.recurring,
|
|
4383
|
+
oneTime: row.one_time
|
|
4384
|
+
};
|
|
4385
|
+
}
|
|
4386
|
+
async validateLimits(creatorId, isRecurring, limits) {
|
|
4387
|
+
const db = this.getDb();
|
|
4388
|
+
if (limits.maxGlobal) {
|
|
4389
|
+
const row = db.prepare("SELECT COUNT(*) as cnt FROM schedules").get();
|
|
4390
|
+
if (row.cnt >= limits.maxGlobal) {
|
|
4391
|
+
throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
|
|
4392
|
+
}
|
|
4393
|
+
}
|
|
4394
|
+
if (limits.maxPerUser) {
|
|
4395
|
+
const row = db.prepare("SELECT COUNT(*) as cnt FROM schedules WHERE creator_id = ?").get(creatorId);
|
|
4396
|
+
if (row.cnt >= limits.maxPerUser) {
|
|
4397
|
+
throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
|
|
4398
|
+
}
|
|
4399
|
+
}
|
|
4400
|
+
if (isRecurring && limits.maxRecurringPerUser) {
|
|
4401
|
+
const row = db.prepare("SELECT COUNT(*) as cnt FROM schedules WHERE creator_id = ? AND is_recurring = 1").get(creatorId);
|
|
4402
|
+
if (row.cnt >= limits.maxRecurringPerUser) {
|
|
4403
|
+
throw new Error(
|
|
4404
|
+
`You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
|
|
4405
|
+
);
|
|
4406
|
+
}
|
|
4407
|
+
}
|
|
4408
|
+
}
|
|
4409
|
+
// --- HA Locking (in-memory for SQLite — single-node only) ---
|
|
4410
|
+
async tryAcquireLock(scheduleId, nodeId, ttlSeconds) {
|
|
4411
|
+
const now = Date.now();
|
|
4412
|
+
const existing = this.locks.get(scheduleId);
|
|
4413
|
+
if (existing && existing.expiresAt > now) {
|
|
4414
|
+
if (existing.nodeId === nodeId) {
|
|
4415
|
+
return existing.token;
|
|
4416
|
+
}
|
|
4417
|
+
return null;
|
|
4418
|
+
}
|
|
4419
|
+
const token = uuidv4();
|
|
4420
|
+
this.locks.set(scheduleId, {
|
|
4421
|
+
nodeId,
|
|
4422
|
+
token,
|
|
4423
|
+
expiresAt: now + ttlSeconds * 1e3
|
|
4424
|
+
});
|
|
4425
|
+
return token;
|
|
4426
|
+
}
|
|
4427
|
+
async releaseLock(scheduleId, lockToken) {
|
|
4428
|
+
const existing = this.locks.get(scheduleId);
|
|
4429
|
+
if (existing && existing.token === lockToken) {
|
|
4430
|
+
this.locks.delete(scheduleId);
|
|
4431
|
+
}
|
|
4432
|
+
}
|
|
4433
|
+
async renewLock(scheduleId, lockToken, ttlSeconds) {
|
|
4434
|
+
const existing = this.locks.get(scheduleId);
|
|
4435
|
+
if (!existing || existing.token !== lockToken) {
|
|
4436
|
+
return false;
|
|
4437
|
+
}
|
|
4438
|
+
existing.expiresAt = Date.now() + ttlSeconds * 1e3;
|
|
4439
|
+
return true;
|
|
4440
|
+
}
|
|
4441
|
+
async flush() {
|
|
4442
|
+
}
|
|
4443
|
+
};
|
|
4444
|
+
}
|
|
4445
|
+
});
|
|
4446
|
+
|
|
4447
|
+
// src/scheduler/store/index.ts
|
|
4448
|
+
async function createStoreBackend(storageConfig, haConfig) {
|
|
4449
|
+
const driver = storageConfig?.driver || "sqlite";
|
|
4450
|
+
switch (driver) {
|
|
4451
|
+
case "sqlite": {
|
|
4452
|
+
const conn = storageConfig?.connection;
|
|
4453
|
+
return new SqliteStoreBackend(conn?.filename);
|
|
4454
|
+
}
|
|
4455
|
+
case "postgresql":
|
|
4456
|
+
case "mysql":
|
|
4457
|
+
case "mssql": {
|
|
4458
|
+
try {
|
|
4459
|
+
const loaderPath = "../../enterprise/loader";
|
|
4460
|
+
const { loadEnterpriseStoreBackend } = await import(loaderPath);
|
|
4461
|
+
return await loadEnterpriseStoreBackend(driver, storageConfig, haConfig);
|
|
4462
|
+
} catch (err) {
|
|
4463
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
4464
|
+
logger.error(`[StoreFactory] Failed to load enterprise ${driver} backend: ${msg}`);
|
|
4465
|
+
throw new Error(
|
|
4466
|
+
`The ${driver} schedule storage driver requires a Visor Enterprise license. Install the enterprise package or use driver: 'sqlite' (default). Original error: ${msg}`
|
|
4467
|
+
);
|
|
4468
|
+
}
|
|
4469
|
+
}
|
|
4470
|
+
default:
|
|
4471
|
+
throw new Error(`Unknown schedule storage driver: ${driver}`);
|
|
4472
|
+
}
|
|
4473
|
+
}
|
|
4474
|
+
var init_store = __esm({
|
|
4475
|
+
"src/scheduler/store/index.ts"() {
|
|
4476
|
+
"use strict";
|
|
4477
|
+
init_logger();
|
|
4478
|
+
init_sqlite_store();
|
|
4479
|
+
}
|
|
4480
|
+
});
|
|
4481
|
+
|
|
4482
|
+
// src/scheduler/store/json-migrator.ts
|
|
4483
|
+
import fs4 from "fs/promises";
|
|
4484
|
+
import path5 from "path";
|
|
4485
|
+
async function migrateJsonToBackend(jsonPath, backend) {
|
|
4486
|
+
const resolvedPath = path5.resolve(process.cwd(), jsonPath);
|
|
4487
|
+
let content;
|
|
4488
|
+
try {
|
|
4489
|
+
content = await fs4.readFile(resolvedPath, "utf-8");
|
|
4490
|
+
} catch (err) {
|
|
4491
|
+
if (err.code === "ENOENT") {
|
|
4492
|
+
return 0;
|
|
4493
|
+
}
|
|
4494
|
+
throw err;
|
|
4495
|
+
}
|
|
4496
|
+
let data;
|
|
4497
|
+
try {
|
|
4498
|
+
data = JSON.parse(content);
|
|
4499
|
+
} catch {
|
|
4500
|
+
logger.warn(`[JsonMigrator] Failed to parse ${jsonPath}, skipping migration`);
|
|
4501
|
+
return 0;
|
|
4502
|
+
}
|
|
4503
|
+
const schedules = data.schedules;
|
|
4504
|
+
if (!Array.isArray(schedules) || schedules.length === 0) {
|
|
4505
|
+
logger.debug("[JsonMigrator] No schedules to migrate");
|
|
4506
|
+
await renameToMigrated(resolvedPath);
|
|
4507
|
+
return 0;
|
|
4508
|
+
}
|
|
4509
|
+
let migrated = 0;
|
|
4510
|
+
for (const schedule of schedules) {
|
|
4511
|
+
if (!schedule.id) {
|
|
4512
|
+
logger.warn("[JsonMigrator] Skipping schedule without ID");
|
|
4513
|
+
continue;
|
|
4514
|
+
}
|
|
4515
|
+
const existing = await backend.get(schedule.id);
|
|
4516
|
+
if (existing) {
|
|
4517
|
+
logger.debug(`[JsonMigrator] Schedule ${schedule.id} already exists, skipping`);
|
|
4518
|
+
continue;
|
|
4519
|
+
}
|
|
4520
|
+
try {
|
|
4521
|
+
await backend.importSchedule(schedule);
|
|
4522
|
+
migrated++;
|
|
4523
|
+
} catch (err) {
|
|
4524
|
+
logger.warn(
|
|
4525
|
+
`[JsonMigrator] Failed to migrate schedule ${schedule.id}: ${err instanceof Error ? err.message : err}`
|
|
4526
|
+
);
|
|
4527
|
+
}
|
|
4528
|
+
}
|
|
4529
|
+
await renameToMigrated(resolvedPath);
|
|
4530
|
+
logger.info(`[JsonMigrator] Migrated ${migrated}/${schedules.length} schedules from ${jsonPath}`);
|
|
4531
|
+
return migrated;
|
|
4532
|
+
}
|
|
4533
|
+
async function renameToMigrated(resolvedPath) {
|
|
4534
|
+
const migratedPath = `${resolvedPath}.migrated`;
|
|
4535
|
+
try {
|
|
4536
|
+
await fs4.rename(resolvedPath, migratedPath);
|
|
4537
|
+
logger.info(`[JsonMigrator] Backed up ${resolvedPath} \u2192 ${migratedPath}`);
|
|
4538
|
+
} catch (err) {
|
|
4539
|
+
logger.warn(
|
|
4540
|
+
`[JsonMigrator] Failed to rename ${resolvedPath}: ${err instanceof Error ? err.message : err}`
|
|
4541
|
+
);
|
|
4542
|
+
}
|
|
4543
|
+
}
|
|
4544
|
+
var init_json_migrator = __esm({
|
|
4545
|
+
"src/scheduler/store/json-migrator.ts"() {
|
|
4546
|
+
"use strict";
|
|
4547
|
+
init_logger();
|
|
4548
|
+
}
|
|
4549
|
+
});
|
|
4550
|
+
|
|
4551
|
+
// src/scheduler/schedule-store.ts
|
|
4552
|
+
var ScheduleStore;
|
|
4553
|
+
var init_schedule_store = __esm({
|
|
4554
|
+
"src/scheduler/schedule-store.ts"() {
|
|
4555
|
+
"use strict";
|
|
4556
|
+
init_logger();
|
|
4557
|
+
init_store();
|
|
4558
|
+
init_json_migrator();
|
|
4559
|
+
ScheduleStore = class _ScheduleStore {
|
|
4560
|
+
static instance;
|
|
4561
|
+
backend = null;
|
|
4562
|
+
initialized = false;
|
|
4563
|
+
limits;
|
|
4564
|
+
config;
|
|
4565
|
+
externalBackend = null;
|
|
4566
|
+
constructor(config, limits, backend) {
|
|
4567
|
+
this.config = config || {};
|
|
4568
|
+
this.limits = {
|
|
4569
|
+
maxPerUser: limits?.maxPerUser ?? 25,
|
|
4570
|
+
maxRecurringPerUser: limits?.maxRecurringPerUser ?? 10,
|
|
4571
|
+
maxGlobal: limits?.maxGlobal ?? 1e3
|
|
4572
|
+
};
|
|
4573
|
+
if (backend) {
|
|
4574
|
+
this.externalBackend = backend;
|
|
4575
|
+
}
|
|
4576
|
+
}
|
|
4577
|
+
/**
|
|
4578
|
+
* Get singleton instance
|
|
4579
|
+
*
|
|
4580
|
+
* Note: Config and limits are only applied on first call. Subsequent calls
|
|
4581
|
+
* with different parameters will log a warning and return the existing instance.
|
|
4582
|
+
* Use createIsolated() for testing with different configurations.
|
|
4583
|
+
*/
|
|
4584
|
+
static getInstance(config, limits) {
|
|
4585
|
+
if (!_ScheduleStore.instance) {
|
|
4586
|
+
_ScheduleStore.instance = new _ScheduleStore(config, limits);
|
|
4587
|
+
} else if (config || limits) {
|
|
4588
|
+
logger.warn(
|
|
4589
|
+
"[ScheduleStore] getInstance() called with config/limits but instance already exists. Parameters ignored. Use createIsolated() for testing or resetInstance() first."
|
|
4590
|
+
);
|
|
4591
|
+
}
|
|
4592
|
+
return _ScheduleStore.instance;
|
|
4593
|
+
}
|
|
4594
|
+
/**
|
|
4595
|
+
* Create a new isolated instance (for testing)
|
|
4596
|
+
*/
|
|
4597
|
+
static createIsolated(config, limits, backend) {
|
|
4598
|
+
return new _ScheduleStore(config, limits, backend);
|
|
4599
|
+
}
|
|
4600
|
+
/**
|
|
4601
|
+
* Reset singleton instance (for testing)
|
|
4602
|
+
*/
|
|
4603
|
+
static resetInstance() {
|
|
4604
|
+
if (_ScheduleStore.instance) {
|
|
4605
|
+
if (_ScheduleStore.instance.backend) {
|
|
4606
|
+
_ScheduleStore.instance.backend.shutdown().catch(() => {
|
|
4607
|
+
});
|
|
4608
|
+
}
|
|
4609
|
+
}
|
|
4610
|
+
_ScheduleStore.instance = void 0;
|
|
4611
|
+
}
|
|
4612
|
+
/**
|
|
4613
|
+
* Initialize the store - creates backend and runs migrations
|
|
4614
|
+
*/
|
|
4615
|
+
async initialize() {
|
|
4616
|
+
if (this.initialized) {
|
|
4617
|
+
return;
|
|
4618
|
+
}
|
|
4619
|
+
if (this.externalBackend) {
|
|
4620
|
+
this.backend = this.externalBackend;
|
|
4621
|
+
} else {
|
|
4622
|
+
this.backend = await createStoreBackend(this.config.storage, this.config.ha);
|
|
4623
|
+
}
|
|
4624
|
+
await this.backend.initialize();
|
|
4625
|
+
const jsonPath = this.config.path || ".visor/schedules.json";
|
|
4626
|
+
try {
|
|
4627
|
+
await migrateJsonToBackend(jsonPath, this.backend);
|
|
4628
|
+
} catch (err) {
|
|
4629
|
+
logger.warn(
|
|
4630
|
+
`[ScheduleStore] JSON migration failed (non-fatal): ${err instanceof Error ? err.message : err}`
|
|
4631
|
+
);
|
|
4632
|
+
}
|
|
4633
|
+
this.initialized = true;
|
|
4634
|
+
}
|
|
4635
|
+
/**
|
|
4636
|
+
* Create a new schedule (async, persists immediately)
|
|
4637
|
+
*/
|
|
4638
|
+
async createAsync(schedule) {
|
|
4639
|
+
const backend = this.getBackend();
|
|
4640
|
+
await backend.validateLimits(schedule.creatorId, schedule.isRecurring, this.limits);
|
|
4641
|
+
return backend.create(schedule);
|
|
4642
|
+
}
|
|
4643
|
+
/**
|
|
4644
|
+
* Get a schedule by ID
|
|
4645
|
+
*/
|
|
4646
|
+
async getAsync(id) {
|
|
4647
|
+
return this.getBackend().get(id);
|
|
4648
|
+
}
|
|
4649
|
+
/**
|
|
4650
|
+
* Update a schedule
|
|
4651
|
+
*/
|
|
4652
|
+
async updateAsync(id, patch) {
|
|
4653
|
+
return this.getBackend().update(id, patch);
|
|
4654
|
+
}
|
|
4655
|
+
/**
|
|
4656
|
+
* Delete a schedule
|
|
4657
|
+
*/
|
|
4658
|
+
async deleteAsync(id) {
|
|
4659
|
+
return this.getBackend().delete(id);
|
|
4660
|
+
}
|
|
4661
|
+
/**
|
|
4662
|
+
* Get all schedules for a specific creator
|
|
4663
|
+
*/
|
|
4664
|
+
async getByCreatorAsync(creatorId) {
|
|
4665
|
+
return this.getBackend().getByCreator(creatorId);
|
|
4666
|
+
}
|
|
4667
|
+
/**
|
|
4668
|
+
* Get all active schedules
|
|
4669
|
+
*/
|
|
4670
|
+
async getActiveSchedulesAsync() {
|
|
4671
|
+
return this.getBackend().getActiveSchedules();
|
|
4672
|
+
}
|
|
4673
|
+
/**
|
|
4674
|
+
* Get all schedules due for execution
|
|
4675
|
+
* @param now Current timestamp in milliseconds
|
|
4676
|
+
*/
|
|
4677
|
+
async getDueSchedulesAsync(now = Date.now()) {
|
|
4678
|
+
return this.getBackend().getDueSchedules(now);
|
|
4679
|
+
}
|
|
4680
|
+
/**
|
|
4681
|
+
* Find schedules by workflow name
|
|
4682
|
+
*/
|
|
4683
|
+
async findByWorkflowAsync(creatorId, workflowName) {
|
|
4684
|
+
return this.getBackend().findByWorkflow(creatorId, workflowName);
|
|
4685
|
+
}
|
|
4686
|
+
/**
|
|
4687
|
+
* Get schedule count statistics
|
|
4688
|
+
*/
|
|
4689
|
+
async getStatsAsync() {
|
|
4690
|
+
return this.getBackend().getStats();
|
|
4691
|
+
}
|
|
4692
|
+
/**
|
|
4693
|
+
* Force immediate save (useful for shutdown)
|
|
4694
|
+
*/
|
|
4695
|
+
async flush() {
|
|
4696
|
+
if (this.backend) {
|
|
4697
|
+
await this.backend.flush();
|
|
4698
|
+
}
|
|
4699
|
+
}
|
|
4700
|
+
/**
|
|
4701
|
+
* Check if initialized
|
|
4702
|
+
*/
|
|
4703
|
+
isInitialized() {
|
|
4704
|
+
return this.initialized;
|
|
4705
|
+
}
|
|
4706
|
+
/**
|
|
4707
|
+
* Check if there are unsaved changes
|
|
4708
|
+
*/
|
|
4709
|
+
hasPendingChanges() {
|
|
4710
|
+
return false;
|
|
4711
|
+
}
|
|
4712
|
+
/**
|
|
4713
|
+
* Get all schedules
|
|
4714
|
+
*/
|
|
4715
|
+
async getAllAsync() {
|
|
4716
|
+
return this.getBackend().getAll();
|
|
4717
|
+
}
|
|
4718
|
+
/**
|
|
4719
|
+
* Get the underlying backend (for HA lock operations)
|
|
4720
|
+
*/
|
|
4721
|
+
getBackend() {
|
|
4722
|
+
if (!this.backend) {
|
|
4723
|
+
throw new Error("[ScheduleStore] Not initialized. Call initialize() first.");
|
|
4724
|
+
}
|
|
4725
|
+
return this.backend;
|
|
4726
|
+
}
|
|
4727
|
+
/**
|
|
4728
|
+
* Shut down the backend cleanly
|
|
4729
|
+
*/
|
|
4730
|
+
async shutdown() {
|
|
4731
|
+
if (this.backend) {
|
|
4732
|
+
await this.backend.shutdown();
|
|
4733
|
+
this.backend = null;
|
|
4734
|
+
}
|
|
4735
|
+
this.initialized = false;
|
|
4736
|
+
}
|
|
4737
|
+
};
|
|
4738
|
+
}
|
|
4739
|
+
});
|
|
4740
|
+
|
|
4741
|
+
// src/scheduler/schedule-parser.ts
|
|
4742
|
+
function getNextRunTime(cronExpression, _timezone = "UTC") {
|
|
4743
|
+
const parts = cronExpression.split(" ");
|
|
4744
|
+
if (parts.length !== 5) {
|
|
4745
|
+
throw new Error(`Invalid cron expression: ${cronExpression}`);
|
|
4746
|
+
}
|
|
4747
|
+
const [minute, hour, dayOfMonth, month, dayOfWeek] = parts;
|
|
4748
|
+
const now = /* @__PURE__ */ new Date();
|
|
4749
|
+
const next = new Date(now);
|
|
4750
|
+
next.setSeconds(0, 0);
|
|
4751
|
+
next.setMinutes(next.getMinutes() + 1);
|
|
4752
|
+
const maxAttempts = 365 * 24 * 60;
|
|
4753
|
+
for (let i = 0; i < maxAttempts; i++) {
|
|
4754
|
+
if (matchesCronPart(next.getMinutes(), minute) && matchesCronPart(next.getHours(), hour) && matchesCronPart(next.getDate(), dayOfMonth) && matchesCronPart(next.getMonth() + 1, month) && matchesCronPart(next.getDay(), dayOfWeek)) {
|
|
4755
|
+
return next;
|
|
4756
|
+
}
|
|
4757
|
+
next.setMinutes(next.getMinutes() + 1);
|
|
4758
|
+
}
|
|
4759
|
+
const fallback = new Date(now);
|
|
4760
|
+
fallback.setDate(fallback.getDate() + 1);
|
|
4761
|
+
fallback.setHours(parseInt(hour, 10) || 9);
|
|
4762
|
+
fallback.setMinutes(parseInt(minute, 10) || 0);
|
|
4763
|
+
fallback.setSeconds(0, 0);
|
|
4764
|
+
return fallback;
|
|
4765
|
+
}
|
|
4766
|
+
function matchesCronPart(value, cronPart) {
|
|
4767
|
+
if (cronPart === "*") return true;
|
|
4768
|
+
if (cronPart.startsWith("*/")) {
|
|
4769
|
+
const step = parseInt(cronPart.slice(2), 10);
|
|
4770
|
+
return value % step === 0;
|
|
4771
|
+
}
|
|
4772
|
+
if (cronPart.includes("-")) {
|
|
4773
|
+
const [start, end] = cronPart.split("-").map((n) => parseInt(n, 10));
|
|
4774
|
+
return value >= start && value <= end;
|
|
4775
|
+
}
|
|
4776
|
+
if (cronPart.includes(",")) {
|
|
4777
|
+
return cronPart.split(",").map((n) => parseInt(n, 10)).includes(value);
|
|
4778
|
+
}
|
|
4779
|
+
return parseInt(cronPart, 10) === value;
|
|
4780
|
+
}
|
|
4781
|
+
function isValidCronExpression(expr) {
|
|
4782
|
+
if (!expr || typeof expr !== "string") return false;
|
|
4783
|
+
const parts = expr.trim().split(/\s+/);
|
|
4784
|
+
if (parts.length !== 5) return false;
|
|
4785
|
+
const ranges = [
|
|
4786
|
+
[0, 59],
|
|
4787
|
+
// minute
|
|
4788
|
+
[0, 23],
|
|
4789
|
+
// hour
|
|
4790
|
+
[1, 31],
|
|
4791
|
+
// day of month
|
|
4792
|
+
[1, 12],
|
|
4793
|
+
// month
|
|
4794
|
+
[0, 7]
|
|
4795
|
+
// day of week (0 and 7 are Sunday)
|
|
4796
|
+
];
|
|
4797
|
+
return parts.every((part, i) => {
|
|
4798
|
+
if (part === "*") return true;
|
|
4799
|
+
if (part.startsWith("*/")) {
|
|
4800
|
+
const step = parseInt(part.slice(2), 10);
|
|
4801
|
+
return !isNaN(step) && step > 0;
|
|
4802
|
+
}
|
|
4803
|
+
if (part.includes("-")) {
|
|
4804
|
+
const [start, end] = part.split("-").map((n) => parseInt(n, 10));
|
|
4805
|
+
return !isNaN(start) && !isNaN(end) && start >= ranges[i][0] && end <= ranges[i][1];
|
|
4806
|
+
}
|
|
4807
|
+
if (part.includes(",")) {
|
|
4808
|
+
return part.split(",").every((n) => {
|
|
4809
|
+
const val2 = parseInt(n, 10);
|
|
4810
|
+
return !isNaN(val2) && val2 >= ranges[i][0] && val2 <= ranges[i][1];
|
|
4811
|
+
});
|
|
4812
|
+
}
|
|
4813
|
+
const val = parseInt(part, 10);
|
|
4814
|
+
return !isNaN(val) && val >= ranges[i][0] && val <= ranges[i][1];
|
|
4815
|
+
});
|
|
4816
|
+
}
|
|
4817
|
+
var init_schedule_parser = __esm({
|
|
4818
|
+
"src/scheduler/schedule-parser.ts"() {
|
|
4819
|
+
"use strict";
|
|
4820
|
+
}
|
|
4821
|
+
});
|
|
4822
|
+
|
|
4024
4823
|
// src/state-machine/states/init.ts
|
|
4025
4824
|
async function handleInit(context2, state, transition) {
|
|
4026
4825
|
if (context2.debug) {
|
|
@@ -4804,8 +5603,8 @@ var init_wave_planning = __esm({
|
|
|
4804
5603
|
});
|
|
4805
5604
|
|
|
4806
5605
|
// src/utils/mermaid-telemetry.ts
|
|
4807
|
-
import * as
|
|
4808
|
-
import * as
|
|
5606
|
+
import * as fs5 from "fs";
|
|
5607
|
+
import * as path6 from "path";
|
|
4809
5608
|
function emitMermaidFromMarkdown(checkName, markdown, origin) {
|
|
4810
5609
|
if (!markdown || typeof markdown !== "string") return 0;
|
|
4811
5610
|
let m;
|
|
@@ -4818,16 +5617,16 @@ function emitMermaidFromMarkdown(checkName, markdown, origin) {
|
|
|
4818
5617
|
addEvent("diagram.block", { check: checkName, origin, code });
|
|
4819
5618
|
addDiagramBlock(origin);
|
|
4820
5619
|
if (process.env.VISOR_TRACE_REPORT === "true") {
|
|
4821
|
-
const outDir = process.env.VISOR_TRACE_DIR ||
|
|
5620
|
+
const outDir = process.env.VISOR_TRACE_DIR || path6.join(process.cwd(), "output", "traces");
|
|
4822
5621
|
try {
|
|
4823
|
-
if (!
|
|
5622
|
+
if (!fs5.existsSync(outDir)) fs5.mkdirSync(outDir, { recursive: true });
|
|
4824
5623
|
const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
4825
|
-
const jsonPath =
|
|
4826
|
-
const htmlPath =
|
|
5624
|
+
const jsonPath = path6.join(outDir, `${ts}.trace.json`);
|
|
5625
|
+
const htmlPath = path6.join(outDir, `${ts}.report.html`);
|
|
4827
5626
|
let data = { spans: [] };
|
|
4828
|
-
if (
|
|
5627
|
+
if (fs5.existsSync(jsonPath)) {
|
|
4829
5628
|
try {
|
|
4830
|
-
data = JSON.parse(
|
|
5629
|
+
data = JSON.parse(fs5.readFileSync(jsonPath, "utf8"));
|
|
4831
5630
|
} catch {
|
|
4832
5631
|
data = { spans: [] };
|
|
4833
5632
|
}
|
|
@@ -4835,9 +5634,9 @@ function emitMermaidFromMarkdown(checkName, markdown, origin) {
|
|
|
4835
5634
|
data.spans.push({
|
|
4836
5635
|
events: [{ name: "diagram.block", attrs: { check: checkName, origin, code } }]
|
|
4837
5636
|
});
|
|
4838
|
-
|
|
4839
|
-
if (!
|
|
4840
|
-
|
|
5637
|
+
fs5.writeFileSync(jsonPath, JSON.stringify(data, null, 2), "utf8");
|
|
5638
|
+
if (!fs5.existsSync(htmlPath)) {
|
|
5639
|
+
fs5.writeFileSync(
|
|
4841
5640
|
htmlPath,
|
|
4842
5641
|
'<!doctype html><html><head><meta charset="utf-8"/><title>Visor Trace Report</title></head><body><h2>Visor Trace Report</h2></body></html>',
|
|
4843
5642
|
"utf8"
|
|
@@ -5494,8 +6293,8 @@ var init_dependency_gating = __esm({
|
|
|
5494
6293
|
async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
5495
6294
|
try {
|
|
5496
6295
|
const { createExtendedLiquid: createExtendedLiquid2 } = await import("./liquid-extensions-PLBOMRLI.mjs");
|
|
5497
|
-
const
|
|
5498
|
-
const
|
|
6296
|
+
const fs12 = await import("fs/promises");
|
|
6297
|
+
const path15 = await import("path");
|
|
5499
6298
|
const schemaRaw = checkConfig.schema || "plain";
|
|
5500
6299
|
const schema = typeof schemaRaw === "string" ? schemaRaw : "code-review";
|
|
5501
6300
|
let templateContent;
|
|
@@ -5503,24 +6302,24 @@ async function renderTemplateContent(checkId, checkConfig, reviewSummary) {
|
|
|
5503
6302
|
templateContent = String(checkConfig.template.content);
|
|
5504
6303
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
5505
6304
|
const file = String(checkConfig.template.file);
|
|
5506
|
-
const resolved =
|
|
5507
|
-
templateContent = await
|
|
6305
|
+
const resolved = path15.resolve(process.cwd(), file);
|
|
6306
|
+
templateContent = await fs12.readFile(resolved, "utf-8");
|
|
5508
6307
|
} else if (schema && schema !== "plain") {
|
|
5509
6308
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
5510
6309
|
if (sanitized) {
|
|
5511
6310
|
const candidatePaths = [
|
|
5512
|
-
|
|
6311
|
+
path15.join(__dirname, "output", sanitized, "template.liquid"),
|
|
5513
6312
|
// bundled: dist/output/
|
|
5514
|
-
|
|
6313
|
+
path15.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
5515
6314
|
// source: output/
|
|
5516
|
-
|
|
6315
|
+
path15.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
5517
6316
|
// fallback: cwd/output/
|
|
5518
|
-
|
|
6317
|
+
path15.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
5519
6318
|
// fallback: cwd/dist/output/
|
|
5520
6319
|
];
|
|
5521
6320
|
for (const p of candidatePaths) {
|
|
5522
6321
|
try {
|
|
5523
|
-
templateContent = await
|
|
6322
|
+
templateContent = await fs12.readFile(p, "utf-8");
|
|
5524
6323
|
if (templateContent) break;
|
|
5525
6324
|
} catch {
|
|
5526
6325
|
}
|
|
@@ -8208,7 +9007,7 @@ async function executeCheckWithForEachItems2(checkId, forEachParent, forEachItem
|
|
|
8208
9007
|
}
|
|
8209
9008
|
}
|
|
8210
9009
|
try {
|
|
8211
|
-
const { evaluateTransitions } = await import("./routing-
|
|
9010
|
+
const { evaluateTransitions } = await import("./routing-LU5PAREW.mjs");
|
|
8212
9011
|
const transTarget = await evaluateTransitions(
|
|
8213
9012
|
onFinish.transitions,
|
|
8214
9013
|
forEachParent,
|
|
@@ -8268,7 +9067,7 @@ async function executeCheckWithForEachItems2(checkId, forEachParent, forEachItem
|
|
|
8268
9067
|
`[LevelDispatch] Error evaluating on_finish transitions for ${forEachParent}: ${e instanceof Error ? e.message : String(e)}`
|
|
8269
9068
|
);
|
|
8270
9069
|
}
|
|
8271
|
-
const { evaluateGoto: evaluateGoto2 } = await import("./routing-
|
|
9070
|
+
const { evaluateGoto: evaluateGoto2 } = await import("./routing-LU5PAREW.mjs");
|
|
8272
9071
|
if (context2.debug) {
|
|
8273
9072
|
logger.info(
|
|
8274
9073
|
`[LevelDispatch] Evaluating on_finish.goto_js for forEach parent: ${forEachParent}`
|
|
@@ -9521,8 +10320,8 @@ function updateStats2(results, state, isForEachIteration = false) {
|
|
|
9521
10320
|
async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
9522
10321
|
try {
|
|
9523
10322
|
const { createExtendedLiquid: createExtendedLiquid2 } = await import("./liquid-extensions-PLBOMRLI.mjs");
|
|
9524
|
-
const
|
|
9525
|
-
const
|
|
10323
|
+
const fs12 = await import("fs/promises");
|
|
10324
|
+
const path15 = await import("path");
|
|
9526
10325
|
const schemaRaw = checkConfig.schema || "plain";
|
|
9527
10326
|
const schema = typeof schemaRaw === "string" && !schemaRaw.includes("{{") && !schemaRaw.includes("{%") ? schemaRaw : typeof schemaRaw === "object" ? "code-review" : "plain";
|
|
9528
10327
|
let templateContent;
|
|
@@ -9531,27 +10330,27 @@ async function renderTemplateContent2(checkId, checkConfig, reviewSummary) {
|
|
|
9531
10330
|
logger.debug(`[LevelDispatch] Using inline template for ${checkId}`);
|
|
9532
10331
|
} else if (checkConfig.template && checkConfig.template.file) {
|
|
9533
10332
|
const file = String(checkConfig.template.file);
|
|
9534
|
-
const resolved =
|
|
9535
|
-
templateContent = await
|
|
10333
|
+
const resolved = path15.resolve(process.cwd(), file);
|
|
10334
|
+
templateContent = await fs12.readFile(resolved, "utf-8");
|
|
9536
10335
|
logger.debug(`[LevelDispatch] Using template file for ${checkId}: ${resolved}`);
|
|
9537
10336
|
} else if (schema && schema !== "plain") {
|
|
9538
10337
|
const sanitized = String(schema).replace(/[^a-zA-Z0-9-]/g, "");
|
|
9539
10338
|
if (sanitized) {
|
|
9540
10339
|
const candidatePaths = [
|
|
9541
|
-
|
|
10340
|
+
path15.join(__dirname, "output", sanitized, "template.liquid"),
|
|
9542
10341
|
// bundled: dist/output/
|
|
9543
|
-
|
|
10342
|
+
path15.join(__dirname, "..", "..", "output", sanitized, "template.liquid"),
|
|
9544
10343
|
// source (from state-machine/states)
|
|
9545
|
-
|
|
10344
|
+
path15.join(__dirname, "..", "..", "..", "output", sanitized, "template.liquid"),
|
|
9546
10345
|
// source (alternate)
|
|
9547
|
-
|
|
10346
|
+
path15.join(process.cwd(), "output", sanitized, "template.liquid"),
|
|
9548
10347
|
// fallback: cwd/output/
|
|
9549
|
-
|
|
10348
|
+
path15.join(process.cwd(), "dist", "output", sanitized, "template.liquid")
|
|
9550
10349
|
// fallback: cwd/dist/output/
|
|
9551
10350
|
];
|
|
9552
10351
|
for (const p of candidatePaths) {
|
|
9553
10352
|
try {
|
|
9554
|
-
templateContent = await
|
|
10353
|
+
templateContent = await fs12.readFile(p, "utf-8");
|
|
9555
10354
|
if (templateContent) {
|
|
9556
10355
|
logger.debug(`[LevelDispatch] Using schema template for ${checkId}: ${p}`);
|
|
9557
10356
|
break;
|
|
@@ -11075,7 +11874,7 @@ var init_sandbox_manager = __esm({
|
|
|
11075
11874
|
|
|
11076
11875
|
// src/utils/workspace-manager.ts
|
|
11077
11876
|
import * as fsp from "fs/promises";
|
|
11078
|
-
import * as
|
|
11877
|
+
import * as path7 from "path";
|
|
11079
11878
|
function shellEscape(str) {
|
|
11080
11879
|
return "'" + str.replace(/'/g, "'\\''") + "'";
|
|
11081
11880
|
}
|
|
@@ -11119,7 +11918,7 @@ var init_workspace_manager = __esm({
|
|
|
11119
11918
|
};
|
|
11120
11919
|
this.basePath = this.config.basePath;
|
|
11121
11920
|
const workspaceDirName = sanitizePathComponent(this.config.name || this.sessionId);
|
|
11122
|
-
this.workspacePath =
|
|
11921
|
+
this.workspacePath = path7.join(this.basePath, workspaceDirName);
|
|
11123
11922
|
}
|
|
11124
11923
|
/**
|
|
11125
11924
|
* Get or create a WorkspaceManager instance for a session
|
|
@@ -11214,7 +12013,7 @@ var init_workspace_manager = __esm({
|
|
|
11214
12013
|
configuredMainProjectName || this.extractProjectName(this.originalPath)
|
|
11215
12014
|
);
|
|
11216
12015
|
this.usedNames.add(mainProjectName);
|
|
11217
|
-
const mainProjectPath =
|
|
12016
|
+
const mainProjectPath = path7.join(this.workspacePath, mainProjectName);
|
|
11218
12017
|
const isGitRepo = await this.isGitRepository(this.originalPath);
|
|
11219
12018
|
if (isGitRepo) {
|
|
11220
12019
|
const exists = await this.pathExists(mainProjectPath);
|
|
@@ -11232,6 +12031,8 @@ var init_workspace_manager = __esm({
|
|
|
11232
12031
|
} catch {
|
|
11233
12032
|
}
|
|
11234
12033
|
await this.createMainProjectWorktree(mainProjectPath);
|
|
12034
|
+
} else {
|
|
12035
|
+
await this.refreshWorktreeToUpstream(mainProjectPath);
|
|
11235
12036
|
}
|
|
11236
12037
|
} else {
|
|
11237
12038
|
await this.createMainProjectWorktree(mainProjectPath);
|
|
@@ -11276,7 +12077,7 @@ var init_workspace_manager = __esm({
|
|
|
11276
12077
|
let projectName = sanitizePathComponent(description || this.extractRepoName(repository));
|
|
11277
12078
|
projectName = this.getUniqueName(projectName);
|
|
11278
12079
|
this.usedNames.add(projectName);
|
|
11279
|
-
const workspacePath =
|
|
12080
|
+
const workspacePath = path7.join(this.workspacePath, projectName);
|
|
11280
12081
|
await fsp.rm(workspacePath, { recursive: true, force: true });
|
|
11281
12082
|
try {
|
|
11282
12083
|
await fsp.symlink(worktreePath, workspacePath);
|
|
@@ -11386,7 +12187,7 @@ var init_workspace_manager = __esm({
|
|
|
11386
12187
|
const now = Date.now();
|
|
11387
12188
|
for (const entry of entries) {
|
|
11388
12189
|
if (!entry.isDirectory()) continue;
|
|
11389
|
-
const dirPath =
|
|
12190
|
+
const dirPath = path7.join(basePath, entry.name);
|
|
11390
12191
|
try {
|
|
11391
12192
|
const stat2 = await fsp.stat(dirPath);
|
|
11392
12193
|
if (now - stat2.mtimeMs > maxAgeMs) {
|
|
@@ -11394,8 +12195,8 @@ var init_workspace_manager = __esm({
|
|
|
11394
12195
|
const subdirs = await fsp.readdir(dirPath, { withFileTypes: true });
|
|
11395
12196
|
for (const sub of subdirs) {
|
|
11396
12197
|
if (!sub.isDirectory()) continue;
|
|
11397
|
-
const subPath =
|
|
11398
|
-
const gitFilePath =
|
|
12198
|
+
const subPath = path7.join(dirPath, sub.name);
|
|
12199
|
+
const gitFilePath = path7.join(subPath, ".git");
|
|
11399
12200
|
try {
|
|
11400
12201
|
const gitContent = await fsp.readFile(gitFilePath, "utf-8");
|
|
11401
12202
|
const match = gitContent.match(/gitdir:\s*(.+)/);
|
|
@@ -11427,33 +12228,143 @@ var init_workspace_manager = __esm({
|
|
|
11427
12228
|
return cleaned;
|
|
11428
12229
|
}
|
|
11429
12230
|
/**
|
|
11430
|
-
*
|
|
11431
|
-
*
|
|
11432
|
-
*
|
|
11433
|
-
*
|
|
11434
|
-
*
|
|
11435
|
-
*
|
|
11436
|
-
*
|
|
11437
|
-
* full clone/bare-repo/fetch/worktree pipeline.
|
|
12231
|
+
* visor-disable: architecture - The helpers below (resolveUpstreamRef,
|
|
12232
|
+
* fetchAndResolveUpstream, resetAndCleanWorktree, refreshWorktreeToUpstream)
|
|
12233
|
+
* are NOT duplicates of WorktreeManager's fetchRef/getCommitShaForRef/cleanWorktree.
|
|
12234
|
+
* WorktreeManager operates on BARE repo caches cloned from remote URLs, while
|
|
12235
|
+
* WorkspaceManager operates on the LOCAL working repo the user already has checked out.
|
|
12236
|
+
* The git commands differ (e.g. `fetch origin --prune` vs `fetch origin <ref>:<ref>`)
|
|
12237
|
+
* and sharing code would require adding a "local mode" to WorktreeManager for no benefit.
|
|
11438
12238
|
*/
|
|
11439
|
-
|
|
11440
|
-
|
|
12239
|
+
/**
|
|
12240
|
+
* Resolve the upstream default branch ref.
|
|
12241
|
+
* Tries origin/HEAD (symbolic), then origin/main, then origin/master.
|
|
12242
|
+
* Falls back to local HEAD if no remote is configured.
|
|
12243
|
+
*/
|
|
12244
|
+
async resolveUpstreamRef() {
|
|
12245
|
+
const esc = shellEscape(this.originalPath);
|
|
12246
|
+
const symbolicResult = await commandExecutor.execute(
|
|
12247
|
+
`git -C ${esc} symbolic-ref refs/remotes/origin/HEAD 2>/dev/null`,
|
|
12248
|
+
{ timeout: 1e4 }
|
|
12249
|
+
);
|
|
12250
|
+
if (symbolicResult.exitCode === 0 && symbolicResult.stdout.trim()) {
|
|
12251
|
+
const ref = symbolicResult.stdout.trim().replace("refs/remotes/", "");
|
|
12252
|
+
logger.debug(`[Workspace] Resolved upstream default branch via origin/HEAD: ${ref}`);
|
|
12253
|
+
return ref;
|
|
12254
|
+
}
|
|
12255
|
+
const mainResult = await commandExecutor.execute(
|
|
12256
|
+
`git -C ${esc} rev-parse --verify origin/main 2>/dev/null`,
|
|
12257
|
+
{ timeout: 1e4 }
|
|
12258
|
+
);
|
|
12259
|
+
if (mainResult.exitCode === 0) {
|
|
12260
|
+
logger.debug(`[Workspace] Using origin/main as upstream ref`);
|
|
12261
|
+
return "origin/main";
|
|
12262
|
+
}
|
|
12263
|
+
const masterResult = await commandExecutor.execute(
|
|
12264
|
+
`git -C ${esc} rev-parse --verify origin/master 2>/dev/null`,
|
|
12265
|
+
{ timeout: 1e4 }
|
|
12266
|
+
);
|
|
12267
|
+
if (masterResult.exitCode === 0) {
|
|
12268
|
+
logger.debug(`[Workspace] Using origin/master as upstream ref`);
|
|
12269
|
+
return "origin/master";
|
|
12270
|
+
}
|
|
12271
|
+
logger.warn(`[Workspace] No upstream remote found, falling back to local HEAD`);
|
|
12272
|
+
return "HEAD";
|
|
12273
|
+
}
|
|
12274
|
+
/**
|
|
12275
|
+
* Fetch latest from origin, resolve the upstream default branch, and return
|
|
12276
|
+
* both the ref name and the resolved commit SHA.
|
|
12277
|
+
*/
|
|
12278
|
+
async fetchAndResolveUpstream() {
|
|
12279
|
+
logger.debug(`[Workspace] Fetching latest from origin`);
|
|
12280
|
+
const fetchResult = await commandExecutor.execute(
|
|
12281
|
+
`git -C ${shellEscape(this.originalPath)} fetch origin --prune 2>&1`,
|
|
12282
|
+
{ timeout: 12e4 }
|
|
12283
|
+
);
|
|
12284
|
+
if (fetchResult.exitCode !== 0) {
|
|
12285
|
+
logger.warn(`[Workspace] fetch origin failed (will use cached refs): ${fetchResult.stderr}`);
|
|
12286
|
+
}
|
|
12287
|
+
const upstreamRef = await this.resolveUpstreamRef();
|
|
12288
|
+
const shaResult = await commandExecutor.execute(
|
|
12289
|
+
`git -C ${shellEscape(this.originalPath)} rev-parse ${shellEscape(upstreamRef)}`,
|
|
12290
|
+
{ timeout: 1e4 }
|
|
12291
|
+
);
|
|
12292
|
+
if (shaResult.exitCode === 0) {
|
|
12293
|
+
return { upstreamRef, targetSha: shaResult.stdout.trim() };
|
|
12294
|
+
}
|
|
12295
|
+
logger.warn(
|
|
12296
|
+
`[Workspace] Could not resolve ${upstreamRef} (${shaResult.stderr.trim()}), falling back to HEAD`
|
|
12297
|
+
);
|
|
11441
12298
|
const headResult = await commandExecutor.execute(
|
|
11442
12299
|
`git -C ${shellEscape(this.originalPath)} rev-parse HEAD`,
|
|
11443
|
-
{
|
|
11444
|
-
timeout: 1e4
|
|
11445
|
-
}
|
|
12300
|
+
{ timeout: 1e4 }
|
|
11446
12301
|
);
|
|
11447
12302
|
if (headResult.exitCode !== 0) {
|
|
11448
|
-
throw new Error(`
|
|
12303
|
+
throw new Error(`Repository has no commits \u2014 cannot create worktree: ${headResult.stderr}`);
|
|
12304
|
+
}
|
|
12305
|
+
return { upstreamRef: "HEAD", targetSha: headResult.stdout.trim() };
|
|
12306
|
+
}
|
|
12307
|
+
/**
|
|
12308
|
+
* Reset a worktree to a specific commit and clean all modifications.
|
|
12309
|
+
*/
|
|
12310
|
+
async resetAndCleanWorktree(worktreePath, targetSha) {
|
|
12311
|
+
const escapedPath = shellEscape(worktreePath);
|
|
12312
|
+
const escapedSha = shellEscape(targetSha);
|
|
12313
|
+
const resetResult = await commandExecutor.execute(
|
|
12314
|
+
`git -C ${escapedPath} reset --hard ${escapedSha}`,
|
|
12315
|
+
{ timeout: 1e4 }
|
|
12316
|
+
);
|
|
12317
|
+
if (resetResult.exitCode !== 0) {
|
|
12318
|
+
logger.warn(`[Workspace] reset --hard failed: ${resetResult.stderr}`);
|
|
12319
|
+
}
|
|
12320
|
+
const cleanResult = await commandExecutor.execute(`git -C ${escapedPath} clean -fdx`, {
|
|
12321
|
+
timeout: 3e4
|
|
12322
|
+
});
|
|
12323
|
+
if (cleanResult.exitCode !== 0) {
|
|
12324
|
+
logger.warn(`[Workspace] clean -fdx failed: ${cleanResult.stderr}`);
|
|
12325
|
+
}
|
|
12326
|
+
}
|
|
12327
|
+
/**
|
|
12328
|
+
* Refresh an existing worktree to the latest upstream default branch
|
|
12329
|
+
* and ensure it has no modified or untracked files.
|
|
12330
|
+
*/
|
|
12331
|
+
async refreshWorktreeToUpstream(worktreePath) {
|
|
12332
|
+
logger.info(`[Workspace] Refreshing worktree to latest upstream: ${worktreePath}`);
|
|
12333
|
+
try {
|
|
12334
|
+
const { upstreamRef, targetSha } = await this.fetchAndResolveUpstream();
|
|
12335
|
+
const checkoutResult = await commandExecutor.execute(
|
|
12336
|
+
`git -C ${shellEscape(worktreePath)} checkout --detach ${shellEscape(targetSha)}`,
|
|
12337
|
+
{ timeout: 3e4 }
|
|
12338
|
+
);
|
|
12339
|
+
if (checkoutResult.exitCode !== 0) {
|
|
12340
|
+
logger.warn(
|
|
12341
|
+
`[Workspace] checkout --detach failed (worktree stays at current commit): ${checkoutResult.stderr}`
|
|
12342
|
+
);
|
|
12343
|
+
await this.resetAndCleanWorktree(worktreePath, "HEAD");
|
|
12344
|
+
return;
|
|
12345
|
+
}
|
|
12346
|
+
await this.resetAndCleanWorktree(worktreePath, targetSha);
|
|
12347
|
+
logger.info(`[Workspace] Worktree updated to ${upstreamRef} (${targetSha.slice(0, 8)})`);
|
|
12348
|
+
} catch (error) {
|
|
12349
|
+
logger.warn(`[Workspace] Failed to refresh worktree (continuing with stale state): ${error}`);
|
|
11449
12350
|
}
|
|
11450
|
-
|
|
11451
|
-
|
|
12351
|
+
}
|
|
12352
|
+
/**
|
|
12353
|
+
* Create worktree for the main project.
|
|
12354
|
+
* See visor-disable comment above resolveUpstreamRef for why this doesn't use WorktreeManager.
|
|
12355
|
+
*/
|
|
12356
|
+
async createMainProjectWorktree(targetPath) {
|
|
12357
|
+
logger.debug(`Creating main project worktree: ${targetPath}`);
|
|
12358
|
+
const { upstreamRef, targetSha } = await this.fetchAndResolveUpstream();
|
|
12359
|
+
const createCmd = `git -C ${shellEscape(this.originalPath)} worktree add --detach ${shellEscape(targetPath)} ${shellEscape(targetSha)}`;
|
|
11452
12360
|
const result = await commandExecutor.execute(createCmd, { timeout: 6e4 });
|
|
11453
12361
|
if (result.exitCode !== 0) {
|
|
11454
12362
|
throw new Error(`Failed to create main project worktree: ${result.stderr}`);
|
|
11455
12363
|
}
|
|
11456
|
-
|
|
12364
|
+
await this.resetAndCleanWorktree(targetPath, targetSha);
|
|
12365
|
+
logger.info(
|
|
12366
|
+
`Created main project worktree at ${targetPath} (${upstreamRef} -> ${targetSha.slice(0, 8)})`
|
|
12367
|
+
);
|
|
11457
12368
|
}
|
|
11458
12369
|
/**
|
|
11459
12370
|
* Remove main project worktree
|
|
@@ -11486,7 +12397,7 @@ var init_workspace_manager = __esm({
|
|
|
11486
12397
|
* Extract project name from path
|
|
11487
12398
|
*/
|
|
11488
12399
|
extractProjectName(dirPath) {
|
|
11489
|
-
return
|
|
12400
|
+
return path7.basename(dirPath);
|
|
11490
12401
|
}
|
|
11491
12402
|
/**
|
|
11492
12403
|
* Extract repository name from owner/repo format
|
|
@@ -11641,6 +12552,9 @@ async function initializeWorkspace(context2) {
|
|
|
11641
12552
|
process.env.VISOR_WORKSPACE_MAIN_PROJECT = info.mainProjectPath;
|
|
11642
12553
|
process.env.VISOR_WORKSPACE_MAIN_PROJECT_NAME = info.mainProjectName;
|
|
11643
12554
|
process.env.VISOR_ORIGINAL_WORKDIR = originalPath;
|
|
12555
|
+
const basePath = workspaceConfig?.base_path || process.env.VISOR_WORKSPACE_PATH || "/tmp/visor-workspaces";
|
|
12556
|
+
const existing = process.env.GIT_CEILING_DIRECTORIES;
|
|
12557
|
+
process.env.GIT_CEILING_DIRECTORIES = existing ? `${existing}:${basePath}` : basePath;
|
|
11644
12558
|
} catch {
|
|
11645
12559
|
}
|
|
11646
12560
|
logger.info(`[Workspace] Initialized workspace: ${info.workspacePath}`);
|
|
@@ -11717,8 +12631,8 @@ var init_summary = __esm({
|
|
|
11717
12631
|
});
|
|
11718
12632
|
|
|
11719
12633
|
// src/state-machine-execution-engine.ts
|
|
11720
|
-
import * as
|
|
11721
|
-
import * as
|
|
12634
|
+
import * as path8 from "path";
|
|
12635
|
+
import * as fs6 from "fs";
|
|
11722
12636
|
function serializeRunState(state) {
|
|
11723
12637
|
return {
|
|
11724
12638
|
...state,
|
|
@@ -11815,7 +12729,7 @@ var init_state_machine_execution_engine = __esm({
|
|
|
11815
12729
|
try {
|
|
11816
12730
|
const map = options?.webhookContext?.webhookData;
|
|
11817
12731
|
if (map) {
|
|
11818
|
-
const { CheckProviderRegistry: CheckProviderRegistry2 } = await import("./check-provider-registry-
|
|
12732
|
+
const { CheckProviderRegistry: CheckProviderRegistry2 } = await import("./check-provider-registry-DVQDGTOE.mjs");
|
|
11819
12733
|
const reg = CheckProviderRegistry2.getInstance();
|
|
11820
12734
|
const p = reg.getProvider("http_input");
|
|
11821
12735
|
if (p && typeof p.setWebhookContext === "function") p.setWebhookContext(map);
|
|
@@ -11938,7 +12852,7 @@ var init_state_machine_execution_engine = __esm({
|
|
|
11938
12852
|
tag_filter: tagFilter
|
|
11939
12853
|
} : config;
|
|
11940
12854
|
try {
|
|
11941
|
-
const { CheckProviderRegistry: CheckProviderRegistry2 } = await import("./check-provider-registry-
|
|
12855
|
+
const { CheckProviderRegistry: CheckProviderRegistry2 } = await import("./check-provider-registry-DVQDGTOE.mjs");
|
|
11942
12856
|
const registry = CheckProviderRegistry2.getInstance();
|
|
11943
12857
|
registry.setCustomTools(configWithTagFilter.tools || {});
|
|
11944
12858
|
} catch (error) {
|
|
@@ -12002,7 +12916,7 @@ var init_state_machine_execution_engine = __esm({
|
|
|
12002
12916
|
try {
|
|
12003
12917
|
const webhookData = this.executionContext?.webhookContext?.webhookData;
|
|
12004
12918
|
if (webhookData instanceof Map) {
|
|
12005
|
-
const { extractSlackContext: extractSlackContext2 } = await import("./schedule-tool-handler-
|
|
12919
|
+
const { extractSlackContext: extractSlackContext2 } = await import("./schedule-tool-handler-YUC6CAXX.mjs");
|
|
12006
12920
|
const slackCtx = extractSlackContext2(webhookData);
|
|
12007
12921
|
if (slackCtx) {
|
|
12008
12922
|
const payload = Array.from(webhookData.values())[0];
|
|
@@ -12031,7 +12945,7 @@ var init_state_machine_execution_engine = __esm({
|
|
|
12031
12945
|
if (Array.isArray(configWithTagFilter.frontends) && configWithTagFilter.frontends.length > 0) {
|
|
12032
12946
|
try {
|
|
12033
12947
|
const { EventBus } = await import("./event-bus-5K3Y2FCS.mjs");
|
|
12034
|
-
const { FrontendsHost } = await import("./host-
|
|
12948
|
+
const { FrontendsHost } = await import("./host-H7IX4GBK.mjs");
|
|
12035
12949
|
const bus = new EventBus();
|
|
12036
12950
|
context2.eventBus = bus;
|
|
12037
12951
|
frontendsHost = new FrontendsHost(bus, logger);
|
|
@@ -12132,9 +13046,9 @@ var init_state_machine_execution_engine = __esm({
|
|
|
12132
13046
|
}
|
|
12133
13047
|
const checkId = String(ev?.checkId || "unknown");
|
|
12134
13048
|
const threadKey = ev?.threadKey || (channel && threadTs ? `${channel}:${threadTs}` : "session");
|
|
12135
|
-
const baseDir = process.env.VISOR_SNAPSHOT_DIR ||
|
|
12136
|
-
|
|
12137
|
-
const filePath =
|
|
13049
|
+
const baseDir = process.env.VISOR_SNAPSHOT_DIR || path8.resolve(process.cwd(), ".visor", "snapshots");
|
|
13050
|
+
fs6.mkdirSync(baseDir, { recursive: true });
|
|
13051
|
+
const filePath = path8.join(baseDir, `${threadKey}-${checkId}.json`);
|
|
12138
13052
|
await this.saveSnapshotToFile(filePath);
|
|
12139
13053
|
logger.info(`[Snapshot] Saved run snapshot: ${filePath}`);
|
|
12140
13054
|
try {
|
|
@@ -12275,7 +13189,7 @@ var init_state_machine_execution_engine = __esm({
|
|
|
12275
13189
|
* Does not include secrets. Intended for debugging and future resume support.
|
|
12276
13190
|
*/
|
|
12277
13191
|
async saveSnapshotToFile(filePath) {
|
|
12278
|
-
const
|
|
13192
|
+
const fs12 = await import("fs/promises");
|
|
12279
13193
|
const ctx = this._lastContext;
|
|
12280
13194
|
const runner = this._lastRunner;
|
|
12281
13195
|
if (!ctx || !runner) {
|
|
@@ -12295,14 +13209,14 @@ var init_state_machine_execution_engine = __esm({
|
|
|
12295
13209
|
journal: entries,
|
|
12296
13210
|
requestedChecks: ctx.requestedChecks || []
|
|
12297
13211
|
};
|
|
12298
|
-
await
|
|
13212
|
+
await fs12.writeFile(filePath, JSON.stringify(payload, null, 2), "utf8");
|
|
12299
13213
|
}
|
|
12300
13214
|
/**
|
|
12301
13215
|
* Load a snapshot JSON from file and return it. Resume support can build on this.
|
|
12302
13216
|
*/
|
|
12303
13217
|
async loadSnapshotFromFile(filePath) {
|
|
12304
|
-
const
|
|
12305
|
-
const raw = await
|
|
13218
|
+
const fs12 = await import("fs/promises");
|
|
13219
|
+
const raw = await fs12.readFile(filePath, "utf8");
|
|
12306
13220
|
return JSON.parse(raw);
|
|
12307
13221
|
}
|
|
12308
13222
|
/**
|
|
@@ -12381,9 +13295,9 @@ var init_state_machine_execution_engine = __esm({
|
|
|
12381
13295
|
* @returns Array of failure condition evaluation results
|
|
12382
13296
|
*/
|
|
12383
13297
|
async evaluateFailureConditions(checkName, reviewSummary, config, previousOutputs, authorAssociation) {
|
|
12384
|
-
const { FailureConditionEvaluator: FailureConditionEvaluator2 } = await import("./failure-condition-evaluator-
|
|
13298
|
+
const { FailureConditionEvaluator: FailureConditionEvaluator2 } = await import("./failure-condition-evaluator-IVCTD4BZ.mjs");
|
|
12385
13299
|
const evaluator = new FailureConditionEvaluator2();
|
|
12386
|
-
const { addEvent: addEvent3 } = await import("./trace-helpers-
|
|
13300
|
+
const { addEvent: addEvent3 } = await import("./trace-helpers-6ROJR7N3.mjs");
|
|
12387
13301
|
const { addFailIfTriggered } = await import("./metrics-I6A7IHG4.mjs");
|
|
12388
13302
|
const checkConfig = config.checks?.[checkName];
|
|
12389
13303
|
if (!checkConfig) {
|
|
@@ -12609,6 +13523,13 @@ var init_state_machine_execution_engine = __esm({
|
|
|
12609
13523
|
|
|
12610
13524
|
// src/scheduler/scheduler.ts
|
|
12611
13525
|
import cron from "node-cron";
|
|
13526
|
+
function getScheduler(visorConfig, config) {
|
|
13527
|
+
if (!schedulerInstance && visorConfig) {
|
|
13528
|
+
schedulerInstance = new Scheduler(visorConfig, config);
|
|
13529
|
+
}
|
|
13530
|
+
return schedulerInstance;
|
|
13531
|
+
}
|
|
13532
|
+
var Scheduler, schedulerInstance;
|
|
12612
13533
|
var init_scheduler = __esm({
|
|
12613
13534
|
"src/scheduler/scheduler.ts"() {
|
|
12614
13535
|
"use strict";
|
|
@@ -12616,6 +13537,1443 @@ var init_scheduler = __esm({
|
|
|
12616
13537
|
init_schedule_parser();
|
|
12617
13538
|
init_logger();
|
|
12618
13539
|
init_state_machine_execution_engine();
|
|
13540
|
+
Scheduler = class {
|
|
13541
|
+
store;
|
|
13542
|
+
visorConfig;
|
|
13543
|
+
checkIntervalMs;
|
|
13544
|
+
defaultTimezone;
|
|
13545
|
+
checkInterval = null;
|
|
13546
|
+
cronJobs = /* @__PURE__ */ new Map();
|
|
13547
|
+
oneTimeTimeouts = /* @__PURE__ */ new Map();
|
|
13548
|
+
running = false;
|
|
13549
|
+
engine;
|
|
13550
|
+
outputAdapters = /* @__PURE__ */ new Map();
|
|
13551
|
+
executionContext = {};
|
|
13552
|
+
contextEnricher;
|
|
13553
|
+
// HA fields
|
|
13554
|
+
haConfig;
|
|
13555
|
+
nodeId;
|
|
13556
|
+
heartbeatInterval = null;
|
|
13557
|
+
heldLocks = /* @__PURE__ */ new Map();
|
|
13558
|
+
// scheduleId → lockToken
|
|
13559
|
+
constructor(visorConfig, config) {
|
|
13560
|
+
this.visorConfig = visorConfig;
|
|
13561
|
+
this.checkIntervalMs = config?.checkIntervalMs ?? 6e4;
|
|
13562
|
+
this.defaultTimezone = config?.defaultTimezone ?? "UTC";
|
|
13563
|
+
this.haConfig = config?.ha;
|
|
13564
|
+
this.nodeId = config?.ha?.node_id || `${__require("os").hostname()}-${process.pid}`;
|
|
13565
|
+
const storeConfig = {
|
|
13566
|
+
path: config?.storagePath,
|
|
13567
|
+
storage: config?.storage,
|
|
13568
|
+
ha: config?.ha
|
|
13569
|
+
};
|
|
13570
|
+
this.store = ScheduleStore.getInstance(storeConfig, config?.limits);
|
|
13571
|
+
}
|
|
13572
|
+
/**
|
|
13573
|
+
* Set the execution engine (called after construction to avoid circular deps)
|
|
13574
|
+
*/
|
|
13575
|
+
setEngine(engine) {
|
|
13576
|
+
this.engine = engine;
|
|
13577
|
+
}
|
|
13578
|
+
/**
|
|
13579
|
+
* Set the execution context (e.g., Slack client) for workflow executions
|
|
13580
|
+
*/
|
|
13581
|
+
setExecutionContext(context2) {
|
|
13582
|
+
this.executionContext = { ...this.executionContext, ...context2 };
|
|
13583
|
+
}
|
|
13584
|
+
/**
|
|
13585
|
+
* Register an output adapter for a specific type
|
|
13586
|
+
*/
|
|
13587
|
+
registerOutputAdapter(adapter) {
|
|
13588
|
+
this.outputAdapters.set(adapter.type, adapter);
|
|
13589
|
+
logger.debug(`[Scheduler] Registered output adapter: ${adapter.type}`);
|
|
13590
|
+
}
|
|
13591
|
+
/**
|
|
13592
|
+
* Register a context enricher for frontend-specific functionality
|
|
13593
|
+
* This allows frontends to inject thread history, prompt state, etc.
|
|
13594
|
+
*/
|
|
13595
|
+
registerContextEnricher(enricher) {
|
|
13596
|
+
this.contextEnricher = enricher;
|
|
13597
|
+
logger.debug("[Scheduler] Registered context enricher");
|
|
13598
|
+
}
|
|
13599
|
+
/**
|
|
13600
|
+
* Get the schedule store instance
|
|
13601
|
+
*/
|
|
13602
|
+
getStore() {
|
|
13603
|
+
return this.store;
|
|
13604
|
+
}
|
|
13605
|
+
/**
|
|
13606
|
+
* Cancel a schedule's in-memory job (cron or timeout).
|
|
13607
|
+
* Called after deleting from DB to ensure the job doesn't fire again.
|
|
13608
|
+
*/
|
|
13609
|
+
cancelSchedule(scheduleId) {
|
|
13610
|
+
const cronJob = this.cronJobs.get(scheduleId);
|
|
13611
|
+
if (cronJob) {
|
|
13612
|
+
cronJob.stop();
|
|
13613
|
+
this.cronJobs.delete(scheduleId);
|
|
13614
|
+
logger.debug(`[Scheduler] Cancelled cron job for schedule ${scheduleId}`);
|
|
13615
|
+
return;
|
|
13616
|
+
}
|
|
13617
|
+
const timeout = this.oneTimeTimeouts.get(scheduleId);
|
|
13618
|
+
if (timeout) {
|
|
13619
|
+
clearTimeout(timeout);
|
|
13620
|
+
this.oneTimeTimeouts.delete(scheduleId);
|
|
13621
|
+
logger.debug(`[Scheduler] Cancelled timeout for schedule ${scheduleId}`);
|
|
13622
|
+
}
|
|
13623
|
+
}
|
|
13624
|
+
/**
|
|
13625
|
+
* Start the scheduler
|
|
13626
|
+
*/
|
|
13627
|
+
async start() {
|
|
13628
|
+
if (this.running) {
|
|
13629
|
+
logger.warn("[Scheduler] Already running");
|
|
13630
|
+
return;
|
|
13631
|
+
}
|
|
13632
|
+
await this.store.initialize();
|
|
13633
|
+
try {
|
|
13634
|
+
await this.loadStaticCronJobs();
|
|
13635
|
+
} catch (err) {
|
|
13636
|
+
logger.error(
|
|
13637
|
+
`[Scheduler] Failed to load static cron jobs: ${err instanceof Error ? err.message : err}`
|
|
13638
|
+
);
|
|
13639
|
+
}
|
|
13640
|
+
try {
|
|
13641
|
+
await this.restoreSchedules();
|
|
13642
|
+
} catch (err) {
|
|
13643
|
+
logger.error(
|
|
13644
|
+
`[Scheduler] Failed to restore schedules: ${err instanceof Error ? err.message : err}`
|
|
13645
|
+
);
|
|
13646
|
+
}
|
|
13647
|
+
this.checkInterval = setInterval(() => {
|
|
13648
|
+
this.checkDueSchedules().catch((error) => {
|
|
13649
|
+
logger.error(
|
|
13650
|
+
`[Scheduler] Error checking due schedules: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
13651
|
+
);
|
|
13652
|
+
});
|
|
13653
|
+
}, this.checkIntervalMs);
|
|
13654
|
+
if (this.haConfig?.enabled) {
|
|
13655
|
+
this.startHeartbeat();
|
|
13656
|
+
}
|
|
13657
|
+
this.running = true;
|
|
13658
|
+
logger.info("[Scheduler] Started");
|
|
13659
|
+
}
|
|
13660
|
+
/**
|
|
13661
|
+
* Load and schedule static cron jobs from visor config
|
|
13662
|
+
* These are defined in scheduler.cron section and always run regardless of permissions
|
|
13663
|
+
*/
|
|
13664
|
+
async loadStaticCronJobs() {
|
|
13665
|
+
const schedulerCfg = this.visorConfig.scheduler;
|
|
13666
|
+
if (!schedulerCfg?.cron) {
|
|
13667
|
+
return;
|
|
13668
|
+
}
|
|
13669
|
+
const cronJobs = schedulerCfg.cron;
|
|
13670
|
+
let loadedCount = 0;
|
|
13671
|
+
for (const [jobId, job] of Object.entries(cronJobs)) {
|
|
13672
|
+
if (job.enabled === false) {
|
|
13673
|
+
logger.debug(`[Scheduler] Static cron job '${jobId}' is disabled, skipping`);
|
|
13674
|
+
continue;
|
|
13675
|
+
}
|
|
13676
|
+
try {
|
|
13677
|
+
await this.scheduleStaticCronJob(jobId, job);
|
|
13678
|
+
loadedCount++;
|
|
13679
|
+
} catch (error) {
|
|
13680
|
+
logger.error(
|
|
13681
|
+
`[Scheduler] Failed to load static cron job '${jobId}': ${error instanceof Error ? error.message : "Unknown error"}`
|
|
13682
|
+
);
|
|
13683
|
+
}
|
|
13684
|
+
}
|
|
13685
|
+
if (loadedCount > 0) {
|
|
13686
|
+
logger.info(`[Scheduler] Loaded ${loadedCount} static cron job(s) from config`);
|
|
13687
|
+
}
|
|
13688
|
+
}
|
|
13689
|
+
/**
|
|
13690
|
+
* Schedule a static cron job from config
|
|
13691
|
+
*/
|
|
13692
|
+
async scheduleStaticCronJob(jobId, job) {
|
|
13693
|
+
if (!cron.validate(job.schedule)) {
|
|
13694
|
+
throw new Error(`Invalid cron expression: ${job.schedule}`);
|
|
13695
|
+
}
|
|
13696
|
+
const allChecks = Object.keys(this.visorConfig.checks || {});
|
|
13697
|
+
if (!allChecks.includes(job.workflow)) {
|
|
13698
|
+
throw new Error(`Workflow "${job.workflow}" not found in configuration`);
|
|
13699
|
+
}
|
|
13700
|
+
const internalId = `__static_cron__:${jobId}`;
|
|
13701
|
+
const cronJob = cron.schedule(
|
|
13702
|
+
job.schedule,
|
|
13703
|
+
async () => {
|
|
13704
|
+
logger.info(`[Scheduler] Executing static cron job '${jobId}': workflow="${job.workflow}"`);
|
|
13705
|
+
await this.executeStaticCronJob(jobId, job);
|
|
13706
|
+
},
|
|
13707
|
+
{
|
|
13708
|
+
scheduled: true,
|
|
13709
|
+
timezone: job.timezone || this.defaultTimezone
|
|
13710
|
+
}
|
|
13711
|
+
);
|
|
13712
|
+
this.cronJobs.set(internalId, cronJob);
|
|
13713
|
+
try {
|
|
13714
|
+
const nextRun = getNextRunTime(job.schedule, job.timezone || this.defaultTimezone);
|
|
13715
|
+
const description = job.description ? ` (${job.description})` : "";
|
|
13716
|
+
logger.debug(
|
|
13717
|
+
`[Scheduler] Scheduled static cron job '${jobId}'${description}: ${job.schedule} \u2192 ${job.workflow}, next run: ${nextRun.toISOString()}`
|
|
13718
|
+
);
|
|
13719
|
+
} catch {
|
|
13720
|
+
}
|
|
13721
|
+
}
|
|
13722
|
+
/**
|
|
13723
|
+
* Execute a static cron job
|
|
13724
|
+
*/
|
|
13725
|
+
async executeStaticCronJob(jobId, job) {
|
|
13726
|
+
if (this.haConfig?.enabled) {
|
|
13727
|
+
const ttl = this.haConfig.lock_ttl ?? 60;
|
|
13728
|
+
const backend = this.store.getBackend();
|
|
13729
|
+
const lockId = `__static_cron__:${jobId}`;
|
|
13730
|
+
const lockToken = await backend.tryAcquireLock(lockId, this.nodeId, ttl);
|
|
13731
|
+
if (!lockToken) {
|
|
13732
|
+
logger.debug(`[Scheduler] Static cron job '${jobId}' locked by another node, skipping`);
|
|
13733
|
+
return;
|
|
13734
|
+
}
|
|
13735
|
+
this.heldLocks.set(lockId, lockToken);
|
|
13736
|
+
try {
|
|
13737
|
+
await this.doExecuteStaticCronJob(jobId, job);
|
|
13738
|
+
} finally {
|
|
13739
|
+
await backend.releaseLock(lockId, lockToken);
|
|
13740
|
+
this.heldLocks.delete(lockId);
|
|
13741
|
+
}
|
|
13742
|
+
} else {
|
|
13743
|
+
await this.doExecuteStaticCronJob(jobId, job);
|
|
13744
|
+
}
|
|
13745
|
+
}
|
|
13746
|
+
/**
|
|
13747
|
+
* Internal: execute a static cron job (after lock is held in HA mode)
|
|
13748
|
+
*/
|
|
13749
|
+
async doExecuteStaticCronJob(jobId, job) {
|
|
13750
|
+
const startTime = Date.now();
|
|
13751
|
+
let result;
|
|
13752
|
+
const syntheticSchedule = {
|
|
13753
|
+
id: `__static_cron__:${jobId}`,
|
|
13754
|
+
creatorId: "system",
|
|
13755
|
+
creatorContext: "config",
|
|
13756
|
+
timezone: job.timezone || this.defaultTimezone,
|
|
13757
|
+
schedule: job.schedule,
|
|
13758
|
+
isRecurring: true,
|
|
13759
|
+
originalExpression: job.schedule,
|
|
13760
|
+
workflow: job.workflow,
|
|
13761
|
+
workflowInputs: job.inputs,
|
|
13762
|
+
outputContext: job.output ? {
|
|
13763
|
+
type: job.output.type,
|
|
13764
|
+
target: job.output.target,
|
|
13765
|
+
threadId: job.output.thread_id
|
|
13766
|
+
} : void 0,
|
|
13767
|
+
status: "active",
|
|
13768
|
+
createdAt: 0,
|
|
13769
|
+
runCount: 0,
|
|
13770
|
+
failureCount: 0
|
|
13771
|
+
};
|
|
13772
|
+
try {
|
|
13773
|
+
const output = await this.executeWorkflow(syntheticSchedule);
|
|
13774
|
+
result = {
|
|
13775
|
+
success: true,
|
|
13776
|
+
output,
|
|
13777
|
+
executionTimeMs: Date.now() - startTime
|
|
13778
|
+
};
|
|
13779
|
+
logger.info(
|
|
13780
|
+
`[Scheduler] Static cron job '${jobId}' completed in ${result.executionTimeMs}ms`
|
|
13781
|
+
);
|
|
13782
|
+
} catch (error) {
|
|
13783
|
+
const errorMsg = error instanceof Error ? error.message : "Unknown error";
|
|
13784
|
+
result = {
|
|
13785
|
+
success: false,
|
|
13786
|
+
error: errorMsg,
|
|
13787
|
+
executionTimeMs: Date.now() - startTime
|
|
13788
|
+
};
|
|
13789
|
+
logger.error(`[Scheduler] Static cron job '${jobId}' failed: ${errorMsg}`);
|
|
13790
|
+
}
|
|
13791
|
+
await this.sendResult(syntheticSchedule, result);
|
|
13792
|
+
}
|
|
13793
|
+
/**
|
|
13794
|
+
* Stop the scheduler
|
|
13795
|
+
*/
|
|
13796
|
+
async stop() {
|
|
13797
|
+
if (!this.running) {
|
|
13798
|
+
return;
|
|
13799
|
+
}
|
|
13800
|
+
this.stopHeartbeat();
|
|
13801
|
+
if (this.heldLocks.size > 0) {
|
|
13802
|
+
const backend = this.store.getBackend();
|
|
13803
|
+
for (const [scheduleId, lockToken] of this.heldLocks.entries()) {
|
|
13804
|
+
await backend.releaseLock(scheduleId, lockToken).catch(() => {
|
|
13805
|
+
});
|
|
13806
|
+
}
|
|
13807
|
+
this.heldLocks.clear();
|
|
13808
|
+
}
|
|
13809
|
+
if (this.checkInterval) {
|
|
13810
|
+
clearInterval(this.checkInterval);
|
|
13811
|
+
this.checkInterval = null;
|
|
13812
|
+
}
|
|
13813
|
+
for (const [id, job] of this.cronJobs.entries()) {
|
|
13814
|
+
job.stop();
|
|
13815
|
+
logger.debug(`[Scheduler] Stopped cron job for schedule ${id}`);
|
|
13816
|
+
}
|
|
13817
|
+
this.cronJobs.clear();
|
|
13818
|
+
for (const [id, timeout] of this.oneTimeTimeouts.entries()) {
|
|
13819
|
+
clearTimeout(timeout);
|
|
13820
|
+
logger.debug(`[Scheduler] Cleared timeout for schedule ${id}`);
|
|
13821
|
+
}
|
|
13822
|
+
this.oneTimeTimeouts.clear();
|
|
13823
|
+
await this.store.flush();
|
|
13824
|
+
this.running = false;
|
|
13825
|
+
logger.info("[Scheduler] Stopped");
|
|
13826
|
+
}
|
|
13827
|
+
/**
|
|
13828
|
+
* Start the heartbeat timer for renewing HA locks
|
|
13829
|
+
*/
|
|
13830
|
+
startHeartbeat() {
|
|
13831
|
+
if (this.heartbeatInterval) return;
|
|
13832
|
+
const intervalMs = (this.haConfig?.heartbeat_interval ?? 15) * 1e3;
|
|
13833
|
+
this.heartbeatInterval = setInterval(async () => {
|
|
13834
|
+
const backend = this.store.getBackend();
|
|
13835
|
+
const ttl = this.haConfig?.lock_ttl ?? 60;
|
|
13836
|
+
for (const [scheduleId, lockToken] of this.heldLocks.entries()) {
|
|
13837
|
+
const renewed = await backend.renewLock(scheduleId, lockToken, ttl);
|
|
13838
|
+
if (!renewed) {
|
|
13839
|
+
logger.warn(`[Scheduler] Failed to renew lock for schedule ${scheduleId}, lock lost`);
|
|
13840
|
+
this.heldLocks.delete(scheduleId);
|
|
13841
|
+
}
|
|
13842
|
+
}
|
|
13843
|
+
}, intervalMs);
|
|
13844
|
+
logger.debug(`[Scheduler] Heartbeat started (interval: ${intervalMs}ms)`);
|
|
13845
|
+
}
|
|
13846
|
+
/**
|
|
13847
|
+
* Stop the heartbeat timer
|
|
13848
|
+
*/
|
|
13849
|
+
stopHeartbeat() {
|
|
13850
|
+
if (this.heartbeatInterval) {
|
|
13851
|
+
clearInterval(this.heartbeatInterval);
|
|
13852
|
+
this.heartbeatInterval = null;
|
|
13853
|
+
}
|
|
13854
|
+
}
|
|
13855
|
+
/**
|
|
13856
|
+
* Restore schedules from persistent storage
|
|
13857
|
+
*/
|
|
13858
|
+
async restoreSchedules() {
|
|
13859
|
+
const activeSchedules = await this.store.getActiveSchedulesAsync();
|
|
13860
|
+
logger.info(`[Scheduler] Restoring ${activeSchedules.length} active schedules`);
|
|
13861
|
+
for (const schedule of activeSchedules) {
|
|
13862
|
+
try {
|
|
13863
|
+
await this.scheduleExecution(schedule);
|
|
13864
|
+
} catch (error) {
|
|
13865
|
+
logger.error(
|
|
13866
|
+
`[Scheduler] Failed to restore schedule ${schedule.id}: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
13867
|
+
);
|
|
13868
|
+
}
|
|
13869
|
+
}
|
|
13870
|
+
}
|
|
13871
|
+
/**
|
|
13872
|
+
* Schedule a workflow for execution
|
|
13873
|
+
*/
|
|
13874
|
+
async scheduleExecution(schedule) {
|
|
13875
|
+
if (schedule.isRecurring) {
|
|
13876
|
+
await this.scheduleRecurring(schedule);
|
|
13877
|
+
} else {
|
|
13878
|
+
await this.scheduleOneTime(schedule);
|
|
13879
|
+
}
|
|
13880
|
+
}
|
|
13881
|
+
/**
 * Schedule a recurring workflow using cron.
 *
 * Validates the cron expression first: on an invalid expression any
 * previously registered job for this id is stopped and removed, and the
 * schedule is persisted as failed. On success, any prior job is stopped
 * before the replacement is registered, and the next run time is computed
 * and persisted best-effort.
 *
 * @param schedule - persisted schedule record; reads .id, .schedule (cron
 *   expression), .timezone
 */
async scheduleRecurring(schedule) {
  if (!cron.validate(schedule.schedule)) {
    logger.error(
      `[Scheduler] Invalid cron expression for schedule ${schedule.id}: ${schedule.schedule}`
    );
    // Tear down any job registered under a previously valid expression.
    const existingJob2 = this.cronJobs.get(schedule.id);
    if (existingJob2) {
      existingJob2.stop();
      this.cronJobs.delete(schedule.id);
    }
    await this.store.updateAsync(schedule.id, {
      status: "failed",
      lastError: "Invalid cron expression"
    });
    return;
  }
  // Stop (without deleting) any prior job; set() below overwrites the entry.
  const existingJob = this.cronJobs.get(schedule.id);
  if (existingJob) {
    existingJob.stop();
  }
  const job = cron.schedule(
    schedule.schedule,
    async () => {
      await this.executeSchedule(schedule);
    },
    {
      scheduled: true,
      timezone: schedule.timezone || this.defaultTimezone
    }
  );
  this.cronJobs.set(schedule.id, job);
  // Best-effort: persist the next fire time for due-schedule polling/UIs;
  // a failure here does not unschedule the job.
  try {
    const nextRun = getNextRunTime(schedule.schedule, schedule.timezone);
    await this.store.updateAsync(schedule.id, { nextRunAt: nextRun.getTime() });
  } catch (error) {
    logger.warn(
      `[Scheduler] Could not compute next run time for ${schedule.id}: ${error instanceof Error ? error.message : "Unknown error"}`
    );
  }
  logger.debug(`[Scheduler] Scheduled recurring execution ${schedule.id}: ${schedule.schedule}`);
}
|
|
13925
|
+
/**
|
|
13926
|
+
* Schedule a one-time workflow using setTimeout
|
|
13927
|
+
*/
|
|
13928
|
+
async scheduleOneTime(schedule) {
|
|
13929
|
+
const existingTimeout = this.oneTimeTimeouts.get(schedule.id);
|
|
13930
|
+
if (existingTimeout) {
|
|
13931
|
+
clearTimeout(existingTimeout);
|
|
13932
|
+
}
|
|
13933
|
+
if (!schedule.runAt) {
|
|
13934
|
+
logger.error(`[Scheduler] One-time schedule ${schedule.id} has no runAt time`);
|
|
13935
|
+
return;
|
|
13936
|
+
}
|
|
13937
|
+
const delayMs = schedule.runAt - Date.now();
|
|
13938
|
+
if (delayMs <= 0) {
|
|
13939
|
+
await this.executeSchedule(schedule);
|
|
13940
|
+
return;
|
|
13941
|
+
}
|
|
13942
|
+
const timeout = setTimeout(async () => {
|
|
13943
|
+
this.oneTimeTimeouts.delete(schedule.id);
|
|
13944
|
+
await this.executeSchedule(schedule);
|
|
13945
|
+
}, delayMs);
|
|
13946
|
+
this.oneTimeTimeouts.set(schedule.id, timeout);
|
|
13947
|
+
logger.debug(
|
|
13948
|
+
`[Scheduler] Scheduled one-time execution ${schedule.id} for ${new Date(schedule.runAt).toISOString()}`
|
|
13949
|
+
);
|
|
13950
|
+
}
|
|
13951
|
+
/**
 * Check for and execute due schedules.
 *
 * Skips schedules that already have an armed cron job or one-time timer
 * (those fire on their own). In HA mode a per-schedule distributed lock is
 * acquired so only one node runs a given schedule; the lock is always
 * released in a finally block.
 */
async checkDueSchedules() {
  const dueSchedules = await this.store.getDueSchedulesAsync();
  for (const schedule of dueSchedules) {
    // Already armed locally — let the cron job / timeout handle it.
    if (this.cronJobs.has(schedule.id) || this.oneTimeTimeouts.has(schedule.id)) {
      continue;
    }
    if (this.haConfig?.enabled) {
      const ttl = this.haConfig.lock_ttl ?? 60;
      const backend = this.store.getBackend();
      const lockToken = await backend.tryAcquireLock(schedule.id, this.nodeId, ttl);
      if (!lockToken) {
        // Another node won the race for this schedule.
        logger.debug(`[Scheduler] Schedule ${schedule.id} locked by another node, skipping`);
        continue;
      }
      this.heldLocks.set(schedule.id, lockToken);
      try {
        await this.executeSchedule(schedule);
      } finally {
        // Release even if execution threw, so peers aren't blocked until
        // the lock TTL expires.
        await backend.releaseLock(schedule.id, lockToken);
        this.heldLocks.delete(schedule.id);
      }
    } else {
      await this.executeSchedule(schedule);
    }
  }
}
|
|
13980
|
+
/**
 * Execute a scheduled workflow.
 *
 * Re-reads the schedule from the store first so executions queued before a
 * cancel/pause/delete are skipped. On success, updates run bookkeeping
 * (lastRunAt, runCount, resets failureCount/lastError); one-time schedules
 * are then marked completed and deleted, recurring ones get their next run
 * time recomputed (the schedule is paused if that fails). On error,
 * delegates to handleScheduleFailure. The result — success or failure — is
 * always forwarded to sendResult.
 *
 * @param schedule - the schedule record captured when the timer/cron was armed
 */
async executeSchedule(schedule) {
  // Freshness check: the in-memory timer may fire for a schedule that was
  // cancelled, paused, or deleted since it was armed.
  try {
    const fresh = await this.store.getAsync(schedule.id);
    if (!fresh || fresh.status !== "active") {
      logger.info(
        `[Scheduler] Schedule ${schedule.id} is no longer active (${fresh ? fresh.status : "deleted"}), skipping execution`
      );
      this.cancelSchedule(schedule.id);
      return;
    }
  } catch {
    // Store read failed — err on the side of running rather than dropping.
    logger.warn(
      `[Scheduler] Could not verify schedule ${schedule.id} freshness, proceeding with execution`
    );
  }
  const description = schedule.workflow || "reminder";
  logger.info(`[Scheduler] Executing schedule ${schedule.id}: ${description}`);
  const startTime = Date.now();
  let result;
  try {
    const output = await this.executeWorkflow(schedule);
    result = {
      success: true,
      output,
      executionTimeMs: Date.now() - startTime
    };
    const now = Date.now();
    await this.store.updateAsync(schedule.id, {
      lastRunAt: now,
      runCount: schedule.runCount + 1,
      failureCount: 0,
      // Reset on success
      lastError: void 0
    });
    if (!schedule.isRecurring) {
      // One-shots are finished: mark completed (for observers) then remove.
      await this.store.updateAsync(schedule.id, { status: "completed" });
      await this.store.deleteAsync(schedule.id);
      logger.info(`[Scheduler] One-time schedule ${schedule.id} completed and removed`);
    } else {
      try {
        const nextRun = getNextRunTime(schedule.schedule, schedule.timezone);
        await this.store.updateAsync(schedule.id, { nextRunAt: nextRun.getTime() });
      } catch (err) {
        // Without a next run time the schedule cannot advance — pause it
        // rather than leaving it permanently "due".
        logger.warn(
          `[Scheduler] Failed to compute next run time for ${schedule.id}, pausing schedule: ${err instanceof Error ? err.message : err}`
        );
        await this.store.updateAsync(schedule.id, {
          status: "paused",
          lastError: `Failed to compute next run time: ${err instanceof Error ? err.message : err}`
        });
      }
    }
  } catch (error) {
    const errorMsg = error instanceof Error ? error.message : "Unknown error";
    result = {
      success: false,
      error: errorMsg,
      executionTimeMs: Date.now() - startTime
    };
    await this.handleScheduleFailure(schedule, error);
  }
  // Deliver the outcome to the configured output adapter (best-effort).
  await this.sendResult(schedule, result);
}
|
|
14046
|
+
/**
|
|
14047
|
+
* Helper to prepare execution environment - reduces duplication between workflow and reminder execution
|
|
14048
|
+
*/
|
|
14049
|
+
prepareExecution(schedule, cliMessage) {
|
|
14050
|
+
const config = JSON.parse(JSON.stringify(this.visorConfig));
|
|
14051
|
+
const fronts = Array.isArray(config.frontends) ? config.frontends : [];
|
|
14052
|
+
const hasSlackFrontend = fronts.some((f) => f && f.name === "slack");
|
|
14053
|
+
if (!hasSlackFrontend && (cliMessage || schedule.outputContext?.type === "slack")) {
|
|
14054
|
+
fronts.push({ name: "slack" });
|
|
14055
|
+
}
|
|
14056
|
+
config.frontends = fronts;
|
|
14057
|
+
const engine = new StateMachineExecutionEngine();
|
|
14058
|
+
const responseRef = {};
|
|
14059
|
+
const responseCapture = (text) => {
|
|
14060
|
+
responseRef.captured = text;
|
|
14061
|
+
logger.debug(
|
|
14062
|
+
`[Scheduler] Captured AI response for schedule ${schedule.id} (${text.length} chars)`
|
|
14063
|
+
);
|
|
14064
|
+
};
|
|
14065
|
+
engine.setExecutionContext({
|
|
14066
|
+
...this.executionContext,
|
|
14067
|
+
cliMessage,
|
|
14068
|
+
responseCapture
|
|
14069
|
+
});
|
|
14070
|
+
return { engine, config, responseRef };
|
|
14071
|
+
}
|
|
14072
|
+
/**
 * Execute the workflow for a schedule.
 *
 * Schedules without a workflow name are treated as simple reminders.
 * Otherwise the named check must exist in the loaded config; it is run on a
 * fresh engine with a synthetic "schedule_triggered" webhook payload bound
 * to the /scheduler/trigger endpoint.
 *
 * @returns a small status object describing the outcome
 * @throws {Error} if schedule.workflow is not a configured check
 */
async executeWorkflow(schedule) {
  if (!schedule.workflow) {
    return this.executeSimpleReminder(schedule);
  }
  if (!this.engine) {
    logger.warn("[Scheduler] No execution engine set, skipping workflow execution");
    return { message: "No execution engine configured" };
  }
  const allChecks = Object.keys(this.visorConfig.checks || {});
  if (!allChecks.includes(schedule.workflow)) {
    throw new Error(`Workflow "${schedule.workflow}" not found in configuration`);
  }
  // Synthetic webhook payload describing the trigger, consumed by checks.
  const syntheticPayload = {
    event: {
      type: "schedule_triggered",
      schedule_id: schedule.id,
      workflow: schedule.workflow,
      creator_id: schedule.creatorId,
      creator_context: schedule.creatorContext,
      timestamp: Date.now()
    },
    schedule: {
      id: schedule.id,
      workflow: schedule.workflow,
      workflowInputs: schedule.workflowInputs,
      isRecurring: schedule.isRecurring,
      outputContext: schedule.outputContext
    }
  };
  const webhookData = /* @__PURE__ */ new Map();
  const endpoint = "/scheduler/trigger";
  webhookData.set(endpoint, syntheticPayload);
  // Best-effort credential refresh; ignore failures (module may be absent).
  try {
    const { refreshGitHubCredentials } = await import("./github-auth-UPBBBOME.mjs");
    await refreshGitHubCredentials();
  } catch {
  }
  const { engine: runEngine, config: cfgForRun } = this.prepareExecution(schedule);
  await runEngine.executeChecks({
    checks: [schedule.workflow],
    showDetails: true,
    outputFormat: "json",
    config: cfgForRun,
    webhookContext: { webhookData, eventType: "schedule" },
    debug: process.env.VISOR_DEBUG === "true",
    inputs: schedule.workflowInputs
  });
  return { message: "Workflow completed", workflow: schedule.workflow };
}
|
|
14124
|
+
/**
 * Execute a simple reminder by running it through the visor pipeline.
 * Treats the reminder text as if the user sent it as a message.
 *
 * When no checks are configured the raw reminder text is returned directly.
 * For recurring reminders the previous AI response (if any) is appended as
 * context, and the newly captured response is persisted for the next run.
 * On pipeline failure, falls back to returning the raw reminder text.
 *
 * @returns a status object; .capturedResponse holds the AI response if one
 *   was captured during the run
 */
async executeSimpleReminder(schedule) {
  const reminderText = schedule.workflowInputs?.text;
  if (!reminderText) {
    return { message: "Reminder!", type: "simple_reminder" };
  }
  const allChecks = Object.keys(this.visorConfig.checks || {});
  if (allChecks.length === 0) {
    logger.warn("[Scheduler] No checks configured, returning reminder text directly");
    return { message: reminderText, type: "simple_reminder" };
  }
  logger.info(`[Scheduler] Running reminder through visor pipeline (${allChecks.length} checks)`);
  const channel = schedule.outputContext?.target || "";
  const threadId = schedule.outputContext?.threadId;
  let threadMessages = [];
  let additionalPayload = {};
  // Optional frontend-specific enrichment (e.g. thread history); failures
  // are logged and the reminder proceeds without the extra context.
  if (this.contextEnricher?.enrichContext) {
    try {
      const enriched = await this.contextEnricher.enrichContext(schedule);
      threadMessages = enriched.threadMessages || [];
      additionalPayload = enriched.additionalPayload || {};
      if (threadMessages.length > 0) {
        logger.debug(
          `[Scheduler] Context enricher provided ${threadMessages.length} thread messages`
        );
      }
    } catch (error) {
      logger.warn(
        `[Scheduler] Context enrichment failed: ${error instanceof Error ? error.message : error}`
      );
    }
  }
  // For recurring reminders, feed the previous run's response back in so the
  // model can build on it.
  let contextualReminderText = reminderText;
  if (schedule.isRecurring && schedule.previousResponse) {
    contextualReminderText = `${reminderText}

---
**Previous Response (for context):**
${schedule.previousResponse}
---

Please provide an updated response based on the reminder above. You may reference or build upon the previous response if relevant.`;
  }
  const conversationData = {
    current: {
      user: schedule.creatorName || schedule.creatorId,
      text: contextualReminderText
    },
    messages: threadMessages.length > 0 ? [
      ...threadMessages,
      { user: schedule.creatorName || schedule.creatorId, text: contextualReminderText }
    ] : [{ user: schedule.creatorName || schedule.creatorId, text: contextualReminderText }]
  };
  // Synthetic message-event payload, shaped like an inbound Slack message.
  const syntheticPayload = {
    event: {
      type: "message",
      subtype: "scheduled_reminder",
      text: contextualReminderText,
      user: schedule.creatorId,
      channel,
      ts: String(Date.now() / 1e3),
      thread_ts: threadId
    },
    // Include both for compatibility (slack_conversation for existing checks, conversation for generic)
    slack_conversation: conversationData,
    conversation: conversationData,
    // Include schedule context for any checks that need it
    schedule: {
      id: schedule.id,
      isReminder: true,
      creatorId: schedule.creatorId,
      creatorContext: schedule.creatorContext,
      previousResponse: schedule.previousResponse
    },
    // Merge any additional frontend-specific payload
    ...additionalPayload
  };
  const endpoint = this.contextEnricher?.getWebhookEndpoint?.() || this.visorConfig.slack?.endpoint || "/bots/slack/support";
  const webhookData = /* @__PURE__ */ new Map();
  webhookData.set(endpoint, syntheticPayload);
  // Optional pre-run hook from the enricher; failures are non-fatal.
  if (this.contextEnricher?.prepareExecution) {
    try {
      await this.contextEnricher.prepareExecution(schedule, reminderText);
    } catch (error) {
      logger.warn(
        `[Scheduler] Execution preparation failed: ${error instanceof Error ? error.message : error}`
      );
    }
  }
  // Best-effort credential refresh; ignore failures (module may be absent).
  try {
    const { refreshGitHubCredentials } = await import("./github-auth-UPBBBOME.mjs");
    await refreshGitHubCredentials();
  } catch {
  }
  const {
    engine: runEngine,
    config: cfgForRun,
    responseRef
  } = this.prepareExecution(schedule, reminderText);
  try {
    await runEngine.executeChecks({
      checks: allChecks,
      showDetails: true,
      outputFormat: "json",
      config: cfgForRun,
      webhookContext: { webhookData, eventType: "schedule" },
      debug: process.env.VISOR_DEBUG === "true"
    });
    // Persist the captured response so the next recurrence can reference it.
    if (schedule.isRecurring && responseRef.captured) {
      await this.store.updateAsync(schedule.id, { previousResponse: responseRef.captured });
      logger.info(
        `[Scheduler] Saved previousResponse for recurring schedule ${schedule.id} (${responseRef.captured.length} chars)`
      );
    }
    return {
      message: "Reminder processed through pipeline",
      type: "pipeline_executed",
      reminderText,
      capturedResponse: responseRef.captured
    };
  } catch (error) {
    logger.error(
      `[Scheduler] Failed to run reminder through pipeline: ${error instanceof Error ? error.message : error}`
    );
    // Degrade gracefully: deliver the raw reminder text instead of failing.
    return { message: reminderText, type: "simple_reminder" };
  }
}
|
|
14254
|
+
/**
|
|
14255
|
+
* Handle schedule execution failure
|
|
14256
|
+
*/
|
|
14257
|
+
async handleScheduleFailure(schedule, error) {
|
|
14258
|
+
const errorMsg = error instanceof Error ? error.message : "Unknown error";
|
|
14259
|
+
logger.error(`[Scheduler] Schedule ${schedule.id} failed: ${errorMsg}`);
|
|
14260
|
+
const newFailureCount = schedule.failureCount + 1;
|
|
14261
|
+
await this.store.updateAsync(schedule.id, {
|
|
14262
|
+
failureCount: newFailureCount,
|
|
14263
|
+
lastError: errorMsg
|
|
14264
|
+
});
|
|
14265
|
+
if (newFailureCount >= 3) {
|
|
14266
|
+
await this.store.updateAsync(schedule.id, { status: "failed" });
|
|
14267
|
+
const job = this.cronJobs.get(schedule.id);
|
|
14268
|
+
if (job) {
|
|
14269
|
+
job.stop();
|
|
14270
|
+
this.cronJobs.delete(schedule.id);
|
|
14271
|
+
}
|
|
14272
|
+
logger.warn(`[Scheduler] Schedule ${schedule.id} paused after 3 consecutive failures`);
|
|
14273
|
+
}
|
|
14274
|
+
}
|
|
14275
|
+
/**
|
|
14276
|
+
* Send execution result to the appropriate output adapter
|
|
14277
|
+
*/
|
|
14278
|
+
async sendResult(schedule, result) {
|
|
14279
|
+
const outputType = schedule.outputContext?.type || "none";
|
|
14280
|
+
const adapter = this.outputAdapters.get(outputType);
|
|
14281
|
+
if (!adapter) {
|
|
14282
|
+
if (outputType !== "none") {
|
|
14283
|
+
logger.warn(`[Scheduler] No output adapter registered for type: ${outputType}`);
|
|
14284
|
+
}
|
|
14285
|
+
return;
|
|
14286
|
+
}
|
|
14287
|
+
try {
|
|
14288
|
+
await adapter.sendResult(schedule, result);
|
|
14289
|
+
} catch (error) {
|
|
14290
|
+
logger.error(
|
|
14291
|
+
`[Scheduler] Failed to send result via ${outputType} adapter: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
14292
|
+
);
|
|
14293
|
+
}
|
|
14294
|
+
}
|
|
14295
|
+
/**
 * Check if scheduler is running.
 * @returns {boolean} the scheduler's current running flag
 */
isRunning() {
  return this.running;
}
|
|
14301
|
+
/**
|
|
14302
|
+
* Get scheduler stats
|
|
14303
|
+
*/
|
|
14304
|
+
async getStats() {
|
|
14305
|
+
return {
|
|
14306
|
+
running: this.running,
|
|
14307
|
+
activeCronJobs: this.cronJobs.size,
|
|
14308
|
+
pendingOneTimeSchedules: this.oneTimeTimeouts.size,
|
|
14309
|
+
storeStats: await this.store.getStatsAsync()
|
|
14310
|
+
};
|
|
14311
|
+
}
|
|
14312
|
+
};
|
|
14313
|
+
}
|
|
14314
|
+
});
|
|
14315
|
+
|
|
14316
|
+
// src/scheduler/schedule-tool.ts
|
|
14317
|
+
/**
 * Test a value against a simple glob pattern where `*` matches any run of
 * characters and `?` matches a single character. All other regex
 * metacharacters in the pattern are treated literally.
 */
function matchGlobPattern(pattern, value) {
  let escaped = pattern.replace(/[.+^${}()|[\]\\]/g, "\\$&");
  escaped = escaped.replace(/\*/g, ".*").replace(/\?/g, ".");
  const matcher = new RegExp(`^${escaped}$`);
  return matcher.test(value);
}
|
|
14321
|
+
/**
 * Decide whether a workflow name is permitted by glob allow/deny lists.
 * Deny patterns take precedence; when an allow list is present the workflow
 * must match one of its patterns; with neither list, everything is allowed.
 */
function isWorkflowAllowedByPatterns(workflow, allowedPatterns, deniedPatterns) {
  if (deniedPatterns && deniedPatterns.length > 0) {
    const deniedBy = deniedPatterns.find((pattern) => matchGlobPattern(pattern, workflow));
    if (deniedBy) {
      return {
        allowed: false,
        reason: `Workflow "${workflow}" matches denied pattern "${deniedBy}"`
      };
    }
  }
  if (allowedPatterns && allowedPatterns.length > 0) {
    const isAllowed = allowedPatterns.some((pattern) => matchGlobPattern(pattern, workflow));
    if (isAllowed) {
      return { allowed: true };
    }
    return {
      allowed: false,
      reason: `Workflow "${workflow}" does not match any allowed patterns: ${allowedPatterns.join(", ")}`
    };
  }
  return { allowed: true };
}
|
|
14345
|
+
/**
 * Validate that the caller may create a schedule of the given type for the
 * given workflow. Enforcement order: context-locked schedule type, then
 * per-type permission flags, then workflow allow/deny glob patterns.
 */
function checkSchedulePermissions(context2, workflow, requestedScheduleType) {
  const permissions = context2.permissions;
  const scheduleType = requestedScheduleType || context2.scheduleType || "personal";
  if (context2.allowedScheduleType && scheduleType !== context2.allowedScheduleType) {
    const contextNames = {
      personal: "a direct message (DM)",
      channel: "a channel",
      dm: "a group DM"
    };
    const targetNames = {
      personal: "personal",
      channel: "channel",
      dm: "group"
    };
    return {
      allowed: false,
      reason: `From ${contextNames[context2.allowedScheduleType]}, you can only create ${targetNames[context2.allowedScheduleType]} schedules. To create a ${targetNames[scheduleType]} schedule, please use the appropriate context.`
    };
  }
  if (!permissions) {
    return { allowed: true };
  }
  // Each schedule type maps to its permission flag plus the denial message
  // shown when that flag is explicitly false.
  const typeRules = {
    personal: [permissions.allowPersonal, "Personal schedules are not allowed in this configuration"],
    channel: [permissions.allowChannel, "Channel schedules are not allowed in this configuration"],
    dm: [permissions.allowDm, "DM schedules are not allowed in this configuration"]
  };
  const rule = typeRules[scheduleType];
  if (rule && rule[0] === false) {
    return { allowed: false, reason: rule[1] };
  }
  return isWorkflowAllowedByPatterns(
    workflow,
    permissions.allowedWorkflows,
    permissions.deniedWorkflows
  );
}
|
|
14399
|
+
/**
 * Render a one-line summary of a schedule:
 * `<id8>` - "<name, max 30 chars>" - <when> (→ <output>)[ (<status>)]
 * Recurring schedules show their original expression; one-shots show the
 * localized run time.
 */
function formatSchedule(schedule) {
  const when = schedule.isRecurring
    ? schedule.originalExpression
    : new Date(schedule.runAt).toLocaleString();
  const statusSuffix = schedule.status === "active" ? "" : ` (${schedule.status})`;
  const rawName = schedule.workflow || schedule.workflowInputs?.text || "scheduled message";
  const shownName = rawName.length > 30 ? `${rawName.substring(0, 27)}...` : rawName;
  const outputType = schedule.outputContext?.type || "none";
  const shortId = schedule.id.substring(0, 8);
  return `\`${shortId}\` - "${shownName}" - ${when} (\u2192 ${outputType})${statusSuffix}`;
}
|
|
14407
|
+
/**
 * Build the markdown confirmation shown after creating a schedule.
 * Recurring schedules display the original cron-like expression plus the
 * computed next run; one-time schedules display the absolute run time.
 */
function formatCreateConfirmation(schedule) {
  const outputDesc = schedule.outputContext?.type ? `${schedule.outputContext.type}${schedule.outputContext.target ? `:${schedule.outputContext.target}` : ""}` : "none";
  const displayName = schedule.workflow || schedule.workflowInputs?.text || "scheduled message";
  const kindLabel = schedule.workflow ? "Workflow" : "Reminder";
  const shortId = schedule.id.substring(0, 8);
  // Shared en-US date style for both branches.
  const dateStyle = {
    weekday: "long",
    month: "short",
    day: "numeric",
    hour: "numeric",
    minute: "2-digit"
  };
  if (schedule.isRecurring) {
    const nextRun = schedule.nextRunAt
      ? new Date(schedule.nextRunAt).toLocaleString("en-US", dateStyle)
      : "calculating...";
    return `**Schedule created!**

**${kindLabel}**: ${displayName}
**When**: ${schedule.originalExpression}
**Output**: ${outputDesc}
**Next run**: ${nextRun}

ID: \`${shortId}\``;
  }
  const when = new Date(schedule.runAt).toLocaleString("en-US", dateStyle);
  return `**Schedule created!**

**${kindLabel}**: ${displayName}
**When**: ${when}
**Output**: ${outputDesc}

ID: \`${shortId}\``;
}
|
|
14443
|
+
/**
 * Render a user's schedules as a numbered markdown list with cancel/pause
 * hints; an empty list yields creation usage hints instead.
 */
function formatScheduleList(schedules) {
  if (schedules.length === 0) {
    return `You don't have any active schedules.

To create one: "remind me every Monday at 9am to check PRs" or "schedule %daily-report every Monday at 9am"`;
  }
  const numbered = schedules.map((entry, index) => `${index + 1}. ${formatSchedule(entry)}`);
  return `**Your active schedules:**

${numbered.join("\n")}

To cancel: "cancel schedule <id>"
To pause: "pause schedule <id>"`;
}
|
|
14457
|
+
/**
 * Entry point for the schedule tool: lazily initializes the singleton
 * ScheduleStore, then routes the request to the handler for args.action
 * (create, list, cancel, pause, resume).
 */
async function handleScheduleAction(args, context2) {
  const store = ScheduleStore.getInstance();
  if (!store.isInitialized()) {
    await store.initialize();
  }
  if (args.action === "create") {
    return handleCreate(args, context2, store);
  }
  if (args.action === "list") {
    return handleList(context2, store);
  }
  if (args.action === "cancel") {
    return handleCancel(args, context2, store);
  }
  if (args.action === "pause") {
    return handlePauseResume(args, context2, store, "paused");
  }
  if (args.action === "resume") {
    return handlePauseResume(args, context2, store, "active");
  }
  return {
    success: false,
    message: `Unknown action: ${args.action}`,
    error: `Supported actions: create, list, cancel, pause, resume`
  };
}
|
|
14481
|
+
/**
 * Create a new schedule from schedule-tool arguments.
 *
 * Validation order: content (reminder_text or workflow), timing (cron or
 * run_at), cron syntax, run_at parse/future checks, target_type/target_id
 * pairing, permission checks, and workflow existence. On success the
 * schedule is persisted and a formatted confirmation is returned.
 *
 * @param args - tool arguments for the "create" action
 * @param context2 - caller identity/permission context
 * @param store - the schedule store
 * @returns {success, message, schedule?} on success, {success:false, message, error} otherwise
 */
async function handleCreate(args, context2, store) {
  if (!args.reminder_text && !args.workflow) {
    return {
      success: false,
      message: "Missing reminder content",
      error: "Please specify either reminder_text (what to say) or workflow (what to run)"
    };
  }
  if (!args.cron && !args.run_at) {
    return {
      success: false,
      message: "Missing schedule timing",
      error: 'Please specify either cron (for recurring, e.g., "* * * * *") or run_at (ISO timestamp for one-time)'
    };
  }
  if (args.cron && !isValidCronExpression(args.cron)) {
    return {
      success: false,
      message: "Invalid cron expression",
      error: `"${args.cron}" is not a valid cron expression. Format: "minute hour day-of-month month day-of-week"`
    };
  }
  // Parse and sanity-check the optional one-time run timestamp.
  let runAtTimestamp;
  if (args.run_at) {
    const parsed = new Date(args.run_at);
    if (isNaN(parsed.getTime())) {
      return {
        success: false,
        message: "Invalid run_at timestamp",
        error: `"${args.run_at}" is not a valid ISO 8601 timestamp`
      };
    }
    if (parsed.getTime() <= Date.now()) {
      return {
        success: false,
        message: "run_at must be in the future",
        error: "Cannot schedule a reminder in the past"
      };
    }
    runAtTimestamp = parsed.getTime();
  }
  if (args.target_type && !args.target_id) {
    return {
      success: false,
      message: "Missing target_id",
      error: `target_type "${args.target_type}" requires a target_id (channel ID, user ID, or thread_ts)`
    };
  }
  // Derive the schedule type used for permission checks from the target.
  let scheduleType = "personal";
  if (args.target_type === "channel") {
    scheduleType = "channel";
  } else if (args.target_type === "user") {
    scheduleType = "dm";
  }
  const workflowName = args.workflow || "reminder";
  const permissionCheck = checkSchedulePermissions(context2, workflowName, scheduleType);
  if (!permissionCheck.allowed) {
    logger.warn(
      `[ScheduleTool] Permission denied for user ${context2.userId}: ${permissionCheck.reason}`
    );
    return {
      success: false,
      message: "Permission denied",
      error: permissionCheck.reason || "You do not have permission to create this schedule"
    };
  }
  if (args.workflow && context2.availableWorkflows && !context2.availableWorkflows.includes(args.workflow)) {
    return {
      success: false,
      message: `Workflow "${args.workflow}" not found`,
      error: `Available workflows: ${context2.availableWorkflows.slice(0, 5).join(", ")}${context2.availableWorkflows.length > 5 ? "..." : ""}`
    };
  }
  try {
    const timezone = context2.timezone || "UTC";
    const isRecurring = args.is_recurring === true || !!args.cron;
    // Build the delivery context only when an explicit target was given.
    let outputContext;
    if (args.target_type && args.target_id) {
      outputContext = {
        type: "slack",
        // Currently only Slack supported
        target: args.target_id,
        // Channel ID (C... or D...)
        threadId: args.thread_ts,
        // Thread timestamp for replies
        metadata: {
          targetType: args.target_type,
          reminderText: args.reminder_text
        }
      };
    }
    // Compute the first fire time: from the cron expression when recurring,
    // otherwise the validated one-time timestamp.
    let nextRunAt;
    if (isRecurring && args.cron) {
      nextRunAt = getNextRunTime(args.cron, timezone).getTime();
    } else if (runAtTimestamp) {
      nextRunAt = runAtTimestamp;
    }
    const schedule = await store.createAsync({
      creatorId: context2.userId,
      creatorContext: context2.contextType,
      creatorName: context2.userName,
      timezone,
      schedule: args.cron || "",
      runAt: runAtTimestamp,
      isRecurring,
      originalExpression: args.original_expression || args.cron || args.run_at || "",
      workflow: args.workflow,
      // Only set if explicitly provided
      workflowInputs: args.workflow_inputs || (args.reminder_text ? { text: args.reminder_text } : void 0),
      outputContext,
      nextRunAt
    });
    const displayText = args.reminder_text || args.workflow || "scheduled task";
    logger.info(
      `[ScheduleTool] Created schedule ${schedule.id} for user ${context2.userId}: "${displayText}"`
    );
    return {
      success: true,
      message: formatCreateConfirmation(schedule),
      schedule
    };
  } catch (error) {
    const errorMsg = error instanceof Error ? error.message : "Unknown error";
    logger.warn(`[ScheduleTool] Failed to create schedule: ${errorMsg}`);
    return {
      success: false,
      message: `Failed to create schedule: ${errorMsg}`,
      error: errorMsg
    };
  }
}
|
|
14612
|
+
/**
 * List the caller's schedules (excluding completed ones), optionally
 * filtered to the schedule type permitted in the current context.
 */
async function handleList(context2, store) {
  const allUserSchedules = await store.getByCreatorAsync(context2.userId);
  const schedules = allUserSchedules.filter((s) => s.status !== "completed");
  // Classify a schedule by where its output goes: Slack channel targets
  // (#name or C... IDs) are "channel", user targets (@name or U... IDs) are
  // "dm"; anything else — including no output or type "none" — is "personal".
  const classify = (s) => {
    if (s.outputContext?.type === "slack") {
      const target = s.outputContext?.target || "";
      if (target.startsWith("#") || target.match(/^C[A-Z0-9]+$/)) {
        return "channel";
      }
      if (target.startsWith("@") || target.match(/^U[A-Z0-9]+$/)) {
        return "dm";
      }
    }
    return "personal";
  };
  const filteredSchedules = context2.allowedScheduleType
    ? schedules.filter((s) => classify(s) === context2.allowedScheduleType)
    : schedules;
  return {
    success: true,
    message: formatScheduleList(filteredSchedules),
    schedules: filteredSchedules
  };
}
|
|
14640
|
+
async function handleCancel(args, context2, store) {
|
|
14641
|
+
let schedule;
|
|
14642
|
+
if (args.schedule_id) {
|
|
14643
|
+
const userSchedules = await store.getByCreatorAsync(context2.userId);
|
|
14644
|
+
schedule = userSchedules.find((s) => s.id === args.schedule_id);
|
|
14645
|
+
if (!schedule) {
|
|
14646
|
+
schedule = userSchedules.find((s) => s.id.startsWith(args.schedule_id));
|
|
14647
|
+
}
|
|
14648
|
+
}
|
|
14649
|
+
if (!schedule) {
|
|
14650
|
+
return {
|
|
14651
|
+
success: false,
|
|
14652
|
+
message: "Schedule not found",
|
|
14653
|
+
error: `Could not find schedule with ID "${args.schedule_id}" in your schedules. Use "list my schedules" to see your schedules.`
|
|
14654
|
+
};
|
|
14655
|
+
}
|
|
14656
|
+
if (schedule.creatorId !== context2.userId) {
|
|
14657
|
+
logger.warn(
|
|
14658
|
+
`[ScheduleTool] Attempted cross-user schedule cancellation: ${context2.userId} tried to cancel ${schedule.id} owned by ${schedule.creatorId}`
|
|
14659
|
+
);
|
|
14660
|
+
return {
|
|
14661
|
+
success: false,
|
|
14662
|
+
message: "Not your schedule",
|
|
14663
|
+
error: "You can only cancel your own schedules."
|
|
14664
|
+
};
|
|
14665
|
+
}
|
|
14666
|
+
await store.deleteAsync(schedule.id);
|
|
14667
|
+
const scheduler = getScheduler();
|
|
14668
|
+
if (scheduler) {
|
|
14669
|
+
scheduler.cancelSchedule(schedule.id);
|
|
14670
|
+
}
|
|
14671
|
+
logger.info(`[ScheduleTool] Cancelled schedule ${schedule.id} for user ${context2.userId}`);
|
|
14672
|
+
return {
|
|
14673
|
+
success: true,
|
|
14674
|
+
message: `**Schedule cancelled!**
|
|
14675
|
+
|
|
14676
|
+
Was: "${schedule.workflow}" scheduled for ${schedule.originalExpression}`
|
|
14677
|
+
};
|
|
14678
|
+
}
|
|
14679
|
+
async function handlePauseResume(args, context2, store, newStatus) {
|
|
14680
|
+
if (!args.schedule_id) {
|
|
14681
|
+
return {
|
|
14682
|
+
success: false,
|
|
14683
|
+
message: "Missing schedule ID",
|
|
14684
|
+
error: "Please specify which schedule to pause/resume."
|
|
14685
|
+
};
|
|
14686
|
+
}
|
|
14687
|
+
const userSchedules = await store.getByCreatorAsync(context2.userId);
|
|
14688
|
+
let schedule = userSchedules.find((s) => s.id === args.schedule_id);
|
|
14689
|
+
if (!schedule) {
|
|
14690
|
+
schedule = userSchedules.find((s) => s.id.startsWith(args.schedule_id));
|
|
14691
|
+
}
|
|
14692
|
+
if (!schedule) {
|
|
14693
|
+
return {
|
|
14694
|
+
success: false,
|
|
14695
|
+
message: "Schedule not found",
|
|
14696
|
+
error: `Could not find schedule with ID "${args.schedule_id}" in your schedules.`
|
|
14697
|
+
};
|
|
14698
|
+
}
|
|
14699
|
+
if (schedule.creatorId !== context2.userId) {
|
|
14700
|
+
logger.warn(
|
|
14701
|
+
`[ScheduleTool] Attempted cross-user schedule modification: ${context2.userId} tried to modify ${schedule.id} owned by ${schedule.creatorId}`
|
|
14702
|
+
);
|
|
14703
|
+
return {
|
|
14704
|
+
success: false,
|
|
14705
|
+
message: "Not your schedule",
|
|
14706
|
+
error: "You can only modify your own schedules."
|
|
14707
|
+
};
|
|
14708
|
+
}
|
|
14709
|
+
const updated = await store.updateAsync(schedule.id, { status: newStatus });
|
|
14710
|
+
const action = newStatus === "paused" ? "paused" : "resumed";
|
|
14711
|
+
logger.info(`[ScheduleTool] ${action} schedule ${schedule.id} for user ${context2.userId}`);
|
|
14712
|
+
return {
|
|
14713
|
+
success: true,
|
|
14714
|
+
message: `**Schedule ${action}!**
|
|
14715
|
+
|
|
14716
|
+
"${schedule.workflow}" - ${schedule.originalExpression}`,
|
|
14717
|
+
schedule: updated
|
|
14718
|
+
};
|
|
14719
|
+
}
|
|
14720
|
+
function getScheduleToolDefinition() {
|
|
14721
|
+
return {
|
|
14722
|
+
name: "schedule",
|
|
14723
|
+
description: `Schedule, list, and manage reminders or workflow executions.
|
|
14724
|
+
|
|
14725
|
+
YOU (the AI) must extract and structure all scheduling parameters. Do NOT pass natural language time expressions - convert them to cron or ISO timestamps.
|
|
14726
|
+
|
|
14727
|
+
CRITICAL WORKFLOW RULE:
|
|
14728
|
+
- To schedule a WORKFLOW, the user MUST use a '%' prefix (e.g., "schedule %my-workflow daily").
|
|
14729
|
+
- If the '%' prefix is present, extract the word following it as the 'workflow' parameter (without the '%').
|
|
14730
|
+
- If the '%' prefix is NOT present, the request is a simple text reminder. The ENTIRE user request (excluding the schedule expression) MUST be placed in the 'reminder_text' parameter.
|
|
14731
|
+
- DO NOT guess or infer a workflow name from a user's request without the '%' prefix.
|
|
14732
|
+
|
|
14733
|
+
ACTIONS:
|
|
14734
|
+
- create: Schedule a new reminder or workflow
|
|
14735
|
+
- list: Show user's active schedules
|
|
14736
|
+
- cancel: Remove a schedule by ID
|
|
14737
|
+
- pause/resume: Temporarily disable/enable a schedule
|
|
14738
|
+
|
|
14739
|
+
FOR CREATE ACTION - Extract these from user's request:
|
|
14740
|
+
1. WHAT:
|
|
14741
|
+
- If user says "schedule %some-workflow ...", populate 'workflow' with "some-workflow".
|
|
14742
|
+
- Otherwise, populate 'reminder_text' with the user's full request text.
|
|
14743
|
+
2. WHERE: Use the CURRENT channel from context
|
|
14744
|
+
- target_id: The channel ID from context (C... for channels, D... for DMs)
|
|
14745
|
+
- target_type: "channel" for public/private channels, "dm" for direct messages
|
|
14746
|
+
- ONLY use target_type="thread" with thread_ts if user is INSIDE a thread
|
|
14747
|
+
- When NOT in a thread, reminders post as NEW messages (not thread replies)
|
|
14748
|
+
3. WHEN: Either cron (for recurring) OR run_at (ISO 8601 for one-time)
|
|
14749
|
+
- Recurring: Generate cron expression (minute hour day-of-month month day-of-week)
|
|
14750
|
+
- One-time: Generate ISO 8601 timestamp
|
|
14751
|
+
|
|
14752
|
+
CRON EXAMPLES:
|
|
14753
|
+
- "every minute" \u2192 cron: "* * * * *"
|
|
14754
|
+
- "every hour" \u2192 cron: "0 * * * *"
|
|
14755
|
+
- "every day at 9am" \u2192 cron: "0 9 * * *"
|
|
14756
|
+
- "every Monday at 9am" \u2192 cron: "0 9 * * 1"
|
|
14757
|
+
- "weekdays at 8:30am" \u2192 cron: "30 8 * * 1-5"
|
|
14758
|
+
- "every 5 minutes" \u2192 cron: "*/5 * * * *"
|
|
14759
|
+
|
|
14760
|
+
ONE-TIME EXAMPLES:
|
|
14761
|
+
- "in 2 hours" \u2192 run_at: "<ISO timestamp 2 hours from now>"
|
|
14762
|
+
- "tomorrow at 3pm" \u2192 run_at: "2026-02-08T15:00:00Z"
|
|
14763
|
+
|
|
14764
|
+
USAGE EXAMPLES:
|
|
14765
|
+
|
|
14766
|
+
User in DM: "remind me to check builds every day at 9am"
|
|
14767
|
+
\u2192 {
|
|
14768
|
+
"action": "create",
|
|
14769
|
+
"reminder_text": "check builds",
|
|
14770
|
+
"is_recurring": true,
|
|
14771
|
+
"cron": "0 9 * * *",
|
|
14772
|
+
"target_type": "dm",
|
|
14773
|
+
"target_id": "<DM channel ID from context, e.g., D09SZABNLG3>",
|
|
14774
|
+
"original_expression": "every day at 9am"
|
|
14775
|
+
}
|
|
14776
|
+
|
|
14777
|
+
User in #security channel: "schedule %security-scan every Monday at 10am"
|
|
14778
|
+
\u2192 {
|
|
14779
|
+
"action": "create",
|
|
14780
|
+
"workflow": "security-scan",
|
|
14781
|
+
"is_recurring": true,
|
|
14782
|
+
"cron": "0 10 * * 1",
|
|
14783
|
+
"target_type": "channel",
|
|
14784
|
+
"target_id": "<channel ID from context, e.g., C05ABC123>",
|
|
14785
|
+
"original_expression": "every Monday at 10am"
|
|
14786
|
+
}
|
|
14787
|
+
|
|
14788
|
+
User in #security channel: "run security-scan every Monday at 10am" (NO % prefix!)
|
|
14789
|
+
\u2192 {
|
|
14790
|
+
"action": "create",
|
|
14791
|
+
"reminder_text": "run security-scan every Monday at 10am",
|
|
14792
|
+
"is_recurring": true,
|
|
14793
|
+
"cron": "0 10 * * 1",
|
|
14794
|
+
"target_type": "channel",
|
|
14795
|
+
"target_id": "<channel ID from context, e.g., C05ABC123>",
|
|
14796
|
+
"original_expression": "every Monday at 10am"
|
|
14797
|
+
}
|
|
14798
|
+
|
|
14799
|
+
User in DM: "remind me in 2 hours to review the PR"
|
|
14800
|
+
\u2192 {
|
|
14801
|
+
"action": "create",
|
|
14802
|
+
"reminder_text": "review the PR",
|
|
14803
|
+
"is_recurring": false,
|
|
14804
|
+
"run_at": "2026-02-07T18:00:00Z",
|
|
14805
|
+
"target_type": "dm",
|
|
14806
|
+
"target_id": "<DM channel ID from context>",
|
|
14807
|
+
"original_expression": "in 2 hours"
|
|
14808
|
+
}
|
|
14809
|
+
|
|
14810
|
+
User inside a thread: "remind me about this tomorrow"
|
|
14811
|
+
\u2192 {
|
|
14812
|
+
"action": "create",
|
|
14813
|
+
"reminder_text": "Check this thread",
|
|
14814
|
+
"is_recurring": false,
|
|
14815
|
+
"run_at": "2026-02-08T09:00:00Z",
|
|
14816
|
+
"target_type": "thread",
|
|
14817
|
+
"target_id": "<channel ID>",
|
|
14818
|
+
"thread_ts": "<thread_ts from context>",
|
|
14819
|
+
"original_expression": "tomorrow"
|
|
14820
|
+
}
|
|
14821
|
+
|
|
14822
|
+
User: "list my schedules"
|
|
14823
|
+
\u2192 { "action": "list" }
|
|
14824
|
+
|
|
14825
|
+
User: "cancel schedule abc123"
|
|
14826
|
+
\u2192 { "action": "cancel", "schedule_id": "abc123" }`,
|
|
14827
|
+
inputSchema: {
|
|
14828
|
+
type: "object",
|
|
14829
|
+
properties: {
|
|
14830
|
+
action: {
|
|
14831
|
+
type: "string",
|
|
14832
|
+
enum: ["create", "list", "cancel", "pause", "resume"],
|
|
14833
|
+
description: "What to do: create new, list existing, cancel/pause/resume by ID"
|
|
14834
|
+
},
|
|
14835
|
+
// WHAT to do
|
|
14836
|
+
reminder_text: {
|
|
14837
|
+
type: "string",
|
|
14838
|
+
description: "For create: the message/reminder text to send when triggered"
|
|
14839
|
+
},
|
|
14840
|
+
workflow: {
|
|
14841
|
+
type: "string",
|
|
14842
|
+
description: 'For create: workflow ID to run. ONLY populate this if the user used the % prefix (e.g., "%my-workflow"). Extract the name without the % symbol. If no % prefix, use reminder_text instead.'
|
|
14843
|
+
},
|
|
14844
|
+
workflow_inputs: {
|
|
14845
|
+
type: "object",
|
|
14846
|
+
description: "For create: optional inputs to pass to the workflow"
|
|
14847
|
+
},
|
|
14848
|
+
// WHERE to send
|
|
14849
|
+
target_type: {
|
|
14850
|
+
type: "string",
|
|
14851
|
+
enum: ["channel", "dm", "thread", "user"],
|
|
14852
|
+
description: "For create: where to send output. channel=public/private channel, dm=DM to self (current DM channel), user=DM to specific user, thread=reply in current thread"
|
|
14853
|
+
},
|
|
14854
|
+
target_id: {
|
|
14855
|
+
type: "string",
|
|
14856
|
+
description: "For create: Slack channel ID. Channels start with C, DMs start with D. Always use the channel ID from the current context."
|
|
14857
|
+
},
|
|
14858
|
+
thread_ts: {
|
|
14859
|
+
type: "string",
|
|
14860
|
+
description: "For create with target_type=thread: the thread timestamp to reply to. Get this from the current thread context."
|
|
14861
|
+
},
|
|
14862
|
+
// WHEN to run
|
|
14863
|
+
is_recurring: {
|
|
14864
|
+
type: "boolean",
|
|
14865
|
+
description: "For create: true for recurring schedules (cron), false for one-time (run_at)"
|
|
14866
|
+
},
|
|
14867
|
+
cron: {
|
|
14868
|
+
type: "string",
|
|
14869
|
+
description: 'For create recurring: cron expression (minute hour day-of-month month day-of-week). Examples: "0 9 * * *" (daily 9am), "* * * * *" (every minute), "0 9 * * 1" (Mondays 9am)'
|
|
14870
|
+
},
|
|
14871
|
+
run_at: {
|
|
14872
|
+
type: "string",
|
|
14873
|
+
description: 'For create one-time: ISO 8601 timestamp when to run (e.g., "2026-02-07T15:00:00Z")'
|
|
14874
|
+
},
|
|
14875
|
+
original_expression: {
|
|
14876
|
+
type: "string",
|
|
14877
|
+
description: "For create: the original natural language expression from user (for display only)"
|
|
14878
|
+
},
|
|
14879
|
+
// For cancel/pause/resume
|
|
14880
|
+
schedule_id: {
|
|
14881
|
+
type: "string",
|
|
14882
|
+
description: "For cancel/pause/resume: the schedule ID to act on (first 8 chars is enough)"
|
|
14883
|
+
}
|
|
14884
|
+
},
|
|
14885
|
+
required: ["action"]
|
|
14886
|
+
},
|
|
14887
|
+
exec: ""
|
|
14888
|
+
// Not used - this tool has a custom handler
|
|
14889
|
+
};
|
|
14890
|
+
}
|
|
14891
|
+
function isScheduleTool(toolName) {
|
|
14892
|
+
return toolName === "schedule";
|
|
14893
|
+
}
|
|
14894
|
+
function determineScheduleType(contextType, outputType, outputTarget) {
|
|
14895
|
+
if (outputType === "slack" && outputTarget) {
|
|
14896
|
+
if (outputTarget.startsWith("#") || outputTarget.match(/^C[A-Z0-9]+$/)) {
|
|
14897
|
+
return "channel";
|
|
14898
|
+
}
|
|
14899
|
+
if (outputTarget.startsWith("@") || outputTarget.match(/^U[A-Z0-9]+$/)) {
|
|
14900
|
+
return "dm";
|
|
14901
|
+
}
|
|
14902
|
+
}
|
|
14903
|
+
if (contextType === "cli" || contextType.startsWith("github:")) {
|
|
14904
|
+
return "personal";
|
|
14905
|
+
}
|
|
14906
|
+
return "personal";
|
|
14907
|
+
}
|
|
14908
|
+
function slackChannelTypeToScheduleType(channelType) {
|
|
14909
|
+
switch (channelType) {
|
|
14910
|
+
case "channel":
|
|
14911
|
+
return "channel";
|
|
14912
|
+
case "group":
|
|
14913
|
+
return "dm";
|
|
14914
|
+
// Group DMs map to 'dm' schedule type
|
|
14915
|
+
case "dm":
|
|
14916
|
+
default:
|
|
14917
|
+
return "personal";
|
|
14918
|
+
}
|
|
14919
|
+
}
|
|
14920
|
+
function buildScheduleToolContext(sources, availableWorkflows, permissions, outputInfo) {
|
|
14921
|
+
if (sources.slackContext) {
|
|
14922
|
+
const contextType = `slack:${sources.slackContext.userId}`;
|
|
14923
|
+
const scheduleType = determineScheduleType(
|
|
14924
|
+
contextType,
|
|
14925
|
+
outputInfo?.outputType,
|
|
14926
|
+
outputInfo?.outputTarget
|
|
14927
|
+
);
|
|
14928
|
+
let allowedScheduleType;
|
|
14929
|
+
if (sources.slackContext.channelType) {
|
|
14930
|
+
allowedScheduleType = slackChannelTypeToScheduleType(sources.slackContext.channelType);
|
|
14931
|
+
}
|
|
14932
|
+
let finalScheduleType = scheduleType;
|
|
14933
|
+
if (!outputInfo?.outputType && sources.slackContext.channelType) {
|
|
14934
|
+
finalScheduleType = slackChannelTypeToScheduleType(sources.slackContext.channelType);
|
|
14935
|
+
}
|
|
14936
|
+
return {
|
|
14937
|
+
userId: sources.slackContext.userId,
|
|
14938
|
+
userName: sources.slackContext.userName,
|
|
14939
|
+
contextType,
|
|
14940
|
+
timezone: sources.slackContext.timezone,
|
|
14941
|
+
availableWorkflows,
|
|
14942
|
+
scheduleType: finalScheduleType,
|
|
14943
|
+
permissions,
|
|
14944
|
+
allowedScheduleType
|
|
14945
|
+
};
|
|
14946
|
+
}
|
|
14947
|
+
if (sources.githubContext) {
|
|
14948
|
+
return {
|
|
14949
|
+
userId: sources.githubContext.login,
|
|
14950
|
+
contextType: `github:${sources.githubContext.login}`,
|
|
14951
|
+
timezone: "UTC",
|
|
14952
|
+
// GitHub doesn't provide timezone
|
|
14953
|
+
availableWorkflows,
|
|
14954
|
+
scheduleType: "personal",
|
|
14955
|
+
permissions,
|
|
14956
|
+
allowedScheduleType: "personal"
|
|
14957
|
+
// GitHub context only allows personal schedules
|
|
14958
|
+
};
|
|
14959
|
+
}
|
|
14960
|
+
return {
|
|
14961
|
+
userId: sources.cliContext?.userId || process.env.USER || "cli-user",
|
|
14962
|
+
contextType: "cli",
|
|
14963
|
+
timezone: Intl.DateTimeFormat().resolvedOptions().timeZone || "UTC",
|
|
14964
|
+
availableWorkflows,
|
|
14965
|
+
scheduleType: "personal",
|
|
14966
|
+
permissions,
|
|
14967
|
+
allowedScheduleType: "personal"
|
|
14968
|
+
// CLI context only allows personal schedules
|
|
14969
|
+
};
|
|
14970
|
+
}
|
|
14971
|
+
var init_schedule_tool = __esm({
|
|
14972
|
+
"src/scheduler/schedule-tool.ts"() {
|
|
14973
|
+
init_schedule_store();
|
|
14974
|
+
init_schedule_parser();
|
|
14975
|
+
init_scheduler();
|
|
14976
|
+
init_logger();
|
|
12619
14977
|
}
|
|
12620
14978
|
});
|
|
12621
14979
|
|
|
@@ -13489,6 +15847,13 @@ var init_mcp_custom_sse_server = __esm({
|
|
|
13489
15847
|
// src/utils/tool-resolver.ts
|
|
13490
15848
|
function resolveTools(toolItems, globalTools, logPrefix = "[ToolResolver]") {
|
|
13491
15849
|
const tools = /* @__PURE__ */ new Map();
|
|
15850
|
+
const registry = WorkflowRegistry.getInstance();
|
|
15851
|
+
const registeredWorkflows = registry.list().map((w) => w.id);
|
|
15852
|
+
if (toolItems.some((item) => typeof item !== "string" && isWorkflowToolReference(item))) {
|
|
15853
|
+
logger.info(
|
|
15854
|
+
`${logPrefix} Resolving ${toolItems.length} tool items. WorkflowRegistry has ${registeredWorkflows.length} workflows: [${registeredWorkflows.join(", ")}]`
|
|
15855
|
+
);
|
|
15856
|
+
}
|
|
13492
15857
|
for (const item of toolItems) {
|
|
13493
15858
|
const workflowTool = resolveWorkflowToolFromItem(item);
|
|
13494
15859
|
if (workflowTool) {
|
|
@@ -13505,7 +15870,9 @@ function resolveTools(toolItems, globalTools, logPrefix = "[ToolResolver]") {
|
|
|
13505
15870
|
}
|
|
13506
15871
|
logger.warn(`${logPrefix} Tool '${item}' not found in global tools or workflow registry`);
|
|
13507
15872
|
} else if (isWorkflowToolReference(item)) {
|
|
13508
|
-
logger.warn(
|
|
15873
|
+
logger.warn(
|
|
15874
|
+
`${logPrefix} Workflow '${item.workflow}' referenced but not found in registry. Available: [${registeredWorkflows.join(", ")}]`
|
|
15875
|
+
);
|
|
13509
15876
|
}
|
|
13510
15877
|
}
|
|
13511
15878
|
if (tools.size === 0 && toolItems.length > 0 && !globalTools) {
|
|
@@ -13519,13 +15886,14 @@ var init_tool_resolver = __esm({
|
|
|
13519
15886
|
"src/utils/tool-resolver.ts"() {
|
|
13520
15887
|
"use strict";
|
|
13521
15888
|
init_workflow_tool_executor();
|
|
15889
|
+
init_workflow_registry();
|
|
13522
15890
|
init_logger();
|
|
13523
15891
|
}
|
|
13524
15892
|
});
|
|
13525
15893
|
|
|
13526
15894
|
// src/providers/ai-check-provider.ts
|
|
13527
|
-
import
|
|
13528
|
-
import
|
|
15895
|
+
import fs7 from "fs/promises";
|
|
15896
|
+
import path9 from "path";
|
|
13529
15897
|
var AICheckProvider;
|
|
13530
15898
|
var init_ai_check_provider = __esm({
|
|
13531
15899
|
"src/providers/ai-check-provider.ts"() {
|
|
@@ -13699,7 +16067,7 @@ var init_ai_check_provider = __esm({
|
|
|
13699
16067
|
const hasFileExtension = /\.[a-zA-Z0-9]{1,10}$/i.test(str);
|
|
13700
16068
|
const hasPathSeparators = /[\/\\]/.test(str);
|
|
13701
16069
|
const isRelativePath = /^\.{1,2}\//.test(str);
|
|
13702
|
-
const isAbsolutePath =
|
|
16070
|
+
const isAbsolutePath = path9.isAbsolute(str);
|
|
13703
16071
|
const hasTypicalFileChars = /^[a-zA-Z0-9._\-\/\\:~]+$/.test(str);
|
|
13704
16072
|
if (!(hasFileExtension || isRelativePath || isAbsolutePath || hasPathSeparators)) {
|
|
13705
16073
|
return false;
|
|
@@ -13709,14 +16077,14 @@ var init_ai_check_provider = __esm({
|
|
|
13709
16077
|
}
|
|
13710
16078
|
try {
|
|
13711
16079
|
let resolvedPath;
|
|
13712
|
-
if (
|
|
13713
|
-
resolvedPath =
|
|
16080
|
+
if (path9.isAbsolute(str)) {
|
|
16081
|
+
resolvedPath = path9.normalize(str);
|
|
13714
16082
|
} else {
|
|
13715
|
-
resolvedPath =
|
|
16083
|
+
resolvedPath = path9.resolve(process.cwd(), str);
|
|
13716
16084
|
}
|
|
13717
|
-
const
|
|
16085
|
+
const fs12 = __require("fs").promises;
|
|
13718
16086
|
try {
|
|
13719
|
-
const stat2 = await
|
|
16087
|
+
const stat2 = await fs12.stat(resolvedPath);
|
|
13720
16088
|
return stat2.isFile();
|
|
13721
16089
|
} catch {
|
|
13722
16090
|
return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
|
|
@@ -13733,14 +16101,14 @@ var init_ai_check_provider = __esm({
|
|
|
13733
16101
|
throw new Error("Prompt file must have .liquid extension");
|
|
13734
16102
|
}
|
|
13735
16103
|
let resolvedPath;
|
|
13736
|
-
if (
|
|
16104
|
+
if (path9.isAbsolute(promptPath)) {
|
|
13737
16105
|
resolvedPath = promptPath;
|
|
13738
16106
|
} else {
|
|
13739
|
-
resolvedPath =
|
|
16107
|
+
resolvedPath = path9.resolve(process.cwd(), promptPath);
|
|
13740
16108
|
}
|
|
13741
|
-
if (!
|
|
13742
|
-
const normalizedPath =
|
|
13743
|
-
const currentDir =
|
|
16109
|
+
if (!path9.isAbsolute(promptPath)) {
|
|
16110
|
+
const normalizedPath = path9.normalize(resolvedPath);
|
|
16111
|
+
const currentDir = path9.resolve(process.cwd());
|
|
13744
16112
|
if (!normalizedPath.startsWith(currentDir)) {
|
|
13745
16113
|
throw new Error("Invalid prompt file path: path traversal detected");
|
|
13746
16114
|
}
|
|
@@ -13749,7 +16117,7 @@ var init_ai_check_provider = __esm({
|
|
|
13749
16117
|
throw new Error("Invalid prompt file path: path traversal detected");
|
|
13750
16118
|
}
|
|
13751
16119
|
try {
|
|
13752
|
-
const promptContent = await
|
|
16120
|
+
const promptContent = await fs7.readFile(resolvedPath, "utf-8");
|
|
13753
16121
|
return promptContent;
|
|
13754
16122
|
} catch (error) {
|
|
13755
16123
|
throw new Error(
|
|
@@ -14322,10 +16690,41 @@ ${preview}`);
|
|
|
14322
16690
|
if (Object.keys(dynamicServers).length > 0) {
|
|
14323
16691
|
Object.assign(mcpServers, dynamicServers);
|
|
14324
16692
|
}
|
|
16693
|
+
try {
|
|
16694
|
+
const span = trace.getSpan(context.active());
|
|
16695
|
+
if (span) {
|
|
16696
|
+
span.addEvent("tool_setup.mcp_servers_js", {
|
|
16697
|
+
"tool_setup.server_count": Object.keys(dynamicServers).length,
|
|
16698
|
+
"tool_setup.server_names": Object.keys(dynamicServers).join(","),
|
|
16699
|
+
"tool_setup.workflow_entries": Object.entries(dynamicServers).filter(([, cfg]) => cfg?.workflow).map(([name, cfg]) => `${name}\u2192${cfg.workflow}`).join(",")
|
|
16700
|
+
});
|
|
16701
|
+
}
|
|
16702
|
+
} catch {
|
|
16703
|
+
}
|
|
14325
16704
|
} catch (error) {
|
|
14326
|
-
|
|
14327
|
-
|
|
14328
|
-
|
|
16705
|
+
const errMsg = error instanceof Error ? error.message : "Unknown error";
|
|
16706
|
+
logger.error(`[AICheckProvider] Failed to evaluate ai_mcp_servers_js: ${errMsg}`);
|
|
16707
|
+
try {
|
|
16708
|
+
const span = trace.getSpan(context.active());
|
|
16709
|
+
if (span) {
|
|
16710
|
+
span.addEvent("tool_setup.mcp_servers_js_error", {
|
|
16711
|
+
"tool_setup.error": errMsg
|
|
16712
|
+
});
|
|
16713
|
+
}
|
|
16714
|
+
} catch {
|
|
16715
|
+
}
|
|
16716
|
+
}
|
|
16717
|
+
} else if (mcpServersJsExpr && !_dependencyResults) {
|
|
16718
|
+
try {
|
|
16719
|
+
const span = trace.getSpan(context.active());
|
|
16720
|
+
if (span) {
|
|
16721
|
+
span.addEvent("tool_setup.mcp_servers_js_skipped", {
|
|
16722
|
+
"tool_setup.reason": "no_dependency_results",
|
|
16723
|
+
"tool_setup.has_expr": true,
|
|
16724
|
+
"tool_setup.has_deps": false
|
|
16725
|
+
});
|
|
16726
|
+
}
|
|
16727
|
+
} catch {
|
|
14329
16728
|
}
|
|
14330
16729
|
}
|
|
14331
16730
|
for (const serverConfig of Object.values(mcpServers)) {
|
|
@@ -14463,6 +16862,27 @@ ${preview}`);
|
|
|
14463
16862
|
}
|
|
14464
16863
|
try {
|
|
14465
16864
|
const customTools = this.loadCustomTools(customToolsToLoad, config);
|
|
16865
|
+
try {
|
|
16866
|
+
const span = trace.getSpan(context.active());
|
|
16867
|
+
if (span) {
|
|
16868
|
+
const requestedNames = customToolsToLoad.map(
|
|
16869
|
+
(item) => typeof item === "string" ? item : `${item.name || item.workflow}(wf:${item.workflow})`
|
|
16870
|
+
);
|
|
16871
|
+
span.addEvent("tool_setup.resolution", {
|
|
16872
|
+
"tool_setup.requested_count": customToolsToLoad.length,
|
|
16873
|
+
"tool_setup.requested_names": requestedNames.join(","),
|
|
16874
|
+
"tool_setup.resolved_count": customTools.size,
|
|
16875
|
+
"tool_setup.resolved_names": Array.from(customTools.keys()).join(","),
|
|
16876
|
+
"tool_setup.missing_count": customToolsToLoad.length - customTools.size
|
|
16877
|
+
});
|
|
16878
|
+
}
|
|
16879
|
+
} catch {
|
|
16880
|
+
}
|
|
16881
|
+
if (customToolsToLoad.length > 0 && customTools.size === 0) {
|
|
16882
|
+
logger.warn(
|
|
16883
|
+
`[AICheckProvider] All ${customToolsToLoad.length} custom tools failed to resolve! Requested: ${customToolsToLoad.map((item) => typeof item === "string" ? item : item.workflow).join(", ")}. AI will have no workflow tools available.`
|
|
16884
|
+
);
|
|
16885
|
+
}
|
|
14466
16886
|
if (scheduleToolEnabled) {
|
|
14467
16887
|
const scheduleTool = getScheduleToolDefinition();
|
|
14468
16888
|
customTools.set(scheduleTool.name, scheduleTool);
|
|
@@ -14500,11 +16920,35 @@ ${preview}`);
|
|
|
14500
16920
|
};
|
|
14501
16921
|
}
|
|
14502
16922
|
} catch (error) {
|
|
16923
|
+
const errMsg = error instanceof Error ? error.message : "Unknown error";
|
|
14503
16924
|
logger.error(
|
|
14504
|
-
`[AICheckProvider] Failed to start custom tools SSE server '${customToolsServerName}': ${
|
|
16925
|
+
`[AICheckProvider] Failed to start custom tools SSE server '${customToolsServerName}': ${errMsg}`
|
|
14505
16926
|
);
|
|
16927
|
+
try {
|
|
16928
|
+
const span = trace.getSpan(context.active());
|
|
16929
|
+
if (span) {
|
|
16930
|
+
span.addEvent("tool_setup.sse_server_error", {
|
|
16931
|
+
"tool_setup.error": errMsg,
|
|
16932
|
+
"tool_setup.server_name": customToolsServerName || ""
|
|
16933
|
+
});
|
|
16934
|
+
}
|
|
16935
|
+
} catch {
|
|
16936
|
+
}
|
|
14506
16937
|
}
|
|
14507
16938
|
}
|
|
16939
|
+
try {
|
|
16940
|
+
const span = trace.getSpan(context.active());
|
|
16941
|
+
if (span) {
|
|
16942
|
+
const finalServerNames = Object.keys(mcpServers);
|
|
16943
|
+
span.addEvent("tool_setup.final", {
|
|
16944
|
+
"tool_setup.final_server_count": finalServerNames.length,
|
|
16945
|
+
"tool_setup.final_server_names": finalServerNames.join(","),
|
|
16946
|
+
"tool_setup.has_custom_tools_server": !!customToolsServer,
|
|
16947
|
+
"tool_setup.tools_disabled": !!config.ai?.disableTools
|
|
16948
|
+
});
|
|
16949
|
+
}
|
|
16950
|
+
} catch {
|
|
16951
|
+
}
|
|
14508
16952
|
if (Object.keys(mcpServers).length > 0 && !config.ai?.disableTools) {
|
|
14509
16953
|
aiConfig.mcpServers = mcpServers;
|
|
14510
16954
|
} else if (config.ai?.disableTools) {
|
|
@@ -15703,8 +18147,8 @@ var init_template_context = __esm({
|
|
|
15703
18147
|
});
|
|
15704
18148
|
|
|
15705
18149
|
// src/providers/http-client-provider.ts
|
|
15706
|
-
import * as
|
|
15707
|
-
import * as
|
|
18150
|
+
import * as fs8 from "fs";
|
|
18151
|
+
import * as path10 from "path";
|
|
15708
18152
|
var HttpClientProvider;
|
|
15709
18153
|
var init_http_client_provider = __esm({
|
|
15710
18154
|
"src/providers/http-client-provider.ts"() {
|
|
@@ -15809,14 +18253,14 @@ var init_http_client_provider = __esm({
|
|
|
15809
18253
|
const parentContext = context2?._parentContext;
|
|
15810
18254
|
const workingDirectory = parentContext?.workingDirectory;
|
|
15811
18255
|
const workspaceEnabled = parentContext?.workspace?.isEnabled?.();
|
|
15812
|
-
if (workspaceEnabled && workingDirectory && !
|
|
15813
|
-
resolvedOutputFile =
|
|
18256
|
+
if (workspaceEnabled && workingDirectory && !path10.isAbsolute(resolvedOutputFile)) {
|
|
18257
|
+
resolvedOutputFile = path10.join(workingDirectory, resolvedOutputFile);
|
|
15814
18258
|
logger.debug(
|
|
15815
18259
|
`[http_client] Resolved relative output_file to workspace: ${resolvedOutputFile}`
|
|
15816
18260
|
);
|
|
15817
18261
|
}
|
|
15818
|
-
if (skipIfExists &&
|
|
15819
|
-
const stats =
|
|
18262
|
+
if (skipIfExists && fs8.existsSync(resolvedOutputFile)) {
|
|
18263
|
+
const stats = fs8.statSync(resolvedOutputFile);
|
|
15820
18264
|
logger.verbose(`[http_client] File cached: ${resolvedOutputFile} (${stats.size} bytes)`);
|
|
15821
18265
|
return {
|
|
15822
18266
|
issues: [],
|
|
@@ -16027,13 +18471,13 @@ var init_http_client_provider = __esm({
|
|
|
16027
18471
|
]
|
|
16028
18472
|
};
|
|
16029
18473
|
}
|
|
16030
|
-
const parentDir =
|
|
16031
|
-
if (parentDir && !
|
|
16032
|
-
|
|
18474
|
+
const parentDir = path10.dirname(outputFile);
|
|
18475
|
+
if (parentDir && !fs8.existsSync(parentDir)) {
|
|
18476
|
+
fs8.mkdirSync(parentDir, { recursive: true });
|
|
16033
18477
|
}
|
|
16034
18478
|
const arrayBuffer = await response.arrayBuffer();
|
|
16035
18479
|
const buffer = Buffer.from(arrayBuffer);
|
|
16036
|
-
|
|
18480
|
+
fs8.writeFileSync(outputFile, buffer);
|
|
16037
18481
|
const contentType = response.headers.get("content-type") || "application/octet-stream";
|
|
16038
18482
|
logger.verbose(`[http_client] Downloaded: ${outputFile} (${buffer.length} bytes)`);
|
|
16039
18483
|
return {
|
|
@@ -16789,8 +19233,8 @@ var init_claude_code_types = __esm({
|
|
|
16789
19233
|
});
|
|
16790
19234
|
|
|
16791
19235
|
// src/providers/claude-code-check-provider.ts
|
|
16792
|
-
import
|
|
16793
|
-
import
|
|
19236
|
+
import fs9 from "fs/promises";
|
|
19237
|
+
import path11 from "path";
|
|
16794
19238
|
function isClaudeCodeConstructor(value) {
|
|
16795
19239
|
return typeof value === "function";
|
|
16796
19240
|
}
|
|
@@ -16949,7 +19393,7 @@ var init_claude_code_check_provider = __esm({
|
|
|
16949
19393
|
const hasFileExtension = /\.[a-zA-Z0-9]{1,10}$/i.test(str);
|
|
16950
19394
|
const hasPathSeparators = /[\/\\]/.test(str);
|
|
16951
19395
|
const isRelativePath = /^\.{1,2}\//.test(str);
|
|
16952
|
-
const isAbsolutePath =
|
|
19396
|
+
const isAbsolutePath = path11.isAbsolute(str);
|
|
16953
19397
|
const hasTypicalFileChars = /^[a-zA-Z0-9._\-\/\\:~]+$/.test(str);
|
|
16954
19398
|
if (!(hasFileExtension || isRelativePath || isAbsolutePath || hasPathSeparators)) {
|
|
16955
19399
|
return false;
|
|
@@ -16959,13 +19403,13 @@ var init_claude_code_check_provider = __esm({
|
|
|
16959
19403
|
}
|
|
16960
19404
|
try {
|
|
16961
19405
|
let resolvedPath;
|
|
16962
|
-
if (
|
|
16963
|
-
resolvedPath =
|
|
19406
|
+
if (path11.isAbsolute(str)) {
|
|
19407
|
+
resolvedPath = path11.normalize(str);
|
|
16964
19408
|
} else {
|
|
16965
|
-
resolvedPath =
|
|
19409
|
+
resolvedPath = path11.resolve(process.cwd(), str);
|
|
16966
19410
|
}
|
|
16967
19411
|
try {
|
|
16968
|
-
const stat2 = await
|
|
19412
|
+
const stat2 = await fs9.stat(resolvedPath);
|
|
16969
19413
|
return stat2.isFile();
|
|
16970
19414
|
} catch {
|
|
16971
19415
|
return hasFileExtension && (isRelativePath || isAbsolutePath || hasPathSeparators);
|
|
@@ -16982,14 +19426,14 @@ var init_claude_code_check_provider = __esm({
|
|
|
16982
19426
|
throw new Error("Prompt file must have .liquid extension");
|
|
16983
19427
|
}
|
|
16984
19428
|
let resolvedPath;
|
|
16985
|
-
if (
|
|
19429
|
+
if (path11.isAbsolute(promptPath)) {
|
|
16986
19430
|
resolvedPath = promptPath;
|
|
16987
19431
|
} else {
|
|
16988
|
-
resolvedPath =
|
|
19432
|
+
resolvedPath = path11.resolve(process.cwd(), promptPath);
|
|
16989
19433
|
}
|
|
16990
|
-
if (!
|
|
16991
|
-
const normalizedPath =
|
|
16992
|
-
const currentDir =
|
|
19434
|
+
if (!path11.isAbsolute(promptPath)) {
|
|
19435
|
+
const normalizedPath = path11.normalize(resolvedPath);
|
|
19436
|
+
const currentDir = path11.resolve(process.cwd());
|
|
16993
19437
|
if (!normalizedPath.startsWith(currentDir)) {
|
|
16994
19438
|
throw new Error("Invalid prompt file path: path traversal detected");
|
|
16995
19439
|
}
|
|
@@ -16998,7 +19442,7 @@ var init_claude_code_check_provider = __esm({
|
|
|
16998
19442
|
throw new Error("Invalid prompt file path: path traversal detected");
|
|
16999
19443
|
}
|
|
17000
19444
|
try {
|
|
17001
|
-
const promptContent = await
|
|
19445
|
+
const promptContent = await fs9.readFile(resolvedPath, "utf-8");
|
|
17002
19446
|
return promptContent;
|
|
17003
19447
|
} catch (error) {
|
|
17004
19448
|
throw new Error(
|
|
@@ -19568,14 +22012,14 @@ var require_util = __commonJS({
|
|
|
19568
22012
|
}
|
|
19569
22013
|
const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80;
|
|
19570
22014
|
let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`;
|
|
19571
|
-
let
|
|
22015
|
+
let path15 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`;
|
|
19572
22016
|
if (origin.endsWith("/")) {
|
|
19573
22017
|
origin = origin.substring(0, origin.length - 1);
|
|
19574
22018
|
}
|
|
19575
|
-
if (
|
|
19576
|
-
|
|
22019
|
+
if (path15 && !path15.startsWith("/")) {
|
|
22020
|
+
path15 = `/${path15}`;
|
|
19577
22021
|
}
|
|
19578
|
-
url = new URL(origin +
|
|
22022
|
+
url = new URL(origin + path15);
|
|
19579
22023
|
}
|
|
19580
22024
|
return url;
|
|
19581
22025
|
}
|
|
@@ -21189,20 +23633,20 @@ var require_parseParams = __commonJS({
|
|
|
21189
23633
|
var require_basename = __commonJS({
|
|
21190
23634
|
"node_modules/@fastify/busboy/lib/utils/basename.js"(exports, module) {
|
|
21191
23635
|
"use strict";
|
|
21192
|
-
module.exports = function basename2(
|
|
21193
|
-
if (typeof
|
|
23636
|
+
module.exports = function basename2(path15) {
|
|
23637
|
+
if (typeof path15 !== "string") {
|
|
21194
23638
|
return "";
|
|
21195
23639
|
}
|
|
21196
|
-
for (var i =
|
|
21197
|
-
switch (
|
|
23640
|
+
for (var i = path15.length - 1; i >= 0; --i) {
|
|
23641
|
+
switch (path15.charCodeAt(i)) {
|
|
21198
23642
|
case 47:
|
|
21199
23643
|
// '/'
|
|
21200
23644
|
case 92:
|
|
21201
|
-
|
|
21202
|
-
return
|
|
23645
|
+
path15 = path15.slice(i + 1);
|
|
23646
|
+
return path15 === ".." || path15 === "." ? "" : path15;
|
|
21203
23647
|
}
|
|
21204
23648
|
}
|
|
21205
|
-
return
|
|
23649
|
+
return path15 === ".." || path15 === "." ? "" : path15;
|
|
21206
23650
|
};
|
|
21207
23651
|
}
|
|
21208
23652
|
});
|
|
@@ -24233,7 +26677,7 @@ var require_request = __commonJS({
|
|
|
24233
26677
|
}
|
|
24234
26678
|
var Request = class _Request {
|
|
24235
26679
|
constructor(origin, {
|
|
24236
|
-
path:
|
|
26680
|
+
path: path15,
|
|
24237
26681
|
method,
|
|
24238
26682
|
body,
|
|
24239
26683
|
headers,
|
|
@@ -24247,11 +26691,11 @@ var require_request = __commonJS({
|
|
|
24247
26691
|
throwOnError,
|
|
24248
26692
|
expectContinue
|
|
24249
26693
|
}, handler) {
|
|
24250
|
-
if (typeof
|
|
26694
|
+
if (typeof path15 !== "string") {
|
|
24251
26695
|
throw new InvalidArgumentError("path must be a string");
|
|
24252
|
-
} else if (
|
|
26696
|
+
} else if (path15[0] !== "/" && !(path15.startsWith("http://") || path15.startsWith("https://")) && method !== "CONNECT") {
|
|
24253
26697
|
throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
|
|
24254
|
-
} else if (invalidPathRegex.exec(
|
|
26698
|
+
} else if (invalidPathRegex.exec(path15) !== null) {
|
|
24255
26699
|
throw new InvalidArgumentError("invalid request path");
|
|
24256
26700
|
}
|
|
24257
26701
|
if (typeof method !== "string") {
|
|
@@ -24314,7 +26758,7 @@ var require_request = __commonJS({
|
|
|
24314
26758
|
this.completed = false;
|
|
24315
26759
|
this.aborted = false;
|
|
24316
26760
|
this.upgrade = upgrade || null;
|
|
24317
|
-
this.path = query ? util.buildURL(
|
|
26761
|
+
this.path = query ? util.buildURL(path15, query) : path15;
|
|
24318
26762
|
this.origin = origin;
|
|
24319
26763
|
this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent;
|
|
24320
26764
|
this.blocking = blocking == null ? false : blocking;
|
|
@@ -25322,9 +27766,9 @@ var require_RedirectHandler = __commonJS({
|
|
|
25322
27766
|
return this.handler.onHeaders(statusCode, headers, resume, statusText);
|
|
25323
27767
|
}
|
|
25324
27768
|
const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)));
|
|
25325
|
-
const
|
|
27769
|
+
const path15 = search ? `${pathname}${search}` : pathname;
|
|
25326
27770
|
this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin);
|
|
25327
|
-
this.opts.path =
|
|
27771
|
+
this.opts.path = path15;
|
|
25328
27772
|
this.opts.origin = origin;
|
|
25329
27773
|
this.opts.maxRedirections = 0;
|
|
25330
27774
|
this.opts.query = null;
|
|
@@ -26566,7 +29010,7 @@ var require_client = __commonJS({
|
|
|
26566
29010
|
writeH2(client, client[kHTTP2Session], request);
|
|
26567
29011
|
return;
|
|
26568
29012
|
}
|
|
26569
|
-
const { body, method, path:
|
|
29013
|
+
const { body, method, path: path15, host, upgrade, headers, blocking, reset } = request;
|
|
26570
29014
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
26571
29015
|
if (body && typeof body.read === "function") {
|
|
26572
29016
|
body.read(0);
|
|
@@ -26616,7 +29060,7 @@ var require_client = __commonJS({
|
|
|
26616
29060
|
if (blocking) {
|
|
26617
29061
|
socket[kBlocking] = true;
|
|
26618
29062
|
}
|
|
26619
|
-
let header = `${method} ${
|
|
29063
|
+
let header = `${method} ${path15} HTTP/1.1\r
|
|
26620
29064
|
`;
|
|
26621
29065
|
if (typeof host === "string") {
|
|
26622
29066
|
header += `host: ${host}\r
|
|
@@ -26679,7 +29123,7 @@ upgrade: ${upgrade}\r
|
|
|
26679
29123
|
return true;
|
|
26680
29124
|
}
|
|
26681
29125
|
function writeH2(client, session, request) {
|
|
26682
|
-
const { body, method, path:
|
|
29126
|
+
const { body, method, path: path15, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
|
|
26683
29127
|
let headers;
|
|
26684
29128
|
if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim());
|
|
26685
29129
|
else headers = reqHeaders;
|
|
@@ -26722,7 +29166,7 @@ upgrade: ${upgrade}\r
|
|
|
26722
29166
|
});
|
|
26723
29167
|
return true;
|
|
26724
29168
|
}
|
|
26725
|
-
headers[HTTP2_HEADER_PATH] =
|
|
29169
|
+
headers[HTTP2_HEADER_PATH] = path15;
|
|
26726
29170
|
headers[HTTP2_HEADER_SCHEME] = "https";
|
|
26727
29171
|
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
|
|
26728
29172
|
if (body && typeof body.read === "function") {
|
|
@@ -28965,20 +31409,20 @@ var require_mock_utils = __commonJS({
|
|
|
28965
31409
|
}
|
|
28966
31410
|
return true;
|
|
28967
31411
|
}
|
|
28968
|
-
function safeUrl(
|
|
28969
|
-
if (typeof
|
|
28970
|
-
return
|
|
31412
|
+
function safeUrl(path15) {
|
|
31413
|
+
if (typeof path15 !== "string") {
|
|
31414
|
+
return path15;
|
|
28971
31415
|
}
|
|
28972
|
-
const pathSegments =
|
|
31416
|
+
const pathSegments = path15.split("?");
|
|
28973
31417
|
if (pathSegments.length !== 2) {
|
|
28974
|
-
return
|
|
31418
|
+
return path15;
|
|
28975
31419
|
}
|
|
28976
31420
|
const qp = new URLSearchParams(pathSegments.pop());
|
|
28977
31421
|
qp.sort();
|
|
28978
31422
|
return [...pathSegments, qp.toString()].join("?");
|
|
28979
31423
|
}
|
|
28980
|
-
function matchKey(mockDispatch2, { path:
|
|
28981
|
-
const pathMatch = matchValue(mockDispatch2.path,
|
|
31424
|
+
function matchKey(mockDispatch2, { path: path15, method, body, headers }) {
|
|
31425
|
+
const pathMatch = matchValue(mockDispatch2.path, path15);
|
|
28982
31426
|
const methodMatch = matchValue(mockDispatch2.method, method);
|
|
28983
31427
|
const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true;
|
|
28984
31428
|
const headersMatch = matchHeaders(mockDispatch2, headers);
|
|
@@ -28996,7 +31440,7 @@ var require_mock_utils = __commonJS({
|
|
|
28996
31440
|
function getMockDispatch(mockDispatches, key) {
|
|
28997
31441
|
const basePath = key.query ? buildURL(key.path, key.query) : key.path;
|
|
28998
31442
|
const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath;
|
|
28999
|
-
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path:
|
|
31443
|
+
let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path15 }) => matchValue(safeUrl(path15), resolvedPath));
|
|
29000
31444
|
if (matchedMockDispatches.length === 0) {
|
|
29001
31445
|
throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`);
|
|
29002
31446
|
}
|
|
@@ -29033,9 +31477,9 @@ var require_mock_utils = __commonJS({
|
|
|
29033
31477
|
}
|
|
29034
31478
|
}
|
|
29035
31479
|
function buildKey(opts) {
|
|
29036
|
-
const { path:
|
|
31480
|
+
const { path: path15, method, body, headers, query } = opts;
|
|
29037
31481
|
return {
|
|
29038
|
-
path:
|
|
31482
|
+
path: path15,
|
|
29039
31483
|
method,
|
|
29040
31484
|
body,
|
|
29041
31485
|
headers,
|
|
@@ -29484,10 +31928,10 @@ var require_pending_interceptors_formatter = __commonJS({
|
|
|
29484
31928
|
}
|
|
29485
31929
|
format(pendingInterceptors) {
|
|
29486
31930
|
const withPrettyHeaders = pendingInterceptors.map(
|
|
29487
|
-
({ method, path:
|
|
31931
|
+
({ method, path: path15, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
|
|
29488
31932
|
Method: method,
|
|
29489
31933
|
Origin: origin,
|
|
29490
|
-
Path:
|
|
31934
|
+
Path: path15,
|
|
29491
31935
|
"Status code": statusCode,
|
|
29492
31936
|
Persistent: persist ? "\u2705" : "\u274C",
|
|
29493
31937
|
Invocations: timesInvoked,
|
|
@@ -34108,8 +36552,8 @@ var require_util6 = __commonJS({
|
|
|
34108
36552
|
}
|
|
34109
36553
|
}
|
|
34110
36554
|
}
|
|
34111
|
-
function validateCookiePath(
|
|
34112
|
-
for (const char of
|
|
36555
|
+
function validateCookiePath(path15) {
|
|
36556
|
+
for (const char of path15) {
|
|
34113
36557
|
const code = char.charCodeAt(0);
|
|
34114
36558
|
if (code < 33 || char === ";") {
|
|
34115
36559
|
throw new Error("Invalid cookie path");
|
|
@@ -35789,11 +38233,11 @@ var require_undici = __commonJS({
|
|
|
35789
38233
|
if (typeof opts.path !== "string") {
|
|
35790
38234
|
throw new InvalidArgumentError("invalid opts.path");
|
|
35791
38235
|
}
|
|
35792
|
-
let
|
|
38236
|
+
let path15 = opts.path;
|
|
35793
38237
|
if (!opts.path.startsWith("/")) {
|
|
35794
|
-
|
|
38238
|
+
path15 = `/${path15}`;
|
|
35795
38239
|
}
|
|
35796
|
-
url = new URL(util.parseOrigin(url).origin +
|
|
38240
|
+
url = new URL(util.parseOrigin(url).origin + path15);
|
|
35797
38241
|
} else {
|
|
35798
38242
|
if (!opts) {
|
|
35799
38243
|
opts = typeof url === "object" ? url : {};
|
|
@@ -36950,8 +39394,8 @@ var init_stdin_reader = __esm({
|
|
|
36950
39394
|
});
|
|
36951
39395
|
|
|
36952
39396
|
// src/providers/human-input-check-provider.ts
|
|
36953
|
-
import * as
|
|
36954
|
-
import * as
|
|
39397
|
+
import * as fs10 from "fs";
|
|
39398
|
+
import * as path12 from "path";
|
|
36955
39399
|
var HumanInputCheckProvider;
|
|
36956
39400
|
var init_human_input_check_provider = __esm({
|
|
36957
39401
|
"src/providers/human-input-check-provider.ts"() {
|
|
@@ -37134,19 +39578,19 @@ var init_human_input_check_provider = __esm({
|
|
|
37134
39578
|
*/
|
|
37135
39579
|
async tryReadFile(filePath) {
|
|
37136
39580
|
try {
|
|
37137
|
-
const absolutePath =
|
|
37138
|
-
const normalizedPath =
|
|
39581
|
+
const absolutePath = path12.isAbsolute(filePath) ? filePath : path12.resolve(process.cwd(), filePath);
|
|
39582
|
+
const normalizedPath = path12.normalize(absolutePath);
|
|
37139
39583
|
const cwd = process.cwd();
|
|
37140
|
-
if (!normalizedPath.startsWith(cwd +
|
|
39584
|
+
if (!normalizedPath.startsWith(cwd + path12.sep) && normalizedPath !== cwd) {
|
|
37141
39585
|
return null;
|
|
37142
39586
|
}
|
|
37143
39587
|
try {
|
|
37144
|
-
await
|
|
37145
|
-
const stats = await
|
|
39588
|
+
await fs10.promises.access(normalizedPath, fs10.constants.R_OK);
|
|
39589
|
+
const stats = await fs10.promises.stat(normalizedPath);
|
|
37146
39590
|
if (!stats.isFile()) {
|
|
37147
39591
|
return null;
|
|
37148
39592
|
}
|
|
37149
|
-
const content = await
|
|
39593
|
+
const content = await fs10.promises.readFile(normalizedPath, "utf-8");
|
|
37150
39594
|
return content.trim();
|
|
37151
39595
|
} catch {
|
|
37152
39596
|
return null;
|
|
@@ -37707,8 +40151,8 @@ function buildBuiltinGlobals(opts) {
|
|
|
37707
40151
|
const asyncFunctionNames = /* @__PURE__ */ new Set();
|
|
37708
40152
|
const scheduleFn = async (args = {}) => {
|
|
37709
40153
|
try {
|
|
37710
|
-
const { handleScheduleAction: handleScheduleAction2, buildScheduleToolContext: buildScheduleToolContext2 } = await import("./schedule-tool-
|
|
37711
|
-
const { extractSlackContext: extractSlackContext2 } = await import("./schedule-tool-handler-
|
|
40154
|
+
const { handleScheduleAction: handleScheduleAction2, buildScheduleToolContext: buildScheduleToolContext2 } = await import("./schedule-tool-CONR4VW3.mjs");
|
|
40155
|
+
const { extractSlackContext: extractSlackContext2 } = await import("./schedule-tool-handler-YUC6CAXX.mjs");
|
|
37712
40156
|
const parentCtx = opts.sessionInfo?._parentContext;
|
|
37713
40157
|
const webhookData = parentCtx?.prInfo?.eventContext?.webhookData;
|
|
37714
40158
|
const visorCfg = parentCtx?.config;
|
|
@@ -38292,9 +40736,9 @@ var init_script_check_provider = __esm({
|
|
|
38292
40736
|
});
|
|
38293
40737
|
|
|
38294
40738
|
// src/utils/worktree-manager.ts
|
|
38295
|
-
import * as
|
|
40739
|
+
import * as fs11 from "fs";
|
|
38296
40740
|
import * as fsp2 from "fs/promises";
|
|
38297
|
-
import * as
|
|
40741
|
+
import * as path13 from "path";
|
|
38298
40742
|
import * as crypto from "crypto";
|
|
38299
40743
|
var WorktreeManager, worktreeManager;
|
|
38300
40744
|
var init_worktree_manager = __esm({
|
|
@@ -38314,7 +40758,7 @@ var init_worktree_manager = __esm({
|
|
|
38314
40758
|
} catch {
|
|
38315
40759
|
cwd = "/tmp";
|
|
38316
40760
|
}
|
|
38317
|
-
const defaultBasePath = process.env.VISOR_WORKTREE_PATH ||
|
|
40761
|
+
const defaultBasePath = process.env.VISOR_WORKTREE_PATH || path13.join(cwd, ".visor", "worktrees");
|
|
38318
40762
|
this.config = {
|
|
38319
40763
|
enabled: true,
|
|
38320
40764
|
base_path: defaultBasePath,
|
|
@@ -38351,20 +40795,20 @@ var init_worktree_manager = __esm({
|
|
|
38351
40795
|
}
|
|
38352
40796
|
const reposDir = this.getReposDir();
|
|
38353
40797
|
const worktreesDir = this.getWorktreesDir();
|
|
38354
|
-
if (!
|
|
38355
|
-
|
|
40798
|
+
if (!fs11.existsSync(reposDir)) {
|
|
40799
|
+
fs11.mkdirSync(reposDir, { recursive: true });
|
|
38356
40800
|
logger.debug(`Created repos directory: ${reposDir}`);
|
|
38357
40801
|
}
|
|
38358
|
-
if (!
|
|
38359
|
-
|
|
40802
|
+
if (!fs11.existsSync(worktreesDir)) {
|
|
40803
|
+
fs11.mkdirSync(worktreesDir, { recursive: true });
|
|
38360
40804
|
logger.debug(`Created worktrees directory: ${worktreesDir}`);
|
|
38361
40805
|
}
|
|
38362
40806
|
}
|
|
38363
40807
|
getReposDir() {
|
|
38364
|
-
return
|
|
40808
|
+
return path13.join(this.config.base_path, "repos");
|
|
38365
40809
|
}
|
|
38366
40810
|
getWorktreesDir() {
|
|
38367
|
-
return
|
|
40811
|
+
return path13.join(this.config.base_path, "worktrees");
|
|
38368
40812
|
}
|
|
38369
40813
|
/**
|
|
38370
40814
|
* Generate a deterministic worktree ID based on repository and ref.
|
|
@@ -38382,8 +40826,8 @@ var init_worktree_manager = __esm({
|
|
|
38382
40826
|
async getOrCreateBareRepo(repository, repoUrl, _token, fetchDepth, cloneTimeoutMs) {
|
|
38383
40827
|
const reposDir = this.getReposDir();
|
|
38384
40828
|
const repoName = repository.replace(/\//g, "-");
|
|
38385
|
-
const bareRepoPath =
|
|
38386
|
-
if (
|
|
40829
|
+
const bareRepoPath = path13.join(reposDir, `${repoName}.git`);
|
|
40830
|
+
if (fs11.existsSync(bareRepoPath)) {
|
|
38387
40831
|
logger.debug(`Bare repository already exists: ${bareRepoPath}`);
|
|
38388
40832
|
const verifyResult = await this.verifyBareRepoRemote(bareRepoPath, repoUrl);
|
|
38389
40833
|
if (verifyResult === "timeout") {
|
|
@@ -38530,11 +40974,11 @@ var init_worktree_manager = __esm({
|
|
|
38530
40974
|
options.cloneTimeoutMs
|
|
38531
40975
|
);
|
|
38532
40976
|
const worktreeId = this.generateWorktreeId(repository, ref);
|
|
38533
|
-
let worktreePath = options.workingDirectory ||
|
|
40977
|
+
let worktreePath = options.workingDirectory || path13.join(this.getWorktreesDir(), worktreeId);
|
|
38534
40978
|
if (options.workingDirectory) {
|
|
38535
40979
|
worktreePath = this.validatePath(options.workingDirectory);
|
|
38536
40980
|
}
|
|
38537
|
-
if (
|
|
40981
|
+
if (fs11.existsSync(worktreePath)) {
|
|
38538
40982
|
logger.debug(`Worktree already exists: ${worktreePath}`);
|
|
38539
40983
|
const metadata2 = await this.loadMetadata(worktreePath);
|
|
38540
40984
|
if (metadata2) {
|
|
@@ -38775,31 +41219,48 @@ var init_worktree_manager = __esm({
|
|
|
38775
41219
|
const result = await this.executeGitCommand(removeCmd, { timeout: 3e4 });
|
|
38776
41220
|
if (result.exitCode !== 0) {
|
|
38777
41221
|
logger.warn(`Failed to remove worktree via git: ${result.stderr}`);
|
|
38778
|
-
if (
|
|
41222
|
+
if (fs11.existsSync(worktree_path)) {
|
|
38779
41223
|
logger.debug(`Manually removing worktree directory`);
|
|
38780
|
-
|
|
41224
|
+
fs11.rmSync(worktree_path, { recursive: true, force: true });
|
|
38781
41225
|
}
|
|
38782
41226
|
}
|
|
41227
|
+
const metadataPath = this.getMetadataPath(worktree_path);
|
|
41228
|
+
try {
|
|
41229
|
+
if (fs11.existsSync(metadataPath)) {
|
|
41230
|
+
fs11.unlinkSync(metadataPath);
|
|
41231
|
+
}
|
|
41232
|
+
} catch {
|
|
41233
|
+
}
|
|
38783
41234
|
this.activeWorktrees.delete(worktreeId);
|
|
38784
41235
|
logger.info(`Successfully removed worktree: ${worktreeId}`);
|
|
38785
41236
|
}
|
|
41237
|
+
/**
|
|
41238
|
+
* Get the metadata file path for a worktree.
|
|
41239
|
+
* Stored as a sibling file OUTSIDE the worktree to avoid being committed
|
|
41240
|
+
* when agents run `git add .` inside the checked-out repo.
|
|
41241
|
+
*/
|
|
41242
|
+
getMetadataPath(worktreePath) {
|
|
41243
|
+
return worktreePath.replace(/\/?$/, "") + ".metadata.json";
|
|
41244
|
+
}
|
|
38786
41245
|
/**
|
|
38787
41246
|
* Save worktree metadata
|
|
38788
41247
|
*/
|
|
38789
41248
|
async saveMetadata(worktreePath, metadata) {
|
|
38790
|
-
const metadataPath =
|
|
38791
|
-
|
|
41249
|
+
const metadataPath = this.getMetadataPath(worktreePath);
|
|
41250
|
+
fs11.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2), "utf8");
|
|
38792
41251
|
}
|
|
38793
41252
|
/**
|
|
38794
41253
|
* Load worktree metadata
|
|
38795
41254
|
*/
|
|
38796
41255
|
async loadMetadata(worktreePath) {
|
|
38797
|
-
const metadataPath =
|
|
38798
|
-
|
|
41256
|
+
const metadataPath = this.getMetadataPath(worktreePath);
|
|
41257
|
+
const legacyPath = path13.join(worktreePath, ".visor-metadata.json");
|
|
41258
|
+
const pathToRead = fs11.existsSync(metadataPath) ? metadataPath : fs11.existsSync(legacyPath) ? legacyPath : null;
|
|
41259
|
+
if (!pathToRead) {
|
|
38799
41260
|
return null;
|
|
38800
41261
|
}
|
|
38801
41262
|
try {
|
|
38802
|
-
const content =
|
|
41263
|
+
const content = fs11.readFileSync(pathToRead, "utf8");
|
|
38803
41264
|
return JSON.parse(content);
|
|
38804
41265
|
} catch (error) {
|
|
38805
41266
|
logger.warn(`Failed to load metadata: ${error}`);
|
|
@@ -38811,14 +41272,14 @@ var init_worktree_manager = __esm({
|
|
|
38811
41272
|
*/
|
|
38812
41273
|
async listWorktrees() {
|
|
38813
41274
|
const worktreesDir = this.getWorktreesDir();
|
|
38814
|
-
if (!
|
|
41275
|
+
if (!fs11.existsSync(worktreesDir)) {
|
|
38815
41276
|
return [];
|
|
38816
41277
|
}
|
|
38817
|
-
const entries =
|
|
41278
|
+
const entries = fs11.readdirSync(worktreesDir, { withFileTypes: true });
|
|
38818
41279
|
const worktrees = [];
|
|
38819
41280
|
for (const entry of entries) {
|
|
38820
41281
|
if (!entry.isDirectory()) continue;
|
|
38821
|
-
const worktreePath =
|
|
41282
|
+
const worktreePath = path13.join(worktreesDir, entry.name);
|
|
38822
41283
|
const metadata = await this.loadMetadata(worktreePath);
|
|
38823
41284
|
if (metadata) {
|
|
38824
41285
|
worktrees.push({
|
|
@@ -38950,8 +41411,8 @@ var init_worktree_manager = __esm({
|
|
|
38950
41411
|
* Validate path to prevent directory traversal
|
|
38951
41412
|
*/
|
|
38952
41413
|
validatePath(userPath) {
|
|
38953
|
-
const resolvedPath =
|
|
38954
|
-
if (!
|
|
41414
|
+
const resolvedPath = path13.resolve(userPath);
|
|
41415
|
+
if (!path13.isAbsolute(resolvedPath)) {
|
|
38955
41416
|
throw new Error("Path must be absolute");
|
|
38956
41417
|
}
|
|
38957
41418
|
const sensitivePatterns = [
|
|
@@ -39930,7 +42391,7 @@ var init_workflow_projection = __esm({
|
|
|
39930
42391
|
});
|
|
39931
42392
|
|
|
39932
42393
|
// src/providers/workflow-check-provider.ts
|
|
39933
|
-
import * as
|
|
42394
|
+
import * as path14 from "path";
|
|
39934
42395
|
import * as yaml from "js-yaml";
|
|
39935
42396
|
var WorkflowCheckProvider;
|
|
39936
42397
|
var init_workflow_check_provider = __esm({
|
|
@@ -40149,13 +42610,13 @@ var init_workflow_check_provider = __esm({
|
|
|
40149
42610
|
const loadConfigLiquid = createExtendedLiquid();
|
|
40150
42611
|
const loadConfig = (filePath) => {
|
|
40151
42612
|
try {
|
|
40152
|
-
const normalizedBasePath =
|
|
40153
|
-
const resolvedPath =
|
|
40154
|
-
const basePathWithSep = normalizedBasePath.endsWith(
|
|
42613
|
+
const normalizedBasePath = path14.normalize(basePath);
|
|
42614
|
+
const resolvedPath = path14.isAbsolute(filePath) ? path14.normalize(filePath) : path14.normalize(path14.resolve(basePath, filePath));
|
|
42615
|
+
const basePathWithSep = normalizedBasePath.endsWith(path14.sep) ? normalizedBasePath : normalizedBasePath + path14.sep;
|
|
40155
42616
|
if (!resolvedPath.startsWith(basePathWithSep) && resolvedPath !== normalizedBasePath) {
|
|
40156
42617
|
throw new Error(`Path '${filePath}' escapes base directory`);
|
|
40157
42618
|
}
|
|
40158
|
-
const configDir =
|
|
42619
|
+
const configDir = path14.dirname(resolvedPath);
|
|
40159
42620
|
const rawContent = __require("fs").readFileSync(resolvedPath, "utf-8");
|
|
40160
42621
|
const renderedContent = loadConfigLiquid.parseAndRenderSync(rawContent, {
|
|
40161
42622
|
basePath: configDir
|
|
@@ -40606,17 +43067,17 @@ var init_workflow_check_provider = __esm({
|
|
|
40606
43067
|
* so it can be executed by the state machine as a nested workflow.
|
|
40607
43068
|
*/
|
|
40608
43069
|
async loadWorkflowFromConfigPath(sourcePath, baseDir) {
|
|
40609
|
-
const
|
|
40610
|
-
const
|
|
43070
|
+
const path15 = __require("path");
|
|
43071
|
+
const fs12 = __require("fs");
|
|
40611
43072
|
const yaml2 = __require("js-yaml");
|
|
40612
|
-
const resolved =
|
|
40613
|
-
if (!
|
|
43073
|
+
const resolved = path15.isAbsolute(sourcePath) ? sourcePath : path15.resolve(baseDir, sourcePath);
|
|
43074
|
+
if (!fs12.existsSync(resolved)) {
|
|
40614
43075
|
throw new Error(`Workflow config not found at: ${resolved}`);
|
|
40615
43076
|
}
|
|
40616
|
-
const rawContent =
|
|
43077
|
+
const rawContent = fs12.readFileSync(resolved, "utf8");
|
|
40617
43078
|
const rawData = yaml2.load(rawContent);
|
|
40618
43079
|
if (rawData.imports && Array.isArray(rawData.imports)) {
|
|
40619
|
-
const configDir =
|
|
43080
|
+
const configDir = path15.dirname(resolved);
|
|
40620
43081
|
for (const source of rawData.imports) {
|
|
40621
43082
|
const results = await this.registry.import(source, {
|
|
40622
43083
|
basePath: configDir,
|
|
@@ -40646,8 +43107,8 @@ ${errors}`);
|
|
|
40646
43107
|
if (!steps || Object.keys(steps).length === 0) {
|
|
40647
43108
|
throw new Error(`Config '${resolved}' does not contain any steps to execute as a workflow`);
|
|
40648
43109
|
}
|
|
40649
|
-
const id =
|
|
40650
|
-
const name = loaded.name || `Workflow from ${
|
|
43110
|
+
const id = path15.basename(resolved).replace(/\.(ya?ml)$/i, "");
|
|
43111
|
+
const name = loaded.name || `Workflow from ${path15.basename(resolved)}`;
|
|
40651
43112
|
const workflowDef = {
|
|
40652
43113
|
id,
|
|
40653
43114
|
name,
|
|
@@ -40670,6 +43131,11 @@ ${errors}`);
|
|
|
40670
43131
|
export {
|
|
40671
43132
|
WorkflowCheckProvider,
|
|
40672
43133
|
init_workflow_check_provider,
|
|
43134
|
+
handleScheduleAction,
|
|
43135
|
+
getScheduleToolDefinition,
|
|
43136
|
+
isScheduleTool,
|
|
43137
|
+
buildScheduleToolContext,
|
|
43138
|
+
init_schedule_tool,
|
|
40673
43139
|
extractSlackContext,
|
|
40674
43140
|
createScheduleToolWithContext,
|
|
40675
43141
|
executeScheduleTool,
|
|
@@ -40690,4 +43156,4 @@ undici/lib/fetch/body.js:
|
|
|
40690
43156
|
undici/lib/websocket/frame.js:
|
|
40691
43157
|
(*! ws. MIT License. Einar Otto Stangvik <einaros@gmail.com> *)
|
|
40692
43158
|
*/
|
|
40693
|
-
//# sourceMappingURL=chunk-
|
|
43159
|
+
//# sourceMappingURL=chunk-H4AYMOAT.mjs.map
|