@runfusion/fusion 0.9.4 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin.js +1382 -557
- package/dist/client/assets/{AgentDetailView-5W1q48YS.js → AgentDetailView-BtpZ4jxh.js} +1 -1
- package/dist/client/assets/{AgentsView-DcEnemu0.js → AgentsView-Dxdtt0Bm.js} +3 -3
- package/dist/client/assets/ChatView-Bra9fNAG.js +1 -0
- package/dist/client/assets/{DevServerView-LOrDrAYm.js → DevServerView-UkgjEw9-.js} +1 -1
- package/dist/client/assets/{DirectoryPicker-Bgp6PCbu.js → DirectoryPicker-Cls4HWxP.js} +1 -1
- package/dist/client/assets/{DocumentsView-CNbnZ7Q3.js → DocumentsView-BRBUPFVA.js} +1 -1
- package/dist/client/assets/{InsightsView-CmJwV-ZC.js → InsightsView-BRDqHCLb.js} +1 -1
- package/dist/client/assets/{MemoryView-Bwi5p79s.js → MemoryView-DvTrwFnQ.js} +1 -1
- package/dist/client/assets/{NodesView-1pZii99I.js → NodesView-C4Ffl_o0.js} +1 -1
- package/dist/client/assets/{PiExtensionsManager-CokhM-MB.js → PiExtensionsManager-CeI1syeZ.js} +3 -3
- package/dist/client/assets/{PluginManager-cHaGKMgY.js → PluginManager-BgeoYhLk.js} +1 -1
- package/dist/client/assets/{ResearchView-CQDI2y7Q.js → ResearchView-fmEOm4A2.js} +1 -1
- package/dist/client/assets/{RoadmapsView-BTo3BT0I.js → RoadmapsView-DNb4x75S.js} +1 -1
- package/dist/client/assets/SettingsModal-CDPDmHhd.css +1 -0
- package/dist/client/assets/{SettingsModal-D5slUUsC.js → SettingsModal-CRMr4tL6.js} +1 -1
- package/dist/client/assets/SettingsModal-D1xq0WZm.js +31 -0
- package/dist/client/assets/{SetupWizardModal-1qSn8Yl0.js → SetupWizardModal-tF8B_aG_.js} +1 -1
- package/dist/client/assets/{SkillsView-CY3I5OYc.js → SkillsView-uyl47gSf.js} +1 -1
- package/dist/client/assets/{TodoView-B1GDwwhR.js → TodoView-CbzDtV53.js} +1 -1
- package/dist/client/assets/{folder-open-DPESt6bg.js → folder-open-DPpmGJ-v.js} +1 -1
- package/dist/client/assets/{index-2_pvFDiN.css → index-BqK6TvSa.css} +1 -1
- package/dist/client/assets/index-DyXZm9QN.js +656 -0
- package/dist/client/assets/{list-checks-D7D9kx7Y.js → list-checks-D62pw1I8.js} +1 -1
- package/dist/client/assets/{star-C59_6aNu.js → star-B8EbxNgI.js} +1 -1
- package/dist/client/assets/{upload-1I0eQddJ.js → upload-CpnLno9z.js} +1 -1
- package/dist/client/assets/{users-DH50eBCX.js → users-B_C_0qzA.js} +1 -1
- package/dist/client/index.html +2 -2
- package/dist/client/version.json +1 -1
- package/dist/extension.js +1025 -358
- package/dist/pi-claude-cli/index.ts +31 -5
- package/dist/pi-claude-cli/package.json +1 -1
- package/dist/pi-claude-cli/src/__tests__/process-manager.test.ts +90 -0
- package/dist/pi-claude-cli/src/__tests__/provider.test.ts +13 -3
- package/dist/pi-claude-cli/src/process-manager.ts +65 -0
- package/package.json +1 -1
- package/dist/client/assets/ChatView-CTc6mP8y.js +0 -1
- package/dist/client/assets/SettingsModal-EEQwF0Ql.js +0 -31
- package/dist/client/assets/SettingsModal-FfIAhzcJ.css +0 -1
- package/dist/client/assets/index-DNIrnlpO.js +0 -656
package/dist/extension.js
CHANGED
|
@@ -1382,6 +1382,26 @@ var init_agent_prompts = __esm({
|
|
|
1382
1382
|
|
|
1383
1383
|
You are working in a git worktree isolated from the main branch. Your job is to implement the task described in the PROMPT.md specification you're given.
|
|
1384
1384
|
|
|
1385
|
+
## Turn-ending rules \u2014 read carefully
|
|
1386
|
+
|
|
1387
|
+
You MUST end every turn by either:
|
|
1388
|
+
- (a) calling another tool to make progress, OR
|
|
1389
|
+
- (b) calling \`fn_task_done\` if the entire task is complete, OR
|
|
1390
|
+
- (c) calling \`fn_task_done\` with a summary explaining what is blocked, if you cannot make progress for any reason
|
|
1391
|
+
|
|
1392
|
+
You MUST NOT end a turn by writing prose that asks the user a question, summarizes progress, or requests permission to continue. The following are FORBIDDEN turn-endings:
|
|
1393
|
+
- "If you want, I can continue with..."
|
|
1394
|
+
- "Should I proceed with...?"
|
|
1395
|
+
- "Let me know if you'd like me to..."
|
|
1396
|
+
- "Ready to move on to step N. Want me to continue?"
|
|
1397
|
+
- Any markdown progress summary at the end of a turn instead of a tool call
|
|
1398
|
+
|
|
1399
|
+
If you have just finished a step's work, immediately call \`fn_task_update\` to mark the step done and continue with the next pending step in the SAME turn. Do not pause to summarize.
|
|
1400
|
+
|
|
1401
|
+
The user is not watching this conversation in real-time. They will read the final result. Asking permission wastes a full retry cycle and may orphan committed work.
|
|
1402
|
+
|
|
1403
|
+
If you genuinely cannot proceed (blocked on a dependency, missing information, or an unresolvable error), call \`fn_task_done\` with a clear explanation of what is blocked and what is needed to unblock it. Never write the question as plain prose.
|
|
1404
|
+
|
|
1385
1405
|
## How to work
|
|
1386
1406
|
1. Read the PROMPT.md carefully \u2014 it contains your mission, steps, file scope, and acceptance criteria
|
|
1387
1407
|
2. Work through each step in order
|
|
@@ -1527,7 +1547,17 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
1527
1547
|
- Keep fixing failures until lint, the configured/full test suite, and typecheck all pass
|
|
1528
1548
|
- If the repository exposes a typecheck command, run it and keep fixing failures until it passes
|
|
1529
1549
|
- Do not stop at "out of scope" if additional fixes are required to restore green lint, tests, build, or typecheck
|
|
1530
|
-
- **CRITICAL: Resolve ALL lint failures and test failures before completing the task, even if they appear unrelated or pre-existing.** Unrelated failures left unfixed accumulate technical debt and block future integrations. Investigate and fix or suppress them \u2014 do not defer them to a separate task
|
|
1550
|
+
- **CRITICAL: Resolve ALL lint failures and test failures before completing the task, even if they appear unrelated or pre-existing.** Unrelated failures left unfixed accumulate technical debt and block future integrations. Investigate and fix or suppress them \u2014 do not defer them to a separate task.
|
|
1551
|
+
|
|
1552
|
+
## Verification commands \u2014 use fn_run_verification
|
|
1553
|
+
|
|
1554
|
+
For ALL test/lint/build/typecheck verification, use the \`fn_run_verification\` tool, NOT raw bash.
|
|
1555
|
+
The tool prevents your session from being killed by the inactivity watchdog during long compiles.
|
|
1556
|
+
|
|
1557
|
+
- Prefer **package-scoped** verification first: e.g. \`pnpm --filter @fusion/<pkg> test\` with \`scope: "package"\`. This is faster and isolated.
|
|
1558
|
+
- Only run **workspace-scoped** verification (\`pnpm test\`, \`pnpm lint\`, \`pnpm build\` from root) at the FINAL integration step, when you are about to call \`task_done()\`.
|
|
1559
|
+
- If you need to run \`pnpm install\` (e.g. you added a new package), use \`fn_run_verification\` with \`scope: "workspace"\` and \`timeoutSec: 600\`.
|
|
1560
|
+
- If a verification command times out, do NOT blindly retry \u2014 investigate. Check for hung subprocesses, infinite test loops, or tests waiting on missing dependencies. Use \`node_modules/.modules.yaml\` presence to confirm bootstrap.`;
|
|
1531
1561
|
TRIAGE_PROMPT_TEXT = `You are a task specification agent for "fn", an AI-orchestrated task board.
|
|
1532
1562
|
|
|
1533
1563
|
Your job: take a rough task description and produce a fully specified PROMPT.md that another AI agent can execute autonomously in a fresh context with zero memory of this conversation.
|
|
@@ -18108,10 +18138,10 @@ var init_central_core = __esm({
|
|
|
18108
18138
|
*/
|
|
18109
18139
|
async generateProjectName(projectPath) {
|
|
18110
18140
|
try {
|
|
18111
|
-
const { execFile:
|
|
18141
|
+
const { execFile: execFile7 } = await import("node:child_process");
|
|
18112
18142
|
const { promisify: promisify13 } = await import("node:util");
|
|
18113
|
-
const
|
|
18114
|
-
const { stdout } = await
|
|
18143
|
+
const execFileAsync5 = promisify13(execFile7);
|
|
18144
|
+
const { stdout } = await execFileAsync5(
|
|
18115
18145
|
"git",
|
|
18116
18146
|
["remote", "get-url", "origin"],
|
|
18117
18147
|
{ cwd: projectPath, timeout: 5e3 }
|
|
@@ -18621,7 +18651,7 @@ __export(migration_exports, {
|
|
|
18621
18651
|
MigrationCoordinator: () => MigrationCoordinator,
|
|
18622
18652
|
ProjectRequiredError: () => ProjectRequiredError
|
|
18623
18653
|
});
|
|
18624
|
-
import { existsSync as existsSync9 } from "node:fs";
|
|
18654
|
+
import { existsSync as existsSync9, readFileSync as readFileSync2 } from "node:fs";
|
|
18625
18655
|
import { homedir as homedir2, tmpdir } from "node:os";
|
|
18626
18656
|
import { isAbsolute as isAbsolute3, join as join12, resolve as resolve5, basename as basename3, dirname as dirname4 } from "node:path";
|
|
18627
18657
|
function getHomeDir2() {
|
|
@@ -18758,10 +18788,10 @@ var init_migration = __esm({
|
|
|
18758
18788
|
return basename3(projectPath);
|
|
18759
18789
|
}
|
|
18760
18790
|
try {
|
|
18761
|
-
const { execFile:
|
|
18791
|
+
const { execFile: execFile7 } = await import("node:child_process");
|
|
18762
18792
|
const { promisify: promisify13 } = await import("node:util");
|
|
18763
|
-
const
|
|
18764
|
-
const { stdout } = await
|
|
18793
|
+
const execFileAsync5 = promisify13(execFile7);
|
|
18794
|
+
const { stdout } = await execFileAsync5(
|
|
18765
18795
|
"git",
|
|
18766
18796
|
["remote", "get-url", "origin"],
|
|
18767
18797
|
{ cwd: projectPath, timeout: 1e3 }
|
|
@@ -18773,8 +18803,26 @@ var init_migration = __esm({
|
|
|
18773
18803
|
}
|
|
18774
18804
|
} catch {
|
|
18775
18805
|
}
|
|
18806
|
+
const remoteFromConfig = this.readOriginRemoteFromGitConfig(projectPath);
|
|
18807
|
+
if (remoteFromConfig) {
|
|
18808
|
+
const name = this.extractRepoName(remoteFromConfig);
|
|
18809
|
+
if (name) return name;
|
|
18810
|
+
}
|
|
18776
18811
|
return basename3(projectPath);
|
|
18777
18812
|
}
|
|
18813
|
+
readOriginRemoteFromGitConfig(projectPath) {
|
|
18814
|
+
try {
|
|
18815
|
+
const gitConfig = readFileSync2(join12(projectPath, ".git", "config"), "utf8");
|
|
18816
|
+
const originSectionMatch = gitConfig.match(/\[remote\s+"origin"\]([\s\S]*?)(?:\n\[|$)/);
|
|
18817
|
+
if (!originSectionMatch) {
|
|
18818
|
+
return null;
|
|
18819
|
+
}
|
|
18820
|
+
const urlMatch = originSectionMatch[1]?.match(/^\s*url\s*=\s*(.+)$/m);
|
|
18821
|
+
return urlMatch?.[1]?.trim() || null;
|
|
18822
|
+
} catch {
|
|
18823
|
+
return null;
|
|
18824
|
+
}
|
|
18825
|
+
}
|
|
18778
18826
|
/**
|
|
18779
18827
|
* Extract repository name from git remote URL.
|
|
18780
18828
|
*
|
|
@@ -28227,8 +28275,8 @@ var require_CronFileParser = __commonJS({
|
|
|
28227
28275
|
* @throws If file cannot be read
|
|
28228
28276
|
*/
|
|
28229
28277
|
static parseFileSync(filePath) {
|
|
28230
|
-
const { readFileSync:
|
|
28231
|
-
const data =
|
|
28278
|
+
const { readFileSync: readFileSync10 } = __require("fs");
|
|
28279
|
+
const data = readFileSync10(filePath, "utf8");
|
|
28232
28280
|
return _CronFileParser.#parseContent(data);
|
|
28233
28281
|
}
|
|
28234
28282
|
/**
|
|
@@ -29491,13 +29539,13 @@ async function searchWithQmd(rootDir, options) {
|
|
|
29491
29539
|
const command = "qmd";
|
|
29492
29540
|
const limit = Math.max(1, Math.min(options.limit ?? 5, 20));
|
|
29493
29541
|
try {
|
|
29494
|
-
const { execFile:
|
|
29542
|
+
const { execFile: execFile7 } = await import("node:child_process");
|
|
29495
29543
|
const { promisify: promisify13 } = await import("node:util");
|
|
29496
|
-
const
|
|
29497
|
-
await ensureQmdProjectMemoryCollection(rootDir,
|
|
29544
|
+
const execFileAsync5 = promisify13(execFile7);
|
|
29545
|
+
await ensureQmdProjectMemoryCollection(rootDir, execFileAsync5);
|
|
29498
29546
|
scheduleQmdProjectMemoryRefresh(rootDir);
|
|
29499
29547
|
const args = buildQmdSearchArgs(rootDir, options);
|
|
29500
|
-
const { stdout } = await
|
|
29548
|
+
const { stdout } = await execFileAsync5(command, args, {
|
|
29501
29549
|
cwd: rootDir,
|
|
29502
29550
|
timeout: 4e3,
|
|
29503
29551
|
maxBuffer: 1024 * 1024
|
|
@@ -29522,12 +29570,12 @@ async function searchWithQmd(rootDir, options) {
|
|
|
29522
29570
|
return [];
|
|
29523
29571
|
}
|
|
29524
29572
|
}
|
|
29525
|
-
async function ensureQmdProjectMemoryCollection(rootDir,
|
|
29573
|
+
async function ensureQmdProjectMemoryCollection(rootDir, execFileAsync5) {
|
|
29526
29574
|
const collectionName = qmdMemoryCollectionName(rootDir);
|
|
29527
29575
|
const memoryDir = memoryWorkspacePath(rootDir);
|
|
29528
29576
|
await mkdir6(memoryDir, { recursive: true });
|
|
29529
29577
|
try {
|
|
29530
|
-
await
|
|
29578
|
+
await execFileAsync5("qmd", buildQmdCollectionAddArgs(rootDir), {
|
|
29531
29579
|
cwd: rootDir,
|
|
29532
29580
|
timeout: 4e3,
|
|
29533
29581
|
maxBuffer: 512 * 1024
|
|
@@ -29543,9 +29591,9 @@ ${stderr}`)) {
|
|
|
29543
29591
|
return collectionName;
|
|
29544
29592
|
}
|
|
29545
29593
|
async function getDefaultExecFileAsync() {
|
|
29546
|
-
const { execFile:
|
|
29594
|
+
const { execFile: execFile7 } = await import("node:child_process");
|
|
29547
29595
|
const { promisify: promisify13 } = await import("node:util");
|
|
29548
|
-
return promisify13(
|
|
29596
|
+
return promisify13(execFile7);
|
|
29549
29597
|
}
|
|
29550
29598
|
async function refreshQmdProjectMemoryIndex(rootDir, options) {
|
|
29551
29599
|
const key = resolve6(rootDir);
|
|
@@ -29560,14 +29608,14 @@ async function refreshQmdProjectMemoryIndex(rootDir, options) {
|
|
|
29560
29608
|
}
|
|
29561
29609
|
}
|
|
29562
29610
|
const promise = (async () => {
|
|
29563
|
-
const
|
|
29564
|
-
await ensureQmdProjectMemoryCollection(rootDir,
|
|
29565
|
-
await
|
|
29611
|
+
const execFileAsync5 = options?.execFileAsync ?? await getDefaultExecFileAsync();
|
|
29612
|
+
await ensureQmdProjectMemoryCollection(rootDir, execFileAsync5);
|
|
29613
|
+
await execFileAsync5("qmd", ["update"], {
|
|
29566
29614
|
cwd: rootDir,
|
|
29567
29615
|
timeout: 3e4,
|
|
29568
29616
|
maxBuffer: 1024 * 1024
|
|
29569
29617
|
});
|
|
29570
|
-
await
|
|
29618
|
+
await execFileAsync5("qmd", ["embed"], {
|
|
29571
29619
|
cwd: rootDir,
|
|
29572
29620
|
timeout: 12e4,
|
|
29573
29621
|
maxBuffer: 1024 * 1024
|
|
@@ -29592,8 +29640,8 @@ function scheduleQmdProjectMemoryRefresh(rootDir) {
|
|
|
29592
29640
|
}
|
|
29593
29641
|
async function isQmdAvailable() {
|
|
29594
29642
|
try {
|
|
29595
|
-
const
|
|
29596
|
-
await
|
|
29643
|
+
const execFileAsync5 = await getDefaultExecFileAsync();
|
|
29644
|
+
await execFileAsync5("qmd", ["--help"], {
|
|
29597
29645
|
timeout: 3e3,
|
|
29598
29646
|
maxBuffer: 128 * 1024
|
|
29599
29647
|
});
|
|
@@ -29603,12 +29651,12 @@ async function isQmdAvailable() {
|
|
|
29603
29651
|
}
|
|
29604
29652
|
}
|
|
29605
29653
|
async function installQmd(options) {
|
|
29606
|
-
const
|
|
29654
|
+
const execFileAsync5 = options?.execFileAsync ?? await getDefaultExecFileAsync();
|
|
29607
29655
|
const [command, ...args] = QMD_INSTALL_COMMAND.split(" ");
|
|
29608
29656
|
if (!command || args.length === 0) {
|
|
29609
29657
|
throw new MemoryBackendError("BACKEND_UNAVAILABLE", "qmd install command is not configured", "qmd");
|
|
29610
29658
|
}
|
|
29611
|
-
await
|
|
29659
|
+
await execFileAsync5(command, args, {
|
|
29612
29660
|
timeout: 12e4,
|
|
29613
29661
|
maxBuffer: 1024 * 1024
|
|
29614
29662
|
});
|
|
@@ -30403,8 +30451,8 @@ function assertSafeGitBranchName(name) {
|
|
|
30403
30451
|
}
|
|
30404
30452
|
}
|
|
30405
30453
|
function assertSafeAbsolutePath(path2) {
|
|
30406
|
-
const
|
|
30407
|
-
if (!path2 || path2.length > 4096 || !
|
|
30454
|
+
const isAbsolute14 = path2.startsWith("/") || /^[A-Za-z]:[\\/]/.test(path2);
|
|
30455
|
+
if (!path2 || path2.length > 4096 || !isAbsolute14 || path2.startsWith("-") || // Reject shell metacharacters, quotes, control chars, and NULs.
|
|
30408
30456
|
/["'`$\n\r\t;&|<>()*?[\]{}\\\0]/.test(
|
|
30409
30457
|
path2.replace(/^[A-Za-z]:/, "")
|
|
30410
30458
|
// ignore the drive-letter colon on Windows
|
|
@@ -32529,7 +32577,7 @@ ${newTask.description}
|
|
|
32529
32577
|
return dependency?.column === "done" || dependency?.column === "archived";
|
|
32530
32578
|
});
|
|
32531
32579
|
}
|
|
32532
|
-
async moveTask(id, toColumn) {
|
|
32580
|
+
async moveTask(id, toColumn, options) {
|
|
32533
32581
|
return this.withTaskLock(id, async () => {
|
|
32534
32582
|
const dir = this.taskDir(id);
|
|
32535
32583
|
let task;
|
|
@@ -32580,12 +32628,16 @@ ${newTask.description}
|
|
|
32580
32628
|
if (isReopenToTodoOrTriage) {
|
|
32581
32629
|
task.status = void 0;
|
|
32582
32630
|
task.error = void 0;
|
|
32583
|
-
task.worktree = void 0;
|
|
32584
32631
|
task.blockedBy = void 0;
|
|
32585
|
-
|
|
32586
|
-
|
|
32587
|
-
|
|
32588
|
-
|
|
32632
|
+
if (!options?.preserveResumeState) {
|
|
32633
|
+
task.worktree = void 0;
|
|
32634
|
+
task.executionStartedAt = void 0;
|
|
32635
|
+
task.executionCompletedAt = void 0;
|
|
32636
|
+
this.resetAllStepsToPending(task);
|
|
32637
|
+
await this.resetPromptCheckboxes(dir);
|
|
32638
|
+
} else {
|
|
32639
|
+
task.executionCompletedAt = void 0;
|
|
32640
|
+
}
|
|
32589
32641
|
}
|
|
32590
32642
|
if (toColumn === "in-review") {
|
|
32591
32643
|
task.recoveryRetryCount = void 0;
|
|
@@ -32984,6 +33036,19 @@ ${task.description}
|
|
|
32984
33036
|
`Step ${stepIndex} out of range (task has ${task.steps.length} steps)`
|
|
32985
33037
|
);
|
|
32986
33038
|
}
|
|
33039
|
+
const currentStatus = task.steps[stepIndex].status;
|
|
33040
|
+
if (status === "in-progress" && (currentStatus === "done" || currentStatus === "skipped")) {
|
|
33041
|
+
const ts = (/* @__PURE__ */ new Date()).toISOString();
|
|
33042
|
+
task.updatedAt = ts;
|
|
33043
|
+
task.log.push({
|
|
33044
|
+
timestamp: ts,
|
|
33045
|
+
action: `Ignored ${currentStatus}\u2192in-progress regression for step ${stepIndex} (${task.steps[stepIndex].name})`
|
|
33046
|
+
});
|
|
33047
|
+
await this.atomicWriteTaskJson(dir, task);
|
|
33048
|
+
if (this.isWatching) this.taskCache.set(id, { ...task });
|
|
33049
|
+
this.emit("task:updated", task);
|
|
33050
|
+
return task;
|
|
33051
|
+
}
|
|
32987
33052
|
task.steps[stepIndex].status = status;
|
|
32988
33053
|
task.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
32989
33054
|
if (status === "done") {
|
|
@@ -35398,7 +35463,7 @@ var init_daemon_token = __esm({
|
|
|
35398
35463
|
});
|
|
35399
35464
|
|
|
35400
35465
|
// ../core/src/pi-extensions.ts
|
|
35401
|
-
import { existsSync as existsSync14, mkdirSync as mkdirSync5, readdirSync, readFileSync as
|
|
35466
|
+
import { existsSync as existsSync14, mkdirSync as mkdirSync5, readdirSync, readFileSync as readFileSync3, statSync as statSync3, writeFileSync } from "node:fs";
|
|
35402
35467
|
import { homedir as homedir3 } from "node:os";
|
|
35403
35468
|
import { basename as basename5, isAbsolute as isAbsolute5, join as join17, relative as relative2, resolve as resolve7, sep as sep4, win32 } from "node:path";
|
|
35404
35469
|
function getHomeDir3(home) {
|
|
@@ -35443,7 +35508,7 @@ function extensionName(extensionPath) {
|
|
|
35443
35508
|
}
|
|
35444
35509
|
function readPiManifest(packageJsonPath) {
|
|
35445
35510
|
try {
|
|
35446
|
-
const parsed = JSON.parse(
|
|
35511
|
+
const parsed = JSON.parse(readFileSync3(packageJsonPath, "utf-8"));
|
|
35447
35512
|
if (parsed.pi && Array.isArray(parsed.pi.extensions)) {
|
|
35448
35513
|
return { extensions: parsed.pi.extensions.filter((entry) => typeof entry === "string") };
|
|
35449
35514
|
}
|
|
@@ -35523,7 +35588,7 @@ function getPiExtensionDiscoveryDirs(cwd, home) {
|
|
|
35523
35588
|
}
|
|
35524
35589
|
function readFusionDisabledExtensions(settingsPath) {
|
|
35525
35590
|
try {
|
|
35526
|
-
const parsed = JSON.parse(
|
|
35591
|
+
const parsed = JSON.parse(readFileSync3(settingsPath, "utf-8"));
|
|
35527
35592
|
const disabled = parsed[FUSION_DISABLED_EXTENSIONS_KEY];
|
|
35528
35593
|
return Array.isArray(disabled) ? disabled.filter((entry) => typeof entry === "string").map((entry) => resolve7(entry)) : [];
|
|
35529
35594
|
} catch {
|
|
@@ -35553,7 +35618,7 @@ function updatePiExtensionDisabledIds(cwd, disabledIds, home, extraKnownIds = []
|
|
|
35553
35618
|
const settingsPath = getFusionAgentSettingsPath(home);
|
|
35554
35619
|
const existing = (() => {
|
|
35555
35620
|
try {
|
|
35556
|
-
return JSON.parse(
|
|
35621
|
+
return JSON.parse(readFileSync3(settingsPath, "utf-8"));
|
|
35557
35622
|
} catch {
|
|
35558
35623
|
return {};
|
|
35559
35624
|
}
|
|
@@ -43690,8 +43755,8 @@ var require_YAMLMap = __commonJS({
|
|
|
43690
43755
|
* @param {Class} Type - If set, forces the returned collection type
|
|
43691
43756
|
* @returns Instance of Type, Map, or Object
|
|
43692
43757
|
*/
|
|
43693
|
-
toJSON(_, ctx,
|
|
43694
|
-
const map =
|
|
43758
|
+
toJSON(_, ctx, Type9) {
|
|
43759
|
+
const map = Type9 ? new Type9() : ctx?.mapAsMap ? /* @__PURE__ */ new Map() : {};
|
|
43695
43760
|
if (ctx?.onCreate)
|
|
43696
43761
|
ctx.onCreate(map);
|
|
43697
43762
|
for (const item of this.items)
|
|
@@ -48914,7 +48979,7 @@ var require_dist3 = __commonJS({
|
|
|
48914
48979
|
});
|
|
48915
48980
|
|
|
48916
48981
|
// ../core/src/agent-companies-parser.ts
|
|
48917
|
-
import { existsSync as existsSync17, mkdtempSync, readdirSync as readdirSync2, readFileSync as
|
|
48982
|
+
import { existsSync as existsSync17, mkdtempSync, readdirSync as readdirSync2, readFileSync as readFileSync4, rmSync, statSync as statSync4 } from "node:fs";
|
|
48918
48983
|
import { tmpdir as tmpdir2 } from "node:os";
|
|
48919
48984
|
import { join as join20, resolve as resolve9 } from "node:path";
|
|
48920
48985
|
function slugifyAgentReference(value) {
|
|
@@ -49132,7 +49197,7 @@ function parseSkillManifest(content) {
|
|
|
49132
49197
|
}
|
|
49133
49198
|
function parseManifestFile(filePath, parser) {
|
|
49134
49199
|
try {
|
|
49135
|
-
return parser(
|
|
49200
|
+
return parser(readFileSync4(filePath, "utf-8"));
|
|
49136
49201
|
} catch (error) {
|
|
49137
49202
|
if (error instanceof AgentCompaniesParseError) {
|
|
49138
49203
|
throw new AgentCompaniesParseError(`${filePath}: ${error.message}`);
|
|
@@ -49229,12 +49294,12 @@ function resolveExtractionRoot(tempDir) {
|
|
|
49229
49294
|
return tempDir;
|
|
49230
49295
|
}
|
|
49231
49296
|
async function extractTarArchive(archivePath, outputDir) {
|
|
49232
|
-
const [{ execFile:
|
|
49297
|
+
const [{ execFile: execFile7 }, { promisify: promisify13 }] = await Promise.all([
|
|
49233
49298
|
import("node:child_process"),
|
|
49234
49299
|
import("node:util")
|
|
49235
49300
|
]);
|
|
49236
|
-
const
|
|
49237
|
-
await
|
|
49301
|
+
const execFileAsync5 = promisify13(execFile7);
|
|
49302
|
+
await execFileAsync5("tar", ["xzf", archivePath, "-C", outputDir]);
|
|
49238
49303
|
}
|
|
49239
49304
|
async function parseCompanyArchive(archivePath) {
|
|
49240
49305
|
const resolvedArchivePath = resolve9(archivePath);
|
|
@@ -50846,11 +50911,11 @@ async function refreshAgentMemoryQmdIndex(rootDir, agentMemory) {
|
|
|
50846
50911
|
return;
|
|
50847
50912
|
}
|
|
50848
50913
|
const promise = (async () => {
|
|
50849
|
-
const { execFile:
|
|
50914
|
+
const { execFile: execFile7 } = await import("node:child_process");
|
|
50850
50915
|
const { promisify: promisify13 } = await import("node:util");
|
|
50851
|
-
const
|
|
50916
|
+
const execFileAsync5 = promisify13(execFile7);
|
|
50852
50917
|
try {
|
|
50853
|
-
await
|
|
50918
|
+
await execFileAsync5("qmd", buildQmdAgentMemoryCollectionAddArgs(rootDir, agentMemory.agentId), {
|
|
50854
50919
|
cwd: rootDir,
|
|
50855
50920
|
timeout: 4e3,
|
|
50856
50921
|
maxBuffer: 512 * 1024
|
|
@@ -50863,8 +50928,8 @@ ${stderr}`)) {
|
|
|
50863
50928
|
throw error;
|
|
50864
50929
|
}
|
|
50865
50930
|
}
|
|
50866
|
-
await
|
|
50867
|
-
await
|
|
50931
|
+
await execFileAsync5("qmd", ["update"], { cwd: rootDir, timeout: 3e4, maxBuffer: 1024 * 1024 });
|
|
50932
|
+
await execFileAsync5("qmd", ["embed"], { cwd: rootDir, timeout: 12e4, maxBuffer: 1024 * 1024 });
|
|
50868
50933
|
})();
|
|
50869
50934
|
agentQmdRefreshState.set(key, { lastStartedAt: now, inFlight: promise });
|
|
50870
50935
|
try {
|
|
@@ -50885,10 +50950,10 @@ async function searchAgentMemoryWithQmd(rootDir, agentMemory, query, limit) {
|
|
|
50885
50950
|
}
|
|
50886
50951
|
try {
|
|
50887
50952
|
await refreshAgentMemoryQmdIndex(rootDir, agentMemory);
|
|
50888
|
-
const { execFile:
|
|
50953
|
+
const { execFile: execFile7 } = await import("node:child_process");
|
|
50889
50954
|
const { promisify: promisify13 } = await import("node:util");
|
|
50890
|
-
const
|
|
50891
|
-
const { stdout } = await
|
|
50955
|
+
const execFileAsync5 = promisify13(execFile7);
|
|
50956
|
+
const { stdout } = await execFileAsync5("qmd", buildQmdAgentMemorySearchArgs(rootDir, agentMemory.agentId, query, limit), {
|
|
50892
50957
|
cwd: rootDir,
|
|
50893
50958
|
timeout: 4e3,
|
|
50894
50959
|
maxBuffer: 1024 * 1024
|
|
@@ -51702,14 +51767,14 @@ var init_concurrency = __esm({
|
|
|
51702
51767
|
});
|
|
51703
51768
|
|
|
51704
51769
|
// ../engine/src/skill-resolver.ts
|
|
51705
|
-
import { existsSync as existsSync19, readFileSync as
|
|
51770
|
+
import { existsSync as existsSync19, readFileSync as readFileSync5 } from "node:fs";
|
|
51706
51771
|
import { join as join23 } from "node:path";
|
|
51707
51772
|
function readJsonObject(path2) {
|
|
51708
51773
|
if (!existsSync19(path2)) {
|
|
51709
51774
|
return {};
|
|
51710
51775
|
}
|
|
51711
51776
|
try {
|
|
51712
|
-
const parsed = JSON.parse(
|
|
51777
|
+
const parsed = JSON.parse(readFileSync5(path2, "utf-8"));
|
|
51713
51778
|
return parsed && typeof parsed === "object" ? parsed : {};
|
|
51714
51779
|
} catch {
|
|
51715
51780
|
return {};
|
|
@@ -51938,7 +52003,7 @@ var init_context_limit_detector = __esm({
|
|
|
51938
52003
|
});
|
|
51939
52004
|
|
|
51940
52005
|
// ../engine/src/auth-storage.ts
|
|
51941
|
-
import { existsSync as existsSync20, readFileSync as
|
|
52006
|
+
import { existsSync as existsSync20, readFileSync as readFileSync6 } from "node:fs";
|
|
51942
52007
|
import { homedir as homedir4 } from "node:os";
|
|
51943
52008
|
import { join as join24 } from "node:path";
|
|
51944
52009
|
import { AuthStorage } from "@mariozechner/pi-coding-agent";
|
|
@@ -51978,7 +52043,7 @@ function readLegacyCredentials(authPaths = getLegacyAuthPaths()) {
|
|
|
51978
52043
|
continue;
|
|
51979
52044
|
}
|
|
51980
52045
|
try {
|
|
51981
|
-
const parsed = JSON.parse(
|
|
52046
|
+
const parsed = JSON.parse(readFileSync6(authPath, "utf-8"));
|
|
51982
52047
|
for (const [provider, credential] of Object.entries(parsed)) {
|
|
51983
52048
|
credentials[provider] ??= credential;
|
|
51984
52049
|
}
|
|
@@ -52050,13 +52115,13 @@ var init_auth_storage = __esm({
|
|
|
52050
52115
|
});
|
|
52051
52116
|
|
|
52052
52117
|
// ../engine/src/custom-providers.ts
|
|
52053
|
-
import { readFileSync as
|
|
52118
|
+
import { readFileSync as readFileSync7 } from "node:fs";
|
|
52054
52119
|
import { homedir as homedir5 } from "node:os";
|
|
52055
52120
|
import { join as join25 } from "node:path";
|
|
52056
52121
|
function readCustomProviders() {
|
|
52057
52122
|
try {
|
|
52058
52123
|
const settingsPath = join25(homedir5(), ".fusion", "settings.json");
|
|
52059
|
-
const raw =
|
|
52124
|
+
const raw = readFileSync7(settingsPath, "utf-8");
|
|
52060
52125
|
const parsed = JSON.parse(raw);
|
|
52061
52126
|
return Array.isArray(parsed.customProviders) ? parsed.customProviders : [];
|
|
52062
52127
|
} catch {
|
|
@@ -52070,7 +52135,7 @@ var init_custom_providers = __esm({
|
|
|
52070
52135
|
});
|
|
52071
52136
|
|
|
52072
52137
|
// ../engine/src/pi.ts
|
|
52073
|
-
import { existsSync as existsSync21, readFileSync as
|
|
52138
|
+
import { existsSync as existsSync21, readFileSync as readFileSync8 } from "node:fs";
|
|
52074
52139
|
import { exec } from "node:child_process";
|
|
52075
52140
|
import { promisify as promisify2 } from "node:util";
|
|
52076
52141
|
import { createRequire as createRequire2 } from "node:module";
|
|
@@ -52332,7 +52397,7 @@ function readJsonObject2(path2) {
|
|
|
52332
52397
|
return {};
|
|
52333
52398
|
}
|
|
52334
52399
|
try {
|
|
52335
|
-
const parsed = JSON.parse(
|
|
52400
|
+
const parsed = JSON.parse(readFileSync8(path2, "utf-8"));
|
|
52336
52401
|
return parsed && typeof parsed === "object" ? parsed : {};
|
|
52337
52402
|
} catch {
|
|
52338
52403
|
return {};
|
|
@@ -52504,7 +52569,7 @@ function resolveVendoredClaudeCliEntry() {
|
|
|
52504
52569
|
try {
|
|
52505
52570
|
const require_ = createRequire2(import.meta.url);
|
|
52506
52571
|
const pkgJsonPath = require_.resolve("@fusion/pi-claude-cli/package.json");
|
|
52507
|
-
const pkgJson = JSON.parse(
|
|
52572
|
+
const pkgJson = JSON.parse(readFileSync8(pkgJsonPath, "utf-8"));
|
|
52508
52573
|
const extensions = pkgJson.pi?.extensions;
|
|
52509
52574
|
if (!Array.isArray(extensions) || extensions.length === 0) return null;
|
|
52510
52575
|
const entry = extensions[0];
|
|
@@ -54327,9 +54392,9 @@ async function readAttachmentContents(rootDir, taskId, attachments) {
|
|
|
54327
54392
|
return { attachmentContents, imageContents };
|
|
54328
54393
|
}
|
|
54329
54394
|
const { readFile: readFile19 } = await import("node:fs/promises");
|
|
54330
|
-
const { join:
|
|
54395
|
+
const { join: join42 } = await import("node:path");
|
|
54331
54396
|
for (const att of attachments) {
|
|
54332
|
-
const filePath =
|
|
54397
|
+
const filePath = join42(
|
|
54333
54398
|
rootDir,
|
|
54334
54399
|
".fusion",
|
|
54335
54400
|
"tasks",
|
|
@@ -55867,9 +55932,9 @@ Remove or replace these ids and call fn_task_create again.`
|
|
|
55867
55932
|
}
|
|
55868
55933
|
try {
|
|
55869
55934
|
const { readFile: readFile19 } = await import("node:fs/promises");
|
|
55870
|
-
const { join:
|
|
55935
|
+
const { join: join42 } = await import("node:path");
|
|
55871
55936
|
const promptContent = await readFile19(
|
|
55872
|
-
|
|
55937
|
+
join42(rootDir, promptPath),
|
|
55873
55938
|
"utf-8"
|
|
55874
55939
|
).catch((err) => {
|
|
55875
55940
|
const msg = err instanceof Error ? err.message : String(err);
|
|
@@ -60079,6 +60144,14 @@ import { exec as exec3 } from "node:child_process";
|
|
|
60079
60144
|
import { promisify as promisify4 } from "node:util";
|
|
60080
60145
|
import { existsSync as existsSync24, lstatSync, readdirSync as readdirSync4, rmSync as rmSync2 } from "node:fs";
|
|
60081
60146
|
import { join as join30, relative as relative5, resolve as resolve13, isAbsolute as isAbsolute9 } from "node:path";
|
|
60147
|
+
function getExecStdout(result) {
|
|
60148
|
+
if (typeof result === "string") return result;
|
|
60149
|
+
if (result && typeof result === "object" && "stdout" in result) {
|
|
60150
|
+
const stdout = result.stdout;
|
|
60151
|
+
return typeof stdout === "string" ? stdout : String(stdout ?? "");
|
|
60152
|
+
}
|
|
60153
|
+
return "";
|
|
60154
|
+
}
|
|
60082
60155
|
async function isGitRepository(dir) {
|
|
60083
60156
|
try {
|
|
60084
60157
|
await execAsync3("git rev-parse --git-dir", {
|
|
@@ -60094,10 +60167,11 @@ async function isGitRepository(dir) {
|
|
|
60094
60167
|
}
|
|
60095
60168
|
async function getRegisteredWorktreePaths(rootDir) {
|
|
60096
60169
|
try {
|
|
60097
|
-
const
|
|
60170
|
+
const result = await execAsync3("git worktree list --porcelain", {
|
|
60098
60171
|
cwd: rootDir,
|
|
60099
60172
|
encoding: "utf-8"
|
|
60100
60173
|
});
|
|
60174
|
+
const stdout = getExecStdout(result);
|
|
60101
60175
|
const paths = /* @__PURE__ */ new Set();
|
|
60102
60176
|
for (const line of stdout.split("\n")) {
|
|
60103
60177
|
if (line.startsWith("worktree ")) {
|
|
@@ -60249,10 +60323,11 @@ async function reapOrphanWorktrees(projectRoot) {
|
|
|
60249
60323
|
async function scanOrphanedBranches(rootDir, store) {
|
|
60250
60324
|
let allBranches;
|
|
60251
60325
|
try {
|
|
60252
|
-
const
|
|
60326
|
+
const result = await execAsync3("git branch --list 'fusion/*'", {
|
|
60253
60327
|
cwd: rootDir,
|
|
60254
60328
|
encoding: "utf-8"
|
|
60255
60329
|
});
|
|
60330
|
+
const stdout = getExecStdout(result);
|
|
60256
60331
|
allBranches = stdout.split("\n").map((line) => line.trim().replace(/^\*?\s*/, "")).filter((line) => line.startsWith("fusion/"));
|
|
60257
60332
|
} catch (err) {
|
|
60258
60333
|
const errorMessage = err instanceof Error ? err.message : String(err);
|
|
@@ -61388,13 +61463,304 @@ var init_task_completion = __esm({
|
|
|
61388
61463
|
}
|
|
61389
61464
|
});
|
|
61390
61465
|
|
|
61466
|
+
// ../engine/src/run-verification-tool.ts
|
|
61467
|
+
import { spawn as spawn3 } from "node:child_process";
|
|
61468
|
+
import { existsSync as existsSync26 } from "node:fs";
|
|
61469
|
+
import { isAbsolute as isAbsolute10, join as join33 } from "node:path";
|
|
61470
|
+
import { Type as Type4 } from "@mariozechner/pi-ai";
|
|
61471
|
+
/**
 * Accumulate an output chunk into a head+tail buffer `{ head, tail, totalBytes }`.
 * The first MAX_OUTPUT_BYTES go into `head` verbatim; once the total exceeds the
 * cap, further chunks accumulate in `tail`, which is trimmed (from the front) to
 * at most MAX_OUTPUT_BYTES / 2 bytes. Mutates `buf` in place.
 */
function appendToBuffer(buf, chunk) {
  buf.totalBytes += Buffer.byteLength(chunk, "utf8");
  const stillFitsInHead = buf.totalBytes <= MAX_OUTPUT_BYTES;
  if (stillFitsInHead) {
    buf.head += chunk;
    return;
  }
  // Over the cap: append to the tail, then clamp it to the last tailCap bytes.
  const tailCap = MAX_OUTPUT_BYTES / 2;
  buf.tail += chunk;
  const tailByteLength = Buffer.byteLength(buf.tail, "utf8");
  if (tailByteLength > tailCap) {
    const raw = Buffer.from(buf.tail, "utf8");
    buf.tail = raw.subarray(raw.length - tailCap).toString("utf8");
  }
}
|
|
61485
|
+
/**
 * Flatten a head+tail buffer into a single string. When nothing overflowed into
 * the tail, the head is the complete output; otherwise a truncation marker that
 * reports the true total byte count is inserted between head and tail.
 */
function flattenBuffer(buf) {
  if (buf.tail.length === 0) {
    return buf.head;
  }
  const marker = `\n\n[... output truncated \u2014 ${buf.totalBytes} bytes total, showing head + tail ...]\n\n`;
  return buf.head + marker + buf.tail;
}
|
|
61493
|
+
/**
 * Spawn a shell command and capture its output with three protections:
 *  - head+tail buffering (appendToBuffer/flattenBuffer) so huge logs cannot
 *    exhaust memory,
 *  - a "quiet" interval timer that logs and fires onHeartbeat while the
 *    command produces no output,
 *  - a hard timeout that escalates SIGTERM -> SIGKILL after SIGKILL_GRACE_MS.
 *
 * @param {object} opts
 * @param {string} opts.command - Shell command line (run with shell: true).
 * @param {string} opts.cwd - Working directory for the child process.
 * @param {number} opts.timeoutMs - Hard timeout before SIGTERM is sent.
 * @param {boolean} [opts.expectFailure=false] - When true, the result's
 *   `success` is true regardless of exit code (see the close handler).
 * @param {Function} opts.onHeartbeat - Called on every complete output line and
 *   on quiet-timer ticks, to keep an external inactivity watchdog alive.
 * @param {Function} [opts.onLine] - Optional per-line callback; receives each
 *   line with its trailing newline.
 * @returns {Promise<object>} Resolves (never rejects — spawn errors resolve
 *   with success: false) to { success, exitCode, durationMs, stdout, stderr,
 *   timedOut, killed, command, cwd, warnings }.
 */
async function runVerificationCommand2(opts) {
  const { command, cwd, timeoutMs, expectFailure = false, onHeartbeat, onLine } = opts;
  const startMs = Date.now();
  const warnings = [];
  // Separate head+tail accumulators for each stream.
  const stdoutBuf = { head: "", tail: "", totalBytes: 0 };
  const stderrBuf = { head: "", tail: "", totalBytes: 0 };
  return new Promise((resolve19) => {
    const child = spawn3(command, {
      cwd,
      stdio: ["ignore", "pipe", "pipe"],
      env: { ...process.env },
      shell: true
    });
    let timedOut = false;
    let killed = false;
    // Guards against double-settling between close, error, and timeout paths.
    let settled = false;
    let lastLineMs = Date.now();
    // While the command is silent, periodically log and ping the heartbeat so
    // a long quiet compile is visibly still alive.
    const quietTimer = setInterval(() => {
      const silenceMs = Date.now() - lastLineMs;
      if (silenceMs >= QUIET_HEARTBEAT_INTERVAL_MS) {
        executorLog.log(
          `[fn_run_verification] command quiet for ${Math.round(silenceMs / 1e3)}s, still running... (${command})`
        );
        onHeartbeat();
      }
    }, QUIET_HEARTBEAT_INTERVAL_MS);
    // Hard timeout: SIGTERM first; if the child has still not settled after the
    // grace period, escalate to SIGKILL.
    const hardTimer = setTimeout(() => {
      if (settled) return;
      timedOut = true;
      executorLog.warn(
        `[fn_run_verification] hard timeout (${timeoutMs / 1e3}s) \u2014 sending SIGTERM to: ${command}`
      );
      child.kill("SIGTERM");
      // NOTE(review): this inner timer is never cleared; harmless because it
      // checks `settled`, but it may keep the loop alive up to SIGKILL_GRACE_MS.
      setTimeout(() => {
        if (!settled) {
          executorLog.warn(
            `[fn_run_verification] SIGTERM ignored \u2014 sending SIGKILL to: ${command}`
          );
          child.kill("SIGKILL");
          killed = true;
        }
      }, SIGKILL_GRACE_MS);
    }, timeoutMs);
    // Line-split stdout; the trailing partial line is carried in the remainder
    // until the next chunk (or flushed on close).
    let stdoutRemainder = "";
    child.stdout.on("data", (chunk) => {
      const text = stdoutRemainder + chunk.toString("utf8");
      const lines = text.split("\n");
      stdoutRemainder = lines.pop() ?? "";
      for (const line of lines) {
        const lineWithNewline = line + "\n";
        appendToBuffer(stdoutBuf, lineWithNewline);
        lastLineMs = Date.now();
        onHeartbeat();
        onLine?.(lineWithNewline);
      }
    });
    // Same line-splitting for stderr.
    let stderrRemainder = "";
    child.stderr.on("data", (chunk) => {
      const text = stderrRemainder + chunk.toString("utf8");
      const lines = text.split("\n");
      stderrRemainder = lines.pop() ?? "";
      for (const line of lines) {
        const lineWithNewline = line + "\n";
        appendToBuffer(stderrBuf, lineWithNewline);
        lastLineMs = Date.now();
        onHeartbeat();
        onLine?.(lineWithNewline);
      }
    });
    child.on("close", (code, signal) => {
      if (settled) return;
      settled = true;
      clearInterval(quietTimer);
      clearTimeout(hardTimer);
      // Flush any trailing partial lines into the buffers.
      if (stdoutRemainder) appendToBuffer(stdoutBuf, stdoutRemainder);
      if (stderrRemainder) appendToBuffer(stderrBuf, stderrRemainder);
      const exitCode = code ?? null;
      const durationMs = Date.now() - startMs;
      const zeroExit = exitCode === 0;
      // With expectFailure, the run is reported as successful regardless of
      // exit code (including exit 0).
      const success = expectFailure ? true : zeroExit;
      if (!success && !timedOut) {
        executorLog.warn(
          `[fn_run_verification] command failed (exit=${exitCode}, signal=${signal ?? "none"}): ${command}`
        );
      }
      resolve19({
        success,
        exitCode,
        durationMs,
        stdout: flattenBuffer(stdoutBuf),
        stderr: flattenBuffer(stderrBuf),
        timedOut,
        killed,
        command,
        cwd,
        warnings
      });
    });
    // Spawn failures (e.g. shell not found) resolve with success: false rather
    // than rejecting; the error text is appended to stderr and to warnings.
    child.on("error", (err) => {
      if (settled) return;
      settled = true;
      clearInterval(quietTimer);
      clearTimeout(hardTimer);
      const durationMs = Date.now() - startMs;
      warnings.push(`Spawn error: ${err.message}`);
      resolve19({
        success: false,
        exitCode: null,
        durationMs,
        stdout: flattenBuffer(stdoutBuf),
        stderr: flattenBuffer(stderrBuf) + `
Spawn error: ${err.message}`,
        timedOut: false,
        killed: false,
        command,
        cwd,
        warnings
      });
    });
  });
}
|
|
61614
|
+
/**
 * Build the `fn_run_verification` agent tool definition.
 *
 * The tool wraps runVerificationCommand2 with task-aware behavior: cwd
 * resolution against the task worktree, scope-dependent default timeouts with a
 * hard cap, an auto-`pnpm install` prepend when node_modules looks missing, and
 * a human-readable text report plus a structured `details` object.
 *
 * @param {object} opts
 * @param {string} opts.worktreePath - Task worktree root; default cwd.
 * @param {string} opts.rootDir - Workspace root (used to check node_modules).
 * @param {string} opts.taskId - Task id used as a log prefix.
 * @param {Function} opts.recordActivity - Heartbeat callback forwarded to the runner.
 * @param {object} opts.log - Logger with info/warn.
 * @returns {object} Tool definition { name, label, description, parameters, execute }.
 */
function createRunVerificationTool(opts) {
  const { worktreePath, rootDir, taskId, recordActivity, log: log17 } = opts;
  return {
    name: "fn_run_verification",
    label: "Run Verification",
    description: "Run a verification command (tests, lint, build, typecheck) with timeout and progress heartbeat protection. Use this instead of bash for any pnpm/npm test/lint/build commands. Prevents the inactivity watchdog from killing your session during long compiles.",
    parameters: runVerificationParams,
    execute: async (_toolCallId, params) => {
      const { command, scope, expectFailure = false } = params;
      const warnings = [];
      // Soft scope sanity check: a filtered pnpm command declared as
      // workspace-scoped is only warned about, not rejected.
      if (scope === "workspace" && command.trimStart().startsWith("pnpm --filter")) {
        const msg = 'scope is "workspace" but command starts with "pnpm --filter" \u2014 consider using scope="package" for scoped commands.';
        warnings.push(msg);
        log17.warn(`[fn_run_verification] ${taskId}: ${msg}`);
      }
      // cwd resolution: absolute is used as-is, relative is joined onto the
      // worktree, missing falls back to the worktree root.
      let resolvedCwd;
      if (params.cwd && isAbsolute10(params.cwd)) {
        resolvedCwd = params.cwd;
      } else if (params.cwd) {
        resolvedCwd = join33(worktreePath, params.cwd);
      } else {
        resolvedCwd = worktreePath;
      }
      // Scope-dependent default timeout, clamped to the hard cap (with a
      // warning when the caller asked for more).
      const defaultTimeoutSec = scope === "package" ? DEFAULT_TIMEOUT_PACKAGE_SEC : DEFAULT_TIMEOUT_WORKSPACE_SEC;
      const rawTimeoutSec = params.timeoutSec ?? defaultTimeoutSec;
      const timeoutSec = Math.min(rawTimeoutSec, MAX_TIMEOUT_SEC);
      const timeoutMs = timeoutSec * 1e3;
      if (rawTimeoutSec > MAX_TIMEOUT_SEC) {
        const msg = `timeoutSec ${rawTimeoutSec} exceeds hard cap of ${MAX_TIMEOUT_SEC}s \u2014 clamped.`;
        warnings.push(msg);
        log17.warn(`[fn_run_verification] ${taskId}: ${msg}`);
      }
      // Bootstrap guard: if the workspace root has no pnpm install marker
      // (node_modules/.modules.yaml), prepend an install before the filtered
      // command so it does not fail on missing dependencies.
      let effectiveCommand = command;
      if (command.trimStart().startsWith("pnpm --filter")) {
        const modulesYaml = join33(rootDir, "node_modules", ".modules.yaml");
        if (!existsSync26(modulesYaml)) {
          const installCmd = "pnpm install --prefer-offline";
          const msg = `node_modules/.modules.yaml not found in workspace root \u2014 auto-prepending \`${installCmd}\` before running the command.`;
          warnings.push(msg);
          log17.warn(`[fn_run_verification] ${taskId}: ${msg}`);
          effectiveCommand = `${installCmd} && ${command}`;
        }
      }
      log17.info(
        `[fn_run_verification] ${taskId}: scope=${scope} timeout=${timeoutSec}s cwd=${resolvedCwd} cmd=${effectiveCommand}`
      );
      const result = await runVerificationCommand2({
        command: effectiveCommand,
        cwd: resolvedCwd,
        timeoutMs,
        expectFailure,
        onHeartbeat: recordActivity
      });
      // Assemble the human-readable report: warnings, timeout notice, summary
      // lines, then captured streams.
      const allWarnings = [...warnings, ...result.warnings];
      const lines = [];
      if (allWarnings.length > 0) {
        lines.push(`Warnings:
${allWarnings.map((w) => ` - ${w}`).join("\n")}
`);
      }
      if (result.timedOut) {
        lines.push(
          `Command timed out after ${timeoutSec}s and was ${result.killed ? "killed (SIGKILL)" : "terminated (SIGTERM)"}.
`
        );
      }
      lines.push(`Exit code: ${result.exitCode ?? "null (signal)"}`);
      lines.push(`Duration: ${(result.durationMs / 1e3).toFixed(1)}s`);
      lines.push(`Success: ${result.success}`);
      if (result.stdout.length > 0) {
        lines.push(`
--- stdout ---
${result.stdout}`);
      }
      if (result.stderr.length > 0) {
        lines.push(`
--- stderr ---
${result.stderr}`);
      }
      // Extra guidance for the agent when the command timed out.
      if (result.timedOut) {
        lines.push(
          "\nDo NOT blindly retry \u2014 investigate whether subprocesses are hung, test loops are infinite, or dependencies are missing."
        );
      }
      const text = lines.join("\n");
      log17.info(
        `[fn_run_verification] ${taskId}: done exit=${result.exitCode} duration=${result.durationMs}ms success=${result.success}`
      );
      // Tool result: text for the model, structured details for the host.
      return {
        content: [{ type: "text", text }],
        details: {
          success: result.success,
          exitCode: result.exitCode,
          durationMs: result.durationMs,
          timedOut: result.timedOut,
          killed: result.killed,
          command: result.command,
          cwd: result.cwd
        }
      };
    }
  };
}
|
|
61717
|
+
// Module-level state for ../engine/src/run-verification-tool.ts, populated
// lazily by init_run_verification_tool (bundler __esm lazy-init pattern).
var MAX_OUTPUT_BYTES, QUIET_HEARTBEAT_INTERVAL_MS, SIGKILL_GRACE_MS, DEFAULT_TIMEOUT_PACKAGE_SEC, DEFAULT_TIMEOUT_WORKSPACE_SEC, MAX_TIMEOUT_SEC, runVerificationParams;
var init_run_verification_tool = __esm({
  "../engine/src/run-verification-tool.ts"() {
    "use strict";
    init_logger2();
    // Cap on captured output before head+tail truncation kicks in (200 KiB).
    MAX_OUTPUT_BYTES = 200 * 1024;
    // Log a heartbeat after 60s (6e4 ms) of command silence.
    QUIET_HEARTBEAT_INTERVAL_MS = 6e4;
    // Grace period between SIGTERM and SIGKILL (10s).
    SIGKILL_GRACE_MS = 1e4;
    // Default timeouts (seconds) by verification scope; MAX_TIMEOUT_SEC is the
    // hard cap applied to caller-provided overrides.
    DEFAULT_TIMEOUT_PACKAGE_SEC = 300;
    DEFAULT_TIMEOUT_WORKSPACE_SEC = 900;
    MAX_TIMEOUT_SEC = 1800;
    // Parameter schema for the fn_run_verification tool (TypeBox-style `Type`
    // builders from @mariozechner/pi-ai).
    runVerificationParams = Type4.Object({
      command: Type4.String({
        description: 'The shell command to run, e.g. "pnpm --filter @fusion/droid-cli test", "pnpm lint", "pnpm build"'
      }),
      cwd: Type4.Optional(
        Type4.String({
          description: "Working directory for the command. Defaults to the task worktree root if omitted or relative."
        })
      ),
      scope: Type4.Union(
        [Type4.Literal("package"), Type4.Literal("workspace")],
        {
          description: '"package" for scoped commands like `pnpm --filter <pkg>`, "workspace" for root-level commands like `pnpm test`.'
        }
      ),
      timeoutSec: Type4.Optional(
        Type4.Number({
          description: "Override the default timeout in seconds. Default: 300 for package scope, 900 for workspace scope. Hard cap: 1800."
        })
      ),
      expectFailure: Type4.Optional(
        Type4.Boolean({
          description: "If true, a non-zero exit code is reported but not flagged as an error. Default: false."
        })
      )
    });
  }
});
|
|
61756
|
+
|
|
61391
61757
|
// ../engine/src/executor.ts
|
|
61392
61758
|
import { exec as exec5 } from "node:child_process";
|
|
61393
61759
|
import { promisify as promisify6 } from "node:util";
|
|
61394
|
-
import { isAbsolute as
|
|
61395
|
-
import { existsSync as
|
|
61760
|
+
import { isAbsolute as isAbsolute11, join as join34, relative as relative6, resolve as resolvePath } from "node:path";
|
|
61761
|
+
import { existsSync as existsSync27 } from "node:fs";
|
|
61396
61762
|
import { readFile as readFile14, writeFile as writeFile12 } from "node:fs/promises";
|
|
61397
|
-
import { Type as
|
|
61763
|
+
import { Type as Type5 } from "@mariozechner/pi-ai";
|
|
61398
61764
|
import { ModelRegistry as ModelRegistry2, SessionManager as SessionManager2 } from "@mariozechner/pi-coding-agent";
|
|
61399
61765
|
function truncateWorkflowScriptOutput2(output) {
|
|
61400
61766
|
if (output.length <= WORKFLOW_SCRIPT_OUTPUT_MAX_CHARS2) return output;
|
|
@@ -61627,6 +61993,46 @@ ${comment.text}
|
|
|
61627
61993
|
|
|
61628
61994
|
Please adjust your approach based on this feedback.`;
|
|
61629
61995
|
}
|
|
61996
|
+
/**
 * Heuristically detect a "pseudo pause": an agent turn that ended with prose
 * asking the user for permission/confirmation instead of calling a tool.
 *
 * @param {string | null | undefined} lastText - Final assistant text of the turn.
 * @returns {{kind: "none" | "regex" | "structural", matched?: string}}
 *   "regex" when a known permission-seeking phrase was found (`matched` holds a
 *   one-line snippet with surrounding context); "structural" when a long
 *   message (> 200 chars) ends in a question or a dangling "next steps"-style
 *   heading (`matched` holds the last line); otherwise "none".
 */
function detectPseudoPause(lastText) {
  if (!lastText || lastText.trim().length === 0) {
    return { kind: "none" };
  }
  // Phrase-level signals. Fix: the original first pattern required a space
  // after "you", so its `'?d like` alternative could never match the
  // contraction "if you'd like"; the added `\bif you'?d like\b` alternative
  // covers it while keeping every previously matched form.
  const regexPatterns = [
    /\bif you (?:want|wish|need|like|prefer|'?d like)\b|\bif you'?d like\b/i,
    /\bshould I (?:continue|proceed|go ahead|move on|start|begin)\b/i,
    /\blet me know\b/i,
    /\b(?:want|would you like) me to (?:continue|proceed|finish|complete|do)\b/i,
    /\bready to (?:proceed|continue|move on|begin)\b/i,
    /\bshall I\b/i,
    /\b(?:awaiting|waiting for) (?:your )?(?:approval|confirmation|go-ahead|response)\b/i
  ];
  for (const pattern of regexPatterns) {
    const match = pattern.exec(lastText);
    if (match) {
      // Report the match plus ~40 chars before / ~80 after, collapsed to one line.
      const start = Math.max(0, match.index - 40);
      const end = Math.min(lastText.length, match.index + match[0].length + 80);
      const snippet = lastText.slice(start, end).replace(/\n+/g, " ").trim();
      return { kind: "regex", matched: snippet };
    }
  }
  // Structural signals only apply to longer messages, where a trailing question
  // or a dangling summary/next-steps ending suggests a progress report was
  // written instead of a tool call.
  const trimmed = lastText.trimEnd();
  if (trimmed.length > 200) {
    // Shared result builder: the final line is the structural evidence.
    const structuralHit = () => {
      const lastLine = trimmed.split("\n").at(-1) ?? trimmed;
      return { kind: "structural", matched: lastLine.trim() };
    };
    if (trimmed.endsWith("?")) {
      return structuralHit();
    }
    // A markdown heading like "## Next steps" left dangling at the very end.
    const nextStepsPattern = /(?:^|\n)#+\s*(?:notes?|next steps?|summary|what'?s? next)\s*:?\s*$/i;
    if (nextStepsPattern.test(trimmed)) {
      return structuralHit();
    }
    // Plain "Next steps:" (no heading marker) at the very end.
    if (/next steps?\s*:?\s*$/i.test(trimmed)) {
      return structuralHit();
    }
  }
  return { kind: "none" };
}
|
|
61630
62036
|
function detectReviewHandoffIntent(commentText) {
|
|
61631
62037
|
const text = commentText.toLowerCase();
|
|
61632
62038
|
const handoffPhrases = [
|
|
@@ -61669,6 +62075,7 @@ var init_executor = __esm({
|
|
|
61669
62075
|
init_agent_tools();
|
|
61670
62076
|
init_task_completion();
|
|
61671
62077
|
init_auth_storage();
|
|
62078
|
+
init_run_verification_tool();
|
|
61672
62079
|
init_agent_logger();
|
|
61673
62080
|
init_agent_tools();
|
|
61674
62081
|
execAsync5 = promisify6(exec5);
|
|
@@ -61681,38 +62088,38 @@ var init_executor = __esm({
|
|
|
61681
62088
|
WORKFLOW_SCRIPT_OUTPUT_MAX_CHARS2 = 4e3;
|
|
61682
62089
|
NonRetryableWorktreeError = class extends Error {
|
|
61683
62090
|
};
|
|
61684
|
-
taskUpdateParams =
|
|
61685
|
-
step:
|
|
61686
|
-
status:
|
|
61687
|
-
STEP_STATUSES.map((s) =>
|
|
62091
|
+
taskUpdateParams = Type5.Object({
|
|
62092
|
+
step: Type5.Number({ description: "Step number (0-indexed)" }),
|
|
62093
|
+
status: Type5.Union(
|
|
62094
|
+
STEP_STATUSES.map((s) => Type5.Literal(s)),
|
|
61688
62095
|
{ description: "New status: pending, in-progress, done, or skipped" }
|
|
61689
62096
|
)
|
|
61690
62097
|
});
|
|
61691
|
-
taskAddDepParams =
|
|
61692
|
-
task_id:
|
|
61693
|
-
confirm:
|
|
62098
|
+
taskAddDepParams = Type5.Object({
|
|
62099
|
+
task_id: Type5.String({ description: 'The ID of the task to depend on (e.g. "KB-001")' }),
|
|
62100
|
+
confirm: Type5.Optional(Type5.Boolean({ description: "Set to true to confirm adding the dependency. Required because adding a dep to an in-progress task will stop execution and discard current work." }))
|
|
61694
62101
|
});
|
|
61695
|
-
spawnAgentParams =
|
|
61696
|
-
name:
|
|
61697
|
-
role:
|
|
61698
|
-
|
|
61699
|
-
|
|
61700
|
-
|
|
61701
|
-
|
|
61702
|
-
|
|
61703
|
-
|
|
62102
|
+
spawnAgentParams = Type5.Object({
|
|
62103
|
+
name: Type5.String({ description: "Name for the child agent" }),
|
|
62104
|
+
role: Type5.Union([
|
|
62105
|
+
Type5.Literal("triage"),
|
|
62106
|
+
Type5.Literal("executor"),
|
|
62107
|
+
Type5.Literal("reviewer"),
|
|
62108
|
+
Type5.Literal("merger"),
|
|
62109
|
+
Type5.Literal("engineer"),
|
|
62110
|
+
Type5.Literal("custom")
|
|
61704
62111
|
], { description: "Role for the child agent" }),
|
|
61705
|
-
task:
|
|
62112
|
+
task: Type5.String({ description: "Task description for the child agent to execute" })
|
|
61706
62113
|
});
|
|
61707
|
-
reviewStepParams =
|
|
61708
|
-
step:
|
|
61709
|
-
type:
|
|
61710
|
-
[
|
|
62114
|
+
reviewStepParams = Type5.Object({
|
|
62115
|
+
step: Type5.Number({ description: "Step number to review" }),
|
|
62116
|
+
type: Type5.Union(
|
|
62117
|
+
[Type5.Literal("plan"), Type5.Literal("code")],
|
|
61711
62118
|
{ description: 'Review type: "plan" or "code"' }
|
|
61712
62119
|
),
|
|
61713
|
-
step_name:
|
|
61714
|
-
baseline:
|
|
61715
|
-
|
|
62120
|
+
step_name: Type5.String({ description: "Name of the step being reviewed" }),
|
|
62121
|
+
baseline: Type5.Optional(
|
|
62122
|
+
Type5.String({
|
|
61716
62123
|
description: "Git commit SHA for code review diff baseline. Capture HEAD before starting a step and pass it here."
|
|
61717
62124
|
})
|
|
61718
62125
|
)
|
|
@@ -61725,6 +62132,26 @@ You are working in a git worktree isolated from the main branch. Your job is to
|
|
|
61725
62132
|
You are the primary implementation agent in Fusion.
|
|
61726
62133
|
You execute task specs in isolated worktrees, produce production-quality changes, and hand off work that can pass independent review and merge.
|
|
61727
62134
|
|
|
62135
|
+
## Turn-ending rules \u2014 read carefully
|
|
62136
|
+
|
|
62137
|
+
You MUST end every turn by either:
|
|
62138
|
+
- (a) calling another tool to make progress, OR
|
|
62139
|
+
- (b) calling \`fn_task_done\` if the entire task is complete, OR
|
|
62140
|
+
- (c) calling \`fn_task_done\` with a summary explaining what is blocked, if you cannot make progress for any reason
|
|
62141
|
+
|
|
62142
|
+
You MUST NOT end a turn by writing prose that asks the user a question, summarizes progress, or requests permission to continue. The following are FORBIDDEN turn-endings:
|
|
62143
|
+
- "If you want, I can continue with..."
|
|
62144
|
+
- "Should I proceed with...?"
|
|
62145
|
+
- "Let me know if you'd like me to..."
|
|
62146
|
+
- "Ready to move on to step N. Want me to continue?"
|
|
62147
|
+
- Any markdown progress summary at the end of a turn instead of a tool call
|
|
62148
|
+
|
|
62149
|
+
If you have just finished a step's work, immediately call \`fn_task_update\` to mark the step done and continue with the next pending step in the SAME turn. Do not pause to summarize.
|
|
62150
|
+
|
|
62151
|
+
The user is not watching this conversation in real-time. They will read the final result. Asking permission wastes a full retry cycle and may orphan committed work.
|
|
62152
|
+
|
|
62153
|
+
If you genuinely cannot proceed (blocked on a dependency, missing information, or an unresolvable error), call \`fn_task_done\` with a clear explanation of what is blocked and what is needed to unblock it. Never write the question as plain prose.
|
|
62154
|
+
|
|
61728
62155
|
## How to work
|
|
61729
62156
|
1. Read the PROMPT.md carefully \u2014 it contains your mission, steps, file scope, acceptance criteria, and Do NOT constraints
|
|
61730
62157
|
2. Before touching code, read all files listed in "Context to Read First" and understand the full step outcome
|
|
@@ -61890,6 +62317,16 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
61890
62317
|
- Update tests when intended behavior changed; fix implementation when behavior regressed unintentionally
|
|
61891
62318
|
- **CRITICAL: Resolve ALL lint failures and test failures before completing the task, even if they appear unrelated or pre-existing.** Unrelated failures left unfixed accumulate technical debt and block future integrations. Investigate and fix or suppress them \u2014 do not defer them to a separate task.
|
|
61892
62319
|
|
|
62320
|
+
## Verification commands \u2014 use fn_run_verification
|
|
62321
|
+
|
|
62322
|
+
For ALL test/lint/build/typecheck verification, use the \`fn_run_verification\` tool, NOT raw bash.
|
|
62323
|
+
The tool prevents your session from being killed by the inactivity watchdog during long compiles.
|
|
62324
|
+
|
|
62325
|
+
- Prefer **package-scoped** verification first: e.g. \`pnpm --filter @fusion/<pkg> test\` with \`scope: "package"\`. This is faster and isolated.
|
|
62326
|
+
- Only run **workspace-scoped** verification (\`pnpm test\`, \`pnpm lint\`, \`pnpm build\` from root) at the FINAL integration step, when you are about to call \`fn_task_done\`.
|
|
62327
|
+
- If you need to run \`pnpm install\` (e.g. you added a new package), use \`fn_run_verification\` with \`scope: "workspace"\` and \`timeoutSec: 600\`.
|
|
62328
|
+
- If a verification command times out, do NOT blindly retry \u2014 investigate. Check for hung subprocesses, infinite test loops, or tests waiting on missing dependencies. Use \`node_modules/.modules.yaml\` presence to confirm bootstrap.
|
|
62329
|
+
|
|
61893
62330
|
## Common Pitfalls
|
|
61894
62331
|
- Editing files outside the assigned worktree (except allowed memory/attachment paths)
|
|
61895
62332
|
- Skipping or partially running required quality gates
|
|
@@ -62346,7 +62783,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
62346
62783
|
* this as a successful retry, since the original bounce may itself be
|
|
62347
62784
|
* stuck.
|
|
62348
62785
|
*/
|
|
62349
|
-
async performWorkflowRerunBounce(taskId, worktreePath) {
|
|
62786
|
+
async performWorkflowRerunBounce(taskId, worktreePath, preserveResumeState = true) {
|
|
62350
62787
|
if (this.workflowRerunPending.has(taskId)) {
|
|
62351
62788
|
executorLog.warn(`${taskId}: workflow rerun bounce already in flight \u2014 skipping re-entry`);
|
|
62352
62789
|
return "skipped-pending";
|
|
@@ -62359,7 +62796,11 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
62359
62796
|
}
|
|
62360
62797
|
if (latestTask.column === "in-progress") {
|
|
62361
62798
|
const originalExecutionStartedAt = latestTask.executionStartedAt;
|
|
62362
|
-
|
|
62799
|
+
if (preserveResumeState) {
|
|
62800
|
+
await this.store.moveTask(taskId, "todo", { preserveResumeState: true });
|
|
62801
|
+
} else {
|
|
62802
|
+
await this.store.moveTask(taskId, "todo");
|
|
62803
|
+
}
|
|
62363
62804
|
await this.store.updateTask(taskId, {
|
|
62364
62805
|
worktree: worktreePath,
|
|
62365
62806
|
executionStartedAt: originalExecutionStartedAt ?? null
|
|
@@ -62377,11 +62818,11 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
62377
62818
|
this.workflowRerunPending.delete(taskId);
|
|
62378
62819
|
}
|
|
62379
62820
|
}
|
|
62380
|
-
scheduleWorkflowRerun(taskId, worktreePath, successMessage) {
|
|
62821
|
+
scheduleWorkflowRerun(taskId, worktreePath, successMessage, preserveResumeState = true) {
|
|
62381
62822
|
this.clearWorkflowRerunWatchdog(taskId);
|
|
62382
62823
|
setTimeout(async () => {
|
|
62383
62824
|
try {
|
|
62384
|
-
const outcome = await this.performWorkflowRerunBounce(taskId, worktreePath);
|
|
62825
|
+
const outcome = await this.performWorkflowRerunBounce(taskId, worktreePath, preserveResumeState);
|
|
62385
62826
|
if (outcome === "bounced") {
|
|
62386
62827
|
executorLog.log(successMessage);
|
|
62387
62828
|
} else {
|
|
@@ -62413,7 +62854,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
62413
62854
|
`Watchdog: workflow rerun handoff stalled for ${WORKFLOW_RERUN_WATCHDOG_MS / 1e3}s (still ${currentTask.column}) \u2014 retrying once`
|
|
62414
62855
|
).catch(() => void 0);
|
|
62415
62856
|
try {
|
|
62416
|
-
const outcome = await this.performWorkflowRerunBounce(taskId, worktreePath);
|
|
62857
|
+
const outcome = await this.performWorkflowRerunBounce(taskId, worktreePath, preserveResumeState);
|
|
62417
62858
|
if (outcome === "bounced") {
|
|
62418
62859
|
executorLog.warn(`${taskId}: workflow rerun watchdog retry succeeded`);
|
|
62419
62860
|
} else {
|
|
@@ -62551,7 +62992,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
62551
62992
|
return false;
|
|
62552
62993
|
}
|
|
62553
62994
|
const settings = await this.store.getSettings();
|
|
62554
|
-
if (task.worktree &&
|
|
62995
|
+
if (task.worktree && existsSync27(task.worktree)) {
|
|
62555
62996
|
const modifiedFiles = await this.captureModifiedFiles(task.worktree, task.baseCommitSha);
|
|
62556
62997
|
if (modifiedFiles.length > 0) {
|
|
62557
62998
|
await this.store.updateTask(task.id, { modifiedFiles });
|
|
@@ -62560,7 +63001,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
62560
63001
|
if (task.executionMode !== "fast") {
|
|
62561
63002
|
const workflowResult = await this.runWorkflowSteps(task, task.worktree, settings);
|
|
62562
63003
|
if (!workflowResult.allPassed) {
|
|
62563
|
-
await this.sendTaskBackForFix(task, task.worktree, workflowResult.feedback, workflowResult.stepName || "Unknown", "Workflow step failed during recovery");
|
|
63004
|
+
await this.sendTaskBackForFix(task, task.worktree, workflowResult.feedback, workflowResult.stepName || "Unknown", "Workflow step failed during recovery", false);
|
|
62564
63005
|
return true;
|
|
62565
63006
|
}
|
|
62566
63007
|
} else {
|
|
@@ -62712,7 +63153,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
62712
63153
|
if (task.dependencies.length === 0) return null;
|
|
62713
63154
|
for (const depId of task.dependencies) {
|
|
62714
63155
|
const dep = allTasks.find((t) => t.id === depId);
|
|
62715
|
-
if (dep && dep.worktree && (dep.column === "done" || dep.column === "in-review") &&
|
|
63156
|
+
if (dep && dep.worktree && (dep.column === "done" || dep.column === "in-review") && existsSync27(dep.worktree)) {
|
|
62716
63157
|
return dep.worktree;
|
|
62717
63158
|
}
|
|
62718
63159
|
}
|
|
@@ -62763,7 +63204,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
62763
63204
|
const activeMergeStatuses = /* @__PURE__ */ new Set(["merging", "merging-pr"]);
|
|
62764
63205
|
const isActiveTask = activeColumns.has(task.column) || activeMergeStatuses.has(task.status ?? "");
|
|
62765
63206
|
if (!isActiveTask) {
|
|
62766
|
-
const tasksDir =
|
|
63207
|
+
const tasksDir = join34(this.store.getFusionDir(), "tasks");
|
|
62767
63208
|
const promptPath = getPromptPath(tasksDir, task.id);
|
|
62768
63209
|
const staleness = await evaluateSpecStaleness({ settings, promptPath });
|
|
62769
63210
|
if (staleness.isStale) {
|
|
@@ -62810,7 +63251,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
62810
63251
|
worktreeName = generateWorktreeName(this.rootDir);
|
|
62811
63252
|
break;
|
|
62812
63253
|
}
|
|
62813
|
-
worktreePath =
|
|
63254
|
+
worktreePath = join34(this.rootDir, ".worktrees", worktreeName);
|
|
62814
63255
|
}
|
|
62815
63256
|
let stuckRequeue = null;
|
|
62816
63257
|
let taskDone = false;
|
|
@@ -62833,8 +63274,8 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
62833
63274
|
"Project directory is not a Git repository. Fusion requires a Git repository for worktree creation. Initialize with 'git init' or run from a Git project directory."
|
|
62834
63275
|
);
|
|
62835
63276
|
}
|
|
62836
|
-
const branchName = `fusion/${task.id.toLowerCase()}`;
|
|
62837
|
-
let isResume =
|
|
63277
|
+
const branchName = task.branch || `fusion/${task.id.toLowerCase()}`;
|
|
63278
|
+
let isResume = existsSync27(worktreePath);
|
|
62838
63279
|
let acquiredFromPool = false;
|
|
62839
63280
|
const baseBranch = task.baseBranch || null;
|
|
62840
63281
|
if (task.worktree && isResume && !await isUsableTaskWorktree(this.rootDir, worktreePath)) {
|
|
@@ -62847,8 +63288,8 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
62847
63288
|
this.currentRunContext
|
|
62848
63289
|
);
|
|
62849
63290
|
await this.store.updateTask(task.id, { worktree: null, branch: null });
|
|
62850
|
-
worktreePath =
|
|
62851
|
-
isResume =
|
|
63291
|
+
worktreePath = join34(this.rootDir, ".worktrees", generateWorktreeName(this.rootDir));
|
|
63292
|
+
isResume = existsSync27(worktreePath);
|
|
62852
63293
|
}
|
|
62853
63294
|
if (!isResume) {
|
|
62854
63295
|
if (this.options.pool && settings.recycleWorktrees) {
|
|
@@ -62970,6 +63411,9 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
62970
63411
|
await this.store.updateStep(task.id, 0, "pending");
|
|
62971
63412
|
}
|
|
62972
63413
|
}
|
|
63414
|
+
if (isResume && task.branch && detail.steps.length > 0) {
|
|
63415
|
+
await this.reconcileStepsFromGitHistory(task.id, detail, worktreePath);
|
|
63416
|
+
}
|
|
62973
63417
|
const skillContext = await buildSessionSkillContext({
|
|
62974
63418
|
agentStore: this.options.agentStore,
|
|
62975
63419
|
task: detail,
|
|
@@ -63040,7 +63484,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63040
63484
|
if (this.pausedAborted.has(task.id)) {
|
|
63041
63485
|
this.pausedAborted.delete(task.id);
|
|
63042
63486
|
await this.store.logEntry(task.id, "Execution paused \u2014 step sessions terminated, moved to todo", void 0, this.currentRunContext);
|
|
63043
|
-
await this.store.moveTask(task.id, "todo");
|
|
63487
|
+
await this.store.moveTask(task.id, "todo", { preserveResumeState: true });
|
|
63044
63488
|
return;
|
|
63045
63489
|
}
|
|
63046
63490
|
if (this.stuckAborted.has(task.id)) {
|
|
@@ -63124,7 +63568,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63124
63568
|
} else if (this.pausedAborted.has(task.id)) {
|
|
63125
63569
|
this.pausedAborted.delete(task.id);
|
|
63126
63570
|
await this.store.logEntry(task.id, "Execution paused during step-session", void 0, this.currentRunContext);
|
|
63127
|
-
await this.store.moveTask(task.id, "todo");
|
|
63571
|
+
await this.store.moveTask(task.id, "todo", { preserveResumeState: true });
|
|
63128
63572
|
} else if (this.stuckAborted.has(task.id)) {
|
|
63129
63573
|
stuckRequeue = this.stuckAborted.get(task.id) ?? true;
|
|
63130
63574
|
this.stuckAborted.delete(task.id);
|
|
@@ -63142,7 +63586,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63142
63586
|
executorLog.warn(`\u26A1 ${task.id} transient error \u2014 retry ${attempt}/${MAX_RECOVERY_RETRIES} in ${delay2}: ${errorMessage}`);
|
|
63143
63587
|
await this.store.logEntry(task.id, `Transient error (retry ${attempt}/${MAX_RECOVERY_RETRIES} in ${delay2}): ${errorMessage}`, void 0, this.currentRunContext);
|
|
63144
63588
|
}
|
|
63145
|
-
if (worktreePath &&
|
|
63589
|
+
if (worktreePath && existsSync27(worktreePath)) {
|
|
63146
63590
|
try {
|
|
63147
63591
|
await execAsync5(`git worktree remove "${worktreePath}" --force`, { cwd: this.rootDir });
|
|
63148
63592
|
await audit.git({ type: "worktree:remove", target: worktreePath });
|
|
@@ -63201,7 +63645,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63201
63645
|
try {
|
|
63202
63646
|
const latestTask = await this.store.getTask(task.id);
|
|
63203
63647
|
await this.resetStepsIfWorkLost(latestTask);
|
|
63204
|
-
if (worktreePath &&
|
|
63648
|
+
if (worktreePath && existsSync27(worktreePath)) {
|
|
63205
63649
|
try {
|
|
63206
63650
|
await execAsync5(`git worktree remove "${worktreePath}" --force`, { cwd: this.rootDir });
|
|
63207
63651
|
} catch (wtErr) {
|
|
@@ -63243,6 +63687,17 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63243
63687
|
this.createTaskDoneTool(task.id, () => {
|
|
63244
63688
|
taskDone = true;
|
|
63245
63689
|
}),
|
|
63690
|
+
createRunVerificationTool({
|
|
63691
|
+
worktreePath,
|
|
63692
|
+
rootDir: this.rootDir,
|
|
63693
|
+
taskId: task.id,
|
|
63694
|
+
recordActivity: () => stuckDetector?.recordActivity(task.id),
|
|
63695
|
+
log: {
|
|
63696
|
+
info: (s) => executorLog.log(s),
|
|
63697
|
+
warn: (s) => executorLog.warn(s),
|
|
63698
|
+
error: (s) => executorLog.warn(s)
|
|
63699
|
+
}
|
|
63700
|
+
}),
|
|
63246
63701
|
// Skip fn_review_step tool in fast mode — fast mode bypasses automated review gates
|
|
63247
63702
|
...executionMode !== "fast" ? [
|
|
63248
63703
|
this.createReviewStepTool(task.id, worktreePath, detail.prompt, codeReviewVerdicts, sessionRef, stepCheckpoints, detail, stuckDetector)
|
|
@@ -63272,11 +63727,13 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63272
63727
|
// Add plugin tools from PluginRunner
|
|
63273
63728
|
...this.options.pluginRunner?.getPluginTools() ?? []
|
|
63274
63729
|
];
|
|
63730
|
+
let lastAssistantText = "";
|
|
63275
63731
|
const agentLogger = new AgentLogger({
|
|
63276
63732
|
store: this.store,
|
|
63277
63733
|
taskId: task.id,
|
|
63278
63734
|
agent: "executor",
|
|
63279
63735
|
onAgentText: (taskId, delta) => {
|
|
63736
|
+
lastAssistantText += delta;
|
|
63280
63737
|
stuckDetector?.recordActivity(taskId);
|
|
63281
63738
|
this.options.onAgentText?.(taskId, delta);
|
|
63282
63739
|
},
|
|
@@ -63294,7 +63751,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63294
63751
|
const executorFallbackProvider = settings.fallbackProvider;
|
|
63295
63752
|
const executorFallbackModelId = settings.fallbackModelId;
|
|
63296
63753
|
const executorThinkingLevel = detail.thinkingLevel ?? settings.defaultThinkingLevel;
|
|
63297
|
-
const isResuming = !!task.sessionFile &&
|
|
63754
|
+
const isResuming = !!task.sessionFile && existsSync27(task.sessionFile);
|
|
63298
63755
|
const sessionManager = isResuming ? SessionManager2.open(task.sessionFile) : SessionManager2.create(worktreePath);
|
|
63299
63756
|
executorLog.log(`${task.id}: creating agent session (provider=${executorProvider ?? "default"}, model=${executorModelId ?? "default"}, resuming=${isResuming})`);
|
|
63300
63757
|
const executorInstructions = await this.resolveInstructionsForRole("executor");
|
|
@@ -63429,7 +63886,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63429
63886
|
} else {
|
|
63430
63887
|
executorLog.log(`${task.id} paused (graceful session exit) \u2014 moving to todo`);
|
|
63431
63888
|
await this.store.logEntry(task.id, "Execution paused \u2014 session preserved for resume, moved to todo");
|
|
63432
|
-
await this.store.moveTask(task.id, "todo");
|
|
63889
|
+
await this.store.moveTask(task.id, "todo", { preserveResumeState: true });
|
|
63433
63890
|
}
|
|
63434
63891
|
return;
|
|
63435
63892
|
}
|
|
@@ -63493,6 +63950,19 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63493
63950
|
void 0,
|
|
63494
63951
|
this.currentRunContext
|
|
63495
63952
|
);
|
|
63953
|
+
const previousSessionText = lastAssistantText;
|
|
63954
|
+
const pseudoPause = detectPseudoPause(previousSessionText);
|
|
63955
|
+
if (pseudoPause.kind !== "none") {
|
|
63956
|
+
const shortMatch = (pseudoPause.matched ?? "").slice(0, 120);
|
|
63957
|
+
await this.store.logEntry(
|
|
63958
|
+
task.id,
|
|
63959
|
+
`Pseudo-pause detected (kind=${pseudoPause.kind}, matched='${shortMatch}')`,
|
|
63960
|
+
void 0,
|
|
63961
|
+
this.currentRunContext
|
|
63962
|
+
);
|
|
63963
|
+
executorLog.log(`${task.id} pseudo-pause detected (kind=${pseudoPause.kind}): ${shortMatch}`);
|
|
63964
|
+
}
|
|
63965
|
+
lastAssistantText = "";
|
|
63496
63966
|
this.activeSessions.delete(task.id);
|
|
63497
63967
|
this.tokenUsageBaselines.delete(task.id);
|
|
63498
63968
|
session.dispose();
|
|
@@ -63532,15 +64002,38 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63532
64002
|
lastModelId: detail.modelId
|
|
63533
64003
|
});
|
|
63534
64004
|
stuckDetector?.trackTask(task.id, retrySession);
|
|
63535
|
-
|
|
63536
|
-
|
|
63537
|
-
|
|
63538
|
-
|
|
63539
|
-
|
|
63540
|
-
|
|
63541
|
-
|
|
63542
|
-
|
|
63543
|
-
|
|
64005
|
+
let retryPrompt;
|
|
64006
|
+
if (pseudoPause.kind !== "none") {
|
|
64007
|
+
const shortMatch = (pseudoPause.matched ?? "").slice(0, 120);
|
|
64008
|
+
retryPrompt = [
|
|
64009
|
+
`Your previous turn ended with a pseudo-pause: "${shortMatch}". This is forbidden.`,
|
|
64010
|
+
"",
|
|
64011
|
+
"Turn-ending rules you violated:",
|
|
64012
|
+
"- You MUST NOT end a turn by asking the user a question, summarizing progress, or requesting permission to continue.",
|
|
64013
|
+
"- Phrases like 'If you want, I can continue', 'Should I proceed?', 'Let me know if...' are FORBIDDEN turn-endings.",
|
|
64014
|
+
"- The user is not watching this conversation. Questions written as prose are ignored.",
|
|
64015
|
+
"- If you genuinely cannot proceed, call fn_task_done with a clear explanation \u2014 never write the blocker as plain prose.",
|
|
64016
|
+
"",
|
|
64017
|
+
"What you must do now:",
|
|
64018
|
+
"1. Review the PROMPT.md steps and identify the next pending step.",
|
|
64019
|
+
"2. Do the work for that step immediately \u2014 call fn_task_update, write code, run tests.",
|
|
64020
|
+
"3. Continue until all steps are done, then call fn_task_done.",
|
|
64021
|
+
"Do NOT ask for permission. Do NOT write a summary. Just call a tool and keep working.",
|
|
64022
|
+
"",
|
|
64023
|
+
"Original task:",
|
|
64024
|
+
buildExecutionPrompt(detail, this.rootDir, settings, worktreePath)
|
|
64025
|
+
].join("\n");
|
|
64026
|
+
} else {
|
|
64027
|
+
retryPrompt = [
|
|
64028
|
+
"Your previous session ended without calling the fn_task_done tool.",
|
|
64029
|
+
"The task may already be complete \u2014 review the current state of the worktree and either:",
|
|
64030
|
+
"1. If the work is done, call fn_task_done with a summary of what was accomplished.",
|
|
64031
|
+
"2. If there is remaining work, finish it and then call fn_task_done.",
|
|
64032
|
+
"",
|
|
64033
|
+
"Original task:",
|
|
64034
|
+
buildExecutionPrompt(detail, this.rootDir, settings, worktreePath)
|
|
64035
|
+
].join("\n");
|
|
64036
|
+
}
|
|
63544
64037
|
stuckDetector?.recordActivity(task.id);
|
|
63545
64038
|
await promptWithFallback(retrySession, retryPrompt);
|
|
63546
64039
|
checkSessionError(retrySession);
|
|
@@ -63677,7 +64170,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63677
64170
|
this.options.onComplete?.(task);
|
|
63678
64171
|
} else {
|
|
63679
64172
|
executorLog.log(`${task.id} paused \u2014 moving to todo`);
|
|
63680
|
-
if (worktreePath &&
|
|
64173
|
+
if (worktreePath && existsSync27(worktreePath)) {
|
|
63681
64174
|
try {
|
|
63682
64175
|
await execAsync5(`git worktree remove "${worktreePath}" --force`, { cwd: this.rootDir });
|
|
63683
64176
|
executorLog.log(`Removed old worktree for paused task: ${worktreePath}`);
|
|
@@ -63747,9 +64240,10 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63747
64240
|
await this.store.logEntry(task.id, `Context-overflow fresh-session requeue (${attempt}/${MAX_RECOVERY_RETRIES} in ${delay2}): ${errorMessage}`, void 0, this.currentRunContext);
|
|
63748
64241
|
await this.store.updateTask(task.id, {
|
|
63749
64242
|
recoveryRetryCount: decision.nextState.recoveryRetryCount,
|
|
63750
|
-
nextRecoveryAt: decision.nextState.nextRecoveryAt
|
|
64243
|
+
nextRecoveryAt: decision.nextState.nextRecoveryAt,
|
|
64244
|
+
sessionFile: null
|
|
63751
64245
|
});
|
|
63752
|
-
await this.store.moveTask(task.id, "todo");
|
|
64246
|
+
await this.store.moveTask(task.id, "todo", { preserveResumeState: true });
|
|
63753
64247
|
return;
|
|
63754
64248
|
}
|
|
63755
64249
|
executorLog.error(`\u2717 ${task.id} context-overflow requeue budget exhausted (${MAX_RECOVERY_RETRIES} attempts): ${errorMessage}`);
|
|
@@ -63772,7 +64266,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63772
64266
|
executorLog.warn(`\u26A1 ${task.id} transient error \u2014 retry ${attempt}/${MAX_RECOVERY_RETRIES} in ${delay2}: ${errorMessage}`);
|
|
63773
64267
|
await this.store.logEntry(task.id, `Transient error (retry ${attempt}/${MAX_RECOVERY_RETRIES} in ${delay2}): ${errorMessage}`, void 0, this.currentRunContext);
|
|
63774
64268
|
}
|
|
63775
|
-
if (worktreePath &&
|
|
64269
|
+
if (worktreePath && existsSync27(worktreePath)) {
|
|
63776
64270
|
try {
|
|
63777
64271
|
await execAsync5(`git worktree remove "${worktreePath}" --force`, { cwd: this.rootDir });
|
|
63778
64272
|
executorLog.log(`Removed old worktree for transient retry: ${worktreePath}`);
|
|
@@ -63827,7 +64321,7 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63827
64321
|
try {
|
|
63828
64322
|
const latestTask = await this.store.getTask(task.id);
|
|
63829
64323
|
await this.resetStepsIfWorkLost(latestTask);
|
|
63830
|
-
if (worktreePath &&
|
|
64324
|
+
if (worktreePath && existsSync27(worktreePath)) {
|
|
63831
64325
|
try {
|
|
63832
64326
|
await execAsync5(`git worktree remove "${worktreePath}" --force`, { cwd: this.rootDir });
|
|
63833
64327
|
executorLog.log(`Removed old worktree for stuck-killed retry: ${worktreePath}`);
|
|
@@ -63874,19 +64368,39 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63874
64368
|
details: {}
|
|
63875
64369
|
};
|
|
63876
64370
|
}
|
|
63877
|
-
|
|
64371
|
+
const stepIndex = step - 1;
|
|
64372
|
+
if (!Number.isInteger(stepIndex) || stepIndex < 0) {
|
|
64373
|
+
return {
|
|
64374
|
+
content: [{
|
|
64375
|
+
type: "text",
|
|
64376
|
+
text: `Invalid step number: ${step}. Steps are 1-indexed.`
|
|
64377
|
+
}],
|
|
64378
|
+
details: {}
|
|
64379
|
+
};
|
|
64380
|
+
}
|
|
64381
|
+
const task = await store.updateStep(taskId, stepIndex, status);
|
|
64382
|
+
const stepInfo = task.steps[stepIndex];
|
|
64383
|
+
const persistedStatus = stepInfo.status;
|
|
64384
|
+
const progress = task.steps.filter((s) => s.status === "done").length;
|
|
64385
|
+
if (status === "in-progress" && persistedStatus === "in-progress" && sessionRef.current) {
|
|
63878
64386
|
const leafId = sessionRef.current.sessionManager.getLeafId();
|
|
63879
64387
|
if (leafId) {
|
|
63880
64388
|
stepCheckpoints.set(step, leafId);
|
|
63881
64389
|
}
|
|
63882
64390
|
}
|
|
63883
|
-
|
|
63884
|
-
|
|
63885
|
-
|
|
64391
|
+
if (persistedStatus !== status) {
|
|
64392
|
+
return {
|
|
64393
|
+
content: [{
|
|
64394
|
+
type: "text",
|
|
64395
|
+
text: `Step ${step} (${stepInfo.name}) is already ${persistedStatus} \u2014 ${status} request ignored to preserve completed work. Progress: ${progress}/${task.steps.length} done.`
|
|
64396
|
+
}],
|
|
64397
|
+
details: {}
|
|
64398
|
+
};
|
|
64399
|
+
}
|
|
63886
64400
|
return {
|
|
63887
64401
|
content: [{
|
|
63888
64402
|
type: "text",
|
|
63889
|
-
text: `Step ${step} (${stepInfo.name}) \u2192 ${
|
|
64403
|
+
text: `Step ${step} (${stepInfo.name}) \u2192 ${persistedStatus}. Progress: ${progress}/${task.steps.length} done.`
|
|
63890
64404
|
}],
|
|
63891
64405
|
details: {}
|
|
63892
64406
|
};
|
|
@@ -63981,8 +64495,8 @@ Lint, tests, and typecheck are also hard quality gates:
|
|
|
63981
64495
|
name: "fn_task_done",
|
|
63982
64496
|
label: "Mark Task Done",
|
|
63983
64497
|
description: "Signal that all steps are complete, tests pass, and documentation is updated. Call this as the final action after finishing all work. Automatically marks all remaining steps as done. Optionally provide a summary of what was changed/fixed.",
|
|
63984
|
-
parameters:
|
|
63985
|
-
summary:
|
|
64498
|
+
parameters: Type5.Object({
|
|
64499
|
+
summary: Type5.Optional(Type5.String({
|
|
63986
64500
|
description: "Optional summary of what was changed/fixed and what was verified (2-4 sentences)"
|
|
63987
64501
|
}))
|
|
63988
64502
|
}),
|
|
@@ -64288,7 +64802,7 @@ Take a different approach. Do NOT repeat the rejected strategy. Re-read the step
|
|
|
64288
64802
|
* The section is replaced entirely to avoid accumulation of old feedback.
|
|
64289
64803
|
*/
|
|
64290
64804
|
async injectWorkflowRevisionInstructions(task, feedback) {
|
|
64291
|
-
const promptPath =
|
|
64805
|
+
const promptPath = join34(this.store.getFusionDir(), "tasks", task.id, "PROMPT.md");
|
|
64292
64806
|
let content;
|
|
64293
64807
|
try {
|
|
64294
64808
|
content = await readFile14(promptPath, "utf-8");
|
|
@@ -64374,7 +64888,7 @@ ${feedback}
|
|
|
64374
64888
|
* Injects failure feedback into PROMPT.md, resets steps, clears session,
|
|
64375
64889
|
* and schedules a move to todo → in-progress after the executing guard clears.
|
|
64376
64890
|
*/
|
|
64377
|
-
async sendTaskBackForFix(task, worktreePath, failureFeedback, stepName, reason) {
|
|
64891
|
+
async sendTaskBackForFix(task, worktreePath, failureFeedback, stepName, reason, preserveResumeState = true) {
|
|
64378
64892
|
const taskId = task.id;
|
|
64379
64893
|
this.clearCompletedTaskWatchdog(taskId);
|
|
64380
64894
|
await this.store.addTaskComment(
|
|
@@ -64401,7 +64915,8 @@ Please fix the issues so the verification can pass on the next attempt.`,
|
|
|
64401
64915
|
this.scheduleWorkflowRerun(
|
|
64402
64916
|
taskId,
|
|
64403
64917
|
worktreePath,
|
|
64404
|
-
`${taskId}: sent back to in-progress for remediation
|
|
64918
|
+
`${taskId}: sent back to in-progress for remediation`,
|
|
64919
|
+
preserveResumeState
|
|
64405
64920
|
);
|
|
64406
64921
|
}
|
|
64407
64922
|
/**
|
|
@@ -64410,7 +64925,7 @@ Please fix the issues so the verification can pass on the next attempt.`,
|
|
|
64410
64925
|
* The section is replaced entirely to avoid accumulation of old feedback.
|
|
64411
64926
|
*/
|
|
64412
64927
|
async injectWorkflowStepFailureInstructions(task, failureFeedback, stepName, retryCount) {
|
|
64413
|
-
const promptPath =
|
|
64928
|
+
const promptPath = join34(this.store.getFusionDir(), "tasks", task.id, "PROMPT.md");
|
|
64414
64929
|
let content;
|
|
64415
64930
|
try {
|
|
64416
64931
|
content = await readFile14(promptPath, "utf-8");
|
|
@@ -65194,7 +65709,7 @@ Review the work done in this worktree and evaluate it against the criteria in yo
|
|
|
65194
65709
|
* rather than fail the task permanently.
|
|
65195
65710
|
*/
|
|
65196
65711
|
async resolveWorktreeStartPoint(startPoint, taskId) {
|
|
65197
|
-
const command =
|
|
65712
|
+
const command = isAbsolute11(startPoint) && existsSync27(startPoint) ? `git -C "${startPoint}" rev-parse --verify HEAD^{commit}` : `git rev-parse --verify "${startPoint}^{commit}"`;
|
|
65198
65713
|
try {
|
|
65199
65714
|
const { stdout } = await execAsync5(command, { cwd: this.rootDir });
|
|
65200
65715
|
return stdout.trim() || startPoint;
|
|
@@ -65214,7 +65729,7 @@ Review the work done in this worktree and evaluate it against the criteria in yo
|
|
|
65214
65729
|
*/
|
|
65215
65730
|
async tryCreateWorktree(branch, path2, taskId, startPoint, attemptNumber = 0, recoveryDepth = 0) {
|
|
65216
65731
|
await this.assertWorktreePathNotNested(path2, taskId);
|
|
65217
|
-
if (
|
|
65732
|
+
if (existsSync27(path2)) {
|
|
65218
65733
|
const isRegistered = await this.isRegisteredWorktree(path2);
|
|
65219
65734
|
if (!isRegistered) {
|
|
65220
65735
|
await this.store.logEntry(
|
|
@@ -65364,7 +65879,7 @@ Review the work done in this worktree and evaluate it against the criteria in yo
|
|
|
65364
65879
|
taskId
|
|
65365
65880
|
);
|
|
65366
65881
|
if (shouldGenerateNewName) {
|
|
65367
|
-
const newPath =
|
|
65882
|
+
const newPath = join34(this.rootDir, ".worktrees", generateWorktreeName(this.rootDir));
|
|
65368
65883
|
for (let suffix = 2; suffix <= 6; suffix++) {
|
|
65369
65884
|
const suffixedBranch = `${branch}-${suffix}`;
|
|
65370
65885
|
try {
|
|
@@ -65412,7 +65927,7 @@ Review the work done in this worktree and evaluate it against the criteria in yo
|
|
|
65412
65927
|
if (wt === rootResolved) continue;
|
|
65413
65928
|
if (wt === target) continue;
|
|
65414
65929
|
const rel = relative6(wt, target);
|
|
65415
|
-
if (rel && !rel.startsWith("..") && !
|
|
65930
|
+
if (rel && !rel.startsWith("..") && !isAbsolute11(rel)) {
|
|
65416
65931
|
await this.store.logEntry(
|
|
65417
65932
|
taskId,
|
|
65418
65933
|
`Refusing to create nested worktree`,
|
|
@@ -65656,6 +66171,69 @@ Review the work done in this worktree and evaluate it against the criteria in yo
|
|
|
65656
66171
|
executorLog.log(`${taskId}: recovered ${recovered} approved step(s) on resume`);
|
|
65657
66172
|
}
|
|
65658
66173
|
}
|
|
66174
|
+
/**
|
|
66175
|
+
* On resume (task already has a branch from a prior run), walk git history
|
|
66176
|
+
* and mark steps as done when a commit matching the step-completion convention
|
|
66177
|
+
* is found. This prevents the agent from redoing already-committed work after
|
|
66178
|
+
* an auto-requeue.
|
|
66179
|
+
*
|
|
66180
|
+
* Commit message convention (case-insensitive):
|
|
66181
|
+
* feat|chore|fix(FN-XXXX): complete Step N
|
|
66182
|
+
*
|
|
66183
|
+
* Called after the worktree is acquired and before the agent session starts.
|
|
66184
|
+
*/
|
|
66185
|
+
async reconcileStepsFromGitHistory(taskId, detail, worktreePath) {
|
|
66186
|
+
const baseCommitSha = detail.baseCommitSha;
|
|
66187
|
+
if (!baseCommitSha) return;
|
|
66188
|
+
const pendingOrInProgressSteps = detail.steps.filter(
|
|
66189
|
+
(s, i) => (s.status === "pending" || s.status === "in-progress") && i > 0
|
|
66190
|
+
);
|
|
66191
|
+
if (pendingOrInProgressSteps.length === 0) return;
|
|
66192
|
+
let logOutput;
|
|
66193
|
+
try {
|
|
66194
|
+
const { stdout } = await execAsync5(
|
|
66195
|
+
`git log "${baseCommitSha}..HEAD" --oneline`,
|
|
66196
|
+
{ cwd: worktreePath }
|
|
66197
|
+
);
|
|
66198
|
+
logOutput = stdout;
|
|
66199
|
+
} catch (err) {
|
|
66200
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
66201
|
+
executorLog.warn(`${taskId}: reconcileStepsFromGitHistory \u2014 git log failed: ${msg}`);
|
|
66202
|
+
return;
|
|
66203
|
+
}
|
|
66204
|
+
if (!logOutput.trim()) return;
|
|
66205
|
+
const stepCommitRegex = /^(?:feat|chore|fix)\([Ff][Nn]-\d+\)(?:!)?:\s*complete\s+step\s+(\d+)/i;
|
|
66206
|
+
const reconciledStepIndices = /* @__PURE__ */ new Set();
|
|
66207
|
+
for (const line of logOutput.split("\n")) {
|
|
66208
|
+
const message = line.replace(/^[0-9a-f]+ /, "").trim();
|
|
66209
|
+
const match = message.match(stepCommitRegex);
|
|
66210
|
+
if (!match) continue;
|
|
66211
|
+
const stepIndex = parseInt(match[1], 10);
|
|
66212
|
+
if (Number.isNaN(stepIndex) || stepIndex < 0 || stepIndex >= detail.steps.length) continue;
|
|
66213
|
+
const step = detail.steps[stepIndex];
|
|
66214
|
+
if (step.status === "pending" || step.status === "in-progress") {
|
|
66215
|
+
reconciledStepIndices.add(stepIndex);
|
|
66216
|
+
}
|
|
66217
|
+
}
|
|
66218
|
+
for (const stepIndex of reconciledStepIndices) {
|
|
66219
|
+
await this.store.updateStep(taskId, stepIndex, "done");
|
|
66220
|
+
await this.store.logEntry(
|
|
66221
|
+
taskId,
|
|
66222
|
+
`Reconciled Step ${stepIndex} as done from git history (resume)`,
|
|
66223
|
+
void 0,
|
|
66224
|
+
this.currentRunContext
|
|
66225
|
+
);
|
|
66226
|
+
executorLog.log(`${taskId}: reconciled Step ${stepIndex} as done from git history`);
|
|
66227
|
+
}
|
|
66228
|
+
if (reconciledStepIndices.size > 0) {
|
|
66229
|
+
const updated = await this.store.getTask(taskId);
|
|
66230
|
+
const lowestPending = updated.steps.findIndex((s) => s.status === "pending" || s.status === "in-progress");
|
|
66231
|
+
if (lowestPending >= 0 && lowestPending !== updated.currentStep) {
|
|
66232
|
+
await this.store.updateTask(taskId, { currentStep: lowestPending });
|
|
66233
|
+
executorLog.log(`${taskId}: set currentStep to ${lowestPending} after step reconciliation`);
|
|
66234
|
+
}
|
|
66235
|
+
}
|
|
66236
|
+
}
|
|
65659
66237
|
/**
|
|
65660
66238
|
* Check whether the task's branch has any unique commits compared to main.
|
|
65661
66239
|
* If the branch has no unique commits and the task has steps marked done,
|
|
@@ -65901,7 +66479,7 @@ Review the work done in this worktree and evaluate it against the criteria in yo
|
|
|
65901
66479
|
metadata: { type: "spawned", parentTaskId: taskId }
|
|
65902
66480
|
});
|
|
65903
66481
|
const childWorktreeName = generateWorktreeName(this.rootDir);
|
|
65904
|
-
const childWorktreePath =
|
|
66482
|
+
const childWorktreePath = join34(this.rootDir, ".worktrees", childWorktreeName);
|
|
65905
66483
|
const childBranch = `fusion/spawn-${agent.id}`;
|
|
65906
66484
|
await this.createWorktree(childBranch, childWorktreePath, taskId, worktreePath);
|
|
65907
66485
|
await this.options.agentStore.updateAgentState(agent.id, "active");
|
|
@@ -66089,9 +66667,9 @@ var init_node_routing_policy = __esm({
|
|
|
66089
66667
|
});
|
|
66090
66668
|
|
|
66091
66669
|
// ../engine/src/scheduler.ts
|
|
66092
|
-
import { existsSync as
|
|
66670
|
+
import { existsSync as existsSync28 } from "node:fs";
|
|
66093
66671
|
import { readFile as readFile15 } from "node:fs/promises";
|
|
66094
|
-
import { basename as basename8, join as
|
|
66672
|
+
import { basename as basename8, join as join35 } from "node:path";
|
|
66095
66673
|
function pathsOverlap2(a, b) {
|
|
66096
66674
|
for (const pa of a) {
|
|
66097
66675
|
const prefixA = pa.endsWith("/*") ? pa.slice(0, -1) : null;
|
|
@@ -66261,12 +66839,12 @@ var init_scheduler = __esm({
|
|
|
66261
66839
|
* @returns Object with `valid: true` if checks pass, or `valid: false` with a `reason` string if they fail
|
|
66262
66840
|
*/
|
|
66263
66841
|
async validateTaskFilesystem(id) {
|
|
66264
|
-
const taskDir =
|
|
66265
|
-
if (!
|
|
66842
|
+
const taskDir = join35(this.store.getTasksDir(), id);
|
|
66843
|
+
if (!existsSync28(taskDir)) {
|
|
66266
66844
|
return { valid: false, reason: "missing directory" };
|
|
66267
66845
|
}
|
|
66268
|
-
const promptPath =
|
|
66269
|
-
if (!
|
|
66846
|
+
const promptPath = join35(taskDir, "PROMPT.md");
|
|
66847
|
+
if (!existsSync28(promptPath)) {
|
|
66270
66848
|
return { valid: false, reason: "missing or empty PROMPT.md" };
|
|
66271
66849
|
}
|
|
66272
66850
|
try {
|
|
@@ -66388,7 +66966,7 @@ var init_scheduler = __esm({
|
|
|
66388
66966
|
break;
|
|
66389
66967
|
}
|
|
66390
66968
|
reservedNames.add(worktreeName);
|
|
66391
|
-
return
|
|
66969
|
+
return join35(this.store.getRootDir(), ".worktrees", worktreeName);
|
|
66392
66970
|
}
|
|
66393
66971
|
/**
|
|
66394
66972
|
* Run one scheduling pass.
|
|
@@ -68343,7 +68921,7 @@ __export(agent_reflection_exports, {
|
|
|
68343
68921
|
AgentReflectionService: () => AgentReflectionService
|
|
68344
68922
|
});
|
|
68345
68923
|
import { readFile as readFile16 } from "node:fs/promises";
|
|
68346
|
-
import { isAbsolute as
|
|
68924
|
+
import { isAbsolute as isAbsolute12, resolve as resolve14 } from "node:path";
|
|
68347
68925
|
var reflectionLog, REFLECTION_SYSTEM_PROMPT, DEFAULT_OUTCOME_LIMIT, AgentReflectionService;
|
|
68348
68926
|
var init_agent_reflection = __esm({
|
|
68349
68927
|
"../engine/src/agent-reflection.ts"() {
|
|
@@ -68654,7 +69232,7 @@ Rules:
|
|
|
68654
69232
|
pieces.push(agent.instructionsText.trim());
|
|
68655
69233
|
}
|
|
68656
69234
|
if (agent.instructionsPath?.trim()) {
|
|
68657
|
-
const resolvedPath =
|
|
69235
|
+
const resolvedPath = isAbsolute12(agent.instructionsPath) ? agent.instructionsPath : resolve14(this.rootDir, agent.instructionsPath);
|
|
68658
69236
|
try {
|
|
68659
69237
|
const content = await readFile16(resolvedPath, "utf-8");
|
|
68660
69238
|
if (content.trim()) {
|
|
@@ -68693,7 +69271,7 @@ Rules:
|
|
|
68693
69271
|
});
|
|
68694
69272
|
|
|
68695
69273
|
// ../engine/src/agent-heartbeat.ts
|
|
68696
|
-
import { Type as
|
|
69274
|
+
import { Type as Type6 } from "@mariozechner/pi-ai";
|
|
68697
69275
|
function isBlockedStateDuplicate(current, previous) {
|
|
68698
69276
|
return current.blockedBy === previous.blockedBy && current.contextHash === previous.contextHash;
|
|
68699
69277
|
}
|
|
@@ -68897,8 +69475,8 @@ When sending messages:
|
|
|
68897
69475
|
Critical: a heartbeat without observable progress (a log, a document write, a
|
|
68898
69476
|
status change, a comment, a delegation, or an explicit "no-op with reason") is
|
|
68899
69477
|
a bug. Do not loop on the same plan across heartbeats without recording why.`;
|
|
68900
|
-
heartbeatDoneParams =
|
|
68901
|
-
summary:
|
|
69478
|
+
heartbeatDoneParams = Type6.Object({
|
|
69479
|
+
summary: Type6.Optional(Type6.String({ description: "Summary of what was accomplished this heartbeat" }))
|
|
68902
69480
|
});
|
|
68903
69481
|
HeartbeatMonitor = class {
|
|
68904
69482
|
store;
|
|
@@ -71563,7 +72141,7 @@ ${source.content ?? ""}`;
|
|
|
71563
72141
|
|
|
71564
72142
|
// ../engine/src/research/providers/local-docs-provider.ts
|
|
71565
72143
|
import { promises as fs } from "node:fs";
|
|
71566
|
-
import { extname as extname2, join as
|
|
72144
|
+
import { extname as extname2, join as join36, relative as relative7, resolve as resolve15 } from "node:path";
|
|
71567
72145
|
function buildExcerpt(content, terms) {
|
|
71568
72146
|
const lower = content.toLowerCase();
|
|
71569
72147
|
const first = terms.find((term) => lower.includes(term));
|
|
@@ -71692,7 +72270,7 @@ var init_local_docs_provider = __esm({
|
|
|
71692
72270
|
const rootEntries = await fs.readdir(this.projectRoot, { withFileTypes: true });
|
|
71693
72271
|
for (const entry of rootEntries) {
|
|
71694
72272
|
if (entry.isFile() && entry.name.toLowerCase().endsWith(".md")) {
|
|
71695
|
-
files.push(
|
|
72273
|
+
files.push(join36(this.projectRoot, entry.name));
|
|
71696
72274
|
}
|
|
71697
72275
|
}
|
|
71698
72276
|
return [...new Set(files)];
|
|
@@ -71702,7 +72280,7 @@ var init_local_docs_provider = __esm({
|
|
|
71702
72280
|
const entries = await fs.readdir(dir, { withFileTypes: true });
|
|
71703
72281
|
for (const entry of entries) {
|
|
71704
72282
|
this.throwIfAborted(signal);
|
|
71705
|
-
const fullPath =
|
|
72283
|
+
const fullPath = join36(dir, entry.name);
|
|
71706
72284
|
const relPath = relative7(this.projectRoot, fullPath).replace(/\\/g, "/");
|
|
71707
72285
|
if (matchesGitignore(relPath, ignorePatterns)) continue;
|
|
71708
72286
|
if (entry.isDirectory()) {
|
|
@@ -71727,7 +72305,7 @@ var init_local_docs_provider = __esm({
|
|
|
71727
72305
|
}
|
|
71728
72306
|
async readGitignore() {
|
|
71729
72307
|
try {
|
|
71730
|
-
const content = await fs.readFile(
|
|
72308
|
+
const content = await fs.readFile(join36(this.projectRoot, ".gitignore"), "utf-8");
|
|
71731
72309
|
return content.split(/\r?\n/).map((line) => line.trim()).filter((line) => line && !line.startsWith("#"));
|
|
71732
72310
|
} catch {
|
|
71733
72311
|
return [];
|
|
@@ -73243,7 +73821,22 @@ var init_shell_utils = __esm({
|
|
|
73243
73821
|
|
|
73244
73822
|
// ../engine/src/cron-runner.ts
|
|
73245
73823
|
import { exec as exec6 } from "node:child_process";
|
|
73246
|
-
|
|
73824
|
+
function execCommand(command, options) {
|
|
73825
|
+
return new Promise((resolve19, reject) => {
|
|
73826
|
+
exec6(command, options, (error, stdout, stderr) => {
|
|
73827
|
+
const stdoutText = typeof stdout === "string" ? stdout : String(stdout ?? "");
|
|
73828
|
+
const stderrText = typeof stderr === "string" ? stderr : String(stderr ?? "");
|
|
73829
|
+
if (error) {
|
|
73830
|
+
const errWithOutput = error;
|
|
73831
|
+
errWithOutput.stdout = stdoutText;
|
|
73832
|
+
errWithOutput.stderr = stderrText;
|
|
73833
|
+
reject(errWithOutput);
|
|
73834
|
+
return;
|
|
73835
|
+
}
|
|
73836
|
+
resolve19({ stdout: stdoutText, stderr: stderrText });
|
|
73837
|
+
});
|
|
73838
|
+
});
|
|
73839
|
+
}
|
|
73247
73840
|
async function createAiPromptExecutor(cwd) {
|
|
73248
73841
|
const disposeLog = createLogger2("cron-runner");
|
|
73249
73842
|
return async (prompt, modelProvider, modelId) => {
|
|
@@ -73271,17 +73864,19 @@ async function createAiPromptExecutor(cwd) {
|
|
|
73271
73864
|
};
|
|
73272
73865
|
}
|
|
73273
73866
|
function truncateOutput(stdout, stderr) {
|
|
73274
|
-
|
|
73275
|
-
|
|
73276
|
-
|
|
73277
|
-
|
|
73867
|
+
const out = stdout ?? "";
|
|
73868
|
+
const err = stderr ?? "";
|
|
73869
|
+
let combined = out;
|
|
73870
|
+
if (err) {
|
|
73871
|
+
combined += out ? "\n--- stderr ---\n" : "";
|
|
73872
|
+
combined += err;
|
|
73278
73873
|
}
|
|
73279
73874
|
if (combined.length > MAX_OUTPUT_LENGTH) {
|
|
73280
73875
|
combined = combined.slice(0, MAX_OUTPUT_LENGTH) + "\n[output truncated]";
|
|
73281
73876
|
}
|
|
73282
73877
|
return combined;
|
|
73283
73878
|
}
|
|
73284
|
-
var
|
|
73879
|
+
var log14, DEFAULT_TIMEOUT_MS6, MAX_BUFFER, MAX_OUTPUT_LENGTH, DEFAULT_POLL_INTERVAL_MS, MIN_POLL_INTERVAL_MS, CronRunner, AI_AUTOMATION_SYSTEM_PROMPT;
|
|
73285
73880
|
var init_cron_runner = __esm({
|
|
73286
73881
|
"../engine/src/cron-runner.ts"() {
|
|
73287
73882
|
"use strict";
|
|
@@ -73289,7 +73884,6 @@ var init_cron_runner = __esm({
|
|
|
73289
73884
|
init_logger2();
|
|
73290
73885
|
init_shell_utils();
|
|
73291
73886
|
init_pi();
|
|
73292
|
-
execAsync6 = promisify7(exec6);
|
|
73293
73887
|
log14 = createLogger2("cron-runner");
|
|
73294
73888
|
DEFAULT_TIMEOUT_MS6 = 5 * 60 * 1e3;
|
|
73295
73889
|
MAX_BUFFER = 1024 * 1024;
|
|
@@ -73433,7 +74027,7 @@ var init_cron_runner = __esm({
|
|
|
73433
74027
|
log14.log(`Executing ${schedule.name} (${schedule.id}): ${schedule.command}`);
|
|
73434
74028
|
try {
|
|
73435
74029
|
const timeoutMs = schedule.timeoutMs ?? DEFAULT_TIMEOUT_MS6;
|
|
73436
|
-
const { stdout, stderr } = await
|
|
74030
|
+
const { stdout, stderr } = await execCommand(schedule.command, {
|
|
73437
74031
|
timeout: timeoutMs,
|
|
73438
74032
|
maxBuffer: MAX_BUFFER,
|
|
73439
74033
|
shell: defaultShell
|
|
@@ -73550,7 +74144,7 @@ var init_cron_runner = __esm({
|
|
|
73550
74144
|
};
|
|
73551
74145
|
}
|
|
73552
74146
|
try {
|
|
73553
|
-
const { stdout, stderr } = await
|
|
74147
|
+
const { stdout, stderr } = await execCommand(step.command, {
|
|
73554
74148
|
timeout: timeoutMs,
|
|
73555
74149
|
maxBuffer: MAX_BUFFER,
|
|
73556
74150
|
shell: defaultShell
|
|
@@ -73623,8 +74217,9 @@ var init_cron_runner = __esm({
|
|
|
73623
74217
|
setTimeout(() => reject(new Error(`AI prompt step timed out after ${timeoutMs / 1e3}s`)), timeoutMs);
|
|
73624
74218
|
});
|
|
73625
74219
|
const response = await Promise.race([resultPromise, timeoutPromise]);
|
|
73626
|
-
const
|
|
73627
|
-
|
|
74220
|
+
const responseText = String(response ?? "");
|
|
74221
|
+
const output = responseText.length > MAX_OUTPUT_LENGTH ? responseText.slice(0, MAX_OUTPUT_LENGTH) + "\n[output truncated]" : responseText;
|
|
74222
|
+
log14.log(` \u2713 AI prompt step "${step.name}" completed (${responseText.length} chars)`);
|
|
73628
74223
|
return {
|
|
73629
74224
|
stepId: step.id,
|
|
73630
74225
|
stepName: step.name,
|
|
@@ -73721,7 +74316,7 @@ var init_cron_runner = __esm({
|
|
|
73721
74316
|
|
|
73722
74317
|
// ../engine/src/routine-runner.ts
|
|
73723
74318
|
import { exec as exec7 } from "node:child_process";
|
|
73724
|
-
import { promisify as
|
|
74319
|
+
import { promisify as promisify7 } from "node:util";
|
|
73725
74320
|
function truncateOutput2(stdout, stderr) {
|
|
73726
74321
|
let output = stdout;
|
|
73727
74322
|
if (stderr) {
|
|
@@ -73734,7 +74329,7 @@ function truncateOutput2(stdout, stderr) {
|
|
|
73734
74329
|
}
|
|
73735
74330
|
return output;
|
|
73736
74331
|
}
|
|
73737
|
-
var import_cron_parser4, log15,
|
|
74332
|
+
var import_cron_parser4, log15, execAsync6, DEFAULT_TIMEOUT_MS7, MAX_BUFFER2, MAX_OUTPUT_LENGTH2, MAX_CATCH_UP_INTERVALS, RoutineRunner;
|
|
73738
74333
|
var init_routine_runner = __esm({
|
|
73739
74334
|
"../engine/src/routine-runner.ts"() {
|
|
73740
74335
|
"use strict";
|
|
@@ -73742,7 +74337,7 @@ var init_routine_runner = __esm({
|
|
|
73742
74337
|
init_logger2();
|
|
73743
74338
|
init_shell_utils();
|
|
73744
74339
|
log15 = createLogger2("routine-runner");
|
|
73745
|
-
|
|
74340
|
+
execAsync6 = promisify7(exec7);
|
|
73746
74341
|
DEFAULT_TIMEOUT_MS7 = 5 * 60 * 1e3;
|
|
73747
74342
|
MAX_BUFFER2 = 1024 * 1024;
|
|
73748
74343
|
MAX_OUTPUT_LENGTH2 = 10 * 1024;
|
|
@@ -73897,7 +74492,7 @@ var init_routine_runner = __esm({
|
|
|
73897
74492
|
}
|
|
73898
74493
|
async executeCommand(command, timeoutMs, startedAt) {
|
|
73899
74494
|
try {
|
|
73900
|
-
const { stdout, stderr } = await
|
|
74495
|
+
const { stdout, stderr } = await execAsync6(command, {
|
|
73901
74496
|
timeout: timeoutMs ?? DEFAULT_TIMEOUT_MS7,
|
|
73902
74497
|
maxBuffer: MAX_BUFFER2,
|
|
73903
74498
|
shell: defaultShell
|
|
@@ -74596,9 +75191,9 @@ var init_stuck_task_detector = __esm({
|
|
|
74596
75191
|
|
|
74597
75192
|
// ../engine/src/self-healing.ts
|
|
74598
75193
|
import { exec as exec8 } from "node:child_process";
|
|
74599
|
-
import { promisify as
|
|
74600
|
-
import { existsSync as
|
|
74601
|
-
import { isAbsolute as
|
|
75194
|
+
import { promisify as promisify8 } from "node:util";
|
|
75195
|
+
import { existsSync as existsSync29, readdirSync as readdirSync5, rmSync as rmSync3, statSync as statSync5 } from "node:fs";
|
|
75196
|
+
import { isAbsolute as isAbsolute13, join as join37, relative as relative8, resolve as resolve16 } from "node:path";
|
|
74602
75197
|
function shellQuote(value) {
|
|
74603
75198
|
return `'${value.replace(/'/g, "'\\''")}'`;
|
|
74604
75199
|
}
|
|
@@ -74632,7 +75227,7 @@ function isNoTaskDoneFailure(task) {
|
|
|
74632
75227
|
function hasStepProgress(task) {
|
|
74633
75228
|
return task.steps.some((step) => step.status !== "pending");
|
|
74634
75229
|
}
|
|
74635
|
-
var log16,
|
|
75230
|
+
var log16, execAsync7, APPROVED_TRIAGE_RECOVERY_GRACE_MS, ORPHANED_EXECUTION_RECOVERY_GRACE_MS, ACTIVE_MERGE_STATUSES, NON_TERMINAL_STEP_STATUSES2, GHOST_REVIEW_PRESERVED_STATUSES, ORPHANED_WITH_WORKTREE_GRACE_MS, MAX_TASK_DONE_RETRIES, SelfHealingManager;
|
|
74636
75231
|
var init_self_healing = __esm({
|
|
74637
75232
|
"../engine/src/self-healing.ts"() {
|
|
74638
75233
|
"use strict";
|
|
@@ -74640,7 +75235,7 @@ var init_self_healing = __esm({
|
|
|
74640
75235
|
init_logger2();
|
|
74641
75236
|
init_worktree_pool();
|
|
74642
75237
|
log16 = createLogger2("self-healing");
|
|
74643
|
-
|
|
75238
|
+
execAsync7 = promisify8(exec8);
|
|
74644
75239
|
APPROVED_TRIAGE_RECOVERY_GRACE_MS = 6e4;
|
|
74645
75240
|
ORPHANED_EXECUTION_RECOVERY_GRACE_MS = 6e4;
|
|
74646
75241
|
ACTIVE_MERGE_STATUSES = /* @__PURE__ */ new Set(["merging", "merging-pr"]);
|
|
@@ -74845,12 +75440,12 @@ var init_self_healing = __esm({
|
|
|
74845
75440
|
if (completedSteps.length === 0) return;
|
|
74846
75441
|
const branchName = task.branch || `fusion/${task.id.toLowerCase()}`;
|
|
74847
75442
|
try {
|
|
74848
|
-
const { stdout: mergeBaseOut } = await
|
|
75443
|
+
const { stdout: mergeBaseOut } = await execAsync7(
|
|
74849
75444
|
`git merge-base "${branchName}" HEAD`,
|
|
74850
75445
|
{ cwd: this.options.rootDir, encoding: "utf-8", timeout: 3e4 }
|
|
74851
75446
|
);
|
|
74852
75447
|
const mergeBase = mergeBaseOut.trim();
|
|
74853
|
-
const { stdout: branchHeadOut } = await
|
|
75448
|
+
const { stdout: branchHeadOut } = await execAsync7(
|
|
74854
75449
|
`git rev-parse "${branchName}"`,
|
|
74855
75450
|
{ cwd: this.options.rootDir, encoding: "utf-8", timeout: 3e4 }
|
|
74856
75451
|
);
|
|
@@ -74898,11 +75493,11 @@ var init_self_healing = __esm({
|
|
|
74898
75493
|
const storedSha = task.mergeDetails?.commitSha;
|
|
74899
75494
|
if (storedSha) {
|
|
74900
75495
|
try {
|
|
74901
|
-
await
|
|
75496
|
+
await execAsync7(
|
|
74902
75497
|
`git merge-base --is-ancestor ${shellQuote(storedSha)} HEAD`,
|
|
74903
75498
|
{ cwd: this.options.rootDir }
|
|
74904
75499
|
);
|
|
74905
|
-
const { stdout: stdout2 } = await
|
|
75500
|
+
const { stdout: stdout2 } = await execAsync7(
|
|
74906
75501
|
`git log -1 --format=%H%x1f%s ${shellQuote(storedSha)}`,
|
|
74907
75502
|
{ cwd: this.options.rootDir, maxBuffer: 1024 * 1024 }
|
|
74908
75503
|
);
|
|
@@ -74910,7 +75505,7 @@ var init_self_healing = __esm({
|
|
|
74910
75505
|
if (sha2) {
|
|
74911
75506
|
const commit2 = { sha: sha2, subject: subject2 };
|
|
74912
75507
|
try {
|
|
74913
|
-
const stats = await
|
|
75508
|
+
const stats = await execAsync7(`git show --shortstat --format= ${shellQuote(sha2)}`, {
|
|
74914
75509
|
cwd: this.options.rootDir,
|
|
74915
75510
|
maxBuffer: 1024 * 1024
|
|
74916
75511
|
});
|
|
@@ -74931,7 +75526,7 @@ var init_self_healing = __esm({
|
|
|
74931
75526
|
`--grep=${grepArg}`,
|
|
74932
75527
|
shellQuote(range)
|
|
74933
75528
|
].join(" ");
|
|
74934
|
-
return
|
|
75529
|
+
return execAsync7(command, {
|
|
74935
75530
|
cwd: this.options.rootDir,
|
|
74936
75531
|
maxBuffer: 1024 * 1024
|
|
74937
75532
|
});
|
|
@@ -74971,7 +75566,7 @@ var init_self_healing = __esm({
|
|
|
74971
75566
|
if (!sha) return null;
|
|
74972
75567
|
const commit = { sha, subject };
|
|
74973
75568
|
try {
|
|
74974
|
-
const stats = await
|
|
75569
|
+
const stats = await execAsync7(`git show --shortstat --format= ${shellQuote(sha)}`, {
|
|
74975
75570
|
cwd: this.options.rootDir,
|
|
74976
75571
|
maxBuffer: 1024 * 1024
|
|
74977
75572
|
});
|
|
@@ -74985,9 +75580,9 @@ var init_self_healing = __esm({
|
|
|
74985
75580
|
return commit;
|
|
74986
75581
|
}
|
|
74987
75582
|
async cleanupInterruptedMergeArtifacts(task) {
|
|
74988
|
-
if (task.worktree &&
|
|
75583
|
+
if (task.worktree && existsSync29(task.worktree)) {
|
|
74989
75584
|
try {
|
|
74990
|
-
await
|
|
75585
|
+
await execAsync7(`git worktree remove ${shellQuote(task.worktree)} --force`, {
|
|
74991
75586
|
cwd: this.options.rootDir,
|
|
74992
75587
|
timeout: 12e4
|
|
74993
75588
|
});
|
|
@@ -75000,7 +75595,7 @@ var init_self_healing = __esm({
|
|
|
75000
75595
|
}
|
|
75001
75596
|
const branch = task.branch || `fusion/${task.id.toLowerCase()}`;
|
|
75002
75597
|
try {
|
|
75003
|
-
await
|
|
75598
|
+
await execAsync7(`git branch -D ${shellQuote(branch)}`, {
|
|
75004
75599
|
cwd: this.options.rootDir,
|
|
75005
75600
|
timeout: 12e4
|
|
75006
75601
|
});
|
|
@@ -75595,7 +76190,7 @@ var init_self_healing = __esm({
|
|
|
75595
76190
|
return false;
|
|
75596
76191
|
}
|
|
75597
76192
|
const staleness = now - new Date(t.updatedAt).getTime();
|
|
75598
|
-
const hasWorktree = t.worktree &&
|
|
76193
|
+
const hasWorktree = t.worktree && existsSync29(t.worktree);
|
|
75599
76194
|
const graceMs = hasWorktree ? ORPHANED_WITH_WORKTREE_GRACE_MS : ORPHANED_EXECUTION_RECOVERY_GRACE_MS;
|
|
75600
76195
|
return staleness >= graceMs;
|
|
75601
76196
|
});
|
|
@@ -75604,7 +76199,7 @@ var init_self_healing = __esm({
|
|
|
75604
76199
|
let recovered = 0;
|
|
75605
76200
|
for (const task of orphaned) {
|
|
75606
76201
|
try {
|
|
75607
|
-
const hadWorktree = task.worktree &&
|
|
76202
|
+
const hadWorktree = task.worktree && existsSync29(task.worktree);
|
|
75608
76203
|
const reason = hadWorktree ? "worktree exists but no active session" : "missing worktree/session";
|
|
75609
76204
|
await this.resetStepsIfWorkLost(task);
|
|
75610
76205
|
await this.store.updateTask(task.id, {
|
|
@@ -75750,9 +76345,9 @@ var init_self_healing = __esm({
|
|
|
75750
76345
|
}
|
|
75751
76346
|
}
|
|
75752
76347
|
async hasRecoverableGitWork(task) {
|
|
75753
|
-
if (task.worktree &&
|
|
76348
|
+
if (task.worktree && existsSync29(task.worktree)) {
|
|
75754
76349
|
try {
|
|
75755
|
-
const { stdout: status } = await
|
|
76350
|
+
const { stdout: status } = await execAsync7("git status --porcelain", {
|
|
75756
76351
|
cwd: task.worktree,
|
|
75757
76352
|
timeout: 3e4
|
|
75758
76353
|
});
|
|
@@ -75767,7 +76362,7 @@ var init_self_healing = __esm({
|
|
|
75767
76362
|
}
|
|
75768
76363
|
const branchName = task.branch || `fusion/${task.id.toLowerCase()}`;
|
|
75769
76364
|
try {
|
|
75770
|
-
await
|
|
76365
|
+
await execAsync7(`git rev-parse --verify "${branchName}"`, {
|
|
75771
76366
|
cwd: this.options.rootDir,
|
|
75772
76367
|
timeout: 3e4
|
|
75773
76368
|
});
|
|
@@ -75775,7 +76370,7 @@ var init_self_healing = __esm({
|
|
|
75775
76370
|
return false;
|
|
75776
76371
|
}
|
|
75777
76372
|
try {
|
|
75778
|
-
const { stdout: uniqueCommits } = await
|
|
76373
|
+
const { stdout: uniqueCommits } = await execAsync7(
|
|
75779
76374
|
`git rev-list --count HEAD.."${branchName}"`,
|
|
75780
76375
|
{ cwd: this.options.rootDir, timeout: 3e4 }
|
|
75781
76376
|
);
|
|
@@ -75875,7 +76470,7 @@ var init_self_healing = __esm({
|
|
|
75875
76470
|
/** Run `git worktree prune` to clean stale metadata. */
|
|
75876
76471
|
async pruneWorktrees() {
|
|
75877
76472
|
try {
|
|
75878
|
-
await
|
|
76473
|
+
await execAsync7("git worktree prune", {
|
|
75879
76474
|
cwd: this.options.rootDir,
|
|
75880
76475
|
timeout: 3e4
|
|
75881
76476
|
});
|
|
@@ -75908,7 +76503,7 @@ var init_self_healing = __esm({
|
|
|
75908
76503
|
let cleaned = 0;
|
|
75909
76504
|
for (const worktreePath of orphaned) {
|
|
75910
76505
|
try {
|
|
75911
|
-
await
|
|
76506
|
+
await execAsync7(`git worktree remove "${worktreePath}" --force`, {
|
|
75912
76507
|
cwd: this.options.rootDir,
|
|
75913
76508
|
timeout: 3e4
|
|
75914
76509
|
});
|
|
@@ -75935,11 +76530,11 @@ var init_self_healing = __esm({
|
|
|
75935
76530
|
* tracks registered idle worktrees, never these orphans.
|
|
75936
76531
|
*/
|
|
75937
76532
|
async reapUnregisteredOrphans() {
|
|
75938
|
-
const worktreesDir =
|
|
75939
|
-
if (!
|
|
76533
|
+
const worktreesDir = join37(this.options.rootDir, ".worktrees");
|
|
76534
|
+
if (!existsSync29(worktreesDir)) return 0;
|
|
75940
76535
|
let dirs;
|
|
75941
76536
|
try {
|
|
75942
|
-
dirs = readdirSync5(worktreesDir, { withFileTypes: true }).filter((e) => e.isDirectory()).map((e) =>
|
|
76537
|
+
dirs = readdirSync5(worktreesDir, { withFileTypes: true }).filter((e) => e.isDirectory()).map((e) => join37(worktreesDir, e.name));
|
|
75943
76538
|
} catch (err) {
|
|
75944
76539
|
log16.warn(`Failed to read .worktrees/ for unregistered orphan reap: ${err instanceof Error ? err.message : String(err)}`);
|
|
75945
76540
|
return 0;
|
|
@@ -75950,7 +76545,7 @@ var init_self_healing = __esm({
|
|
|
75950
76545
|
let cleaned = 0;
|
|
75951
76546
|
for (const path2 of unregistered) {
|
|
75952
76547
|
const rel = relative8(worktreesDir, path2);
|
|
75953
|
-
if (rel === "" || rel.startsWith("..") ||
|
|
76548
|
+
if (rel === "" || rel.startsWith("..") || isAbsolute13(rel)) {
|
|
75954
76549
|
log16.warn(`Refusing to remove path outside .worktrees: ${path2}`);
|
|
75955
76550
|
continue;
|
|
75956
76551
|
}
|
|
@@ -75988,7 +76583,7 @@ var init_self_healing = __esm({
|
|
|
75988
76583
|
const deletedBranches = [];
|
|
75989
76584
|
for (const branch of orphaned) {
|
|
75990
76585
|
try {
|
|
75991
|
-
await
|
|
76586
|
+
await execAsync7(`git branch -d "${branch}"`, {
|
|
75992
76587
|
cwd: this.options.rootDir,
|
|
75993
76588
|
timeout: 3e4
|
|
75994
76589
|
});
|
|
@@ -76001,7 +76596,7 @@ var init_self_healing = __esm({
|
|
|
76001
76596
|
`Safe delete failed for orphaned branch ${branch}: ${errorMessage} \u2014 attempting force delete`
|
|
76002
76597
|
);
|
|
76003
76598
|
try {
|
|
76004
|
-
await
|
|
76599
|
+
await execAsync7(`git branch -D "${branch}"`, {
|
|
76005
76600
|
cwd: this.options.rootDir,
|
|
76006
76601
|
timeout: 3e4
|
|
76007
76602
|
});
|
|
@@ -76044,8 +76639,8 @@ var init_self_healing = __esm({
|
|
|
76044
76639
|
}
|
|
76045
76640
|
/** Remove oldest idle worktrees if total count exceeds 2× maxWorktrees. */
|
|
76046
76641
|
async enforceWorktreeCap() {
|
|
76047
|
-
const worktreesDir =
|
|
76048
|
-
if (!
|
|
76642
|
+
const worktreesDir = join37(this.options.rootDir, ".worktrees");
|
|
76643
|
+
if (!existsSync29(worktreesDir)) return;
|
|
76049
76644
|
try {
|
|
76050
76645
|
const settings = await this.store.getSettings();
|
|
76051
76646
|
const cap = (settings.maxWorktrees ?? 4) * 2;
|
|
@@ -76069,7 +76664,7 @@ var init_self_healing = __esm({
|
|
|
76069
76664
|
for (const { path: worktreePath } of withMtime) {
|
|
76070
76665
|
if (removed >= excess) break;
|
|
76071
76666
|
try {
|
|
76072
|
-
await
|
|
76667
|
+
await execAsync7(`git worktree remove "${worktreePath}" --force`, {
|
|
76073
76668
|
cwd: this.options.rootDir,
|
|
76074
76669
|
timeout: 3e4
|
|
76075
76670
|
});
|
|
@@ -76092,7 +76687,7 @@ var init_self_healing = __esm({
|
|
|
76092
76687
|
});
|
|
76093
76688
|
|
|
76094
76689
|
// ../engine/src/plugin-runner.ts
|
|
76095
|
-
import { Type as
|
|
76690
|
+
import { Type as Type7 } from "@mariozechner/pi-ai";
|
|
76096
76691
|
var DEFAULT_HOOK_TIMEOUT_MS, PluginRunner;
|
|
76097
76692
|
var init_plugin_runner = __esm({
|
|
76098
76693
|
"../engine/src/plugin-runner.ts"() {
|
|
@@ -76407,7 +77002,7 @@ var init_plugin_runner = __esm({
|
|
|
76407
77002
|
};
|
|
76408
77003
|
}
|
|
76409
77004
|
};
|
|
76410
|
-
const anySchema =
|
|
77005
|
+
const anySchema = Type7.Any();
|
|
76411
77006
|
return {
|
|
76412
77007
|
name: `plugin_${pluginTool.name}`,
|
|
76413
77008
|
label: pluginTool.name,
|
|
@@ -77706,7 +78301,7 @@ var init_ipc_host = __esm({
|
|
|
77706
78301
|
import { EventEmitter as EventEmitter20 } from "node:events";
|
|
77707
78302
|
import { fork } from "node:child_process";
|
|
77708
78303
|
import { fileURLToPath as fileURLToPath3 } from "node:url";
|
|
77709
|
-
import { dirname as dirname10, join as
|
|
78304
|
+
import { dirname as dirname10, join as join38 } from "node:path";
|
|
77710
78305
|
var HealthMonitor, ChildProcessRuntime;
|
|
77711
78306
|
var init_child_process_runtime = __esm({
|
|
77712
78307
|
"../engine/src/runtimes/child-process-runtime.ts"() {
|
|
@@ -77868,7 +78463,7 @@ var init_child_process_runtime = __esm({
|
|
|
77868
78463
|
const isCompiled = !import.meta.url.endsWith(".ts");
|
|
77869
78464
|
const currentDir = dirname10(fileURLToPath3(import.meta.url));
|
|
77870
78465
|
const workerFile = isCompiled ? "child-process-worker.js" : "child-process-worker.ts";
|
|
77871
|
-
return
|
|
78466
|
+
return join38(currentDir, workerFile);
|
|
77872
78467
|
}
|
|
77873
78468
|
/**
|
|
77874
78469
|
* Set up event forwarding from IPC host to runtime listeners.
|
|
@@ -79339,7 +79934,8 @@ var init_provider_adapters = __esm({
|
|
|
79339
79934
|
|
|
79340
79935
|
// ../engine/src/remote-access/tunnel-process-manager.ts
|
|
79341
79936
|
import { EventEmitter as EventEmitter23 } from "node:events";
|
|
79342
|
-
import { spawn as
|
|
79937
|
+
import { exec as exec9, execFile as execFile3, spawn as spawn4 } from "node:child_process";
|
|
79938
|
+
import { promisify as promisify9 } from "node:util";
|
|
79343
79939
|
function nowIso() {
|
|
79344
79940
|
return (/* @__PURE__ */ new Date()).toISOString();
|
|
79345
79941
|
}
|
|
@@ -79379,7 +79975,7 @@ function toStateError(code, err) {
|
|
|
79379
79975
|
at: nowIso()
|
|
79380
79976
|
};
|
|
79381
79977
|
}
|
|
79382
|
-
var DEFAULT_MAX_LOG_ENTRIES, DEFAULT_STOP_TIMEOUT_MS2, LineBuffer, TunnelProcessManager;
|
|
79978
|
+
var DEFAULT_MAX_LOG_ENTRIES, DEFAULT_STOP_TIMEOUT_MS2, execFileAsync, execAsync8, LineBuffer, TunnelProcessManager;
|
|
79383
79979
|
var init_tunnel_process_manager = __esm({
|
|
79384
79980
|
"../engine/src/remote-access/tunnel-process-manager.ts"() {
|
|
79385
79981
|
"use strict";
|
|
@@ -79387,6 +79983,8 @@ var init_tunnel_process_manager = __esm({
|
|
|
79387
79983
|
init_provider_adapters();
|
|
79388
79984
|
DEFAULT_MAX_LOG_ENTRIES = 400;
|
|
79389
79985
|
DEFAULT_STOP_TIMEOUT_MS2 = 5e3;
|
|
79986
|
+
execFileAsync = promisify9(execFile3);
|
|
79987
|
+
execAsync8 = promisify9(exec9);
|
|
79390
79988
|
LineBuffer = class {
|
|
79391
79989
|
pending = "";
|
|
79392
79990
|
push(chunk) {
|
|
@@ -79427,7 +80025,7 @@ var init_tunnel_process_manager = __esm({
|
|
|
79427
80025
|
super();
|
|
79428
80026
|
this.maxLogEntries = options.maxLogEntries ?? DEFAULT_MAX_LOG_ENTRIES;
|
|
79429
80027
|
this.defaultStopTimeoutMs = options.stopTimeoutMs ?? DEFAULT_STOP_TIMEOUT_MS2;
|
|
79430
|
-
this.spawnImpl = options.spawnImpl ??
|
|
80028
|
+
this.spawnImpl = options.spawnImpl ?? spawn4;
|
|
79431
80029
|
}
|
|
79432
80030
|
getStatus() {
|
|
79433
80031
|
return { ...this.status, lastError: this.status.lastError ? { ...this.status.lastError } : null };
|
|
@@ -79458,6 +80056,51 @@ var init_tunnel_process_manager = __esm({
|
|
|
79458
80056
|
await this.stopInternal();
|
|
79459
80057
|
});
|
|
79460
80058
|
}
|
|
80059
|
+
async detectExternalFunnel() {
|
|
80060
|
+
if (this.processHandle || this.status.state === "starting" || this.status.state === "running") {
|
|
80061
|
+
return null;
|
|
80062
|
+
}
|
|
80063
|
+
try {
|
|
80064
|
+
const { stdout } = await execFileAsync("tailscale", ["status", "--json"], { timeout: 3e3 });
|
|
80065
|
+
const data = JSON.parse(String(stdout));
|
|
80066
|
+
const dnsName = data.Self?.DNSName?.replace(/\.$/, "");
|
|
80067
|
+
if (!dnsName) {
|
|
80068
|
+
return null;
|
|
80069
|
+
}
|
|
80070
|
+
return {
|
|
80071
|
+
provider: "tailscale",
|
|
80072
|
+
url: `https://${dnsName}/`,
|
|
80073
|
+
pid: null
|
|
80074
|
+
};
|
|
80075
|
+
} catch {
|
|
80076
|
+
return null;
|
|
80077
|
+
}
|
|
80078
|
+
}
|
|
80079
|
+
async killExternalFunnel() {
|
|
80080
|
+
const resetCommands = [
|
|
80081
|
+
{ command: "tailscale", args: ["serve", "reset"] },
|
|
80082
|
+
{ command: "tailscale", args: ["funnel", "reset"] },
|
|
80083
|
+
{ command: "tailscale", args: ["funnel", "off"] }
|
|
80084
|
+
];
|
|
80085
|
+
for (const resetCommand of resetCommands) {
|
|
80086
|
+
try {
|
|
80087
|
+
await execFileAsync(resetCommand.command, resetCommand.args, { timeout: 5e3 });
|
|
80088
|
+
return;
|
|
80089
|
+
} catch {
|
|
80090
|
+
}
|
|
80091
|
+
}
|
|
80092
|
+
try {
|
|
80093
|
+
const { stdout } = await execAsync8('pgrep -f "tailscale funnel"', { timeout: 5e3 });
|
|
80094
|
+
const pids = stdout.split(/\s+/).map((value) => Number(value.trim())).filter((value) => Number.isInteger(value) && value > 0);
|
|
80095
|
+
await Promise.all(pids.map(async (pid) => {
|
|
80096
|
+
try {
|
|
80097
|
+
process.kill(pid, "SIGTERM");
|
|
80098
|
+
} catch {
|
|
80099
|
+
}
|
|
80100
|
+
}));
|
|
80101
|
+
} catch {
|
|
80102
|
+
}
|
|
80103
|
+
}
|
|
79461
80104
|
async switchProvider(target, config) {
|
|
79462
80105
|
return this.runExclusive(async () => {
|
|
79463
80106
|
const previousProvider = this.status.provider;
|
|
@@ -79740,7 +80383,7 @@ var init_tunnel_process_manager = __esm({
|
|
|
79740
80383
|
});
|
|
79741
80384
|
|
|
79742
80385
|
// ../engine/src/project-engine.ts
|
|
79743
|
-
import { execFile as
|
|
80386
|
+
import { execFile as execFile4 } from "node:child_process";
|
|
79744
80387
|
import { promisify as promisify10 } from "node:util";
|
|
79745
80388
|
function formatErrorDetails(error) {
|
|
79746
80389
|
if (error instanceof Error) {
|
|
@@ -79752,7 +80395,7 @@ function formatErrorDetails(error) {
|
|
|
79752
80395
|
const detail = String(error);
|
|
79753
80396
|
return { message: detail, detail };
|
|
79754
80397
|
}
|
|
79755
|
-
var
|
|
80398
|
+
var execFileAsync2, MERGE_HANDOFF_GRACE_MS, isRemoteActive, ProjectEngine;
|
|
79756
80399
|
var init_project_engine = __esm({
|
|
79757
80400
|
"../engine/src/project-engine.ts"() {
|
|
79758
80401
|
"use strict";
|
|
@@ -79769,7 +80412,7 @@ var init_project_engine = __esm({
|
|
|
79769
80412
|
init_research_orchestrator();
|
|
79770
80413
|
init_research_step_runner();
|
|
79771
80414
|
init_tunnel_process_manager();
|
|
79772
|
-
|
|
80415
|
+
execFileAsync2 = promisify10(execFile4);
|
|
79773
80416
|
MERGE_HANDOFF_GRACE_MS = 300;
|
|
79774
80417
|
isRemoteActive = (ra) => ra?.activeProvider != null && (ra.providers[ra.activeProvider]?.enabled ?? false);
|
|
79775
80418
|
ProjectEngine = class _ProjectEngine {
|
|
@@ -80173,6 +80816,30 @@ ${detail}`
|
|
|
80173
80816
|
}
|
|
80174
80817
|
return manager.getStatus();
|
|
80175
80818
|
}
|
|
80819
|
+
async detectExternalTunnel() {
|
|
80820
|
+
const manager = this.remoteTunnelManager;
|
|
80821
|
+
if (!manager) {
|
|
80822
|
+
return null;
|
|
80823
|
+
}
|
|
80824
|
+
const settings = await this.runtime.getTaskStore().getSettings();
|
|
80825
|
+
const provider = settings.remoteAccess?.activeProvider ?? null;
|
|
80826
|
+
if (provider !== "tailscale") {
|
|
80827
|
+
return null;
|
|
80828
|
+
}
|
|
80829
|
+
return manager.detectExternalFunnel();
|
|
80830
|
+
}
|
|
80831
|
+
async killExternalTunnel() {
|
|
80832
|
+
const manager = this.remoteTunnelManager;
|
|
80833
|
+
if (!manager) {
|
|
80834
|
+
return;
|
|
80835
|
+
}
|
|
80836
|
+
const settings = await this.runtime.getTaskStore().getSettings();
|
|
80837
|
+
const provider = settings.remoteAccess?.activeProvider ?? null;
|
|
80838
|
+
if (provider !== "tailscale") {
|
|
80839
|
+
return;
|
|
80840
|
+
}
|
|
80841
|
+
await manager.killExternalFunnel();
|
|
80842
|
+
}
|
|
80176
80843
|
/** Get the RoutineRunner (if initialized). */
|
|
80177
80844
|
getRoutineRunner() {
|
|
80178
80845
|
return this.runtime.getRoutineRunner();
|
|
@@ -80382,7 +81049,7 @@ ${detail}`
|
|
|
80382
81049
|
async checkExecutableAvailable(command) {
|
|
80383
81050
|
const checker = process.platform === "win32" ? "where" : "which";
|
|
80384
81051
|
try {
|
|
80385
|
-
await
|
|
81052
|
+
await execFileAsync2(checker, [command]);
|
|
80386
81053
|
return { available: true };
|
|
80387
81054
|
} catch {
|
|
80388
81055
|
return {
|
|
@@ -83744,7 +84411,7 @@ var init_src3 = __esm({
|
|
|
83744
84411
|
});
|
|
83745
84412
|
|
|
83746
84413
|
// ../../plugins/fusion-plugin-hermes-runtime/dist/cli-spawn.js
|
|
83747
|
-
import { spawn as
|
|
84414
|
+
import { spawn as spawn5, spawnSync } from "node:child_process";
|
|
83748
84415
|
import os2 from "node:os";
|
|
83749
84416
|
import path, { sep as PATH_SEP } from "node:path";
|
|
83750
84417
|
function resolveBinaryForSpawn(binary) {
|
|
@@ -83854,7 +84521,7 @@ async function invokeHermesCli(prompt, settings, resumeSessionId, signal) {
|
|
|
83854
84521
|
if (settings.profile) {
|
|
83855
84522
|
spawnEnv.HERMES_HOME = hermesProfileHome(settings.profile);
|
|
83856
84523
|
}
|
|
83857
|
-
const child =
|
|
84524
|
+
const child = spawn5(binary, args, {
|
|
83858
84525
|
stdio: ["ignore", "pipe", "pipe"],
|
|
83859
84526
|
env: spawnEnv
|
|
83860
84527
|
});
|
|
@@ -84067,7 +84734,7 @@ var init_dist = __esm({
|
|
|
84067
84734
|
});
|
|
84068
84735
|
|
|
84069
84736
|
// ../../plugins/fusion-plugin-openclaw-runtime/dist/pi-module.js
|
|
84070
|
-
import { spawn as
|
|
84737
|
+
import { spawn as spawn6 } from "node:child_process";
|
|
84071
84738
|
import { randomUUID as randomUUID12 } from "node:crypto";
|
|
84072
84739
|
function asString(v) {
|
|
84073
84740
|
return typeof v === "string" && v.trim() !== "" ? v.trim() : void 0;
|
|
@@ -84144,7 +84811,7 @@ async function promptCli(session, message, config, callbacks, signal) {
|
|
|
84144
84811
|
cb.onToolStart?.("openclaw.agent", { sessionId: session.sessionId });
|
|
84145
84812
|
return new Promise((resolve19, reject) => {
|
|
84146
84813
|
let settled = false;
|
|
84147
|
-
const child =
|
|
84814
|
+
const child = spawn6(config.binaryPath, args, {
|
|
84148
84815
|
stdio: ["ignore", "pipe", "pipe"]
|
|
84149
84816
|
});
|
|
84150
84817
|
const hardKill = setTimeout(() => {
|
|
@@ -84295,7 +84962,7 @@ var init_runtime_adapter2 = __esm({
|
|
|
84295
84962
|
});
|
|
84296
84963
|
|
|
84297
84964
|
// ../../plugins/fusion-plugin-openclaw-runtime/dist/probe.js
|
|
84298
|
-
import { spawn as
|
|
84965
|
+
import { spawn as spawn7 } from "node:child_process";
|
|
84299
84966
|
async function probeOpenClawBinary(opts = {}) {
|
|
84300
84967
|
const startedAt = Date.now();
|
|
84301
84968
|
const binary = opts.binaryPath ?? "openclaw";
|
|
@@ -84306,7 +84973,7 @@ async function probeOpenClawBinary(opts = {}) {
|
|
|
84306
84973
|
resolvePromise({ ...partial, probeDurationMs: Date.now() - startedAt });
|
|
84307
84974
|
};
|
|
84308
84975
|
let settled = false;
|
|
84309
|
-
const child =
|
|
84976
|
+
const child = spawn7(resolvedPath ?? binary, ["--version"], {
|
|
84310
84977
|
stdio: ["ignore", "pipe", "pipe"]
|
|
84311
84978
|
});
|
|
84312
84979
|
const timer = setTimeout(() => {
|
|
@@ -84367,7 +85034,7 @@ async function probeOpenClawBinary(opts = {}) {
|
|
|
84367
85034
|
async function tryResolveBinaryPath(binary) {
|
|
84368
85035
|
return new Promise((resolvePromise) => {
|
|
84369
85036
|
const which = process.platform === "win32" ? "where" : "which";
|
|
84370
|
-
const child =
|
|
85037
|
+
const child = spawn7(which, [binary], { stdio: ["ignore", "pipe", "ignore"] });
|
|
84371
85038
|
let out = "";
|
|
84372
85039
|
child.stdout?.on("data", (chunk) => {
|
|
84373
85040
|
out += chunk.toString("utf-8");
|
|
@@ -90931,13 +91598,13 @@ var init_github_poll = __esm({
|
|
|
90931
91598
|
});
|
|
90932
91599
|
|
|
90933
91600
|
// ../dashboard/src/routes/resolve-diff-base.ts
|
|
90934
|
-
import { execFile as
|
|
91601
|
+
import { execFile as execFile5 } from "node:child_process";
|
|
90935
91602
|
import { promisify as promisify11 } from "node:util";
|
|
90936
|
-
var
|
|
91603
|
+
var execFileAsync3;
|
|
90937
91604
|
var init_resolve_diff_base = __esm({
|
|
90938
91605
|
"../dashboard/src/routes/resolve-diff-base.ts"() {
|
|
90939
91606
|
"use strict";
|
|
90940
|
-
|
|
91607
|
+
execFileAsync3 = promisify11(execFile5);
|
|
90941
91608
|
}
|
|
90942
91609
|
});
|
|
90943
91610
|
|
|
@@ -90956,7 +91623,7 @@ var init_register_git_github = __esm({
|
|
|
90956
91623
|
});
|
|
90957
91624
|
|
|
90958
91625
|
// ../dashboard/src/terminal.ts
|
|
90959
|
-
import { spawn as
|
|
91626
|
+
import { spawn as spawn8 } from "node:child_process";
|
|
90960
91627
|
import { randomUUID as randomUUID13 } from "node:crypto";
|
|
90961
91628
|
import { EventEmitter as EventEmitter29 } from "node:events";
|
|
90962
91629
|
function extractBaseCommand(command) {
|
|
@@ -91118,7 +91785,7 @@ var init_terminal = __esm({
|
|
|
91118
91785
|
return { sessionId: "", error: validation.error };
|
|
91119
91786
|
}
|
|
91120
91787
|
const sessionId = randomUUID13();
|
|
91121
|
-
const childProcess =
|
|
91788
|
+
const childProcess = spawn8(command, [], {
|
|
91122
91789
|
cwd,
|
|
91123
91790
|
shell: true,
|
|
91124
91791
|
stdio: ["pipe", "pipe", "pipe"],
|
|
@@ -91296,13 +91963,13 @@ var init_register_agents_projects_nodes = __esm({
|
|
|
91296
91963
|
});
|
|
91297
91964
|
|
|
91298
91965
|
// ../dashboard/src/exec-file.ts
|
|
91299
|
-
import { execFile as
|
|
91966
|
+
import { execFile as execFile6 } from "node:child_process";
|
|
91300
91967
|
import { promisify as promisify12 } from "node:util";
|
|
91301
|
-
var
|
|
91968
|
+
var execFileAsync4;
|
|
91302
91969
|
var init_exec_file = __esm({
|
|
91303
91970
|
"../dashboard/src/exec-file.ts"() {
|
|
91304
91971
|
"use strict";
|
|
91305
|
-
|
|
91972
|
+
execFileAsync4 = promisify12(execFile6);
|
|
91306
91973
|
}
|
|
91307
91974
|
});
|
|
91308
91975
|
|
|
@@ -91829,7 +92496,7 @@ function remapSpawnError(err, bin) {
|
|
|
91829
92496
|
return err instanceof Error ? err : new Error(String(err));
|
|
91830
92497
|
}
|
|
91831
92498
|
async function spawnPaperclipCliJson(args, opts) {
|
|
91832
|
-
const { spawn:
|
|
92499
|
+
const { spawn: spawn11 } = await import("node:child_process");
|
|
91833
92500
|
const bin = opts.cliBinaryPath ?? "paperclipai";
|
|
91834
92501
|
const fullArgs = [...args, "--json"];
|
|
91835
92502
|
if (opts.cliConfigPath) {
|
|
@@ -91840,7 +92507,7 @@ async function spawnPaperclipCliJson(args, opts) {
|
|
|
91840
92507
|
return new Promise((resolve19, reject) => {
|
|
91841
92508
|
let child;
|
|
91842
92509
|
try {
|
|
91843
|
-
child =
|
|
92510
|
+
child = spawn11(bin, fullArgs, { stdio: ["ignore", "pipe", "pipe"] });
|
|
91844
92511
|
} catch (err) {
|
|
91845
92512
|
reject(remapSpawnError(err, bin));
|
|
91846
92513
|
return;
|
|
@@ -96413,7 +97080,7 @@ var init_auth_middleware = __esm({
|
|
|
96413
97080
|
|
|
96414
97081
|
// ../dashboard/src/server.ts
|
|
96415
97082
|
import express from "express";
|
|
96416
|
-
import { join as
|
|
97083
|
+
import { join as join39, dirname as dirname11 } from "node:path";
|
|
96417
97084
|
import { fileURLToPath as fileURLToPath4 } from "node:url";
|
|
96418
97085
|
function clearAiSessionCleanupInterval() {
|
|
96419
97086
|
if (!aiSessionCleanupIntervalHandle) {
|
|
@@ -96702,8 +97369,8 @@ __export(task_exports, {
|
|
|
96702
97369
|
runTaskUpdate: () => runTaskUpdate
|
|
96703
97370
|
});
|
|
96704
97371
|
import { createInterface as createInterface2 } from "node:readline/promises";
|
|
96705
|
-
import { watchFile, unwatchFile, statSync as statSync6, existsSync as
|
|
96706
|
-
import { basename as basename10, join as
|
|
97372
|
+
import { watchFile, unwatchFile, statSync as statSync6, existsSync as existsSync30, readFileSync as readFileSync9 } from "node:fs";
|
|
97373
|
+
import { basename as basename10, join as join40 } from "node:path";
|
|
96707
97374
|
function getGitHubIssueUrl(sourceMetadata) {
|
|
96708
97375
|
if (!sourceMetadata || typeof sourceMetadata !== "object") return void 0;
|
|
96709
97376
|
const issueUrl = sourceMetadata.issueUrl;
|
|
@@ -97003,8 +97670,8 @@ async function runTaskLogs(id, options = {}, projectName) {
|
|
|
97003
97670
|
printEntries(filteredEntries);
|
|
97004
97671
|
if (options.follow) {
|
|
97005
97672
|
const projectPath = projectContext?.projectPath ?? process.cwd();
|
|
97006
|
-
const logPath =
|
|
97007
|
-
if (!
|
|
97673
|
+
const logPath = join40(projectPath, ".fusion", "tasks", id, "agent.log");
|
|
97674
|
+
if (!existsSync30(logPath)) {
|
|
97008
97675
|
console.log(`
|
|
97009
97676
|
Waiting for log file to be created...`);
|
|
97010
97677
|
}
|
|
@@ -97033,7 +97700,7 @@ async function runTaskLogs(id, options = {}, projectName) {
|
|
|
97033
97700
|
lastPosition = 0;
|
|
97034
97701
|
}
|
|
97035
97702
|
if (stats.size > lastPosition) {
|
|
97036
|
-
const content =
|
|
97703
|
+
const content = readFileSync9(logPath, "utf-8");
|
|
97037
97704
|
const lines = content.slice(lastPosition).split("\n");
|
|
97038
97705
|
for (const line of lines) {
|
|
97039
97706
|
if (!line.trim()) continue;
|
|
@@ -98034,7 +98701,7 @@ __export(skills_exports, {
|
|
|
98034
98701
|
runSkillsSearch: () => runSkillsSearch,
|
|
98035
98702
|
searchSkills: () => searchSkills
|
|
98036
98703
|
});
|
|
98037
|
-
import { spawn as
|
|
98704
|
+
import { spawn as spawn9 } from "node:child_process";
|
|
98038
98705
|
async function searchSkills(query, limit = 10) {
|
|
98039
98706
|
const url = `${SKILLS_API_BASE}/api/search?q=${encodeURIComponent(query)}&limit=${limit}`;
|
|
98040
98707
|
try {
|
|
@@ -98112,7 +98779,7 @@ async function runSkillsInstall(args, options) {
|
|
|
98112
98779
|
npxArgs.push("--skill", options.skill);
|
|
98113
98780
|
}
|
|
98114
98781
|
npxArgs.push("-y", "-a", "pi");
|
|
98115
|
-
const child =
|
|
98782
|
+
const child = spawn9("npx", npxArgs, {
|
|
98116
98783
|
cwd: process.cwd(),
|
|
98117
98784
|
stdio: "inherit",
|
|
98118
98785
|
shell: true
|
|
@@ -98144,12 +98811,12 @@ var init_skills = __esm({
|
|
|
98144
98811
|
// src/extension.ts
|
|
98145
98812
|
init_src();
|
|
98146
98813
|
init_gh_cli();
|
|
98147
|
-
import { Type as
|
|
98814
|
+
import { Type as Type8 } from "typebox";
|
|
98148
98815
|
import { StringEnum } from "@mariozechner/pi-ai";
|
|
98149
|
-
import { resolve as resolve18, basename as basename11, extname as extname3, join as
|
|
98816
|
+
import { resolve as resolve18, basename as basename11, extname as extname3, join as join41 } from "node:path";
|
|
98150
98817
|
import { readFile as readFile18 } from "node:fs/promises";
|
|
98151
|
-
import { existsSync as
|
|
98152
|
-
import { spawn as
|
|
98818
|
+
import { existsSync as existsSync31 } from "node:fs";
|
|
98819
|
+
import { spawn as spawn10 } from "node:child_process";
|
|
98153
98820
|
var MIME_TYPES2 = {
|
|
98154
98821
|
".png": "image/png",
|
|
98155
98822
|
".jpg": "image/jpeg",
|
|
@@ -98168,7 +98835,7 @@ var MIME_TYPES2 = {
|
|
|
98168
98835
|
function resolveProjectRoot(cwd) {
|
|
98169
98836
|
let current = resolve18(cwd);
|
|
98170
98837
|
while (true) {
|
|
98171
|
-
if (
|
|
98838
|
+
if (existsSync31(join41(current, ".fusion"))) {
|
|
98172
98839
|
return current;
|
|
98173
98840
|
}
|
|
98174
98841
|
const parent = resolve18(current, "..");
|
|
@@ -98189,7 +98856,7 @@ async function getStore2(cwd) {
|
|
|
98189
98856
|
return store;
|
|
98190
98857
|
}
|
|
98191
98858
|
function getFusionDir(cwd) {
|
|
98192
|
-
return
|
|
98859
|
+
return join41(resolveProjectRoot(cwd), ".fusion");
|
|
98193
98860
|
}
|
|
98194
98861
|
async function validateAssignableAgentId(cwd, agentId) {
|
|
98195
98862
|
const { AgentStore: AgentStore2, isEphemeralAgent: isEphemeralAgent2 } = await Promise.resolve().then(() => (init_src(), src_exports));
|
|
@@ -98250,15 +98917,15 @@ function kbExtension(pi) {
|
|
|
98250
98917
|
"Use fn_task_create for task tracking \u2014 be descriptive so the planning agent can write a good plan.",
|
|
98251
98918
|
"Include the problem AND desired outcome. For bugs, describe current vs expected behavior."
|
|
98252
98919
|
],
|
|
98253
|
-
parameters:
|
|
98254
|
-
description:
|
|
98255
|
-
depends:
|
|
98256
|
-
|
|
98920
|
+
parameters: Type8.Object({
|
|
98921
|
+
description: Type8.String({ description: "What needs to be done \u2014 be descriptive" }),
|
|
98922
|
+
depends: Type8.Optional(
|
|
98923
|
+
Type8.Array(Type8.String(), {
|
|
98257
98924
|
description: "Task IDs this depends on (e.g. ['FN-001', 'FN-002'])"
|
|
98258
98925
|
})
|
|
98259
98926
|
),
|
|
98260
|
-
agentId:
|
|
98261
|
-
|
|
98927
|
+
agentId: Type8.Optional(
|
|
98928
|
+
Type8.String({
|
|
98262
98929
|
description: "Agent ID to assign this task to (e.g. 'agent-abc123')"
|
|
98263
98930
|
})
|
|
98264
98931
|
)
|
|
@@ -98311,25 +98978,25 @@ Column: triage
|
|
|
98311
98978
|
"Use fn_task_update to modify task title, description, dependencies, or assigned agent after creation.",
|
|
98312
98979
|
"At least one field must be provided to update."
|
|
98313
98980
|
],
|
|
98314
|
-
parameters:
|
|
98315
|
-
id:
|
|
98316
|
-
title:
|
|
98317
|
-
description:
|
|
98318
|
-
depends:
|
|
98319
|
-
|
|
98981
|
+
parameters: Type8.Object({
|
|
98982
|
+
id: Type8.String({ description: "Task ID (e.g. FN-001)" }),
|
|
98983
|
+
title: Type8.Optional(Type8.String({ description: "New task title" })),
|
|
98984
|
+
description: Type8.Optional(Type8.String({ description: "New task description" })),
|
|
98985
|
+
depends: Type8.Optional(
|
|
98986
|
+
Type8.Array(Type8.String(), {
|
|
98320
98987
|
description: "New dependency list \u2014 replaces existing dependencies (e.g. ['FN-001', 'FN-002'])"
|
|
98321
98988
|
})
|
|
98322
98989
|
),
|
|
98323
|
-
agentId:
|
|
98324
|
-
|
|
98325
|
-
|
|
98326
|
-
|
|
98990
|
+
agentId: Type8.Optional(
|
|
98991
|
+
Type8.Union([
|
|
98992
|
+
Type8.String(),
|
|
98993
|
+
Type8.Null()
|
|
98327
98994
|
], {
|
|
98328
98995
|
description: "Agent ID to assign this task to, or null to clear (e.g. 'agent-abc123')"
|
|
98329
98996
|
})
|
|
98330
98997
|
),
|
|
98331
|
-
nodeId:
|
|
98332
|
-
|
|
98998
|
+
nodeId: Type8.Optional(
|
|
98999
|
+
Type8.Union([Type8.String(), Type8.Null()], {
|
|
98333
99000
|
description: "Node ID override for this task, or null to clear"
|
|
98334
99001
|
})
|
|
98335
99002
|
)
|
|
@@ -98410,14 +99077,14 @@ Column: triage
|
|
|
98410
99077
|
label: "fn: List Tasks",
|
|
98411
99078
|
description: "List all tasks on the Fusion board, grouped by column.",
|
|
98412
99079
|
promptSnippet: "List all tasks on the Fusion board grouped by column",
|
|
98413
|
-
parameters:
|
|
98414
|
-
column:
|
|
99080
|
+
parameters: Type8.Object({
|
|
99081
|
+
column: Type8.Optional(
|
|
98415
99082
|
StringEnum([...COLUMNS], {
|
|
98416
99083
|
description: "Filter to a specific column"
|
|
98417
99084
|
})
|
|
98418
99085
|
),
|
|
98419
|
-
limit:
|
|
98420
|
-
|
|
99086
|
+
limit: Type8.Optional(
|
|
99087
|
+
Type8.Number({
|
|
98421
99088
|
description: "Max tasks to show per column (default: 10)"
|
|
98422
99089
|
})
|
|
98423
99090
|
)
|
|
@@ -98459,8 +99126,8 @@ Column: triage
|
|
|
98459
99126
|
label: "fn: Show Task",
|
|
98460
99127
|
description: "Show full details for a task including steps, progress, and log entries.",
|
|
98461
99128
|
promptSnippet: "Show full details for a Fusion task",
|
|
98462
|
-
parameters:
|
|
98463
|
-
id:
|
|
99129
|
+
parameters: Type8.Object({
|
|
99130
|
+
id: Type8.String({ description: "Task ID (e.g. FN-001)" })
|
|
98464
99131
|
}),
|
|
98465
99132
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
98466
99133
|
const store = await getStore2(ctx.cwd);
|
|
@@ -98513,9 +99180,9 @@ Column: triage
|
|
|
98513
99180
|
label: "fn: Attach File",
|
|
98514
99181
|
description: "Attach a file to a task. Supports images (png, jpg, gif, webp) and text files (txt, log, json, yaml, yml, toml, csv, xml).",
|
|
98515
99182
|
promptSnippet: "Attach a file to a Fusion task",
|
|
98516
|
-
parameters:
|
|
98517
|
-
id:
|
|
98518
|
-
path:
|
|
99183
|
+
parameters: Type8.Object({
|
|
99184
|
+
id: Type8.String({ description: "Task ID (e.g. FN-001)" }),
|
|
99185
|
+
path: Type8.String({ description: "Path to the file to attach" })
|
|
98519
99186
|
}),
|
|
98520
99187
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
98521
99188
|
const filePath = resolve18(ctx.cwd, params.path.replace(/^@/, ""));
|
|
@@ -98553,8 +99220,8 @@ Path: .fusion/tasks/${params.id}/attachments/${attachment.filename}`
|
|
|
98553
99220
|
label: "fn: Pause Task",
|
|
98554
99221
|
description: "Pause a task \u2014 stops all automated agent and scheduler interaction for this task.",
|
|
98555
99222
|
promptSnippet: "Pause a Fusion task (stops automation)",
|
|
98556
|
-
parameters:
|
|
98557
|
-
id:
|
|
99223
|
+
parameters: Type8.Object({
|
|
99224
|
+
id: Type8.String({ description: "Task ID (e.g. FN-001)" })
|
|
98558
99225
|
}),
|
|
98559
99226
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
98560
99227
|
const store = await getStore2(ctx.cwd);
|
|
@@ -98570,8 +99237,8 @@ Path: .fusion/tasks/${params.id}/attachments/${attachment.filename}`
|
|
|
98570
99237
|
label: "fn: Unpause Task",
|
|
98571
99238
|
description: "Unpause a task \u2014 resumes automated agent and scheduler interaction.",
|
|
98572
99239
|
promptSnippet: "Unpause a Fusion task (resumes automation)",
|
|
98573
|
-
parameters:
|
|
98574
|
-
id:
|
|
99240
|
+
parameters: Type8.Object({
|
|
99241
|
+
id: Type8.String({ description: "Task ID (e.g. FN-001)" })
|
|
98575
99242
|
}),
|
|
98576
99243
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
98577
99244
|
const store = await getStore2(ctx.cwd);
|
|
@@ -98592,8 +99259,8 @@ Path: .fusion/tasks/${params.id}/attachments/${attachment.filename}`
|
|
|
98592
99259
|
"Only tasks in 'failed' state can be retried",
|
|
98593
99260
|
"The task will be moved to the todo column with error state cleared"
|
|
98594
99261
|
],
|
|
98595
|
-
parameters:
|
|
98596
|
-
id:
|
|
99262
|
+
parameters: Type8.Object({
|
|
99263
|
+
id: Type8.String({ description: "Task ID to retry (e.g. FN-001). Must be in 'failed' state." })
|
|
98597
99264
|
}),
|
|
98598
99265
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
98599
99266
|
const store = await getStore2(ctx.cwd);
|
|
@@ -98633,8 +99300,8 @@ Path: .fusion/tasks/${params.id}/attachments/${attachment.filename}`
|
|
|
98633
99300
|
"The duplicated task will be placed in planning for replanning",
|
|
98634
99301
|
"Dependencies, attachments, and execution state are NOT copied"
|
|
98635
99302
|
],
|
|
98636
|
-
parameters:
|
|
98637
|
-
id:
|
|
99303
|
+
parameters: Type8.Object({
|
|
99304
|
+
id: Type8.String({ description: "Source task ID to duplicate (e.g. FN-001)" })
|
|
98638
99305
|
}),
|
|
98639
99306
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
98640
99307
|
const store = await getStore2(ctx.cwd);
|
|
@@ -98656,9 +99323,9 @@ Path: .fusion/tasks/${params.id}/attachments/${attachment.filename}`
|
|
|
98656
99323
|
"The refinement task will be created in planning and depend on the original task",
|
|
98657
99324
|
"Provide clear feedback about what needs to be refined or improved"
|
|
98658
99325
|
],
|
|
98659
|
-
parameters:
|
|
98660
|
-
id:
|
|
98661
|
-
feedback:
|
|
99326
|
+
parameters: Type8.Object({
|
|
99327
|
+
id: Type8.String({ description: "Task ID to refine (e.g. FN-001). Must be in 'done' or 'in-review' column." }),
|
|
99328
|
+
feedback: Type8.String({
|
|
98662
99329
|
description: "Description of what needs to be refined or improved",
|
|
98663
99330
|
minLength: 1,
|
|
98664
99331
|
maxLength: 2e3
|
|
@@ -98685,8 +99352,8 @@ Path: .fusion/tasks/${params.id}/attachments/${attachment.filename}`
|
|
|
98685
99352
|
"Only tasks in the 'done' column can be archived",
|
|
98686
99353
|
"Archived tasks can be unarchived later if needed"
|
|
98687
99354
|
],
|
|
98688
|
-
parameters:
|
|
98689
|
-
id:
|
|
99355
|
+
parameters: Type8.Object({
|
|
99356
|
+
id: Type8.String({ description: "Task ID to archive (e.g. FN-001). Must be in 'done' column." })
|
|
98690
99357
|
}),
|
|
98691
99358
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
98692
99359
|
const store = await getStore2(ctx.cwd);
|
|
@@ -98706,8 +99373,8 @@ Path: .fusion/tasks/${params.id}/attachments/${attachment.filename}`
|
|
|
98706
99373
|
"Use to restore an archived task back to the done column",
|
|
98707
99374
|
"Only tasks in the 'archived' column can be unarchived"
|
|
98708
99375
|
],
|
|
98709
|
-
parameters:
|
|
98710
|
-
id:
|
|
99376
|
+
parameters: Type8.Object({
|
|
99377
|
+
id: Type8.String({ description: "Task ID to unarchive (e.g. FN-001). Must be in 'archived' column." })
|
|
98711
99378
|
}),
|
|
98712
99379
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
98713
99380
|
const store = await getStore2(ctx.cwd);
|
|
@@ -98728,8 +99395,8 @@ Path: .fusion/tasks/${params.id}/attachments/${attachment.filename}`
|
|
|
98728
99395
|
"Tasks are permanently deleted and cannot be recovered",
|
|
98729
99396
|
"Consider archiving instead of deleting for completed work you may need to reference later"
|
|
98730
99397
|
],
|
|
98731
|
-
parameters:
|
|
98732
|
-
id:
|
|
99398
|
+
parameters: Type8.Object({
|
|
99399
|
+
id: Type8.String({ description: "Task ID to delete (e.g. FN-001)" })
|
|
98733
99400
|
}),
|
|
98734
99401
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
98735
99402
|
const store = await getStore2(ctx.cwd);
|
|
@@ -98751,20 +99418,20 @@ Path: .fusion/tasks/${params.id}/attachments/${attachment.filename}`
|
|
|
98751
99418
|
"Use --limit to control how many issues to import (default: 30)",
|
|
98752
99419
|
"Use --labels to filter by specific labels"
|
|
98753
99420
|
],
|
|
98754
|
-
parameters:
|
|
98755
|
-
ownerRepo:
|
|
99421
|
+
parameters: Type8.Object({
|
|
99422
|
+
ownerRepo: Type8.String({
|
|
98756
99423
|
description: "Repository in owner/repo format (e.g., 'dustinbyrne/fusion')",
|
|
98757
99424
|
pattern: "^[^/]+/[^/]+$"
|
|
98758
99425
|
}),
|
|
98759
|
-
limit:
|
|
98760
|
-
|
|
99426
|
+
limit: Type8.Optional(
|
|
99427
|
+
Type8.Number({
|
|
98761
99428
|
description: "Max issues to import (default: 30, max: 100)",
|
|
98762
99429
|
minimum: 1,
|
|
98763
99430
|
maximum: 100
|
|
98764
99431
|
})
|
|
98765
99432
|
),
|
|
98766
|
-
labels:
|
|
98767
|
-
|
|
99433
|
+
labels: Type8.Optional(
|
|
99434
|
+
Type8.Array(Type8.String(), {
|
|
98768
99435
|
description: "Label names to filter by"
|
|
98769
99436
|
})
|
|
98770
99437
|
)
|
|
@@ -98840,14 +99507,14 @@ ${createdTasks.map((task) => ` ${task.id}: ${task.title}`).join("\n") || " Non
|
|
|
98840
99507
|
"Uses gh CLI authentication (run 'gh auth login')",
|
|
98841
99508
|
"Skips import if the issue is already imported (checks for existing Source URL)"
|
|
98842
99509
|
],
|
|
98843
|
-
parameters:
|
|
98844
|
-
owner:
|
|
99510
|
+
parameters: Type8.Object({
|
|
99511
|
+
owner: Type8.String({
|
|
98845
99512
|
description: "Repository owner (e.g., 'dustinbyrne')"
|
|
98846
99513
|
}),
|
|
98847
|
-
repo:
|
|
99514
|
+
repo: Type8.String({
|
|
98848
99515
|
description: "Repository name (e.g., 'fusion')"
|
|
98849
99516
|
}),
|
|
98850
|
-
issueNumber:
|
|
99517
|
+
issueNumber: Type8.Number({
|
|
98851
99518
|
description: "GitHub issue number to import",
|
|
98852
99519
|
minimum: 1
|
|
98853
99520
|
})
|
|
@@ -98922,22 +99589,22 @@ ${sourceUrl}`
|
|
|
98922
99589
|
"Use --labels to filter by specific labels",
|
|
98923
99590
|
"Uses gh CLI authentication (run 'gh auth login')"
|
|
98924
99591
|
],
|
|
98925
|
-
parameters:
|
|
98926
|
-
owner:
|
|
99592
|
+
parameters: Type8.Object({
|
|
99593
|
+
owner: Type8.String({
|
|
98927
99594
|
description: "Repository owner (e.g., 'dustinbyrne')"
|
|
98928
99595
|
}),
|
|
98929
|
-
repo:
|
|
99596
|
+
repo: Type8.String({
|
|
98930
99597
|
description: "Repository name (e.g., 'fusion')"
|
|
98931
99598
|
}),
|
|
98932
|
-
limit:
|
|
98933
|
-
|
|
99599
|
+
limit: Type8.Optional(
|
|
99600
|
+
Type8.Number({
|
|
98934
99601
|
description: "Max issues to show (default: 30, max: 100)",
|
|
98935
99602
|
minimum: 1,
|
|
98936
99603
|
maximum: 100
|
|
98937
99604
|
})
|
|
98938
99605
|
),
|
|
98939
|
-
labels:
|
|
98940
|
-
|
|
99606
|
+
labels: Type8.Optional(
|
|
99607
|
+
Type8.Array(Type8.String(), {
|
|
98941
99608
|
description: "Label names to filter by"
|
|
98942
99609
|
})
|
|
98943
99610
|
)
|
|
@@ -98996,9 +99663,9 @@ ${sourceUrl}`
|
|
|
98996
99663
|
"Use for breaking down vague ideas into actionable tasks",
|
|
98997
99664
|
"The AI will ask clarifying questions before creating the task"
|
|
98998
99665
|
],
|
|
98999
|
-
parameters:
|
|
99000
|
-
description:
|
|
99001
|
-
|
|
99666
|
+
parameters: Type8.Object({
|
|
99667
|
+
description: Type8.Optional(
|
|
99668
|
+
Type8.String({
|
|
99002
99669
|
description: "Initial plan description (optional) \u2014 the AI will ask clarifying questions if not provided"
|
|
99003
99670
|
})
|
|
99004
99671
|
)
|
|
@@ -99053,13 +99720,13 @@ Planning session completed. Task ${taskId} is now in planning and will be auto-p
|
|
|
99053
99720
|
"Missions are broken down into milestones \u2192 slices \u2192 features \u2192 tasks",
|
|
99054
99721
|
"Be descriptive so the mission purpose is clear"
|
|
99055
99722
|
],
|
|
99056
|
-
parameters:
|
|
99057
|
-
title:
|
|
99058
|
-
description:
|
|
99059
|
-
|
|
99723
|
+
parameters: Type8.Object({
|
|
99724
|
+
title: Type8.String({ description: "Mission title \u2014 brief but descriptive" }),
|
|
99725
|
+
description: Type8.Optional(
|
|
99726
|
+
Type8.String({ description: "Detailed mission objectives and context" })
|
|
99060
99727
|
),
|
|
99061
|
-
autoAdvance:
|
|
99062
|
-
|
|
99728
|
+
autoAdvance: Type8.Optional(
|
|
99729
|
+
Type8.Boolean({ description: "Automatically activate the next pending slice when the current slice completes" })
|
|
99063
99730
|
)
|
|
99064
99731
|
}),
|
|
99065
99732
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
@@ -99100,7 +99767,7 @@ Status: ${createdMission.status}${createdMission.autoAdvance ? "\nAuto-advance:
|
|
|
99100
99767
|
"Missions are grouped by status (active, planning, complete, etc.)",
|
|
99101
99768
|
"Use before fn_mission_show to find a specific mission ID"
|
|
99102
99769
|
],
|
|
99103
|
-
parameters:
|
|
99770
|
+
parameters: Type8.Object({}),
|
|
99104
99771
|
async execute(_toolCallId, _params, _signal, _onUpdate, ctx) {
|
|
99105
99772
|
const store = await getStore2(ctx.cwd);
|
|
99106
99773
|
const missionStore = store.getMissionStore();
|
|
@@ -99145,8 +99812,8 @@ Status: ${createdMission.status}${createdMission.autoAdvance ? "\nAuto-advance:
|
|
|
99145
99812
|
"Shows milestones, slices, and features in hierarchical order",
|
|
99146
99813
|
"Check slice status to see if features can be linked to tasks"
|
|
99147
99814
|
],
|
|
99148
|
-
parameters:
|
|
99149
|
-
id:
|
|
99815
|
+
parameters: Type8.Object({
|
|
99816
|
+
id: Type8.String({ description: "Mission ID (e.g., M-001)" })
|
|
99150
99817
|
}),
|
|
99151
99818
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
99152
99819
|
const store = await getStore2(ctx.cwd);
|
|
@@ -99200,8 +99867,8 @@ Status: ${createdMission.status}${createdMission.autoAdvance ? "\nAuto-advance:
|
|
|
99200
99867
|
"Permanently deletes all milestones, slices, and features within the mission",
|
|
99201
99868
|
"Tasks linked to features are NOT deleted \u2014 only the feature links are removed"
|
|
99202
99869
|
],
|
|
99203
|
-
parameters:
|
|
99204
|
-
id:
|
|
99870
|
+
parameters: Type8.Object({
|
|
99871
|
+
id: Type8.String({ description: "Mission ID to delete (e.g., M-001)" })
|
|
99205
99872
|
}),
|
|
99206
99873
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
99207
99874
|
const store = await getStore2(ctx.cwd);
|
|
@@ -99230,10 +99897,10 @@ Status: ${createdMission.status}${createdMission.autoAdvance ? "\nAuto-advance:
|
|
|
99230
99897
|
"Use to break down a mission into manageable phases",
|
|
99231
99898
|
"Milestones are ordered and contain slices (work units)"
|
|
99232
99899
|
],
|
|
99233
|
-
parameters:
|
|
99234
|
-
missionId:
|
|
99235
|
-
title:
|
|
99236
|
-
description:
|
|
99900
|
+
parameters: Type8.Object({
|
|
99901
|
+
missionId: Type8.String({ description: "Parent mission ID (e.g., M-001)" }),
|
|
99902
|
+
title: Type8.String({ description: "Milestone title" }),
|
|
99903
|
+
description: Type8.Optional(Type8.String({ description: "Milestone description" }))
|
|
99237
99904
|
}),
|
|
99238
99905
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
99239
99906
|
const store = await getStore2(ctx.cwd);
|
|
@@ -99268,10 +99935,10 @@ Status: ${createdMission.status}${createdMission.autoAdvance ? "\nAuto-advance:
|
|
|
99268
99935
|
"Slices are activated for implementation, linking features to tasks",
|
|
99269
99936
|
"Order slices by priority \u2014 they execute in sequence"
|
|
99270
99937
|
],
|
|
99271
|
-
parameters:
|
|
99272
|
-
milestoneId:
|
|
99273
|
-
title:
|
|
99274
|
-
description:
|
|
99938
|
+
parameters: Type8.Object({
|
|
99939
|
+
milestoneId: Type8.String({ description: "Parent milestone ID (e.g., MS-001)" }),
|
|
99940
|
+
title: Type8.String({ description: "Slice title" }),
|
|
99941
|
+
description: Type8.Optional(Type8.String({ description: "Slice description" }))
|
|
99275
99942
|
}),
|
|
99276
99943
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
99277
99944
|
const store = await getStore2(ctx.cwd);
|
|
@@ -99306,12 +99973,12 @@ Status: ${createdMission.status}${createdMission.autoAdvance ? "\nAuto-advance:
|
|
|
99306
99973
|
"Features start as 'defined' and progress through 'triaged' \u2192 'in-progress' \u2192 'done'",
|
|
99307
99974
|
"Link features to tasks using fn_feature_link_task"
|
|
99308
99975
|
],
|
|
99309
|
-
parameters:
|
|
99310
|
-
sliceId:
|
|
99311
|
-
title:
|
|
99312
|
-
description:
|
|
99313
|
-
acceptanceCriteria:
|
|
99314
|
-
|
|
99976
|
+
parameters: Type8.Object({
|
|
99977
|
+
sliceId: Type8.String({ description: "Parent slice ID (e.g., SL-001)" }),
|
|
99978
|
+
title: Type8.String({ description: "Feature title" }),
|
|
99979
|
+
description: Type8.Optional(Type8.String({ description: "Feature description" })),
|
|
99980
|
+
acceptanceCriteria: Type8.Optional(
|
|
99981
|
+
Type8.String({ description: "Acceptance criteria for completing the feature" })
|
|
99315
99982
|
)
|
|
99316
99983
|
}),
|
|
99317
99984
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
@@ -99348,8 +100015,8 @@ Status: ${createdMission.status}${createdMission.autoAdvance ? "\nAuto-advance:
|
|
|
99348
100015
|
"Only pending slices can be activated",
|
|
99349
100016
|
"Slice activation triggers auto-advance when linked tasks complete"
|
|
99350
100017
|
],
|
|
99351
|
-
parameters:
|
|
99352
|
-
id:
|
|
100018
|
+
parameters: Type8.Object({
|
|
100019
|
+
id: Type8.String({ description: "Slice ID to activate (e.g., SL-001)" })
|
|
99353
100020
|
}),
|
|
99354
100021
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
99355
100022
|
const store = await getStore2(ctx.cwd);
|
|
@@ -99393,9 +100060,9 @@ Status: ${activated.status}`
|
|
|
99393
100060
|
"Linking updates the feature status to 'triaged'",
|
|
99394
100061
|
"When the linked task moves to 'done', the feature status becomes 'done'"
|
|
99395
100062
|
],
|
|
99396
|
-
parameters:
|
|
99397
|
-
featureId:
|
|
99398
|
-
taskId:
|
|
100063
|
+
parameters: Type8.Object({
|
|
100064
|
+
featureId: Type8.String({ description: "Feature ID to link (e.g., F-001)" }),
|
|
100065
|
+
taskId: Type8.String({ description: "Task ID to link to (e.g., FN-001)" })
|
|
99399
100066
|
}),
|
|
99400
100067
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
99401
100068
|
const store = await getStore2(ctx.cwd);
|
|
@@ -99441,8 +100108,8 @@ Status: ${updated.status}`
|
|
|
99441
100108
|
"Stopped agents can be resumed with fn_agent_start",
|
|
99442
100109
|
"Agents in 'idle', 'error', or 'terminated' state cannot be stopped"
|
|
99443
100110
|
],
|
|
99444
|
-
parameters:
|
|
99445
|
-
id:
|
|
100111
|
+
parameters: Type8.Object({
|
|
100112
|
+
id: Type8.String({ description: "Agent ID to stop (e.g., agent-abc123)" })
|
|
99446
100113
|
}),
|
|
99447
100114
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
99448
100115
|
const { AgentStore: AgentStore2, AGENT_VALID_TRANSITIONS: AGENT_VALID_TRANSITIONS2 } = await Promise.resolve().then(() => (init_src(), src_exports));
|
|
@@ -99492,8 +100159,8 @@ Status: ${updated.status}`
|
|
|
99492
100159
|
"Only agents in 'paused' state can be started",
|
|
99493
100160
|
"Agents in 'idle' or 'error' state cannot be started \u2014 use reset instead"
|
|
99494
100161
|
],
|
|
99495
|
-
parameters:
|
|
99496
|
-
id:
|
|
100162
|
+
parameters: Type8.Object({
|
|
100163
|
+
id: Type8.String({ description: "Agent ID to start (e.g., agent-abc123)" })
|
|
99497
100164
|
}),
|
|
99498
100165
|
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
|
99499
100166
|
const { AgentStore: AgentStore2, AGENT_VALID_TRANSITIONS: AGENT_VALID_TRANSITIONS2 } = await Promise.resolve().then(() => (init_src(), src_exports));
|
|
@@ -99542,12 +100209,12 @@ Status: ${updated.status}`
|
|
|
99542
100209
|
"Use fn_skills_search to discover skills before installing",
|
|
99543
100210
|
"Returns skills sorted by popularity (install count)"
|
|
99544
100211
|
],
|
|
99545
|
-
parameters:
|
|
99546
|
-
query:
|
|
100212
|
+
parameters: Type8.Object({
|
|
100213
|
+
query: Type8.String({
|
|
99547
100214
|
description: "Search query \u2014 framework name, technology, or capability (e.g., 'react', 'firebase', 'testing', 'docker')"
|
|
99548
100215
|
}),
|
|
99549
|
-
limit:
|
|
99550
|
-
|
|
100216
|
+
limit: Type8.Optional(
|
|
100217
|
+
Type8.Number({
|
|
99551
100218
|
description: "Max results to return (default: 10, max: 50)",
|
|
99552
100219
|
minimum: 1,
|
|
99553
100220
|
maximum: 50
|
|
@@ -99596,12 +100263,12 @@ Status: ${updated.status}`
|
|
|
99596
100263
|
"The source is in owner/repo format (e.g., 'firebase/agent-skills')",
|
|
99597
100264
|
"Specify the skill name to install a specific skill, or omit to install all from the source"
|
|
99598
100265
|
],
|
|
99599
|
-
parameters:
|
|
99600
|
-
source:
|
|
100266
|
+
parameters: Type8.Object({
|
|
100267
|
+
source: Type8.String({
|
|
99601
100268
|
description: "GitHub source in owner/repo format (e.g., 'firebase/agent-skills')"
|
|
99602
100269
|
}),
|
|
99603
|
-
skill:
|
|
99604
|
-
|
|
100270
|
+
skill: Type8.Optional(
|
|
100271
|
+
Type8.String({
|
|
99605
100272
|
description: "Specific skill name to install (e.g., 'firebase-basics'). Omit to install all skills from the source."
|
|
99606
100273
|
})
|
|
99607
100274
|
)
|
|
@@ -99624,7 +100291,7 @@ Status: ${updated.status}`
|
|
|
99624
100291
|
npxArgs.push("--skill", params.skill);
|
|
99625
100292
|
}
|
|
99626
100293
|
npxArgs.push("-y", "-a", "pi");
|
|
99627
|
-
const child =
|
|
100294
|
+
const child = spawn10("npx", npxArgs, {
|
|
99628
100295
|
cwd: resolveProjectRoot(ctx.cwd),
|
|
99629
100296
|
stdio: "pipe",
|
|
99630
100297
|
shell: true
|
|
@@ -99706,7 +100373,7 @@ Status: ${updated.status}`
|
|
|
99706
100373
|
return;
|
|
99707
100374
|
}
|
|
99708
100375
|
const port = trimmed ? parseInt(trimmed, 10) || 4040 : 4040;
|
|
99709
|
-
const child =
|
|
100376
|
+
const child = spawn10("fn", ["dashboard", "--port", String(port)], {
|
|
99710
100377
|
cwd: resolveProjectRoot(ctx.cwd),
|
|
99711
100378
|
stdio: ["ignore", "pipe", "pipe"],
|
|
99712
100379
|
detached: false,
|