@oh-my-pi/pi-coding-agent 13.2.1 → 13.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/CHANGELOG.md +43 -2
  2. package/package.json +7 -7
  3. package/scripts/generate-docs-index.ts +2 -2
  4. package/src/cli/args.ts +2 -1
  5. package/src/cli/config-cli.ts +32 -20
  6. package/src/config/settings-schema.ts +96 -14
  7. package/src/config/settings.ts +10 -0
  8. package/src/discovery/claude.ts +24 -6
  9. package/src/discovery/helpers.ts +9 -2
  10. package/src/ipy/runtime.ts +1 -0
  11. package/src/mcp/config.ts +1 -1
  12. package/src/modes/components/settings-defs.ts +53 -1
  13. package/src/modes/components/status-line.ts +7 -5
  14. package/src/modes/controllers/mcp-command-controller.ts +4 -3
  15. package/src/modes/controllers/selector-controller.ts +46 -0
  16. package/src/modes/interactive-mode.ts +9 -0
  17. package/src/modes/oauth-manual-input.ts +42 -0
  18. package/src/modes/types.ts +2 -0
  19. package/src/patch/hashline.ts +19 -1
  20. package/src/patch/index.ts +7 -8
  21. package/src/prompts/system/commit-message-system.md +2 -0
  22. package/src/prompts/system/subagent-submit-reminder.md +3 -3
  23. package/src/prompts/system/subagent-system-prompt.md +4 -4
  24. package/src/prompts/system/system-prompt.md +13 -0
  25. package/src/prompts/tools/hashline.md +45 -1
  26. package/src/prompts/tools/task-summary.md +4 -4
  27. package/src/prompts/tools/task.md +1 -1
  28. package/src/sdk.ts +8 -0
  29. package/src/slash-commands/builtin-registry.ts +26 -1
  30. package/src/system-prompt.ts +4 -0
  31. package/src/task/index.ts +211 -70
  32. package/src/task/render.ts +44 -16
  33. package/src/task/types.ts +6 -1
  34. package/src/task/worktree.ts +394 -31
  35. package/src/tools/review.ts +50 -1
  36. package/src/tools/submit-result.ts +22 -23
  37. package/src/utils/commit-message-generator.ts +132 -0
  38. package/src/web/search/providers/exa.ts +41 -4
  39. package/src/web/search/providers/perplexity.ts +20 -8
@@ -1,16 +1,26 @@
1
+ import type { Dirent } from "node:fs";
1
2
  import * as fs from "node:fs/promises";
2
3
  import * as os from "node:os";
3
4
  import path from "node:path";
4
- import { getWorktreeDir, isEnoent, Snowflake } from "@oh-my-pi/pi-utils";
5
+ import { getWorktreeDir, isEnoent, logger, Snowflake } from "@oh-my-pi/pi-utils";
5
6
  import { $ } from "bun";
6
7
 
7
- export interface WorktreeBaseline {
8
+ /** Baseline state for a single git repository. */
9
+ export interface RepoBaseline {
8
10
  repoRoot: string;
11
+ headCommit: string;
9
12
  staged: string;
10
13
  unstaged: string;
11
14
  untracked: string[];
12
15
  }
13
16
 
17
+ /** Baseline state for the project, including any nested git repos. */
18
+ export interface WorktreeBaseline {
19
+ root: RepoBaseline;
20
+ /** Nested git repos (path relative to root.repoRoot). */
21
+ nested: Array<{ relativePath: string; baseline: RepoBaseline }>;
22
+ }
23
+
14
24
  export function getEncodedProjectName(cwd: string): string {
15
25
  return `--${cwd.replace(/^[/\\]/, "").replace(/[/\\:]/g, "-")}--`;
16
26
  }
@@ -38,7 +48,56 @@ export async function ensureWorktree(baseCwd: string, id: string): Promise<strin
38
48
  return worktreeDir;
39
49
  }
40
50
 
41
- export async function captureBaseline(repoRoot: string): Promise<WorktreeBaseline> {
51
+ /** Find nested git repositories (non-submodule) under the given root. */
52
+ async function discoverNestedRepos(repoRoot: string): Promise<string[]> {
53
+ // Get submodule paths so we can exclude them
54
+ const submoduleRaw = await $`git submodule --quiet foreach --recursive 'echo $sm_path'`
55
+ .cwd(repoRoot)
56
+ .quiet()
57
+ .nothrow()
58
+ .text();
59
+ const submodulePaths = new Set(
60
+ submoduleRaw
61
+ .split("\n")
62
+ .map(l => l.trim())
63
+ .filter(Boolean),
64
+ );
65
+
66
+ // Find all .git dirs/files that aren't the root or known submodules
67
+ const result: string[] = [];
68
+ async function walk(dir: string): Promise<void> {
69
+ let entries: Dirent[];
70
+ try {
71
+ entries = await fs.readdir(dir, { withFileTypes: true });
72
+ } catch {
73
+ return;
74
+ }
75
+ for (const entry of entries) {
76
+ if (entry.name === "node_modules" || entry.name === ".git") continue;
77
+ if (!entry.isDirectory()) continue;
78
+ const full = path.join(dir, entry.name);
79
+ const rel = path.relative(repoRoot, full);
80
+ // Check if this directory is itself a git repo
81
+ const gitDir = path.join(full, ".git");
82
+ let hasGit = false;
83
+ try {
84
+ await fs.access(gitDir);
85
+ hasGit = true;
86
+ } catch {}
87
+ if (hasGit && !submodulePaths.has(rel)) {
88
+ result.push(rel);
89
+ // Don't recurse into nested repos — they manage their own tree
90
+ continue;
91
+ }
92
+ await walk(full);
93
+ }
94
+ }
95
+ await walk(repoRoot);
96
+ return result;
97
+ }
98
+
99
+ async function captureRepoBaseline(repoRoot: string): Promise<RepoBaseline> {
100
+ const headCommit = (await $`git rev-parse HEAD`.cwd(repoRoot).quiet().text()).trim();
42
101
  const staged = await $`git diff --cached --binary`.cwd(repoRoot).quiet().text();
43
102
  const unstaged = await $`git diff --binary`.cwd(repoRoot).quiet().text();
44
103
  const untrackedRaw = await $`git ls-files --others --exclude-standard`.cwd(repoRoot).quiet().text();
@@ -46,13 +105,18 @@ export async function captureBaseline(repoRoot: string): Promise<WorktreeBaselin
46
105
  .split("\n")
47
106
  .map(line => line.trim())
48
107
  .filter(line => line.length > 0);
108
+ return { repoRoot, headCommit, staged, unstaged, untracked };
109
+ }
49
110
 
50
- return {
51
- repoRoot,
52
- staged,
53
- unstaged,
54
- untracked,
55
- };
111
+ export async function captureBaseline(repoRoot: string): Promise<WorktreeBaseline> {
112
+ const [root, nestedPaths] = await Promise.all([captureRepoBaseline(repoRoot), discoverNestedRepos(repoRoot)]);
113
+ const nested = await Promise.all(
114
+ nestedPaths.map(async relativePath => ({
115
+ relativePath,
116
+ baseline: await captureRepoBaseline(path.join(repoRoot, relativePath)),
117
+ })),
118
+ );
119
+ return { root, nested };
56
120
  }
57
121
 
58
122
  async function writeTempPatchFile(patch: string): Promise<string> {
@@ -80,13 +144,13 @@ async function applyPatch(
80
144
  }
81
145
  }
82
146
 
83
- export async function applyBaseline(worktreeDir: string, baseline: WorktreeBaseline): Promise<void> {
84
- await applyPatch(worktreeDir, baseline.staged, { cached: true });
85
- await applyPatch(worktreeDir, baseline.staged);
86
- await applyPatch(worktreeDir, baseline.unstaged);
147
+ async function applyRepoBaseline(worktreeDir: string, rb: RepoBaseline, sourceRoot: string): Promise<void> {
148
+ await applyPatch(worktreeDir, rb.staged, { cached: true });
149
+ await applyPatch(worktreeDir, rb.staged);
150
+ await applyPatch(worktreeDir, rb.unstaged);
87
151
 
88
- for (const entry of baseline.untracked) {
89
- const source = path.join(baseline.repoRoot, entry);
152
+ for (const entry of rb.untracked) {
153
+ const source = path.join(sourceRoot, entry);
90
154
  const destination = path.join(worktreeDir, entry);
91
155
  try {
92
156
  await fs.mkdir(path.dirname(destination), { recursive: true });
@@ -98,6 +162,39 @@ export async function applyBaseline(worktreeDir: string, baseline: WorktreeBasel
98
162
  }
99
163
  }
100
164
 
165
+ export async function applyBaseline(worktreeDir: string, baseline: WorktreeBaseline): Promise<void> {
166
+ await applyRepoBaseline(worktreeDir, baseline.root, baseline.root.repoRoot);
167
+
168
+ // Restore nested repos into the worktree
169
+ for (const entry of baseline.nested) {
170
+ const nestedDir = path.join(worktreeDir, entry.relativePath);
171
+ // Copy the nested repo wholesale (it's not managed by root git)
172
+ const sourceDir = path.join(baseline.root.repoRoot, entry.relativePath);
173
+ try {
174
+ await fs.cp(sourceDir, nestedDir, { recursive: true });
175
+ } catch (err) {
176
+ if (isEnoent(err)) continue;
177
+ throw err;
178
+ }
179
+ // Apply any uncommitted changes from the nested baseline
180
+ await applyRepoBaseline(nestedDir, entry.baseline, entry.baseline.repoRoot);
181
+ // Commit baseline state so captureRepoDeltaPatch can cleanly subtract it.
182
+ // Without this, `git add -A && git commit` by the task would include
183
+ // baseline untracked files in the diff-tree output.
184
+ const hasChanges = (await $`git status --porcelain`.cwd(nestedDir).quiet().nothrow().text()).trim();
185
+ if (hasChanges) {
186
+ await $`git add -A`.cwd(nestedDir).quiet();
187
+ await $`git commit -m omp-baseline --allow-empty`.cwd(nestedDir).quiet();
188
+ // Update baseline to reflect the committed state — prevents double-apply
189
+ // in captureRepoDeltaPatch's temp-index path
190
+ entry.baseline.headCommit = (await $`git rev-parse HEAD`.cwd(nestedDir).quiet().text()).trim();
191
+ entry.baseline.staged = "";
192
+ entry.baseline.unstaged = "";
193
+ entry.baseline.untracked = [];
194
+ }
195
+ }
196
+ }
197
+
101
198
  async function applyPatchToIndex(cwd: string, patch: string, indexFile: string): Promise<void> {
102
199
  if (!patch.trim()) return;
103
200
  const tempPath = await writeTempPatchFile(patch);
@@ -121,31 +218,62 @@ async function listUntracked(cwd: string): Promise<string[]> {
121
218
  .filter(line => line.length > 0);
122
219
  }
123
220
 
124
- export async function captureDeltaPatch(worktreeDir: string, baseline: WorktreeBaseline): Promise<string> {
125
- const tempIndex = path.join(os.tmpdir(), `omp-task-index-${Snowflake.next()}`);
126
- try {
127
- await $`git read-tree HEAD`.cwd(worktreeDir).env({
128
- GIT_INDEX_FILE: tempIndex,
129
- });
130
- await applyPatchToIndex(worktreeDir, baseline.staged, tempIndex);
131
- await applyPatchToIndex(worktreeDir, baseline.unstaged, tempIndex);
132
- const diff = await $`git diff --binary`
133
- .cwd(worktreeDir)
134
- .env({
135
- GIT_INDEX_FILE: tempIndex,
136
- })
221
+ async function captureRepoDeltaPatch(repoDir: string, rb: RepoBaseline): Promise<string> {
222
+ // Check if HEAD advanced (task committed changes)
223
+ const currentHead = (await $`git rev-parse HEAD`.cwd(repoDir).quiet().nothrow().text()).trim();
224
+ const headAdvanced = currentHead && currentHead !== rb.headCommit;
225
+
226
+ if (headAdvanced) {
227
+ // HEAD moved: use diff-tree to capture committed changes, plus any uncommitted on top
228
+ const parts: string[] = [];
229
+
230
+ // Committed changes since baseline
231
+ const committedDiff = await $`git diff-tree -r -p --binary ${rb.headCommit} ${currentHead}`
232
+ .cwd(repoDir)
137
233
  .quiet()
234
+ .nothrow()
138
235
  .text();
236
+ if (committedDiff.trim()) parts.push(committedDiff);
237
+
238
+ // Uncommitted changes on top of the new HEAD
239
+ const staged = await $`git diff --cached --binary`.cwd(repoDir).quiet().text();
240
+ const unstaged = await $`git diff --binary`.cwd(repoDir).quiet().text();
241
+ if (staged.trim()) parts.push(staged);
242
+ if (unstaged.trim()) parts.push(unstaged);
243
+
244
+ // New untracked files (relative to both baseline and current tracking)
245
+ const currentUntracked = await listUntracked(repoDir);
246
+ const baselineUntracked = new Set(rb.untracked);
247
+ const newUntracked = currentUntracked.filter(entry => !baselineUntracked.has(entry));
248
+ if (newUntracked.length > 0) {
249
+ const untrackedDiffs = await Promise.all(
250
+ newUntracked.map(entry =>
251
+ $`git diff --binary --no-index /dev/null ${entry}`.cwd(repoDir).quiet().nothrow().text(),
252
+ ),
253
+ );
254
+ parts.push(...untrackedDiffs.filter(d => d.trim()));
255
+ }
256
+
257
+ return parts.join("\n");
258
+ }
139
259
 
140
- const currentUntracked = await listUntracked(worktreeDir);
141
- const baselineUntracked = new Set(baseline.untracked);
260
+ // HEAD unchanged: use temp index approach (subtracts baseline from delta)
261
+ const tempIndex = path.join(os.tmpdir(), `omp-task-index-${Snowflake.next()}`);
262
+ try {
263
+ await $`git read-tree ${rb.headCommit}`.cwd(repoDir).env({ GIT_INDEX_FILE: tempIndex });
264
+ await applyPatchToIndex(repoDir, rb.staged, tempIndex);
265
+ await applyPatchToIndex(repoDir, rb.unstaged, tempIndex);
266
+ const diff = await $`git diff --binary`.cwd(repoDir).env({ GIT_INDEX_FILE: tempIndex }).quiet().text();
267
+
268
+ const currentUntracked = await listUntracked(repoDir);
269
+ const baselineUntracked = new Set(rb.untracked);
142
270
  const newUntracked = currentUntracked.filter(entry => !baselineUntracked.has(entry));
143
271
 
144
272
  if (newUntracked.length === 0) return diff;
145
273
 
146
274
  const untrackedDiffs = await Promise.all(
147
275
  newUntracked.map(entry =>
148
- $`git diff --binary --no-index /dev/null ${entry}`.cwd(worktreeDir).quiet().nothrow().text(),
276
+ $`git diff --binary --no-index /dev/null ${entry}`.cwd(repoDir).quiet().nothrow().text(),
149
277
  ),
150
278
  );
151
279
  return `${diff}${diff && !diff.endsWith("\n") ? "\n" : ""}${untrackedDiffs.join("\n")}`;
@@ -154,6 +282,76 @@ export async function captureDeltaPatch(worktreeDir: string, baseline: WorktreeB
154
282
  }
155
283
  }
156
284
 
285
+ export interface NestedRepoPatch {
286
+ relativePath: string;
287
+ patch: string;
288
+ }
289
+
290
+ export interface DeltaPatchResult {
291
+ rootPatch: string;
292
+ nestedPatches: NestedRepoPatch[];
293
+ }
294
+
295
+ export async function captureDeltaPatch(isolationDir: string, baseline: WorktreeBaseline): Promise<DeltaPatchResult> {
296
+ const rootPatch = await captureRepoDeltaPatch(isolationDir, baseline.root);
297
+ const nestedPatches: NestedRepoPatch[] = [];
298
+
299
+ for (const { relativePath, baseline: nb } of baseline.nested) {
300
+ const nestedDir = path.join(isolationDir, relativePath);
301
+ try {
302
+ await fs.access(path.join(nestedDir, ".git"));
303
+ } catch {
304
+ continue;
305
+ }
306
+ const patch = await captureRepoDeltaPatch(nestedDir, nb);
307
+ if (patch.trim()) nestedPatches.push({ relativePath, patch });
308
+ }
309
+
310
+ return { rootPatch, nestedPatches };
311
+ }
312
+
313
+ /**
314
+ * Apply nested repo patches directly to their working directories after parent merge.
315
+ * @param commitMessage Optional async function to generate a commit message from the combined diff.
316
+ * If omitted or returns null, falls back to a generic message.
317
+ */
318
+ export async function applyNestedPatches(
319
+ repoRoot: string,
320
+ patches: NestedRepoPatch[],
321
+ commitMessage?: (diff: string) => Promise<string | null>,
322
+ ): Promise<void> {
323
+ // Group patches by target repo to apply all at once and commit
324
+ const byRepo = new Map<string, NestedRepoPatch[]>();
325
+ for (const p of patches) {
326
+ if (!p.patch.trim()) continue;
327
+ const group = byRepo.get(p.relativePath) ?? [];
328
+ group.push(p);
329
+ byRepo.set(p.relativePath, group);
330
+ }
331
+
332
+ for (const [relativePath, repoPatches] of byRepo) {
333
+ const nestedDir = path.join(repoRoot, relativePath);
334
+ try {
335
+ await fs.access(path.join(nestedDir, ".git"));
336
+ } catch {
337
+ continue;
338
+ }
339
+
340
+ const combinedDiff = repoPatches.map(p => p.patch).join("\n");
341
+ for (const { patch } of repoPatches) {
342
+ await applyPatch(nestedDir, patch);
343
+ }
344
+
345
+ // Commit so nested repo history reflects the task changes
346
+ const hasChanges = (await $`git status --porcelain`.cwd(nestedDir).quiet().nothrow().text()).trim();
347
+ if (hasChanges) {
348
+ const msg = (await commitMessage?.(combinedDiff)) ?? "changes from isolated task(s)";
349
+ await $`git add -A`.cwd(nestedDir).quiet();
350
+ await $`git commit -m ${msg}`.cwd(nestedDir).quiet();
351
+ }
352
+ }
353
+ }
354
+
157
355
  export async function cleanupWorktree(dir: string): Promise<void> {
158
356
  try {
159
357
  const commonDirRaw = await $`git rev-parse --git-common-dir`.cwd(dir).quiet().nothrow().text();
@@ -167,3 +365,168 @@ export async function cleanupWorktree(dir: string): Promise<void> {
167
365
  await fs.rm(dir, { recursive: true, force: true });
168
366
  }
169
367
  }
368
+
369
+ // ═══════════════════════════════════════════════════════════════════════════
370
+ // Fuse-overlay isolation
371
+ // ═══════════════════════════════════════════════════════════════════════════
372
+
373
+ export async function ensureFuseOverlay(baseCwd: string, id: string): Promise<string> {
374
+ const repoRoot = await getRepoRoot(baseCwd);
375
+ const encodedProject = getEncodedProjectName(repoRoot);
376
+ const baseDir = getWorktreeDir(encodedProject, id);
377
+ const upperDir = path.join(baseDir, "upper");
378
+ const workDir = path.join(baseDir, "work");
379
+ const mergedDir = path.join(baseDir, "merged");
380
+
381
+ // Clean up any stale mount at this path
382
+ const fusermount = Bun.which("fusermount3") ?? Bun.which("fusermount");
383
+ if (fusermount) {
384
+ await $`${fusermount} -u ${mergedDir}`.quiet().nothrow();
385
+ }
386
+ await fs.rm(baseDir, { recursive: true, force: true });
387
+
388
+ await fs.mkdir(upperDir, { recursive: true });
389
+ await fs.mkdir(workDir, { recursive: true });
390
+ await fs.mkdir(mergedDir, { recursive: true });
391
+
392
+ const binary = Bun.which("fuse-overlayfs");
393
+ if (!binary) {
394
+ await fs.rm(baseDir, { recursive: true, force: true });
395
+ throw new Error(
396
+ "fuse-overlayfs not found. Install it (e.g. `apt install fuse-overlayfs` or `pacman -S fuse-overlayfs`) to use fuse-overlay isolation.",
397
+ );
398
+ }
399
+
400
+ const result = await $`${binary} -o lowerdir=${repoRoot},upperdir=${upperDir},workdir=${workDir} ${mergedDir}`
401
+ .quiet()
402
+ .nothrow();
403
+ if (result.exitCode !== 0) {
404
+ const stderr = result.stderr.toString().trim();
405
+ await fs.rm(baseDir, { recursive: true, force: true });
406
+ throw new Error(`fuse-overlayfs mount failed (exit ${result.exitCode}): ${stderr}`);
407
+ }
408
+
409
+ return mergedDir;
410
+ }
411
+
412
+ export async function cleanupFuseOverlay(mergedDir: string): Promise<void> {
413
+ try {
414
+ const fusermount = Bun.which("fusermount3") ?? Bun.which("fusermount");
415
+ if (fusermount) {
416
+ await $`${fusermount} -u ${mergedDir}`.quiet().nothrow();
417
+ }
418
+ } finally {
419
+ // baseDir is the parent of the merged directory
420
+ const baseDir = path.dirname(mergedDir);
421
+ await fs.rm(baseDir, { recursive: true, force: true });
422
+ }
423
+ }
424
+
425
+ // ═══════════════════════════════════════════════════════════════════════════
426
+ // Branch-mode isolation
427
+ // ═══════════════════════════════════════════════════════════════════════════
428
+
429
+ export interface CommitToBranchResult {
430
+ branchName?: string;
431
+ nestedPatches: NestedRepoPatch[];
432
+ }
433
+
434
+ /**
435
+ * Commit task-only changes to a new branch.
436
+ * Only root repo changes go on the branch. Nested repo patches are returned
437
+ * separately since the parent git can't track files inside gitlinks.
438
+ */
439
+ export async function commitToBranch(
440
+ isolationDir: string,
441
+ baseline: WorktreeBaseline,
442
+ taskId: string,
443
+ description: string | undefined,
444
+ commitMessage?: (diff: string) => Promise<string | null>,
445
+ ): Promise<CommitToBranchResult | null> {
446
+ const { rootPatch, nestedPatches } = await captureDeltaPatch(isolationDir, baseline);
447
+ if (!rootPatch.trim() && nestedPatches.length === 0) return null;
448
+
449
+ const repoRoot = baseline.root.repoRoot;
450
+ const branchName = `omp/task/${taskId}`;
451
+ const fallbackMessage = description || taskId;
452
+
453
+ // Only create a branch if the root repo has changes
454
+ if (rootPatch.trim()) {
455
+ await $`git branch ${branchName} HEAD`.cwd(repoRoot).quiet();
456
+ const tmpDir = path.join(os.tmpdir(), `omp-branch-${Snowflake.next()}`);
457
+ try {
458
+ await $`git worktree add ${tmpDir} ${branchName}`.cwd(repoRoot).quiet();
459
+ const patchPath = path.join(os.tmpdir(), `omp-branch-patch-${Snowflake.next()}.patch`);
460
+ try {
461
+ await Bun.write(patchPath, rootPatch);
462
+ const applyResult = await $`git apply --binary ${patchPath}`.cwd(tmpDir).quiet().nothrow();
463
+ if (applyResult.exitCode !== 0) {
464
+ const stderr = applyResult.stderr.toString().slice(0, 2000);
465
+ logger.error("commitToBranch: git apply failed", {
466
+ taskId,
467
+ exitCode: applyResult.exitCode,
468
+ stderr,
469
+ patchSize: rootPatch.length,
470
+ patchHead: rootPatch.slice(0, 500),
471
+ });
472
+ throw new Error(`git apply failed for task ${taskId}: ${stderr}`);
473
+ }
474
+ } finally {
475
+ await fs.rm(patchPath, { force: true });
476
+ }
477
+ await $`git add -A`.cwd(tmpDir).quiet();
478
+ const msg = (commitMessage && (await commitMessage(rootPatch))) || fallbackMessage;
479
+ await $`git commit -m ${msg}`.cwd(tmpDir).quiet();
480
+ } finally {
481
+ await $`git worktree remove -f ${tmpDir}`.cwd(repoRoot).quiet().nothrow();
482
+ await fs.rm(tmpDir, { recursive: true, force: true });
483
+ }
484
+ }
485
+
486
+ return { branchName: rootPatch.trim() ? branchName : undefined, nestedPatches };
487
+ }
488
+
489
+ export interface MergeBranchResult {
490
+ merged: string[];
491
+ failed: string[];
492
+ conflict?: string;
493
+ }
494
+
495
+ /**
496
+ * Cherry-pick task branch commits sequentially onto HEAD.
497
+ * Each branch has a single commit that gets replayed cleanly.
498
+ * Stops on first conflict and reports which branches succeeded.
499
+ */
500
+ export async function mergeTaskBranches(
501
+ repoRoot: string,
502
+ branches: Array<{ branchName: string; taskId: string; description?: string }>,
503
+ ): Promise<MergeBranchResult> {
504
+ const merged: string[] = [];
505
+ const failed: string[] = [];
506
+
507
+ for (const { branchName } of branches) {
508
+ const result = await $`git cherry-pick ${branchName}`.cwd(repoRoot).quiet().nothrow();
509
+
510
+ if (result.exitCode !== 0) {
511
+ await $`git cherry-pick --abort`.cwd(repoRoot).quiet().nothrow();
512
+ const stderr = result.stderr.toString().trim();
513
+ failed.push(branchName);
514
+ return {
515
+ merged,
516
+ failed: [...failed, ...branches.slice(merged.length + failed.length).map(b => b.branchName)],
517
+ conflict: `${branchName}: ${stderr}`,
518
+ };
519
+ }
520
+
521
+ merged.push(branchName);
522
+ }
523
+
524
+ return { merged, failed };
525
+ }
526
+
527
+ /** Clean up temporary task branches. */
528
+ export async function cleanupTaskBranches(repoRoot: string, branches: string[]): Promise<void> {
529
+ for (const branch of branches) {
530
+ await $`git branch -D ${branch}`.cwd(repoRoot).quiet().nothrow();
531
+ }
532
+ }
@@ -13,6 +13,7 @@ import type { AgentTool } from "@oh-my-pi/pi-agent-core";
13
13
  import { StringEnum } from "@oh-my-pi/pi-ai";
14
14
  import type { Component } from "@oh-my-pi/pi-tui";
15
15
  import { Container, Text } from "@oh-my-pi/pi-tui";
16
+ import { isRecord } from "@oh-my-pi/pi-utils";
16
17
  import { Type } from "@sinclair/typebox";
17
18
  import type { Theme, ThemeColor } from "../modes/theme/theme";
18
19
  import { subprocessToolRegistry } from "../task/subprocess-tool-registry";
@@ -82,6 +83,51 @@ interface ReportFindingDetails {
82
83
  line_end: number;
83
84
  }
84
85
 
86
+ function isFindingPriority(value: unknown): value is FindingPriority {
87
+ return value === "P0" || value === "P1" || value === "P2" || value === "P3";
88
+ }
89
+
90
+ export function parseReportFindingDetails(value: unknown): ReportFindingDetails | undefined {
91
+ if (!isRecord(value)) return undefined;
92
+
93
+ const title = typeof value.title === "string" ? value.title : undefined;
94
+ const body = typeof value.body === "string" ? value.body : undefined;
95
+ const priority = isFindingPriority(value.priority) ? value.priority : undefined;
96
+ const confidence =
97
+ typeof value.confidence === "number" &&
98
+ Number.isFinite(value.confidence) &&
99
+ value.confidence >= 0 &&
100
+ value.confidence <= 1
101
+ ? value.confidence
102
+ : undefined;
103
+ const filePath = typeof value.file_path === "string" && value.file_path.length > 0 ? value.file_path : undefined;
104
+ const lineStart =
105
+ typeof value.line_start === "number" && Number.isFinite(value.line_start) ? value.line_start : undefined;
106
+ const lineEnd = typeof value.line_end === "number" && Number.isFinite(value.line_end) ? value.line_end : undefined;
107
+
108
+ if (
109
+ title === undefined ||
110
+ body === undefined ||
111
+ priority === undefined ||
112
+ confidence === undefined ||
113
+ filePath === undefined ||
114
+ lineStart === undefined ||
115
+ lineEnd === undefined
116
+ ) {
117
+ return undefined;
118
+ }
119
+
120
+ return {
121
+ title,
122
+ body,
123
+ priority,
124
+ confidence,
125
+ file_path: filePath,
126
+ line_start: lineStart,
127
+ line_end: lineEnd,
128
+ };
129
+ }
130
+
85
131
  export const reportFindingTool: AgentTool<typeof ReportFindingParams, ReportFindingDetails, Theme> = {
86
132
  name: "report_finding",
87
133
  label: "Report Finding",
@@ -152,7 +198,10 @@ export type { ReportFindingDetails };
152
198
 
153
199
  // Register report_finding handler
154
200
  subprocessToolRegistry.register<ReportFindingDetails>("report_finding", {
155
- extractData: event => event.result?.details as ReportFindingDetails | undefined,
201
+ extractData: event => {
202
+ if (event.isError) return undefined;
203
+ return parseReportFindingDetails(event.result?.details);
204
+ },
156
205
 
157
206
  renderInline: (data, theme) => {
158
207
  const { label, icon, color } = getPriorityDisplay(data.priority, theme);
@@ -4,8 +4,7 @@
4
4
  * Subagents must call this tool to finish and return structured JSON output.
5
5
  */
6
6
  import type { AgentTool, AgentToolContext, AgentToolResult, AgentToolUpdateCallback } from "@oh-my-pi/pi-agent-core";
7
- import { StringEnum } from "@oh-my-pi/pi-ai";
8
- import type { Static, TObject } from "@sinclair/typebox";
7
+ import type { Static, TSchema } from "@sinclair/typebox";
9
8
  import { Type } from "@sinclair/typebox";
10
9
  import Ajv, { type ErrorObject, type ValidateFunction } from "ajv";
11
10
  import { subprocessToolRegistry } from "../task/subprocess-tool-registry";
@@ -52,13 +51,13 @@ function formatAjvErrors(errors: ErrorObject[] | null | undefined): string {
52
51
  .join("; ");
53
52
  }
54
53
 
55
- export class SubmitResultTool implements AgentTool<TObject, SubmitResultDetails> {
54
+ export class SubmitResultTool implements AgentTool<TSchema, SubmitResultDetails> {
56
55
  readonly name = "submit_result";
57
56
  readonly label = "Submit Result";
58
57
  readonly description =
59
58
  "Finish the task with structured JSON output. Call exactly once at the end of the task.\n\n" +
60
- "If you cannot complete the task, call with status='aborted' and an error message.";
61
- readonly parameters: TObject;
59
+ "If you cannot complete the task, call with an error message payload.";
60
+ readonly parameters: TSchema;
62
61
  readonly strict = true;
63
62
 
64
63
  readonly #validate?: ValidateFunction;
@@ -92,45 +91,45 @@ export class SubmitResultTool implements AgentTool<TObject, SubmitResultDetails>
92
91
  })
93
92
  : Type.Object({}, { additionalProperties: true, description: "Structured JSON output (no schema specified)" });
94
93
 
95
- this.parameters = Type.Object({
96
- data: Type.Optional(dataSchema),
97
- status: Type.Optional(
98
- StringEnum(["success", "aborted"], {
99
- description: "Use 'aborted' if the task cannot be completed, defaults to 'success'",
100
- }),
101
- ),
102
- error: Type.Optional(Type.String({ description: "Error message when status is 'aborted'" })),
103
- });
94
+ this.parameters = Type.Union([
95
+ Type.Object({
96
+ data: dataSchema,
97
+ }),
98
+ Type.Object({
99
+ error: Type.String({ description: "Error message when the task cannot be completed" }),
100
+ }),
101
+ ]);
104
102
  }
105
103
 
106
104
  async execute(
107
105
  _toolCallId: string,
108
- params: Static<TObject>,
106
+ params: Static<TSchema>,
109
107
  _signal?: AbortSignal,
110
108
  _onUpdate?: AgentToolUpdateCallback<SubmitResultDetails>,
111
109
  _context?: AgentToolContext,
112
110
  ): Promise<AgentToolResult<SubmitResultDetails>> {
113
- const status = (params.status ?? "success") as "success" | "aborted";
111
+ const raw = params as Record<string, unknown>;
112
+ const errorMessage = typeof raw.error === "string" ? raw.error : undefined;
113
+ const status = errorMessage !== undefined ? "aborted" : "success";
114
+ const data = raw.data;
114
115
 
115
- // Skip validation when aborting - data is optional for aborts
116
116
  if (status === "success") {
117
- if (params.data === undefined || params.data === null) {
118
- throw new Error("data is required when status is 'success' (got null/undefined)");
117
+ if (data === undefined || data === null) {
118
+ throw new Error("data is required when submit_result indicates success");
119
119
  }
120
120
  if (this.#schemaError) {
121
121
  throw new Error(`Invalid output schema: ${this.#schemaError}`);
122
122
  }
123
- if (this.#validate && !this.#validate(params.data)) {
123
+ if (this.#validate && !this.#validate(data)) {
124
124
  throw new Error(`Output does not match schema: ${formatAjvErrors(this.#validate.errors)}`);
125
125
  }
126
126
  }
127
127
 
128
- const responseText =
129
- status === "aborted" ? `Task aborted: ${params.error || "No reason provided"}` : "Result submitted.";
128
+ const responseText = status === "aborted" ? `Task aborted: ${errorMessage}` : "Result submitted.";
130
129
 
131
130
  return {
132
131
  content: [{ type: "text", text: responseText }],
133
- details: { data: params.data, status, error: params.error as string | undefined },
132
+ details: { data, status, error: errorMessage },
134
133
  };
135
134
  }
136
135
  }