@os-eco/overstory-cli 0.8.6 → 0.8.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +11 -8
- package/package.json +1 -1
- package/src/agents/hooks-deployer.test.ts +185 -12
- package/src/agents/hooks-deployer.ts +57 -1
- package/src/commands/coordinator.test.ts +74 -5
- package/src/commands/coordinator.ts +27 -3
- package/src/commands/dashboard.ts +84 -18
- package/src/commands/ecosystem.test.ts +101 -0
- package/src/commands/init.test.ts +74 -0
- package/src/commands/init.ts +36 -14
- package/src/commands/sling.test.ts +33 -0
- package/src/commands/sling.ts +106 -38
- package/src/commands/supervisor.ts +2 -0
- package/src/index.ts +1 -1
- package/src/merge/resolver.test.ts +141 -7
- package/src/merge/resolver.ts +61 -8
- package/src/runtimes/claude.test.ts +32 -7
- package/src/runtimes/claude.ts +19 -4
- package/src/runtimes/codex.test.ts +13 -0
- package/src/runtimes/codex.ts +18 -2
- package/src/runtimes/copilot.ts +3 -0
- package/src/runtimes/cursor.test.ts +497 -0
- package/src/runtimes/cursor.ts +205 -0
- package/src/runtimes/gemini.ts +3 -0
- package/src/runtimes/opencode.ts +3 -0
- package/src/runtimes/pi.test.ts +1 -1
- package/src/runtimes/pi.ts +3 -0
- package/src/runtimes/registry.test.ts +21 -1
- package/src/runtimes/registry.ts +3 -0
- package/src/runtimes/sapling.ts +3 -0
- package/src/runtimes/types.ts +5 -0
- package/src/schema-consistency.test.ts +1 -0
- package/src/sessions/store.test.ts +178 -0
- package/src/sessions/store.ts +44 -8
- package/src/types.ts +8 -1
- package/src/worktree/tmux.test.ts +150 -0
- package/src/worktree/tmux.ts +126 -23
package/src/commands/sling.ts
CHANGED
|
@@ -42,8 +42,11 @@ import { createWorktree, rollbackWorktree } from "../worktree/manager.ts";
|
|
|
42
42
|
import { spawnHeadlessAgent } from "../worktree/process.ts";
|
|
43
43
|
import {
|
|
44
44
|
capturePaneContent,
|
|
45
|
+
checkSessionState,
|
|
45
46
|
createSession,
|
|
46
47
|
ensureTmuxAvailable,
|
|
48
|
+
isSessionAlive,
|
|
49
|
+
killSession,
|
|
47
50
|
sendKeys,
|
|
48
51
|
waitForTuiReady,
|
|
49
52
|
} from "../worktree/tmux.ts";
|
|
@@ -274,6 +277,27 @@ export function shouldShowScoutWarning(
|
|
|
274
277
|
return !parentHasScouts(sessions, parentAgent);
|
|
275
278
|
}
|
|
276
279
|
|
|
280
|
+
/**
|
|
281
|
+
* Resolve which canonical repo directories should be writable to an
|
|
282
|
+
* interactive agent runtime in addition to its worktree sandbox.
|
|
283
|
+
*
|
|
284
|
+
* All interactive agents need `.overstory` so they can access shared mail,
|
|
285
|
+
* metrics, and session state. Only `lead` agents need canonical `.git`
|
|
286
|
+
* because they can spawn child worktrees from inside the runtime.
|
|
287
|
+
*
|
|
288
|
+
* @param projectRoot - Absolute path to the canonical repository root
|
|
289
|
+
* @param capability - Capability being launched
|
|
290
|
+
*/
|
|
291
|
+
export function getSharedWritableDirs(projectRoot: string, capability: string): string[] {
|
|
292
|
+
const sharedWritableDirs = [join(projectRoot, ".overstory")];
|
|
293
|
+
|
|
294
|
+
if (capability === "lead") {
|
|
295
|
+
sharedWritableDirs.push(join(projectRoot, ".git"));
|
|
296
|
+
}
|
|
297
|
+
|
|
298
|
+
return sharedWritableDirs;
|
|
299
|
+
}
|
|
300
|
+
|
|
277
301
|
/**
|
|
278
302
|
* Check if any active agent is already working on the given task ID.
|
|
279
303
|
* Returns the agent name if locked, or null if the task is free.
|
|
@@ -569,47 +593,63 @@ export async function slingCommand(taskId: string, opts: SlingOptions): Promise<
|
|
|
569
593
|
// 4. Resolve or create run_id for this spawn
|
|
570
594
|
const overstoryDir = join(config.project.root, ".overstory");
|
|
571
595
|
const currentRunPath = join(overstoryDir, "current-run.txt");
|
|
572
|
-
let runId: string;
|
|
573
|
-
|
|
574
|
-
const currentRunFile = Bun.file(currentRunPath);
|
|
575
|
-
if (await currentRunFile.exists()) {
|
|
576
|
-
runId = (await currentRunFile.text()).trim();
|
|
577
|
-
} else {
|
|
578
|
-
runId = `run-${new Date().toISOString().replace(/[:.]/g, "-")}`;
|
|
579
|
-
const runStore = createRunStore(join(overstoryDir, "sessions.db"));
|
|
580
|
-
try {
|
|
581
|
-
runStore.createRun({
|
|
582
|
-
id: runId,
|
|
583
|
-
startedAt: new Date().toISOString(),
|
|
584
|
-
coordinatorSessionId: null,
|
|
585
|
-
status: "active",
|
|
586
|
-
});
|
|
587
|
-
} finally {
|
|
588
|
-
runStore.close();
|
|
589
|
-
}
|
|
590
|
-
await Bun.write(currentRunPath, runId);
|
|
591
|
-
}
|
|
592
|
-
|
|
593
|
-
// 4b. Check per-run session limit
|
|
594
|
-
if (config.agents.maxSessionsPerRun > 0) {
|
|
595
|
-
const runCheckStore = createRunStore(join(overstoryDir, "sessions.db"));
|
|
596
|
-
try {
|
|
597
|
-
const run = runCheckStore.getRun(runId);
|
|
598
|
-
if (run && checkRunSessionLimit(config.agents.maxSessionsPerRun, run.agentCount)) {
|
|
599
|
-
throw new AgentError(
|
|
600
|
-
`Run session limit reached: ${run.agentCount}/${config.agents.maxSessionsPerRun} agents spawned in run "${runId}". ` +
|
|
601
|
-
`Increase agents.maxSessionsPerRun in config.yaml or start a new run.`,
|
|
602
|
-
{ agentName: name },
|
|
603
|
-
);
|
|
604
|
-
}
|
|
605
|
-
} finally {
|
|
606
|
-
runCheckStore.close();
|
|
607
|
-
}
|
|
608
|
-
}
|
|
609
596
|
|
|
610
597
|
// 5. Check name uniqueness and concurrency limit against active sessions
|
|
598
|
+
// (Session store opened here so we can also use it for parent run ID inheritance in step 4.)
|
|
611
599
|
const { store } = openSessionStore(overstoryDir);
|
|
612
600
|
try {
|
|
601
|
+
// 4a. Resolve run ID: inherit from parent → current-run.txt fallback → create new.
|
|
602
|
+
// Parent inheritance ensures child agents belong to the same run as their coordinator.
|
|
603
|
+
const runId = await (async (): Promise<string> => {
|
|
604
|
+
if (parentAgent) {
|
|
605
|
+
const parentSession = store.getByName(parentAgent);
|
|
606
|
+
if (parentSession?.runId) {
|
|
607
|
+
return parentSession.runId;
|
|
608
|
+
}
|
|
609
|
+
}
|
|
610
|
+
|
|
611
|
+
// Fallback: read current-run.txt (backward compat with single-coordinator setups).
|
|
612
|
+
const currentRunFile = Bun.file(currentRunPath);
|
|
613
|
+
if (await currentRunFile.exists()) {
|
|
614
|
+
const text = (await currentRunFile.text()).trim();
|
|
615
|
+
if (text) return text;
|
|
616
|
+
}
|
|
617
|
+
|
|
618
|
+
// Create a new run if none exists.
|
|
619
|
+
const newRunId = `run-${new Date().toISOString().replace(/[:.]/g, "-")}`;
|
|
620
|
+
const runStore = createRunStore(join(overstoryDir, "sessions.db"));
|
|
621
|
+
try {
|
|
622
|
+
runStore.createRun({
|
|
623
|
+
id: newRunId,
|
|
624
|
+
startedAt: new Date().toISOString(),
|
|
625
|
+
coordinatorSessionId: null,
|
|
626
|
+
coordinatorName: null,
|
|
627
|
+
status: "active",
|
|
628
|
+
});
|
|
629
|
+
} finally {
|
|
630
|
+
runStore.close();
|
|
631
|
+
}
|
|
632
|
+
await Bun.write(currentRunPath, newRunId);
|
|
633
|
+
return newRunId;
|
|
634
|
+
})();
|
|
635
|
+
|
|
636
|
+
// 4b. Check per-run session limit
|
|
637
|
+
if (config.agents.maxSessionsPerRun > 0) {
|
|
638
|
+
const runCheckStore = createRunStore(join(overstoryDir, "sessions.db"));
|
|
639
|
+
try {
|
|
640
|
+
const run = runCheckStore.getRun(runId);
|
|
641
|
+
if (run && checkRunSessionLimit(config.agents.maxSessionsPerRun, run.agentCount)) {
|
|
642
|
+
throw new AgentError(
|
|
643
|
+
`Run session limit reached: ${run.agentCount}/${config.agents.maxSessionsPerRun} agents spawned in run "${runId}". ` +
|
|
644
|
+
`Increase agents.maxSessionsPerRun in config.yaml or start a new run.`,
|
|
645
|
+
{ agentName: name },
|
|
646
|
+
);
|
|
647
|
+
}
|
|
648
|
+
} finally {
|
|
649
|
+
runCheckStore.close();
|
|
650
|
+
}
|
|
651
|
+
}
|
|
652
|
+
|
|
613
653
|
const activeSessions = store.getActive();
|
|
614
654
|
if (activeSessions.length >= config.agents.maxConcurrent) {
|
|
615
655
|
throw new AgentError(
|
|
@@ -858,6 +898,7 @@ export async function slingCommand(taskId: string, opts: SlingOptions): Promise<
|
|
|
858
898
|
...runtime.buildEnv(resolvedModel),
|
|
859
899
|
OVERSTORY_AGENT_NAME: name,
|
|
860
900
|
OVERSTORY_WORKTREE_PATH: worktreePath,
|
|
901
|
+
OVERSTORY_TASK_ID: taskId,
|
|
861
902
|
};
|
|
862
903
|
const argv = runtime.buildDirectSpawn({
|
|
863
904
|
cwd: worktreePath,
|
|
@@ -943,16 +984,19 @@ export async function slingCommand(taskId: string, opts: SlingOptions): Promise<
|
|
|
943
984
|
model: resolvedModel.model,
|
|
944
985
|
permissionMode: "bypass",
|
|
945
986
|
cwd: worktreePath,
|
|
987
|
+
sharedWritableDirs: getSharedWritableDirs(config.project.root, capability),
|
|
946
988
|
env: {
|
|
947
989
|
...runtime.buildEnv(resolvedModel),
|
|
948
990
|
OVERSTORY_AGENT_NAME: name,
|
|
949
991
|
OVERSTORY_WORKTREE_PATH: worktreePath,
|
|
992
|
+
OVERSTORY_TASK_ID: taskId,
|
|
950
993
|
},
|
|
951
994
|
});
|
|
952
995
|
const pid = await createSession(tmuxSessionName, worktreePath, spawnCmd, {
|
|
953
996
|
...runtime.buildEnv(resolvedModel),
|
|
954
997
|
OVERSTORY_AGENT_NAME: name,
|
|
955
998
|
OVERSTORY_WORKTREE_PATH: worktreePath,
|
|
999
|
+
OVERSTORY_TASK_ID: taskId,
|
|
956
1000
|
});
|
|
957
1001
|
|
|
958
1002
|
// 13. Record session BEFORE sending the beacon so that hook-triggered
|
|
@@ -998,7 +1042,31 @@ export async function slingCommand(taskId: string, opts: SlingOptions): Promise<
|
|
|
998
1042
|
// Wait for Claude Code TUI to render before sending input.
|
|
999
1043
|
// Polling capture-pane is more reliable than a fixed sleep because
|
|
1000
1044
|
// TUI init time varies by machine load and model state.
|
|
1001
|
-
await waitForTuiReady(tmuxSessionName, (content) =>
|
|
1045
|
+
const tuiReady = await waitForTuiReady(tmuxSessionName, (content) =>
|
|
1046
|
+
runtime.detectReady(content),
|
|
1047
|
+
);
|
|
1048
|
+
if (!tuiReady) {
|
|
1049
|
+
const alive = await isSessionAlive(tmuxSessionName);
|
|
1050
|
+
store.updateState(name, "completed");
|
|
1051
|
+
|
|
1052
|
+
if (alive) {
|
|
1053
|
+
await killSession(tmuxSessionName);
|
|
1054
|
+
throw new AgentError(
|
|
1055
|
+
`Agent tmux session "${tmuxSessionName}" did not become ready during startup. The runtime may still be waiting on an interactive dialog or initializing too slowly.`,
|
|
1056
|
+
{ agentName: name },
|
|
1057
|
+
);
|
|
1058
|
+
}
|
|
1059
|
+
|
|
1060
|
+
const sessionState = await checkSessionState(tmuxSessionName);
|
|
1061
|
+
const detail =
|
|
1062
|
+
sessionState === "no_server"
|
|
1063
|
+
? "The tmux server is no longer running. It may have crashed or been killed externally."
|
|
1064
|
+
: "The agent process may have crashed or exited immediately before the TUI became ready.";
|
|
1065
|
+
throw new AgentError(
|
|
1066
|
+
`Agent tmux session "${tmuxSessionName}" died during startup. ${detail}`,
|
|
1067
|
+
{ agentName: name },
|
|
1068
|
+
);
|
|
1069
|
+
}
|
|
1002
1070
|
// Buffer for the input handler to attach after initial render
|
|
1003
1071
|
await Bun.sleep(1_000);
|
|
1004
1072
|
|
|
@@ -185,11 +185,13 @@ async function startSupervisor(opts: {
|
|
|
185
185
|
env: {
|
|
186
186
|
...runtime.buildEnv(resolvedModel),
|
|
187
187
|
OVERSTORY_AGENT_NAME: opts.name,
|
|
188
|
+
OVERSTORY_TASK_ID: opts.task,
|
|
188
189
|
},
|
|
189
190
|
});
|
|
190
191
|
const pid = await createSession(tmuxSession, projectRoot, spawnCmd, {
|
|
191
192
|
...runtime.buildEnv(resolvedModel),
|
|
192
193
|
OVERSTORY_AGENT_NAME: opts.name,
|
|
194
|
+
OVERSTORY_TASK_ID: opts.task,
|
|
193
195
|
});
|
|
194
196
|
|
|
195
197
|
// Wait for Claude Code TUI to render before sending input
|
package/src/index.ts
CHANGED
|
@@ -49,7 +49,7 @@ import { ConfigError, OverstoryError, WorktreeError } from "./errors.ts";
|
|
|
49
49
|
import { jsonError } from "./json.ts";
|
|
50
50
|
import { brand, chalk, muted, setQuiet } from "./logging/color.ts";
|
|
51
51
|
|
|
52
|
-
export const VERSION = "0.8.6";
|
|
52
|
+
export const VERSION = "0.8.7";
|
|
53
53
|
|
|
54
54
|
const rawArgs = process.argv.slice(2);
|
|
55
55
|
|
|
@@ -22,6 +22,7 @@ import type { MergeEntry, ParsedConflictPattern } from "../types.ts";
|
|
|
22
22
|
import {
|
|
23
23
|
buildConflictHistory,
|
|
24
24
|
createMergeResolver,
|
|
25
|
+
hasContentfulCanonical,
|
|
25
26
|
looksLikeProse,
|
|
26
27
|
parseConflictPatterns,
|
|
27
28
|
resolveConflictsUnion,
|
|
@@ -86,6 +87,20 @@ async function setupContentConflict(dir: string, baseBranch: string): Promise<vo
|
|
|
86
87
|
await commitFile(dir, "src/test.ts", "main modified content\n");
|
|
87
88
|
}
|
|
88
89
|
|
|
90
|
+
/**
|
|
91
|
+
* Set up a conflict where the canonical (HEAD) side is empty in the conflict marker.
|
|
92
|
+
* Main deletes a shared line; feature replaces it. Git produces a conflict with an
|
|
93
|
+
* empty HEAD side, so hasContentfulCanonical returns false and auto-resolve can safely
|
|
94
|
+
* keep the incoming content.
|
|
95
|
+
*/
|
|
96
|
+
async function setupEmptyCanonicalConflict(dir: string, baseBranch: string): Promise<void> {
|
|
97
|
+
await commitFile(dir, "src/test.ts", "line1\nshared line\nline3\n");
|
|
98
|
+
await runGitInDir(dir, ["checkout", "-b", "feature-branch"]);
|
|
99
|
+
await commitFile(dir, "src/test.ts", "line1\nnew content\nline3\n");
|
|
100
|
+
await runGitInDir(dir, ["checkout", baseBranch]);
|
|
101
|
+
await commitFile(dir, "src/test.ts", "line1\nline3\n"); // main deletes "shared line"
|
|
102
|
+
}
|
|
103
|
+
|
|
89
104
|
/**
|
|
90
105
|
* Create a delete/modify conflict: file is deleted on main but modified on
|
|
91
106
|
* the feature branch. This produces a conflict with NO conflict markers in
|
|
@@ -431,11 +446,13 @@ describe("createMergeResolver", () => {
|
|
|
431
446
|
});
|
|
432
447
|
|
|
433
448
|
describe("Tier 1 fail -> Tier 2: Auto-resolve", () => {
|
|
434
|
-
test("auto-resolves conflicts
|
|
449
|
+
test("auto-resolves conflicts when canonical side is empty (keeps incoming)", async () => {
|
|
435
450
|
const repoDir = await createTempGitRepo();
|
|
436
451
|
try {
|
|
437
452
|
const defaultBranch = await getDefaultBranch(repoDir);
|
|
438
|
-
|
|
453
|
+
// Use empty-canonical setup: main deletes a line, feature replaces it.
|
|
454
|
+
// The conflict marker has an empty HEAD side, so auto-resolve is safe.
|
|
455
|
+
await setupEmptyCanonicalConflict(repoDir, defaultBranch);
|
|
439
456
|
|
|
440
457
|
const entry = makeTestEntry({
|
|
441
458
|
branchName: "feature-branch",
|
|
@@ -453,11 +470,12 @@ describe("createMergeResolver", () => {
|
|
|
453
470
|
expect(result.tier).toBe("auto-resolve");
|
|
454
471
|
expect(result.entry.status).toBe("merged");
|
|
455
472
|
expect(result.entry.resolvedTier).toBe("auto-resolve");
|
|
473
|
+
expect(result.warnings).toEqual([]);
|
|
456
474
|
|
|
457
475
|
// The resolved file should contain the incoming (feature branch) content
|
|
458
476
|
const file = Bun.file(join(repoDir, "src/test.ts"));
|
|
459
477
|
const content = await file.text();
|
|
460
|
-
expect(content).
|
|
478
|
+
expect(content).toContain("new content");
|
|
461
479
|
} finally {
|
|
462
480
|
await cleanupTempDir(repoDir);
|
|
463
481
|
}
|
|
@@ -688,6 +706,7 @@ describe("createMergeResolver", () => {
|
|
|
688
706
|
expect(result).toHaveProperty("tier");
|
|
689
707
|
expect(result).toHaveProperty("conflictFiles");
|
|
690
708
|
expect(result).toHaveProperty("errorMessage");
|
|
709
|
+
expect(result).toHaveProperty("warnings");
|
|
691
710
|
});
|
|
692
711
|
|
|
693
712
|
test("failed result preserves original entry fields", async () => {
|
|
@@ -806,6 +825,117 @@ describe("createMergeResolver", () => {
|
|
|
806
825
|
});
|
|
807
826
|
});
|
|
808
827
|
|
|
828
|
+
describe("hasContentfulCanonical", () => {
|
|
829
|
+
test("returns true when canonical side has content", () => {
|
|
830
|
+
const content = [
|
|
831
|
+
"<<<<<<< HEAD\n",
|
|
832
|
+
"canonical content\n",
|
|
833
|
+
"=======\n",
|
|
834
|
+
"incoming content\n",
|
|
835
|
+
">>>>>>> feature-branch\n",
|
|
836
|
+
].join("");
|
|
837
|
+
expect(hasContentfulCanonical(content)).toBe(true);
|
|
838
|
+
});
|
|
839
|
+
|
|
840
|
+
test("returns false when canonical side is empty", () => {
|
|
841
|
+
const content = [
|
|
842
|
+
"<<<<<<< HEAD\n",
|
|
843
|
+
"=======\n",
|
|
844
|
+
"incoming content\n",
|
|
845
|
+
">>>>>>> feature-branch\n",
|
|
846
|
+
].join("");
|
|
847
|
+
expect(hasContentfulCanonical(content)).toBe(false);
|
|
848
|
+
});
|
|
849
|
+
|
|
850
|
+
test("returns false when canonical is whitespace only", () => {
|
|
851
|
+
const content = [
|
|
852
|
+
"<<<<<<< HEAD\n",
|
|
853
|
+
" \n",
|
|
854
|
+
"\t\n",
|
|
855
|
+
"=======\n",
|
|
856
|
+
"incoming content\n",
|
|
857
|
+
">>>>>>> feature-branch\n",
|
|
858
|
+
].join("");
|
|
859
|
+
expect(hasContentfulCanonical(content)).toBe(false);
|
|
860
|
+
});
|
|
861
|
+
|
|
862
|
+
test("returns false when no conflict markers", () => {
|
|
863
|
+
expect(hasContentfulCanonical("no conflicts here\n")).toBe(false);
|
|
864
|
+
expect(hasContentfulCanonical("")).toBe(false);
|
|
865
|
+
});
|
|
866
|
+
|
|
867
|
+
test("returns true if ANY block has canonical content (multiple blocks)", () => {
|
|
868
|
+
const block1 = "<<<<<<< HEAD\n=======\nonly incoming\n>>>>>>> branch\n";
|
|
869
|
+
const block2 = "<<<<<<< HEAD\ncanonical content\n=======\nincoming\n>>>>>>> branch\n";
|
|
870
|
+
const content = `${block1}middle\n${block2}`;
|
|
871
|
+
expect(hasContentfulCanonical(content)).toBe(true);
|
|
872
|
+
});
|
|
873
|
+
});
|
|
874
|
+
|
|
875
|
+
describe("auto-resolve: content protection", () => {
|
|
876
|
+
test("auto-resolve skips files with contentful canonical, result includes warning", async () => {
|
|
877
|
+
const repoDir = await createTempGitRepo();
|
|
878
|
+
try {
|
|
879
|
+
const defaultBranch = await getDefaultBranch(repoDir);
|
|
880
|
+
// setupContentConflict: both canonical and incoming have content
|
|
881
|
+
await setupContentConflict(repoDir, defaultBranch);
|
|
882
|
+
|
|
883
|
+
const entry = makeTestEntry({
|
|
884
|
+
branchName: "feature-branch",
|
|
885
|
+
filesModified: ["src/test.ts"],
|
|
886
|
+
});
|
|
887
|
+
|
|
888
|
+
// AI and reimagine disabled — should FAIL because auto-resolve correctly refuses
|
|
889
|
+
const resolver = createMergeResolver({
|
|
890
|
+
aiResolveEnabled: false,
|
|
891
|
+
reimagineEnabled: false,
|
|
892
|
+
});
|
|
893
|
+
|
|
894
|
+
const result = await resolver.resolve(entry, defaultBranch, repoDir);
|
|
895
|
+
|
|
896
|
+
expect(result.success).toBe(false);
|
|
897
|
+
expect(result.warnings.length).toBeGreaterThan(0);
|
|
898
|
+
expect(result.warnings[0]).toContain("src/test.ts");
|
|
899
|
+
} finally {
|
|
900
|
+
await cleanupTempDir(repoDir);
|
|
901
|
+
}
|
|
902
|
+
});
|
|
903
|
+
});
|
|
904
|
+
|
|
905
|
+
describe("untracked files: no silent commit", () => {
|
|
906
|
+
test("untracked overlapping files are deleted, not committed", async () => {
|
|
907
|
+
const repoDir = await createTempGitRepo();
|
|
908
|
+
try {
|
|
909
|
+
const defaultBranch = await getDefaultBranch(repoDir);
|
|
910
|
+
await setupCleanMerge(repoDir, defaultBranch);
|
|
911
|
+
|
|
912
|
+
// Place an untracked file at the path the feature branch will bring in
|
|
913
|
+
await Bun.write(`${repoDir}/src/feature-file.ts`, "local untracked content\n");
|
|
914
|
+
|
|
915
|
+
const entry = makeTestEntry({
|
|
916
|
+
branchName: "feature-branch",
|
|
917
|
+
filesModified: ["src/feature-file.ts"],
|
|
918
|
+
});
|
|
919
|
+
|
|
920
|
+
const resolver = createMergeResolver({
|
|
921
|
+
aiResolveEnabled: false,
|
|
922
|
+
reimagineEnabled: false,
|
|
923
|
+
});
|
|
924
|
+
|
|
925
|
+
const result = await resolver.resolve(entry, defaultBranch, repoDir);
|
|
926
|
+
|
|
927
|
+
expect(result.success).toBe(true);
|
|
928
|
+
expect(result.warnings.some((w) => w.includes("src/feature-file.ts"))).toBe(true);
|
|
929
|
+
|
|
930
|
+
// Verify git log does NOT contain the "commit untracked files before merge" commit
|
|
931
|
+
const log = await runGitInDir(repoDir, ["log", "--oneline"]);
|
|
932
|
+
expect(log).not.toContain("commit untracked files before merge");
|
|
933
|
+
} finally {
|
|
934
|
+
await cleanupTempDir(repoDir);
|
|
935
|
+
}
|
|
936
|
+
});
|
|
937
|
+
});
|
|
938
|
+
|
|
809
939
|
describe("Tier 3: AI-resolve prose rejection", () => {
|
|
810
940
|
test("rejects prose output and falls through to failure", async () => {
|
|
811
941
|
const repoDir = await createTempGitRepo();
|
|
@@ -858,7 +988,8 @@ describe("createMergeResolver", () => {
|
|
|
858
988
|
const repoDir = await createTempGitRepo();
|
|
859
989
|
try {
|
|
860
990
|
const defaultBranch = await getDefaultBranch(repoDir);
|
|
861
|
-
|
|
991
|
+
// Use empty-canonical setup so auto-resolve completes successfully
|
|
992
|
+
await setupEmptyCanonicalConflict(repoDir, defaultBranch);
|
|
862
993
|
|
|
863
994
|
const entry = makeTestEntry({
|
|
864
995
|
branchName: "feature-branch",
|
|
@@ -884,7 +1015,8 @@ describe("createMergeResolver", () => {
|
|
|
884
1015
|
const repoDir = await createTempGitRepo();
|
|
885
1016
|
try {
|
|
886
1017
|
const defaultBranch = await getDefaultBranch(repoDir);
|
|
887
|
-
|
|
1018
|
+
// Use empty-canonical setup so auto-resolve completes successfully
|
|
1019
|
+
await setupEmptyCanonicalConflict(repoDir, defaultBranch);
|
|
888
1020
|
|
|
889
1021
|
const entry = makeTestEntry({
|
|
890
1022
|
branchName: "feature-branch",
|
|
@@ -1014,7 +1146,8 @@ describe("createMergeResolver", () => {
|
|
|
1014
1146
|
const repoDir = await createTempGitRepo();
|
|
1015
1147
|
try {
|
|
1016
1148
|
const defaultBranch = await getDefaultBranch(repoDir);
|
|
1017
|
-
|
|
1149
|
+
// Use empty-canonical setup so auto-resolve completes successfully
|
|
1150
|
+
await setupEmptyCanonicalConflict(repoDir, defaultBranch);
|
|
1018
1151
|
|
|
1019
1152
|
const entry = makeTestEntry({
|
|
1020
1153
|
branchName: "feature-branch",
|
|
@@ -1709,7 +1842,8 @@ describe("createMergeResolver", () => {
|
|
|
1709
1842
|
const repoDir = await createTempGitRepo();
|
|
1710
1843
|
try {
|
|
1711
1844
|
const defaultBranch = await getDefaultBranch(repoDir);
|
|
1712
|
-
|
|
1845
|
+
// Use empty-canonical setup so auto-resolve completes successfully
|
|
1846
|
+
await setupEmptyCanonicalConflict(repoDir, defaultBranch);
|
|
1713
1847
|
|
|
1714
1848
|
const entry = makeTestEntry({
|
|
1715
1849
|
branchName: "feature-branch",
|
package/src/merge/resolver.ts
CHANGED
|
@@ -11,6 +11,7 @@
|
|
|
11
11
|
* Disabled tiers are skipped. Uses Bun.spawn for all subprocess calls.
|
|
12
12
|
*/
|
|
13
13
|
|
|
14
|
+
import { unlinkSync } from "node:fs";
|
|
14
15
|
import { MergeError } from "../errors.ts";
|
|
15
16
|
import type { MulchClient } from "../mulch/client.ts";
|
|
16
17
|
import { getRuntime } from "../runtimes/registry.ts";
|
|
@@ -184,6 +185,24 @@ export function resolveConflictsUnion(content: string): string | null {
|
|
|
184
185
|
});
|
|
185
186
|
}
|
|
186
187
|
|
|
188
|
+
/**
|
|
189
|
+
* Detect if any conflict block has non-whitespace content on the canonical (HEAD) side.
|
|
190
|
+
* Returns true if auto-resolving with keep-incoming would silently discard canonical content.
|
|
191
|
+
* Use this before calling resolveConflictsKeepIncoming to prevent data loss.
|
|
192
|
+
*/
|
|
193
|
+
export function hasContentfulCanonical(content: string): boolean {
|
|
194
|
+
const conflictPattern = /^<{7} .+\n([\s\S]*?)^={7}\n([\s\S]*?)^>{7} .+\n?/gm;
|
|
195
|
+
let match = conflictPattern.exec(content);
|
|
196
|
+
while (match !== null) {
|
|
197
|
+
const canonical = match[1] ?? "";
|
|
198
|
+
if (canonical.trim().length > 0) {
|
|
199
|
+
return true;
|
|
200
|
+
}
|
|
201
|
+
match = conflictPattern.exec(content);
|
|
202
|
+
}
|
|
203
|
+
return false;
|
|
204
|
+
}
|
|
205
|
+
|
|
187
206
|
/**
|
|
188
207
|
* Check if a file has the `merge=union` gitattribute set.
|
|
189
208
|
* Returns true if `git check-attr merge -- <file>` ends with ": merge: union".
|
|
@@ -231,12 +250,15 @@ async function tryCleanMerge(
|
|
|
231
250
|
/**
|
|
232
251
|
* Tier 2: Auto-resolve conflicts by keeping incoming (agent) changes.
|
|
233
252
|
* Parses conflict markers and keeps the content between ======= and >>>>>>>.
|
|
253
|
+
* Skips files where the canonical side has non-whitespace content to prevent
|
|
254
|
+
* silent data loss — those files are escalated to higher tiers.
|
|
234
255
|
*/
|
|
235
256
|
async function tryAutoResolve(
|
|
236
257
|
conflictFiles: string[],
|
|
237
258
|
repoRoot: string,
|
|
238
|
-
): Promise<{ success: boolean; remainingConflicts: string[] }> {
|
|
259
|
+
): Promise<{ success: boolean; remainingConflicts: string[]; contentDropWarnings: string[] }> {
|
|
239
260
|
const remainingConflicts: string[] = [];
|
|
261
|
+
const contentDropWarnings: string[] = [];
|
|
240
262
|
|
|
241
263
|
for (const file of conflictFiles) {
|
|
242
264
|
const filePath = `${repoRoot}/${file}`;
|
|
@@ -244,6 +266,18 @@ async function tryAutoResolve(
|
|
|
244
266
|
try {
|
|
245
267
|
const content = await readFile(filePath);
|
|
246
268
|
const isUnion = await checkMergeUnion(repoRoot, file);
|
|
269
|
+
|
|
270
|
+
// For non-union files, check if the canonical side has content.
|
|
271
|
+
// If it does, auto-resolving would silently discard that content.
|
|
272
|
+
// Escalate to a higher tier instead.
|
|
273
|
+
if (!isUnion && hasContentfulCanonical(content)) {
|
|
274
|
+
contentDropWarnings.push(
|
|
275
|
+
`auto-resolve skipped for ${file}: canonical side has content that would be discarded`,
|
|
276
|
+
);
|
|
277
|
+
remainingConflicts.push(file);
|
|
278
|
+
continue;
|
|
279
|
+
}
|
|
280
|
+
|
|
247
281
|
const resolved = isUnion
|
|
248
282
|
? resolveConflictsUnion(content)
|
|
249
283
|
: resolveConflictsKeepIncoming(content);
|
|
@@ -265,12 +299,12 @@ async function tryAutoResolve(
|
|
|
265
299
|
}
|
|
266
300
|
|
|
267
301
|
if (remainingConflicts.length > 0) {
|
|
268
|
-
return { success: false, remainingConflicts };
|
|
302
|
+
return { success: false, remainingConflicts, contentDropWarnings };
|
|
269
303
|
}
|
|
270
304
|
|
|
271
305
|
// All files resolved — commit
|
|
272
306
|
const { exitCode } = await runGit(repoRoot, ["commit", "--no-edit"]);
|
|
273
|
-
return { success: exitCode === 0, remainingConflicts };
|
|
307
|
+
return { success: exitCode === 0, remainingConflicts, contentDropWarnings };
|
|
274
308
|
}
|
|
275
309
|
|
|
276
310
|
/**
|
|
@@ -689,13 +723,15 @@ export function createMergeResolver(options: {
|
|
|
689
723
|
didStash = true;
|
|
690
724
|
}
|
|
691
725
|
|
|
726
|
+
const warnings: string[] = [];
|
|
692
727
|
let lastTier: ResolutionTier = "clean-merge";
|
|
693
728
|
let conflictFiles: string[] = [];
|
|
694
729
|
|
|
695
730
|
try {
|
|
696
|
-
//
|
|
731
|
+
// Delete untracked files overlapping entry.filesModified before merging.
|
|
697
732
|
// git merge refuses to run if untracked files in the working tree would
|
|
698
|
-
// be overwritten by the incoming branch.
|
|
733
|
+
// be overwritten by the incoming branch. Deleting them lets the merge
|
|
734
|
+
// proceed and bring in the branch version.
|
|
699
735
|
const { stdout: untrackedOut } = await runGit(repoRoot, [
|
|
700
736
|
"ls-files",
|
|
701
737
|
"--others",
|
|
@@ -707,9 +743,18 @@ export function createMergeResolver(options: {
|
|
|
707
743
|
.filter((f) => f.length > 0);
|
|
708
744
|
const entryFileSet = new Set(entry.filesModified);
|
|
709
745
|
const overlappingUntracked = untrackedFiles.filter((f) => entryFileSet.has(f));
|
|
710
|
-
|
|
711
|
-
|
|
712
|
-
|
|
746
|
+
for (const file of overlappingUntracked) {
|
|
747
|
+
const filePath = `${repoRoot}/${file}`;
|
|
748
|
+
try {
|
|
749
|
+
if (await Bun.file(filePath).exists()) {
|
|
750
|
+
unlinkSync(filePath);
|
|
751
|
+
}
|
|
752
|
+
warnings.push(
|
|
753
|
+
`untracked file deleted before merge: ${file} (branch version will be used)`,
|
|
754
|
+
);
|
|
755
|
+
} catch {
|
|
756
|
+
// Ignore errors removing untracked files
|
|
757
|
+
}
|
|
713
758
|
}
|
|
714
759
|
|
|
715
760
|
// Tier 1: Clean merge
|
|
@@ -732,6 +777,7 @@ export function createMergeResolver(options: {
|
|
|
732
777
|
tier: "clean-merge",
|
|
733
778
|
conflictFiles: [],
|
|
734
779
|
errorMessage: null,
|
|
780
|
+
warnings,
|
|
735
781
|
};
|
|
736
782
|
}
|
|
737
783
|
conflictFiles = cleanResult.conflictFiles;
|
|
@@ -750,6 +796,9 @@ export function createMergeResolver(options: {
|
|
|
750
796
|
if (!history.skipTiers.includes("auto-resolve")) {
|
|
751
797
|
lastTier = "auto-resolve";
|
|
752
798
|
const autoResult = await tryAutoResolve(conflictFiles, repoRoot);
|
|
799
|
+
if (autoResult.contentDropWarnings.length > 0) {
|
|
800
|
+
warnings.push(...autoResult.contentDropWarnings);
|
|
801
|
+
}
|
|
753
802
|
if (autoResult.success) {
|
|
754
803
|
if (options.mulchClient) {
|
|
755
804
|
recordConflictPattern(
|
|
@@ -777,6 +826,7 @@ export function createMergeResolver(options: {
|
|
|
777
826
|
tier: "auto-resolve",
|
|
778
827
|
conflictFiles,
|
|
779
828
|
errorMessage: null,
|
|
829
|
+
warnings,
|
|
780
830
|
};
|
|
781
831
|
}
|
|
782
832
|
conflictFiles = autoResult.remainingConflicts;
|
|
@@ -812,6 +862,7 @@ export function createMergeResolver(options: {
|
|
|
812
862
|
tier: "ai-resolve",
|
|
813
863
|
conflictFiles,
|
|
814
864
|
errorMessage: null,
|
|
865
|
+
warnings,
|
|
815
866
|
};
|
|
816
867
|
}
|
|
817
868
|
conflictFiles = aiResult.remainingConflicts;
|
|
@@ -847,6 +898,7 @@ export function createMergeResolver(options: {
|
|
|
847
898
|
tier: "reimagine",
|
|
848
899
|
conflictFiles: [],
|
|
849
900
|
errorMessage: null,
|
|
901
|
+
warnings,
|
|
850
902
|
};
|
|
851
903
|
}
|
|
852
904
|
}
|
|
@@ -868,6 +920,7 @@ export function createMergeResolver(options: {
|
|
|
868
920
|
tier: lastTier,
|
|
869
921
|
conflictFiles,
|
|
870
922
|
errorMessage: `All enabled resolution tiers failed (last attempted: ${lastTier})`,
|
|
923
|
+
warnings,
|
|
871
924
|
};
|
|
872
925
|
} finally {
|
|
873
926
|
if (didStash) {
|