@ulpi/cli 0.1.4 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/{auth-PN7TMQHV-2W4ICG64.js → auth-FWM7MM4Q-VZC3U2XZ.js} +1 -1
- package/dist/{auth-ECQ3IB4E.js → auth-HDK7ECJL.js} +2 -1
- package/dist/{chunk-3SBPZRB5.js → chunk-3BCW6ABU.js} +402 -142
- package/dist/{chunk-JGBXM5NC.js → chunk-3WB5CXH4.js} +180 -5
- package/dist/{chunk-2HEE5OKX.js → chunk-4UCJIAOU.js} +2 -2
- package/dist/chunk-4XTHZVDS.js +109 -0
- package/dist/chunk-4ZPOZULQ.js +6522 -0
- package/dist/{chunk-SIAQVRKG.js → chunk-5MI5GIXM.js} +48 -2
- package/dist/{chunk-KLEASXUR.js → chunk-6ZL6NXMV.js} +1 -1
- package/dist/chunk-76D3BYJD.js +221 -0
- package/dist/{chunk-ZLYRPD7I.js → chunk-AWOSRA5F.js} +1 -1
- package/dist/{chunk-PDR55ZNW.js → chunk-BFEKZZHM.js} +274 -57
- package/dist/chunk-C7CLUQI6.js +1286 -0
- package/dist/{chunk-7AL4DOEJ.js → chunk-E3B5NROU.js} +7 -7
- package/dist/chunk-EJ7TW77N.js +1418 -0
- package/dist/{chunk-5J6NLQUN.js → chunk-IV6MWETF.js} +383 -168
- package/dist/chunk-IZPJHSPX.js +1478 -0
- package/dist/chunk-JLHNLM3C.js +228 -0
- package/dist/{chunk-BZL5H4YQ.js → chunk-KYYI23AQ.js} +2 -2
- package/dist/{chunk-2CLNOKPA.js → chunk-RSFJ6QSR.js} +18 -0
- package/dist/chunk-S6ANCSYO.js +1271 -0
- package/dist/chunk-SEU7WWNQ.js +1251 -0
- package/dist/chunk-SNQ7NAIS.js +453 -0
- package/dist/{ulpi-RMMCUAGP-JCJ273T6.js → chunk-TSLDGT5O.js} +73 -35
- package/dist/{chunk-SPOI23SB.js → chunk-UXHCHOWQ.js} +83 -62
- package/dist/chunk-V2H5D6Y3.js +146 -0
- package/dist/{chunk-QJ5GSMEC.js → chunk-VVEDXI7E.js} +2 -1
- package/dist/chunk-VXH5Y4FO.js +6761 -0
- package/dist/chunk-WED4LM5N.js +322 -0
- package/dist/{chunk-74WVVWJ4.js → chunk-YOKL7RB5.js} +184 -15
- package/dist/chunk-Z53CAR7G.js +298 -0
- package/dist/ci-X3U2W4HC.js +854 -0
- package/dist/cloud-2F3NLVHN.js +274 -0
- package/dist/{codemap-RKSD4MIE.js → codemap-XNGMAF3F.js} +37 -37
- package/dist/codex-MB5YTMRT.js +132 -0
- package/dist/{config-EGAXXCGL.js → config-OOELBYTH.js} +1 -1
- package/dist/dist-2BJYR5EI.js +59 -0
- package/dist/dist-2K7IEVTA.js +43 -0
- package/dist/dist-3EIQTZHT.js +1380 -0
- package/dist/{dist-YA2BWZB2.js → dist-4U5L2X2C.js} +2 -2
- package/dist/{dist-UKMCJBB2.js → dist-54KAMNLO.js} +16 -15
- package/dist/dist-6M4MZWZW.js +58 -0
- package/dist/dist-6X576SU2.js +27 -0
- package/dist/dist-7QOEYLFX.js +103 -0
- package/dist/dist-AYBGHEDY.js +2541 -0
- package/dist/dist-EK45QNEM.js +45 -0
- package/dist/{dist-CS2VKNYS.js → dist-FKFEJRPX.js} +16 -15
- package/dist/dist-GTEJUBBT.js +66 -0
- package/dist/dist-HA74OKJZ.js +40 -0
- package/dist/dist-HU5RZAON.js +48 -0
- package/dist/dist-IYE3OBRB.js +374 -0
- package/dist/{dist-GJYT2OQV.js → dist-JLU26AB6.js} +12 -9
- package/dist/{dist-6G7JC2RA.js → dist-KUCI6JFE.js} +49 -9
- package/dist/dist-NUEMFZFL.js +33 -0
- package/dist/{dist-RKOGLK7R.js → dist-NUXMDXZ3.js} +31 -3
- package/dist/{dist-QAU3LGJN.js → dist-YCNWHSLN.js} +15 -5
- package/dist/{dist-CB5D5LMO.js → dist-YFFG2ZD6.js} +9 -16
- package/dist/dist-ZG4OKCSR.js +15 -0
- package/dist/doctor-SI4LLLDZ.js +345 -0
- package/dist/{export-import-4A5MWLIA.js → export-import-JFQH4KSJ.js} +1 -1
- package/dist/{history-3MOBX4MA.js → history-5NE46ZAH.js} +7 -7
- package/dist/hooks-installer-UN5JZLDQ.js +19 -0
- package/dist/index.js +395 -619
- package/dist/{init-6CH4HV5T.js → init-5FK3VKRT.js} +79 -13
- package/dist/job-HIDMAFW2.js +376 -0
- package/dist/jobs.memory-PLMMSFHB-VBECCTHN.js +33 -0
- package/dist/kiro-VMUHDFGK.js +153 -0
- package/dist/{launchd-LF2QMSKZ.js → launchd-6AWT54HR.js} +9 -17
- package/dist/mcp-PDUD7SGP.js +249 -0
- package/dist/mcp-installer-PQU3XOGO.js +259 -0
- package/dist/mcp-setup-OA7IB3H3.js +263 -0
- package/dist/{memory-Y6OZTXJ2.js → memory-ZNAEAK3B.js} +17 -17
- package/dist/{ollama-3XCUZMZT-FYKHW4TZ.js → ollama-3XCUZMZT-4JMH6B7P.js} +1 -1
- package/dist/{openai-E7G2YAHU-UYY4ZWON.js → openai-E7G2YAHU-T3HMBPH7.js} +2 -2
- package/dist/portal-JYWVHXDU.js +210 -0
- package/dist/prd-Q4J5NVAR.js +408 -0
- package/dist/repos-WWZXNN3P.js +271 -0
- package/dist/review-integration-5WHEJU2A.js +14 -0
- package/dist/{rules-E427DKYJ.js → rules-Y4VSOY5Y.js} +3 -3
- package/dist/run-VPNXEIBY.js +687 -0
- package/dist/server-COL4AXKU-P7S7NNF6.js +11 -0
- package/dist/server-KKSETHDV-XSSLEENT.js +20 -0
- package/dist/{skills-CX73O3IV.js → skills-QEYU2N27.js} +4 -2
- package/dist/start-JYOEL7AJ.js +303 -0
- package/dist/{status-4DFHDJMN.js → status-BHQYYGAL.js} +2 -2
- package/dist/{templates-U7T6MARD.js → templates-CBRUJ66V.js} +4 -3
- package/dist/tui-DP7736EX.js +61 -0
- package/dist/ulpi-5EN6JCAS-LFE3WSL4.js +10 -0
- package/dist/{uninstall-6SW35IK4.js → uninstall-ICUV6DDV.js} +3 -3
- package/dist/{update-M6IBJNYP.js → update-7ZMAYRBH.js} +3 -3
- package/dist/{version-checker-Q6YTYAGP.js → version-checker-4ZFMZA7Y.js} +2 -2
- package/package.json +39 -31
- package/dist/chunk-2MZER6ND.js +0 -415
- package/dist/chunk-2VYFVYJL.js +0 -4273
- package/dist/chunk-6OCEY7JY.js +0 -422
- package/dist/chunk-7LXY5UVC.js +0 -330
- package/dist/chunk-B55DDP24.js +0 -136
- package/dist/chunk-JWUUVXIV.js +0 -13694
- package/dist/chunk-MIAQVCFW.js +0 -39
- package/dist/chunk-YM2HV4IA.js +0 -505
- package/dist/ci-STSL2LSP.js +0 -370
- package/dist/mcp-installer-NQCGKQ23.js +0 -124
- package/dist/projects-ATHDD3D6.js +0 -271
- package/dist/review-ADUPV3PN.js +0 -152
- package/dist/server-USLHY6GH-AEOJC5ST.js +0 -18
- package/dist/server-X5P6WH2M-7K2RY34N.js +0 -11
- package/dist/skills/ulpi-generate-guardian/SKILL.md +0 -750
- package/dist/skills/ulpi-generate-guardian/references/framework-rules.md +0 -849
- package/dist/skills/ulpi-generate-guardian/references/language-rules.md +0 -591
- package/dist/ui-OWXZ3YSR.js +0 -167
- package/dist/ui.html +0 -698
|
@@ -0,0 +1,2541 @@
|
|
|
1
|
+
import {
|
|
2
|
+
DEFAULT_ENGINE_CONFIG,
|
|
3
|
+
ExecutionEngine
|
|
4
|
+
} from "./chunk-EJ7TW77N.js";
|
|
5
|
+
import "./chunk-S6ANCSYO.js";
|
|
6
|
+
import "./chunk-76D3BYJD.js";
|
|
7
|
+
import "./chunk-IZPJHSPX.js";
|
|
8
|
+
import "./chunk-5MI5GIXM.js";
|
|
9
|
+
import "./chunk-SEU7WWNQ.js";
|
|
10
|
+
import "./chunk-C7CLUQI6.js";
|
|
11
|
+
import "./chunk-KIKPIH6N.js";
|
|
12
|
+
import "./chunk-4VNS5WPM.js";
|
|
13
|
+
|
|
14
|
+
// ../../packages/parallel-engine/dist/index.js
|
|
15
|
+
import { execFileSync } from "child_process";
|
|
16
|
+
import * as fs from "fs";
|
|
17
|
+
import * as path from "path";
|
|
18
|
+
import { execFileSync as execFileSync2 } from "child_process";
|
|
19
|
+
import { readFileSync, writeFileSync } from "fs";
|
|
20
|
+
import { execFileSync as execFileSync3 } from "child_process";
|
|
21
|
+
import * as fs2 from "fs";
|
|
22
|
+
import * as path2 from "path";
|
|
23
|
+
import * as fs3 from "fs";
|
|
24
|
+
import * as path3 from "path";
|
|
25
|
+
import { execFileSync as execFileSync4 } from "child_process";
|
|
26
|
+
import { execFileSync as execFileSync5 } from "child_process";
|
|
27
|
+
// Baseline configuration for the parallel execution engine.
// Callers typically spread this object and override individual fields.
var DEFAULT_PARALLEL_CONFIG = {
  maxWorkers: 3, // concurrent worker worktrees
  cwd: process.cwd(), // NOTE: captured once at module load time
  maxIterationsPerWorker: 10,
  aiConflictResolution: true,
  maxRequeueCount: 1,
  minFreeDiskSpace: 500 * 1024 * 1024, // 500 MB free required before creating a worktree
  enableValidation: true,
  validationMaxIterations: 15
};
|
|
37
|
+
/**
 * Check whether a task list forms a valid DAG (no dependency cycles).
 *
 * @param tasks - Tasks with optional `dependsOn` / `blocks` edges
 * @returns `{ valid, cyclicTaskIds }` — `cyclicTaskIds` lists every task id
 *   that could not be topologically ordered (i.e. is in or behind a cycle)
 */
function validateDAG(tasks) {
  const byId = new Map(tasks.map((t) => [t.id, t]));
  const graph = buildGraph(tasks, byId);
  const { cyclicIds } = topologicalSort(graph);
  return {
    valid: cyclicIds.size === 0,
    cyclicTaskIds: [...cyclicIds]
  };
}
|
|
49
|
+
/**
 * Return task ids in dependency order (dependencies before dependents).
 *
 * @param tasks - Tasks with optional `dependsOn` / `blocks` edges
 * @returns Task ids in a valid execution order
 * @throws If the dependency graph contains a cycle
 */
function topologicalSortTasks(tasks) {
  const byId = new Map(tasks.map((t) => [t.id, t]));
  const graph = buildGraph(tasks, byId);
  const { order, cyclicIds } = topologicalSort(graph);
  if (cyclicIds.size > 0) {
    throw new Error(
      `Dependency cycle detected involving tasks: ${[...cyclicIds].join(", ")}`
    );
  }
  return order;
}
|
|
63
|
+
/**
 * Convenience wrapper: compute only the depth-based parallel groups
 * from a full graph analysis.
 */
function buildParallelGroups(tasks) {
  return analyzeTaskGraph(tasks).groups;
}
|
|
67
|
+
/**
 * Analyze a task list's dependency graph.
 *
 * Produces parallel groups (tasks at equal depth can run concurrently),
 * a flat topological order, cycle diagnostics, and warnings for pairs of
 * tasks that declare they touch the same file (`metadata.affects`) but
 * have no ordering between them.
 *
 * @param tasks - Tasks with optional `dependsOn`, `blocks`, `metadata.affects`
 * @returns `{ groups, order, warnings, nodes, cyclicTaskIds, maxParallelism }`
 */
function analyzeTaskGraph(tasks) {
  const byId = new Map(tasks.map((t) => [t.id, t]));
  const nodes = buildGraph(tasks, byId);
  const { depths, order, cyclicIds } = topologicalSort(nodes);
  // Annotate nodes in place with cycle membership and scheduling depth.
  for (const id of cyclicIds) {
    const n = nodes.get(id);
    if (n) n.inCycle = true;
  }
  for (const [id, d] of depths) {
    const n = nodes.get(id);
    if (n) n.depth = d;
  }
  const groups = groupByDepth(nodes, depths, cyclicIds);
  const warnings = [];
  if (cyclicIds.size > 0) {
    warnings.push(
      `Dependency cycle detected involving ${cyclicIds.size} task(s): ${[...cyclicIds].join(", ")}. These tasks cannot be scheduled.`
    );
  }
  // Map each declared file to the tasks that claim to affect it.
  const affectedFiles = new Map();
  for (const task of tasks) {
    const affects = task.metadata?.affects;
    if (!Array.isArray(affects)) continue;
    for (const file of affects) {
      if (typeof file !== "string") continue;
      const list = affectedFiles.get(file) ?? [];
      list.push(task.id);
      affectedFiles.set(file, list);
    }
  }
  // Warn on every unordered pair of tasks sharing a file.
  for (const [file, ids] of affectedFiles) {
    if (ids.length < 2) continue;
    for (let i = 0; i < ids.length; i++) {
      for (let j = i + 1; j < ids.length; j++) {
        const a = ids[i];
        const b = ids[j];
        const na = nodes.get(a);
        const nb = nodes.get(b);
        if (na && nb && !na.dependencies.includes(b) && !nb.dependencies.includes(a)) {
          warnings.push(
            `Tasks "${a}" and "${b}" both affect "${file}" but have no explicit dependency. Consider adding a dependency to avoid merge conflicts.`
          );
        }
      }
    }
  }
  const maxParallelism = groups.length > 0 ? Math.max(...groups.map((g) => g.tasks.length)) : 0;
  return {
    groups,
    order,
    warnings,
    nodes,
    cyclicTaskIds: [...cyclicIds],
    maxParallelism
  };
}
|
|
133
|
+
/**
 * Build the dependency graph for a task list.
 *
 * Edges come from two sources: `task.dependsOn` (this task waits for the
 * listed tasks) and `task.blocks` (the listed tasks wait for this one —
 * a reverse edge). Edges referencing unknown task ids are ignored, and
 * duplicates are not inserted twice.
 *
 * @param tasks - Task list
 * @param taskMap - Map of task id -> task (membership check for edges)
 * @returns Map of task id -> node `{ task, dependencies, dependents, depth, inCycle }`
 */
function buildGraph(tasks, taskMap) {
  const graph = /* @__PURE__ */ new Map();
  for (const t of tasks) {
    graph.set(t.id, {
      task: t,
      dependencies: [],
      dependents: [],
      depth: 0,
      inCycle: false
    });
  }
  for (const t of tasks) {
    const self = graph.get(t.id);
    if (!self) continue;
    // Forward edges: this task depends on each listed predecessor.
    for (const depId of t.dependsOn || []) {
      if (!taskMap.has(depId) || self.dependencies.includes(depId)) continue;
      self.dependencies.push(depId);
      const dep = graph.get(depId);
      if (dep && !dep.dependents.includes(t.id)) {
        dep.dependents.push(t.id);
      }
    }
    // Reverse edges: each blocked task gains this task as a dependency.
    for (const blockedId of t.blocks || []) {
      if (!taskMap.has(blockedId)) continue;
      const blocked = graph.get(blockedId);
      if (blocked && !blocked.dependencies.includes(t.id)) {
        blocked.dependencies.push(t.id);
      }
      if (!self.dependents.includes(blockedId)) {
        self.dependents.push(blockedId);
      }
    }
  }
  return graph;
}
|
|
174
|
+
/**
 * Level-by-level Kahn's algorithm over a node graph.
 *
 * All nodes in the same level share a depth; within a level, ids are
 * ordered by ascending `task.priority` (missing priority defaults to 2).
 * Nodes that never reach in-degree zero are reported as cyclic.
 *
 * @param nodes - Map of id -> node with `dependencies` / `dependents`
 * @returns `{ depths, order, cyclicIds }`
 */
function topologicalSort(nodes) {
  const inDegree = /* @__PURE__ */ new Map();
  for (const [id, node] of nodes) {
    inDegree.set(id, node.dependencies.length);
  }
  let frontier = [...inDegree].filter(([, deg]) => deg === 0).map(([id]) => id);
  const depths = /* @__PURE__ */ new Map();
  const order = [];
  const priorityOf = (id) => nodes.get(id)?.task.priority ?? 2;
  let level = 0;
  let seen = 0;
  while (frontier.length > 0) {
    frontier.sort((a, b) => priorityOf(a) - priorityOf(b));
    const next = [];
    for (const id of frontier) {
      depths.set(id, level);
      order.push(id);
      seen++;
      const node = nodes.get(id);
      if (!node) continue;
      for (const dependentId of node.dependents) {
        const remaining = (inDegree.get(dependentId) ?? 0) - 1;
        inDegree.set(dependentId, remaining);
        if (remaining === 0) {
          next.push(dependentId);
        }
      }
    }
    frontier = next;
    level++;
  }
  // Anything never assigned a depth sits in (or behind) a cycle.
  const cyclicIds = /* @__PURE__ */ new Set();
  if (seen < nodes.size) {
    for (const id of nodes.keys()) {
      if (!depths.has(id)) {
        cyclicIds.add(id);
      }
    }
  }
  return { depths, order, cyclicIds };
}
|
|
224
|
+
/**
 * Bucket schedulable tasks by topological depth.
 *
 * Cyclic tasks are excluded. Groups are returned in ascending depth,
 * and tasks within each group are sorted by ascending priority.
 *
 * @param nodes - Map of id -> graph node
 * @param depths - Map of id -> assigned depth
 * @param cyclicIds - Ids that could not be scheduled
 * @returns Array of `{ depth, tasks }`
 */
function groupByDepth(nodes, depths, cyclicIds) {
  const buckets = /* @__PURE__ */ new Map();
  for (const [id, d] of depths) {
    if (cyclicIds.has(id)) continue;
    const node = nodes.get(id);
    if (!node) continue;
    if (!buckets.has(d)) {
      buckets.set(d, []);
    }
    buckets.get(d).push(node.task);
  }
  return [...buckets.keys()]
    .sort((a, b) => a - b)
    .filter((d) => (buckets.get(d) ?? []).length > 0)
    .map((d) => {
      const grouped = buckets.get(d);
      grouped.sort((x, y) => x.priority - y.priority);
      return { depth: d, tasks: grouped };
    });
}
|
|
250
|
+
// 500 MB — minimum free disk space required before creating a new worktree.
var DEFAULT_MIN_FREE_DISK_SPACE = 500 * 1024 * 1024;
// Hard cap on simultaneously active worktrees per WorktreeManager.
var DEFAULT_MAX_WORKTREES = 8;
|
|
252
|
+
/**
 * Turn an arbitrary task id into a string usable as a git branch segment.
 *
 * Replaces characters git rejects in ref names with "-", collapses repeated
 * separators, trims leading/trailing "." "/" "-", and strips a trailing
 * ".lock". If nothing survives, falls back to an alphanumeric slug derived
 * from the base64 of the original id (or "task" as a last resort).
 *
 * @param taskId - Raw task identifier
 * @returns Sanitized branch-name segment (never empty)
 */
function sanitizeBranchName(taskId) {
  let name = taskId
    .replace(/[\s~^:?*[\]\\@{]/g, "-")
    .replace(/\/+/g, "/")
    .replace(/-+/g, "-")
    .replace(/\.{2,}/g, ".")
    .replace(/^[./-]+|[./-]+$/g, "");
  if (name.endsWith(".lock")) {
    name = name.slice(0, -5);
  }
  if (!name) {
    // Nothing usable survived; derive a deterministic fallback.
    name = Buffer.from(taskId).toString("base64").replace(/[^a-zA-Z0-9]/g, "").slice(0, 8) || "task";
  }
  return name;
}
|
|
266
|
+
/**
 * Compute where worktrees live for a project: a sibling of the repo,
 * `<parent-of-cwd>/.ulpi-worktrees/<project-dir-name>` — deliberately
 * outside the repository itself.
 */
function getWorktreeBaseDir(cwd) {
  return path.join(path.dirname(cwd), ".ulpi-worktrees", path.basename(cwd));
}
|
|
271
|
+
/**
 * Manages isolated git worktrees for parallel workers.
 *
 * Each worker gets its own worktree directory under a sibling of the repo
 * (see getWorktreeBaseDir), checked out on a dedicated
 * `ulpi-parallel/<sanitized-task-id>` branch created from HEAD. Worktree
 * directories are removed when a worker finishes, but branches are kept
 * until cleanup() so their commits can be merged first.
 *
 * NOTE(review): all git calls shell out synchronously via execFileSync.
 */
var WorktreeManager = class {
  // Main repository root (the directory worktrees are created FROM).
  cwd;
  // Base directory holding all worktrees for this project.
  worktreeBaseDir;
  // Maximum simultaneously active worktrees.
  maxWorktrees;
  // Minimum free bytes required before creating a new worktree.
  minFreeDiskSpace;
  // worktree id ("worker-<workerId>") -> info record.
  worktrees = /* @__PURE__ */ new Map();
  constructor(config) {
    this.cwd = config.cwd;
    this.worktreeBaseDir = getWorktreeBaseDir(config.cwd);
    this.maxWorktrees = config.maxWorktrees ?? DEFAULT_MAX_WORKTREES;
    this.minFreeDiskSpace = config.minFreeDiskSpace ?? DEFAULT_MIN_FREE_DISK_SPACE;
  }
  /**
   * Create a worktree for a worker.
   * Creates a new git worktree with a dedicated branch from current HEAD.
   *
   * @param workerId - Identifier for the worker using this worktree
   * @param taskId - Task that will be executed in this worktree
   * @returns Information about the created worktree
   * @throws If worktree creation fails or disk space is insufficient
   */
  async create(workerId, taskId) {
    const activeCount = this.getActiveCount();
    if (activeCount >= this.maxWorktrees) {
      throw new Error(
        `Maximum worktrees reached (${this.maxWorktrees}). Release existing worktrees before creating new ones.`
      );
    }
    await this.checkDiskSpace();
    const worktreeId = `worker-${workerId}`;
    const sanitizedTaskId = sanitizeBranchName(taskId);
    const branchName = `ulpi-parallel/${sanitizedTaskId}`;
    const worktreePath = path.join(this.worktreeBaseDir, worktreeId);
    fs.mkdirSync(this.worktreeBaseDir, { recursive: true });
    // Remove any leftover worktree/branch from a previous run before reuse.
    await this.cleanupStale(worktreePath, branchName);
    this.git(["worktree", "add", "-b", branchName, worktreePath, "HEAD"]);
    this.copyConfig(worktreePath);
    const info = {
      id: worktreeId,
      path: worktreePath,
      branch: branchName,
      workerId,
      taskId,
      active: true,
      createdAt: (/* @__PURE__ */ new Date()).toISOString()
    };
    this.worktrees.set(worktreeId, info);
    return info;
  }
  /**
   * Remove a specific worktree but KEEP the branch for merging.
   * The branch is cleaned up later by `cleanup()` at session end.
   *
   * @param workerId - Worker ID whose worktree to remove
   */
  async remove(workerId) {
    const worktreeId = `worker-${workerId}`;
    const info = this.worktrees.get(worktreeId);
    if (!info) return;
    await this.removeWorktreeDir(info);
    info.active = false;
  }
  /**
   * List all active (tracked) worktrees.
   */
  listActive() {
    return [...this.worktrees.values()].filter((w) => w.active);
  }
  /**
   * Get information about all managed worktrees.
   */
  getAll() {
    return [...this.worktrees.values()];
  }
  /**
   * Get information about a specific worktree.
   */
  get(workerId) {
    return this.worktrees.get(`worker-${workerId}`);
  }
  /**
   * Remove ALL worktrees for this project.
   * Called at session end or during crash recovery cleanup.
   * Errors are collected and reported at the end so one failure does not
   * stop the remaining teardown.
   */
  async cleanup() {
    const errors = [];
    for (const [id, info] of this.worktrees) {
      try {
        await this.removeWorktree(info);
      } catch (err) {
        errors.push(`Failed to clean up ${id}: ${err}`);
      }
    }
    this.worktrees.clear();
    // Best effort: remove the (now empty) base directories so no residue remains.
    try {
      const entries = fs.readdirSync(this.worktreeBaseDir);
      if (entries.length === 0) {
        fs.rmdirSync(this.worktreeBaseDir);
        const parentDir = path.dirname(this.worktreeBaseDir);
        const parentEntries = fs.readdirSync(parentDir);
        if (parentEntries.length === 0) {
          fs.rmdirSync(parentDir);
        }
      }
    } catch {
      // Base dir may already be gone or non-empty; ignore.
    }
    try {
      this.git(["worktree", "prune"]);
    } catch {
      // Prune is best-effort.
    }
    // Delete any remaining ulpi-parallel/* branches.
    try {
      const branches = this.git(["branch", "--list", "ulpi-parallel/*"]);
      for (const line of branches.split("\n")) {
        const name = line.trim().replace(/^\*\s*/, "");
        if (name) {
          try {
            this.git(["branch", "-D", name]);
          } catch {
            // Per-branch deletion failure is ignored.
          }
        }
      }
    } catch {
      // Branch listing failed; ignore.
    }
    if (errors.length > 0) {
      throw new Error(
        `Worktree cleanup had ${errors.length} error(s):
${errors.join("\n")}`
      );
    }
  }
  /**
   * Check if there is enough disk space to create a worktree.
   * Uses Node.js fs.statfs() first, then falls back to `df` on failure.
   * @throws If available space is below the minimum threshold
   */
  async checkDiskSpace() {
    const minimumRequired = this.minFreeDiskSpace;
    try {
      let available = await this.getAvailableFromStatFs();
      if (available === null || available <= 0) {
        available = this.getAvailableFromDf();
      }
      if (available === null) {
        // Free space could not be determined; skip the check rather than block.
        return;
      }
      if (available < minimumRequired) {
        const availMB = Math.round(available / (1024 * 1024));
        const reqMB = Math.round(minimumRequired / (1024 * 1024));
        throw new Error(
          `Insufficient disk space for worktree: ${availMB}MB available, ${reqMB}MB required`
        );
      }
    } catch (err) {
      // Only the disk-space error propagates; probe failures are swallowed.
      if (err instanceof Error && err.message.includes("Insufficient disk space")) {
        throw err;
      }
    }
  }
  // ─── Private Methods ───
  // Number of worktrees currently marked active.
  getActiveCount() {
    let count = 0;
    for (const info of this.worktrees.values()) {
      if (info.active) count++;
    }
    return count;
  }
  /**
   * Remove the worktree directory only, keeping the branch alive for merging.
   */
  async removeWorktreeDir(info) {
    try {
      this.git(["worktree", "remove", "--force", info.path]);
    } catch {
      // git refused; delete the directory manually, then prune metadata.
      if (fs.existsSync(info.path)) {
        fs.rmSync(info.path, { recursive: true, force: true });
      }
      try {
        this.git(["worktree", "prune"]);
      } catch {
        // Prune is best-effort.
      }
    }
  }
  /**
   * Remove the worktree directory AND delete the branch.
   * Used by cleanup() for full teardown.
   */
  async removeWorktree(info) {
    await this.removeWorktreeDir(info);
    try {
      this.git(["branch", "-D", info.branch]);
    } catch {
      // Branch may already be gone; ignore.
    }
  }
  // Remove leftovers (directory and branch) from a previous run so
  // `worktree add -b` does not fail on an existing path or branch.
  async cleanupStale(worktreePath, branchName) {
    if (fs.existsSync(worktreePath)) {
      try {
        this.git(["worktree", "remove", "--force", worktreePath]);
      } catch {
        fs.rmSync(worktreePath, { recursive: true, force: true });
        this.git(["worktree", "prune"]);
      }
    }
    try {
      this.git(["branch", "-D", branchName]);
    } catch {
      // Branch did not exist; ignore.
    }
  }
  /**
   * Copy .ulpi configuration into a worktree so the agent has project context.
   * Copies guards.yml plus any top-level .yml/.yaml/.json files (no overwrite).
   * Best-effort: failures are silently ignored.
   */
  copyConfig(worktreePath) {
    const configDir = path.join(this.cwd, ".ulpi");
    const targetDir = path.join(worktreePath, ".ulpi");
    if (!fs.existsSync(configDir)) return;
    try {
      fs.mkdirSync(targetDir, { recursive: true });
      const guardsFile = path.join(configDir, "guards.yml");
      if (fs.existsSync(guardsFile)) {
        fs.copyFileSync(guardsFile, path.join(targetDir, "guards.yml"));
      }
      const entries = fs.readdirSync(configDir);
      for (const entry of entries) {
        if (entry.endsWith(".yml") || entry.endsWith(".yaml") || entry.endsWith(".json")) {
          const srcPath = path.join(configDir, entry);
          const dstPath = path.join(targetDir, entry);
          if (!fs.existsSync(dstPath)) {
            fs.copyFileSync(srcPath, dstPath);
          }
        }
      }
    } catch {
      // Config copy is best-effort; the worktree still works without it.
    }
  }
  /**
   * Read available bytes from fs.statfs.
   * Returns null if unavailable or unreadable.
   */
  async getAvailableFromStatFs() {
    try {
      const stats = await fs.promises.statfs(this.cwd);
      const available = Number(stats.bavail) * Number(stats.bsize);
      if (!Number.isFinite(available)) {
        return null;
      }
      return available;
    } catch {
      return null;
    }
  }
  /**
   * Read available bytes from `df -k <path>`.
   * Returns null if parsing fails or output is unavailable.
   */
  getAvailableFromDf() {
    try {
      const output = execFileSync("df", ["-k", this.cwd], {
        encoding: "utf-8",
        timeout: 5e3
      });
      return this.parseDfAvailableBytes(output);
    } catch {
      return null;
    }
  }
  /**
   * Parse `df` output and return available bytes.
   * Finds the "Avail"/"Available" header column, then reads that column
   * from the last non-empty output line.
   */
  parseDfAvailableBytes(output) {
    const lines = output.trim().split("\n").filter((line) => line.trim().length > 0);
    if (lines.length < 2) return null;
    const header = lines[0]?.toLowerCase();
    if (!header) return null;
    const normalizedHeader = header.trim().split(/\s+/).map((value) => value.replace("%", "").trim());
    const availIndex = normalizedHeader.findIndex(
      (h) => h === "avail" || h === "available"
    );
    if (availIndex < 0) return null;
    const dataLine = lines.at(-1);
    if (!dataLine) return null;
    const values = dataLine.trim().split(/\s+/);
    if (values.length <= availIndex) return null;
    const availableKb = Number.parseInt(values[availIndex] ?? "", 10);
    if (Number.isNaN(availableKb) || !Number.isFinite(availableKb) || availableKb < 0) {
      return null;
    }
    // df -k reports kilobytes; convert to bytes.
    return availableKb * 1024;
  }
  /**
   * Execute a git command in the main repository.
   * Uses execFileSync with argument array to prevent shell injection.
   */
  git(args) {
    return execFileSync("git", ["-C", this.cwd, ...args], {
      encoding: "utf-8",
      timeout: 3e4,
      stdio: ["pipe", "pipe", "pipe"]
    });
  }
};
|
|
570
|
+
/**
 * Validate that a string is safe to pass to git as a ref name.
 *
 * Enforces a subset of git's ref-name rules (no spaces, no "..", no
 * control characters, no leading/trailing ".", no "//", no ".lock"
 * suffix, no "~ ^ : ? * [ ] \" characters, no "@{" sequence, not "@").
 *
 * @param ref - Candidate ref name
 * @param context - Label used in error messages to identify the caller
 * @throws Error describing the first rule violated; returns nothing on success
 */
function validateGitRef(ref, context) {
  const fail = (reason) => {
    throw new Error(`Invalid git ref for ${context}: ${reason}`);
  };
  if (!ref || ref.trim() === "") fail("ref is empty");
  if (ref === "@") fail("is '@'");
  if (ref.includes(" ")) fail("contains spaces");
  if (ref.includes("..")) fail("contains '..'");
  for (let i = 0; i < ref.length; i++) {
    const code = ref.charCodeAt(i);
    if (code < 32 || code === 127) {
      fail("contains control characters");
    }
  }
  if (ref.startsWith(".") || ref.includes("/.")) fail("starts with '.'");
  if (ref.endsWith(".")) fail("ends with '.'");
  if (ref.includes("//")) fail("contains consecutive slashes");
  if (ref.endsWith(".lock")) fail("ends with '.lock'");
  if (ref.endsWith("/")) fail("ends with '/'");
  if (/[~^:?*[\]\\]/.test(ref)) fail("contains invalid characters");
  if (ref.includes("@{")) fail("contains '@{' sequence");
}
|
|
619
|
+
var MergeEngine = class {
|
|
620
|
+
cwd;
|
|
621
|
+
queue = [];
|
|
622
|
+
processing = false;
|
|
623
|
+
sessionStartTag = null;
|
|
624
|
+
listeners = [];
|
|
625
|
+
/** Saved tracker state files (path -> content) for save/restore around merges */
|
|
626
|
+
savedTrackerState = null;
|
|
627
|
+
constructor(cwd) {
|
|
628
|
+
this.cwd = cwd;
|
|
629
|
+
}
|
|
630
|
+
/**
|
|
631
|
+
* Register an event listener.
|
|
632
|
+
* @returns Unsubscribe function
|
|
633
|
+
*/
|
|
634
|
+
on(listener) {
|
|
635
|
+
this.listeners.push(listener);
|
|
636
|
+
return () => {
|
|
637
|
+
const idx = this.listeners.indexOf(listener);
|
|
638
|
+
if (idx >= 0) this.listeners.splice(idx, 1);
|
|
639
|
+
};
|
|
640
|
+
}
|
|
641
|
+
/**
|
|
642
|
+
* Create a session-level backup tag before any merges begin.
|
|
643
|
+
* Used for full rollback of the entire parallel session.
|
|
644
|
+
*/
|
|
645
|
+
createSessionBackup(sessionId) {
|
|
646
|
+
const tag = `ulpi/session-start/${sessionId}`;
|
|
647
|
+
validateGitRef(tag, "sessionBackupTag");
|
|
648
|
+
this.git(["tag", tag, "HEAD"]);
|
|
649
|
+
this.sessionStartTag = tag;
|
|
650
|
+
return tag;
|
|
651
|
+
}
|
|
652
|
+
/**
|
|
653
|
+
* Get the session start tag for full rollback.
|
|
654
|
+
*/
|
|
655
|
+
getSessionStartTag() {
|
|
656
|
+
return this.sessionStartTag;
|
|
657
|
+
}
|
|
658
|
+
/**
|
|
659
|
+
* Enqueue a completed worker's branch for merging.
|
|
660
|
+
*
|
|
661
|
+
* @param sourceBranch - Branch name to merge from
|
|
662
|
+
* @param targetBranch - Branch name to merge into (typically current branch)
|
|
663
|
+
* @param taskId - Task ID associated with this merge
|
|
664
|
+
* @returns The created merge operation
|
|
665
|
+
*/
|
|
666
|
+
enqueue(sourceBranch, targetBranch, taskId) {
|
|
667
|
+
const operation = {
|
|
668
|
+
sourceBranch,
|
|
669
|
+
targetBranch,
|
|
670
|
+
taskId,
|
|
671
|
+
status: "queued",
|
|
672
|
+
backupTag: `ulpi/backup/${Date.now()}`,
|
|
673
|
+
commitMessage: `feat(parallel): merge task ${taskId}`
|
|
674
|
+
};
|
|
675
|
+
this.queue.push(operation);
|
|
676
|
+
return operation;
|
|
677
|
+
}
|
|
678
|
+
/**
|
|
679
|
+
* Process all queued merges sequentially.
|
|
680
|
+
* @returns Array of merge results
|
|
681
|
+
*/
|
|
682
|
+
async processAll() {
|
|
683
|
+
const results = [];
|
|
684
|
+
while (true) {
|
|
685
|
+
const result = await this.processNext();
|
|
686
|
+
if (!result) break;
|
|
687
|
+
results.push(result);
|
|
688
|
+
}
|
|
689
|
+
return results;
|
|
690
|
+
}
|
|
691
|
+
/**
|
|
692
|
+
* Process the next merge in the queue.
|
|
693
|
+
* Returns the merge result, or null if the queue is empty or already processing.
|
|
694
|
+
*/
|
|
695
|
+
async processNext() {
|
|
696
|
+
if (this.processing) return null;
|
|
697
|
+
const operation = this.queue.find((op) => op.status === "queued");
|
|
698
|
+
if (!operation) return null;
|
|
699
|
+
this.processing = true;
|
|
700
|
+
try {
|
|
701
|
+
return await this.executeMerge(operation);
|
|
702
|
+
} finally {
|
|
703
|
+
this.processing = false;
|
|
704
|
+
}
|
|
705
|
+
}
|
|
706
|
+
/**
|
|
707
|
+
* Get the current merge queue.
|
|
708
|
+
*/
|
|
709
|
+
getQueue() {
|
|
710
|
+
return this.queue;
|
|
711
|
+
}
|
|
712
|
+
/**
|
|
713
|
+
* Get the number of pending merges.
|
|
714
|
+
*/
|
|
715
|
+
getPendingCount() {
|
|
716
|
+
return this.queue.filter((op) => op.status === "queued").length;
|
|
717
|
+
}
|
|
718
|
+
/**
|
|
719
|
+
* Rollback a specific merge operation using its backup tag.
|
|
720
|
+
*/
|
|
721
|
+
rollbackMerge(operation) {
|
|
722
|
+
validateGitRef(operation.backupTag, "backupTag");
|
|
723
|
+
this.git(["reset", "--hard", operation.backupTag]);
|
|
724
|
+
this.git(["clean", "-fd"]);
|
|
725
|
+
operation.status = "rolled-back";
|
|
726
|
+
}
|
|
727
|
+
/**
|
|
728
|
+
* Rollback all merges in this session to the session start point.
|
|
729
|
+
*/
|
|
730
|
+
rollbackSession() {
|
|
731
|
+
if (!this.sessionStartTag) {
|
|
732
|
+
throw new Error("No session start tag available for rollback");
|
|
733
|
+
}
|
|
734
|
+
validateGitRef(this.sessionStartTag, "sessionStartTag");
|
|
735
|
+
this.git(["reset", "--hard", this.sessionStartTag]);
|
|
736
|
+
this.git(["clean", "-fd"]);
|
|
737
|
+
for (const op of this.queue) {
|
|
738
|
+
if (op.status === "completed") {
|
|
739
|
+
op.status = "rolled-back";
|
|
740
|
+
}
|
|
741
|
+
}
|
|
742
|
+
}
|
|
743
|
+
/**
|
|
744
|
+
* Clean up backup tags created during this session.
|
|
745
|
+
*/
|
|
746
|
+
cleanupTags() {
|
|
747
|
+
for (const op of this.queue) {
|
|
748
|
+
try {
|
|
749
|
+
this.git(["tag", "-d", op.backupTag]);
|
|
750
|
+
} catch {
|
|
751
|
+
}
|
|
752
|
+
}
|
|
753
|
+
if (this.sessionStartTag) {
|
|
754
|
+
try {
|
|
755
|
+
this.git(["tag", "-d", this.sessionStartTag]);
|
|
756
|
+
} catch {
|
|
757
|
+
}
|
|
758
|
+
}
|
|
759
|
+
}
|
|
760
|
+
/**
|
|
761
|
+
* Save tracker state files before a merge operation.
|
|
762
|
+
* Prevents merge from overwriting tracker state with stale worktree copies.
|
|
763
|
+
*
|
|
764
|
+
* @param stateFiles - Array of absolute file paths to save
|
|
765
|
+
*/
|
|
766
|
+
saveTrackerState(stateFiles) {
|
|
767
|
+
const saved = /* @__PURE__ */ new Map();
|
|
768
|
+
for (const filePath of stateFiles) {
|
|
769
|
+
try {
|
|
770
|
+
const content = readFileSync(filePath, "utf-8");
|
|
771
|
+
saved.set(filePath, content);
|
|
772
|
+
} catch {
|
|
773
|
+
}
|
|
774
|
+
}
|
|
775
|
+
this.savedTrackerState = saved;
|
|
776
|
+
}
|
|
777
|
+
/**
|
|
778
|
+
* Restore tracker state files after a merge operation.
|
|
779
|
+
* Ensures tracker state is not overwritten by stale worktree versions.
|
|
780
|
+
*/
|
|
781
|
+
restoreTrackerState() {
|
|
782
|
+
if (!this.savedTrackerState) return;
|
|
783
|
+
for (const [filePath, content] of this.savedTrackerState) {
|
|
784
|
+
try {
|
|
785
|
+
writeFileSync(filePath, content, "utf-8");
|
|
786
|
+
} catch {
|
|
787
|
+
}
|
|
788
|
+
}
|
|
789
|
+
this.savedTrackerState = null;
|
|
790
|
+
}
|
|
791
|
+
// ─── Private Methods ───
|
|
792
|
+
/**
|
|
793
|
+
* Execute a single merge operation.
|
|
794
|
+
*/
|
|
795
|
+
async executeMerge(operation) {
|
|
796
|
+
operation.status = "in-progress";
|
|
797
|
+
const taskId = operation.taskId;
|
|
798
|
+
this.emit({
|
|
799
|
+
type: "merge-started",
|
|
800
|
+
taskId,
|
|
801
|
+
sourceBranch: operation.sourceBranch,
|
|
802
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
803
|
+
});
|
|
804
|
+
if (!this.branchHasCommits(operation.sourceBranch)) {
|
|
805
|
+
operation.status = "completed";
|
|
806
|
+
this.emit({
|
|
807
|
+
type: "merge-completed",
|
|
808
|
+
taskId,
|
|
809
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
810
|
+
});
|
|
811
|
+
return {
|
|
812
|
+
success: true,
|
|
813
|
+
strategy: "fast-forward",
|
|
814
|
+
filesChanged: 0
|
|
815
|
+
};
|
|
816
|
+
}
|
|
817
|
+
try {
|
|
818
|
+
validateGitRef(operation.backupTag, "backupTag");
|
|
819
|
+
this.git(["tag", operation.backupTag, "HEAD"]);
|
|
820
|
+
} catch (err) {
|
|
821
|
+
return this.failMerge(
|
|
822
|
+
operation,
|
|
823
|
+
`Failed to create backup tag: ${err}`
|
|
824
|
+
);
|
|
825
|
+
}
|
|
826
|
+
try {
|
|
827
|
+
validateGitRef(operation.sourceBranch, "sourceBranch");
|
|
828
|
+
this.git(["merge", "--ff-only", operation.sourceBranch]);
|
|
829
|
+
const filesChanged = this.getFilesChangedCount(operation.backupTag);
|
|
830
|
+
const commitSha = this.git(["rev-parse", "--short", "HEAD"]).trim();
|
|
831
|
+
return this.completeMerge(operation, "fast-forward", filesChanged, commitSha);
|
|
832
|
+
} catch {
|
|
833
|
+
}
|
|
834
|
+
try {
|
|
835
|
+
this.git([
|
|
836
|
+
"merge",
|
|
837
|
+
"--no-edit",
|
|
838
|
+
"-m",
|
|
839
|
+
operation.commitMessage,
|
|
840
|
+
operation.sourceBranch
|
|
841
|
+
]);
|
|
842
|
+
const commitSha = this.git(["rev-parse", "--short", "HEAD"]).trim();
|
|
843
|
+
const filesChanged = this.getFilesChangedCount(operation.backupTag);
|
|
844
|
+
return this.completeMerge(operation, "merge-commit", filesChanged, commitSha);
|
|
845
|
+
} catch {
|
|
846
|
+
}
|
|
847
|
+
const conflictedFiles = this.getConflictedFiles();
|
|
848
|
+
if (conflictedFiles.length > 0) {
|
|
849
|
+
operation.conflictFiles = conflictedFiles;
|
|
850
|
+
operation.status = "conflicted";
|
|
851
|
+
try {
|
|
852
|
+
this.git(["merge", "--abort"]);
|
|
853
|
+
} catch {
|
|
854
|
+
}
|
|
855
|
+
this.git(["reset", "--hard", operation.backupTag]);
|
|
856
|
+
this.git(["clean", "-fd"]);
|
|
857
|
+
const result = {
|
|
858
|
+
success: false,
|
|
859
|
+
conflictFiles: conflictedFiles,
|
|
860
|
+
strategy: "merge-commit",
|
|
861
|
+
error: `Merge conflicts in ${conflictedFiles.length} file(s): ${conflictedFiles.join(", ")}`
|
|
862
|
+
};
|
|
863
|
+
this.emit({
|
|
864
|
+
type: "merge-failed",
|
|
865
|
+
taskId,
|
|
866
|
+
error: result.error ?? "Unknown merge error",
|
|
867
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
868
|
+
});
|
|
869
|
+
return result;
|
|
870
|
+
}
|
|
871
|
+
try {
|
|
872
|
+
this.git(["reset", "--hard", operation.backupTag]);
|
|
873
|
+
this.git(["clean", "-fd"]);
|
|
874
|
+
} catch {
|
|
875
|
+
}
|
|
876
|
+
return this.failMerge(operation, "Merge failed for unknown reason");
|
|
877
|
+
}
|
|
878
|
+
/**
|
|
879
|
+
* Complete a successful merge operation.
|
|
880
|
+
*/
|
|
881
|
+
completeMerge(operation, strategy, filesChanged, commitSha) {
|
|
882
|
+
operation.status = "completed";
|
|
883
|
+
this.emit({
|
|
884
|
+
type: "merge-completed",
|
|
885
|
+
taskId: operation.taskId,
|
|
886
|
+
commitSha,
|
|
887
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
888
|
+
});
|
|
889
|
+
return {
|
|
890
|
+
success: true,
|
|
891
|
+
commitSha,
|
|
892
|
+
strategy,
|
|
893
|
+
filesChanged
|
|
894
|
+
};
|
|
895
|
+
}
|
|
896
|
+
/**
|
|
897
|
+
* Fail a merge operation.
|
|
898
|
+
*/
|
|
899
|
+
failMerge(operation, error) {
|
|
900
|
+
operation.status = "failed";
|
|
901
|
+
this.emit({
|
|
902
|
+
type: "merge-failed",
|
|
903
|
+
taskId: operation.taskId,
|
|
904
|
+
error,
|
|
905
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
906
|
+
});
|
|
907
|
+
return {
|
|
908
|
+
success: false,
|
|
909
|
+
error
|
|
910
|
+
};
|
|
911
|
+
}
|
|
912
|
+
/**
|
|
913
|
+
* Check if a branch has commits ahead of current HEAD.
|
|
914
|
+
*/
|
|
915
|
+
branchHasCommits(branchName) {
|
|
916
|
+
try {
|
|
917
|
+
validateGitRef(branchName, "branchName");
|
|
918
|
+
const output = this.git(["rev-list", "--count", `HEAD..${branchName}`]);
|
|
919
|
+
const count = parseInt(output.trim(), 10);
|
|
920
|
+
return count > 0;
|
|
921
|
+
} catch {
|
|
922
|
+
return false;
|
|
923
|
+
}
|
|
924
|
+
}
|
|
925
|
+
/**
|
|
926
|
+
* Get the number of files changed between a tag and current HEAD.
|
|
927
|
+
*/
|
|
928
|
+
getFilesChangedCount(fromTag) {
|
|
929
|
+
try {
|
|
930
|
+
validateGitRef(fromTag, "fromTag");
|
|
931
|
+
const output = this.git(["diff", "--name-only", fromTag, "HEAD"]);
|
|
932
|
+
return output.trim().split("\n").filter((l) => l.trim()).length;
|
|
933
|
+
} catch {
|
|
934
|
+
return 0;
|
|
935
|
+
}
|
|
936
|
+
}
|
|
937
|
+
/**
|
|
938
|
+
* Get list of conflicted files from git status.
|
|
939
|
+
*/
|
|
940
|
+
getConflictedFiles() {
|
|
941
|
+
try {
|
|
942
|
+
const output = this.git(["status", "--porcelain"]);
|
|
943
|
+
const conflicted = [];
|
|
944
|
+
for (const line of output.split("\n")) {
|
|
945
|
+
const status = line.substring(0, 2);
|
|
946
|
+
if (status === "UU" || status === "AA" || status === "DD" || status === "AU" || status === "UA" || status === "DU" || status === "UD") {
|
|
947
|
+
conflicted.push(line.substring(3).trim());
|
|
948
|
+
}
|
|
949
|
+
}
|
|
950
|
+
return conflicted;
|
|
951
|
+
} catch {
|
|
952
|
+
return [];
|
|
953
|
+
}
|
|
954
|
+
}
|
|
955
|
+
/**
|
|
956
|
+
* Emit a parallel event to all listeners.
|
|
957
|
+
*/
|
|
958
|
+
emit(event) {
|
|
959
|
+
for (const listener of this.listeners) {
|
|
960
|
+
try {
|
|
961
|
+
listener(event);
|
|
962
|
+
} catch {
|
|
963
|
+
}
|
|
964
|
+
}
|
|
965
|
+
}
|
|
966
|
+
/**
|
|
967
|
+
* Execute a git command in the main repository.
|
|
968
|
+
* Uses execFileSync with argument array to prevent shell injection.
|
|
969
|
+
*/
|
|
970
|
+
git(args) {
|
|
971
|
+
return execFileSync2("git", ["-C", this.cwd, ...args], {
|
|
972
|
+
encoding: "utf-8",
|
|
973
|
+
timeout: 3e4,
|
|
974
|
+
stdio: ["pipe", "pipe", "pipe"]
|
|
975
|
+
});
|
|
976
|
+
}
|
|
977
|
+
};
|
|
978
|
+
/**
 * Annotate parallel task groups with coupling warnings derived from the
 * project's dependency graph. Any failure (missing graph modules, no graph
 * on disk) degrades gracefully to the unmodified groups with no warnings.
 */
async function enhanceParallelGroups(groups, projectDir) {
  try {
    const { loadGraph, computeCoupling } = await import("./dist-4U5L2X2C.js");
    const { getCurrentBranch } = await import("./dist-NUXMDXZ3.js");
    const graph = loadGraph(projectDir, getCurrentBranch(projectDir));
    if (!graph) {
      return { groups, warnings: [] };
    }
    // Index coupling metrics by file path for O(1) lookups below.
    const couplingByFile = new Map();
    for (const entry of computeCoupling(graph)) {
      couplingByFile.set(entry.filePath, {
        afferentCoupling: entry.afferentCoupling,
        efferentCoupling: entry.efferentCoupling,
        instability: entry.instability
      });
    }
    // Adjacency: source file -> set of files it depends on.
    const dependsOn = new Map();
    for (const { source, target } of graph.edges) {
      const targets = dependsOn.get(source) ?? new Set();
      targets.add(target);
      dependsOn.set(source, targets);
    }
    const warnings = [];
    for (const group of groups) {
      if (group.tasks.length < 2) continue;
      // File references mentioned by each task's title/description.
      const filesByTask = new Map(
        group.tasks.map((task) => [
          task.id,
          extractFileReferences(`${task.title} ${task.description ?? ""}`)
        ])
      );
      const ids = group.tasks.map((t) => t.id);
      for (let i = 0; i < ids.length; i++) {
        const filesA = filesByTask.get(ids[i]) ?? new Set();
        for (let j = i + 1; j < ids.length; j++) {
          const filesB = filesByTask.get(ids[j]) ?? new Set();
          for (const fileA of filesA) {
            const depsA = dependsOn.get(fileA);
            if (!depsA) continue;
            for (const fileB of filesB) {
              const coupled = depsA.has(fileB) || dependsOn.get(fileB)?.has(fileA);
              if (!coupled) continue;
              const info = couplingByFile.get(fileA);
              // Only warn on unstable files: those are the risky overlaps.
              if (info && info.instability > 0.5) {
                warnings.push({
                  groupDepth: group.depth,
                  file: fileA,
                  coupledWith: [fileB],
                  instability: info.instability
                });
              }
            }
          }
        }
      }
    }
    return { groups, warnings };
  } catch {
    return { groups, warnings: [] };
  }
}
|
|
1043
|
+
/**
 * Build a markdown dependency-analysis section for a set of conflicting
 * files (PageRank, fan-in/out, instability). Returns "" when the graph
 * modules or the graph itself are unavailable.
 */
async function getConflictContext(files, projectDir) {
  try {
    const { loadGraph, computeCoupling } = await import("./dist-4U5L2X2C.js");
    const { loadPageRankMap } = await import("./dist-4U5L2X2C.js");
    const { getCurrentBranch } = await import("./dist-NUXMDXZ3.js");
    const branch = getCurrentBranch(projectDir);
    const graph = loadGraph(projectDir, branch);
    if (!graph) return "";
    const pageRanks = loadPageRankMap(projectDir, branch);
    // Index coupling metrics by file path.
    const couplingByFile = new Map();
    for (const entry of computeCoupling(graph)) {
      couplingByFile.set(entry.filePath, {
        afferentCoupling: entry.afferentCoupling,
        efferentCoupling: entry.efferentCoupling,
        instability: entry.instability
      });
    }
    const out = ["## Dependency Analysis for Conflicting Files", ""];
    for (const file of files) {
      out.push(`### ${file}`);
      const rank = pageRanks?.get(file);
      if (rank !== void 0) {
        const label = rank > 0.01 ? "high importance" : rank > 1e-3 ? "medium importance" : "low importance";
        out.push(`- **PageRank**: ${rank.toFixed(6)} (${label})`);
      }
      const info = couplingByFile.get(file);
      if (info) {
        out.push(`- **Fan-in** (files depending on this): ${info.afferentCoupling}`);
        out.push(`- **Fan-out** (files this depends on): ${info.efferentCoupling}`);
        const verdict = info.instability > 0.7 ? "unstable \u2014 easy to change" : info.instability < 0.3 ? "stable \u2014 changes here affect many" : "moderate";
        out.push(`- **Instability**: ${info.instability.toFixed(2)} (${verdict})`);
      }
      out.push("");
    }
    // When one file dwarfs the others architecturally, say so explicitly.
    if (pageRanks && files.length >= 2) {
      const ranked = files
        .map((f) => ({ file: f, rank: pageRanks.get(f) ?? 0 }))
        .sort((a, b) => b.rank - a.rank);
      if (ranked[0].rank > ranked[1].rank * 2) {
        out.push(
          `**Note**: ${ranked[0].file} has significantly higher architectural importance. Prefer preserving its interfaces when resolving conflicts.`
        );
      }
    }
    return out.join("\n");
  } catch {
    return "";
  }
}
|
|
1095
|
+
/**
 * Extract path-like file references (tokens with a slash and an extension)
 * from free text, skipping URL-ish tokens.
 * @param text - Text to scan (task titles/descriptions)
 * @returns Set of unique candidate file paths
 */
function extractFileReferences(text) {
  const files = new Set();
  // The trailing delimiter is a lookahead rather than a consuming group:
  // consuming it advanced lastIndex past the separating space, so of two
  // space-separated references only the first was ever matched.
  const pattern = /(?:^|\s)([\w./-]+\.\w{1,6})(?=\s|$|[,;:])/g;
  let match;
  while ((match = pattern.exec(text)) !== null) {
    const candidate = match[1];
    if (candidate.includes("/") && !candidate.startsWith("http")) {
      files.add(candidate);
    }
  }
  return files;
}
|
|
1107
|
+
var ConflictResolver = class {
  cwd;
  llmResolver = null;
  constructor(cwd) {
    this.cwd = cwd;
  }
  /**
   * Inject the LLM-backed resolution callback (set by the executor).
   */
  setLlmResolver(resolver) {
    this.llmResolver = resolver;
  }
  /**
   * Read the three merge stages for a conflicted file from the git index:
   * :1: (base), :2: (ours), :3: (theirs).
   * @param filePath - Path relative to the repo root
   * @returns ConflictInfo, or null when the stages cannot be read
   */
  extractConflict(filePath) {
    try {
      return {
        file: filePath,
        base: this.gitContent(`:1:${filePath}`),
        ours: this.gitContent(`:2:${filePath}`),
        theirs: this.gitContent(`:3:${filePath}`)
      };
    } catch {
      return null;
    }
  }
  /**
   * Cheap heuristics that resolve trivial conflicts without an LLM call:
   * identical sides, whitespace-only differences, import reordering.
   * @returns A ResolutionResult, or null to fall through to the LLM
   */
  tryFastPath(conflict) {
    const { base, ours, theirs } = conflict;
    if (ours === theirs) {
      return { resolved: true, content: ours, strategy: "fast-path-identical" };
    }
    const squeeze = (text) => text.replace(/\s+/g, " ").trim();
    if (squeeze(ours) === squeeze(theirs)) {
      return { resolved: true, content: ours, strategy: "fast-path-whitespace" };
    }
    if (isImportReorderOnly(base, ours, theirs)) {
      return { resolved: true, content: ours, strategy: "fast-path-import-reorder" };
    }
    return null;
  }
  /**
   * Resolve a conflict: fast-path heuristics first, then the configured
   * LLM resolver with an optionally-enriched prompt.
   */
  async resolve(conflict, taskContext) {
    const shortcut = this.tryFastPath(conflict);
    if (shortcut) {
      return shortcut;
    }
    let depgraphContext = "";
    try {
      depgraphContext = await getConflictContext([conflict.file], this.cwd);
    } catch {
      // Dependency context is optional; continue without it.
    }
    const prompt = buildConflictPrompt(conflict, taskContext, depgraphContext);
    if (!this.llmResolver) {
      return {
        resolved: false,
        content: prompt,
        strategy: "llm",
        error: "No LLM resolver configured. Prompt generated but not executed."
      };
    }
    try {
      const answer = await this.llmResolver(prompt);
      if (answer !== null) {
        return { resolved: true, content: answer, strategy: "llm" };
      }
      return {
        resolved: false,
        strategy: "failed",
        error: "LLM resolver returned null"
      };
    } catch (err) {
      return {
        resolved: false,
        strategy: "failed",
        error: `LLM resolver failed: ${err instanceof Error ? err.message : String(err)}`
      };
    }
  }
  /**
   * Write resolved content to the conflicted file and stage it.
   * @param filePath - Path relative to the repo root
   * @param content - Resolved file content
   */
  applyResolution(filePath, content) {
    const absPath = path2.resolve(this.cwd, filePath);
    fs2.writeFileSync(absPath, content, "utf-8");
    this.git(["add", filePath]);
  }
  /**
   * Resolve every conflicted file in order, applying each successful
   * resolution; stops at the first unresolvable file.
   * @returns One ResolutionResult per attempted file
   */
  async resolveAll(conflictFiles, taskContext) {
    const results = [];
    for (const filePath of conflictFiles) {
      const conflict = this.extractConflict(filePath);
      if (!conflict) {
        results.push({
          resolved: false,
          strategy: "failed",
          error: `Failed to extract conflict data for ${filePath}`
        });
        continue;
      }
      const result = await this.resolve(conflict, taskContext);
      if (result.resolved && result.content) {
        this.applyResolution(filePath, result.content);
      }
      results.push(result);
      if (!result.resolved) {
        break;
      }
    }
    return results;
  }
  // ─── Private Methods ───
  /**
   * Content of a git index stage; "" when the stage does not exist.
   */
  gitContent(ref) {
    try {
      return this.git(["show", ref]);
    } catch {
      return "";
    }
  }
  /**
   * Run a git command in the repo; execFileSync with an argument array
   * prevents shell injection. 60-second timeout.
   */
  git(args) {
    return execFileSync3("git", ["-C", this.cwd, ...args], {
      encoding: "utf-8",
      timeout: 6e4,
      stdio: ["pipe", "pipe", "pipe"]
    });
  }
};
|
|
1284
|
+
/**
 * Build the LLM prompt for resolving a merge conflict: task context,
 * optional dependency-graph analysis, and the three file versions.
 * The model is instructed to output only the resolved file content.
 */
function buildConflictPrompt(conflict, ctx, depgraphContext) {
  let taskLine;
  if (ctx) {
    taskLine = `Task: ${ctx.taskTitle} (${ctx.taskId})`;
  } else {
    taskLine = "Task: (no context provided)";
  }
  let depgraphSection = "";
  if (depgraphContext) {
    depgraphSection = `
${depgraphContext}
`;
  }
  return `You are resolving a git merge conflict. Output ONLY the resolved file content.

## Context
File: ${conflict.file}
${taskLine}
${depgraphSection}
## Base Version (common ancestor)
\`\`\`
${conflict.base || "(file did not exist)"}
\`\`\`

## Main Branch (ours)
\`\`\`
${conflict.ours}
\`\`\`

## Worker Branch (theirs - implementing the task)
\`\`\`
${conflict.theirs}
\`\`\`

## Instructions
1. Merge intelligently - keep changes from both branches where they don't conflict
2. The worker was implementing the task - preserve their functional changes
3. Keep main branch updates (formatting, unrelated fixes) where possible
4. If in doubt, prefer the worker's changes since they implement the requested task

OUTPUT ONLY THE RESOLVED FILE CONTENT. No explanation, no markdown code fences.`;
}
|
|
1318
|
+
/**
 * True when the only difference between `ours` and `theirs` is the order
 * of their import-like lines (same multiset of imports, identical
 * non-import lines). All three versions must be non-blank.
 */
function isImportReorderOnly(base, ours, theirs) {
  if (!base.trim() || !ours.trim() || !theirs.trim()) {
    return false;
  }
  // A line "looks like an import" for JS/TS/Python-style sources.
  const isImportLine = (line) => {
    const t = line.trim();
    return (
      t.startsWith("import ") ||
      t.startsWith("from ") ||
      t.startsWith("require(") ||
      (t.startsWith("const ") && line.includes("require("))
    );
  };
  // Partition content into trimmed import lines and raw non-import lines.
  const partition = (content) => {
    const imports = [];
    const rest = [];
    for (const line of content.split("\n")) {
      if (isImportLine(line)) {
        imports.push(line.trim());
      } else {
        rest.push(line);
      }
    }
    return { imports, rest };
  };
  const a = partition(ours);
  const b = partition(theirs);
  if (a.rest.join("\n") !== b.rest.join("\n")) {
    return false;
  }
  if (a.imports.length !== b.imports.length) {
    return false;
  }
  const sortedA = [...a.imports].sort();
  const sortedB = [...b.imports].sort();
  return sortedA.every((line, i) => line === sortedB[i]);
}
|
|
1347
|
+
var ParallelWorker = class {
  id;
  config;
  status = "pending";
  startTime = 0;
  endTime = 0;
  commitCount = 0;
  error;
  listeners = [];
  constructor(config) {
    this.id = config.id;
    this.config = config;
  }
  /**
   * Register a parallel event listener.
   * @returns Unsubscribe function
   */
  on(listener) {
    this.listeners.push(listener);
    return () => {
      const position = this.listeners.indexOf(listener);
      if (position !== -1) {
        this.listeners.splice(position, 1);
      }
    };
  }
  /**
   * Run the worker: delegates to `executor` with the task, worktree path
   * and branch, tracks status/timing, and emits lifecycle events.
   * @param executor - Callback performing the actual task execution
   * @returns WorkerResult describing the outcome
   */
  async start(executor) {
    this.status = "running";
    this.startTime = Date.now();
    this.commitCount = 0;
    this.error = void 0;
    this.emit({
      type: "worker-started",
      workerId: this.id,
      taskId: this.config.task.id,
      timestamp: new Date().toISOString()
    });
    try {
      const outcome = await executor(
        this.config.task,
        this.config.worktreePath,
        this.config.branchName
      );
      this.commitCount = outcome.commitCount ?? 0;
      this.endTime = Date.now();
      if (outcome.success) {
        this.status = "completed";
        this.emit({
          type: "worker-completed",
          workerId: this.id,
          taskId: this.config.task.id,
          success: true,
          timestamp: new Date().toISOString()
        });
        return this.buildResult(true, void 0);
      }
      this.status = "failed";
      this.error = outcome.error ?? "Task execution failed";
      this.emit({
        type: "worker-failed",
        workerId: this.id,
        taskId: this.config.task.id,
        error: this.error,
        timestamp: new Date().toISOString()
      });
      return this.buildResult(false, this.error);
    } catch (err) {
      this.endTime = Date.now();
      this.status = "failed";
      this.error = err instanceof Error ? err.message : String(err);
      this.emit({
        type: "worker-failed",
        workerId: this.id,
        taskId: this.config.task.id,
        error: this.error,
        timestamp: new Date().toISOString()
      });
      return this.buildResult(false, this.error);
    }
  }
  /**
   * Snapshot of the worker for monitoring dashboards.
   */
  getState() {
    return {
      id: this.id,
      status: this.status,
      task: this.config.task,
      branchName: this.config.branchName,
      worktreePath: this.config.worktreePath,
      elapsedMs: this.startTime > 0 ? (this.endTime || Date.now()) - this.startTime : 0,
      error: this.error,
      commitCount: this.commitCount
    };
  }
  /** Current lifecycle status. */
  getStatus() {
    return this.status;
  }
  /** Task assigned to this worker. */
  getTask() {
    return this.config.task;
  }
  /**
   * Assemble a WorkerResult for the just-finished run; the `error` key is
   * only present on failures.
   */
  buildResult(success, error) {
    return {
      workerId: this.id,
      task: this.config.task,
      success,
      durationMs: Date.now() - this.startTime,
      ...(error === void 0 ? {} : { error }),
      branchName: this.config.branchName,
      commitCount: this.commitCount,
      worktreePath: this.config.worktreePath
    };
  }
  /**
   * Deliver an event to every listener; listener errors are swallowed.
   */
  emit(event) {
    for (const listener of this.listeners) {
      try {
        listener(event);
      } catch {
        // Listener errors must not disturb the worker.
      }
    }
  }
};
|
|
1498
|
+
var WorkerTrackerProxy = class {
  // Static tracker metadata surfaced through the tracker interface.
  meta = { id: "worker-proxy", name: "Worker Proxy", description: "In-memory proxy for parallel workers" };
  task;
  taskStatus = "open";
  completed = false;
  constructor(task) {
    // The proxied task always starts "open", whatever the caller passed in.
    this.task = { ...task, status: "open" };
  }
  async initialize() {
    // Nothing to set up for an in-memory proxy.
  }
  async isReady() {
    return true;
  }
  /**
   * Return the single proxied task when it passes the filter; [] otherwise.
   */
  async getTasks(filter) {
    if (filter?.status && !filter.status.includes(this.taskStatus)) return [];
    if (filter?.excludeIds?.includes(this.task.id)) return [];
    return [this.snapshot()];
  }
  async getTask(id) {
    return id === this.task.id ? this.snapshot() : void 0;
  }
  /**
   * Return the proxied task while it is still pending and passes the filter.
   */
  async getNextTask(filter) {
    if (this.completed) return void 0;
    if (filter?.status && !filter.status.includes(this.taskStatus)) return void 0;
    if (filter?.excludeIds?.includes(this.task.id)) return void 0;
    return this.snapshot();
  }
  async completeTask(id) {
    if (id !== this.task.id) return { success: false, message: "Unknown task" };
    this.taskStatus = "done";
    this.completed = true;
    return { success: true, message: "Task completed" };
  }
  async updateTaskStatus(id, status) {
    if (id !== this.task.id) return void 0;
    this.taskStatus = status;
    if (status === "done") this.completed = true;
    return this.snapshot();
  }
  async isComplete() {
    return this.completed;
  }
  async isTaskReady() {
    return !this.completed;
  }
  async sync() {
    return { success: true, message: "Proxy sync", added: 0, updated: 0, removed: 0 };
  }
  getStateFiles() {
    return [];
  }
  getTemplate() {
    return "";
  }
  async dispose() {
  }
  /** Check if the forced task was completed. */
  wasTaskCompleted() {
    return this.completed;
  }
  /** Copy of the task with its current status applied. */
  snapshot() {
    return { ...this.task, status: this.taskStatus };
  }
};
|
|
1559
|
+
// Path (relative to the project cwd) where parallel session state is persisted.
var SESSION_FILE = ".ulpi/parallel-session.json";
|
|
1560
|
+
/**
 * Build a fresh parallel-session state record.
 * @param sessionId - Unique identifier for this parallel run.
 * @param dagAnalysis - The analyzed task DAG for the run.
 * @param sessionStartTag - Backup tag captured at session start.
 * @returns A session state with empty progress-tracking fields and
 *          identical `startedAt`/`lastUpdatedAt` timestamps.
 */
function createParallelSession(sessionId, dagAnalysis, sessionStartTag) {
  const timestamp = new Date().toISOString();
  const state = {
    sessionId,
    dagAnalysis,
    lastCompletedGroupIndex: -1,
    mergedTaskIds: [],
    failedTaskIds: [],
    sessionStartTag,
    startedAt: timestamp,
    lastUpdatedAt: timestamp
  };
  return state;
}
|
|
1573
|
+
/**
 * Persist the parallel session state to SESSION_FILE under `cwd`.
 * The DAG analysis (which holds a Map) is converted to a JSON-safe
 * shape via serializeDAGAnalysis before writing.
 */
function saveParallelSession(cwd, state) {
  const filePath = path3.join(cwd, SESSION_FILE);
  fs3.mkdirSync(path3.dirname(filePath), { recursive: true });
  const payload = {
    ...state,
    dagAnalysis: serializeDAGAnalysis(state.dagAnalysis)
  };
  fs3.writeFileSync(filePath, JSON.stringify(payload, null, 2), "utf-8");
}
|
|
1583
|
+
/**
 * Load a previously saved parallel session from disk.
 * @returns The session state with its DAG `nodes` Map restored, or
 *          null when the file is missing, unreadable, or unparseable.
 */
function loadParallelSession(cwd) {
  const filePath = path3.join(cwd, SESSION_FILE);
  try {
    const raw = fs3.readFileSync(filePath, "utf-8");
    const stored = JSON.parse(raw);
    const dagAnalysis = deserializeDAGAnalysis(stored.dagAnalysis);
    return { ...stored, dagAnalysis };
  } catch {
    // Any failure (missing file, bad JSON, malformed DAG) means "no session".
    return null;
  }
}
|
|
1598
|
+
/** Remove the persisted session file, ignoring a missing or locked file. */
function deleteParallelSession(cwd) {
  const target = path3.join(cwd, SESSION_FILE);
  try {
    fs3.unlinkSync(target);
  } catch {
    // Best effort: nothing to delete is not an error here.
  }
}
|
|
1605
|
+
/** @returns true when a persisted parallel session file exists under `cwd`. */
function hasParallelSession(cwd) {
  const sessionPath = path3.join(cwd, SESSION_FILE);
  return fs3.existsSync(sessionPath);
}
|
|
1609
|
+
/**
 * Produce the session state that follows completion of one parallel group.
 * The input state is not mutated; merged/failed task ids are appended to
 * the accumulated lists and the update timestamp is refreshed.
 */
function updateSessionAfterGroup(state, groupIndex, mergedTaskIds, failedTaskIds) {
  const next = { ...state };
  next.lastCompletedGroupIndex = groupIndex;
  next.mergedTaskIds = state.mergedTaskIds.concat(mergedTaskIds);
  next.failedTaskIds = state.failedTaskIds.concat(failedTaskIds);
  next.lastUpdatedAt = new Date().toISOString();
  return next;
}
|
|
1618
|
+
/**
 * List worktree directories left behind by a previous parallel session.
 * Worktrees live in `<parent>/.ulpi-worktrees/<projectName>/`, a sibling
 * of the project directory.
 * @returns Absolute paths of leftover worktree directories; empty when the
 *          base directory is absent or unreadable.
 */
function findOrphanedWorktrees(cwd) {
  const worktreeBase = path3.join(
    path3.dirname(cwd),
    ".ulpi-worktrees",
    path3.basename(cwd)
  );
  if (!fs3.existsSync(worktreeBase)) {
    return [];
  }
  try {
    const dirents = fs3.readdirSync(worktreeBase, { withFileTypes: true });
    const orphans = [];
    for (const dirent of dirents) {
      if (dirent.isDirectory()) {
        orphans.push(path3.join(worktreeBase, dirent.name));
      }
    }
    return orphans;
  } catch {
    return [];
  }
}
|
|
1632
|
+
/**
 * Tear down everything a crashed/abandoned parallel session left behind:
 * orphaned worktrees, `ulpi-parallel/*` branches, the (now empty) worktree
 * base directories, and the persisted session file.
 *
 * Each step is best-effort: failures fall through to coarser fallbacks and
 * only hard failures are reported in `errors`.
 *
 * @param cwd - Project root (a git repository).
 * @returns `{ cleaned, errors }` — count of worktrees removed and messages
 *          for worktrees that could not be cleaned by any strategy.
 */
function cleanupOrphanedSession(cwd) {
  const orphans = findOrphanedWorktrees(cwd);
  const errors = [];
  let cleaned = 0;
  for (const worktreePath of orphans) {
    try {
      // Preferred path: let git remove the worktree and its bookkeeping.
      execFileSync4("git", ["-C", cwd, "worktree", "remove", "--force", worktreePath], {
        encoding: "utf-8",
        timeout: 1e4,
        stdio: ["pipe", "pipe", "pipe"]
      });
      cleaned++;
    } catch {
      // Fallback: delete the directory ourselves, then let git prune the
      // stale worktree metadata it can no longer resolve.
      try {
        fs3.rmSync(worktreePath, { recursive: true, force: true });
        execFileSync4("git", ["-C", cwd, "worktree", "prune"], {
          encoding: "utf-8",
          timeout: 1e4,
          stdio: ["pipe", "pipe", "pipe"]
        });
        cleaned++;
      } catch (err) {
        errors.push(`Failed to clean up ${worktreePath}: ${err}`);
      }
    }
  }
  // Delete any leftover per-task branches created for the parallel run.
  try {
    const branches = execFileSync4(
      "git",
      ["-C", cwd, "branch", "--list", "ulpi-parallel/*"],
      {
        encoding: "utf-8",
        timeout: 1e4,
        stdio: ["pipe", "pipe", "pipe"]
      }
    );
    for (const line of branches.split("\n")) {
      // Strip the "* " marker git prints before the current branch.
      const name = line.trim().replace(/^\*\s*/, "");
      if (name) {
        try {
          execFileSync4("git", ["-C", cwd, "branch", "-D", name], {
            encoding: "utf-8",
            timeout: 1e4,
            stdio: ["pipe", "pipe", "pipe"]
          });
        } catch {
          // A branch that refuses deletion is left in place silently.
        }
      }
    }
  } catch {
    // Listing branches failed (e.g. not a git repo) — skip branch cleanup.
  }
  // Remove the now-empty worktree base dirs so no empty scaffolding remains.
  const parentDir = path3.dirname(cwd);
  const projectName = path3.basename(cwd);
  const worktreeBase = path3.join(parentDir, ".ulpi-worktrees", projectName);
  try {
    const entries = fs3.readdirSync(worktreeBase);
    if (entries.length === 0) {
      fs3.rmdirSync(worktreeBase);
      // Also remove the shared `.ulpi-worktrees` parent if this was the
      // last project using it.
      const parent = path3.join(parentDir, ".ulpi-worktrees");
      const parentEntries = fs3.readdirSync(parent);
      if (parentEntries.length === 0) {
        fs3.rmdirSync(parent);
      }
    }
  } catch {
    // Directories may already be gone or non-empty — both are fine.
  }
  deleteParallelSession(cwd);
  return { cleaned, errors };
}
|
|
1701
|
+
/**
 * A session counts as "orphaned" when its persisted state file still
 * exists AND leftover worktree directories remain on disk.
 */
function isOrphanedSession(cwd) {
  if (!hasParallelSession(cwd)) {
    return false;
  }
  return findOrphanedWorktrees(cwd).length > 0;
}
|
|
1708
|
+
/**
 * Convert a DAG analysis into a JSON-serializable shape: the `nodes`
 * Map becomes an array of [key, value] entry pairs.
 */
function serializeDAGAnalysis(analysis) {
  const serialized = { ...analysis };
  serialized.nodes = Array.from(analysis.nodes.entries());
  return serialized;
}
|
|
1714
|
+
/**
 * Inverse of serializeDAGAnalysis: rebuild the `nodes` Map from the
 * stored [key, value] entry pairs.
 */
function deserializeDAGAnalysis(data) {
  return {
    ...data,
    nodes: new Map(data.nodes)
  };
}
|
|
1721
|
+
// Engine defaults for parallel workers: inherit the base engine defaults but
// force auto-commit (downstream merging only happens for branches with
// commits — see the commitCount checks in the executor) and enable worker mode.
var DEFAULT_ENGINE_CONFIG_FOR_PARALLEL = {
  ...DEFAULT_ENGINE_CONFIG,
  autoCommit: true,
  workerMode: true
};
|
|
1726
|
+
var ParallelExecutor = class {
|
|
1727
|
+
config;
|
|
1728
|
+
worktreeManager;
|
|
1729
|
+
mergeEngine;
|
|
1730
|
+
conflictResolver;
|
|
1731
|
+
status = "idle";
|
|
1732
|
+
dagAnalysis = null;
|
|
1733
|
+
currentGroupIndex = 0;
|
|
1734
|
+
activeWorkers = [];
|
|
1735
|
+
allWorkerStates = [];
|
|
1736
|
+
totalTasksCompleted = 0;
|
|
1737
|
+
totalTasksFailed = 0;
|
|
1738
|
+
startedAt = null;
|
|
1739
|
+
sessionId;
|
|
1740
|
+
shouldStop = false;
|
|
1741
|
+
/** Re-queue counts per task to prevent infinite retry loops */
|
|
1742
|
+
requeueCounts = /* @__PURE__ */ new Map();
|
|
1743
|
+
/** Track completed task summaries for validation */
|
|
1744
|
+
taskSummaries = [];
|
|
1745
|
+
listeners = [];
|
|
1746
|
+
/** Task executor callback -- injected by the caller (legacy mode) */
|
|
1747
|
+
taskExecutor = null;
|
|
1748
|
+
/** Base engine config for engine-based execution (new mode) */
|
|
1749
|
+
baseEngineConfig;
|
|
1750
|
+
constructor(config, baseEngineConfig) {
|
|
1751
|
+
this.config = {
|
|
1752
|
+
...DEFAULT_PARALLEL_CONFIG,
|
|
1753
|
+
...config
|
|
1754
|
+
};
|
|
1755
|
+
this.baseEngineConfig = baseEngineConfig;
|
|
1756
|
+
this.sessionId = `parallel-${Date.now()}`;
|
|
1757
|
+
this.worktreeManager = new WorktreeManager({
|
|
1758
|
+
cwd: this.config.cwd,
|
|
1759
|
+
maxWorktrees: this.config.maxWorkers * 2,
|
|
1760
|
+
// Buffer for re-queued tasks
|
|
1761
|
+
minFreeDiskSpace: this.config.minFreeDiskSpace
|
|
1762
|
+
});
|
|
1763
|
+
this.mergeEngine = new MergeEngine(this.config.cwd);
|
|
1764
|
+
this.conflictResolver = new ConflictResolver(this.config.cwd);
|
|
1765
|
+
this.mergeEngine.on((event) => this.emit(event));
|
|
1766
|
+
}
|
|
1767
|
+
/**
|
|
1768
|
+
* Register a parallel event listener.
|
|
1769
|
+
* @returns Unsubscribe function
|
|
1770
|
+
*/
|
|
1771
|
+
on(listener) {
|
|
1772
|
+
this.listeners.push(listener);
|
|
1773
|
+
return () => {
|
|
1774
|
+
const idx = this.listeners.indexOf(listener);
|
|
1775
|
+
if (idx >= 0) this.listeners.splice(idx, 1);
|
|
1776
|
+
};
|
|
1777
|
+
}
|
|
1778
|
+
/**
|
|
1779
|
+
* Set the task executor callback.
|
|
1780
|
+
* This is called for each task to perform the actual work in the worktree.
|
|
1781
|
+
*/
|
|
1782
|
+
setTaskExecutor(executor) {
|
|
1783
|
+
this.taskExecutor = executor;
|
|
1784
|
+
}
|
|
1785
|
+
/**
|
|
1786
|
+
* Set the LLM resolver for AI conflict resolution.
|
|
1787
|
+
*/
|
|
1788
|
+
setLlmResolver(resolver) {
|
|
1789
|
+
this.conflictResolver.setLlmResolver(resolver);
|
|
1790
|
+
}
|
|
1791
|
+
/**
|
|
1792
|
+
* Initialize the executor with tasks.
|
|
1793
|
+
* Validates the DAG, builds parallel groups, and checks disk space.
|
|
1794
|
+
*
|
|
1795
|
+
* @param tasks - All tasks to execute in parallel
|
|
1796
|
+
* @throws If the DAG has cycles or disk space is insufficient
|
|
1797
|
+
*/
|
|
1798
|
+
async initialize(tasks) {
|
|
1799
|
+
this.status = "analyzing";
|
|
1800
|
+
this.dagAnalysis = analyzeTaskGraph(tasks);
|
|
1801
|
+
if (this.dagAnalysis.cyclicTaskIds.length > 0) {
|
|
1802
|
+
throw new Error(
|
|
1803
|
+
`Cannot execute: dependency cycle detected involving tasks: ${this.dagAnalysis.cyclicTaskIds.join(", ")}`
|
|
1804
|
+
);
|
|
1805
|
+
}
|
|
1806
|
+
if (this.dagAnalysis.groups.length === 0) {
|
|
1807
|
+
throw new Error("No tasks to execute after DAG analysis");
|
|
1808
|
+
}
|
|
1809
|
+
await this.worktreeManager.checkDiskSpace();
|
|
1810
|
+
this.status = "idle";
|
|
1811
|
+
}
|
|
1812
|
+
/**
|
|
1813
|
+
* Execute all parallel groups.
|
|
1814
|
+
* Main entry point for the parallel execution flow.
|
|
1815
|
+
*
|
|
1816
|
+
* @throws If initialize() has not been called or no task executor is set
|
|
1817
|
+
*/
|
|
1818
|
+
async execute() {
|
|
1819
|
+
if (!this.dagAnalysis) {
|
|
1820
|
+
throw new Error("ParallelExecutor not initialized. Call initialize() first.");
|
|
1821
|
+
}
|
|
1822
|
+
if (!this.taskExecutor && !this.baseEngineConfig) {
|
|
1823
|
+
throw new Error("No task executor or engine config set. Call setTaskExecutor() or provide baseEngineConfig.");
|
|
1824
|
+
}
|
|
1825
|
+
this.startedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
1826
|
+
this.status = "executing";
|
|
1827
|
+
this.totalTasksCompleted = 0;
|
|
1828
|
+
this.totalTasksFailed = 0;
|
|
1829
|
+
this.taskSummaries = [];
|
|
1830
|
+
const totalTasks = this.dagAnalysis.groups.reduce(
|
|
1831
|
+
(sum, g) => sum + g.tasks.length,
|
|
1832
|
+
0
|
|
1833
|
+
);
|
|
1834
|
+
const backupTag = this.mergeEngine.createSessionBackup(this.sessionId);
|
|
1835
|
+
let sessionState = createParallelSession(
|
|
1836
|
+
this.sessionId,
|
|
1837
|
+
this.dagAnalysis,
|
|
1838
|
+
backupTag
|
|
1839
|
+
);
|
|
1840
|
+
saveParallelSession(this.config.cwd, sessionState);
|
|
1841
|
+
try {
|
|
1842
|
+
try {
|
|
1843
|
+
const { warnings } = await enhanceParallelGroups(
|
|
1844
|
+
this.dagAnalysis.groups,
|
|
1845
|
+
this.config.cwd
|
|
1846
|
+
);
|
|
1847
|
+
for (const warning of warnings) {
|
|
1848
|
+
this.emit({
|
|
1849
|
+
type: "merge-failed",
|
|
1850
|
+
taskId: warning.file,
|
|
1851
|
+
error: `Coupling warning: ${warning.file} (instability: ${warning.instability.toFixed(2)}) is coupled with ${warning.coupledWith.join(", ")} in group ${warning.groupDepth}`,
|
|
1852
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
1853
|
+
});
|
|
1854
|
+
}
|
|
1855
|
+
} catch {
|
|
1856
|
+
}
|
|
1857
|
+
const allTasks = /* @__PURE__ */ new Map();
|
|
1858
|
+
for (const group of this.dagAnalysis.groups) {
|
|
1859
|
+
for (const task of group.tasks) {
|
|
1860
|
+
allTasks.set(task.id, task);
|
|
1861
|
+
}
|
|
1862
|
+
}
|
|
1863
|
+
for (let i = 0; i < this.dagAnalysis.groups.length; i++) {
|
|
1864
|
+
if (this.shouldStop) break;
|
|
1865
|
+
this.currentGroupIndex = i;
|
|
1866
|
+
const group = this.dagAnalysis.groups[i];
|
|
1867
|
+
const { merged, failed } = await this.executeGroup(group, i);
|
|
1868
|
+
for (const taskId of merged) {
|
|
1869
|
+
const task = allTasks.get(taskId);
|
|
1870
|
+
this.taskSummaries.push({
|
|
1871
|
+
taskId,
|
|
1872
|
+
title: task?.title ?? taskId,
|
|
1873
|
+
success: true,
|
|
1874
|
+
commitShas: this.getCommitShasForTask(taskId)
|
|
1875
|
+
});
|
|
1876
|
+
}
|
|
1877
|
+
for (const taskId of failed) {
|
|
1878
|
+
const task = allTasks.get(taskId);
|
|
1879
|
+
this.taskSummaries.push({
|
|
1880
|
+
taskId,
|
|
1881
|
+
title: task?.title ?? taskId,
|
|
1882
|
+
success: false,
|
|
1883
|
+
error: "Task failed or merge conflict unresolved"
|
|
1884
|
+
});
|
|
1885
|
+
}
|
|
1886
|
+
sessionState = updateSessionAfterGroup(
|
|
1887
|
+
sessionState,
|
|
1888
|
+
i,
|
|
1889
|
+
merged,
|
|
1890
|
+
failed
|
|
1891
|
+
);
|
|
1892
|
+
saveParallelSession(this.config.cwd, sessionState);
|
|
1893
|
+
}
|
|
1894
|
+
if (!this.shouldStop && this.config.enableValidation && this.baseEngineConfig && this.taskSummaries.some((t) => t.success)) {
|
|
1895
|
+
await this.runValidationWorker();
|
|
1896
|
+
}
|
|
1897
|
+
this.status = this.shouldStop ? "interrupted" : "completed";
|
|
1898
|
+
this.emit({
|
|
1899
|
+
type: "all-completed",
|
|
1900
|
+
totalCompleted: this.totalTasksCompleted,
|
|
1901
|
+
totalFailed: this.totalTasksFailed,
|
|
1902
|
+
durationMs: Date.now() - new Date(this.startedAt).getTime(),
|
|
1903
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
1904
|
+
});
|
|
1905
|
+
} catch (err) {
|
|
1906
|
+
this.status = "failed";
|
|
1907
|
+
throw err;
|
|
1908
|
+
} finally {
|
|
1909
|
+
await this.cleanup();
|
|
1910
|
+
if (this.status === "completed") {
|
|
1911
|
+
deleteParallelSession(this.config.cwd);
|
|
1912
|
+
}
|
|
1913
|
+
}
|
|
1914
|
+
}
|
|
1915
|
+
/**
|
|
1916
|
+
* Stop parallel execution gracefully.
|
|
1917
|
+
*/
|
|
1918
|
+
stop() {
|
|
1919
|
+
this.shouldStop = true;
|
|
1920
|
+
this.status = "interrupted";
|
|
1921
|
+
}
|
|
1922
|
+
/**
|
|
1923
|
+
* Get the current executor state for monitoring.
|
|
1924
|
+
*/
|
|
1925
|
+
getState() {
|
|
1926
|
+
return {
|
|
1927
|
+
status: this.status,
|
|
1928
|
+
dagAnalysis: this.dagAnalysis,
|
|
1929
|
+
currentGroupIndex: this.currentGroupIndex,
|
|
1930
|
+
totalGroups: this.dagAnalysis?.groups.length ?? 0,
|
|
1931
|
+
workers: this.activeWorkers.map((w) => w.getState()),
|
|
1932
|
+
allWorkers: [
|
|
1933
|
+
...this.allWorkerStates,
|
|
1934
|
+
...this.activeWorkers.map((w) => w.getState())
|
|
1935
|
+
],
|
|
1936
|
+
mergeQueue: [...this.mergeEngine.getQueue()],
|
|
1937
|
+
totalTasksCompleted: this.totalTasksCompleted,
|
|
1938
|
+
totalTasks: this.dagAnalysis?.groups.reduce(
|
|
1939
|
+
(sum, g) => sum + g.tasks.length,
|
|
1940
|
+
0
|
|
1941
|
+
) ?? 0,
|
|
1942
|
+
startedAt: this.startedAt,
|
|
1943
|
+
elapsedMs: this.startedAt ? Date.now() - new Date(this.startedAt).getTime() : 0
|
|
1944
|
+
};
|
|
1945
|
+
}
|
|
1946
|
+
/**
|
|
1947
|
+
* Get the DAG analysis results.
|
|
1948
|
+
*/
|
|
1949
|
+
getDAGAnalysis() {
|
|
1950
|
+
return this.dagAnalysis;
|
|
1951
|
+
}
|
|
1952
|
+
/**
|
|
1953
|
+
* Check if a previous session can be resumed.
|
|
1954
|
+
*/
|
|
1955
|
+
canResume() {
|
|
1956
|
+
return hasParallelSession(this.config.cwd);
|
|
1957
|
+
}
|
|
1958
|
+
/**
|
|
1959
|
+
* Load a previous session for resume.
|
|
1960
|
+
*/
|
|
1961
|
+
loadPreviousSession() {
|
|
1962
|
+
const state = loadParallelSession(this.config.cwd);
|
|
1963
|
+
if (!state) return null;
|
|
1964
|
+
return { state };
|
|
1965
|
+
}
|
|
1966
|
+
// ─── Private Methods ───
|
|
1967
|
+
/**
|
|
1968
|
+
* Execute a single parallel group.
|
|
1969
|
+
* Tasks within a group have no mutual dependencies and can run in parallel.
|
|
1970
|
+
*/
|
|
1971
|
+
async executeGroup(group, groupIndex) {
|
|
1972
|
+
const merged = [];
|
|
1973
|
+
const failed = [];
|
|
1974
|
+
const batches = this.batchTasks(group.tasks);
|
|
1975
|
+
for (const batch of batches) {
|
|
1976
|
+
if (this.shouldStop) break;
|
|
1977
|
+
const results = await this.executeBatch(batch);
|
|
1978
|
+
this.status = "merging";
|
|
1979
|
+
for (const result of results) {
|
|
1980
|
+
if (this.shouldStop) {
|
|
1981
|
+
failed.push(result.task.id);
|
|
1982
|
+
this.totalTasksFailed++;
|
|
1983
|
+
continue;
|
|
1984
|
+
}
|
|
1985
|
+
if (result.success && result.commitCount > 0) {
|
|
1986
|
+
const currentBranch = this.getCurrentBranch();
|
|
1987
|
+
this.mergeEngine.enqueue(
|
|
1988
|
+
result.branchName,
|
|
1989
|
+
currentBranch,
|
|
1990
|
+
result.task.id
|
|
1991
|
+
);
|
|
1992
|
+
const mergeResult = await this.mergeEngine.processNext();
|
|
1993
|
+
if (mergeResult?.success) {
|
|
1994
|
+
merged.push(result.task.id);
|
|
1995
|
+
this.totalTasksCompleted++;
|
|
1996
|
+
} else if (mergeResult?.conflictFiles && mergeResult.conflictFiles.length > 0) {
|
|
1997
|
+
const resolved = await this.handleConflicts(
|
|
1998
|
+
result,
|
|
1999
|
+
mergeResult.conflictFiles
|
|
2000
|
+
);
|
|
2001
|
+
if (resolved) {
|
|
2002
|
+
merged.push(result.task.id);
|
|
2003
|
+
this.totalTasksCompleted++;
|
|
2004
|
+
} else {
|
|
2005
|
+
failed.push(result.task.id);
|
|
2006
|
+
this.totalTasksFailed++;
|
|
2007
|
+
}
|
|
2008
|
+
} else {
|
|
2009
|
+
failed.push(result.task.id);
|
|
2010
|
+
this.totalTasksFailed++;
|
|
2011
|
+
}
|
|
2012
|
+
} else if (result.success && result.commitCount === 0) {
|
|
2013
|
+
merged.push(result.task.id);
|
|
2014
|
+
this.totalTasksCompleted++;
|
|
2015
|
+
} else {
|
|
2016
|
+
const taskId = result.task.id;
|
|
2017
|
+
const count = this.requeueCounts.get(taskId) ?? 0;
|
|
2018
|
+
if (count < this.config.maxRequeueCount) {
|
|
2019
|
+
this.requeueCounts.set(taskId, count + 1);
|
|
2020
|
+
const retryResult = await this.executeSingleTask(result.task);
|
|
2021
|
+
if (retryResult.success) {
|
|
2022
|
+
if (retryResult.commitCount > 0) {
|
|
2023
|
+
const currentBranch = this.getCurrentBranch();
|
|
2024
|
+
this.mergeEngine.enqueue(
|
|
2025
|
+
retryResult.branchName,
|
|
2026
|
+
currentBranch,
|
|
2027
|
+
taskId
|
|
2028
|
+
);
|
|
2029
|
+
const mergeResult = await this.mergeEngine.processNext();
|
|
2030
|
+
if (mergeResult?.success) {
|
|
2031
|
+
merged.push(taskId);
|
|
2032
|
+
this.totalTasksCompleted++;
|
|
2033
|
+
} else {
|
|
2034
|
+
failed.push(taskId);
|
|
2035
|
+
this.totalTasksFailed++;
|
|
2036
|
+
}
|
|
2037
|
+
} else {
|
|
2038
|
+
merged.push(taskId);
|
|
2039
|
+
this.totalTasksCompleted++;
|
|
2040
|
+
}
|
|
2041
|
+
} else {
|
|
2042
|
+
failed.push(taskId);
|
|
2043
|
+
this.totalTasksFailed++;
|
|
2044
|
+
}
|
|
2045
|
+
} else {
|
|
2046
|
+
failed.push(taskId);
|
|
2047
|
+
this.totalTasksFailed++;
|
|
2048
|
+
}
|
|
2049
|
+
}
|
|
2050
|
+
}
|
|
2051
|
+
if (!this.shouldStop) {
|
|
2052
|
+
this.status = "executing";
|
|
2053
|
+
}
|
|
2054
|
+
}
|
|
2055
|
+
this.emit({
|
|
2056
|
+
type: "group-completed",
|
|
2057
|
+
groupIndex,
|
|
2058
|
+
tasksCompleted: merged.length,
|
|
2059
|
+
tasksFailed: failed.length,
|
|
2060
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
2061
|
+
});
|
|
2062
|
+
return { merged, failed };
|
|
2063
|
+
}
|
|
2064
|
+
/**
|
|
2065
|
+
* Create a TaskExecutor callback that wraps an ExecutionEngine for a given worktree.
|
|
2066
|
+
* Used when baseEngineConfig is provided (engine-based mode).
|
|
2067
|
+
*/
|
|
2068
|
+
createEngineExecutor(workerId) {
|
|
2069
|
+
return async (task, worktreePath, _branchName) => {
|
|
2070
|
+
const proxy = new WorkerTrackerProxy(task);
|
|
2071
|
+
const engineConfig = {
|
|
2072
|
+
...DEFAULT_ENGINE_CONFIG_FOR_PARALLEL,
|
|
2073
|
+
...this.baseEngineConfig,
|
|
2074
|
+
workingDir: worktreePath,
|
|
2075
|
+
sourceProjectDir: this.config.cwd,
|
|
2076
|
+
autoCommit: this.baseEngineConfig?.autoCommit ?? true,
|
|
2077
|
+
// Wire parallel config's per-worker iteration limit to the engine
|
|
2078
|
+
maxIterations: this.config.maxIterationsPerWorker
|
|
2079
|
+
};
|
|
2080
|
+
const engine = new ExecutionEngine(engineConfig);
|
|
2081
|
+
engine.on((event) => {
|
|
2082
|
+
const base = {
|
|
2083
|
+
type: "worker-progress",
|
|
2084
|
+
workerId,
|
|
2085
|
+
taskId: task.id,
|
|
2086
|
+
engineEventType: event.type,
|
|
2087
|
+
timestamp: event.timestamp
|
|
2088
|
+
};
|
|
2089
|
+
if (event.type === "iteration:started") {
|
|
2090
|
+
base.iteration = event.iteration;
|
|
2091
|
+
base.message = "Iteration started";
|
|
2092
|
+
} else if (event.type === "iteration:completed") {
|
|
2093
|
+
const res = event.result;
|
|
2094
|
+
base.iteration = res?.iteration;
|
|
2095
|
+
base.message = res?.taskCompleted ? `Task completed (${Math.round((res?.durationMs ?? 0) / 1e3)}s)` : `Iteration done, no completion signal (${Math.round((res?.durationMs ?? 0) / 1e3)}s)`;
|
|
2096
|
+
} else if (event.type === "iteration:failed") {
|
|
2097
|
+
base.message = `Failed: ${event.error ?? "unknown"}`;
|
|
2098
|
+
} else if (event.type === "task:completed") {
|
|
2099
|
+
base.message = "Task completed";
|
|
2100
|
+
} else if (event.type === "task:skipped") {
|
|
2101
|
+
base.message = `Skipped: ${event.reason ?? "unknown"}`;
|
|
2102
|
+
} else if (event.type === "engine:stopped") {
|
|
2103
|
+
base.message = `Engine stopped: ${event.reason ?? "unknown"}`;
|
|
2104
|
+
} else if (event.type === "agent:output") {
|
|
2105
|
+
const ev = event;
|
|
2106
|
+
base.stream = ev.stream;
|
|
2107
|
+
base.data = ev.data;
|
|
2108
|
+
}
|
|
2109
|
+
this.emit(base);
|
|
2110
|
+
});
|
|
2111
|
+
await engine.initialize({ tracker: proxy, forcedTask: task });
|
|
2112
|
+
await engine.start();
|
|
2113
|
+
try {
|
|
2114
|
+
const statusOutput = execFileSync5("git", ["status", "--porcelain"], {
|
|
2115
|
+
cwd: worktreePath,
|
|
2116
|
+
encoding: "utf-8",
|
|
2117
|
+
timeout: 1e4
|
|
2118
|
+
}).toString().trim();
|
|
2119
|
+
if (statusOutput.length > 0) {
|
|
2120
|
+
execFileSync5("git", ["add", "-A"], { cwd: worktreePath, timeout: 1e4 });
|
|
2121
|
+
execFileSync5("git", ["commit", "-m", `feat: ${task.id} - ${task.title}`], {
|
|
2122
|
+
cwd: worktreePath,
|
|
2123
|
+
encoding: "utf-8",
|
|
2124
|
+
timeout: 1e4
|
|
2125
|
+
});
|
|
2126
|
+
}
|
|
2127
|
+
} catch {
|
|
2128
|
+
}
|
|
2129
|
+
let commitCount = 0;
|
|
2130
|
+
try {
|
|
2131
|
+
const sourceBranch = this.getCurrentBranch();
|
|
2132
|
+
const countOutput = execFileSync5("git", ["rev-list", "--count", `${sourceBranch}..HEAD`], {
|
|
2133
|
+
cwd: worktreePath,
|
|
2134
|
+
encoding: "utf-8",
|
|
2135
|
+
timeout: 1e4
|
|
2136
|
+
}).toString().trim();
|
|
2137
|
+
commitCount = parseInt(countOutput, 10) || 0;
|
|
2138
|
+
} catch {
|
|
2139
|
+
const state2 = engine.getState();
|
|
2140
|
+
commitCount = state2.iterations.filter((it) => it.commitSha).length;
|
|
2141
|
+
}
|
|
2142
|
+
const state = engine.getState();
|
|
2143
|
+
return {
|
|
2144
|
+
success: proxy.wasTaskCompleted(),
|
|
2145
|
+
commitCount,
|
|
2146
|
+
error: state.failedTasks.length > 0 ? `Task failed after ${state.currentIteration} iterations` : void 0
|
|
2147
|
+
};
|
|
2148
|
+
};
|
|
2149
|
+
}
|
|
2150
|
+
/**
|
|
2151
|
+
* Execute a batch of tasks in parallel using workers.
|
|
2152
|
+
*/
|
|
2153
|
+
async executeBatch(tasks) {
|
|
2154
|
+
this.activeWorkers = [];
|
|
2155
|
+
const workerPromises = [];
|
|
2156
|
+
for (let i = 0; i < tasks.length; i++) {
|
|
2157
|
+
const task = tasks[i];
|
|
2158
|
+
const workerId = `${this.currentGroupIndex}-${i}`;
|
|
2159
|
+
try {
|
|
2160
|
+
const worktreeInfo = await this.worktreeManager.create(workerId, task.id);
|
|
2161
|
+
const worker = new ParallelWorker({
|
|
2162
|
+
id: workerId,
|
|
2163
|
+
task,
|
|
2164
|
+
worktreePath: worktreeInfo.path,
|
|
2165
|
+
branchName: worktreeInfo.branch,
|
|
2166
|
+
cwd: this.config.cwd
|
|
2167
|
+
});
|
|
2168
|
+
worker.on((event) => this.emit(event));
|
|
2169
|
+
this.activeWorkers.push(worker);
|
|
2170
|
+
const executor = this.baseEngineConfig ? this.createEngineExecutor(workerId) : this.taskExecutor;
|
|
2171
|
+
workerPromises.push(worker.start(executor));
|
|
2172
|
+
} catch (err) {
|
|
2173
|
+
workerPromises.push(
|
|
2174
|
+
Promise.resolve({
|
|
2175
|
+
workerId,
|
|
2176
|
+
task,
|
|
2177
|
+
success: false,
|
|
2178
|
+
durationMs: 0,
|
|
2179
|
+
error: err instanceof Error ? err.message : String(err),
|
|
2180
|
+
branchName: "",
|
|
2181
|
+
commitCount: 0,
|
|
2182
|
+
worktreePath: ""
|
|
2183
|
+
})
|
|
2184
|
+
);
|
|
2185
|
+
}
|
|
2186
|
+
}
|
|
2187
|
+
const settledResults = await Promise.allSettled(workerPromises);
|
|
2188
|
+
const results = settledResults.map((result, i) => {
|
|
2189
|
+
if (result.status === "fulfilled") {
|
|
2190
|
+
return result.value;
|
|
2191
|
+
}
|
|
2192
|
+
return {
|
|
2193
|
+
workerId: `${this.currentGroupIndex}-${i}`,
|
|
2194
|
+
task: tasks[i],
|
|
2195
|
+
success: false,
|
|
2196
|
+
durationMs: 0,
|
|
2197
|
+
error: result.reason instanceof Error ? result.reason.message : String(result.reason),
|
|
2198
|
+
branchName: "",
|
|
2199
|
+
commitCount: 0,
|
|
2200
|
+
worktreePath: ""
|
|
2201
|
+
};
|
|
2202
|
+
});
|
|
2203
|
+
for (const worker of this.activeWorkers) {
|
|
2204
|
+
this.allWorkerStates.push(worker.getState());
|
|
2205
|
+
}
|
|
2206
|
+
for (const worker of this.activeWorkers) {
|
|
2207
|
+
try {
|
|
2208
|
+
await this.worktreeManager.remove(worker.id);
|
|
2209
|
+
} catch {
|
|
2210
|
+
}
|
|
2211
|
+
}
|
|
2212
|
+
this.activeWorkers = [];
|
|
2213
|
+
return results;
|
|
2214
|
+
}
|
|
2215
|
+
/**
|
|
2216
|
+
* Execute a single task (for retries).
|
|
2217
|
+
*/
|
|
2218
|
+
async executeSingleTask(task) {
|
|
2219
|
+
const workerId = `retry-${task.id}-${Date.now()}`;
|
|
2220
|
+
try {
|
|
2221
|
+
const worktreeInfo = await this.worktreeManager.create(workerId, task.id);
|
|
2222
|
+
const worker = new ParallelWorker({
|
|
2223
|
+
id: workerId,
|
|
2224
|
+
task,
|
|
2225
|
+
worktreePath: worktreeInfo.path,
|
|
2226
|
+
branchName: worktreeInfo.branch,
|
|
2227
|
+
cwd: this.config.cwd
|
|
2228
|
+
});
|
|
2229
|
+
worker.on((event) => this.emit(event));
|
|
2230
|
+
const executor = this.baseEngineConfig ? this.createEngineExecutor(workerId) : this.taskExecutor;
|
|
2231
|
+
const result = await worker.start(executor);
|
|
2232
|
+
await this.worktreeManager.remove(workerId);
|
|
2233
|
+
return result;
|
|
2234
|
+
} catch (err) {
|
|
2235
|
+
return {
|
|
2236
|
+
workerId,
|
|
2237
|
+
task,
|
|
2238
|
+
success: false,
|
|
2239
|
+
durationMs: 0,
|
|
2240
|
+
error: err instanceof Error ? err.message : String(err),
|
|
2241
|
+
branchName: "",
|
|
2242
|
+
commitCount: 0,
|
|
2243
|
+
worktreePath: ""
|
|
2244
|
+
};
|
|
2245
|
+
}
|
|
2246
|
+
}
|
|
2247
|
+
/**
|
|
2248
|
+
* Handle merge conflicts for a worker result.
|
|
2249
|
+
* Attempts AI conflict resolution if configured.
|
|
2250
|
+
*
|
|
2251
|
+
* @returns true if conflicts were resolved, false otherwise
|
|
2252
|
+
*/
|
|
2253
|
+
async handleConflicts(workerResult, conflictFiles) {
|
|
2254
|
+
if (!this.config.aiConflictResolution) {
|
|
2255
|
+
return false;
|
|
2256
|
+
}
|
|
2257
|
+
try {
|
|
2258
|
+
try {
|
|
2259
|
+
execFileSyncSafe("git", [
|
|
2260
|
+
"-C",
|
|
2261
|
+
this.config.cwd,
|
|
2262
|
+
"merge",
|
|
2263
|
+
"--no-commit",
|
|
2264
|
+
workerResult.branchName
|
|
2265
|
+
]);
|
|
2266
|
+
} catch {
|
|
2267
|
+
}
|
|
2268
|
+
const results = await this.conflictResolver.resolveAll(conflictFiles, {
|
|
2269
|
+
taskId: workerResult.task.id,
|
|
2270
|
+
taskTitle: workerResult.task.title
|
|
2271
|
+
});
|
|
2272
|
+
const allResolved = results.every((r) => r.resolved);
|
|
2273
|
+
if (allResolved) {
|
|
2274
|
+
try {
|
|
2275
|
+
execFileSyncSafe("git", [
|
|
2276
|
+
"-C",
|
|
2277
|
+
this.config.cwd,
|
|
2278
|
+
"commit",
|
|
2279
|
+
"--no-edit",
|
|
2280
|
+
"-m",
|
|
2281
|
+
`feat(parallel): merge task ${workerResult.task.id} (conflicts resolved)`
|
|
2282
|
+
]);
|
|
2283
|
+
return true;
|
|
2284
|
+
} catch {
|
|
2285
|
+
}
|
|
2286
|
+
}
|
|
2287
|
+
try {
|
|
2288
|
+
execFileSyncSafe("git", ["-C", this.config.cwd, "merge", "--abort"]);
|
|
2289
|
+
} catch {
|
|
2290
|
+
}
|
|
2291
|
+
return false;
|
|
2292
|
+
} catch {
|
|
2293
|
+
try {
|
|
2294
|
+
execFileSyncSafe("git", ["-C", this.config.cwd, "merge", "--abort"]);
|
|
2295
|
+
} catch {
|
|
2296
|
+
}
|
|
2297
|
+
return false;
|
|
2298
|
+
}
|
|
2299
|
+
}
|
|
2300
|
+
/**
|
|
2301
|
+
* Run a validation worker after all parallel groups complete.
|
|
2302
|
+
* The validation worker operates in the main working directory (all merges are done)
|
|
2303
|
+
* and validates + fixes the integrated work against the original PRD.
|
|
2304
|
+
*/
|
|
2305
|
+
/**
 * Run the post-merge validation worker in the main working directory.
 *
 * Emits "validation-started", spins up a dedicated ExecutionEngine on a
 * synthetic "VALIDATION" task built from the PRD, commits any leftover
 * working-tree changes, then emits "validation-completed" (success or not).
 * All git/filesystem failures along the way are deliberately best-effort.
 */
async runValidationWorker() {
  // Tally per-task outcomes from the merge phase for the start event.
  const mergedCount = this.taskSummaries.filter((t) => t.success).length;
  const failedCount = this.taskSummaries.filter((t) => !t.success).length;
  this.emit({
    type: "validation-started",
    totalMerged: mergedCount,
    totalFailed: failedCount,
    timestamp: (/* @__PURE__ */ new Date()).toISOString()
  });
  const validationStart = Date.now();
  try {
    // Prefer the PRD passed in config; otherwise fall back to the most
    // recently modified .md file under <cwd>/.ulpi/prds.
    let prdMarkdown = this.config.prdMarkdown ?? "";
    if (!prdMarkdown) {
      try {
        const fs4 = await import("fs");
        const path4 = await import("path");
        const prdDir = path4.join(this.config.cwd, ".ulpi", "prds");
        const files = fs4.readdirSync(prdDir).filter((f) => f.endsWith(".md"));
        if (files.length > 0) {
          // Sort newest-first by mtime and take the freshest PRD.
          const sorted = files.map((f) => ({
            name: f,
            mtime: fs4.statSync(path4.join(prdDir, f)).mtimeMs
          })).sort((a, b) => b.mtime - a.mtime);
          prdMarkdown = fs4.readFileSync(path4.join(prdDir, sorted[0].name), "utf-8");
        }
      } catch {
        // Best-effort: a missing/unreadable PRD dir simply leaves prdMarkdown empty.
      }
    }
    // Build the validation prompt via the optional @ulpi/prd-engine package;
    // fall back to the local prompt builder when it is not installed.
    let validationPrompt;
    try {
      const prdEngine = await import("@ulpi/prd-engine");
      validationPrompt = prdEngine.buildPrdValidationPrompt(prdMarkdown, this.taskSummaries);
    } catch {
      validationPrompt = buildFallbackValidationPrompt(prdMarkdown, this.taskSummaries);
    }
    // Synthetic task that drives the engine through the validation prompt.
    const validationTask = {
      id: "VALIDATION",
      title: "Post-Parallel Validation & Integration Fix",
      status: "open",
      priority: 0,
      // Critical
      description: validationPrompt,
      labels: ["validation", "integration"],
      type: "validation"
    };
    const proxy = new WorkerTrackerProxy(validationTask);
    // Engine runs directly in the main working directory (not a worktree),
    // since all branch merges are already done by this point.
    const engineConfig = {
      ...DEFAULT_ENGINE_CONFIG_FOR_PARALLEL,
      ...this.baseEngineConfig,
      workingDir: this.config.cwd,
      sourceProjectDir: this.config.cwd,
      autoCommit: true,
      maxIterations: this.config.validationMaxIterations,
      // Override agent for verification if configured
      ...this.config.verificationAgentName ? { agentName: this.config.verificationAgentName } : {},
      ...this.config.verificationAgentConfig ? { agentConfig: this.config.verificationAgentConfig } : {}
    };
    const engine = new ExecutionEngine(engineConfig);
    // Forward engine events to our listeners as "worker-progress" events.
    engine.on((event) => {
      this.emit({
        type: "worker-progress",
        workerId: "validation",
        taskId: "VALIDATION",
        engineEventType: event.type,
        timestamp: event.timestamp,
        message: event.type === "iteration:completed" ? "Validation iteration completed" : event.type === "task:completed" ? "Validation completed" : void 0
      });
    });
    await engine.initialize({ tracker: proxy, forcedTask: validationTask });
    await engine.start();
    // Commit any uncommitted changes the validation run left behind.
    let commitCount = 0;
    try {
      const statusOutput = execFileSync5("git", ["status", "--porcelain"], {
        cwd: this.config.cwd,
        encoding: "utf-8",
        timeout: 1e4
      }).toString().trim();
      if (statusOutput.length > 0) {
        execFileSync5("git", ["add", "-A"], { cwd: this.config.cwd, timeout: 1e4 });
        execFileSync5("git", ["commit", "-m", "fix: post-parallel validation fixes"], {
          cwd: this.config.cwd,
          encoding: "utf-8",
          timeout: 1e4
        });
        commitCount++;
      }
    } catch {
      // Best-effort: a failed status/commit is not fatal to validation.
    }
    // Add commits the engine recorded per iteration to the manual one above.
    const state = engine.getState();
    commitCount += state.iterations.filter((it) => it.commitSha).length;
    this.emit({
      type: "validation-completed",
      success: proxy.wasTaskCompleted(),
      commitCount,
      durationMs: Date.now() - validationStart,
      timestamp: (/* @__PURE__ */ new Date()).toISOString()
    });
  } catch (err) {
    // NOTE(review): `err` is intentionally not surfaced — the failure is
    // reported only through success:false on the completion event.
    this.emit({
      type: "validation-completed",
      success: false,
      commitCount: 0,
      durationMs: Date.now() - validationStart,
      timestamp: (/* @__PURE__ */ new Date()).toISOString()
    });
  }
}
|
|
2412
|
+
/**
|
|
2413
|
+
* Get commit SHAs for a merged task from the allWorkerStates.
|
|
2414
|
+
*/
|
|
2415
|
+
getCommitShasForTask(taskId) {
|
|
2416
|
+
const workerState = this.allWorkerStates.find(
|
|
2417
|
+
(w) => w.task.id === taskId && w.status === "completed"
|
|
2418
|
+
);
|
|
2419
|
+
if (!workerState?.branchName) return [];
|
|
2420
|
+
try {
|
|
2421
|
+
const currentBranch = this.getCurrentBranch();
|
|
2422
|
+
const output = execFileSyncSafe("git", [
|
|
2423
|
+
"-C",
|
|
2424
|
+
this.config.cwd,
|
|
2425
|
+
"log",
|
|
2426
|
+
"--oneline",
|
|
2427
|
+
"--grep",
|
|
2428
|
+
`merge task ${taskId}`,
|
|
2429
|
+
"-n",
|
|
2430
|
+
"1",
|
|
2431
|
+
"--format=%H"
|
|
2432
|
+
]).trim();
|
|
2433
|
+
return output ? [output] : [];
|
|
2434
|
+
} catch {
|
|
2435
|
+
return [];
|
|
2436
|
+
}
|
|
2437
|
+
}
|
|
2438
|
+
/**
|
|
2439
|
+
* Get the current branch name.
|
|
2440
|
+
*/
|
|
2441
|
+
getCurrentBranch() {
|
|
2442
|
+
try {
|
|
2443
|
+
return execFileSyncSafe("git", [
|
|
2444
|
+
"-C",
|
|
2445
|
+
this.config.cwd,
|
|
2446
|
+
"rev-parse",
|
|
2447
|
+
"--abbrev-ref",
|
|
2448
|
+
"HEAD"
|
|
2449
|
+
]).trim();
|
|
2450
|
+
} catch {
|
|
2451
|
+
return "HEAD";
|
|
2452
|
+
}
|
|
2453
|
+
}
|
|
2454
|
+
/**
|
|
2455
|
+
* Split tasks into batches of maxWorkers size.
|
|
2456
|
+
*/
|
|
2457
|
+
batchTasks(tasks) {
|
|
2458
|
+
const batches = [];
|
|
2459
|
+
for (let i = 0; i < tasks.length; i += this.config.maxWorkers) {
|
|
2460
|
+
batches.push(tasks.slice(i, i + this.config.maxWorkers));
|
|
2461
|
+
}
|
|
2462
|
+
return batches;
|
|
2463
|
+
}
|
|
2464
|
+
/**
|
|
2465
|
+
* Clean up all resources.
|
|
2466
|
+
*/
|
|
2467
|
+
async cleanup() {
|
|
2468
|
+
try {
|
|
2469
|
+
await this.worktreeManager.cleanup();
|
|
2470
|
+
} catch {
|
|
2471
|
+
}
|
|
2472
|
+
try {
|
|
2473
|
+
this.mergeEngine.cleanupTags();
|
|
2474
|
+
} catch {
|
|
2475
|
+
}
|
|
2476
|
+
}
|
|
2477
|
+
/**
|
|
2478
|
+
* Emit a parallel event to all listeners.
|
|
2479
|
+
*/
|
|
2480
|
+
emit(event) {
|
|
2481
|
+
for (const listener of this.listeners) {
|
|
2482
|
+
try {
|
|
2483
|
+
listener(event);
|
|
2484
|
+
} catch {
|
|
2485
|
+
}
|
|
2486
|
+
}
|
|
2487
|
+
}
|
|
2488
|
+
};
|
|
2489
|
+
/**
 * Run a command synchronously with safe defaults: utf-8 output, a 30s
 * timeout, and fully piped stdio so nothing leaks to the parent's terminal.
 *
 * @param {string} command - Executable to run.
 * @param {string[]} args - Arguments passed verbatim (no shell interpolation).
 * @returns {string} The command's stdout, decoded as utf-8.
 */
function execFileSyncSafe(command, args) {
  const spawnOptions = {
    encoding: "utf-8",
    timeout: 30000,
    stdio: ["pipe", "pipe", "pipe"]
  };
  return execFileSync5(command, args, spawnOptions);
}
|
|
2496
|
+
/**
 * Build the validation prompt used when @ulpi/prd-engine is unavailable.
 *
 * Produces a fixed markdown instruction block (role, then three numbered
 * steps), optionally followed by the PRD itself.
 *
 * @param {string} prdMarkdown - PRD content; appended under "## PRD" when non-empty.
 * @param {Array} _taskSummaries - Accepted for signature parity; unused here.
 * @returns {string} The assembled prompt, sections joined by newlines.
 */
function buildFallbackValidationPrompt(prdMarkdown, _taskSummaries) {
  const sections = [
    "## Your Role\n",
    "You are a senior engineer performing post-implementation validation.",
    "Multiple agents worked on tasks in parallel. Their work has been merged into this branch.",
    "You do NOT care about what the agents did or didn't do. You only care about the current state of the code.\n",
    "## Instructions\n",
    "### Step 1: Run the build\n",
    "Run the project's build command first. Note any errors.\n",
    "### Step 2: Validate task by task\n",
    "Go through each task in the PRD below, one by one.",
    "For each task: read the files, check if the code exists and is correct.",
    "If it's there and correct \u2014 move on. If missing or broken \u2014 implement or fix it.\n",
    "### Step 3: Fix build errors\n",
    "Run the build again. Fix errors. Repeat until clean.\n"
  ];
  if (prdMarkdown) {
    sections.push("## PRD\n", prdMarkdown);
  }
  return sections.join("\n");
}
|
|
2517
|
+
export {
|
|
2518
|
+
ConflictResolver,
|
|
2519
|
+
DEFAULT_PARALLEL_CONFIG,
|
|
2520
|
+
MergeEngine,
|
|
2521
|
+
ParallelExecutor,
|
|
2522
|
+
ParallelWorker,
|
|
2523
|
+
WorkerTrackerProxy,
|
|
2524
|
+
WorktreeManager,
|
|
2525
|
+
analyzeTaskGraph,
|
|
2526
|
+
buildConflictPrompt,
|
|
2527
|
+
buildParallelGroups,
|
|
2528
|
+
cleanupOrphanedSession,
|
|
2529
|
+
createParallelSession,
|
|
2530
|
+
deleteParallelSession,
|
|
2531
|
+
enhanceParallelGroups,
|
|
2532
|
+
findOrphanedWorktrees,
|
|
2533
|
+
getConflictContext,
|
|
2534
|
+
hasParallelSession,
|
|
2535
|
+
isOrphanedSession,
|
|
2536
|
+
loadParallelSession,
|
|
2537
|
+
saveParallelSession,
|
|
2538
|
+
topologicalSortTasks,
|
|
2539
|
+
updateSessionAfterGroup,
|
|
2540
|
+
validateDAG
|
|
2541
|
+
};
|