panopticon-cli 0.5.4 → 0.5.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{agents-HNMF52RM.js → agents-5HWTDR4S.js} +12 -9
- package/dist/archive-planning-U3AZAKWI.js +16 -0
- package/dist/{chunk-KBHRXV5T.js → chunk-43F4LDZ4.js} +3 -3
- package/dist/chunk-6OYUJ4AJ.js +146 -0
- package/dist/chunk-6OYUJ4AJ.js.map +1 -0
- package/dist/{chunk-MOPGR3CL.js → chunk-AAP4G6U7.js} +1 -1
- package/dist/chunk-AAP4G6U7.js.map +1 -0
- package/dist/{chunk-4HST45MO.js → chunk-BYWVPPAZ.js} +19 -12
- package/dist/chunk-BYWVPPAZ.js.map +1 -0
- package/dist/{chunk-CFCUOV3Q.js → chunk-DMRTN432.js} +4 -1
- package/dist/chunk-DMRTN432.js.map +1 -0
- package/dist/{chunk-HOGYHJ2G.js → chunk-DW3PKGIS.js} +2 -2
- package/dist/{chunk-KY2E2Q3T.js → chunk-FUUP55PE.js} +104 -46
- package/dist/chunk-FUUP55PE.js.map +1 -0
- package/dist/chunk-GUV2EPBG.js +692 -0
- package/dist/chunk-GUV2EPBG.js.map +1 -0
- package/dist/{chunk-44EOY2ZL.js → chunk-HHL3AWXA.js} +46 -2
- package/dist/chunk-HHL3AWXA.js.map +1 -0
- package/dist/{chunk-6N2KBSJA.js → chunk-IZIXJYXZ.js} +40 -6
- package/dist/chunk-IZIXJYXZ.js.map +1 -0
- package/dist/chunk-MJXYTGK5.js +64 -0
- package/dist/chunk-MJXYTGK5.js.map +1 -0
- package/dist/chunk-OJF4QS3S.js +269 -0
- package/dist/chunk-OJF4QS3S.js.map +1 -0
- package/dist/{chunk-FQ66DECN.js → chunk-QAJAJBFW.js} +1 -1
- package/dist/chunk-QAJAJBFW.js.map +1 -0
- package/dist/chunk-R4KPLLRB.js +36 -0
- package/dist/chunk-R4KPLLRB.js.map +1 -0
- package/dist/{chunk-DFNVHK3N.js → chunk-SUM2WVPF.js} +4 -4
- package/dist/{chunk-T7BBPDEJ.js → chunk-UKSGE6RH.js} +45 -15
- package/dist/chunk-UKSGE6RH.js.map +1 -0
- package/dist/chunk-W2OTF6OS.js +201 -0
- package/dist/chunk-W2OTF6OS.js.map +1 -0
- package/dist/chunk-WEQW3EAT.js +78 -0
- package/dist/chunk-WEQW3EAT.js.map +1 -0
- package/dist/{chunk-ID4OYXVH.js → chunk-WJJ3ZIQ6.js} +112 -45
- package/dist/chunk-WJJ3ZIQ6.js.map +1 -0
- package/dist/chunk-YAAT66RT.js +70 -0
- package/dist/chunk-YAAT66RT.js.map +1 -0
- package/dist/{chunk-RLZQB7HS.js → chunk-ZMJFEHGF.js} +13 -1
- package/dist/chunk-ZMJFEHGF.js.map +1 -0
- package/dist/{chunk-HRU7S4TA.js → chunk-ZN5RHWGR.js} +18 -208
- package/dist/{chunk-HRU7S4TA.js.map → chunk-ZN5RHWGR.js.map} +1 -1
- package/dist/{chunk-ZTYHZMEC.js → chunk-ZWZNEA26.js} +2 -2
- package/dist/clean-planning-7Z5YY64X.js +9 -0
- package/dist/cli/index.js +1301 -2142
- package/dist/cli/index.js.map +1 -1
- package/dist/close-issue-CTZK777I.js +9 -0
- package/dist/compact-beads-72SHALOL.js +9 -0
- package/dist/{config-4CJNUE3O.js → config-FFTMBVHM.js} +2 -2
- package/dist/dashboard/public/assets/{index-DSvt5pPn.css → index-Bx4NCn9A.css} +1 -1
- package/dist/dashboard/public/assets/index-Db9NOz4z.js +756 -0
- package/dist/dashboard/public/index.html +3 -2
- package/dist/dashboard/server.js +34714 -34296
- package/dist/{feedback-writer-T43PI5S2.js → feedback-writer-T2WCT6EZ.js} +2 -2
- package/dist/{hume-CKJJ3OUU.js → hume-GVTB5BKW.js} +3 -3
- package/dist/index.d.ts +24 -16
- package/dist/index.js +4 -4
- package/dist/label-cleanup-4HJVX6NP.js +103 -0
- package/dist/label-cleanup-4HJVX6NP.js.map +1 -0
- package/dist/merge-agent-WM7ZKUET.js +1725 -0
- package/dist/merge-agent-WM7ZKUET.js.map +1 -0
- package/dist/{projects-KVM3MN3Y.js → projects-3CRF57ZU.js} +2 -2
- package/dist/{rally-RKFSWC7E.js → rally-LBY24P4C.js} +2 -2
- package/dist/{remote-agents-ULPD6C5U.js → remote-agents-3NZPSHYG.js} +2 -3
- package/dist/{remote-workspace-XX6ARE6I.js → remote-workspace-M4IULGFZ.js} +24 -49
- package/dist/remote-workspace-M4IULGFZ.js.map +1 -0
- package/dist/{review-status-XKUKZF6J.js → review-status-J2YJGL3E.js} +2 -2
- package/dist/{specialist-context-C66TEMXS.js → specialist-context-74RQF5SR.js} +7 -5
- package/dist/{specialist-context-C66TEMXS.js.map → specialist-context-74RQF5SR.js.map} +1 -1
- package/dist/{specialist-logs-CJKXM3SR.js → specialist-logs-T5GW7CSU.js} +6 -4
- package/dist/{specialists-NXYD4Z62.js → specialists-HTYYFXHQ.js} +6 -4
- package/dist/specialists-HTYYFXHQ.js.map +1 -0
- package/dist/tmux-X2I5SAIJ.js +31 -0
- package/dist/tmux-X2I5SAIJ.js.map +1 -0
- package/dist/{traefik-5GL3Q7DJ.js → traefik-QXLZ4PO2.js} +4 -4
- package/dist/traefik-QXLZ4PO2.js.map +1 -0
- package/dist/{tunnel-BKC7KLBX.js → tunnel-7IOSRZVH.js} +3 -3
- package/dist/tunnel-7IOSRZVH.js.map +1 -0
- package/dist/{workspace-manager-ALBR62AS.js → workspace-manager-G6TTBPC3.js} +6 -6
- package/dist/workspace-manager-G6TTBPC3.js.map +1 -0
- package/package.json +2 -2
- package/scripts/build-cost-script.mjs +17 -0
- package/scripts/heartbeat-hook +28 -8
- package/scripts/record-cost-event.js +46 -7
- package/scripts/record-cost-event.ts +2 -1
- package/dist/chunk-44EOY2ZL.js.map +0 -1
- package/dist/chunk-4HST45MO.js.map +0 -1
- package/dist/chunk-565HZ6VV.js +0 -159
- package/dist/chunk-565HZ6VV.js.map +0 -1
- package/dist/chunk-6N2KBSJA.js.map +0 -1
- package/dist/chunk-CFCUOV3Q.js.map +0 -1
- package/dist/chunk-FQ66DECN.js.map +0 -1
- package/dist/chunk-ID4OYXVH.js.map +0 -1
- package/dist/chunk-KY2E2Q3T.js.map +0 -1
- package/dist/chunk-MOPGR3CL.js.map +0 -1
- package/dist/chunk-RLZQB7HS.js.map +0 -1
- package/dist/chunk-T7BBPDEJ.js.map +0 -1
- package/dist/chunk-ZDNQFWR5.js +0 -650
- package/dist/chunk-ZDNQFWR5.js.map +0 -1
- package/dist/dashboard/public/assets/index-DA6pnizT.js +0 -767
- package/dist/remote-workspace-XX6ARE6I.js.map +0 -1
- /package/dist/{agents-HNMF52RM.js.map → agents-5HWTDR4S.js.map} +0 -0
- /package/dist/{config-4CJNUE3O.js.map → archive-planning-U3AZAKWI.js.map} +0 -0
- /package/dist/{chunk-KBHRXV5T.js.map → chunk-43F4LDZ4.js.map} +0 -0
- /package/dist/{chunk-HOGYHJ2G.js.map → chunk-DW3PKGIS.js.map} +0 -0
- /package/dist/{chunk-DFNVHK3N.js.map → chunk-SUM2WVPF.js.map} +0 -0
- /package/dist/{chunk-ZTYHZMEC.js.map → chunk-ZWZNEA26.js.map} +0 -0
- /package/dist/{hume-CKJJ3OUU.js.map → clean-planning-7Z5YY64X.js.map} +0 -0
- /package/dist/{projects-KVM3MN3Y.js.map → close-issue-CTZK777I.js.map} +0 -0
- /package/dist/{rally-RKFSWC7E.js.map → compact-beads-72SHALOL.js.map} +0 -0
- /package/dist/{remote-agents-ULPD6C5U.js.map → config-FFTMBVHM.js.map} +0 -0
- /package/dist/{feedback-writer-T43PI5S2.js.map → feedback-writer-T2WCT6EZ.js.map} +0 -0
- /package/dist/{review-status-XKUKZF6J.js.map → hume-GVTB5BKW.js.map} +0 -0
- /package/dist/{specialist-logs-CJKXM3SR.js.map → projects-3CRF57ZU.js.map} +0 -0
- /package/dist/{specialists-NXYD4Z62.js.map → rally-LBY24P4C.js.map} +0 -0
- /package/dist/{traefik-5GL3Q7DJ.js.map → remote-agents-3NZPSHYG.js.map} +0 -0
- /package/dist/{tunnel-BKC7KLBX.js.map → review-status-J2YJGL3E.js.map} +0 -0
- /package/dist/{workspace-manager-ALBR62AS.js.map → specialist-logs-T5GW7CSU.js.map} +0 -0
|
@@ -0,0 +1,1725 @@
|
|
|
1
|
+
import {
|
|
2
|
+
resolveGitHubIssue
|
|
3
|
+
} from "./chunk-YAAT66RT.js";
|
|
4
|
+
import {
|
|
5
|
+
getTmuxSessionName,
|
|
6
|
+
init_specialists,
|
|
7
|
+
isRunning,
|
|
8
|
+
recordWake,
|
|
9
|
+
spawnEphemeralSpecialist,
|
|
10
|
+
wakeSpecialist
|
|
11
|
+
} from "./chunk-WJJ3ZIQ6.js";
|
|
12
|
+
import "./chunk-JQBV3Q2W.js";
|
|
13
|
+
import {
|
|
14
|
+
init_workspace_config,
|
|
15
|
+
replacePlaceholders
|
|
16
|
+
} from "./chunk-AAP4G6U7.js";
|
|
17
|
+
import "./chunk-ZN5RHWGR.js";
|
|
18
|
+
import "./chunk-USYP2SBE.js";
|
|
19
|
+
import {
|
|
20
|
+
init_config,
|
|
21
|
+
loadConfig
|
|
22
|
+
} from "./chunk-QAJAJBFW.js";
|
|
23
|
+
import {
|
|
24
|
+
init_projects,
|
|
25
|
+
loadProjectsConfig,
|
|
26
|
+
resolveProjectFromIssue
|
|
27
|
+
} from "./chunk-ZMJFEHGF.js";
|
|
28
|
+
import {
|
|
29
|
+
init_tmux,
|
|
30
|
+
sendKeysAsync,
|
|
31
|
+
sessionExists
|
|
32
|
+
} from "./chunk-W2OTF6OS.js";
|
|
33
|
+
import "./chunk-ZP6EWSZV.js";
|
|
34
|
+
import {
|
|
35
|
+
PANOPTICON_HOME,
|
|
36
|
+
init_paths
|
|
37
|
+
} from "./chunk-ZTFNYOC7.js";
|
|
38
|
+
import {
|
|
39
|
+
init_esm_shims
|
|
40
|
+
} from "./chunk-ZHC57RCV.js";
|
|
41
|
+
|
|
42
|
+
// src/lib/cloister/merge-agent.ts
// The init_* calls below look like esbuild-style lazy module initializers:
// each one runs the corresponding chunk's module body before its exports are
// used. Call order mirrors the original source's import order — TODO confirm
// against the unbundled source before reordering.
init_esm_shims();
init_tmux();
init_paths();
import { readFileSync, existsSync as existsSync3, mkdirSync, appendFileSync } from "fs";
import { join as join3, dirname, basename, relative } from "path";
import { fileURLToPath } from "url";
import { exec as exec3 } from "child_process";
import { promisify as promisify3 } from "util";
init_specialists();
init_projects();
|
|
53
|
+
|
|
54
|
+
// src/lib/cloister/validation.ts
// Bundled module body for the merge-validation helpers below
// (parseValidationOutput / runMergeValidation / quality gates).
init_esm_shims();
init_workspace_config();
init_config();
import { exec } from "child_process";
import { promisify } from "util";
import { join } from "path";
import { existsSync } from "fs";
// Promisified child_process.exec shared by the validation helpers below.
var execAsync = promisify(exec);
|
|
63
|
+
/**
 * Parses the textual output of the merge-validation script into a structured
 * result. The script emits three labelled sections ("Checking for conflict
 * markers", "Running build", "Running tests"); marker lines inside each
 * section determine that check's outcome: true (passed), false (failed), or
 * null (check was skipped / never reported).
 *
 * @param {string} output - Combined stdout+stderr of the validation script.
 * @param {number} exitCode - Exit code of the script process.
 * @returns {object} Structured result: { success, valid, conflictMarkersFound,
 *   buildPassed, testsPassed, failures, output }.
 */
function parseValidationOutput(output, exitCode) {
  const failures = [];
  const conflictFiles = [];
  let conflictMarkersFound = false;
  let buildPassed = null;
  let testsPassed = null;
  // Which labelled section of the script output the current line belongs to.
  let section = null;
  for (const rawLine of output.split("\n")) {
    const trimmed = rawLine.trim();
    if (trimmed.startsWith("Checking for conflict markers")) {
      section = "conflict";
    } else if (trimmed.startsWith("Running build")) {
      section = "build";
    } else if (trimmed.startsWith("Running tests")) {
      section = "test";
    }
    switch (section) {
      case "conflict":
        if (trimmed.startsWith("ERROR: Conflict")) {
          conflictMarkersFound = true;
        } else if (trimmed.includes("/") && !trimmed.startsWith("ERROR")) {
          // Non-error lines containing a path list the offending files.
          conflictFiles.push(trimmed);
        } else if (trimmed.startsWith("\u2713 No conflict markers found")) {
          conflictMarkersFound = false;
        }
        break;
      case "build":
        if (trimmed.startsWith("\u2713 Build passed")) {
          buildPassed = true;
        } else if (trimmed.startsWith("ERROR: Build failed") || trimmed.includes("VALIDATION FAILED: Build errors detected")) {
          buildPassed = false;
        } else if (trimmed.includes("skipping build check")) {
          buildPassed = null;
        }
        break;
      case "test":
        if (trimmed.startsWith("\u2713 Tests passed")) {
          testsPassed = true;
        } else if (trimmed.startsWith("ERROR: Tests failed") || trimmed.includes("VALIDATION FAILED: Test failures detected")) {
          testsPassed = false;
        } else if (trimmed.includes("skipping test check")) {
          testsPassed = null;
        }
        break;
    }
  }
  if (conflictMarkersFound) {
    failures.push({
      type: "conflict",
      files: conflictFiles.length > 0 ? conflictFiles : void 0,
      message: "Conflict markers detected in merged code"
    });
  }
  if (buildPassed === false) {
    failures.push({
      type: "build",
      message: "Build failed after merge"
    });
  }
  if (testsPassed === false) {
    failures.push({
      type: "test",
      message: "Tests failed after merge"
    });
  }
  // Valid only when the script exited cleanly and no check explicitly failed;
  // a null check (skipped) does not count against validity.
  const valid = exitCode === 0 && !conflictMarkersFound && (buildPassed === null || buildPassed === true) && (testsPassed === null || testsPassed === true);
  return {
    success: true,
    // Script ran successfully
    valid,
    conflictMarkersFound,
    buildPassed,
    testsPassed,
    failures,
    output
  };
}
|
|
147
|
+
/**
 * Runs the project's merge-validation script (scripts/validate-merge.sh by
 * default) and parses its output into a structured result. When no script
 * exists, validation is skipped and treated as passing.
 *
 * @param {object} context - { projectPath, validationScript?, baselineTestFailures? }.
 * @returns {Promise<object>} Parsed validation result (see parseValidationOutput).
 */
async function runMergeValidation(context) {
  const { projectPath, validationScript } = context;
  const scriptPath = validationScript || join(projectPath, "scripts", "validate-merge.sh");
  if (!existsSync(scriptPath)) {
    console.log(`[validation] No validation script at ${scriptPath}, skipping (specialist already validated)`);
    return {
      success: true,
      valid: true,
      skipped: true,
      conflictMarkersFound: false,
      buildPassed: null,
      testsPassed: null,
      failures: [],
      output: ""
    };
  }
  console.log(`[validation] Running validation script: ${scriptPath}`);
  console.log(`[validation] Project path: ${projectPath}`);
  try {
    const env = { ...process.env };
    if (context.baselineTestFailures !== void 0) {
      // Lets the script compare against pre-existing failures instead of zero.
      env.BASELINE_FAILURES = String(context.baselineTestFailures);
      console.log(`[validation] Baseline comparison mode: ${context.baselineTestFailures} pre-existing failures`);
    }
    const execOptions = {
      cwd: projectPath,
      env,
      maxBuffer: 10 * 1024 * 1024, // 10MB buffer for large outputs
      timeout: 10 * 60 * 1e3 // 10 minute timeout
    };
    const { stdout, stderr } = await execAsync(`bash "${scriptPath}" "${projectPath}"`, execOptions);
    console.log(`[validation] \u2713 Validation passed`);
    return parseValidationOutput(stdout + stderr, 0);
  } catch (error) {
    // exec rejects on non-zero exit; the partial output still carries the
    // section markers parseValidationOutput needs.
    const exitCode = error.code || 1;
    console.log(`[validation] \u2717 Validation failed (exit code ${exitCode})`);
    return parseValidationOutput((error.stdout || "") + (error.stderr || ""), exitCode);
  }
}
|
|
193
|
+
/**
 * Reverts the most recent merge in the given repository by hard-resetting to
 * ORIG_HEAD (the ref git writes before a merge/reset).
 *
 * @param {string} projectPath - Path to the git repository to revert.
 * @returns {Promise<boolean>} true when the revert succeeded, false otherwise.
 */
async function autoRevertMerge(projectPath) {
  console.log(`[validation] Auto-reverting merge in ${projectPath}`);
  const git = (cmd) => execAsync(cmd, { cwd: projectPath });
  try {
    const { stdout: beforeCommit } = await git("git rev-parse HEAD");
    await git("git reset --hard ORIG_HEAD");
    const { stdout: afterCommit } = await git("git rev-parse HEAD");
    console.log(
      `[validation] \u2713 Auto-revert successful: ${beforeCommit.trim()} -> ${afterCommit.trim()} (via ORIG_HEAD)`
    );
    return true;
  } catch (error) {
    console.error(`[validation] \u2717 Auto-revert failed:`, error.message);
    return false;
  }
}
|
|
214
|
+
/**
 * Executes the configured quality gates for a given phase, in declaration
 * order, stopping at the first failed *required* gate. Gates run on the host,
 * inside a docker container, or on a remote Fly VM over SSH depending on the
 * gate/opts configuration.
 *
 * @param {object} gates - Map of gate name -> gate config.
 * @param {string} projectPath - Workspace root the gates run against.
 * @param {string} [phase="pre_push"] - Only gates configured for this phase run.
 * @param {object} [opts={}] - { isRemote?, vmName?, placeholders? }.
 * @returns {Promise<Array>} One result object per executed gate.
 * @throws {Error} On unsafe vmName/path/command values in remote mode.
 */
async function runQualityGates(gates, projectPath, phase = "pre_push", opts = {}) {
  // Characters considered safe to interpolate into a remote SSH command line.
  const safePathPattern = /^[a-zA-Z0-9/_\-.]+$/;
  if (opts.isRemote && !opts.vmName) {
    throw new Error("Remote workspace requires vmName");
  }
  if (opts.isRemote && opts.vmName) {
    if (!/^[a-z0-9][a-z0-9-]*$/.test(opts.vmName)) {
      throw new Error(`Invalid vmName for SSH: ${opts.vmName}`);
    }
    if (!safePathPattern.test(projectPath)) {
      throw new Error(`Workspace path contains unsafe characters: ${projectPath}`);
    }
  }
  const outcomes = [];
  for (const [name, gate] of Object.entries(gates)) {
    // Skip gates configured for another phase (default phase: pre_push).
    if ((gate.phase || "pre_push") !== phase) continue;
    const required = gate.required !== false;
    const cwd = gate.path ? join(projectPath, gate.path) : projectPath;
    console.log(`[quality-gate] Running "${name}" (${required ? "required" : "optional"}) in ${cwd}`);
    const startedAt = Date.now();
    if (gate.type === "http_health") {
      const healthOutcome = await runHttpHealthGate(name, gate, required);
      outcomes.push(healthOutcome);
      if (!healthOutcome.passed && required) {
        console.log(`[quality-gate] \u2717 Required gate "${name}" failed \u2014 stopping`);
        break;
      }
      continue;
    }
    const isRemote = opts.isRemote && opts.vmName;
    const runsInContainer = Boolean(gate.container && gate.container_name);
    let resolvedCommand;
    if (isRemote) {
      if (!safePathPattern.test(cwd)) {
        throw new Error(`Gate "${name}" path resolves to unsafe characters for SSH: ${cwd}`);
      }
      if (gate.command.includes('"')) {
        throw new Error(`Gate "${name}" command contains double quotes which are unsafe in SSH context`);
      }
      const flyAppName = loadConfig().remote?.fly?.app ?? "pan-workspaces";
      resolvedCommand = `fly ssh console -a ${flyAppName} -C "cd ${cwd} && ${gate.command}"`;
    } else if (runsInContainer) {
      const containerName = opts.placeholders ? replacePlaceholders(gate.container_name, opts.placeholders) : gate.container_name;
      const containerWorkdir = gate.path ? `/workspaces/feature/${gate.path}` : "/workspaces/feature";
      const envFlags = gate.env ? Object.entries(gate.env).map(([k, v]) => `-e ${k}="${v}"`).join(" ") : "";
      resolvedCommand = `docker exec ${envFlags} -w "${containerWorkdir}" "${containerName}" ${gate.command}`;
      console.log(`[quality-gate] Running in container: ${containerName} (workdir: ${containerWorkdir})`);
    } else {
      resolvedCommand = gate.command;
    }
    try {
      const { stdout, stderr } = await execAsync(resolvedCommand, {
        // A host cwd only applies when the command runs directly on this machine.
        cwd: !isRemote && !runsInContainer ? cwd : void 0,
        env: { ...process.env, ...gate.env },
        maxBuffer: 10 * 1024 * 1024,
        // 10MB
        timeout: 5 * 60 * 1e3
        // 5 minute timeout per gate
      });
      const durationMs = Date.now() - startedAt;
      console.log(`[quality-gate] \u2713 "${name}" passed (${durationMs}ms)`);
      outcomes.push({
        name,
        passed: true,
        required,
        output: (stdout + stderr).slice(-2e3),
        // keep last 2KB
        durationMs
      });
    } catch (error) {
      const durationMs = Date.now() - startedAt;
      console.log(`[quality-gate] \u2717 "${name}" failed (${durationMs}ms): ${error.message?.slice(0, 200)}`);
      outcomes.push({
        name,
        passed: false,
        required,
        output: ((error.stdout || "") + (error.stderr || "")).slice(-2e3),
        durationMs,
        error: error.message?.slice(0, 500)
      });
      if (required) {
        console.log(`[quality-gate] \u2717 Required gate "${name}" failed \u2014 stopping`);
        break;
      }
    }
  }
  const passedCount = outcomes.filter((o) => o.passed).length;
  console.log(`[quality-gate] Complete: ${passedCount} passed, ${outcomes.length - passedCount} failed out of ${outcomes.length} gates`);
  return outcomes;
}
|
|
310
|
+
/**
 * Runs an "http_health" quality gate: optionally waits for a deployment to
 * settle, then curls the gate's URL and compares the HTTP status code against
 * the expected one.
 *
 * Fixes over the previous version:
 *  - `gate.wait ?? 120` instead of `gate.wait || 120`, so an explicit wait of
 *    0 means "check immediately" rather than silently becoming 120s.
 *  - The URL is validated before being interpolated into the single-quoted
 *    shell command; a malformed value or one containing a quote could
 *    otherwise inject shell syntax.
 *
 * @param {string} name - Gate name (echoed into the result).
 * @param {object} gate - Gate config: { url, wait?, expect_status? }.
 * @param {boolean} required - Whether the gate is required (echoed into result).
 * @returns {Promise<object>} { name, passed, required, output, durationMs, error? }.
 */
async function runHttpHealthGate(name, gate, required) {
  const url = gate.url;
  const misconfigured = (error) => ({
    name,
    passed: false,
    required,
    output: "",
    durationMs: 0,
    error
  });
  if (!url) {
    return misconfigured("http_health gate missing url");
  }
  try {
    new URL(url); // throws on malformed URLs
  } catch {
    return misconfigured(`http_health gate has invalid url: ${url}`);
  }
  if (url.includes("'")) {
    // A single quote would terminate the quoted curl argument below.
    return misconfigured(`http_health gate url contains unsafe characters: ${url}`);
  }
  const waitSeconds = gate.wait ?? 120;
  const expectStatus = gate.expect_status ?? 200;
  const startTime = Date.now();
  console.log(`[quality-gate] Waiting ${waitSeconds}s for deployment, then checking ${url}`);
  if (waitSeconds > 0) {
    await new Promise((resolve) => setTimeout(resolve, waitSeconds * 1e3));
  }
  try {
    const { stdout } = await execAsync(
      `curl -sL -o /dev/null -w '%{http_code}' --max-time 30 '${url}'`,
      { timeout: 60 * 1e3 }
    );
    const statusCode = Number.parseInt(stdout.trim(), 10);
    const passed = statusCode === expectStatus;
    const durationMs = Date.now() - startTime;
    console.log(`[quality-gate] Health check ${url}: ${statusCode} (expected ${expectStatus}) \u2014 ${passed ? "PASS" : "FAIL"}`);
    return {
      name,
      passed,
      required,
      output: `HTTP ${statusCode} from ${url}`,
      durationMs,
      error: passed ? void 0 : `Expected HTTP ${expectStatus}, got ${statusCode}`
    };
  } catch (error) {
    return {
      name,
      passed: false,
      required,
      output: error.message || "",
      durationMs: Date.now() - startTime,
      error: `Health check failed: ${error.message?.slice(0, 200)}`
    };
  }
}
|
|
355
|
+
|
|
356
|
+
// src/lib/cloister/merge-agent.ts
|
|
357
|
+
init_projects();
|
|
358
|
+
|
|
359
|
+
// src/lib/git-utils.ts
|
|
360
|
+
init_esm_shims();
|
|
361
|
+
import { existsSync as existsSync2, unlinkSync, readdirSync } from "fs";
|
|
362
|
+
import { join as join2 } from "path";
|
|
363
|
+
import { exec as exec2 } from "child_process";
|
|
364
|
+
import { promisify as promisify2 } from "util";
|
|
365
|
+
var execAsync2 = promisify2(exec2);
/**
 * Best-effort check for git processes currently operating on the repository.
 * Probes with `fuser` on the .git directory first; when that is unavailable
 * or reports nothing, falls back to scanning `ps` output. Any failure in both
 * probes is treated as "no git processes running".
 *
 * @param {string} repoPath - Path to the repository root.
 * @returns {Promise<boolean>} true when a git process appears to be active.
 */
async function hasRunningGitProcesses(repoPath) {
  // Primary probe: fuser exits non-zero (rejecting the promise) when no
  // process holds files under .git open.
  try {
    const gitDir = join2(repoPath, ".git");
    const { stdout } = await execAsync2(`fuser "${gitDir}" 2>/dev/null`, {
      encoding: "utf-8"
    });
    return stdout.trim().length > 0;
  } catch {
    // Fall through to the ps-based probe below.
  }
  try {
    // Escape regex metacharacters so the path is matched literally by grep -E.
    const escapedPath = repoPath.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const { stdout } = await execAsync2(
      `ps aux | grep -E "git.*${escapedPath}" | grep -v grep`,
      { encoding: "utf-8" }
    );
    return stdout.trim().length > 0;
  } catch {
    // grep exits non-zero on no match; treat as "nothing running".
    return false;
  }
}
|
|
389
|
+
/**
 * Locates git lock files that may be blocking operations in a repository:
 * .git/index.lock plus any *.lock file under .git/refs (recursively).
 *
 * @param {string} repoPath - Path to the repository root.
 * @returns {string[]} Absolute paths of the lock files found.
 */
function findGitLockFiles(repoPath) {
  const found = [];
  const indexLock = join2(repoPath, ".git", "index.lock");
  if (existsSync2(indexLock)) {
    found.push(indexLock);
  }
  const refsDir = join2(repoPath, ".git", "refs");
  if (existsSync2(refsDir)) {
    // Depth-first walk in directory-listing order.
    const walk = (dir) => {
      for (const entry of readdirSync(dir, { withFileTypes: true })) {
        const entryPath = join2(dir, entry.name);
        if (entry.isDirectory()) {
          walk(entryPath);
        } else if (entry.name.endsWith(".lock")) {
          found.push(entryPath);
        }
      }
    };
    try {
      walk(refsDir);
    } catch {
      // Best effort: an unreadable directory aborts the walk, keeping what
      // was found so far.
    }
  }
  return found;
}
|
|
415
|
+
/**
 * Removes stale git lock files from a repository, but only when no git
 * process appears to be running (deleting a live lock could corrupt state).
 *
 * @param {string} repoPath - Path to the repository root.
 * @returns {Promise<object>} { found, removed, errors } where errors is a list
 *   of { file, error } entries.
 */
async function cleanupStaleLocks(repoPath) {
  const summary = {
    found: [],
    removed: [],
    errors: []
  };
  summary.found = findGitLockFiles(repoPath);
  if (summary.found.length === 0) {
    return summary;
  }
  // Refuse to touch locks while git is actively using the repository.
  if (await hasRunningGitProcesses(repoPath)) {
    summary.errors.push({
      file: "N/A",
      error: "Git processes are running - not safe to remove locks"
    });
    return summary;
  }
  for (const lockFile of summary.found) {
    try {
      unlinkSync(lockFile);
      summary.removed.push(lockFile);
    } catch (error) {
      summary.errors.push({
        file: lockFile,
        error: error instanceof Error ? error.message : String(error)
      });
    }
  }
  return summary;
}
|
|
445
|
+
|
|
446
|
+
// src/lib/cloister/merge-agent.ts
var execAsync3 = promisify3(exec3);
// ESM replacements for CommonJS __filename/__dirname.
var __filename2 = fileURLToPath(import.meta.url);
var __dirname2 = dirname(__filename2);
// On-disk layout for merge-agent bookkeeping under the panopticon home dir.
var SPECIALISTS_DIR = join3(PANOPTICON_HOME, "specialists");
var MERGE_HISTORY_DIR = join3(SPECIALISTS_DIR, "merge-agent");
var MERGE_HISTORY_FILE = join3(MERGE_HISTORY_DIR, "history.jsonl");
// 15 minutes, in milliseconds.
var MERGE_TIMEOUT_MS = 15 * 60 * 1e3;
|
|
454
|
+
/**
 * Renders the merge-agent prompt template (prompts/merge-agent.md next to this
 * module) with the merge context and wraps it in the panopticon
 * orchestration-context markers.
 *
 * @param {object} context - { projectPath, sourceBranch, targetBranch,
 *   issueId, conflictFiles, testCommand? }.
 * @returns {string} The rendered, wrapped prompt.
 * @throws {Error} When the template file does not exist.
 */
function buildMergePrompt(context) {
  const templatePath = join3(__dirname2, "prompts", "merge-agent.md");
  if (!existsSync3(templatePath)) {
    throw new Error(`Merge agent prompt template not found at ${templatePath}`);
  }
  const apiUrl = process.env.DASHBOARD_URL || `http://localhost:${process.env.API_PORT || process.env.PORT || "3011"}`;
  // Placeholder pattern -> replacement text, applied in order over the template.
  const substitutions = [
    [/\{\{projectPath\}\}/g, context.projectPath],
    [/\{\{sourceBranch\}\}/g, context.sourceBranch],
    [/\{\{targetBranch\}\}/g, context.targetBranch],
    [/\{\{issueId\}\}/g, context.issueId],
    [/\{\{conflictFiles\}\}/g, context.conflictFiles.map((f) => ` - ${f}`).join("\n")],
    [/\{\{testCommand\}\}/g, context.testCommand || "skip"],
    [/\{\{apiUrl\}\}/g, apiUrl]
  ];
  let prompt = readFileSync(templatePath, "utf-8");
  for (const [pattern, replacement] of substitutions) {
    prompt = prompt.replace(pattern, replacement);
  }
  return `<!-- panopticon:orchestration-context-start -->
${prompt}
<!-- panopticon:orchestration-context-end -->`;
}
|
|
468
|
+
/**
 * Guesses the project's test command from well-known ecosystem marker files.
 * A package.json with a "test" script wins; otherwise the first matching
 * marker (pom.xml, Cargo.toml, pytest.ini, setup.py) decides.
 *
 * @param {string} projectPath - Project root to inspect.
 * @returns {string} A shell test command, or "skip" when nothing is detected.
 */
function detectTestCommand(projectPath) {
  const packageJsonPath = join3(projectPath, "package.json");
  if (existsSync3(packageJsonPath)) {
    try {
      const pkg = JSON.parse(readFileSync(packageJsonPath, "utf-8"));
      if (pkg.scripts?.test) {
        return "npm test";
      }
    } catch {
      // Unreadable or invalid package.json — fall through to other ecosystems.
    }
  }
  // Marker file -> test command, checked in priority order.
  const markers = [
    ["pom.xml", "mvn test"],
    ["Cargo.toml", "cargo test"],
    ["pytest.ini", "pytest"],
    ["setup.py", "pytest"]
  ];
  for (const [markerFile, command] of markers) {
    if (existsSync3(join3(projectPath, markerFile))) {
      return command;
    }
  }
  return "skip";
}
|
|
490
|
+
/**
 * Best-effort notification to the TLDR daemon to reindex source files changed
 * by the last commit. Skips silently when the project has no .venv, when no
 * source files changed, or when the daemon is not running; all errors are
 * caught and logged rather than propagated.
 *
 * @param {string} projectPath - Project root (also the git cwd).
 * @param {string} sourceBranch - Merged branch name (currently unused here;
 *   kept for interface compatibility with callers).
 */
async function notifyTldrDaemon(projectPath, sourceBranch) {
  try {
    console.log(`[merge-agent] Notifying TLDR daemon to reindex changed files...`);
    const venvPath = join3(projectPath, ".venv");
    if (!existsSync3(venvPath)) {
      console.log(`[merge-agent] No .venv found, skipping TLDR notification`);
      return;
    }
    const { stdout } = await execAsync3(`git diff --name-only HEAD~1 HEAD`, {
      cwd: projectPath,
      encoding: "utf-8"
    });
    // Only source-code files are worth reindexing.
    const indexableExtensions = ["ts", "js", "tsx", "jsx", "py", "java", "go", "rs", "cpp", "c", "h"];
    const changedFiles = stdout
      .trim()
      .split("\n")
      .filter((f) => f.trim().length > 0)
      .filter((f) => {
        const ext = f.split(".").pop()?.toLowerCase();
        return ext ? indexableExtensions.includes(ext) : false;
      });
    if (changedFiles.length === 0) {
      console.log(`[merge-agent] No source files changed, skipping TLDR notification`);
      return;
    }
    console.log(`[merge-agent] Found ${changedFiles.length} changed source files to reindex`);
    const { getTldrDaemonService } = await import("./tldr-daemon-T3THOUGT.js");
    const daemon = getTldrDaemonService(projectPath, venvPath);
    const daemonStatus = await daemon.getStatus();
    if (!daemonStatus.running) {
      console.log(`[merge-agent] TLDR daemon not running, skipping notification`);
      return;
    }
    console.log(`[merge-agent] Triggering TLDR index warm...`);
    await daemon.warm(true);
    console.log(`[merge-agent] \u2713 TLDR daemon notified to reindex`);
    logActivity("tldr_notified", `Notified TLDR daemon to reindex ${changedFiles.length} files`);
  } catch (error) {
    console.warn(`[merge-agent] Failed to notify TLDR daemon: ${error.message}`);
    logActivity("tldr_notify_error", `TLDR notification failed: ${error.message}`);
  }
}
|
|
527
|
+
// Issue IDs whose post-merge lifecycle already completed, used to guard
// against running the cleanup twice for the same issue.
var _completedPostMerge = /* @__PURE__ */ new Set();
// Presumably issueId -> failed close-attempt count for the close-issue
// circuit breaker (capped by MAX_CLOSE_RETRIES) — confirm against
// closeIssueWithCircuitBreaker, which is defined elsewhere in this file.
var _closeIssueFailures = /* @__PURE__ */ new Map();
var MAX_CLOSE_RETRIES = 3;
|
|
530
|
+
async function postMergeLifecycle(issueId, projectPath) {
|
|
531
|
+
if (_completedPostMerge.has(issueId)) {
|
|
532
|
+
console.log(`[merge-agent] postMergeLifecycle already completed for ${issueId}, skipping`);
|
|
533
|
+
return;
|
|
534
|
+
}
|
|
535
|
+
console.log(`[merge-agent] Running post-merge cleanup for ${issueId}`);
|
|
536
|
+
try {
|
|
537
|
+
const { movePrd } = await import("./archive-planning-U3AZAKWI.js");
|
|
538
|
+
const prdResult = await movePrd({ issueId, projectPath });
|
|
539
|
+
if (prdResult.success && !prdResult.skipped) {
|
|
540
|
+
console.log(`[merge-agent] \u2713 ${prdResult.details?.join("; ")}`);
|
|
541
|
+
logActivity("prd_moved", `Moved ${issueId} PRD to completed directory`);
|
|
542
|
+
} else if (prdResult.skipped) {
|
|
543
|
+
console.log(`[merge-agent] PRD move skipped: ${prdResult.details?.join("; ")}`);
|
|
544
|
+
} else {
|
|
545
|
+
console.warn(`[merge-agent] PRD move failed: ${prdResult.error}`);
|
|
546
|
+
}
|
|
547
|
+
} catch (err) {
|
|
548
|
+
console.warn(`[merge-agent] Could not move PRD: ${err}`);
|
|
549
|
+
}
|
|
550
|
+
try {
|
|
551
|
+
const { cleanPlanningArtifacts } = await import("./clean-planning-7Z5YY64X.js");
|
|
552
|
+
const cleanResult = await cleanPlanningArtifacts({ issueId, projectPath });
|
|
553
|
+
if (cleanResult.success && !cleanResult.skipped) {
|
|
554
|
+
console.log(`[merge-agent] \u2713 ${cleanResult.details?.join("; ")}`);
|
|
555
|
+
logActivity("planning_artifacts_cleaned", cleanResult.details?.join("; ") || "Planning artifacts removed");
|
|
556
|
+
} else if (cleanResult.skipped) {
|
|
557
|
+
console.log(`[merge-agent] Planning artifact cleanup skipped: ${cleanResult.details?.join("; ")}`);
|
|
558
|
+
} else {
|
|
559
|
+
console.warn(`[merge-agent] Planning artifact cleanup failed: ${cleanResult.error}`);
|
|
560
|
+
}
|
|
561
|
+
} catch (err) {
|
|
562
|
+
console.warn(`[merge-agent] Could not clean planning artifacts: ${err}`);
|
|
563
|
+
}
|
|
564
|
+
closeIssueWithCircuitBreaker(issueId, projectPath);
|
|
565
|
+
try {
|
|
566
|
+
const { cleanupMergedLabels } = await import("./label-cleanup-4HJVX6NP.js");
|
|
567
|
+
const ghResolved = resolveGitHubIssue(issueId);
|
|
568
|
+
const labelCtx = ghResolved.isGitHub ? { issueId, projectPath, github: { owner: ghResolved.owner, repo: ghResolved.repo, number: ghResolved.number } } : { issueId, projectPath };
|
|
569
|
+
const labelResult = await cleanupMergedLabels(labelCtx);
|
|
570
|
+
if (labelResult.success && !labelResult.skipped) {
|
|
571
|
+
console.log(`[merge-agent] \u2713 ${labelResult.details?.join("; ")}`);
|
|
572
|
+
logActivity("labels_cleaned", labelResult.details?.join("; ") || "Labels cleaned");
|
|
573
|
+
} else if (labelResult.skipped) {
|
|
574
|
+
console.log(`[merge-agent] Label cleanup skipped: ${labelResult.details?.join("; ")}`);
|
|
575
|
+
} else {
|
|
576
|
+
console.warn(`[merge-agent] Label cleanup failed (non-fatal): ${labelResult.error}`);
|
|
577
|
+
}
|
|
578
|
+
} catch (err) {
|
|
579
|
+
console.warn(`[merge-agent] Could not clean labels: ${err}`);
|
|
580
|
+
}
|
|
581
|
+
try {
|
|
582
|
+
const { compactBeads } = await import("./compact-beads-72SHALOL.js");
|
|
583
|
+
const beadsResult = await compactBeads({ issueId, projectPath });
|
|
584
|
+
if (beadsResult.success && !beadsResult.skipped) {
|
|
585
|
+
console.log(`[merge-agent] \u2713 ${beadsResult.details?.join("; ")}`);
|
|
586
|
+
logActivity("beads_compaction_complete", beadsResult.details?.join("; ") || "Beads compacted");
|
|
587
|
+
}
|
|
588
|
+
} catch (err) {
|
|
589
|
+
console.warn(`[merge-agent] Beads compaction failed: ${err}`);
|
|
590
|
+
}
|
|
591
|
+
try {
|
|
592
|
+
const { getAgentState, saveAgentState } = await import("./agents-5HWTDR4S.js");
|
|
593
|
+
const { killSession, sessionExists: sessionExists2 } = await import("./tmux-X2I5SAIJ.js");
|
|
594
|
+
const agentId = `agent-${issueId.toLowerCase()}`;
|
|
595
|
+
const agentState = getAgentState(agentId);
|
|
596
|
+
if (agentState && sessionExists2(agentId)) {
|
|
597
|
+
killSession(agentId);
|
|
598
|
+
agentState.status = "stopped";
|
|
599
|
+
saveAgentState(agentState);
|
|
600
|
+
console.log(`[merge-agent] \u2713 Killed work agent session ${agentId} to free resources`);
|
|
601
|
+
logActivity("agent_session_killed", `Freed resources: killed tmux session for ${agentId}`);
|
|
602
|
+
}
|
|
603
|
+
const planningId = `planning-${issueId.toLowerCase()}`;
|
|
604
|
+
if (sessionExists2(planningId)) {
|
|
605
|
+
killSession(planningId);
|
|
606
|
+
console.log(`[merge-agent] \u2713 Killed planning agent session ${planningId}`);
|
|
607
|
+
}
|
|
608
|
+
} catch (err) {
|
|
609
|
+
console.warn(`[merge-agent] Could not kill agent sessions: ${err}`);
|
|
610
|
+
}
|
|
611
|
+
try {
|
|
612
|
+
const { findWorkspacePath } = await import("./archive-planning-U3AZAKWI.js");
|
|
613
|
+
const { stopWorkspaceDocker } = await import("./workspace-manager-G6TTBPC3.js");
|
|
614
|
+
const issueLower = issueId.toLowerCase();
|
|
615
|
+
const workspacePath = findWorkspacePath(projectPath, issueLower);
|
|
616
|
+
if (workspacePath) {
|
|
617
|
+
const projName = basename(projectPath);
|
|
618
|
+
const dockerResult = await stopWorkspaceDocker(workspacePath, projName, issueLower);
|
|
619
|
+
if (dockerResult.containersFound) {
|
|
620
|
+
console.log(`[merge-agent] \u2713 Stopped Docker containers: ${dockerResult.steps.join("; ")}`);
|
|
621
|
+
logActivity("docker_cleanup", `Stopped Docker for ${issueId}: ${dockerResult.steps.join("; ")}`);
|
|
622
|
+
}
|
|
623
|
+
}
|
|
624
|
+
} catch (err) {
|
|
625
|
+
console.warn(`[merge-agent] Docker cleanup failed (non-fatal): ${err}`);
|
|
626
|
+
}
|
|
627
|
+
_completedPostMerge.add(issueId);
|
|
628
|
+
console.log(`[merge-agent] Post-merge cleanup completed for ${issueId}. Issue moved to Done \u2014 awaiting close-out.`);
|
|
629
|
+
logActivity("merge_complete", `Merged ${issueId}. Issue moved to Done \u2014 awaiting close-out.`);
|
|
630
|
+
}
|
|
631
|
+
// Attempt to close/move an issue to Done, guarded by a per-issue circuit
// breaker: after MAX_CLOSE_RETRIES consecutive failures we stop retrying and
// leave the close to the close-out ceremony. The actual close runs as a
// fire-and-forget async task; callers never await it.
function closeIssueWithCircuitBreaker(issueId, projectPath) {
  const priorFailures = _closeIssueFailures.get(issueId) || 0;
  if (priorFailures >= MAX_CLOSE_RETRIES) {
    console.log(`[merge-agent] Circuit breaker open for ${issueId} issue close (${priorFailures} failures). Will be closed during close-out ceremony.`);
    return;
  }
  // Shared failure bookkeeping for both the per-step and the catch-all paths.
  const recordFailure = () => {
    const bumped = (_closeIssueFailures.get(issueId) || 0) + 1;
    _closeIssueFailures.set(issueId, bumped);
    return bumped;
  };
  void (async () => {
    try {
      const { closeIssue } = await import("./close-issue-CTZK777I.js");
      const gh = resolveGitHubIssue(issueId);
      // Include GitHub coordinates only when the issue resolves to GitHub.
      const closeCtx = gh.isGitHub
        ? { issueId, projectPath, github: { owner: gh.owner, repo: gh.repo, number: gh.number } }
        : { issueId, projectPath };
      const stepResults = await closeIssue(closeCtx, { applyLabel: false, comment: "Merged to main via Panopticon merge-agent" });
      let sawFailure = false;
      for (const step of stepResults) {
        if (step.skipped) {
          continue;
        }
        if (step.success) {
          console.log(`[merge-agent] \u2713 ${step.details?.join("; ")}`);
          logActivity(step.step, step.details?.join("; ") || step.step);
        } else {
          console.warn(`[merge-agent] \u2717 ${step.step} failed: ${step.error}`);
          sawFailure = true;
        }
      }
      if (!sawFailure) {
        // Success resets the breaker entirely.
        _closeIssueFailures.delete(issueId);
        return;
      }
      const failureTotal = recordFailure();
      if (failureTotal >= MAX_CLOSE_RETRIES) {
        console.warn(`[merge-agent] Circuit breaker tripped for ${issueId} after ${failureTotal} failures. Issue close deferred to close-out ceremony.`);
      }
    } catch (err) {
      const failureTotal = recordFailure();
      console.warn(`[merge-agent] Could not move issue to Done (attempt ${failureTotal}/${MAX_CLOSE_RETRIES}): ${err}`);
    }
  })();
}
|
|
669
|
+
// Clear all post-merge tracking for an issue: both the "already completed
// post-merge" marker and any accumulated close-retry failures, so a fresh
// merge attempt starts with a clean slate.
function resetPostMergeState(issueId) {
  for (const tracker of [_completedPostMerge, _closeIssueFailures]) {
    tracker.delete(issueId);
  }
}
|
|
673
|
+
// Parse a merge specialist's terminal output into a structured result.
// Primary path: structured "TAG: value" markers (MERGE_RESULT, RESOLVED_FILES,
// FAILED_FILES, TESTS, VALIDATION, REASON, NOTES); later occurrences of a
// marker overwrite earlier ones. Fallback path: heuristic phrase matching on
// the lowercased output when the agent did not emit structured markers.
function parseAgentOutput(output) {
  let mergeResult = null;
  let resolvedFiles = [];
  let failedFiles = [];
  let testsStatus = null;
  let validationStatus = null;
  let reason = "";
  let notes = "";
  // Comma-separated list -> trimmed, non-empty entries.
  const listFrom = (csv) => csv.split(",").map((f) => f.trim()).filter((f) => f.length > 0);
  for (const rawLine of output.split("\n")) {
    const line = rawLine.trim();
    const valueAfter = (tag) => line.substring(tag.length).trim();
    if (line.startsWith("MERGE_RESULT:")) {
      const v = valueAfter("MERGE_RESULT:");
      if (v === "SUCCESS" || v === "FAILURE") {
        mergeResult = v;
      }
    } else if (line.startsWith("RESOLVED_FILES:")) {
      resolvedFiles = listFrom(valueAfter("RESOLVED_FILES:"));
    } else if (line.startsWith("FAILED_FILES:")) {
      failedFiles = listFrom(valueAfter("FAILED_FILES:"));
    } else if (line.startsWith("TESTS:")) {
      const v = valueAfter("TESTS:");
      if (v === "PASS" || v === "FAIL" || v === "SKIP") {
        testsStatus = v;
      }
    } else if (line.startsWith("VALIDATION:")) {
      const v = valueAfter("VALIDATION:");
      if (v === "PASS" || v === "FAIL") {
        validationStatus = v;
      }
    } else if (line.startsWith("REASON:")) {
      reason = valueAfter("REASON:");
    } else if (line.startsWith("NOTES:")) {
      notes = valueAfter("NOTES:");
    }
  }
  if (mergeResult === "SUCCESS") {
    return {
      success: true,
      resolvedFiles,
      testsStatus: testsStatus || "SKIP",
      validationStatus: validationStatus || "NOT_RUN",
      notes,
      output
    };
  }
  if (mergeResult === "FAILURE") {
    return {
      success: false,
      failedFiles,
      validationStatus: validationStatus || "NOT_RUN",
      reason,
      notes,
      output
    };
  }
  // No structured verdict: fall back to scanning for human-readable phrases.
  const haystack = output.toLowerCase();
  const saysSuccess = [
    "merge task complete",
    "successfully merged",
    "merge complete",
    "pushed merge commit",
    "successfully merged and pushed"
  ].some((phrase) => haystack.includes(phrase));
  const saysFailure = [
    "merge failed",
    "merge task failed",
    "could not merge",
    "conflict not resolved"
  ].some((phrase) => haystack.includes(phrase));
  if (saysSuccess && !saysFailure) {
    // Infer a test status from common phrasings; default to SKIP when unclear.
    let detectedTestStatus = "SKIP";
    if (haystack.includes("tests: pass") || haystack.includes("tests passed") || output.match(/\d+ passed/)) {
      detectedTestStatus = "PASS";
    } else if (haystack.includes("tests: fail") || haystack.includes("tests failed")) {
      detectedTestStatus = "FAIL";
    }
    console.log("[merge-agent] Detected success from human-readable output");
    return {
      success: true,
      testsStatus: detectedTestStatus,
      validationStatus: "PASS",
      notes: "Detected from human-readable output (agent did not use structured format)",
      output
    };
  }
  if (saysFailure) {
    console.log("[merge-agent] Detected failure from human-readable output");
    return {
      success: false,
      validationStatus: "NOT_RUN",
      reason: "Detected merge failure from agent output",
      output
    };
  }
  // Neither structured nor heuristic markers found.
  return {
    success: false,
    validationStatus: "NOT_RUN",
    reason: "Agent did not report result in expected format",
    output
  };
}
|
|
785
|
+
// List the paths that are currently in the unmerged (conflicted) state in the
// given repository. `--diff-filter=U` restricts the diff listing to unmerged
// entries. Returns an empty array on any git error (logged, not thrown).
async function getConflictFiles(projectPath) {
  try {
    const gitOptions = { cwd: projectPath, encoding: "utf-8" };
    const result = await execAsync3("git diff --name-only --diff-filter=U", gitOptions);
    const conflicted = [];
    for (const rawLine of result.stdout.split("\n")) {
      const file = rawLine.trim();
      if (file.length > 0) {
        conflicted.push(file);
      }
    }
    return conflicted;
  } catch (error) {
    console.error("Failed to get conflict files:", error);
    return [];
  }
}
|
|
797
|
+
// Append one JSON-lines record of a merge attempt to the merge history file,
// creating the history directory on first use. The agent's full terminal
// output is deliberately blanked out to keep history entries small.
function logMergeHistory(context, result, sessionId) {
  if (!existsSync3(MERGE_HISTORY_DIR)) {
    mkdirSync(MERGE_HISTORY_DIR, { recursive: true });
  }
  const record = {
    timestamp: new Date().toISOString(),
    issueId: context.issueId,
    sourceBranch: context.sourceBranch,
    targetBranch: context.targetBranch,
    conflictFiles: context.conflictFiles,
    // Copy the result but drop the bulky raw output before persisting.
    result: { ...result, output: void 0 },
    sessionId
  };
  appendFileSync(MERGE_HISTORY_FILE, `${JSON.stringify(record)}\n`, "utf-8");
}
|
|
816
|
+
// Best-effort append of a timestamped activity record (JSON lines) to the
// shared Panopticon activity log. Any filesystem error is swallowed so that
// logging can never break the merge flow.
function logActivity(action, details) {
  const ACTIVITY_LOG = "/tmp/panopticon-activity.log";
  try {
    const record = {
      timestamp: new Date().toISOString(),
      source: "merge-agent",
      action,
      details
    };
    appendFileSync(ACTIVITY_LOG, `${JSON.stringify(record)}\n`);
  } catch {
    // Intentionally ignored: activity logging is non-critical.
  }
}
|
|
829
|
+
// Snapshot the visible contents of a tmux pane. Returns the captured text,
// or an empty string when the session does not exist / tmux fails.
async function captureTmuxOutput(sessionName) {
  try {
    const capture = await execAsync3(`tmux capture-pane -t "${sessionName}" -p`, { encoding: "utf-8" });
    return capture.stdout;
  } catch {
    return "";
  }
}
|
|
837
|
+
// Check whether the global merge specialist tmux session exists.
// `tmux has-session` exits non-zero when the session is absent, which
// surfaces here as a rejected promise -> false.
async function isMergeAgentRunning() {
  return execAsync3(`tmux has-session -t specialist-merge-agent 2>/dev/null`, { encoding: "utf-8" }).then(
    () => true,
    () => false
  );
}
|
|
845
|
+
// Deliver a message to the per-issue work agent's tmux session
// (`agent-<issueid>`). Returns true on delivery, false when the session is
// missing or sending fails; failures are logged but never thrown.
async function sendMessageToAgent(issueId, message) {
  const target = `agent-${issueId.toLowerCase()}`;
  const reportUndeliverable = (why) => {
    console.log(`[merge-agent] Could not send message to ${target} (${why})`);
    return false;
  };
  try {
    if (!sessionExists(target)) {
      return reportUndeliverable("session does not exist");
    }
    await sendKeysAsync(target, message);
    console.log(`[merge-agent] Sent message to ${target}`);
    // Only the first 100 characters go to the activity log.
    logActivity("agent_message", `Sent to ${target}: ${message.slice(0, 100)}...`);
    return true;
  } catch {
    return reportUndeliverable("session may not exist");
  }
}
|
|
861
|
+
/**
 * Drive a full merge attempt via the merge specialist tmux session.
 *
 * Sends a merge prompt to the specialist, then polls the tmux pane until a
 * result marker appears (structured `MERGE_RESULT:` or a human-readable
 * completion phrase) or MERGE_TIMEOUT_MS elapses. On agent-reported success
 * it runs post-merge validation and quality gates, auto-reverting the merge
 * when either fails. Every outcome is appended to the merge history.
 *
 * @param context - merge context: issueId, projectPath, conflictFiles,
 *   sourceBranch, and optionally testCommand (detected when absent).
 *   NOTE(review): context.testCommand is mutated in place — callers see it.
 * @returns a result object: `{ success, ... }` with validationStatus/reason/
 *   notes/output fields depending on the outcome. Never throws.
 */
async function spawnMergeAgent(context) {
  console.log(`[merge-agent] Starting conflict resolution for ${context.issueId}`);
  logActivity("merge_start", `Starting merge for ${context.issueId}: ${context.conflictFiles.join(", ")}`);
  // Fill in a test command if the caller did not supply one.
  if (!context.testCommand) {
    context.testCommand = detectTestCommand(context.projectPath);
  }
  const tmuxSession = getTmuxSessionName("merge-agent");
  console.log(`[merge-agent] Using tmux session: ${tmuxSession}`);
  console.log(`[merge-agent] Test command: ${context.testCommand}`);
  // The specialist session must already exist; this function never spawns it.
  if (!await isMergeAgentRunning()) {
    console.log(`[merge-agent] Session not running, cannot proceed`);
    logActivity("merge_error", `Session ${tmuxSession} not running`);
    return {
      success: false,
      reason: `Specialist ${tmuxSession} is not running. Start Cloister first.`
    };
  }
  const prompt = buildMergePrompt(context);
  try {
    console.log(`[merge-agent] Sending task to ${tmuxSession}...`);
    await sendKeysAsync(tmuxSession, prompt);
    recordWake("merge-agent");
    logActivity("merge_task_sent", `Task sent to ${tmuxSession}`);
    console.log(`[merge-agent] Task sent, waiting for completion...`);
    const startTime = Date.now();
    const POLL_INTERVAL = 5e3; // poll the pane every 5 seconds
    let lastOutput = "";
    while (Date.now() - startTime < MERGE_TIMEOUT_MS) {
      await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL));
      const output = await captureTmuxOutput(tmuxSession);
      // Only re-parse when the pane contents actually changed.
      if (output !== lastOutput) {
        lastOutput = output;
        const lowerOutput = output.toLowerCase();
        // Two completion signals: the structured marker, or known phrases.
        const hasStructuredResult = output.includes("MERGE_RESULT:");
        const hasHumanReadableResult = lowerOutput.includes("merge task complete") || lowerOutput.includes("successfully merged") || lowerOutput.includes("merge complete") || lowerOutput.includes("merge failed") || lowerOutput.includes("merge task failed");
        if (hasStructuredResult || hasHumanReadableResult) {
          console.log(`[merge-agent] Found result markers in output (structured: ${hasStructuredResult}, human-readable: ${hasHumanReadableResult})`);
          const result = parseAgentOutput(output);
          if (result.success) {
            console.log(`[merge-agent] Agent reported success, running post-merge validation...`);
            logActivity("merge_validation_start", `Running validation for ${context.issueId}`);
            // Try to scrape the baseline failure count from the agent's test
            // summary table (the "Failed │ N │" cell) so validation can tell
            // new failures apart from pre-existing ones.
            const baselineMatch = output.match(/Failed\s*│\s*(\d+)\s*│/);
            const baselineTestFailures = baselineMatch ? parseInt(baselineMatch[1], 10) : void 0;
            if (baselineTestFailures !== void 0) {
              console.log(`[merge-agent] Extracted baseline failure count from agent: ${baselineTestFailures}`);
            }
            const validationResult = await runMergeValidation({
              projectPath: context.projectPath,
              issueId: context.issueId,
              baselineTestFailures
            });
            if (validationResult.valid) {
              console.log(`[merge-agent] \u2713 Validation passed`);
              // Validation passed; required quality gates are the last hurdle.
              const gateResults = await runProjectQualityGates(context.projectPath, "pre_push");
              const failedRequired = gateResults.filter((g) => !g.passed && g.required);
              if (failedRequired.length > 0) {
                const failedNames = failedRequired.map((g) => g.name).join(", ");
                console.log(`[merge-agent] \u2717 Quality gates failed: ${failedNames}`);
                logActivity("merge_quality_gate_fail", `Quality gates failed for ${context.issueId}: ${failedNames}`);
                // Roll the merge back rather than ship a gate-failing merge.
                const revertSuccess = await autoRevertMerge(context.projectPath);
                const revertNote = revertSuccess ? "Merge auto-reverted to clean state" : "WARNING: Auto-revert failed - manual cleanup required";
                const failedResult = {
                  success: false,
                  validationStatus: "FAIL",
                  reason: `Quality gate(s) failed: ${failedNames}. ${revertNote}`,
                  notes: result.notes,
                  output
                };
                logMergeHistory(context, failedResult);
                return failedResult;
              }
              // Fully green: record, run post-merge lifecycle, notify daemon.
              logActivity("merge_success", `Merge and validation completed for ${context.issueId}`);
              result.validationStatus = "PASS";
              logMergeHistory(context, result);
              await postMergeLifecycle(context.issueId, context.projectPath);
              await notifyTldrDaemon(context.projectPath, context.sourceBranch);
              return result;
            } else {
              // Validation failed: revert the merge and report the failures.
              console.log(`[merge-agent] \u2717 Validation failed:`, validationResult.failures);
              logActivity("merge_validation_fail", `Validation failed for ${context.issueId}: ${validationResult.failures.map((f) => f.type).join(", ")}`);
              const revertSuccess = await autoRevertMerge(context.projectPath);
              const failureReason = validationResult.failures.map((f) => `${f.type}: ${f.message}`).join("; ");
              const revertNote = revertSuccess ? "Merge auto-reverted to clean state" : "WARNING: Auto-revert failed - manual cleanup required";
              console.log(`[merge-agent] ${revertNote}`);
              logActivity("merge_auto_revert", revertNote);
              const failedResult = {
                success: false,
                validationStatus: "FAIL",
                reason: `Validation failed: ${failureReason}. ${revertNote}`,
                notes: result.notes,
                output
              };
              logMergeHistory(context, failedResult);
              return failedResult;
            }
          } else {
            // The agent itself reported a failed merge; no validation needed.
            logActivity("merge_failure", `Merge failed for ${context.issueId}: ${result.reason}`);
            logMergeHistory(context, result);
            return result;
          }
        }
      }
      // Periodic heartbeat: fires only when elapsed seconds land on a
      // multiple of 30 (roughly every 30s given the 5s poll interval).
      const elapsed = Math.round((Date.now() - startTime) / 1e3);
      if (elapsed % 30 === 0) {
        console.log(`[merge-agent] Still working... (${elapsed}s elapsed)`);
      }
    }
    // Poll loop exhausted without a result marker.
    console.log(`[merge-agent] Timeout after ${MERGE_TIMEOUT_MS / 1e3} seconds`);
    logActivity("merge_timeout", `Merge timed out for ${context.issueId}`);
    return {
      success: false,
      reason: `Timeout after ${MERGE_TIMEOUT_MS / 6e4} minutes`,
      output: lastOutput
    };
  } catch (error) {
    console.error(`[merge-agent] Failed:`, error);
    logActivity("merge_error", `Error: ${error.message}`);
    const result = {
      success: false,
      reason: error.message || "Unknown error"
    };
    logMergeHistory(context, result);
    return result;
  }
}
|
|
986
|
+
async function spawnMergeAgentForBranches(projectPath, sourceBranch, targetBranch, issueId, options) {
|
|
987
|
+
console.log(`[merge-agent] Waking specialist for merge of ${sourceBranch} into ${targetBranch}`);
|
|
988
|
+
logActivity("merge_attempt", `Waking specialist for merge: ${sourceBranch} -> ${targetBranch}`);
|
|
989
|
+
try {
|
|
990
|
+
console.log(`[merge-agent] Checking for stale git lock files...`);
|
|
991
|
+
const lockCleanup = await cleanupStaleLocks(projectPath);
|
|
992
|
+
if (lockCleanup.found.length > 0) {
|
|
993
|
+
console.log(`[merge-agent] Found ${lockCleanup.found.length} lock file(s)`);
|
|
994
|
+
if (lockCleanup.removed.length > 0) {
|
|
995
|
+
console.log(`[merge-agent] \u2713 Cleaned up ${lockCleanup.removed.length} stale lock file(s):`);
|
|
996
|
+
lockCleanup.removed.forEach((f) => console.log(` - ${f}`));
|
|
997
|
+
logActivity("git_lock_cleanup", `Removed ${lockCleanup.removed.length} stale lock file(s)`);
|
|
998
|
+
}
|
|
999
|
+
if (lockCleanup.errors.length > 0) {
|
|
1000
|
+
console.warn(`[merge-agent] \u26A0\uFE0F Failed to clean up some locks:`, lockCleanup.errors);
|
|
1001
|
+
if (lockCleanup.errors.some((e) => e.error.includes("Git processes are running"))) {
|
|
1002
|
+
const message = "Git processes are still running - cannot safely start merge";
|
|
1003
|
+
console.error(`[merge-agent] ${message}`);
|
|
1004
|
+
logActivity("merge_blocked", message);
|
|
1005
|
+
return { success: false, reason: message };
|
|
1006
|
+
}
|
|
1007
|
+
}
|
|
1008
|
+
}
|
|
1009
|
+
try {
|
|
1010
|
+
const { stdout: remoteBranches } = await execAsync3(`git ls-remote --heads origin ${sourceBranch}`, {
|
|
1011
|
+
cwd: projectPath,
|
|
1012
|
+
encoding: "utf-8"
|
|
1013
|
+
});
|
|
1014
|
+
if (!remoteBranches.trim()) {
|
|
1015
|
+
const message = `Branch ${sourceBranch} is not pushed to remote.`;
|
|
1016
|
+
console.error(`[merge-agent] ${message}`);
|
|
1017
|
+
logActivity("merge_blocked", message);
|
|
1018
|
+
const { writeFeedbackFile } = await import("./feedback-writer-T2WCT6EZ.js");
|
|
1019
|
+
const blockMsg = `# Merge Blocked
|
|
1020
|
+
|
|
1021
|
+
Branch "${sourceBranch}" is not pushed to remote.
|
|
1022
|
+
|
|
1023
|
+
## Required Action
|
|
1024
|
+
|
|
1025
|
+
Run: \`git push -u origin ${sourceBranch}\``;
|
|
1026
|
+
const fileResult = await writeFeedbackFile({
|
|
1027
|
+
issueId,
|
|
1028
|
+
specialist: "merge-agent",
|
|
1029
|
+
outcome: "blocked",
|
|
1030
|
+
summary: `Branch ${sourceBranch} not pushed`,
|
|
1031
|
+
markdownBody: blockMsg
|
|
1032
|
+
});
|
|
1033
|
+
if (fileResult.success) {
|
|
1034
|
+
await sendMessageToAgent(issueId, `SPECIALIST FEEDBACK: merge-agent reported BLOCKED for ${issueId}.
|
|
1035
|
+
Read and address: ${fileResult.relativePath}`);
|
|
1036
|
+
} else {
|
|
1037
|
+
console.error(`[merge-agent] Failed to write feedback file for ${issueId}: ${fileResult.error}`);
|
|
1038
|
+
}
|
|
1039
|
+
return { success: false, reason: message };
|
|
1040
|
+
}
|
|
1041
|
+
} catch {
|
|
1042
|
+
const message = `Cannot verify remote branch ${sourceBranch}.`;
|
|
1043
|
+
console.error(`[merge-agent] ${message}`);
|
|
1044
|
+
logActivity("merge_blocked", message);
|
|
1045
|
+
return { success: false, reason: message };
|
|
1046
|
+
}
|
|
1047
|
+
} catch (error) {
|
|
1048
|
+
return { success: false, reason: `Pre-flight check failed: ${error.message}` };
|
|
1049
|
+
}
|
|
1050
|
+
try {
|
|
1051
|
+
await execAsync3(`git fetch origin ${sourceBranch} ${targetBranch}`, {
|
|
1052
|
+
cwd: projectPath,
|
|
1053
|
+
encoding: "utf-8"
|
|
1054
|
+
});
|
|
1055
|
+
let isAlreadyMerged = false;
|
|
1056
|
+
try {
|
|
1057
|
+
await execAsync3(
|
|
1058
|
+
`git merge-base --is-ancestor origin/${sourceBranch} origin/${targetBranch}`,
|
|
1059
|
+
{ cwd: projectPath, encoding: "utf-8" }
|
|
1060
|
+
);
|
|
1061
|
+
isAlreadyMerged = true;
|
|
1062
|
+
} catch (e) {
|
|
1063
|
+
if (e.code !== 1) {
|
|
1064
|
+
throw e;
|
|
1065
|
+
}
|
|
1066
|
+
}
|
|
1067
|
+
if (isAlreadyMerged) {
|
|
1068
|
+
const message = `Branch ${sourceBranch} is already integrated into ${targetBranch} \u2014 no merge needed`;
|
|
1069
|
+
console.log(`[merge-agent] ${message}`);
|
|
1070
|
+
logActivity("merge_skipped", message);
|
|
1071
|
+
return { success: true, reason: message };
|
|
1072
|
+
}
|
|
1073
|
+
} catch (ancestorErr) {
|
|
1074
|
+
console.warn(`[merge-agent] Ancestor check failed: ${ancestorErr.message} (continuing)`);
|
|
1075
|
+
}
|
|
1076
|
+
const { stdout: headBeforeRaw } = await execAsync3("git rev-parse HEAD", {
|
|
1077
|
+
cwd: projectPath,
|
|
1078
|
+
encoding: "utf-8"
|
|
1079
|
+
});
|
|
1080
|
+
const headBefore = headBeforeRaw.trim();
|
|
1081
|
+
let stashCreated = false;
|
|
1082
|
+
try {
|
|
1083
|
+
const { stdout: statusOut } = await execAsync3("git status --porcelain", {
|
|
1084
|
+
cwd: projectPath,
|
|
1085
|
+
encoding: "utf-8"
|
|
1086
|
+
});
|
|
1087
|
+
if (statusOut.trim()) {
|
|
1088
|
+
await execAsync3('git stash push -u -m "Pre-merge stash for ' + issueId + '"', {
|
|
1089
|
+
cwd: projectPath,
|
|
1090
|
+
encoding: "utf-8"
|
|
1091
|
+
});
|
|
1092
|
+
stashCreated = true;
|
|
1093
|
+
console.log(`[merge-agent] Stashed uncommitted changes before merge`);
|
|
1094
|
+
}
|
|
1095
|
+
} catch (stashErr) {
|
|
1096
|
+
console.warn(`[merge-agent] Failed to stash: ${stashErr.message} (continuing anyway)`);
|
|
1097
|
+
}
|
|
1098
|
+
const apiPort = process.env.API_PORT || process.env.PORT || "3011";
|
|
1099
|
+
const apiUrl = process.env.DASHBOARD_URL || `http://localhost:${apiPort}`;
|
|
1100
|
+
const skipDoneReport = options?.skipDoneReport ?? false;
|
|
1101
|
+
const doneReportInstructions = skipDoneReport ? `DO NOT call /api/specialists/done \u2014 the server manages status for this merge.
|
|
1102
|
+
After pushing, simply STOP. If you need to rollback, rollback and STOP.` : `Then report by calling the Panopticon API:
|
|
1103
|
+
curl -s -X POST ${apiUrl}/api/specialists/done \\
|
|
1104
|
+
-H "Content-Type: application/json" \\
|
|
1105
|
+
-d '{"specialist":"merge","issueId":"${issueId}","status":"<passed or failed>","notes":"<reason if failed>"}'
|
|
1106
|
+
|
|
1107
|
+
CRITICAL: You MUST call the /api/specialists/done endpoint whether you succeed or fail.`;
|
|
1108
|
+
const taskPrompt = `MERGE TASK for ${issueId}:
|
|
1109
|
+
|
|
1110
|
+
PROJECT: ${projectPath}
|
|
1111
|
+
SOURCE BRANCH: ${sourceBranch}
|
|
1112
|
+
TARGET BRANCH: ${targetBranch}
|
|
1113
|
+
|
|
1114
|
+
INSTRUCTIONS:
|
|
1115
|
+
|
|
1116
|
+
PHASE 1 \u2014 SYNC & BASELINE (before merge):
|
|
1117
|
+
1. cd ${projectPath}
|
|
1118
|
+
2. git checkout ${targetBranch}
|
|
1119
|
+
3. git fetch origin ${targetBranch}
|
|
1120
|
+
4. Sync local ${targetBranch} with origin/${targetBranch}:
|
|
1121
|
+
Run: git rev-list --left-right --count ${targetBranch}...origin/${targetBranch}
|
|
1122
|
+
(Output: "LOCAL_AHEAD REMOTE_AHEAD". If REMOTE_AHEAD > 0, local is behind origin.)
|
|
1123
|
+
If local is behind origin (REMOTE_AHEAD > 0):
|
|
1124
|
+
a. git rebase origin/${targetBranch}
|
|
1125
|
+
(Replays local commits on top of origin \u2014 preserves linear history, no merge commits, no data loss)
|
|
1126
|
+
b. If rebase conflicts: abort with git rebase --abort, then STOP \u2014 human intervention needed.
|
|
1127
|
+
c. If rebase succeeds: continue to next step
|
|
1128
|
+
If local is up-to-date or ahead-only (REMOTE_AHEAD = 0): continue to next step
|
|
1129
|
+
5. Run tests on the CURRENT ${targetBranch} to establish a baseline:
|
|
1130
|
+
- Use the Task tool with subagent_type="Bash" to run: npm test 2>&1 || true
|
|
1131
|
+
- Record the number of passing and failing tests as BASELINE_PASS and BASELINE_FAIL
|
|
1132
|
+
- This baseline is critical \u2014 you will compare post-merge results against it
|
|
1133
|
+
|
|
1134
|
+
PHASE 2 \u2014 MERGE:
|
|
1135
|
+
6. git merge ${sourceBranch}
|
|
1136
|
+
7. If clean merge: the merge commit is auto-created (or fast-forward). Skip to Phase 3.
|
|
1137
|
+
8. If conflicts: resolve them following these steps EXACTLY:
|
|
1138
|
+
a. For each conflicted file, read it, find all \`<<<<<<<\`, \`=======\`, \`>>>>>>>\` markers,
|
|
1139
|
+
and edit the file to resolve the conflict (keeping the correct code from both sides).
|
|
1140
|
+
b. After resolving EACH file, stage it immediately: \`git add <file>\`
|
|
1141
|
+
CRITICAL: You MUST run \`git add\` for every resolved file. Without this, git still
|
|
1142
|
+
considers the file unmerged and the merge commit will fail.
|
|
1143
|
+
c. If you use a subagent to resolve conflicts, the subagent MUST run \`git add <file>\`
|
|
1144
|
+
after editing each file. Include this instruction explicitly in any subagent prompt.
|
|
1145
|
+
d. After all files are resolved and staged, verify no conflict markers remain:
|
|
1146
|
+
\`grep -rn '<<<<<<< ' src/ tests/ --include='*.ts' --include='*.tsx' --include='*.js'\`
|
|
1147
|
+
(Ignore matches in documentation/prompt files that reference markers as examples.)
|
|
1148
|
+
e. If markers remain, resolve them and \`git add\` again.
|
|
1149
|
+
f. Complete the merge: \`git commit --no-edit\` (uses the auto-generated merge message)
|
|
1150
|
+
g. Verify the commit succeeded: \`git log --oneline -1\` should show a merge commit.
|
|
1151
|
+
|
|
1152
|
+
For .planning/ files: accept the source branch version or delete \u2014 these are ephemeral.
|
|
1153
|
+
For .claude/settings.local.json: merge both permission entries.
|
|
1154
|
+
|
|
1155
|
+
PHASE 3 \u2014 VERIFY:
|
|
1156
|
+
9. Build the project to verify no compile errors:
|
|
1157
|
+
- Use the Task tool with subagent_type="Bash" to run the build command
|
|
1158
|
+
- For Node.js: NODE_OPTIONS="--max-old-space-size=8192" npm run build
|
|
1159
|
+
- For Java/Maven: ./mvnw compile
|
|
1160
|
+
- Check package.json or pom.xml to determine the right command
|
|
1161
|
+
10. Run tests using the Task tool with subagent_type="Bash":
|
|
1162
|
+
- For Node.js: npm test
|
|
1163
|
+
- Record the number of passing and failing tests as MERGE_PASS and MERGE_FAIL
|
|
1164
|
+
|
|
1165
|
+
PHASE 4 \u2014 DECIDE:
|
|
1166
|
+
11. Compare results:
|
|
1167
|
+
- If build failed: ROLLBACK (go to step 12)
|
|
1168
|
+
- If MERGE_FAIL > BASELINE_FAIL (NEW test failures introduced): ROLLBACK (go to step 12)
|
|
1169
|
+
- If MERGE_FAIL <= BASELINE_FAIL (no new failures): PUSH (go to step 13)
|
|
1170
|
+
- Pre-existing failures on ${targetBranch} are NOT a reason to rollback
|
|
1171
|
+
12. ROLLBACK: git reset --hard ORIG_HEAD
|
|
1172
|
+
(ORIG_HEAD is set by git at merge time \u2014 always points to pre-merge state)
|
|
1173
|
+
${doneReportInstructions.includes("DO NOT") ? "Then STOP." : `Then report failure by calling the Panopticon API:
|
|
1174
|
+
curl -s -X POST ${apiUrl}/api/specialists/done \\
|
|
1175
|
+
-H "Content-Type: application/json" \\
|
|
1176
|
+
-d '{"specialist":"merge","issueId":"${issueId}","status":"failed","notes":"<reason for rollback>"}'
|
|
1177
|
+
Then STOP.`}
|
|
1178
|
+
13. PUSH: git push origin ${targetBranch}
|
|
1179
|
+
If push is rejected (non-fast-forward / "tip of your current branch is behind"):
|
|
1180
|
+
a. git fetch origin ${targetBranch}
|
|
1181
|
+
b. git rebase origin/${targetBranch}
|
|
1182
|
+
(Replay on top of any new remote commits \u2014 safe, no data loss)
|
|
1183
|
+
c. If rebase conflicts: abort with git rebase --abort, ROLLBACK (go to step 12)
|
|
1184
|
+
d. If rebase succeeds: retry git push origin ${targetBranch}
|
|
1185
|
+
e. If push fails again after one retry: ROLLBACK (go to step 12)
|
|
1186
|
+
${doneReportInstructions}
|
|
1187
|
+
|
|
1188
|
+
CRITICAL: You MUST complete this merge. The approve operation is waiting.
|
|
1189
|
+
|
|
1190
|
+
WHY USE SUBAGENTS FOR BUILD/TEST:
|
|
1191
|
+
- Subagents have isolated context and won't pollute your working memory
|
|
1192
|
+
- Build and test output can be verbose - subagents handle this cleanly
|
|
1193
|
+
- If tests fail, the subagent returns a clear summary
|
|
1194
|
+
|
|
1195
|
+
DO NOT:
|
|
1196
|
+
- Delete the feature branch (locally or remotely)
|
|
1197
|
+
- Clean up workspaces
|
|
1198
|
+
- Use git push --force or --force-with-lease \u2014 NEVER force-push under any circumstances
|
|
1199
|
+
- Skip the build step - compile errors after merge are common
|
|
1200
|
+
- Skip the baseline test run \u2014 without it you cannot distinguish new failures from pre-existing ones
|
|
1201
|
+
- Use HEAD~1 for rollback \u2014 use ORIG_HEAD which git sets automatically at merge time
|
|
1202
|
+
- Run git stash \u2014 the TypeScript layer handles stash/restore automatically
|
|
1203
|
+
- Do anything beyond the sync, merge, build, test, and push steps above
|
|
1204
|
+
|
|
1205
|
+
Report any issues or conflicts you encountered.`;
|
|
1206
|
+
const resolvedProject = resolveProjectFromIssue(issueId);
|
|
1207
|
+
const mergeProjectKey = resolvedProject?.projectKey ?? null;
|
|
1208
|
+
const mergeSession = getTmuxSessionName("merge-agent", mergeProjectKey ?? void 0);
|
|
1209
|
+
if (!resolvedProject) {
|
|
1210
|
+
console.warn(`[merge-agent] Could not resolve project for ${issueId} \u2014 falling back to global specialist. Check projects.yaml configuration.`);
|
|
1211
|
+
}
|
|
1212
|
+
if (mergeProjectKey) {
|
|
1213
|
+
const { getAgentRuntimeState, saveAgentRuntimeState } = await import("./agents-5HWTDR4S.js");
|
|
1214
|
+
const IDLE_POLL_INTERVAL = 3e3;
|
|
1215
|
+
const IDLE_MAX_WAIT = 36e4;
|
|
1216
|
+
const idleStart = Date.now();
|
|
1217
|
+
while (Date.now() - idleStart < IDLE_MAX_WAIT) {
|
|
1218
|
+
const state = getAgentRuntimeState(mergeSession);
|
|
1219
|
+
if (!state || state.state === "idle" || state.state === "suspended") {
|
|
1220
|
+
break;
|
|
1221
|
+
}
|
|
1222
|
+
try {
|
|
1223
|
+
await execAsync3(`tmux has-session -t "${mergeSession}" 2>/dev/null`);
|
|
1224
|
+
} catch {
|
|
1225
|
+
console.log(`[merge-agent] Specialist session ${mergeSession} is dead (state was ${state.state}), resetting to idle`);
|
|
1226
|
+
saveAgentRuntimeState(mergeSession, { state: "idle", lastActivity: (/* @__PURE__ */ new Date()).toISOString() });
|
|
1227
|
+
break;
|
|
1228
|
+
}
|
|
1229
|
+
console.log(`[merge-agent] Specialist busy (state: ${state.state}, issue: ${state.currentIssue}), waiting...`);
|
|
1230
|
+
await new Promise((resolve) => setTimeout(resolve, IDLE_POLL_INTERVAL));
|
|
1231
|
+
}
|
|
1232
|
+
const finalState = getAgentRuntimeState(mergeSession);
|
|
1233
|
+
if (finalState && finalState.state !== "idle" && finalState.state !== "suspended") {
|
|
1234
|
+
console.warn(`[merge-agent] Specialist still busy after ${IDLE_MAX_WAIT / 1e3}s, proceeding anyway`);
|
|
1235
|
+
}
|
|
1236
|
+
}
|
|
1237
|
+
let wakeResult;
|
|
1238
|
+
if (mergeProjectKey) {
|
|
1239
|
+
console.log(`[merge-agent] Using per-project ephemeral specialist for ${issueId} (${mergeProjectKey})`);
|
|
1240
|
+
wakeResult = await spawnEphemeralSpecialist(mergeProjectKey, "merge-agent", {
|
|
1241
|
+
issueId,
|
|
1242
|
+
branch: sourceBranch,
|
|
1243
|
+
workspace: projectPath,
|
|
1244
|
+
promptOverride: taskPrompt
|
|
1245
|
+
});
|
|
1246
|
+
} else {
|
|
1247
|
+
console.log(`[merge-agent] Project resolution failed, falling back to legacy global specialist for ${issueId}`);
|
|
1248
|
+
wakeResult = await wakeSpecialist("merge-agent", taskPrompt, {
|
|
1249
|
+
waitForReady: true,
|
|
1250
|
+
startIfNotRunning: true,
|
|
1251
|
+
issueId
|
|
1252
|
+
});
|
|
1253
|
+
}
|
|
1254
|
+
if (!wakeResult.success) {
|
|
1255
|
+
console.error(`[merge-agent] Failed to wake specialist: ${wakeResult.message}`);
|
|
1256
|
+
logActivity("merge_error", `Failed to wake specialist: ${wakeResult.message}`);
|
|
1257
|
+
return {
|
|
1258
|
+
success: false,
|
|
1259
|
+
reason: `Failed to wake merge-agent specialist: ${wakeResult.message}`
|
|
1260
|
+
};
|
|
1261
|
+
}
|
|
1262
|
+
console.log(`[merge-agent] Specialist woken, waiting for merge completion...`);
|
|
1263
|
+
logActivity("merge_specialist_woken", `Specialist woken, task sent`);
|
|
1264
|
+
const POLL_INTERVAL = 5e3;
|
|
1265
|
+
const MAX_WAIT = 15 * 60 * 1e3;
|
|
1266
|
+
const startTime = Date.now();
|
|
1267
|
+
while (Date.now() - startTime < MAX_WAIT) {
|
|
1268
|
+
await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL));
|
|
1269
|
+
try {
|
|
1270
|
+
const { stdout: currentBranchRaw } = await execAsync3("git branch --show-current", {
|
|
1271
|
+
cwd: projectPath,
|
|
1272
|
+
encoding: "utf-8"
|
|
1273
|
+
});
|
|
1274
|
+
const currentBranch = currentBranchRaw.trim();
|
|
1275
|
+
if (currentBranch !== targetBranch) {
|
|
1276
|
+
continue;
|
|
1277
|
+
}
|
|
1278
|
+
const { stdout: currentHeadRaw } = await execAsync3("git rev-parse HEAD", {
|
|
1279
|
+
cwd: projectPath,
|
|
1280
|
+
encoding: "utf-8"
|
|
1281
|
+
});
|
|
1282
|
+
const currentHead = currentHeadRaw.trim();
|
|
1283
|
+
if (currentHead !== headBefore) {
|
|
1284
|
+
{
|
|
1285
|
+
try {
|
|
1286
|
+
await execAsync3(`git fetch origin ${targetBranch}`, {
|
|
1287
|
+
cwd: projectPath,
|
|
1288
|
+
encoding: "utf-8",
|
|
1289
|
+
timeout: 1e4
|
|
1290
|
+
}).catch(() => {
|
|
1291
|
+
});
|
|
1292
|
+
const { stdout: remoteHeadRaw } = await execAsync3(`git rev-parse origin/${targetBranch}`, {
|
|
1293
|
+
cwd: projectPath,
|
|
1294
|
+
encoding: "utf-8"
|
|
1295
|
+
});
|
|
1296
|
+
const remoteHead = remoteHeadRaw.trim();
|
|
1297
|
+
if (remoteHead === currentHead) {
|
|
1298
|
+
console.log(`[merge-agent] Merge completed and pushed, running validation...`);
|
|
1299
|
+
logActivity("merge_validation_start", `Running post-merge validation for ${issueId}`);
|
|
1300
|
+
let specialistBaseline;
|
|
1301
|
+
try {
|
|
1302
|
+
const specialistOutput = await captureTmuxOutput(mergeSession);
|
|
1303
|
+
const baselineMatch = specialistOutput.match(/Failed\s*│\s*(\d+)\s*│/);
|
|
1304
|
+
specialistBaseline = baselineMatch ? parseInt(baselineMatch[1], 10) : void 0;
|
|
1305
|
+
if (specialistBaseline !== void 0) {
|
|
1306
|
+
console.log(`[merge-agent] Extracted baseline from specialist: ${specialistBaseline}`);
|
|
1307
|
+
}
|
|
1308
|
+
} catch {
|
|
1309
|
+
}
|
|
1310
|
+
const validationResult = await runMergeValidation({
|
|
1311
|
+
projectPath,
|
|
1312
|
+
issueId,
|
|
1313
|
+
baselineTestFailures: specialistBaseline
|
|
1314
|
+
});
|
|
1315
|
+
if (validationResult.valid) {
|
|
1316
|
+
const skipNote = validationResult.skipped ? " (no validation script, specialist already validated)" : "";
|
|
1317
|
+
console.log(`[merge-agent] \u2713 Merge validation passed${skipNote}`);
|
|
1318
|
+
const gateResults = await runProjectQualityGates(projectPath, "pre_push");
|
|
1319
|
+
const failedRequired = gateResults.filter((g) => !g.passed && g.required);
|
|
1320
|
+
if (failedRequired.length > 0) {
|
|
1321
|
+
const failedNames = failedRequired.map((g) => g.name).join(", ");
|
|
1322
|
+
console.log(`[merge-agent] \u2717 Quality gates failed: ${failedNames}`);
|
|
1323
|
+
logActivity("merge_quality_gate_fail", `Quality gates failed for ${issueId}: ${failedNames}`);
|
|
1324
|
+
const revertSuccess = await autoRevertMerge(projectPath);
|
|
1325
|
+
const revertNote = revertSuccess ? "Merge auto-reverted to clean state" : "WARNING: Auto-revert failed";
|
|
1326
|
+
return {
|
|
1327
|
+
success: false,
|
|
1328
|
+
validationStatus: "FAIL",
|
|
1329
|
+
reason: `Quality gate(s) failed: ${failedNames}. ${revertNote}`
|
|
1330
|
+
};
|
|
1331
|
+
}
|
|
1332
|
+
logActivity("merge_complete", `Merge completed by specialist${skipNote}`);
|
|
1333
|
+
await postMergeLifecycle(issueId, projectPath);
|
|
1334
|
+
if (stashCreated) {
|
|
1335
|
+
try {
|
|
1336
|
+
await execAsync3("git stash pop", { cwd: projectPath, encoding: "utf-8" });
|
|
1337
|
+
console.log(`[merge-agent] \u2713 Restored stashed changes after successful merge`);
|
|
1338
|
+
} catch (popErr) {
|
|
1339
|
+
console.warn(`[merge-agent] \u26A0 Failed to restore stash after merge: ${popErr.message}`);
|
|
1340
|
+
}
|
|
1341
|
+
}
|
|
1342
|
+
return {
|
|
1343
|
+
success: true,
|
|
1344
|
+
validationStatus: "PASS",
|
|
1345
|
+
testsStatus: "SKIP",
|
|
1346
|
+
// Specialist ran tests, we trust the result
|
|
1347
|
+
notes: "Merge completed by merge-agent specialist and validation passed"
|
|
1348
|
+
};
|
|
1349
|
+
} else {
|
|
1350
|
+
console.log(`[merge-agent] \u2717 Validation failed:`, validationResult.failures);
|
|
1351
|
+
logActivity("merge_validation_fail", `Validation failed: ${validationResult.failures.map((f) => f.type).join(", ")}`);
|
|
1352
|
+
const revertSuccess = await autoRevertMerge(projectPath);
|
|
1353
|
+
if (revertSuccess) {
|
|
1354
|
+
try {
|
|
1355
|
+
await execAsync3(`git push --force-with-lease origin ${targetBranch}`, {
|
|
1356
|
+
cwd: projectPath,
|
|
1357
|
+
encoding: "utf-8"
|
|
1358
|
+
});
|
|
1359
|
+
console.log(`[merge-agent] \u2713 Auto-revert pushed to remote`);
|
|
1360
|
+
logActivity("merge_auto_revert", "Merge auto-reverted and pushed to remote");
|
|
1361
|
+
} catch (pushError) {
|
|
1362
|
+
console.error(`[merge-agent] \u2717 Failed to push revert: ${pushError.message}`);
|
|
1363
|
+
logActivity("merge_revert_push_fail", "Auto-revert successful but push failed");
|
|
1364
|
+
}
|
|
1365
|
+
}
|
|
1366
|
+
if (stashCreated) {
|
|
1367
|
+
try {
|
|
1368
|
+
await execAsync3("git stash pop", { cwd: projectPath, encoding: "utf-8" });
|
|
1369
|
+
console.log(`[merge-agent] \u2713 Restored stashed changes after revert`);
|
|
1370
|
+
} catch (popErr) {
|
|
1371
|
+
console.warn(`[merge-agent] \u26A0 Failed to restore stash after revert: ${popErr.message}`);
|
|
1372
|
+
}
|
|
1373
|
+
}
|
|
1374
|
+
const failureReason = validationResult.failures.map((f) => `${f.type}: ${f.message}`).join("; ");
|
|
1375
|
+
const revertNote = revertSuccess ? "Merge auto-reverted and force-pushed to remote" : "WARNING: Auto-revert failed - manual cleanup required";
|
|
1376
|
+
return {
|
|
1377
|
+
success: false,
|
|
1378
|
+
validationStatus: "FAIL",
|
|
1379
|
+
reason: `Validation failed: ${failureReason}. ${revertNote}`,
|
|
1380
|
+
notes: "Merge completed but validation failed, auto-reverted"
|
|
1381
|
+
};
|
|
1382
|
+
}
|
|
1383
|
+
}
|
|
1384
|
+
} catch {
|
|
1385
|
+
console.log(`[merge-agent] Merge completed locally, push status unknown`);
|
|
1386
|
+
}
|
|
1387
|
+
console.log(`[merge-agent] Merge commit detected, waiting for push...`);
|
|
1388
|
+
}
|
|
1389
|
+
}
|
|
1390
|
+
if (!await isRunning("merge-agent", mergeProjectKey ?? void 0)) {
|
|
1391
|
+
console.error(`[merge-agent] Specialist stopped unexpectedly \u2014 checking for stranded merge commit`);
|
|
1392
|
+
logActivity("merge_error", "Specialist stopped unexpectedly");
|
|
1393
|
+
const salvageResult2 = await salvageStrandedMerge(projectPath, targetBranch, headBefore, issueId, logActivity);
|
|
1394
|
+
if (salvageResult2) return salvageResult2;
|
|
1395
|
+
return {
|
|
1396
|
+
success: false,
|
|
1397
|
+
reason: "merge-agent specialist stopped before completing the merge"
|
|
1398
|
+
};
|
|
1399
|
+
}
|
|
1400
|
+
} catch (pollError) {
|
|
1401
|
+
console.warn(`[merge-agent] Poll error: ${pollError.message}`);
|
|
1402
|
+
}
|
|
1403
|
+
}
|
|
1404
|
+
console.error(`[merge-agent] Timeout waiting for merge completion \u2014 checking for stranded merge commit`);
|
|
1405
|
+
logActivity("merge_timeout", "Timeout waiting for specialist to complete merge");
|
|
1406
|
+
const salvageResult = await salvageStrandedMerge(projectPath, targetBranch, headBefore, issueId, logActivity);
|
|
1407
|
+
if (salvageResult) return salvageResult;
|
|
1408
|
+
return {
|
|
1409
|
+
success: false,
|
|
1410
|
+
reason: "Timeout waiting for merge-agent specialist to complete merge (15 minutes)"
|
|
1411
|
+
};
|
|
1412
|
+
}
|
|
1413
|
+
/**
 * Attempt to recover a merge commit that exists locally but never reached the
 * remote (e.g. the merge specialist died between committing and pushing).
 *
 * @param {string} projectPath - Working directory of the git workspace.
 * @param {string} targetBranch - Branch the merge was supposed to land on.
 * @param {string} headBefore - HEAD sha recorded before the merge started.
 * @param {string} issueId - Issue identifier, used for logging only.
 * @param {Function} logActivity2 - Activity logger `(event, message)`.
 * @returns {Promise<{success: true} | null>} `{ success: true }` when the
 *   remote is (or becomes) up to date with the local merge commit; `null`
 *   when there is nothing stranded or the salvage push itself fails.
 */
async function salvageStrandedMerge(projectPath, targetBranch, headBefore, issueId, logActivity2) {
  // Small helper so every git invocation shares cwd/encoding options.
  const runGit = (cmd, extra = {}) => execAsync3(cmd, { cwd: projectPath, encoding: "utf-8", ...extra });
  try {
    const localHead = (await runGit("git rev-parse HEAD")).stdout.trim();
    // HEAD never moved: no merge commit was created, nothing to salvage.
    if (localHead === headBefore) {
      return null;
    }
    // Best-effort refresh of the remote-tracking ref; ignore network errors
    // so a flaky fetch does not abort the salvage attempt.
    await runGit(`git fetch origin ${targetBranch}`, { timeout: 10000 }).catch(() => {
    });
    const remoteHead = (await runGit(`git rev-parse origin/${targetBranch}`)).stdout.trim();
    // Remote already has our commit: the push actually went through.
    if (remoteHead === localHead) {
      console.log(`[merge-agent] Salvage check: merge already pushed`);
      return { success: true };
    }
    console.log(`[merge-agent] SALVAGING stranded merge for ${issueId}: local HEAD ${localHead.slice(0, 8)} != remote ${remoteHead.slice(0, 8)}`);
    logActivity2("merge_salvage", `Pushing stranded merge commit ${localHead.slice(0, 8)} for ${issueId}`);
    await runGit(`git push origin ${targetBranch}`, { timeout: 30000 });
    console.log(`[merge-agent] Salvage push successful for ${issueId}`);
    logActivity2("merge_salvage_success", `Stranded merge pushed successfully`);
    return { success: true };
  } catch (error) {
    // Any failure (rev-parse, push rejection, timeout) means the caller
    // should fall through to its normal error path.
    console.error(`[merge-agent] Salvage failed: ${error.message}`);
    logActivity2("merge_salvage_failed", `Salvage push failed: ${error.message}`);
    return null;
  }
}
|
|
1453
|
+
/**
 * List files that still contain git conflict markers, as reported by
 * `git diff --check` (lines shaped like "<path>:<line>: leftover conflict marker ...").
 *
 * @param {string} projectPath - Working directory of the git workspace.
 * @returns {Promise<string[]>} De-duplicated file paths with leftover
 *   markers; an empty array on any failure (best-effort check).
 */
async function scanForConflictMarkers(projectPath) {
  try {
    // `|| true` keeps the command's exit status zero even when markers are
    // found, so exec does not reject on a non-empty report.
    const { stdout } = await execAsync3("git diff --check 2>&1 || true", {
      cwd: projectPath,
      encoding: "utf-8"
    });
    const seen = new Set();
    for (const reportLine of stdout.split("\n")) {
      if (!reportLine.includes("leftover conflict marker")) {
        continue;
      }
      // The path is everything before the first ":" in the report line.
      const filePath = reportLine.split(":")[0].trim();
      if (filePath.length > 0) {
        seen.add(filePath);
      }
    }
    return [...seen];
  } catch {
    // Scanning is advisory; treat any error as "no markers detected".
    return [];
  }
}
|
|
1465
|
+
/**
 * Merge origin/main into an issue workspace, delegating conflict resolution
 * to the merge-agent specialist when the merge does not apply cleanly.
 *
 * Phases, in order:
 *   1. Auto-commit any uncommitted changes (sync must start from a clean tree).
 *   2. Clean up stale git lock files; abort if live git processes are detected.
 *   3. Fetch and merge origin/main.
 *   4. On conflicts: wake the merge-agent specialist and poll its tmux pane
 *      until it reports a result or a 15-minute timeout elapses. Every
 *      failure path aborts the in-progress merge to leave the tree clean.
 *
 * @param {string} projectPath - Working directory of the issue workspace.
 * @param {string} issueId - Issue identifier (used for logging and, as a
 *   fallback, to derive the feature branch name).
 * @returns {Promise<object>} One of:
 *   `{ success: true, alreadyUpToDate: true }`,
 *   `{ success: true, commitCount, changedFiles }`, or
 *   `{ success: false, reason, [conflictFiles] }`.
 */
async function syncMainIntoWorkspace(projectPath, issueId) {
  console.log(`[sync-main] Starting sync of main into workspace for ${issueId}`);
  logActivity("sync_main_start", `Starting sync for ${issueId}`);
  // --- Phase 1: ensure a clean working tree by auto-committing WIP ---
  try {
    const { stdout: statusOut } = await execAsync3("git status --porcelain", {
      cwd: projectPath,
      encoding: "utf-8"
    });
    if (statusOut.trim()) {
      console.log(`[sync-main] Uncommitted changes detected, auto-committing...`);
      logActivity("sync_main_auto_commit", `Auto-committing uncommitted changes before sync`);
      try {
        await execAsync3('git add -A && git commit -m "WIP: auto-commit before sync with main"', {
          cwd: projectPath,
          encoding: "utf-8"
        });
        console.log(`[sync-main] Auto-commit successful`);
      } catch (commitErr) {
        const message = `Failed to auto-commit uncommitted changes: ${commitErr.message}`;
        console.error(`[sync-main] ${message}`);
        logActivity("sync_main_blocked", message);
        return { success: false, reason: message };
      }
      // Re-check: if the tree is still dirty after the commit (e.g. ignored
      // hooks rewrote files), merging would be unsafe — bail out.
      const { stdout: postCommitStatus } = await execAsync3("git status --porcelain", {
        cwd: projectPath,
        encoding: "utf-8"
      });
      if (postCommitStatus.trim()) {
        const message = "Uncommitted changes remain after auto-commit \u2014 aborting sync";
        console.error(`[sync-main] ${message}`);
        logActivity("sync_main_blocked", message);
        return { success: false, reason: message };
      }
    }
  } catch (error) {
    return { success: false, reason: `Failed to check git status: ${error.message}` };
  }
  // --- Phase 2: clear stale .git lock files before touching the repo ---
  try {
    const lockCleanup = await cleanupStaleLocks(projectPath);
    if (lockCleanup.found.length > 0) {
      console.log(`[sync-main] Found ${lockCleanup.found.length} lock file(s)`);
      if (lockCleanup.removed.length > 0) {
        console.log(`[sync-main] Cleaned up ${lockCleanup.removed.length} stale lock file(s)`);
        logActivity("git_lock_cleanup", `Removed ${lockCleanup.removed.length} stale lock file(s)`);
      }
      // A lock held by a live git process means another operation is in
      // flight — starting a merge now could corrupt the repo.
      if (lockCleanup.errors.some((e) => e.error.includes("Git processes are running"))) {
        const message = "Git processes are still running \u2014 cannot safely start sync";
        console.error(`[sync-main] ${message}`);
        logActivity("sync_main_blocked", message);
        return { success: false, reason: message };
      }
    }
  } catch (lockErr) {
    // Lock cleanup is best-effort; a failure here is not fatal.
    console.warn(`[sync-main] Lock cleanup warning: ${lockErr.message} (continuing)`);
  }
  // --- Phase 3: fetch and merge origin/main ---
  try {
    console.log(`[sync-main] Fetching origin/main...`);
    await execAsync3("git fetch origin main", { cwd: projectPath, encoding: "utf-8" });
  } catch (error) {
    return { success: false, reason: `Failed to fetch origin/main: ${error.message}` };
  }
  let mergeOutput = "";
  let hasConflicts = false;
  try {
    const result = await execAsync3("git merge origin/main", { cwd: projectPath, encoding: "utf-8" });
    mergeOutput = (result.stdout || "") + (result.stderr || "");
  } catch (error) {
    // git exits non-zero on conflicts; the combined output is still needed
    // to distinguish "already up to date" wording below.
    mergeOutput = (error.stdout || "") + (error.stderr || "");
    hasConflicts = true;
  }
  // Both spellings: git changed "Already up-to-date" to "Already up to date".
  if (mergeOutput.includes("Already up to date") || mergeOutput.includes("Already up-to-date")) {
    console.log(`[sync-main] Already up to date`);
    logActivity("sync_main_noop", `${issueId} already up to date with main`);
    return { success: true, alreadyUpToDate: true };
  }
  if (!hasConflicts) {
    console.log(`[sync-main] Clean merge completed`);
    logActivity("sync_main_success", `Clean merge of main into ${issueId}`);
    // Gather a best-effort summary of what the merge brought in; failures
    // here only degrade the report, not the sync result.
    let changedFiles = [];
    let commitCount = 0;
    try {
      const { stdout: diffFiles } = await execAsync3(
        "git diff --name-only ORIG_HEAD HEAD 2>/dev/null || git diff --name-only HEAD~1 HEAD",
        { cwd: projectPath, encoding: "utf-8" }
      );
      changedFiles = diffFiles.trim().split("\n").filter((f) => f.length > 0);
    } catch {
    }
    try {
      const { stdout: logOut } = await execAsync3(
        'git log ORIG_HEAD..HEAD --oneline 2>/dev/null || echo ""',
        { cwd: projectPath, encoding: "utf-8" }
      );
      commitCount = logOut.trim().split("\n").filter((l) => l.length > 0).length;
    } catch {
    }
    return { success: true, commitCount, changedFiles };
  }
  // --- Phase 4: conflicts — hand off to the merge-agent specialist ---
  const conflictFiles = await getConflictFiles(projectPath);
  console.log(`[sync-main] ${conflictFiles.length} conflict(s), waking merge-agent...`);
  logActivity("sync_main_conflicts", `${conflictFiles.length} conflict(s) in ${issueId}: ${conflictFiles.join(", ")}`);
  // Fall back to the conventional feature branch name if the branch lookup fails.
  const workspaceBranch = await execAsync3("git branch --show-current", { cwd: projectPath, encoding: "utf-8" }).then((r) => r.stdout.trim()).catch(() => `feature/${issueId.toLowerCase()}`);
  const promptPath = join3(__dirname2, "prompts", "sync-main.md");
  let taskPrompt;
  try {
    // Fill the prompt template's {{placeholders}} with the concrete context.
    const template = readFileSync(promptPath, "utf-8");
    taskPrompt = template.replace(/{{projectPath}}/g, projectPath).replace(/{{workspaceBranch}}/g, workspaceBranch).replace(/{{issueId}}/g, issueId).replace(/{{conflictFiles}}/g, conflictFiles.map((f) => `- ${f}`).join("\n"));
  } catch (templateErr) {
    console.error(`[sync-main] Could not load sync-main.md template: ${templateErr.message}`);
    logActivity("sync_main_error", `Template load failed: ${templateErr.message}`);
    // Abort the half-done merge so the workspace is left clean.
    try {
      await execAsync3("git merge --abort", { cwd: projectPath, encoding: "utf-8" });
    } catch {
    }
    return { success: false, conflictFiles, reason: "Internal error: sync-main prompt template not found" };
  }
  // Prefer a per-project ephemeral specialist; fall back to the legacy
  // global specialist when the project cannot be resolved from the issue.
  const syncResolvedProject = resolveProjectFromIssue(issueId);
  const syncProjectKey = syncResolvedProject?.projectKey ?? null;
  let syncWakeResult;
  if (syncProjectKey) {
    syncWakeResult = await spawnEphemeralSpecialist(syncProjectKey, "merge-agent", {
      issueId,
      branch: workspaceBranch,
      workspace: projectPath,
      promptOverride: taskPrompt
    });
  } else {
    syncWakeResult = await wakeSpecialist("merge-agent", taskPrompt, {
      waitForReady: true,
      startIfNotRunning: true,
      issueId
    });
  }
  if (!syncWakeResult.success) {
    try {
      await execAsync3("git merge --abort", { cwd: projectPath, encoding: "utf-8" });
    } catch {
    }
    const message = `Failed to wake merge-agent specialist: ${syncWakeResult.message}`;
    console.error(`[sync-main] ${message}`);
    logActivity("sync_main_error", message);
    return { success: false, conflictFiles, reason: message };
  }
  console.log(`[sync-main] Specialist woken, waiting for conflict resolution...`);
  logActivity("sync_main_agent_woken", `Agent resolving ${conflictFiles.length} conflict(s) for ${issueId}`);
  const tmuxSession = getTmuxSessionName("merge-agent", syncProjectKey ?? void 0);
  const startTime = Date.now();
  const POLL_INTERVAL = 5e3; // 5s between tmux-pane polls
  const SYNC_TIMEOUT_MS = 15 * 60 * 1e3; // give the agent up to 15 minutes
  let lastOutput = "";
  while (Date.now() - startTime < SYNC_TIMEOUT_MS) {
    await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL));
    const output = await captureTmuxOutput(tmuxSession);
    // Only re-parse when the pane content actually changed.
    if (output !== lastOutput) {
      lastOutput = output;
      // Completion is signalled either by the structured MERGE_RESULT: line
      // or by one of several human-readable phrases the agent may emit.
      const hasStructured = output.includes("MERGE_RESULT:");
      const lowerOutput = output.toLowerCase();
      const hasHumanReadable = lowerOutput.includes("merge task complete") || lowerOutput.includes("successfully merged") || lowerOutput.includes("merge complete") || lowerOutput.includes("merge failed") || lowerOutput.includes("merge task failed");
      if (hasStructured || hasHumanReadable) {
        const agentResult = parseAgentOutput(output);
        if (agentResult.success) {
          // Trust but verify: the agent may claim success while conflict
          // markers are still present in the tree.
          const remaining = await scanForConflictMarkers(projectPath);
          if (remaining.length > 0) {
            try {
              await execAsync3("git merge --abort", { cwd: projectPath, encoding: "utf-8" });
            } catch {
            }
            const msg = `Agent reported success but ${remaining.length} conflict marker(s) remain in: ${remaining.join(", ")}`;
            console.error(`[sync-main] ${msg}`);
            logActivity("sync_main_markers_remain", msg);
            return { success: false, conflictFiles, reason: msg };
          }
          console.log(`[sync-main] \u2713 Conflicts resolved by agent`);
          logActivity("sync_main_success", `Merge agent resolved conflicts for ${issueId}`);
          // Best-effort merge summary, mirroring the clean-merge path above.
          let changedFiles = [];
          let commitCount = 0;
          try {
            const { stdout: diffFiles } = await execAsync3(
              "git diff --name-only ORIG_HEAD HEAD",
              { cwd: projectPath, encoding: "utf-8" }
            );
            changedFiles = diffFiles.trim().split("\n").filter((f) => f.length > 0);
            const { stdout: logOut } = await execAsync3(
              "git log ORIG_HEAD..HEAD --oneline",
              { cwd: projectPath, encoding: "utf-8" }
            );
            commitCount = logOut.trim().split("\n").filter((l) => l.length > 0).length;
          } catch {
          }
          return { success: true, commitCount, changedFiles };
        } else {
          // Agent gave up: abort the merge so no conflicted state lingers.
          try {
            await execAsync3("git merge --abort", { cwd: projectPath, encoding: "utf-8" });
          } catch {
          }
          console.log(`[sync-main] \u2717 Agent could not resolve conflicts`);
          logActivity("sync_main_agent_failed", `Agent failed to resolve conflicts for ${issueId}`);
          return {
            success: false,
            conflictFiles,
            reason: agentResult.reason || "Merge agent could not resolve conflicts"
          };
        }
      }
    }
    // Heartbeat roughly every 30s (elapsed lands on multiples of the 5s poll).
    const elapsed = Math.round((Date.now() - startTime) / 1e3);
    if (elapsed % 30 === 0) {
      console.log(`[sync-main] Still waiting for agent... (${elapsed}s elapsed)`);
    }
  }
  // Timed out: abort the merge to restore a clean workspace.
  try {
    await execAsync3("git merge --abort", { cwd: projectPath, encoding: "utf-8" });
  } catch {
  }
  logActivity("sync_main_timeout", `Sync timed out for ${issueId}`);
  return {
    success: false,
    conflictFiles,
    reason: `Timeout: merge agent did not complete within ${SYNC_TIMEOUT_MS / 6e4} minutes`
  };
}
|
|
1686
|
+
/**
 * Run the quality gates configured for the project that owns `projectPath`.
 *
 * Resolves the owning project from projects.yaml by directory containment,
 * and — for polyrepo layouts where `projectPath` is a sub-repo of the
 * project root — narrows the gate set to those whose `path` matches the
 * sub-repo's relative path.
 *
 * @param {string} projectPath - Absolute path of the workspace being gated.
 * @param {string} phase - Gate phase to run (e.g. "pre_push").
 * @returns {Promise<Array>} Gate results from `runQualityGates`, or `[]`
 *   when no gates apply or configuration loading fails.
 */
async function runProjectQualityGates(projectPath, phase) {
  try {
    const config = loadProjectsConfig();
    // Path-boundary-aware containment check. A raw `startsWith(p.path)`
    // would let "/repos/app2/ws" falsely match a project rooted at
    // "/repos/app" (string prefix, but not a parent directory). Using
    // `relative` accepts only the project root itself or a true subpath.
    const project = Object.values(config.projects).find((p) => {
      const rel = relative(p.path, projectPath);
      return rel === "" || !rel.startsWith("..");
    });
    if (!project?.quality_gates || Object.keys(project.quality_gates).length === 0) {
      console.log(`[merge-agent] No quality gates configured for ${projectPath}`);
      return [];
    }
    // Relative path of the workspace inside the project root; empty string
    // means projectPath IS the project root (monorepo case).
    const repoRelPath = relative(project.path, projectPath);
    let gatesToRun = project.quality_gates;
    if (repoRelPath && !repoRelPath.startsWith("..")) {
      // Polyrepo: only run gates declared for this specific sub-repo path.
      const filtered = Object.entries(project.quality_gates).filter(
        ([, gate]) => gate.path === repoRelPath
      );
      if (filtered.length === 0) {
        console.log(`[merge-agent] No quality gates configured for repo path "${repoRelPath}"`);
        return [];
      }
      gatesToRun = Object.fromEntries(filtered);
      console.log(
        `[merge-agent] Polyrepo: running ${Object.keys(gatesToRun).length} gate(s) for path "${repoRelPath}"`
      );
    }
    console.log(`[merge-agent] Running ${phase} quality gates for project "${project.name}"`);
    return await runQualityGates(gatesToRun, projectPath, phase);
  } catch (error) {
    // Gates are advisory at this layer: a config/load failure must not
    // block the merge pipeline, so report and return no results.
    console.error(`[merge-agent] Failed to load quality gates: ${error.message}`);
    return [];
  }
}
|
|
1716
|
+
export {
|
|
1717
|
+
postMergeLifecycle,
|
|
1718
|
+
resetPostMergeState,
|
|
1719
|
+
runProjectQualityGates,
|
|
1720
|
+
scanForConflictMarkers,
|
|
1721
|
+
spawnMergeAgent,
|
|
1722
|
+
spawnMergeAgentForBranches,
|
|
1723
|
+
syncMainIntoWorkspace
|
|
1724
|
+
};
|
|
1725
|
+
//# sourceMappingURL=merge-agent-WM7ZKUET.js.map
|