@ulpi/cli 0.1.4 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/{auth-PN7TMQHV-2W4ICG64.js → auth-FWM7MM4Q-VZC3U2XZ.js} +1 -1
- package/dist/{auth-ECQ3IB4E.js → auth-HDK7ECJL.js} +2 -1
- package/dist/{chunk-3SBPZRB5.js → chunk-3BCW6ABU.js} +402 -142
- package/dist/{chunk-JGBXM5NC.js → chunk-3WB5CXH4.js} +180 -5
- package/dist/{chunk-2HEE5OKX.js → chunk-4UCJIAOU.js} +2 -2
- package/dist/chunk-4XTHZVDS.js +109 -0
- package/dist/chunk-4ZPOZULQ.js +6522 -0
- package/dist/{chunk-SIAQVRKG.js → chunk-5MI5GIXM.js} +48 -2
- package/dist/{chunk-KLEASXUR.js → chunk-6ZL6NXMV.js} +1 -1
- package/dist/chunk-76D3BYJD.js +221 -0
- package/dist/{chunk-ZLYRPD7I.js → chunk-AWOSRA5F.js} +1 -1
- package/dist/{chunk-PDR55ZNW.js → chunk-BFEKZZHM.js} +274 -57
- package/dist/chunk-C7CLUQI6.js +1286 -0
- package/dist/{chunk-7AL4DOEJ.js → chunk-E3B5NROU.js} +7 -7
- package/dist/chunk-EJ7TW77N.js +1418 -0
- package/dist/{chunk-5J6NLQUN.js → chunk-IV6MWETF.js} +383 -168
- package/dist/chunk-IZPJHSPX.js +1478 -0
- package/dist/chunk-JLHNLM3C.js +228 -0
- package/dist/{chunk-BZL5H4YQ.js → chunk-KYYI23AQ.js} +2 -2
- package/dist/{chunk-2CLNOKPA.js → chunk-RSFJ6QSR.js} +18 -0
- package/dist/chunk-S6ANCSYO.js +1271 -0
- package/dist/chunk-SEU7WWNQ.js +1251 -0
- package/dist/chunk-SNQ7NAIS.js +453 -0
- package/dist/{ulpi-RMMCUAGP-JCJ273T6.js → chunk-TSLDGT5O.js} +73 -35
- package/dist/{chunk-SPOI23SB.js → chunk-UXHCHOWQ.js} +83 -62
- package/dist/chunk-V2H5D6Y3.js +146 -0
- package/dist/{chunk-QJ5GSMEC.js → chunk-VVEDXI7E.js} +2 -1
- package/dist/chunk-VXH5Y4FO.js +6761 -0
- package/dist/chunk-WED4LM5N.js +322 -0
- package/dist/{chunk-74WVVWJ4.js → chunk-YOKL7RB5.js} +184 -15
- package/dist/chunk-Z53CAR7G.js +298 -0
- package/dist/ci-X3U2W4HC.js +854 -0
- package/dist/cloud-2F3NLVHN.js +274 -0
- package/dist/{codemap-RKSD4MIE.js → codemap-XNGMAF3F.js} +37 -37
- package/dist/codex-MB5YTMRT.js +132 -0
- package/dist/{config-EGAXXCGL.js → config-OOELBYTH.js} +1 -1
- package/dist/dist-2BJYR5EI.js +59 -0
- package/dist/dist-2K7IEVTA.js +43 -0
- package/dist/dist-3EIQTZHT.js +1380 -0
- package/dist/{dist-YA2BWZB2.js → dist-4U5L2X2C.js} +2 -2
- package/dist/{dist-UKMCJBB2.js → dist-54KAMNLO.js} +16 -15
- package/dist/dist-6M4MZWZW.js +58 -0
- package/dist/dist-6X576SU2.js +27 -0
- package/dist/dist-7QOEYLFX.js +103 -0
- package/dist/dist-AYBGHEDY.js +2541 -0
- package/dist/dist-EK45QNEM.js +45 -0
- package/dist/{dist-CS2VKNYS.js → dist-FKFEJRPX.js} +16 -15
- package/dist/dist-GTEJUBBT.js +66 -0
- package/dist/dist-HA74OKJZ.js +40 -0
- package/dist/dist-HU5RZAON.js +48 -0
- package/dist/dist-IYE3OBRB.js +374 -0
- package/dist/{dist-GJYT2OQV.js → dist-JLU26AB6.js} +12 -9
- package/dist/{dist-6G7JC2RA.js → dist-KUCI6JFE.js} +49 -9
- package/dist/dist-NUEMFZFL.js +33 -0
- package/dist/{dist-RKOGLK7R.js → dist-NUXMDXZ3.js} +31 -3
- package/dist/{dist-QAU3LGJN.js → dist-YCNWHSLN.js} +15 -5
- package/dist/{dist-CB5D5LMO.js → dist-YFFG2ZD6.js} +9 -16
- package/dist/dist-ZG4OKCSR.js +15 -0
- package/dist/doctor-SI4LLLDZ.js +345 -0
- package/dist/{export-import-4A5MWLIA.js → export-import-JFQH4KSJ.js} +1 -1
- package/dist/{history-3MOBX4MA.js → history-5NE46ZAH.js} +7 -7
- package/dist/hooks-installer-UN5JZLDQ.js +19 -0
- package/dist/index.js +395 -619
- package/dist/{init-6CH4HV5T.js → init-5FK3VKRT.js} +79 -13
- package/dist/job-HIDMAFW2.js +376 -0
- package/dist/jobs.memory-PLMMSFHB-VBECCTHN.js +33 -0
- package/dist/kiro-VMUHDFGK.js +153 -0
- package/dist/{launchd-LF2QMSKZ.js → launchd-6AWT54HR.js} +9 -17
- package/dist/mcp-PDUD7SGP.js +249 -0
- package/dist/mcp-installer-PQU3XOGO.js +259 -0
- package/dist/mcp-setup-OA7IB3H3.js +263 -0
- package/dist/{memory-Y6OZTXJ2.js → memory-ZNAEAK3B.js} +17 -17
- package/dist/{ollama-3XCUZMZT-FYKHW4TZ.js → ollama-3XCUZMZT-4JMH6B7P.js} +1 -1
- package/dist/{openai-E7G2YAHU-UYY4ZWON.js → openai-E7G2YAHU-T3HMBPH7.js} +2 -2
- package/dist/portal-JYWVHXDU.js +210 -0
- package/dist/prd-Q4J5NVAR.js +408 -0
- package/dist/repos-WWZXNN3P.js +271 -0
- package/dist/review-integration-5WHEJU2A.js +14 -0
- package/dist/{rules-E427DKYJ.js → rules-Y4VSOY5Y.js} +3 -3
- package/dist/run-VPNXEIBY.js +687 -0
- package/dist/server-COL4AXKU-P7S7NNF6.js +11 -0
- package/dist/server-KKSETHDV-XSSLEENT.js +20 -0
- package/dist/{skills-CX73O3IV.js → skills-QEYU2N27.js} +4 -2
- package/dist/start-JYOEL7AJ.js +303 -0
- package/dist/{status-4DFHDJMN.js → status-BHQYYGAL.js} +2 -2
- package/dist/{templates-U7T6MARD.js → templates-CBRUJ66V.js} +4 -3
- package/dist/tui-DP7736EX.js +61 -0
- package/dist/ulpi-5EN6JCAS-LFE3WSL4.js +10 -0
- package/dist/{uninstall-6SW35IK4.js → uninstall-ICUV6DDV.js} +3 -3
- package/dist/{update-M6IBJNYP.js → update-7ZMAYRBH.js} +3 -3
- package/dist/{version-checker-Q6YTYAGP.js → version-checker-4ZFMZA7Y.js} +2 -2
- package/package.json +39 -31
- package/dist/chunk-2MZER6ND.js +0 -415
- package/dist/chunk-2VYFVYJL.js +0 -4273
- package/dist/chunk-6OCEY7JY.js +0 -422
- package/dist/chunk-7LXY5UVC.js +0 -330
- package/dist/chunk-B55DDP24.js +0 -136
- package/dist/chunk-JWUUVXIV.js +0 -13694
- package/dist/chunk-MIAQVCFW.js +0 -39
- package/dist/chunk-YM2HV4IA.js +0 -505
- package/dist/ci-STSL2LSP.js +0 -370
- package/dist/mcp-installer-NQCGKQ23.js +0 -124
- package/dist/projects-ATHDD3D6.js +0 -271
- package/dist/review-ADUPV3PN.js +0 -152
- package/dist/server-USLHY6GH-AEOJC5ST.js +0 -18
- package/dist/server-X5P6WH2M-7K2RY34N.js +0 -11
- package/dist/skills/ulpi-generate-guardian/SKILL.md +0 -750
- package/dist/skills/ulpi-generate-guardian/references/framework-rules.md +0 -849
- package/dist/skills/ulpi-generate-guardian/references/language-rules.md +0 -591
- package/dist/ui-OWXZ3YSR.js +0 -167
- package/dist/ui.html +0 -698
package/dist/chunk-2VYFVYJL.js
DELETED
|
@@ -1,4273 +0,0 @@
|
|
|
1
|
-
import {
|
|
2
|
-
getApiSecret,
|
|
3
|
-
validateAuth,
|
|
4
|
-
validateLoopback
|
|
5
|
-
} from "./chunk-MIAQVCFW.js";
|
|
6
|
-
import {
|
|
7
|
-
ReviewHub
|
|
8
|
-
} from "./chunk-2MZER6ND.js";
|
|
9
|
-
import {
|
|
10
|
-
ReviewConfigSchema,
|
|
11
|
-
buildAiScoringPrompt,
|
|
12
|
-
extractSections,
|
|
13
|
-
listPlansWithMeta,
|
|
14
|
-
loadPlan,
|
|
15
|
-
parseMarkdownToBlocks,
|
|
16
|
-
savePlan,
|
|
17
|
-
scorePlanQuality
|
|
18
|
-
} from "./chunk-3SBPZRB5.js";
|
|
19
|
-
import {
|
|
20
|
-
detectStack
|
|
21
|
-
} from "./chunk-2CLNOKPA.js";
|
|
22
|
-
import {
|
|
23
|
-
getDefaultProject,
|
|
24
|
-
getProject,
|
|
25
|
-
listProjects,
|
|
26
|
-
registerProject,
|
|
27
|
-
scanForProjects,
|
|
28
|
-
setDefaultProject,
|
|
29
|
-
unregisterProject
|
|
30
|
-
} from "./chunk-SPOI23SB.js";
|
|
31
|
-
import {
|
|
32
|
-
BUNDLED_SKILLS,
|
|
33
|
-
composeTemplates,
|
|
34
|
-
listUserTemplates,
|
|
35
|
-
loadBundledTemplates,
|
|
36
|
-
resolveTemplate
|
|
37
|
-
} from "./chunk-6OCEY7JY.js";
|
|
38
|
-
import {
|
|
39
|
-
ResponseConfigSchema,
|
|
40
|
-
loadRulesSync
|
|
41
|
-
} from "./chunk-SIAQVRKG.js";
|
|
42
|
-
import {
|
|
43
|
-
commitInWorktree,
|
|
44
|
-
historyBranchExists,
|
|
45
|
-
initHistoryBranch,
|
|
46
|
-
readBranchMeta,
|
|
47
|
-
readHistoryEntry,
|
|
48
|
-
readTimeline,
|
|
49
|
-
updateEntryEnrichment,
|
|
50
|
-
withWorktree,
|
|
51
|
-
writeAndStage
|
|
52
|
-
} from "./chunk-JGBXM5NC.js";
|
|
53
|
-
import {
|
|
54
|
-
JsonSessionStore,
|
|
55
|
-
readEvents
|
|
56
|
-
} from "./chunk-YM2HV4IA.js";
|
|
57
|
-
import {
|
|
58
|
-
CLI_BIN_NAME,
|
|
59
|
-
DEFAULT_AI_MODEL,
|
|
60
|
-
GUARDS_FILENAME,
|
|
61
|
-
REVIEW_IMAGES_DIR,
|
|
62
|
-
VERSION_CACHE_FILE,
|
|
63
|
-
getApiPort,
|
|
64
|
-
getBinaryPath,
|
|
65
|
-
getCodemapBranch,
|
|
66
|
-
getCurrentBranch,
|
|
67
|
-
getHistoryBranch,
|
|
68
|
-
globalGuardsFile,
|
|
69
|
-
loadUlpiSettings,
|
|
70
|
-
projectConfigDir,
|
|
71
|
-
projectGuardsFile,
|
|
72
|
-
resolveApiKey,
|
|
73
|
-
resolveUlpiUrl,
|
|
74
|
-
saveUlpiSettings
|
|
75
|
-
} from "./chunk-7LXY5UVC.js";
|
|
76
|
-
|
|
77
|
-
// ../api/dist/chunk-P4BERD2G.js
|
|
78
|
-
import * as http from "http";
|
|
79
|
-
import * as fs12 from "fs";
|
|
80
|
-
import * as path9 from "path";
|
|
81
|
-
import * as fs from "fs";
|
|
82
|
-
import * as path from "path";
|
|
83
|
-
import { fileURLToPath } from "url";
|
|
84
|
-
import * as fs3 from "fs";
|
|
85
|
-
import * as fs2 from "fs";
|
|
86
|
-
import { stringify as stringifyYaml } from "yaml";
|
|
87
|
-
import * as fs4 from "fs";
|
|
88
|
-
import { parse as parseYaml, stringify as stringifyYaml2 } from "yaml";
|
|
89
|
-
import * as fs5 from "fs";
|
|
90
|
-
import * as path2 from "path";
|
|
91
|
-
import * as os from "os";
|
|
92
|
-
import * as fs6 from "fs";
|
|
93
|
-
import * as path3 from "path";
|
|
94
|
-
import * as fs7 from "fs";
|
|
95
|
-
import * as path4 from "path";
|
|
96
|
-
import * as fs8 from "fs";
|
|
97
|
-
import * as path5 from "path";
|
|
98
|
-
import { spawn } from "child_process";
|
|
99
|
-
import { spawn as spawn2, execFileSync } from "child_process";
|
|
100
|
-
import * as crypto from "crypto";
|
|
101
|
-
import * as fs9 from "fs";
|
|
102
|
-
import * as path6 from "path";
|
|
103
|
-
import { resolve4 as resolve42, resolve6 } from "dns/promises";
|
|
104
|
-
import * as fs11 from "fs";
|
|
105
|
-
import * as path8 from "path";
|
|
106
|
-
import { execFileSync as execFileSync3 } from "child_process";
|
|
107
|
-
import { execFileSync as execFileSync2, spawn as spawn3 } from "child_process";
|
|
108
|
-
import * as fs10 from "fs";
|
|
109
|
-
import * as os2 from "os";
|
|
110
|
-
import * as path7 from "path";
|
|
111
|
-
function getAllowedOrigin(req) {
|
|
112
|
-
const origin = req.headers.origin;
|
|
113
|
-
if (origin && /^https?:\/\/(localhost|127\.0\.0\.1)(:\d+)?$/.test(origin)) {
|
|
114
|
-
return origin;
|
|
115
|
-
}
|
|
116
|
-
return `http://localhost:${getApiPort()}`;
|
|
117
|
-
}
|
|
118
|
-
function handleCorsPreFlight(req, res) {
|
|
119
|
-
if ((req.method ?? "GET") !== "OPTIONS") return false;
|
|
120
|
-
res.writeHead(204, {
|
|
121
|
-
"Access-Control-Allow-Origin": getAllowedOrigin(req),
|
|
122
|
-
"Access-Control-Allow-Methods": "GET, POST, PUT, DELETE, OPTIONS",
|
|
123
|
-
"Access-Control-Allow-Headers": "Content-Type"
|
|
124
|
-
});
|
|
125
|
-
res.end();
|
|
126
|
-
return true;
|
|
127
|
-
}
|
|
128
|
-
var store = /* @__PURE__ */ new Map();
|
|
129
|
-
var CLEANUP_INTERVAL = 6e4;
|
|
130
|
-
setInterval(() => {
|
|
131
|
-
const now = Date.now();
|
|
132
|
-
for (const [key, entry] of store) {
|
|
133
|
-
if (now >= entry.resetAt) {
|
|
134
|
-
store.delete(key);
|
|
135
|
-
}
|
|
136
|
-
}
|
|
137
|
-
}, CLEANUP_INTERVAL).unref();
|
|
138
|
-
function checkRateLimit(ip, limit = 200, windowMs = 6e4) {
|
|
139
|
-
const now = Date.now();
|
|
140
|
-
const entry = store.get(ip);
|
|
141
|
-
if (!entry || now >= entry.resetAt) {
|
|
142
|
-
store.set(ip, { count: 1, resetAt: now + windowMs });
|
|
143
|
-
return true;
|
|
144
|
-
}
|
|
145
|
-
entry.count++;
|
|
146
|
-
return entry.count <= limit;
|
|
147
|
-
}
|
|
148
|
-
function getCorsOrigin(req) {
|
|
149
|
-
if (req) {
|
|
150
|
-
const origin = req.headers.origin;
|
|
151
|
-
if (origin && /^https?:\/\/(localhost|127\.0\.0\.1)(:\d+)?$/.test(origin)) {
|
|
152
|
-
return origin;
|
|
153
|
-
}
|
|
154
|
-
}
|
|
155
|
-
return `http://localhost:${getApiPort()}`;
|
|
156
|
-
}
|
|
157
|
-
function corsHeaders(req) {
|
|
158
|
-
return {
|
|
159
|
-
"Access-Control-Allow-Origin": getCorsOrigin(req),
|
|
160
|
-
"Access-Control-Allow-Methods": "GET, POST, PUT, DELETE, OPTIONS",
|
|
161
|
-
"Access-Control-Allow-Headers": "Content-Type"
|
|
162
|
-
};
|
|
163
|
-
}
|
|
164
|
-
function jsonResponse(res, data, status = 200, req) {
|
|
165
|
-
const body = JSON.stringify(data);
|
|
166
|
-
res.writeHead(status, {
|
|
167
|
-
"Content-Type": "application/json",
|
|
168
|
-
...corsHeaders(req)
|
|
169
|
-
});
|
|
170
|
-
res.end(body);
|
|
171
|
-
}
|
|
172
|
-
function notFound(res, message = "Not found", req) {
|
|
173
|
-
jsonResponse(res, { error: message }, 404, req);
|
|
174
|
-
}
|
|
175
|
-
function sseHeaders(res, req) {
|
|
176
|
-
res.writeHead(200, {
|
|
177
|
-
"Content-Type": "text/event-stream",
|
|
178
|
-
"Cache-Control": "no-cache",
|
|
179
|
-
Connection: "keep-alive",
|
|
180
|
-
...corsHeaders(req)
|
|
181
|
-
});
|
|
182
|
-
}
|
|
183
|
-
function createRouter() {
|
|
184
|
-
const routes = [];
|
|
185
|
-
function add(method, pattern, handler) {
|
|
186
|
-
routes.push({ method, pattern, handler });
|
|
187
|
-
}
|
|
188
|
-
return {
|
|
189
|
-
get(pattern, handler) {
|
|
190
|
-
add("GET", pattern, handler);
|
|
191
|
-
},
|
|
192
|
-
post(pattern, handler) {
|
|
193
|
-
add("POST", pattern, handler);
|
|
194
|
-
},
|
|
195
|
-
put(pattern, handler) {
|
|
196
|
-
add("PUT", pattern, handler);
|
|
197
|
-
},
|
|
198
|
-
delete(pattern, handler) {
|
|
199
|
-
add("DELETE", pattern, handler);
|
|
200
|
-
},
|
|
201
|
-
/**
|
|
202
|
-
* Try each registered route in order. For string patterns, performs an
|
|
203
|
-
* exact match against `ctx.pathname`. For RegExp patterns, tests the
|
|
204
|
-
* pathname and extracts named groups (or positional captures indexed by
|
|
205
|
-
* their number) into `params`.
|
|
206
|
-
*
|
|
207
|
-
* Returns `true` if a matching route was found (and its handler was
|
|
208
|
-
* invoked), `false` otherwise.
|
|
209
|
-
*/
|
|
210
|
-
async handle(ctx) {
|
|
211
|
-
for (const route of routes) {
|
|
212
|
-
if (route.method !== ctx.method) continue;
|
|
213
|
-
let params = {};
|
|
214
|
-
if (typeof route.pattern === "string") {
|
|
215
|
-
if (ctx.pathname !== route.pattern) continue;
|
|
216
|
-
} else {
|
|
217
|
-
const match = route.pattern.exec(ctx.pathname);
|
|
218
|
-
if (!match) continue;
|
|
219
|
-
if (match.groups) {
|
|
220
|
-
params = { ...match.groups };
|
|
221
|
-
}
|
|
222
|
-
for (let i = 1; i < match.length; i++) {
|
|
223
|
-
if (match[i] !== void 0) {
|
|
224
|
-
params[String(i)] = decodeURIComponent(match[i]);
|
|
225
|
-
}
|
|
226
|
-
}
|
|
227
|
-
}
|
|
228
|
-
await route.handler({ ...ctx, params });
|
|
229
|
-
return true;
|
|
230
|
-
}
|
|
231
|
-
return false;
|
|
232
|
-
}
|
|
233
|
-
};
|
|
234
|
-
}
|
|
235
|
-
function readBody(req, maxSize = 1048576) {
|
|
236
|
-
return new Promise((resolve7, reject) => {
|
|
237
|
-
const chunks = [];
|
|
238
|
-
let size = 0;
|
|
239
|
-
req.on("data", (chunk) => {
|
|
240
|
-
size += chunk.length;
|
|
241
|
-
if (size > maxSize) {
|
|
242
|
-
req.destroy();
|
|
243
|
-
reject(new Error("Request body too large"));
|
|
244
|
-
return;
|
|
245
|
-
}
|
|
246
|
-
chunks.push(chunk);
|
|
247
|
-
});
|
|
248
|
-
req.on("end", () => resolve7(Buffer.concat(chunks).toString("utf-8")));
|
|
249
|
-
req.on("error", reject);
|
|
250
|
-
});
|
|
251
|
-
}
|
|
252
|
-
function readBodyRaw(req, maxSize = 5 * 1024 * 1024) {
|
|
253
|
-
return new Promise((resolve7, reject) => {
|
|
254
|
-
const chunks = [];
|
|
255
|
-
let size = 0;
|
|
256
|
-
req.on("data", (chunk) => {
|
|
257
|
-
size += chunk.length;
|
|
258
|
-
if (size > maxSize) {
|
|
259
|
-
req.destroy();
|
|
260
|
-
reject(new Error("Request body too large"));
|
|
261
|
-
return;
|
|
262
|
-
}
|
|
263
|
-
chunks.push(chunk);
|
|
264
|
-
});
|
|
265
|
-
req.on("end", () => resolve7(Buffer.concat(chunks)));
|
|
266
|
-
req.on("error", reject);
|
|
267
|
-
});
|
|
268
|
-
}
|
|
269
|
-
async function parseJsonBody(req) {
|
|
270
|
-
try {
|
|
271
|
-
const raw = await readBody(req);
|
|
272
|
-
const parsed = JSON.parse(raw);
|
|
273
|
-
if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
|
|
274
|
-
return { data: null, error: "Expected a JSON object" };
|
|
275
|
-
}
|
|
276
|
-
return { data: parsed, error: null };
|
|
277
|
-
} catch (err) {
|
|
278
|
-
const message = err instanceof Error ? err.message : "Invalid JSON body";
|
|
279
|
-
return { data: null, error: message };
|
|
280
|
-
}
|
|
281
|
-
}
|
|
282
|
-
function parseMultipartImage(body, boundary) {
|
|
283
|
-
const boundaryBuffer = Buffer.from(`--${boundary}`);
|
|
284
|
-
const start = body.indexOf(boundaryBuffer);
|
|
285
|
-
if (start === -1) return null;
|
|
286
|
-
const headerEnd = body.indexOf(Buffer.from("\r\n\r\n"), start);
|
|
287
|
-
if (headerEnd === -1) return null;
|
|
288
|
-
const headers = body.subarray(start + boundaryBuffer.length + 2, headerEnd).toString();
|
|
289
|
-
const ctMatch = headers.match(/Content-Type:\s*(.+)/i);
|
|
290
|
-
const contentType = ctMatch ? ctMatch[1].trim() : "image/png";
|
|
291
|
-
const dataStart = headerEnd + 4;
|
|
292
|
-
const nextBoundary = body.indexOf(boundaryBuffer, dataStart);
|
|
293
|
-
const dataEnd = nextBoundary !== -1 ? nextBoundary - 2 : body.length;
|
|
294
|
-
return { data: body.subarray(dataStart, dataEnd), contentType };
|
|
295
|
-
}
|
|
296
|
-
var apiVersion = "0.0.0";
|
|
297
|
-
try {
|
|
298
|
-
const thisFile = fileURLToPath(import.meta.url);
|
|
299
|
-
const pkgPath = path.resolve(path.dirname(thisFile), "../package.json");
|
|
300
|
-
const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf-8"));
|
|
301
|
-
if (typeof pkg.version === "string") apiVersion = pkg.version;
|
|
302
|
-
} catch {
|
|
303
|
-
}
|
|
304
|
-
async function healthHandler(ctx) {
|
|
305
|
-
let version = "0.0.0";
|
|
306
|
-
try {
|
|
307
|
-
version = globalThis.__ULPI_API_VERSION__ ?? "0.0.0";
|
|
308
|
-
} catch {
|
|
309
|
-
}
|
|
310
|
-
jsonResponse(ctx.res, { status: "ok", version }, 200, ctx.req);
|
|
311
|
-
}
|
|
312
|
-
async function versionHandler(ctx) {
|
|
313
|
-
const current = apiVersion;
|
|
314
|
-
let latest = current;
|
|
315
|
-
let updateAvailable = false;
|
|
316
|
-
try {
|
|
317
|
-
if (fs.existsSync(VERSION_CACHE_FILE)) {
|
|
318
|
-
const raw = fs.readFileSync(VERSION_CACHE_FILE, "utf-8");
|
|
319
|
-
const cache = JSON.parse(raw);
|
|
320
|
-
if (typeof cache.latest === "string" && /^\d+\.\d+\.\d+/.test(cache.latest)) {
|
|
321
|
-
latest = cache.latest;
|
|
322
|
-
const a = current.split(".").map(Number);
|
|
323
|
-
const b = latest.split(".").map(Number);
|
|
324
|
-
for (let i = 0; i < 3; i++) {
|
|
325
|
-
if ((b[i] ?? 0) > (a[i] ?? 0)) {
|
|
326
|
-
updateAvailable = true;
|
|
327
|
-
break;
|
|
328
|
-
}
|
|
329
|
-
if ((b[i] ?? 0) < (a[i] ?? 0)) break;
|
|
330
|
-
}
|
|
331
|
-
}
|
|
332
|
-
}
|
|
333
|
-
} catch {
|
|
334
|
-
}
|
|
335
|
-
jsonResponse(ctx.res, { current, latest, updateAvailable }, 200, ctx.req);
|
|
336
|
-
}
|
|
337
|
-
async function pathsHandler(ctx) {
|
|
338
|
-
const home = process.env.HOME || process.env.USERPROFILE || "";
|
|
339
|
-
const suggestions = [];
|
|
340
|
-
if (home) {
|
|
341
|
-
suggestions.push(home);
|
|
342
|
-
suggestions.push(path.join(home, "projects"));
|
|
343
|
-
suggestions.push(path.join(home, "work"));
|
|
344
|
-
suggestions.push(path.join(home, "dev"));
|
|
345
|
-
}
|
|
346
|
-
try {
|
|
347
|
-
suggestions.push(process.cwd());
|
|
348
|
-
} catch {
|
|
349
|
-
}
|
|
350
|
-
const unique = [...new Set(suggestions)];
|
|
351
|
-
jsonResponse(ctx.res, { home, cwd: process.cwd(), suggestions: unique }, 200, ctx.req);
|
|
352
|
-
}
|
|
353
|
-
async function getUsernameSettings(ctx) {
|
|
354
|
-
const settings = loadUlpiSettings();
|
|
355
|
-
const branch = getCurrentBranch(ctx.projectDir);
|
|
356
|
-
jsonResponse(ctx.res, {
|
|
357
|
-
username: settings.username ?? "",
|
|
358
|
-
resolvedCodemapBranch: getCodemapBranch(branch),
|
|
359
|
-
resolvedHistoryBranch: getHistoryBranch()
|
|
360
|
-
}, 200, ctx.req);
|
|
361
|
-
}
|
|
362
|
-
async function putUsernameSettings(ctx) {
|
|
363
|
-
const body = await readBody(ctx.req);
|
|
364
|
-
let data;
|
|
365
|
-
try {
|
|
366
|
-
data = JSON.parse(body);
|
|
367
|
-
} catch {
|
|
368
|
-
jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
|
|
369
|
-
return;
|
|
370
|
-
}
|
|
371
|
-
if (typeof data.username !== "string" && data.username !== void 0) {
|
|
372
|
-
jsonResponse(ctx.res, { error: "username must be a string" }, 400, ctx.req);
|
|
373
|
-
return;
|
|
374
|
-
}
|
|
375
|
-
const username = typeof data.username === "string" ? data.username.trim() : void 0;
|
|
376
|
-
if (username && (username.length > 64 || !/^[a-zA-Z0-9_-]+$/.test(username))) {
|
|
377
|
-
jsonResponse(ctx.res, { error: "username must be alphanumeric with hyphens/underscores, max 64 chars" }, 400, ctx.req);
|
|
378
|
-
return;
|
|
379
|
-
}
|
|
380
|
-
try {
|
|
381
|
-
const settings = loadUlpiSettings();
|
|
382
|
-
settings.username = username || void 0;
|
|
383
|
-
saveUlpiSettings(settings);
|
|
384
|
-
} catch (err) {
|
|
385
|
-
const message = err instanceof Error ? err.message : "Failed to update settings";
|
|
386
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
387
|
-
return;
|
|
388
|
-
}
|
|
389
|
-
const branch = getCurrentBranch(ctx.projectDir);
|
|
390
|
-
jsonResponse(ctx.res, {
|
|
391
|
-
success: true,
|
|
392
|
-
username: username ?? "",
|
|
393
|
-
resolvedCodemapBranch: getCodemapBranch(branch),
|
|
394
|
-
resolvedHistoryBranch: getHistoryBranch()
|
|
395
|
-
}, 200, ctx.req);
|
|
396
|
-
}
|
|
397
|
-
function yamlEscapeString(s) {
|
|
398
|
-
return s.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
|
|
399
|
-
}
|
|
400
|
-
function writeYamlFields(obj, lines, indent) {
|
|
401
|
-
const pad = " ".repeat(indent);
|
|
402
|
-
for (const [k, v] of Object.entries(obj)) {
|
|
403
|
-
if (k === "id" || k === "type" || k === "source") continue;
|
|
404
|
-
if (typeof v === "string") {
|
|
405
|
-
lines.push(`${pad}${k}: "${yamlEscapeString(v)}"`);
|
|
406
|
-
} else if (typeof v === "boolean" || typeof v === "number") {
|
|
407
|
-
lines.push(`${pad}${k}: ${v}`);
|
|
408
|
-
} else if (Array.isArray(v)) {
|
|
409
|
-
lines.push(`${pad}${k}:`);
|
|
410
|
-
for (const item of v) {
|
|
411
|
-
if (typeof item === "string") {
|
|
412
|
-
lines.push(`${pad} - "${yamlEscapeString(item)}"`);
|
|
413
|
-
} else if (item && typeof item === "object") {
|
|
414
|
-
const entries = Object.entries(item);
|
|
415
|
-
if (entries.length > 0) {
|
|
416
|
-
const [firstKey, firstVal] = entries[0];
|
|
417
|
-
lines.push(`${pad} - ${firstKey}: ${formatScalar(firstVal)}`);
|
|
418
|
-
for (let i = 1; i < entries.length; i++) {
|
|
419
|
-
const [ek, ev] = entries[i];
|
|
420
|
-
lines.push(`${pad} ${ek}: ${formatScalar(ev)}`);
|
|
421
|
-
}
|
|
422
|
-
}
|
|
423
|
-
}
|
|
424
|
-
}
|
|
425
|
-
}
|
|
426
|
-
}
|
|
427
|
-
}
|
|
428
|
-
function formatScalar(v) {
|
|
429
|
-
if (typeof v === "string") return `"${yamlEscapeString(v)}"`;
|
|
430
|
-
if (typeof v === "boolean" || typeof v === "number") return String(v);
|
|
431
|
-
return `"${String(v)}"`;
|
|
432
|
-
}
|
|
433
|
-
function writeRulesConfig(rulesPath, config) {
|
|
434
|
-
const lines = [
|
|
435
|
-
"# ULPI \u2014 Rules Configuration",
|
|
436
|
-
"# Generated via Web UI",
|
|
437
|
-
"",
|
|
438
|
-
"project:",
|
|
439
|
-
` name: "${yamlEscapeString(config.project.name)}"`,
|
|
440
|
-
` runtime: "${yamlEscapeString(config.project.runtime)}"`,
|
|
441
|
-
` package_manager: "${yamlEscapeString(config.project.package_manager)}"`,
|
|
442
|
-
""
|
|
443
|
-
];
|
|
444
|
-
const sections = [
|
|
445
|
-
"preconditions",
|
|
446
|
-
"postconditions",
|
|
447
|
-
"permissions",
|
|
448
|
-
"pipelines"
|
|
449
|
-
];
|
|
450
|
-
for (const section of sections) {
|
|
451
|
-
const data = config[section];
|
|
452
|
-
if (data && Object.keys(data).length > 0) {
|
|
453
|
-
lines.push(`${section}:`);
|
|
454
|
-
for (const [key, rule] of Object.entries(data)) {
|
|
455
|
-
lines.push(` ${key}:`);
|
|
456
|
-
writeYamlFields(rule, lines, 4);
|
|
457
|
-
}
|
|
458
|
-
lines.push("");
|
|
459
|
-
}
|
|
460
|
-
}
|
|
461
|
-
if (config.responses) {
|
|
462
|
-
const responsesYaml = stringifyYaml(
|
|
463
|
-
{ responses: config.responses },
|
|
464
|
-
{ lineWidth: 120 }
|
|
465
|
-
);
|
|
466
|
-
lines.push(responsesYaml.trimEnd());
|
|
467
|
-
lines.push("");
|
|
468
|
-
}
|
|
469
|
-
fs2.writeFileSync(rulesPath, lines.join("\n") + "\n", "utf-8");
|
|
470
|
-
}
|
|
471
|
-
function getRulesPath(projectDir) {
|
|
472
|
-
const projectPath = projectGuardsFile(projectDir);
|
|
473
|
-
if (fs3.existsSync(projectPath)) return projectPath;
|
|
474
|
-
const userPath = globalGuardsFile();
|
|
475
|
-
if (fs3.existsSync(userPath)) return userPath;
|
|
476
|
-
return null;
|
|
477
|
-
}
|
|
478
|
-
function flattenRules(config) {
|
|
479
|
-
const result = [];
|
|
480
|
-
const sections = [
|
|
481
|
-
{ name: "preconditions", rules: config.preconditions },
|
|
482
|
-
{ name: "postconditions", rules: config.postconditions },
|
|
483
|
-
{ name: "permissions", rules: config.permissions },
|
|
484
|
-
{ name: "pipelines", rules: config.pipelines }
|
|
485
|
-
];
|
|
486
|
-
for (const section of sections) {
|
|
487
|
-
for (const [id, rule] of Object.entries(section.rules)) {
|
|
488
|
-
result.push({ id, section: section.name, ...rule });
|
|
489
|
-
}
|
|
490
|
-
}
|
|
491
|
-
return result;
|
|
492
|
-
}
|
|
493
|
-
async function listRulesHandler(ctx) {
|
|
494
|
-
const rulesPath = getRulesPath(ctx.projectDir);
|
|
495
|
-
if (!rulesPath) {
|
|
496
|
-
jsonResponse(ctx.res, [], 200, ctx.req);
|
|
497
|
-
return;
|
|
498
|
-
}
|
|
499
|
-
const config = loadRulesSync(rulesPath);
|
|
500
|
-
if (!config) {
|
|
501
|
-
jsonResponse(ctx.res, [], 200, ctx.req);
|
|
502
|
-
return;
|
|
503
|
-
}
|
|
504
|
-
jsonResponse(ctx.res, flattenRules(config), 200, ctx.req);
|
|
505
|
-
}
|
|
506
|
-
async function createRuleHandler(ctx) {
|
|
507
|
-
const body = await readBody(ctx.req);
|
|
508
|
-
let ruleData;
|
|
509
|
-
try {
|
|
510
|
-
ruleData = JSON.parse(body);
|
|
511
|
-
} catch {
|
|
512
|
-
jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
|
|
513
|
-
return;
|
|
514
|
-
}
|
|
515
|
-
if (typeof ruleData.type !== "string" || !ruleData.type) {
|
|
516
|
-
jsonResponse(ctx.res, { error: "Missing or invalid required field: type" }, 400, ctx.req);
|
|
517
|
-
return;
|
|
518
|
-
}
|
|
519
|
-
const ruleType = ruleData.type;
|
|
520
|
-
const ruleId = typeof ruleData.id === "string" && ruleData.id ? ruleData.id : typeof ruleData.matcher === "string" && ruleData.matcher ? `${ruleType}-${ruleData.matcher.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-|-$/g, "")}` : "";
|
|
521
|
-
if (!ruleId) {
|
|
522
|
-
jsonResponse(ctx.res, { error: "Missing required field: id or matcher (for auto-generated id)" }, 400, ctx.req);
|
|
523
|
-
return;
|
|
524
|
-
}
|
|
525
|
-
const validTypes = ["precondition", "postcondition", "permission", "pipeline"];
|
|
526
|
-
if (!validTypes.includes(ruleType)) {
|
|
527
|
-
jsonResponse(ctx.res, { error: `Invalid rule type: ${ruleType}` }, 400, ctx.req);
|
|
528
|
-
return;
|
|
529
|
-
}
|
|
530
|
-
const rulesPath = projectGuardsFile(ctx.projectDir);
|
|
531
|
-
if (!fs3.existsSync(rulesPath)) {
|
|
532
|
-
jsonResponse(ctx.res, { error: "guards.yml not found. Run init first." }, 404, ctx.req);
|
|
533
|
-
return;
|
|
534
|
-
}
|
|
535
|
-
const config = loadRulesSync(rulesPath);
|
|
536
|
-
if (!config) {
|
|
537
|
-
jsonResponse(ctx.res, { error: "Failed to load guards.yml" }, 500, ctx.req);
|
|
538
|
-
return;
|
|
539
|
-
}
|
|
540
|
-
const sectionKey = `${ruleType}s`;
|
|
541
|
-
if (config[sectionKey][ruleId]) {
|
|
542
|
-
jsonResponse(ctx.res, { error: `Rule "${ruleId}" already exists in ${sectionKey}` }, 409, ctx.req);
|
|
543
|
-
return;
|
|
544
|
-
}
|
|
545
|
-
const { id: _, type: __, ...ruleFields } = ruleData;
|
|
546
|
-
const newRule = {
|
|
547
|
-
enabled: true,
|
|
548
|
-
trigger: "PreToolUse",
|
|
549
|
-
matcher: "*",
|
|
550
|
-
...ruleFields
|
|
551
|
-
};
|
|
552
|
-
config[sectionKey][ruleId] = newRule;
|
|
553
|
-
writeRulesConfig(rulesPath, config);
|
|
554
|
-
const createdRule = config[sectionKey][ruleId];
|
|
555
|
-
jsonResponse(ctx.res, { id: ruleId, section: sectionKey, ...createdRule }, 201, ctx.req);
|
|
556
|
-
}
|
|
557
|
-
async function getRuleHandler(ctx) {
|
|
558
|
-
const ruleId = ctx.params["1"];
|
|
559
|
-
const rulesPath = getRulesPath(ctx.projectDir);
|
|
560
|
-
if (!rulesPath) {
|
|
561
|
-
jsonResponse(ctx.res, { error: `Rule "${ruleId}" not found` }, 404, ctx.req);
|
|
562
|
-
return;
|
|
563
|
-
}
|
|
564
|
-
const config = loadRulesSync(rulesPath);
|
|
565
|
-
if (!config) {
|
|
566
|
-
jsonResponse(ctx.res, { error: `Rule "${ruleId}" not found` }, 404, ctx.req);
|
|
567
|
-
return;
|
|
568
|
-
}
|
|
569
|
-
const sections = ["preconditions", "postconditions", "permissions", "pipelines"];
|
|
570
|
-
for (const section of sections) {
|
|
571
|
-
if (config[section][ruleId]) {
|
|
572
|
-
const rule = config[section][ruleId];
|
|
573
|
-
jsonResponse(ctx.res, { id: ruleId, section, ...rule }, 200, ctx.req);
|
|
574
|
-
return;
|
|
575
|
-
}
|
|
576
|
-
}
|
|
577
|
-
jsonResponse(ctx.res, { error: `Rule "${ruleId}" not found` }, 404, ctx.req);
|
|
578
|
-
}
|
|
579
|
-
async function toggleRuleHandler(ctx) {
|
|
580
|
-
const ruleId = ctx.params["1"];
|
|
581
|
-
let explicitEnabled;
|
|
582
|
-
try {
|
|
583
|
-
const body = await readBody(ctx.req);
|
|
584
|
-
if (body.trim()) {
|
|
585
|
-
const parsed = JSON.parse(body);
|
|
586
|
-
if (typeof parsed.enabled === "boolean") {
|
|
587
|
-
explicitEnabled = parsed.enabled;
|
|
588
|
-
}
|
|
589
|
-
}
|
|
590
|
-
} catch {
|
|
591
|
-
}
|
|
592
|
-
const rulesPath = projectGuardsFile(ctx.projectDir);
|
|
593
|
-
if (!fs3.existsSync(rulesPath)) {
|
|
594
|
-
jsonResponse(ctx.res, { error: "guards.yml not found" }, 404, ctx.req);
|
|
595
|
-
return;
|
|
596
|
-
}
|
|
597
|
-
const config = loadRulesSync(rulesPath);
|
|
598
|
-
if (!config) {
|
|
599
|
-
jsonResponse(ctx.res, { error: "Failed to load guards.yml" }, 500, ctx.req);
|
|
600
|
-
return;
|
|
601
|
-
}
|
|
602
|
-
const sections = ["preconditions", "postconditions", "permissions", "pipelines"];
|
|
603
|
-
let foundRule = null;
|
|
604
|
-
for (const section of sections) {
|
|
605
|
-
if (config[section][ruleId]) {
|
|
606
|
-
foundRule = config[section][ruleId];
|
|
607
|
-
break;
|
|
608
|
-
}
|
|
609
|
-
}
|
|
610
|
-
if (!foundRule) {
|
|
611
|
-
jsonResponse(ctx.res, { error: `Rule "${ruleId}" not found` }, 404, ctx.req);
|
|
612
|
-
return;
|
|
613
|
-
}
|
|
614
|
-
const newEnabled = explicitEnabled ?? !(foundRule.enabled ?? true);
|
|
615
|
-
foundRule.enabled = newEnabled;
|
|
616
|
-
writeRulesConfig(rulesPath, config);
|
|
617
|
-
jsonResponse(ctx.res, { success: true, id: ruleId, enabled: newEnabled }, 200, ctx.req);
|
|
618
|
-
}
|
|
619
|
-
// PUT /rules/:id — merge a JSON body into an existing rule in the project's
// guards.yml. The rule's `locked` flag and `id` always win over body values.
// Responds 400 on bad JSON, 404 when the file or rule is missing, 403 when
// the rule is locked, 200 with the updated rule on success.
async function updateRuleHandler(ctx) {
  const ruleId = ctx.params["1"];
  const body = await readBody(ctx.req);
  let ruleData;
  try {
    ruleData = JSON.parse(body);
  } catch {
    jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
    return;
  }
  const rulesPath = projectGuardsFile(ctx.projectDir);
  if (!fs3.existsSync(rulesPath)) {
    jsonResponse(ctx.res, { error: "guards.yml not found" }, 404, ctx.req);
    return;
  }
  const config = loadRulesSync(rulesPath);
  if (!config) {
    jsonResponse(ctx.res, { error: "Failed to load guards.yml" }, 500, ctx.req);
    return;
  }
  const sections = ["preconditions", "postconditions", "permissions", "pipelines"];
  let foundSection = null;
  let foundRule = null;
  for (const section of sections) {
    // Optional chaining: a hand-edited guards.yml may omit whole sections,
    // in which case config[section] is undefined and must not throw.
    if (config[section]?.[ruleId]) {
      foundSection = section;
      foundRule = config[section][ruleId];
      break;
    }
  }
  if (!foundSection || !foundRule) {
    jsonResponse(ctx.res, { error: `Rule "${ruleId}" not found` }, 404, ctx.req);
    return;
  }
  if (foundRule.locked) {
    jsonResponse(ctx.res, { error: `Rule "${ruleId}" is locked and cannot be edited` }, 403, ctx.req);
    return;
  }
  const updatedRule = {
    ...foundRule,
    ...ruleData,
    locked: foundRule.locked,
    // Preserve locked status and canonical id regardless of body content.
    id: ruleId
  };
  config[foundSection][ruleId] = updatedRule;
  writeRulesConfig(rulesPath, config);
  jsonResponse(ctx.res, { success: true, rule: updatedRule }, 200, ctx.req);
}
|
|
668
|
-
// DELETE /rules/:id — remove a rule from whichever guards.yml section holds
// it. Locked rules are refused with 403; missing file or rule yields 404.
async function deleteRuleHandler(ctx) {
  const ruleId = ctx.params["1"];
  const rulesPath = projectGuardsFile(ctx.projectDir);
  if (!fs3.existsSync(rulesPath)) {
    jsonResponse(ctx.res, { error: "guards.yml not found" }, 404, ctx.req);
    return;
  }
  const config = loadRulesSync(rulesPath);
  if (!config) {
    jsonResponse(ctx.res, { error: "Failed to load guards.yml" }, 500, ctx.req);
    return;
  }
  const sections = ["preconditions", "postconditions", "permissions", "pipelines"];
  let deleted = false;
  for (const section of sections) {
    // Optional chaining: a hand-edited guards.yml may omit whole sections.
    if (config[section]?.[ruleId]) {
      const rule = config[section][ruleId];
      if (rule.locked) {
        jsonResponse(ctx.res, { error: `Rule "${ruleId}" is locked and cannot be deleted` }, 403, ctx.req);
        return;
      }
      delete config[section][ruleId];
      deleted = true;
      break;
    }
  }
  if (!deleted) {
    jsonResponse(ctx.res, { error: `Rule "${ruleId}" not found` }, 404, ctx.req);
    return;
  }
  writeRulesConfig(rulesPath, config);
  jsonResponse(ctx.res, { success: true, id: ruleId }, 200, ctx.req);
}
|
|
701
|
-
// Resolve which guards file to read: the project-level file takes priority,
// then the user-global one; null when neither exists on disk.
function getRulesPath2(projectDir) {
  const localPath = projectGuardsFile(projectDir);
  if (fs4.existsSync(localPath)) {
    return localPath;
  }
  const userScopedPath = globalGuardsFile();
  return fs4.existsSync(userScopedPath) ? userScopedPath : null;
}
|
|
708
|
-
// GET responses config. Falls back to the built-in default when no guards
// file exists or the loaded config has no `responses` section.
async function getResponsesHandler(ctx) {
  const fallback = { dedup_window_seconds: 5 };
  const rulesPath = getRulesPath2(ctx.projectDir);
  if (rulesPath) {
    const config = loadRulesSync(rulesPath);
    jsonResponse(ctx.res, config?.responses ?? fallback, 200, ctx.req);
    return;
  }
  jsonResponse(ctx.res, fallback, 200, ctx.req);
}
|
|
717
|
-
// PUT responses config: validates the JSON body against ResponseConfigSchema
// and writes it into the `responses` key of the project's guards.yml,
// preserving the document's other top-level keys (note: re-serialization does
// not preserve YAML comments or formatting).
async function updateResponsesHandler(ctx) {
  const rulesPath = projectGuardsFile(ctx.projectDir);
  if (!fs4.existsSync(rulesPath)) {
    jsonResponse(ctx.res, { error: "guards.yml not found. Run init first." }, 404, ctx.req);
    return;
  }
  const body = await readBody(ctx.req);
  let responsesData;
  try {
    responsesData = JSON.parse(body);
  } catch {
    jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
    return;
  }
  // Schema validation (zod-style safeParse); all issues are folded into a
  // single human-readable error string.
  const validation = ResponseConfigSchema.safeParse(responsesData);
  if (!validation.success) {
    const errors = validation.error.issues.map((i) => `${i.path.join(".")}: ${i.message}`).join("; ");
    jsonResponse(ctx.res, { error: `Validation failed: ${errors}` }, 400, ctx.req);
    return;
  }
  try {
    // Re-parse the raw YAML (instead of reusing loadRulesSync) so unrelated
    // top-level keys survive the rewrite untouched.
    const rawYaml = fs4.readFileSync(rulesPath, "utf-8");
    const doc = parseYaml(rawYaml) ?? {};
    doc.responses = validation.data;
    const updatedYaml = stringifyYaml2(doc, { lineWidth: 120 });
    fs4.writeFileSync(rulesPath, updatedYaml, "utf-8");
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to update guards.yml";
    jsonResponse(ctx.res, { error: message }, 500, ctx.req);
    return;
  }
  jsonResponse(ctx.res, { success: true, responses: validation.data }, 200, ctx.req);
}
|
|
750
|
-
// GET templates: returns both the templates shipped with the CLI and any
// user-authored ones, grouped under `bundled` and `user`.
async function listTemplatesHandler(ctx) {
  const payload = {
    bundled: loadBundledTemplates(),
    user: listUserTemplates()
  };
  jsonResponse(ctx.res, payload, 200, ctx.req);
}
|
|
755
|
-
// Extract a display name (first "# " heading) and a short description (first
// non-heading, non-empty line after the heading, capped at 200 chars) from a
// SKILL.md file. On read failure or when no heading exists, the name falls
// back to the supplied id and the description is empty.
function parseSkillHeader(filePath, fallbackId) {
  let name = fallbackId;
  let description = "";
  try {
    const raw = fs5.readFileSync(filePath, "utf-8");
    for (const rawLine of raw.split("\n")) {
      const text = rawLine.trim();
      if (text === "") {
        continue;
      }
      const headingPending = name === fallbackId;
      if (headingPending && text.startsWith("# ")) {
        name = text.slice(2).trim();
      } else if (!headingPending && !text.startsWith("#")) {
        description = text.slice(0, 200);
        break;
      }
    }
  } catch {
    return { name: fallbackId, description: "" };
  }
  return { name, description };
}
|
|
778
|
-
// Scan one skills directory (each skill = a subdirectory containing SKILL.md)
// and append discovered skills to `skills`, deduplicating across calls via
// the shared `seen` set (mutated). Unreadable directories are skipped.
function scanSkillsDirectory(dir, source, skills, seen) {
  if (!fs5.existsSync(dir)) {
    return;
  }
  let dirEntries;
  try {
    dirEntries = fs5.readdirSync(dir, { withFileTypes: true });
  } catch {
    return;
  }
  const subdirs = dirEntries.filter((e) => e.isDirectory());
  for (const sub of subdirs) {
    const id = sub.name;
    const manifest = path2.join(dir, id, "SKILL.md");
    if (!fs5.existsSync(manifest) || seen.has(id)) {
      continue;
    }
    seen.add(id);
    const header = parseSkillHeader(manifest, id);
    skills.push({
      id,
      name: header.name,
      description: header.description,
      filename: path2.relative(dir, manifest),
      source,
      tags: [source]
    });
  }
}
|
|
805
|
-
// Aggregate skills from three sources in priority order: bundled skills
// first, then the project's .claude/skills, then the user's ~/.claude/skills.
// Earlier sources win on id collisions (via the shared `seen` set).
function collectAllSkills(projectDir) {
  const seen = new Set(BUNDLED_SKILLS.map((s) => s.id));
  const skills = BUNDLED_SKILLS.map((s) => ({
    id: s.id,
    name: s.name,
    description: s.description,
    filename: s.filename,
    source: "bundled",
    tags: [s.category]
  }));
  scanSkillsDirectory(path2.join(projectDir, ".claude", "skills"), "project", skills, seen);
  scanSkillsDirectory(path2.join(os.homedir(), ".claude", "skills"), "global", skills, seen);
  return skills;
}
|
|
825
|
-
// GET skills: flat list of every skill visible to this project.
async function listSkillsHandler(ctx) {
  jsonResponse(ctx.res, collectAllSkills(ctx.projectDir), 200, ctx.req);
}
|
|
829
|
-
// Install the ULPI hook commands into a project's .claude/settings.local.json.
// Idempotent: an event whose hook list already references the CLI binary is
// left untouched, and existing non-ULPI hooks are preserved.
function installHooksForProject(projectDir) {
  const settingsDir = path3.join(projectDir, ".claude");
  const settingsPath = path3.join(settingsDir, "settings.local.json");
  let settings = {};
  if (fs6.existsSync(settingsPath)) {
    try {
      settings = JSON.parse(fs6.readFileSync(settingsPath, "utf-8"));
    } catch {
      // Unparseable settings file: start fresh rather than fail the install.
    }
  }
  const binary = getBinaryPath();
  // Claude Code hook event name -> CLI subcommand that handles it.
  const hookEvents = {
    PreToolUse: "pre-tool",
    PostToolUse: "post-tool",
    PermissionRequest: "permission",
    Notification: "notification",
    Stop: "stop",
    SessionStart: "session-start",
    SessionEnd: "session-end"
  };
  settings.hooks = settings.hooks ?? {};
  for (const [event, handler] of Object.entries(hookEvents)) {
    const hookCommand = `${binary} ${handler}`;
    const existing = settings.hooks[event] ?? [];
    // Defensive guards (?? [] and typeof check): settings.local.json is
    // user-editable, so entries may lack `hooks` or `command` — the original
    // code threw a TypeError on such malformed entries.
    const alreadyInstalled = existing.some(
      (h) => (h.hooks ?? []).some((cmd) => {
        const cmdStr = typeof cmd === "string" ? cmd : cmd.command;
        return typeof cmdStr === "string" && cmdStr.includes(CLI_BIN_NAME);
      })
    );
    if (!alreadyInstalled) {
      existing.push({
        matcher: "",
        hooks: [{ type: "command", command: hookCommand }]
      });
      settings.hooks[event] = existing;
    }
  }
  fs6.mkdirSync(settingsDir, { recursive: true });
  fs6.writeFileSync(settingsPath, JSON.stringify(settings, null, 2) + "\n", "utf-8");
}
|
|
870
|
-
// GET projects: all registered projects, each annotated with whether a
// guards.yml currently exists on disk, plus the default project's id.
async function listProjectsHandler(ctx) {
  const projects = listProjects().map((entry) => ({
    ...entry,
    configStatus: fs6.existsSync(projectGuardsFile(entry.path)) ? "configured" : "none"
  }));
  jsonResponse(ctx.res, { projects, defaultProject: getDefaultProject()?.id }, 200, ctx.req);
}
|
|
879
|
-
// POST projects: register a directory as a known project.
// Body: { path: string }. Responds 201 with the created registry entry,
// 400 on bad input, 500 on registry/filesystem failure.
async function registerProjectHandler(ctx) {
  const body = await readBody(ctx.req);
  let projectPath;
  try {
    const parsed = JSON.parse(body);
    projectPath = parsed.path;
  } catch {
    jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
    return;
  }
  if (!projectPath) {
    jsonResponse(ctx.res, { error: "Missing required field: path" }, 400, ctx.req);
    return;
  }
  // Normalize to an absolute path before any filesystem checks.
  const absPath = path3.resolve(projectPath);
  if (!fs6.existsSync(absPath)) {
    jsonResponse(ctx.res, { error: `Path does not exist: ${absPath}` }, 400, ctx.req);
    return;
  }
  try {
    const stats = fs6.statSync(absPath);
    if (!stats.isDirectory()) {
      jsonResponse(ctx.res, { error: `Path is not a directory: ${absPath}` }, 400, ctx.req);
      return;
    }
  } catch (err) {
    // statSync can still fail after existsSync (permissions, races).
    const message = err instanceof Error ? err.message : "Failed to stat path";
    jsonResponse(ctx.res, { error: message }, 500, ctx.req);
    return;
  }
  try {
    const project = registerProject(absPath);
    jsonResponse(ctx.res, { success: true, project }, 201, ctx.req);
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to register project";
    jsonResponse(ctx.res, { error: message }, 500, ctx.req);
  }
}
|
|
917
|
-
// POST default project: mark an already-registered project (body: { id })
// as the default. 400 on bad/missing input, 404 for unknown ids.
async function setDefaultProjectHandler(ctx) {
  const body = await readBody(ctx.req);
  let projectId;
  try {
    projectId = JSON.parse(body).id;
  } catch {
    jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
    return;
  }
  if (!projectId) {
    jsonResponse(ctx.res, { error: "Missing required field: id" }, 400, ctx.req);
    return;
  }
  if (!getProject(projectId)) {
    jsonResponse(ctx.res, { error: `Project not found: ${projectId}` }, 404, ctx.req);
    return;
  }
  setDefaultProject(projectId);
  jsonResponse(ctx.res, { success: true, defaultProject: projectId }, 200, ctx.req);
}
|
|
939
|
-
// POST projects/scan: walk rootDir (up to maxDepth, default 3) for candidate
// projects and register any not already known. Responds with all discovered
// paths plus a count of the newly registered ones.
async function scanProjectsHandler(ctx) {
  const body = await readBody(ctx.req);
  let rootDir;
  let maxDepth = 3;
  try {
    const parsed = JSON.parse(body);
    rootDir = parsed.rootDir;
    if (typeof parsed.maxDepth === "number") {
      maxDepth = parsed.maxDepth;
    }
  } catch {
    jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
    return;
  }
  if (!rootDir) {
    jsonResponse(ctx.res, { error: "Missing required field: rootDir" }, 400, ctx.req);
    return;
  }
  const absRootDir = path3.resolve(rootDir);
  if (!fs6.existsSync(absRootDir)) {
    jsonResponse(ctx.res, { error: `Root directory does not exist: ${absRootDir}` }, 400, ctx.req);
    return;
  }
  try {
    const discoveredPaths = scanForProjects(absRootDir, maxDepth);
    // Snapshot registered paths once (the original re-read the full registry
    // on every loop iteration — accidental O(n^2) on large scans).
    const knownPaths = new Set(listProjects().map((p) => p.path));
    let registered = 0;
    for (const pPath of discoveredPaths) {
      if (!knownPaths.has(pPath)) {
        registerProject(pPath);
        knownPaths.add(pPath);
        registered++;
      }
    }
    jsonResponse(ctx.res, { success: true, projects: discoveredPaths, registered }, 200, ctx.req);
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to scan for projects";
    jsonResponse(ctx.res, { error: message }, 500, ctx.req);
  }
}
|
|
979
|
-
// GET projects/:id — a single registry entry, annotated with its guards.yml
// presence and whether it is the current default project.
async function getProjectHandler(ctx) {
  const projectId = ctx.params["1"];
  const entry = getProject(projectId);
  if (!entry) {
    jsonResponse(ctx.res, { error: `Project not found: ${projectId}` }, 404, ctx.req);
    return;
  }
  const hasGuards = fs6.existsSync(projectGuardsFile(entry.path));
  const payload = {
    ...entry,
    configStatus: hasGuards ? "configured" : "none",
    isDefault: entry.id === getDefaultProject()?.id
  };
  jsonResponse(ctx.res, payload, 200, ctx.req);
}
|
|
991
|
-
// PATCH projects/:id — currently only supports renaming. Re-registers the
// project at the same path, carrying every other field forward unchanged.
async function updateProjectHandler(ctx) {
  const projectId = ctx.params["1"];
  const project = getProject(projectId);
  if (!project) {
    jsonResponse(ctx.res, { error: `Project not found: ${projectId}` }, 404, ctx.req);
    return;
  }
  const body = await readBody(ctx.req);
  let payload;
  try {
    payload = JSON.parse(body);
  } catch {
    jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
    return;
  }
  // Guard against valid-but-non-object JSON ("null", "5", '"x"'): the
  // original read payload.name and crashed with a TypeError on `null`.
  if (typeof payload !== "object" || payload === null) {
    jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
    return;
  }
  const newName = typeof payload.name === "string" ? payload.name.trim() : void 0;
  if (newName !== void 0 && newName.length === 0) {
    jsonResponse(ctx.res, { error: "Name cannot be empty" }, 400, ctx.req);
    return;
  }
  const updated = registerProject(project.path, {
    name: newName ?? project.name,
    hooksInstalled: project.hooksInstalled,
    configStatus: project.configStatus,
    stack: project.stack
  });
  jsonResponse(ctx.res, { success: true, project: updated }, 200, ctx.req);
}
|
|
1019
|
-
// DELETE projects/:id — remove a project from the registry (files on disk
// are untouched). 404 when the id is unknown.
async function deleteProjectHandler(ctx) {
  const projectId = ctx.params["1"];
  if (!unregisterProject(projectId)) {
    jsonResponse(ctx.res, { error: `Project not found: ${projectId}` }, 404, ctx.req);
    return;
  }
  jsonResponse(ctx.res, { success: true, projectId }, 200, ctx.req);
}
|
|
1028
|
-
// POST projects/:id/init — bootstrap guards.yml for a registered project:
// detect the stack, select matching bundled templates (plus the always-on
// "quality-of-life" template), compose/resolve them, write the rules file,
// install Claude hooks, and refresh the registry entry.
async function initProjectHandler(ctx) {
  const projectId = ctx.params["1"];
  const entry = getProject(projectId);
  if (!entry) {
    jsonResponse(ctx.res, { error: `Project not found: ${projectId}` }, 404, ctx.req);
    return;
  }
  const targetDir = entry.path;
  const stack = detectStack(targetDir);
  const allTemplates = loadBundledTemplates();
  // Keep a template when any detected stack component's id matches it.
  const selected = allTemplates.filter((t) => {
    if (t.id === "quality-of-life") return true;
    for (const det of [
      stack.runtime,
      stack.language,
      stack.packageManager,
      stack.framework,
      stack.formatter,
      stack.linter,
      stack.testRunner,
      stack.orm,
      stack.gitWorkflow
    ]) {
      if (det && det.id === t.id) return true;
    }
    return false;
  });
  const composed = composeTemplates(selected);
  // Project metadata block written into guards.yml; runtime and package
  // manager get defaults, the rest stay undefined when undetected.
  const config = {
    name: path3.basename(targetDir),
    runtime: stack.runtime?.id ?? "unknown",
    language: stack.language?.id,
    framework: stack.framework?.id,
    package_manager: stack.packageManager?.id ?? "npm",
    orm: stack.orm?.id,
    test_runner: stack.testRunner?.id,
    formatter: stack.formatter?.id,
    linter: stack.linter?.id,
    git_workflow: stack.gitWorkflow?.id
  };
  const resolved = resolveTemplate(composed, config);
  const rulesDir = projectConfigDir(targetDir);
  fs6.mkdirSync(rulesDir, { recursive: true });
  const rulesConfig = {
    project: config,
    preconditions: resolved.preconditions ?? {},
    postconditions: resolved.postconditions ?? {},
    permissions: resolved.permissions ?? {},
    pipelines: resolved.pipelines ?? {}
  };
  writeRulesConfig(path3.join(rulesDir, GUARDS_FILENAME), rulesConfig);
  installHooksForProject(targetDir);
  // Re-register so the registry entry reflects the new configured state.
  registerProject(targetDir);
  jsonResponse(ctx.res, { success: true, projectId, rulesCount: selected.length }, 200, ctx.req);
}
|
|
1083
|
-
// Pick the session to show by default: the most recently started session that
// has recorded activity (reads, writes, commands, or enforced rules); when no
// session has activity, the most recent session overall. Null when the store
// is empty or nothing loads.
function findLatestSession(store2) {
  const ids = store2.list();
  if (ids.length === 0) return null;
  let newest = null;
  let newestTime = -Infinity;
  let newestActive = null;
  let newestActiveTime = -Infinity;
  for (const id of ids) {
    const session = store2.load(id);
    if (!session) continue;
    const startedMs = new Date(session.startedAt).getTime();
    if (startedMs > newestTime) {
      newestTime = startedMs;
      newest = session;
    }
    const active = [session.filesRead, session.filesWritten, session.commandsRun].some((xs) => xs.length > 0) || session.rulesEnforced > 0;
    if (active && startedMs > newestActiveTime) {
      newestActiveTime = startedMs;
      newestActive = session;
    }
  }
  return newestActive ?? newest;
}
|
|
1106
|
-
// GET sessions: summaries of recent sessions, newest first, capped at 50.
// Sessions with no recorded activity are limited to the 3 most recent per
// project so idle session spam doesn't drown out real ones.
async function sessionsListHandler(ctx) {
  const store2 = new JsonSessionStore();
  const ids = store2.list();
  const active = [];
  const empty = [];
  for (const id of ids) {
    const s = store2.load(id);
    if (!s) continue;
    // Lightweight summary: counts only, not the full event payloads.
    const summary = {
      sessionId: s.sessionId,
      startedAt: s.startedAt,
      projectDir: s.projectDir,
      sessionName: s.sessionName,
      filesRead: s.filesRead.length,
      filesWritten: s.filesWritten.length,
      commandsRun: s.commandsRun.length,
      rulesEnforced: s.rulesEnforced,
      actionsBlocked: s.actionsBlocked
    };
    const hasActivity = s.filesRead.length > 0 || s.filesWritten.length > 0 || s.commandsRun.length > 0 || s.rulesEnforced > 0;
    if (hasActivity) {
      active.push(summary);
    } else {
      empty.push(summary);
    }
  }
  // Sort empty sessions newest-first so the per-project cap keeps the
  // 3 most recent ones.
  empty.sort(
    (a, b) => new Date(b.startedAt).getTime() - new Date(a.startedAt).getTime()
  );
  const emptyCountByProject = /* @__PURE__ */ new Map();
  const filteredEmpty = [];
  for (const s of empty) {
    const count = emptyCountByProject.get(s.projectDir) ?? 0;
    if (count < 3) {
      filteredEmpty.push(s);
      emptyCountByProject.set(s.projectDir, count + 1);
    }
  }
  const summaries = [...active, ...filteredEmpty];
  // Final merge is re-sorted newest-first across both groups.
  summaries.sort(
    (a, b) => new Date(b.startedAt).getTime() - new Date(a.startedAt).getTime()
  );
  jsonResponse(ctx.res, summaries.slice(0, 50), 200, ctx.req);
}
|
|
1150
|
-
// GET session: resolve one session state. ?session=<id> loads that session,
// ?project=<id> loads the project's latest, otherwise the overall latest.
async function sessionHandler(ctx) {
  const projectParam = ctx.url.searchParams.get("project");
  const sessionParam = ctx.url.searchParams.get("session");
  // Project-scoped requests read from a store rooted at ctx.projectDir.
  const store2 = projectParam ? new JsonSessionStore(void 0, ctx.projectDir) : new JsonSessionStore();
  let state;
  if (sessionParam) {
    state = store2.load(sessionParam);
  } else if (projectParam) {
    state = store2.getLatestForProject(ctx.projectDir);
  } else {
    state = findLatestSession(store2);
  }
  jsonResponse(ctx.res, state, 200, ctx.req);
}
|
|
1167
|
-
// GET events: the event log for a session. Resolution mirrors sessionHandler:
// explicit ?session wins, then the project's latest, then the overall latest.
// Always responds 200, with [] when nothing matches.
async function eventsHandler(ctx) {
  const store2 = new JsonSessionStore();
  const projectParam = ctx.url.searchParams.get("project");
  const sessionParam = ctx.url.searchParams.get("session");
  if (sessionParam) {
    jsonResponse(ctx.res, readEvents(sessionParam, ctx.projectDir), 200, ctx.req);
    return;
  }
  if (projectParam) {
    const state = store2.getLatestForProject(ctx.projectDir);
    const projectEvents = state ? readEvents(state.sessionId, ctx.projectDir) : [];
    jsonResponse(ctx.res, projectEvents, 200, ctx.req);
    return;
  }
  const latestState = findLatestSession(store2);
  const latestEvents = latestState ? readEvents(latestState.sessionId, latestState.projectDir) : [];
  jsonResponse(ctx.res, latestEvents, 200, ctx.req);
}
|
|
1194
|
-
// GET stack: run stack detection for the project and return the raw result.
async function stackHandler(ctx) {
  jsonResponse(ctx.res, detectStack(ctx.projectDir), 200, ctx.req);
}
|
|
1198
|
-
// POST init — bootstrap guards.yml for the current project. The optional
// body { templateIds: string[] } pins an explicit template selection;
// otherwise templates are auto-selected from stack detection (mirrors
// initProjectHandler, but does not install hooks or touch the registry).
async function initHandler(ctx) {
  const body = await readBody(ctx.req);
  let templateIds;
  try {
    const parsed = JSON.parse(body);
    if (typeof parsed === "object" && parsed !== null && Array.isArray(parsed.templateIds) && parsed.templateIds.length > 0) {
      templateIds = parsed.templateIds;
    }
  } catch {
    // Empty or invalid body is fine: fall back to auto-selection below.
  }
  const { projectDir } = ctx;
  const stack = detectStack(projectDir);
  const allTemplates = loadBundledTemplates();
  // Explicit ids filter directly; auto-selection keeps "quality-of-life"
  // plus any template whose id matches a detected stack component.
  const selected = templateIds ? allTemplates.filter((t) => templateIds.includes(t.id)) : allTemplates.filter((t) => {
    if (t.id === "quality-of-life") return true;
    for (const det of [
      stack.runtime,
      stack.language,
      stack.packageManager,
      stack.framework,
      stack.formatter,
      stack.linter,
      stack.testRunner,
      stack.orm,
      stack.gitWorkflow
    ]) {
      if (det && det.id === t.id) return true;
    }
    return false;
  });
  const composed = composeTemplates(selected);
  // Project metadata block written into guards.yml; runtime and package
  // manager get defaults, the rest stay undefined when undetected.
  const config = {
    name: path4.basename(projectDir),
    runtime: stack.runtime?.id ?? "unknown",
    language: stack.language?.id,
    framework: stack.framework?.id,
    package_manager: stack.packageManager?.id ?? "npm",
    orm: stack.orm?.id,
    test_runner: stack.testRunner?.id,
    formatter: stack.formatter?.id,
    linter: stack.linter?.id,
    git_workflow: stack.gitWorkflow?.id
  };
  const resolved = resolveTemplate(composed, config);
  const rulesDir = projectConfigDir(projectDir);
  fs7.mkdirSync(rulesDir, { recursive: true });
  const rulesConfig = {
    project: config,
    preconditions: resolved.preconditions ?? {},
    postconditions: resolved.postconditions ?? {},
    permissions: resolved.permissions ?? {},
    pipelines: resolved.pipelines ?? {}
  };
  writeRulesConfig(path4.join(rulesDir, GUARDS_FILENAME), rulesConfig);
  jsonResponse(ctx.res, { success: true, rulesCount: selected.length }, 200, ctx.req);
}
|
|
1254
|
-
// In-memory, per-projectDir status of background AI generation runs. Values
// carry { status, startedAt, childProcess?, error?, warnings? } so pollers can
// time out and kill long-running jobs (see generateStatusHandler).
var generationStatus = /* @__PURE__ */ new Map();
// Same shape, presumably for AI enrichment runs — its consumers are outside
// this chunk; TODO(review): confirm against the rest of the file.
var enrichmentStatus = /* @__PURE__ */ new Map();
|
|
1256
|
-
// Pull a YAML document out of a free-form AI reply. Tries, in order: a
// ```yaml/```yml fenced block, a plain ``` fenced block, then the raw reply
// itself when it already looks like top-level YAML keys. Null when none match.
function parseAIScanResponse(response) {
  const fencePatterns = [/```ya?ml\n([\s\S]*?)```/, /```\n([\s\S]*?)```/];
  for (const pattern of fencePatterns) {
    const match = response.match(pattern);
    if (match) {
      return match[1].trim();
    }
  }
  const bare = response.trim();
  return bare.match(/^[a-z_]+:/m) ? bare : null;
}
|
|
1270
|
-
// Sanity-check generated YAML text by looking for the four top-level rule
// section keys. valid = at least one section present; warnings name each
// missing one. (A substring check, not a YAML parse.)
function validateScanResult(yaml) {
  const expected = [
    "preconditions",
    "postconditions",
    "permissions",
    "pipelines"
  ];
  const missing = expected.filter((section) => !yaml.includes(`${section}:`));
  return {
    valid: missing.length < expected.length,
    warnings: missing.map((section) => `Missing section: ${section}`)
  };
}
|
|
1291
|
-
// GET generation status for the project. Timeout is enforced lazily here, on
// poll: a run "generating" for over 15 minutes is killed and converted to an
// error status. The childProcess handle is stripped before serializing.
async function generateStatusHandler(ctx) {
  const status = generationStatus.get(ctx.projectDir);
  if (!status) {
    jsonResponse(ctx.res, { status: "idle" }, 200, ctx.req);
    return;
  }
  if (status.status === "generating") {
    const elapsed = Date.now() - status.startedAt;
    const timeoutMs = 15 * 60 * 1e3;
    if (elapsed > timeoutMs) {
      // Kill the stuck child process before recording the timeout error.
      if (status.childProcess && !status.childProcess.killed) {
        status.childProcess.kill();
      }
      const errorStatus = {
        status: "error",
        startedAt: status.startedAt,
        error: "Generation timed out after 15 minutes"
      };
      generationStatus.set(ctx.projectDir, errorStatus);
      jsonResponse(ctx.res, errorStatus, 200, ctx.req);
      return;
    }
  }
  // Never serialize the live ChildProcess object in the JSON response.
  const { childProcess: _cp, ...safeStatus } = status;
  jsonResponse(ctx.res, safeStatus, 200, ctx.req);
}
|
|
1317
|
-
// Clear the project's recorded generation status (back to implicit "idle").
async function generateStatusClearHandler(ctx) {
  generationStatus.delete(ctx.projectDir);
  const payload = {
    success: true,
    message: "Generation status cleared"
  };
  jsonResponse(ctx.res, payload, 200, ctx.req);
}
|
|
1324
|
-
// POST generate — kick off AI rule generation for the project by spawning the
// `claude` CLI in the background. Responds 202 immediately; progress and the
// outcome are tracked in generationStatus and polled via generateStatusHandler.
// 409 when a run is already in flight for this projectDir.
async function generateHandler(ctx) {
  const { projectDir } = ctx;
  const currentStatus = generationStatus.get(projectDir);
  if (currentStatus?.status === "generating") {
    jsonResponse(ctx.res, { error: "Generation already in progress" }, 409, ctx.req);
    return;
  }
  // Optional body: { model: string } overrides the default AI model.
  const body = await readBody(ctx.req);
  let model = DEFAULT_AI_MODEL;
  if (body) {
    try {
      const payload = JSON.parse(body);
      if (typeof payload === "object" && payload !== null && typeof payload.model === "string") {
        model = payload.model;
      }
    } catch {
      // Malformed body: silently keep the default model (best-effort).
    }
  }
  const startedAt = Date.now();
  generationStatus.set(projectDir, {
    status: "generating",
    startedAt
  });
  // Respond before doing the work; the client polls for completion.
  jsonResponse(
    ctx.res,
    { success: true, message: "Generation started" },
    202,
    ctx.req
  );
  // Deliberately un-awaited background job; all errors are captured into
  // generationStatus inside the catch below, so nothing is left floating.
  (async () => {
    let claudeProcess;
    try {
      claudeProcess = spawn(
        "claude",
        [
          "--print",
          "--model",
          model,
          "--output-format",
          "text",
          "--permission-mode",
          "bypassPermissions",
          `/ulpi-generate-guardian ${projectDir}`
        ],
        {
          cwd: projectDir,
          stdio: ["pipe", "pipe", "pipe"]
        }
      );
      // Stash the handle so the status poller can kill a timed-out run.
      const entry = generationStatus.get(projectDir);
      if (entry) {
        entry.childProcess = claudeProcess;
      }
      let stdout = "";
      let stderr = "";
      claudeProcess.stdout.on("data", (data) => {
        stdout += data.toString();
      });
      claudeProcess.stderr.on("data", (data) => {
        stderr += data.toString();
      });
      // Adapt the event-based child process API to a promise.
      await new Promise((resolve7, reject) => {
        claudeProcess.on("close", (code) => {
          if (code !== 0) {
            reject(
              new Error(`Claude CLI exited with code ${code}: ${stderr}`)
            );
          } else {
            resolve7();
          }
        });
        claudeProcess.on("error", (err) => {
          reject(
            new Error(
              `Failed to execute claude command: ${err.message}. Make sure the claude CLI is installed.`
            )
          );
        });
      });
      const yaml = parseAIScanResponse(stdout);
      if (!yaml) {
        throw new Error("Failed to parse AI response as YAML");
      }
      const validation = validateScanResult(yaml);
      if (!validation.valid) {
        throw new Error(
          `AI response does not contain valid rules: ${validation.warnings.join(", ")}`
        );
      }
      const rulesDir = projectConfigDir(projectDir);
      fs8.mkdirSync(rulesDir, { recursive: true });
      // Provenance header prepended to the generated guards file.
      const header = [
        "# ULPI \u2014 AI-Generated Rules",
        `# Generated by AI scan on ${(/* @__PURE__ */ new Date()).toISOString()}`,
        `# Model: ${model}`,
        "# Review these rules and adjust as needed.",
        ""
      ].join("\n");
      fs8.writeFileSync(
        path5.join(rulesDir, GUARDS_FILENAME),
        header + yaml + "\n",
        "utf-8"
      );
      generationStatus.set(projectDir, {
        status: "done",
        startedAt: generationStatus.get(projectDir)?.startedAt ?? Date.now(),
        warnings: validation.warnings
      });
      console.log(
        `[AI Generate] Completed successfully for ${projectDir}`
      );
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      // Make sure the child doesn't outlive a failed run.
      if (claudeProcess && !claudeProcess.killed) {
        claudeProcess.kill();
      }
      generationStatus.set(projectDir, {
        status: "error",
        startedAt: generationStatus.get(projectDir)?.startedAt ?? Date.now(),
        error: message
      });
      console.error(
        `[AI Generate] Failed for ${projectDir}:`,
        message
      );
    }
  })();
}
|
|
1452
|
-
async function getReviewConfig(ctx) {
|
|
1453
|
-
const settings = loadUlpiSettings();
|
|
1454
|
-
jsonResponse(ctx.res, settings.review, 200, ctx.req);
|
|
1455
|
-
}
|
|
1456
|
-
async function putReviewConfig(ctx) {
|
|
1457
|
-
const body = await readBody(ctx.req);
|
|
1458
|
-
let reviewData;
|
|
1459
|
-
try {
|
|
1460
|
-
reviewData = JSON.parse(body);
|
|
1461
|
-
} catch {
|
|
1462
|
-
jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
|
|
1463
|
-
return;
|
|
1464
|
-
}
|
|
1465
|
-
const validation = ReviewConfigSchema.safeParse(reviewData);
|
|
1466
|
-
if (!validation.success) {
|
|
1467
|
-
const errors = validation.error.issues.map((i) => `${i.path.join(".")}: ${i.message}`).join("; ");
|
|
1468
|
-
jsonResponse(ctx.res, { error: `Validation failed: ${errors}` }, 400, ctx.req);
|
|
1469
|
-
return;
|
|
1470
|
-
}
|
|
1471
|
-
try {
|
|
1472
|
-
const settings = loadUlpiSettings();
|
|
1473
|
-
settings.review = validation.data;
|
|
1474
|
-
saveUlpiSettings(settings);
|
|
1475
|
-
} catch (err) {
|
|
1476
|
-
const message = err instanceof Error ? err.message : "Failed to update settings";
|
|
1477
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
1478
|
-
return;
|
|
1479
|
-
}
|
|
1480
|
-
jsonResponse(ctx.res, { success: true, review: validation.data }, 200, ctx.req);
|
|
1481
|
-
}
|
|
1482
|
-
async function listPlans(ctx) {
|
|
1483
|
-
const summaries = await listPlansWithMeta(ctx.projectDir);
|
|
1484
|
-
jsonResponse(ctx.res, summaries, 200, ctx.req);
|
|
1485
|
-
}
|
|
1486
|
-
async function listPendingPlansAll(ctx) {
|
|
1487
|
-
const summaries = await listPlansWithMeta();
|
|
1488
|
-
const pending = summaries.filter((s) => s.status === "pending");
|
|
1489
|
-
jsonResponse(ctx.res, pending, 200, ctx.req);
|
|
1490
|
-
}
|
|
1491
|
-
async function getPlan(ctx) {
|
|
1492
|
-
const slug = ctx.params["1"];
|
|
1493
|
-
const plan = await loadPlan(slug, ctx.projectDir);
|
|
1494
|
-
if (!plan) {
|
|
1495
|
-
jsonResponse(ctx.res, { error: `Plan not found: ${slug}` }, 404, ctx.req);
|
|
1496
|
-
return;
|
|
1497
|
-
}
|
|
1498
|
-
jsonResponse(ctx.res, plan, 200, ctx.req);
|
|
1499
|
-
}
|
|
1500
|
-
async function planDecisionGone(ctx) {
|
|
1501
|
-
jsonResponse(
|
|
1502
|
-
ctx.res,
|
|
1503
|
-
{ error: "Gone. Use POST /api/review/hub/session/:id/decision instead." },
|
|
1504
|
-
410,
|
|
1505
|
-
ctx.req
|
|
1506
|
-
);
|
|
1507
|
-
}
|
|
1508
|
-
async function savePlanVersion(ctx) {
|
|
1509
|
-
const slug = ctx.params["1"];
|
|
1510
|
-
const plan = await loadPlan(slug, ctx.projectDir);
|
|
1511
|
-
if (!plan) {
|
|
1512
|
-
jsonResponse(ctx.res, { error: `Plan not found: ${slug}` }, 404, ctx.req);
|
|
1513
|
-
return;
|
|
1514
|
-
}
|
|
1515
|
-
const body = await readBody(ctx.req);
|
|
1516
|
-
let payload;
|
|
1517
|
-
try {
|
|
1518
|
-
payload = JSON.parse(body);
|
|
1519
|
-
} catch {
|
|
1520
|
-
jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
|
|
1521
|
-
return;
|
|
1522
|
-
}
|
|
1523
|
-
const updatedVersion = {
|
|
1524
|
-
...plan.version,
|
|
1525
|
-
annotations: payload.annotations || plan.version.annotations || [],
|
|
1526
|
-
inlineEdits: payload.inlineEdits || plan.version.inlineEdits || [],
|
|
1527
|
-
instructions: payload.instructions || plan.version.instructions || [],
|
|
1528
|
-
priorities: payload.priorities || plan.version.priorities || [],
|
|
1529
|
-
risks: payload.risks || plan.version.risks || []
|
|
1530
|
-
};
|
|
1531
|
-
await savePlan(plan.plan, updatedVersion, slug, ctx.projectDir);
|
|
1532
|
-
jsonResponse(ctx.res, { success: true }, 200, ctx.req);
|
|
1533
|
-
}
|
|
1534
|
-
var SSEBroadcaster = class _SSEBroadcaster {
|
|
1535
|
-
static MAX_CLIENTS = 100;
|
|
1536
|
-
clients = /* @__PURE__ */ new Set();
|
|
1537
|
-
/** Check if the broadcaster can accept another client. */
|
|
1538
|
-
canAccept() {
|
|
1539
|
-
return this.clients.size < _SSEBroadcaster.MAX_CLIENTS;
|
|
1540
|
-
}
|
|
1541
|
-
/** Register a new SSE client. */
|
|
1542
|
-
add(res) {
|
|
1543
|
-
this.clients.add(res);
|
|
1544
|
-
}
|
|
1545
|
-
/** Remove a disconnected SSE client. */
|
|
1546
|
-
remove(res) {
|
|
1547
|
-
this.clients.delete(res);
|
|
1548
|
-
}
|
|
1549
|
-
/** Broadcast a named SSE event to every connected client. */
|
|
1550
|
-
broadcast(eventName, data) {
|
|
1551
|
-
const message = `event: ${eventName}
|
|
1552
|
-
data: ${JSON.stringify(data)}
|
|
1553
|
-
|
|
1554
|
-
`;
|
|
1555
|
-
for (const client of this.clients) {
|
|
1556
|
-
try {
|
|
1557
|
-
client.write(message);
|
|
1558
|
-
} catch {
|
|
1559
|
-
this.clients.delete(client);
|
|
1560
|
-
}
|
|
1561
|
-
}
|
|
1562
|
-
}
|
|
1563
|
-
/** Number of currently connected clients. */
|
|
1564
|
-
get size() {
|
|
1565
|
-
return this.clients.size;
|
|
1566
|
-
}
|
|
1567
|
-
};
|
|
1568
|
-
var reviewHub = new ReviewHub();
|
|
1569
|
-
var reviewSSE = new SSEBroadcaster();
|
|
1570
|
-
function broadcastReviewSSE(eventName, data) {
|
|
1571
|
-
reviewSSE.broadcast(eventName, data);
|
|
1572
|
-
}
|
|
1573
|
-
var _uiServerPort = 9800;
|
|
1574
|
-
function setUiServerPort(port) {
|
|
1575
|
-
_uiServerPort = port;
|
|
1576
|
-
}
|
|
1577
|
-
function getUiServerPort() {
|
|
1578
|
-
return _uiServerPort;
|
|
1579
|
-
}
|
|
1580
|
-
async function hubRegister(ctx) {
|
|
1581
|
-
const body = await readBody(ctx.req);
|
|
1582
|
-
let payload;
|
|
1583
|
-
try {
|
|
1584
|
-
payload = JSON.parse(body);
|
|
1585
|
-
} catch {
|
|
1586
|
-
jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
|
|
1587
|
-
return;
|
|
1588
|
-
}
|
|
1589
|
-
try {
|
|
1590
|
-
const result = await reviewHub.register({
|
|
1591
|
-
type: payload.type ?? "plan",
|
|
1592
|
-
projectPath: payload.projectPath ?? ctx.projectDir,
|
|
1593
|
-
plan: payload.plan ?? payload.markdown,
|
|
1594
|
-
diff: payload.diff,
|
|
1595
|
-
commitMessage: payload.commitMessage
|
|
1596
|
-
});
|
|
1597
|
-
const pubSession = reviewHub.getSession(result.sessionId);
|
|
1598
|
-
broadcastReviewSSE("session-added", pubSession ?? { id: result.sessionId });
|
|
1599
|
-
const reviewSettings = loadUlpiSettings().review;
|
|
1600
|
-
if (reviewSSE.size === 0 && reviewSettings.auto_open_browser) {
|
|
1601
|
-
try {
|
|
1602
|
-
const { exec } = await import("child_process");
|
|
1603
|
-
const reviewPath = result.session.type === "code" ? "review/code" : "review/plan";
|
|
1604
|
-
const port = getUiServerPort();
|
|
1605
|
-
exec(
|
|
1606
|
-
`open "http://localhost:${port}/${reviewPath}?session=${result.sessionId}&token=${result.token}"`
|
|
1607
|
-
);
|
|
1608
|
-
} catch {
|
|
1609
|
-
}
|
|
1610
|
-
}
|
|
1611
|
-
jsonResponse(
|
|
1612
|
-
ctx.res,
|
|
1613
|
-
{ success: true, sessionId: result.sessionId, token: result.token },
|
|
1614
|
-
201,
|
|
1615
|
-
ctx.req
|
|
1616
|
-
);
|
|
1617
|
-
} catch (err) {
|
|
1618
|
-
const message = err instanceof Error ? err.message : "Failed to register session";
|
|
1619
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
1620
|
-
}
|
|
1621
|
-
}
|
|
1622
|
-
async function hubListSessions(ctx) {
|
|
1623
|
-
const typeFilter = ctx.url.searchParams.get("type");
|
|
1624
|
-
const statusFilter = ctx.url.searchParams.get("status") ?? void 0;
|
|
1625
|
-
const sessions = reviewHub.listSessions({
|
|
1626
|
-
type: typeFilter ?? void 0,
|
|
1627
|
-
status: statusFilter
|
|
1628
|
-
});
|
|
1629
|
-
jsonResponse(ctx.res, sessions, 200, ctx.req);
|
|
1630
|
-
}
|
|
1631
|
-
async function hubEvents(ctx) {
|
|
1632
|
-
if (!reviewSSE.canAccept()) {
|
|
1633
|
-
jsonResponse(ctx.res, { error: "Too many SSE clients" }, 503, ctx.req);
|
|
1634
|
-
return;
|
|
1635
|
-
}
|
|
1636
|
-
sseHeaders(ctx.res, ctx.req);
|
|
1637
|
-
ctx.res.write('data: {"type":"connected"}\n\n');
|
|
1638
|
-
reviewSSE.add(ctx.res);
|
|
1639
|
-
ctx.req.on("close", () => {
|
|
1640
|
-
reviewSSE.remove(ctx.res);
|
|
1641
|
-
});
|
|
1642
|
-
}
|
|
1643
|
-
async function hubGetSession(ctx) {
|
|
1644
|
-
const id = ctx.params["1"];
|
|
1645
|
-
const session = reviewHub.getSession(id);
|
|
1646
|
-
if (!session) {
|
|
1647
|
-
jsonResponse(ctx.res, { error: "Session not found" }, 404, ctx.req);
|
|
1648
|
-
return;
|
|
1649
|
-
}
|
|
1650
|
-
jsonResponse(ctx.res, session, 200, ctx.req);
|
|
1651
|
-
}
|
|
1652
|
-
async function hubSubmitDecision(ctx) {
|
|
1653
|
-
const id = ctx.params["1"];
|
|
1654
|
-
const body = await readBody(ctx.req);
|
|
1655
|
-
let payload;
|
|
1656
|
-
try {
|
|
1657
|
-
payload = JSON.parse(body);
|
|
1658
|
-
} catch {
|
|
1659
|
-
jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
|
|
1660
|
-
return;
|
|
1661
|
-
}
|
|
1662
|
-
const token = payload.token ?? ctx.url.searchParams.get("token") ?? ctx.req.headers["x-review-token"];
|
|
1663
|
-
let decision;
|
|
1664
|
-
if (payload.decision && typeof payload.decision === "object") {
|
|
1665
|
-
decision = payload.decision;
|
|
1666
|
-
} else if ("behavior" in payload || "approved" in payload) {
|
|
1667
|
-
decision = payload;
|
|
1668
|
-
} else {
|
|
1669
|
-
jsonResponse(ctx.res, { error: "Missing decision payload" }, 400, ctx.req);
|
|
1670
|
-
return;
|
|
1671
|
-
}
|
|
1672
|
-
if (!token || !reviewHub.verifyToken(id, token)) {
|
|
1673
|
-
jsonResponse(ctx.res, { error: "Missing or invalid session token" }, 403, ctx.req);
|
|
1674
|
-
return;
|
|
1675
|
-
}
|
|
1676
|
-
const submitted = await reviewHub.submitDecision(id, decision, token);
|
|
1677
|
-
if (!submitted) {
|
|
1678
|
-
jsonResponse(
|
|
1679
|
-
ctx.res,
|
|
1680
|
-
{ error: "Session not found or already decided" },
|
|
1681
|
-
404,
|
|
1682
|
-
ctx.req
|
|
1683
|
-
);
|
|
1684
|
-
return;
|
|
1685
|
-
}
|
|
1686
|
-
const decidedSession = reviewHub.getSession(id);
|
|
1687
|
-
broadcastReviewSSE(
|
|
1688
|
-
"session-decided",
|
|
1689
|
-
decidedSession ?? { id, decision }
|
|
1690
|
-
);
|
|
1691
|
-
jsonResponse(ctx.res, { success: true }, 200, ctx.req);
|
|
1692
|
-
}
|
|
1693
|
-
async function hubAwaitDecision(ctx) {
|
|
1694
|
-
const id = ctx.params["1"];
|
|
1695
|
-
const token = ctx.url.searchParams.get("token") ?? ctx.req.headers["x-review-token"];
|
|
1696
|
-
if (!token || !reviewHub.verifyToken(id, token)) {
|
|
1697
|
-
jsonResponse(ctx.res, { error: "Missing or invalid session token" }, 403, ctx.req);
|
|
1698
|
-
return;
|
|
1699
|
-
}
|
|
1700
|
-
const session = reviewHub.getSession(id);
|
|
1701
|
-
if (!session) {
|
|
1702
|
-
jsonResponse(ctx.res, { error: "Session not found" }, 404, ctx.req);
|
|
1703
|
-
return;
|
|
1704
|
-
}
|
|
1705
|
-
if (session.status === "decided") {
|
|
1706
|
-
const rawDecision = reviewHub.getDecisionPayload(id);
|
|
1707
|
-
if (rawDecision) {
|
|
1708
|
-
jsonResponse(ctx.res, { decision: rawDecision }, 200, ctx.req);
|
|
1709
|
-
return;
|
|
1710
|
-
}
|
|
1711
|
-
}
|
|
1712
|
-
const decision = await reviewHub.awaitDecision(id, 3e4);
|
|
1713
|
-
if (decision) {
|
|
1714
|
-
jsonResponse(ctx.res, { decision }, 200, ctx.req);
|
|
1715
|
-
} else {
|
|
1716
|
-
ctx.res.writeHead(204);
|
|
1717
|
-
ctx.res.end();
|
|
1718
|
-
}
|
|
1719
|
-
}
|
|
1720
|
-
async function hubSaveVersion(ctx) {
|
|
1721
|
-
const id = ctx.params["1"];
|
|
1722
|
-
const body = await readBody(ctx.req);
|
|
1723
|
-
let version;
|
|
1724
|
-
try {
|
|
1725
|
-
version = JSON.parse(body);
|
|
1726
|
-
} catch {
|
|
1727
|
-
jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
|
|
1728
|
-
return;
|
|
1729
|
-
}
|
|
1730
|
-
const token = version.token;
|
|
1731
|
-
if (!token || !reviewHub.verifyToken(id, token)) {
|
|
1732
|
-
jsonResponse(ctx.res, { error: "Missing or invalid session token" }, 403, ctx.req);
|
|
1733
|
-
return;
|
|
1734
|
-
}
|
|
1735
|
-
const saved = await reviewHub.saveVersion(id, version);
|
|
1736
|
-
if (!saved) {
|
|
1737
|
-
jsonResponse(ctx.res, { error: "Session not found" }, 404, ctx.req);
|
|
1738
|
-
return;
|
|
1739
|
-
}
|
|
1740
|
-
broadcastReviewSSE("session-updated", { id });
|
|
1741
|
-
jsonResponse(ctx.res, { success: true }, 200, ctx.req);
|
|
1742
|
-
}
|
|
1743
|
-
async function hubGetToken(ctx) {
|
|
1744
|
-
const id = ctx.params["1"];
|
|
1745
|
-
const token = reviewHub.getSessionToken(id);
|
|
1746
|
-
if (!token) {
|
|
1747
|
-
jsonResponse(ctx.res, { error: "Session not found" }, 404, ctx.req);
|
|
1748
|
-
return;
|
|
1749
|
-
}
|
|
1750
|
-
jsonResponse(ctx.res, { token }, 200, ctx.req);
|
|
1751
|
-
}
|
|
1752
|
-
async function hubHealth(ctx) {
|
|
1753
|
-
const sessions = reviewHub.listSessions({});
|
|
1754
|
-
const pending = sessions.filter(
|
|
1755
|
-
(s) => s.status === "pending" || s.status === "active"
|
|
1756
|
-
).length;
|
|
1757
|
-
const decided = sessions.filter((s) => s.status === "decided").length;
|
|
1758
|
-
const orphaned = sessions.filter((s) => s.status === "orphaned").length;
|
|
1759
|
-
jsonResponse(ctx.res, {
|
|
1760
|
-
status: "ok",
|
|
1761
|
-
sessions: sessions.length,
|
|
1762
|
-
pending,
|
|
1763
|
-
decided,
|
|
1764
|
-
orphaned,
|
|
1765
|
-
sseClients: reviewSSE.size,
|
|
1766
|
-
uptimeMs: process.uptime() * 1e3
|
|
1767
|
-
}, 200, ctx.req);
|
|
1768
|
-
}
|
|
1769
|
-
var cachedClaudePath;
|
|
1770
|
-
function resolveClaudePath() {
|
|
1771
|
-
try {
|
|
1772
|
-
const result = execFileSync("which", ["claude"], {
|
|
1773
|
-
stdio: "pipe",
|
|
1774
|
-
timeout: 5e3
|
|
1775
|
-
}).toString().trim();
|
|
1776
|
-
return result || null;
|
|
1777
|
-
} catch {
|
|
1778
|
-
return null;
|
|
1779
|
-
}
|
|
1780
|
-
}
|
|
1781
|
-
function getClaudePath() {
|
|
1782
|
-
if (cachedClaudePath === void 0) {
|
|
1783
|
-
cachedClaudePath = resolveClaudePath();
|
|
1784
|
-
}
|
|
1785
|
-
return cachedClaudePath;
|
|
1786
|
-
}
|
|
1787
|
-
function isClaudeCliAvailable() {
|
|
1788
|
-
const p = getClaudePath();
|
|
1789
|
-
if (!p) return false;
|
|
1790
|
-
try {
|
|
1791
|
-
execFileSync(p, ["--version"], { stdio: "pipe", timeout: 5e3 });
|
|
1792
|
-
return true;
|
|
1793
|
-
} catch {
|
|
1794
|
-
return false;
|
|
1795
|
-
}
|
|
1796
|
-
}
|
|
1797
|
-
async function getAiAvailable(ctx) {
|
|
1798
|
-
const available = isClaudeCliAvailable();
|
|
1799
|
-
jsonResponse(ctx.res, { available }, 200, ctx.req);
|
|
1800
|
-
}
|
|
1801
|
-
async function postQualityScore(ctx) {
|
|
1802
|
-
const body = await readBody(ctx.req);
|
|
1803
|
-
let payload;
|
|
1804
|
-
try {
|
|
1805
|
-
payload = JSON.parse(body);
|
|
1806
|
-
} catch {
|
|
1807
|
-
jsonResponse(ctx.res, { error: "Invalid JSON body" }, 400, ctx.req);
|
|
1808
|
-
return;
|
|
1809
|
-
}
|
|
1810
|
-
const markdown = payload.markdown;
|
|
1811
|
-
if (!markdown) {
|
|
1812
|
-
jsonResponse(ctx.res, { error: "markdown field required" }, 400, ctx.req);
|
|
1813
|
-
return;
|
|
1814
|
-
}
|
|
1815
|
-
if (payload.ai === true) {
|
|
1816
|
-
const claudePath = getClaudePath();
|
|
1817
|
-
if (!claudePath) {
|
|
1818
|
-
jsonResponse(
|
|
1819
|
-
ctx.res,
|
|
1820
|
-
{
|
|
1821
|
-
error: "Claude CLI not found. Install it to enable AI scoring.",
|
|
1822
|
-
available: false
|
|
1823
|
-
},
|
|
1824
|
-
400,
|
|
1825
|
-
ctx.req
|
|
1826
|
-
);
|
|
1827
|
-
return;
|
|
1828
|
-
}
|
|
1829
|
-
const model = payload.model || DEFAULT_AI_MODEL;
|
|
1830
|
-
try {
|
|
1831
|
-
const prompt = buildAiScoringPrompt(markdown);
|
|
1832
|
-
const aiScoringSchema = JSON.stringify({
|
|
1833
|
-
type: "object",
|
|
1834
|
-
properties: {
|
|
1835
|
-
overall: {
|
|
1836
|
-
type: "number",
|
|
1837
|
-
description: "Weighted average score 0-100"
|
|
1838
|
-
},
|
|
1839
|
-
breakdown: {
|
|
1840
|
-
type: "array",
|
|
1841
|
-
items: {
|
|
1842
|
-
type: "object",
|
|
1843
|
-
properties: {
|
|
1844
|
-
name: { type: "string" },
|
|
1845
|
-
score: { type: "number" },
|
|
1846
|
-
found: { type: "boolean" },
|
|
1847
|
-
details: { type: "string" }
|
|
1848
|
-
},
|
|
1849
|
-
required: ["name", "score", "found", "details"]
|
|
1850
|
-
}
|
|
1851
|
-
},
|
|
1852
|
-
suggestions: { type: "array", items: { type: "string" } }
|
|
1853
|
-
},
|
|
1854
|
-
required: ["overall", "breakdown", "suggestions"]
|
|
1855
|
-
});
|
|
1856
|
-
const score2 = await new Promise(
|
|
1857
|
-
(resolve7, reject) => {
|
|
1858
|
-
const timeout = 2 * 60 * 1e3;
|
|
1859
|
-
const proc = spawn2(claudePath, [
|
|
1860
|
-
"--print",
|
|
1861
|
-
"--model",
|
|
1862
|
-
model,
|
|
1863
|
-
"--output-format",
|
|
1864
|
-
"json",
|
|
1865
|
-
"--json-schema",
|
|
1866
|
-
aiScoringSchema,
|
|
1867
|
-
"--permission-mode",
|
|
1868
|
-
"bypassPermissions"
|
|
1869
|
-
], {
|
|
1870
|
-
stdio: ["pipe", "pipe", "pipe"]
|
|
1871
|
-
});
|
|
1872
|
-
let stdout = "";
|
|
1873
|
-
let stderr = "";
|
|
1874
|
-
proc.stdout.on("data", (data) => {
|
|
1875
|
-
stdout += data.toString();
|
|
1876
|
-
});
|
|
1877
|
-
proc.stderr.on("data", (data) => {
|
|
1878
|
-
stderr += data.toString();
|
|
1879
|
-
});
|
|
1880
|
-
proc.stdin.write(prompt);
|
|
1881
|
-
proc.stdin.end();
|
|
1882
|
-
const timer = setTimeout(() => {
|
|
1883
|
-
proc.kill("SIGTERM");
|
|
1884
|
-
reject(
|
|
1885
|
-
new Error("AI scoring timed out after 2 minutes")
|
|
1886
|
-
);
|
|
1887
|
-
}, timeout);
|
|
1888
|
-
proc.on("close", (code) => {
|
|
1889
|
-
clearTimeout(timer);
|
|
1890
|
-
if (code !== 0) {
|
|
1891
|
-
reject(
|
|
1892
|
-
new Error(
|
|
1893
|
-
`Claude CLI exited with code ${code}: ${stderr.slice(0, 500)}`
|
|
1894
|
-
)
|
|
1895
|
-
);
|
|
1896
|
-
return;
|
|
1897
|
-
}
|
|
1898
|
-
try {
|
|
1899
|
-
const envelope = JSON.parse(stdout.trim());
|
|
1900
|
-
if (envelope.structured_output && typeof envelope.structured_output === "object") {
|
|
1901
|
-
resolve7(envelope.structured_output);
|
|
1902
|
-
return;
|
|
1903
|
-
}
|
|
1904
|
-
if (typeof envelope.result === "string" && envelope.result.trim().startsWith("{")) {
|
|
1905
|
-
resolve7(JSON.parse(envelope.result));
|
|
1906
|
-
return;
|
|
1907
|
-
}
|
|
1908
|
-
if (typeof envelope.overall === "number") {
|
|
1909
|
-
resolve7(envelope);
|
|
1910
|
-
return;
|
|
1911
|
-
}
|
|
1912
|
-
const jsonMatch = stdout.match(/\{[\s\S]*\}/);
|
|
1913
|
-
if (jsonMatch) {
|
|
1914
|
-
resolve7(JSON.parse(jsonMatch[0]));
|
|
1915
|
-
return;
|
|
1916
|
-
}
|
|
1917
|
-
reject(
|
|
1918
|
-
new Error("Failed to parse AI scoring response")
|
|
1919
|
-
);
|
|
1920
|
-
} catch (parseErr) {
|
|
1921
|
-
reject(
|
|
1922
|
-
new Error(
|
|
1923
|
-
`Failed to parse AI response: ${parseErr instanceof Error ? parseErr.message : String(parseErr)}`
|
|
1924
|
-
)
|
|
1925
|
-
);
|
|
1926
|
-
}
|
|
1927
|
-
});
|
|
1928
|
-
proc.on("error", (err) => {
|
|
1929
|
-
clearTimeout(timer);
|
|
1930
|
-
reject(
|
|
1931
|
-
new Error(`Failed to run Claude CLI: ${err.message}`)
|
|
1932
|
-
);
|
|
1933
|
-
});
|
|
1934
|
-
}
|
|
1935
|
-
);
|
|
1936
|
-
score2.scoredAt = Date.now();
|
|
1937
|
-
score2.isAiScore = true;
|
|
1938
|
-
score2.aiModel = model;
|
|
1939
|
-
jsonResponse(ctx.res, score2, 200, ctx.req);
|
|
1940
|
-
} catch (err) {
|
|
1941
|
-
jsonResponse(
|
|
1942
|
-
ctx.res,
|
|
1943
|
-
{
|
|
1944
|
-
error: `AI scoring failed: ${err instanceof Error ? err.message : String(err)}`
|
|
1945
|
-
},
|
|
1946
|
-
500,
|
|
1947
|
-
ctx.req
|
|
1948
|
-
);
|
|
1949
|
-
}
|
|
1950
|
-
return;
|
|
1951
|
-
}
|
|
1952
|
-
const blocks = parseMarkdownToBlocks(markdown);
|
|
1953
|
-
const sections = extractSections(blocks);
|
|
1954
|
-
const score = scorePlanQuality(blocks, sections);
|
|
1955
|
-
jsonResponse(ctx.res, score, 200, ctx.req);
|
|
1956
|
-
}
|
|
1957
|
-
function isSafePathSegment(value) {
|
|
1958
|
-
if (!value || value.length === 0 || value.length > 255) return false;
|
|
1959
|
-
return !/[/\\]|\.\./.test(value);
|
|
1960
|
-
}
|
|
1961
|
-
async function uploadImage(ctx) {
|
|
1962
|
-
const sessionId = ctx.url.searchParams.get("session");
|
|
1963
|
-
if (!sessionId) {
|
|
1964
|
-
jsonResponse(ctx.res, { error: "session query param required" }, 400, ctx.req);
|
|
1965
|
-
return;
|
|
1966
|
-
}
|
|
1967
|
-
if (!isSafePathSegment(sessionId)) {
|
|
1968
|
-
jsonResponse(ctx.res, { error: "Invalid session ID" }, 400, ctx.req);
|
|
1969
|
-
return;
|
|
1970
|
-
}
|
|
1971
|
-
const contentType = ctx.req.headers["content-type"] ?? "";
|
|
1972
|
-
const boundaryMatch = contentType.match(/boundary=(.+)/);
|
|
1973
|
-
if (!boundaryMatch) {
|
|
1974
|
-
jsonResponse(
|
|
1975
|
-
ctx.res,
|
|
1976
|
-
{ error: "multipart/form-data with boundary required" },
|
|
1977
|
-
400,
|
|
1978
|
-
ctx.req
|
|
1979
|
-
);
|
|
1980
|
-
return;
|
|
1981
|
-
}
|
|
1982
|
-
const rawBody = await readBodyRaw(ctx.req);
|
|
1983
|
-
if (rawBody.length > 5 * 1024 * 1024) {
|
|
1984
|
-
jsonResponse(ctx.res, { error: "Image too large (max 5MB)" }, 413, ctx.req);
|
|
1985
|
-
return;
|
|
1986
|
-
}
|
|
1987
|
-
const parsed = parseMultipartImage(rawBody, boundaryMatch[1]);
|
|
1988
|
-
if (!parsed) {
|
|
1989
|
-
jsonResponse(ctx.res, { error: "Failed to parse multipart image" }, 400, ctx.req);
|
|
1990
|
-
return;
|
|
1991
|
-
}
|
|
1992
|
-
const extMap = {
|
|
1993
|
-
"image/png": "png",
|
|
1994
|
-
"image/jpeg": "jpg",
|
|
1995
|
-
"image/gif": "gif",
|
|
1996
|
-
"image/webp": "webp"
|
|
1997
|
-
};
|
|
1998
|
-
const ext = extMap[parsed.contentType] ?? "png";
|
|
1999
|
-
const filename = `${crypto.randomUUID()}.${ext}`;
|
|
2000
|
-
const dir = path6.join(REVIEW_IMAGES_DIR, sessionId);
|
|
2001
|
-
fs9.mkdirSync(dir, { recursive: true });
|
|
2002
|
-
fs9.writeFileSync(path6.join(dir, filename), parsed.data);
|
|
2003
|
-
jsonResponse(ctx.res, { filename }, 200, ctx.req);
|
|
2004
|
-
}
|
|
2005
|
-
async function getImage(ctx) {
|
|
2006
|
-
const sessionId = ctx.url.searchParams.get("session");
|
|
2007
|
-
if (!sessionId) {
|
|
2008
|
-
jsonResponse(ctx.res, { error: "session query param required" }, 400, ctx.req);
|
|
2009
|
-
return;
|
|
2010
|
-
}
|
|
2011
|
-
const filename = ctx.params["1"];
|
|
2012
|
-
if (!isSafePathSegment(sessionId) || !isSafePathSegment(filename)) {
|
|
2013
|
-
jsonResponse(ctx.res, { error: "Invalid path" }, 400, ctx.req);
|
|
2014
|
-
return;
|
|
2015
|
-
}
|
|
2016
|
-
const filePath = path6.join(REVIEW_IMAGES_DIR, sessionId, filename);
|
|
2017
|
-
const resolved = path6.resolve(filePath);
|
|
2018
|
-
if (!resolved.startsWith(path6.resolve(REVIEW_IMAGES_DIR) + path6.sep)) {
|
|
2019
|
-
jsonResponse(ctx.res, { error: "Invalid path" }, 400, ctx.req);
|
|
2020
|
-
return;
|
|
2021
|
-
}
|
|
2022
|
-
if (!fs9.existsSync(filePath)) {
|
|
2023
|
-
jsonResponse(ctx.res, { error: "Image not found" }, 404, ctx.req);
|
|
2024
|
-
return;
|
|
2025
|
-
}
|
|
2026
|
-
const mimeMap = {
|
|
2027
|
-
".png": "image/png",
|
|
2028
|
-
".jpg": "image/jpeg",
|
|
2029
|
-
".jpeg": "image/jpeg",
|
|
2030
|
-
".gif": "image/gif",
|
|
2031
|
-
".webp": "image/webp"
|
|
2032
|
-
};
|
|
2033
|
-
const ext = path6.extname(filename).toLowerCase();
|
|
2034
|
-
const mime = mimeMap[ext] ?? "application/octet-stream";
|
|
2035
|
-
const buffer = fs9.readFileSync(filePath);
|
|
2036
|
-
ctx.res.writeHead(200, {
|
|
2037
|
-
"Content-Type": mime,
|
|
2038
|
-
"Content-Length": buffer.length,
|
|
2039
|
-
"Cache-Control": "public, max-age=86400"
|
|
2040
|
-
});
|
|
2041
|
-
ctx.res.end(buffer);
|
|
2042
|
-
}
|
|
2043
|
-
function isAllowedWebhookUrl(raw) {
|
|
2044
|
-
let parsed;
|
|
2045
|
-
try {
|
|
2046
|
-
parsed = new URL(raw);
|
|
2047
|
-
} catch {
|
|
2048
|
-
return false;
|
|
2049
|
-
}
|
|
2050
|
-
if (parsed.protocol !== "https:") return false;
|
|
2051
|
-
const hostname = parsed.hostname;
|
|
2052
|
-
if (hostname === "localhost" || hostname === "127.0.0.1" || hostname === "::1" || hostname === "[::1]" || hostname === "0.0.0.0") {
|
|
2053
|
-
return false;
|
|
2054
|
-
}
|
|
2055
|
-
if (/^10\./.test(hostname)) return false;
|
|
2056
|
-
if (/^172\.(1[6-9]|2\d|3[01])\./.test(hostname)) return false;
|
|
2057
|
-
if (/^192\.168\./.test(hostname)) return false;
|
|
2058
|
-
if (/^169\.254\./.test(hostname)) return false;
|
|
2059
|
-
if (/^0\./.test(hostname)) return false;
|
|
2060
|
-
if (hostname === "metadata.google.internal") return false;
|
|
2061
|
-
if (hostname === "169.254.169.254") return false;
|
|
2062
|
-
return true;
|
|
2063
|
-
}
|
|
2064
|
-
function isPrivateIPv4(ip) {
|
|
2065
|
-
if (ip === "127.0.0.1" || ip === "0.0.0.0") return true;
|
|
2066
|
-
if (/^10\./.test(ip)) return true;
|
|
2067
|
-
if (/^172\.(1[6-9]|2\d|3[01])\./.test(ip)) return true;
|
|
2068
|
-
if (/^192\.168\./.test(ip)) return true;
|
|
2069
|
-
if (/^169\.254\./.test(ip)) return true;
|
|
2070
|
-
if (/^0\./.test(ip)) return true;
|
|
2071
|
-
return false;
|
|
2072
|
-
}
|
|
2073
|
-
function isPrivateIP(ip) {
|
|
2074
|
-
if (ip === "::1" || ip === "::") return true;
|
|
2075
|
-
if (/^f[cd][0-9a-f]{2}:/i.test(ip)) return true;
|
|
2076
|
-
if (/^fe[89ab][0-9a-f]:/i.test(ip)) return true;
|
|
2077
|
-
if (/^::ffff:/i.test(ip)) {
|
|
2078
|
-
const v4 = ip.slice(7);
|
|
2079
|
-
return isPrivateIPv4(v4);
|
|
2080
|
-
}
|
|
2081
|
-
return isPrivateIPv4(ip);
|
|
2082
|
-
}
|
|
2083
|
-
async function resolveAndValidateHost(hostname) {
|
|
2084
|
-
try {
|
|
2085
|
-
const [v4addrs, v6addrs] = await Promise.all([
|
|
2086
|
-
resolve42(hostname).catch(() => []),
|
|
2087
|
-
resolve6(hostname).catch(() => [])
|
|
2088
|
-
]);
|
|
2089
|
-
const allAddrs = [...v4addrs, ...v6addrs];
|
|
2090
|
-
if (allAddrs.length === 0) return false;
|
|
2091
|
-
return allAddrs.every((addr) => !isPrivateIP(addr));
|
|
2092
|
-
} catch {
|
|
2093
|
-
return false;
|
|
2094
|
-
}
|
|
2095
|
-
}
|
|
2096
|
-
async function exportGithub(ctx) {
|
|
2097
|
-
const body = await readBody(ctx.req);
|
|
2098
|
-
let payload;
|
|
2099
|
-
try {
|
|
2100
|
-
payload = JSON.parse(body);
|
|
2101
|
-
} catch {
|
|
2102
|
-
jsonResponse(ctx.res, { error: "Invalid JSON" }, 400, ctx.req);
|
|
2103
|
-
return;
|
|
2104
|
-
}
|
|
2105
|
-
const title = payload.title || "Plan Review";
|
|
2106
|
-
const markdown = payload.markdown || "";
|
|
2107
|
-
const annotations = payload.annotations || [];
|
|
2108
|
-
const priorities = payload.priorities || [];
|
|
2109
|
-
const risks = payload.risks || [];
|
|
2110
|
-
let md = `# ${title}
|
|
2111
|
-
|
|
2112
|
-
## Review Summary
|
|
2113
|
-
|
|
2114
|
-
`;
|
|
2115
|
-
md += `- Annotations: ${annotations.length}
|
|
2116
|
-
`;
|
|
2117
|
-
md += `- Priorities: ${priorities.length}
|
|
2118
|
-
`;
|
|
2119
|
-
md += `- Risks: ${risks.length}
|
|
2120
|
-
|
|
2121
|
-
`;
|
|
2122
|
-
if (annotations.length > 0) {
|
|
2123
|
-
md += `### Comments
|
|
2124
|
-
|
|
2125
|
-
`;
|
|
2126
|
-
for (const a of annotations)
|
|
2127
|
-
md += `- [ ] **${a.type}**: ${a.text}
|
|
2128
|
-
`;
|
|
2129
|
-
md += "\n";
|
|
2130
|
-
}
|
|
2131
|
-
if (priorities.length > 0) {
|
|
2132
|
-
md += `### Priorities
|
|
2133
|
-
|
|
2134
|
-
`;
|
|
2135
|
-
for (const p of priorities)
|
|
2136
|
-
md += `- **${p.priority}**: ${p.sectionId}${p.note ? ` -- ${p.note}` : ""}
|
|
2137
|
-
`;
|
|
2138
|
-
md += "\n";
|
|
2139
|
-
}
|
|
2140
|
-
if (risks.length > 0) {
|
|
2141
|
-
md += `### Risks
|
|
2142
|
-
|
|
2143
|
-
`;
|
|
2144
|
-
for (const r of risks)
|
|
2145
|
-
md += `- **${r.level}**: ${r.sectionId}${r.description ? ` -- ${r.description}` : ""}
|
|
2146
|
-
`;
|
|
2147
|
-
md += "\n";
|
|
2148
|
-
}
|
|
2149
|
-
md += `<details><summary>Full Plan</summary>
|
|
2150
|
-
|
|
2151
|
-
${markdown}
|
|
2152
|
-
|
|
2153
|
-
</details>`;
|
|
2154
|
-
jsonResponse(ctx.res, { format: "github", content: md }, 200, ctx.req);
|
|
2155
|
-
}
|
|
2156
|
-
async function exportWebhook(ctx) {
|
|
2157
|
-
const body = await readBody(ctx.req);
|
|
2158
|
-
let payload;
|
|
2159
|
-
try {
|
|
2160
|
-
payload = JSON.parse(body);
|
|
2161
|
-
} catch {
|
|
2162
|
-
jsonResponse(ctx.res, { error: "Invalid JSON" }, 400, ctx.req);
|
|
2163
|
-
return;
|
|
2164
|
-
}
|
|
2165
|
-
const webhookUrl = payload.webhookUrl;
|
|
2166
|
-
if (!webhookUrl) {
|
|
2167
|
-
jsonResponse(ctx.res, { error: "webhookUrl required" }, 400, ctx.req);
|
|
2168
|
-
return;
|
|
2169
|
-
}
|
|
2170
|
-
if (!isAllowedWebhookUrl(webhookUrl)) {
|
|
2171
|
-
jsonResponse(
|
|
2172
|
-
ctx.res,
|
|
2173
|
-
{ error: "Webhook URL must be HTTPS and must not target private/internal networks" },
|
|
2174
|
-
400,
|
|
2175
|
-
ctx.req
|
|
2176
|
-
);
|
|
2177
|
-
return;
|
|
2178
|
-
}
|
|
2179
|
-
const parsedUrl = new URL(webhookUrl);
|
|
2180
|
-
const hostSafe = await resolveAndValidateHost(parsedUrl.hostname);
|
|
2181
|
-
if (!hostSafe) {
|
|
2182
|
-
jsonResponse(
|
|
2183
|
-
ctx.res,
|
|
2184
|
-
{ error: "Webhook URL resolves to a private/internal IP address" },
|
|
2185
|
-
403,
|
|
2186
|
-
ctx.req
|
|
2187
|
-
);
|
|
2188
|
-
return;
|
|
2189
|
-
}
|
|
2190
|
-
try {
|
|
2191
|
-
const webhookBody = JSON.stringify({
|
|
2192
|
-
title: payload.title,
|
|
2193
|
-
markdown: payload.markdown,
|
|
2194
|
-
annotations: payload.annotations,
|
|
2195
|
-
priorities: payload.priorities,
|
|
2196
|
-
risks: payload.risks,
|
|
2197
|
-
exportedAt: (/* @__PURE__ */ new Date()).toISOString()
|
|
2198
|
-
});
|
|
2199
|
-
const resp = await fetch(webhookUrl, {
|
|
2200
|
-
method: "POST",
|
|
2201
|
-
headers: { "Content-Type": "application/json" },
|
|
2202
|
-
body: webhookBody
|
|
2203
|
-
});
|
|
2204
|
-
const statusCode = resp.ok ? 200 : 502;
|
|
2205
|
-
jsonResponse(ctx.res, { success: resp.ok, status: resp.status }, statusCode, ctx.req);
|
|
2206
|
-
} catch (err) {
|
|
2207
|
-
jsonResponse(
|
|
2208
|
-
ctx.res,
|
|
2209
|
-
{
|
|
2210
|
-
error: `Webhook failed: ${err instanceof Error ? err.message : String(err)}`
|
|
2211
|
-
},
|
|
2212
|
-
502,
|
|
2213
|
-
ctx.req
|
|
2214
|
-
);
|
|
2215
|
-
}
|
|
2216
|
-
}
|
|
2217
|
-
async function exportObsidian(ctx) {
|
|
2218
|
-
const body = await readBody(ctx.req);
|
|
2219
|
-
let payload;
|
|
2220
|
-
try {
|
|
2221
|
-
payload = JSON.parse(body);
|
|
2222
|
-
} catch {
|
|
2223
|
-
jsonResponse(ctx.res, { error: "Invalid JSON" }, 400, ctx.req);
|
|
2224
|
-
return;
|
|
2225
|
-
}
|
|
2226
|
-
const title = payload.title || "Plan Review";
|
|
2227
|
-
const markdown = payload.markdown || "";
|
|
2228
|
-
const annotations = payload.annotations || [];
|
|
2229
|
-
const priorities = payload.priorities || [];
|
|
2230
|
-
const risks = payload.risks || [];
|
|
2231
|
-
const tags = ["ulpi", "plan-review"];
|
|
2232
|
-
if (annotations.length > 0) tags.push("has-annotations");
|
|
2233
|
-
if (risks.some((r) => r.level === "high")) tags.push("high-risk");
|
|
2234
|
-
let md = `---
|
|
2235
|
-
`;
|
|
2236
|
-
md += `title: "${title.replace(/"/g, '\\"')}"
|
|
2237
|
-
`;
|
|
2238
|
-
md += `date: ${(/* @__PURE__ */ new Date()).toISOString().slice(0, 10)}
|
|
2239
|
-
`;
|
|
2240
|
-
md += `tags: [${tags.map((t) => `"${t}"`).join(", ")}]
|
|
2241
|
-
`;
|
|
2242
|
-
md += `annotations: ${annotations.length}
|
|
2243
|
-
`;
|
|
2244
|
-
md += `priorities: ${priorities.length}
|
|
2245
|
-
`;
|
|
2246
|
-
md += `risks: ${risks.length}
|
|
2247
|
-
`;
|
|
2248
|
-
md += `---
|
|
2249
|
-
|
|
2250
|
-
`;
|
|
2251
|
-
md += `# ${title}
|
|
2252
|
-
|
|
2253
|
-
`;
|
|
2254
|
-
if (annotations.length > 0) {
|
|
2255
|
-
md += `## Review Notes
|
|
2256
|
-
|
|
2257
|
-
`;
|
|
2258
|
-
for (const a of annotations) md += `- **${a.type}**: ${a.text}
|
|
2259
|
-
`;
|
|
2260
|
-
md += "\n";
|
|
2261
|
-
}
|
|
2262
|
-
md += markdown;
|
|
2263
|
-
jsonResponse(ctx.res, { format: "obsidian", content: md }, 200, ctx.req);
|
|
2264
|
-
}
|
|
2265
|
-
async function getEngine() {
|
|
2266
|
-
return await import("./dist-CB5D5LMO.js");
|
|
2267
|
-
}
|
|
2268
|
-
var runningPipelines = /* @__PURE__ */ new Map();
|
|
2269
|
-
var activeWatchers = /* @__PURE__ */ new Map();
|
|
2270
|
-
var pipelineProgress = /* @__PURE__ */ new Map();
|
|
2271
|
-
var pipelineErrors = /* @__PURE__ */ new Map();
|
|
2272
|
-
async function codemapStatusHandler(ctx) {
|
|
2273
|
-
try {
|
|
2274
|
-
const engine = await getEngine();
|
|
2275
|
-
const { getCurrentBranch: getCurrentBranch2 } = await import("./dist-RKOGLK7R.js");
|
|
2276
|
-
const branch = getCurrentBranch2(ctx.projectDir);
|
|
2277
|
-
const status = engine.getCodemapStatus(ctx.projectDir, branch);
|
|
2278
|
-
const isIndexing = runningPipelines.has(ctx.projectDir);
|
|
2279
|
-
const watcher = activeWatchers.get(ctx.projectDir);
|
|
2280
|
-
const isWatching = watcher?.isRunning ?? false;
|
|
2281
|
-
const progress = isIndexing ? pipelineProgress.get(ctx.projectDir) : void 0;
|
|
2282
|
-
const lastError = pipelineErrors.get(ctx.projectDir);
|
|
2283
|
-
const effectiveMode = isIndexing ? "indexing" : isWatching ? "watching" : status.mode;
|
|
2284
|
-
const response = {
|
|
2285
|
-
...status,
|
|
2286
|
-
mode: effectiveMode,
|
|
2287
|
-
initialized: isIndexing ? true : status.initialized,
|
|
2288
|
-
pollIntervalMs: isIndexing ? 2e3 : isWatching ? 5e3 : 15e3,
|
|
2289
|
-
progress: progress ?? void 0,
|
|
2290
|
-
totalFiles: isIndexing && progress ? progress.totalFiles : status.totalFiles,
|
|
2291
|
-
totalChunks: isIndexing && progress ? progress.totalChunks : status.totalChunks,
|
|
2292
|
-
staleFiles: isIndexing && progress ? progress.totalChunks - progress.current : status.staleFiles,
|
|
2293
|
-
indexSizeBytes: isIndexing ? 0 : status.indexSizeBytes,
|
|
2294
|
-
lastError: lastError ?? void 0,
|
|
2295
|
-
queueDepth: isWatching && watcher ? watcher.queueDepth : status.queueDepth,
|
|
2296
|
-
watcherStats: isWatching && watcher ? {
|
|
2297
|
-
recentEvents: watcher.recentEvents,
|
|
2298
|
-
totalProcessed: watcher.totalEventsProcessed,
|
|
2299
|
-
lastFlushAt: watcher.lastFlushAt
|
|
2300
|
-
} : void 0
|
|
2301
|
-
};
|
|
2302
|
-
jsonResponse(ctx.res, response, 200, ctx.req);
|
|
2303
|
-
} catch (err) {
|
|
2304
|
-
const message = err instanceof Error ? err.message : "Failed to get codemap status";
|
|
2305
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
2306
|
-
}
|
|
2307
|
-
}
|
|
2308
|
-
async function codemapSearchHandler(ctx) {
|
|
2309
|
-
const { data, error } = await parseJsonBody(ctx.req);
|
|
2310
|
-
if (error || !data) {
|
|
2311
|
-
jsonResponse(ctx.res, { error: error ?? "Invalid request body" }, 400, ctx.req);
|
|
2312
|
-
return;
|
|
2313
|
-
}
|
|
2314
|
-
const query = data.query;
|
|
2315
|
-
if (typeof query !== "string" || !query.trim()) {
|
|
2316
|
-
jsonResponse(ctx.res, { error: "Missing or empty 'query' field" }, 400, ctx.req);
|
|
2317
|
-
return;
|
|
2318
|
-
}
|
|
2319
|
-
if (query.length > 1e3) {
|
|
2320
|
-
jsonResponse(ctx.res, { error: "Query too long (max 1000 characters)" }, 400, ctx.req);
|
|
2321
|
-
return;
|
|
2322
|
-
}
|
|
2323
|
-
try {
|
|
2324
|
-
const engine = await getEngine();
|
|
2325
|
-
const { getCurrentBranch: getCurrentBranch2 } = await import("./dist-RKOGLK7R.js");
|
|
2326
|
-
const branch = getCurrentBranch2(ctx.projectDir);
|
|
2327
|
-
const status = engine.getCodemapStatus(ctx.projectDir, branch);
|
|
2328
|
-
if (!status.initialized) {
|
|
2329
|
-
jsonResponse(ctx.res, {
|
|
2330
|
-
query: query.trim(),
|
|
2331
|
-
durationMs: 0,
|
|
2332
|
-
source: "preview",
|
|
2333
|
-
results: []
|
|
2334
|
-
}, 200, ctx.req);
|
|
2335
|
-
return;
|
|
2336
|
-
}
|
|
2337
|
-
const result = await engine.searchCode(ctx.projectDir, query.trim(), {
|
|
2338
|
-
limit: typeof data.limit === "number" ? Math.min(Math.max(1, data.limit), 50) : 10,
|
|
2339
|
-
threshold: typeof data.threshold === "number" ? data.threshold : void 0,
|
|
2340
|
-
pathPrefix: typeof data.pathPrefix === "string" ? data.pathPrefix : void 0,
|
|
2341
|
-
includeTests: typeof data.includeTests === "boolean" ? data.includeTests : void 0,
|
|
2342
|
-
includeDocs: typeof data.includeDocs === "boolean" ? data.includeDocs : void 0,
|
|
2343
|
-
branch
|
|
2344
|
-
});
|
|
2345
|
-
const response = {
|
|
2346
|
-
query: result.query,
|
|
2347
|
-
durationMs: result.durationMs,
|
|
2348
|
-
source: "live",
|
|
2349
|
-
results: result.results.map((r) => ({
|
|
2350
|
-
filePath: r.filePath,
|
|
2351
|
-
startLine: r.startLine,
|
|
2352
|
-
endLine: r.endLine,
|
|
2353
|
-
score: r.score,
|
|
2354
|
-
snippet: r.snippet
|
|
2355
|
-
}))
|
|
2356
|
-
};
|
|
2357
|
-
jsonResponse(ctx.res, response, 200, ctx.req);
|
|
2358
|
-
} catch (err) {
|
|
2359
|
-
const message = err instanceof Error ? err.message : "Search failed";
|
|
2360
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
2361
|
-
}
|
|
2362
|
-
}
|
|
2363
|
-
async function codemapConfigHandler(ctx) {
|
|
2364
|
-
try {
|
|
2365
|
-
const engine = await getEngine();
|
|
2366
|
-
const config = engine.loadCodemapConfig(ctx.projectDir);
|
|
2367
|
-
jsonResponse(ctx.res, config, 200, ctx.req);
|
|
2368
|
-
} catch (err) {
|
|
2369
|
-
const message = err instanceof Error ? err.message : "Failed to load codemap config";
|
|
2370
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
2371
|
-
}
|
|
2372
|
-
}
|
|
2373
|
-
async function codemapConfigUpdateHandler(ctx) {
|
|
2374
|
-
const { data, error } = await parseJsonBody(ctx.req);
|
|
2375
|
-
if (error || !data) {
|
|
2376
|
-
jsonResponse(ctx.res, { error: error ?? "Invalid request body" }, 400, ctx.req);
|
|
2377
|
-
return;
|
|
2378
|
-
}
|
|
2379
|
-
try {
|
|
2380
|
-
const engine = await getEngine();
|
|
2381
|
-
const existing = engine.loadCodemapConfig(ctx.projectDir);
|
|
2382
|
-
const merged = { ...existing };
|
|
2383
|
-
if (typeof data.embedding === "object" && data.embedding !== null) {
|
|
2384
|
-
merged.embedding = { ...merged.embedding, ...data.embedding };
|
|
2385
|
-
}
|
|
2386
|
-
engine.saveCodemapConfig(ctx.projectDir, merged);
|
|
2387
|
-
jsonResponse(ctx.res, merged, 200, ctx.req);
|
|
2388
|
-
} catch (err) {
|
|
2389
|
-
const message = err instanceof Error ? err.message : "Failed to update codemap config";
|
|
2390
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
2391
|
-
}
|
|
2392
|
-
}
|
|
2393
|
-
async function codemapActionHandler(ctx) {
|
|
2394
|
-
const { data, error } = await parseJsonBody(ctx.req);
|
|
2395
|
-
if (error || !data) {
|
|
2396
|
-
jsonResponse(ctx.res, { error: error ?? "Invalid request body" }, 400, ctx.req);
|
|
2397
|
-
return;
|
|
2398
|
-
}
|
|
2399
|
-
const action = data.action;
|
|
2400
|
-
const validActions = ["init", "watch", "stop-watch", "reindex", "export", "import", "serve"];
|
|
2401
|
-
if (!validActions.includes(action)) {
|
|
2402
|
-
jsonResponse(
|
|
2403
|
-
ctx.res,
|
|
2404
|
-
{ error: `Invalid action: ${action}. Valid: ${validActions.join(", ")}` },
|
|
2405
|
-
400,
|
|
2406
|
-
ctx.req
|
|
2407
|
-
);
|
|
2408
|
-
return;
|
|
2409
|
-
}
|
|
2410
|
-
try {
|
|
2411
|
-
const { getCurrentBranch: getCurrentBranch2 } = await import("./dist-RKOGLK7R.js");
|
|
2412
|
-
const branch = getCurrentBranch2(ctx.projectDir);
|
|
2413
|
-
switch (action) {
|
|
2414
|
-
case "init":
|
|
2415
|
-
case "reindex": {
|
|
2416
|
-
if (runningPipelines.has(ctx.projectDir)) {
|
|
2417
|
-
jsonResponse(
|
|
2418
|
-
ctx.res,
|
|
2419
|
-
{ ok: false, action, message: "Indexing already in progress" },
|
|
2420
|
-
409,
|
|
2421
|
-
ctx.req
|
|
2422
|
-
);
|
|
2423
|
-
return;
|
|
2424
|
-
}
|
|
2425
|
-
const startedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
2426
|
-
const projectDir = ctx.projectDir;
|
|
2427
|
-
pipelineErrors.delete(projectDir);
|
|
2428
|
-
const pipeline = (async () => {
|
|
2429
|
-
const engine = await getEngine();
|
|
2430
|
-
await engine.runInitPipeline(projectDir, (p) => {
|
|
2431
|
-
const prev = pipelineProgress.get(projectDir);
|
|
2432
|
-
let totalFiles = prev?.totalFiles ?? 0;
|
|
2433
|
-
let totalChunks = prev?.totalChunks ?? 0;
|
|
2434
|
-
if (p.phase === "scanning" && p.current > 0) {
|
|
2435
|
-
totalFiles = p.current;
|
|
2436
|
-
}
|
|
2437
|
-
if (p.phase === "embedding" || p.phase === "storing") {
|
|
2438
|
-
totalChunks = p.total;
|
|
2439
|
-
}
|
|
2440
|
-
pipelineProgress.set(projectDir, {
|
|
2441
|
-
phase: p.phase,
|
|
2442
|
-
current: p.current,
|
|
2443
|
-
total: p.total,
|
|
2444
|
-
message: p.message,
|
|
2445
|
-
totalFiles,
|
|
2446
|
-
totalChunks
|
|
2447
|
-
});
|
|
2448
|
-
}, branch);
|
|
2449
|
-
try {
|
|
2450
|
-
const { installMcpServer } = await import("@ulpi/cli/mcp-installer");
|
|
2451
|
-
installMcpServer(projectDir);
|
|
2452
|
-
} catch {
|
|
2453
|
-
}
|
|
2454
|
-
})().catch((err) => {
|
|
2455
|
-
const msg = err instanceof Error ? err.message : String(err);
|
|
2456
|
-
console.error(`[codemap] Pipeline error: ${msg}`);
|
|
2457
|
-
pipelineErrors.set(projectDir, msg);
|
|
2458
|
-
}).finally(() => {
|
|
2459
|
-
runningPipelines.delete(projectDir);
|
|
2460
|
-
pipelineProgress.delete(projectDir);
|
|
2461
|
-
});
|
|
2462
|
-
runningPipelines.set(projectDir, pipeline);
|
|
2463
|
-
jsonResponse(
|
|
2464
|
-
ctx.res,
|
|
2465
|
-
{
|
|
2466
|
-
ok: true,
|
|
2467
|
-
action,
|
|
2468
|
-
message: action === "init" ? "Indexing started" : "Re-indexing started",
|
|
2469
|
-
startedAt
|
|
2470
|
-
},
|
|
2471
|
-
202,
|
|
2472
|
-
ctx.req
|
|
2473
|
-
);
|
|
2474
|
-
return;
|
|
2475
|
-
}
|
|
2476
|
-
case "watch": {
|
|
2477
|
-
if (runningPipelines.has(ctx.projectDir)) {
|
|
2478
|
-
jsonResponse(ctx.res, { ok: false, action, message: "Indexing in progress, cannot start watcher" }, 409, ctx.req);
|
|
2479
|
-
return;
|
|
2480
|
-
}
|
|
2481
|
-
const engine = await getEngine();
|
|
2482
|
-
const watchStatus = engine.getCodemapStatus(ctx.projectDir, branch);
|
|
2483
|
-
if (!watchStatus.initialized) {
|
|
2484
|
-
const projectDir = ctx.projectDir;
|
|
2485
|
-
pipelineErrors.delete(projectDir);
|
|
2486
|
-
const pipeline = (async () => {
|
|
2487
|
-
await engine.runInitPipeline(projectDir, (p) => {
|
|
2488
|
-
const prev = pipelineProgress.get(projectDir);
|
|
2489
|
-
let totalFiles = prev?.totalFiles ?? 0;
|
|
2490
|
-
let totalChunks = prev?.totalChunks ?? 0;
|
|
2491
|
-
if (p.phase === "scanning" && p.current > 0) totalFiles = p.current;
|
|
2492
|
-
if (p.phase === "embedding" || p.phase === "storing") totalChunks = p.total;
|
|
2493
|
-
pipelineProgress.set(projectDir, {
|
|
2494
|
-
phase: p.phase,
|
|
2495
|
-
current: p.current,
|
|
2496
|
-
total: p.total,
|
|
2497
|
-
message: p.message,
|
|
2498
|
-
totalFiles,
|
|
2499
|
-
totalChunks
|
|
2500
|
-
});
|
|
2501
|
-
}, branch);
|
|
2502
|
-
const watcher = new engine.CodemapWatcher({ projectDir, branch });
|
|
2503
|
-
activeWatchers.set(projectDir, watcher);
|
|
2504
|
-
await watcher.start();
|
|
2505
|
-
})().catch((err) => {
|
|
2506
|
-
const msg = err instanceof Error ? err.message : String(err);
|
|
2507
|
-
console.error(`[codemap] Watch+init error: ${msg}`);
|
|
2508
|
-
pipelineErrors.set(projectDir, msg);
|
|
2509
|
-
activeWatchers.delete(projectDir);
|
|
2510
|
-
}).finally(() => {
|
|
2511
|
-
runningPipelines.delete(projectDir);
|
|
2512
|
-
pipelineProgress.delete(projectDir);
|
|
2513
|
-
});
|
|
2514
|
-
runningPipelines.set(projectDir, pipeline);
|
|
2515
|
-
jsonResponse(ctx.res, { ok: true, action, message: "Initializing index, then starting watcher\u2026" }, 202, ctx.req);
|
|
2516
|
-
return;
|
|
2517
|
-
}
|
|
2518
|
-
const existingWatcher = activeWatchers.get(ctx.projectDir);
|
|
2519
|
-
if (!existingWatcher || !existingWatcher.isRunning) {
|
|
2520
|
-
const watcher = new engine.CodemapWatcher({ projectDir: ctx.projectDir, branch });
|
|
2521
|
-
activeWatchers.set(ctx.projectDir, watcher);
|
|
2522
|
-
watcher.start().catch((err) => {
|
|
2523
|
-
console.error(`[codemap] Watcher error: ${err instanceof Error ? err.message : String(err)}`);
|
|
2524
|
-
activeWatchers.delete(ctx.projectDir);
|
|
2525
|
-
});
|
|
2526
|
-
}
|
|
2527
|
-
jsonResponse(ctx.res, { ok: true, action, message: "Watcher started" }, 200, ctx.req);
|
|
2528
|
-
return;
|
|
2529
|
-
}
|
|
2530
|
-
case "stop-watch": {
|
|
2531
|
-
const existingWatcher = activeWatchers.get(ctx.projectDir);
|
|
2532
|
-
if (existingWatcher && existingWatcher.isRunning) {
|
|
2533
|
-
await existingWatcher.stop();
|
|
2534
|
-
activeWatchers.delete(ctx.projectDir);
|
|
2535
|
-
jsonResponse(ctx.res, { ok: true, action, message: "Watcher stopped" }, 200, ctx.req);
|
|
2536
|
-
} else {
|
|
2537
|
-
activeWatchers.delete(ctx.projectDir);
|
|
2538
|
-
jsonResponse(ctx.res, { ok: true, action, message: "No watcher was running" }, 200, ctx.req);
|
|
2539
|
-
}
|
|
2540
|
-
return;
|
|
2541
|
-
}
|
|
2542
|
-
case "export": {
|
|
2543
|
-
const engine = await getEngine();
|
|
2544
|
-
const result = await engine.exportIndex(ctx.projectDir, branch);
|
|
2545
|
-
jsonResponse(ctx.res, { ok: true, action, ...result }, 200, ctx.req);
|
|
2546
|
-
return;
|
|
2547
|
-
}
|
|
2548
|
-
case "import": {
|
|
2549
|
-
const engine = await getEngine();
|
|
2550
|
-
const result = await engine.importIndex(ctx.projectDir, branch);
|
|
2551
|
-
jsonResponse(ctx.res, { ok: true, action, ...result }, 200, ctx.req);
|
|
2552
|
-
return;
|
|
2553
|
-
}
|
|
2554
|
-
case "serve": {
|
|
2555
|
-
jsonResponse(
|
|
2556
|
-
ctx.res,
|
|
2557
|
-
{ ok: false, action, message: "MCP server is CLI-only (stdio transport). Use 'ulpi codemap serve' from the command line." },
|
|
2558
|
-
400,
|
|
2559
|
-
ctx.req
|
|
2560
|
-
);
|
|
2561
|
-
return;
|
|
2562
|
-
}
|
|
2563
|
-
default:
|
|
2564
|
-
jsonResponse(ctx.res, { error: `Unknown action: ${action}` }, 400, ctx.req);
|
|
2565
|
-
}
|
|
2566
|
-
} catch (err) {
|
|
2567
|
-
const message = err instanceof Error ? err.message : "Action failed";
|
|
2568
|
-
jsonResponse(ctx.res, { ok: false, action, error: message }, 500, ctx.req);
|
|
2569
|
-
}
|
|
2570
|
-
}
|
|
2571
|
-
async function getEngine2() {
|
|
2572
|
-
return await import("./dist-GJYT2OQV.js");
|
|
2573
|
-
}
|
|
2574
|
-
async function memoryStatusHandler(ctx) {
|
|
2575
|
-
try {
|
|
2576
|
-
const engine = await getEngine2();
|
|
2577
|
-
const stats = await engine.getMemoryStats(ctx.projectDir);
|
|
2578
|
-
const config = engine.loadMemoryConfig(ctx.projectDir);
|
|
2579
|
-
const initialized = engine.isMemoryInitialized(ctx.projectDir);
|
|
2580
|
-
const classifyProgress = engine.readClassifyBatchProgress(ctx.projectDir);
|
|
2581
|
-
jsonResponse(ctx.res, { stats, config, initialized, classifyProgress }, 200, ctx.req);
|
|
2582
|
-
} catch (err) {
|
|
2583
|
-
const message = err instanceof Error ? err.message : "Failed to get memory status";
|
|
2584
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
2585
|
-
}
|
|
2586
|
-
}
|
|
2587
|
-
async function memorySearchHandler(ctx) {
|
|
2588
|
-
const { data, error } = await parseJsonBody(ctx.req);
|
|
2589
|
-
if (error || !data) {
|
|
2590
|
-
jsonResponse(ctx.res, { error: error ?? "Invalid request body" }, 400, ctx.req);
|
|
2591
|
-
return;
|
|
2592
|
-
}
|
|
2593
|
-
const query = data.query;
|
|
2594
|
-
if (typeof query !== "string" || !query.trim()) {
|
|
2595
|
-
jsonResponse(ctx.res, { error: "Missing or empty 'query' field" }, 400, ctx.req);
|
|
2596
|
-
return;
|
|
2597
|
-
}
|
|
2598
|
-
if (query.length > 1e3) {
|
|
2599
|
-
jsonResponse(ctx.res, { error: "Query too long (max 1000 characters)" }, 400, ctx.req);
|
|
2600
|
-
return;
|
|
2601
|
-
}
|
|
2602
|
-
try {
|
|
2603
|
-
const engine = await getEngine2();
|
|
2604
|
-
const initialized = engine.isMemoryInitialized(ctx.projectDir);
|
|
2605
|
-
if (!initialized) {
|
|
2606
|
-
jsonResponse(ctx.res, {
|
|
2607
|
-
query: query.trim(),
|
|
2608
|
-
durationMs: 0,
|
|
2609
|
-
results: []
|
|
2610
|
-
}, 200, ctx.req);
|
|
2611
|
-
return;
|
|
2612
|
-
}
|
|
2613
|
-
const result = await engine.searchMemory(ctx.projectDir, {
|
|
2614
|
-
query: query.trim(),
|
|
2615
|
-
limit: typeof data.limit === "number" ? Math.min(Math.max(1, data.limit), 50) : 10,
|
|
2616
|
-
types: Array.isArray(data.types) ? data.types : void 0,
|
|
2617
|
-
importance: Array.isArray(data.importance) ? data.importance : void 0,
|
|
2618
|
-
tags: Array.isArray(data.tags) ? data.tags : void 0,
|
|
2619
|
-
since: typeof data.since === "string" ? data.since : void 0
|
|
2620
|
-
});
|
|
2621
|
-
jsonResponse(ctx.res, {
|
|
2622
|
-
query: result.query,
|
|
2623
|
-
durationMs: result.durationMs,
|
|
2624
|
-
results: result.results.map((r) => ({
|
|
2625
|
-
entry: r.entry,
|
|
2626
|
-
score: r.score,
|
|
2627
|
-
finalScore: r.finalScore
|
|
2628
|
-
}))
|
|
2629
|
-
}, 200, ctx.req);
|
|
2630
|
-
} catch (err) {
|
|
2631
|
-
const message = err instanceof Error ? err.message : "Search failed";
|
|
2632
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
2633
|
-
}
|
|
2634
|
-
}
|
|
2635
|
-
async function memoryRememberHandler(ctx) {
|
|
2636
|
-
const { data, error } = await parseJsonBody(ctx.req);
|
|
2637
|
-
if (error || !data) {
|
|
2638
|
-
jsonResponse(ctx.res, { error: error ?? "Invalid request body" }, 400, ctx.req);
|
|
2639
|
-
return;
|
|
2640
|
-
}
|
|
2641
|
-
const summary = data.summary;
|
|
2642
|
-
if (typeof summary !== "string" || !summary.trim()) {
|
|
2643
|
-
jsonResponse(ctx.res, { error: "Missing or empty 'summary' field" }, 400, ctx.req);
|
|
2644
|
-
return;
|
|
2645
|
-
}
|
|
2646
|
-
const type = data.type;
|
|
2647
|
-
if (typeof type !== "string") {
|
|
2648
|
-
jsonResponse(ctx.res, { error: "Missing 'type' field" }, 400, ctx.req);
|
|
2649
|
-
return;
|
|
2650
|
-
}
|
|
2651
|
-
const validTypes = [
|
|
2652
|
-
"DECISION",
|
|
2653
|
-
"PATTERN",
|
|
2654
|
-
"BUG_ROOT_CAUSE",
|
|
2655
|
-
"PREFERENCE",
|
|
2656
|
-
"CONSTRAINT",
|
|
2657
|
-
"CONTEXT",
|
|
2658
|
-
"LESSON",
|
|
2659
|
-
"RELATIONSHIP"
|
|
2660
|
-
];
|
|
2661
|
-
if (!validTypes.includes(type)) {
|
|
2662
|
-
jsonResponse(ctx.res, { error: `Invalid type: ${type}. Valid: ${validTypes.join(", ")}` }, 400, ctx.req);
|
|
2663
|
-
return;
|
|
2664
|
-
}
|
|
2665
|
-
try {
|
|
2666
|
-
const engine = await getEngine2();
|
|
2667
|
-
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
2668
|
-
const id = engine.generateMemoryId(type, summary.trim());
|
|
2669
|
-
const entry = {
|
|
2670
|
-
id,
|
|
2671
|
-
type,
|
|
2672
|
-
summary: summary.trim(),
|
|
2673
|
-
detail: typeof data.detail === "string" ? data.detail : null,
|
|
2674
|
-
importance: typeof data.importance === "string" ? data.importance : "medium",
|
|
2675
|
-
tags: Array.isArray(data.tags) ? data.tags.filter((t) => typeof t === "string") : [],
|
|
2676
|
-
relatedFiles: Array.isArray(data.relatedFiles) ? data.relatedFiles.filter((f) => typeof f === "string") : [],
|
|
2677
|
-
source: {
|
|
2678
|
-
type: "explicit"
|
|
2679
|
-
},
|
|
2680
|
-
createdAt: now,
|
|
2681
|
-
updatedAt: now,
|
|
2682
|
-
accessCount: 0
|
|
2683
|
-
};
|
|
2684
|
-
await engine.rememberMemory(ctx.projectDir, entry);
|
|
2685
|
-
jsonResponse(ctx.res, { success: true, id }, 201, ctx.req);
|
|
2686
|
-
} catch (err) {
|
|
2687
|
-
const message = err instanceof Error ? err.message : "Failed to remember memory";
|
|
2688
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
2689
|
-
}
|
|
2690
|
-
}
|
|
2691
|
-
async function memoryTimelineHandler(ctx) {
|
|
2692
|
-
try {
|
|
2693
|
-
const engine = await getEngine2();
|
|
2694
|
-
let entries = engine.listEntries(ctx.projectDir);
|
|
2695
|
-
const typeParam = ctx.url.searchParams.get("type");
|
|
2696
|
-
if (typeParam) {
|
|
2697
|
-
const types = typeParam.split(",");
|
|
2698
|
-
entries = entries.filter((e) => types.includes(e.type));
|
|
2699
|
-
}
|
|
2700
|
-
const sinceParam = ctx.url.searchParams.get("since");
|
|
2701
|
-
if (sinceParam) {
|
|
2702
|
-
entries = entries.filter((e) => e.createdAt >= sinceParam);
|
|
2703
|
-
}
|
|
2704
|
-
entries.sort((a, b) => b.createdAt.localeCompare(a.createdAt));
|
|
2705
|
-
const limitParam = ctx.url.searchParams.get("limit");
|
|
2706
|
-
const limit = limitParam ? Math.min(Math.max(1, parseInt(limitParam, 10) || 20), 200) : 20;
|
|
2707
|
-
entries = entries.slice(0, limit);
|
|
2708
|
-
jsonResponse(ctx.res, { entries }, 200, ctx.req);
|
|
2709
|
-
} catch (err) {
|
|
2710
|
-
const message = err instanceof Error ? err.message : "Failed to list timeline";
|
|
2711
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
2712
|
-
}
|
|
2713
|
-
}
|
|
2714
|
-
async function memoryEntryHandler(ctx) {
|
|
2715
|
-
const id = ctx.params["1"];
|
|
2716
|
-
if (!id) {
|
|
2717
|
-
jsonResponse(ctx.res, { error: "Missing entry ID" }, 400, ctx.req);
|
|
2718
|
-
return;
|
|
2719
|
-
}
|
|
2720
|
-
try {
|
|
2721
|
-
const engine = await getEngine2();
|
|
2722
|
-
const entry = engine.loadEntry(ctx.projectDir, id);
|
|
2723
|
-
if (!entry) {
|
|
2724
|
-
jsonResponse(ctx.res, { error: "Entry not found" }, 404, ctx.req);
|
|
2725
|
-
return;
|
|
2726
|
-
}
|
|
2727
|
-
jsonResponse(ctx.res, entry, 200, ctx.req);
|
|
2728
|
-
} catch (err) {
|
|
2729
|
-
const message = err instanceof Error ? err.message : "Failed to load entry";
|
|
2730
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
2731
|
-
}
|
|
2732
|
-
}
|
|
2733
|
-
async function memoryForgetHandler(ctx) {
|
|
2734
|
-
const id = ctx.params["1"];
|
|
2735
|
-
if (!id) {
|
|
2736
|
-
jsonResponse(ctx.res, { error: "Missing entry ID" }, 400, ctx.req);
|
|
2737
|
-
return;
|
|
2738
|
-
}
|
|
2739
|
-
try {
|
|
2740
|
-
const engine = await getEngine2();
|
|
2741
|
-
const updated = engine.updateEntry(ctx.projectDir, id, {
|
|
2742
|
-
supersededBy: "manual"
|
|
2743
|
-
});
|
|
2744
|
-
if (!updated) {
|
|
2745
|
-
jsonResponse(ctx.res, { error: "Entry not found" }, 404, ctx.req);
|
|
2746
|
-
return;
|
|
2747
|
-
}
|
|
2748
|
-
jsonResponse(ctx.res, { success: true }, 200, ctx.req);
|
|
2749
|
-
} catch (err) {
|
|
2750
|
-
const message = err instanceof Error ? err.message : "Failed to forget entry";
|
|
2751
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
2752
|
-
}
|
|
2753
|
-
}
|
|
2754
|
-
async function memoryActionHandler(ctx) {
|
|
2755
|
-
const { data, error } = await parseJsonBody(ctx.req);
|
|
2756
|
-
if (error || !data) {
|
|
2757
|
-
jsonResponse(ctx.res, { error: error ?? "Invalid request body" }, 400, ctx.req);
|
|
2758
|
-
return;
|
|
2759
|
-
}
|
|
2760
|
-
const action = data.action;
|
|
2761
|
-
const validActions = ["init", "export", "import", "enable", "disable", "classify", "classify-all", "classify-clear", "reindex"];
|
|
2762
|
-
if (!validActions.includes(action)) {
|
|
2763
|
-
jsonResponse(
|
|
2764
|
-
ctx.res,
|
|
2765
|
-
{ error: `Invalid action: ${action}. Valid: ${validActions.join(", ")}` },
|
|
2766
|
-
400,
|
|
2767
|
-
ctx.req
|
|
2768
|
-
);
|
|
2769
|
-
return;
|
|
2770
|
-
}
|
|
2771
|
-
try {
|
|
2772
|
-
const engine = await getEngine2();
|
|
2773
|
-
switch (action) {
|
|
2774
|
-
case "init": {
|
|
2775
|
-
const config = engine.loadMemoryConfig(ctx.projectDir);
|
|
2776
|
-
engine.saveMemoryConfig(ctx.projectDir, { ...config, enabled: true });
|
|
2777
|
-
if (!engine.memoryBranchExists(ctx.projectDir)) {
|
|
2778
|
-
try {
|
|
2779
|
-
engine.initMemoryBranch(ctx.projectDir);
|
|
2780
|
-
} catch {
|
|
2781
|
-
}
|
|
2782
|
-
}
|
|
2783
|
-
jsonResponse(ctx.res, { ok: true, action, message: "Memory initialized" }, 200, ctx.req);
|
|
2784
|
-
return;
|
|
2785
|
-
}
|
|
2786
|
-
case "export": {
|
|
2787
|
-
const result = await engine.exportMemories(ctx.projectDir);
|
|
2788
|
-
jsonResponse(ctx.res, { ok: true, action, ...result }, 200, ctx.req);
|
|
2789
|
-
return;
|
|
2790
|
-
}
|
|
2791
|
-
case "import": {
|
|
2792
|
-
const result = await engine.importMemories(ctx.projectDir);
|
|
2793
|
-
jsonResponse(ctx.res, { ok: true, action, ...result }, 200, ctx.req);
|
|
2794
|
-
return;
|
|
2795
|
-
}
|
|
2796
|
-
case "enable": {
|
|
2797
|
-
const config = engine.loadMemoryConfig(ctx.projectDir);
|
|
2798
|
-
engine.saveMemoryConfig(ctx.projectDir, { ...config, enabled: true });
|
|
2799
|
-
jsonResponse(ctx.res, { ok: true, action, message: "Memory enabled" }, 200, ctx.req);
|
|
2800
|
-
return;
|
|
2801
|
-
}
|
|
2802
|
-
case "disable": {
|
|
2803
|
-
const config = engine.loadMemoryConfig(ctx.projectDir);
|
|
2804
|
-
engine.saveMemoryConfig(ctx.projectDir, { ...config, enabled: false });
|
|
2805
|
-
jsonResponse(ctx.res, { ok: true, action, message: "Memory disabled" }, 200, ctx.req);
|
|
2806
|
-
return;
|
|
2807
|
-
}
|
|
2808
|
-
case "classify": {
|
|
2809
|
-
const sessionId = typeof data.sessionId === "string" ? data.sessionId : void 0;
|
|
2810
|
-
if (!sessionId) {
|
|
2811
|
-
jsonResponse(ctx.res, { error: "Missing 'sessionId' for classify action" }, 400, ctx.req);
|
|
2812
|
-
return;
|
|
2813
|
-
}
|
|
2814
|
-
const result = await engine.classifySession(ctx.projectDir, sessionId);
|
|
2815
|
-
jsonResponse(ctx.res, { ok: true, action, ...result }, 200, ctx.req);
|
|
2816
|
-
return;
|
|
2817
|
-
}
|
|
2818
|
-
case "classify-all": {
|
|
2819
|
-
const { spawn: spawn4 } = await import("child_process");
|
|
2820
|
-
const ulpiBin = process.argv[1];
|
|
2821
|
-
const args = ["memory", "classify", "-p", ctx.projectDir];
|
|
2822
|
-
if (typeof data.branch === "string" && data.branch) {
|
|
2823
|
-
args.push("--branch", data.branch);
|
|
2824
|
-
}
|
|
2825
|
-
if (data.export) args.push("--export");
|
|
2826
|
-
const child = spawn4(process.execPath, [ulpiBin, ...args], {
|
|
2827
|
-
detached: true,
|
|
2828
|
-
stdio: "ignore",
|
|
2829
|
-
env: { ...process.env, ULPI_BG_CLASSIFY: "1" }
|
|
2830
|
-
});
|
|
2831
|
-
child.unref();
|
|
2832
|
-
jsonResponse(ctx.res, {
|
|
2833
|
-
ok: true,
|
|
2834
|
-
action,
|
|
2835
|
-
message: "Classification started"
|
|
2836
|
-
}, 200, ctx.req);
|
|
2837
|
-
return;
|
|
2838
|
-
}
|
|
2839
|
-
case "classify-clear": {
|
|
2840
|
-
engine.clearClassifyBatchProgress(ctx.projectDir);
|
|
2841
|
-
jsonResponse(ctx.res, { ok: true, action, message: "Classification progress cleared" }, 200, ctx.req);
|
|
2842
|
-
return;
|
|
2843
|
-
}
|
|
2844
|
-
case "reindex": {
|
|
2845
|
-
const result = await engine.reindexMemories(ctx.projectDir);
|
|
2846
|
-
jsonResponse(ctx.res, { ok: true, action, ...result }, 200, ctx.req);
|
|
2847
|
-
return;
|
|
2848
|
-
}
|
|
2849
|
-
default:
|
|
2850
|
-
jsonResponse(ctx.res, { error: `Unknown action: ${action}` }, 400, ctx.req);
|
|
2851
|
-
}
|
|
2852
|
-
} catch (err) {
|
|
2853
|
-
const message = err instanceof Error ? err.message : "Action failed";
|
|
2854
|
-
jsonResponse(ctx.res, { ok: false, action, error: message }, 500, ctx.req);
|
|
2855
|
-
}
|
|
2856
|
-
}
|
|
2857
|
-
async function memoryConfigHandler(ctx) {
|
|
2858
|
-
try {
|
|
2859
|
-
const engine = await getEngine2();
|
|
2860
|
-
const config = engine.loadMemoryConfig(ctx.projectDir);
|
|
2861
|
-
jsonResponse(ctx.res, config, 200, ctx.req);
|
|
2862
|
-
} catch (err) {
|
|
2863
|
-
const message = err instanceof Error ? err.message : "Failed to load memory config";
|
|
2864
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
2865
|
-
}
|
|
2866
|
-
}
|
|
2867
|
-
async function memoryConfigUpdateHandler(ctx) {
|
|
2868
|
-
const { data, error } = await parseJsonBody(ctx.req);
|
|
2869
|
-
if (error || !data) {
|
|
2870
|
-
jsonResponse(ctx.res, { error: error ?? "Invalid request body" }, 400, ctx.req);
|
|
2871
|
-
return;
|
|
2872
|
-
}
|
|
2873
|
-
try {
|
|
2874
|
-
const engine = await getEngine2();
|
|
2875
|
-
const existing = engine.loadMemoryConfig(ctx.projectDir);
|
|
2876
|
-
const merged = { ...existing };
|
|
2877
|
-
if (typeof data.enabled === "boolean") merged.enabled = data.enabled;
|
|
2878
|
-
if (typeof data.captureMode === "string") merged.captureMode = data.captureMode;
|
|
2879
|
-
if (typeof data.surfaceOnStart === "boolean") merged.surfaceOnStart = data.surfaceOnStart;
|
|
2880
|
-
if (typeof data.surfaceLimit === "number") merged.surfaceLimit = data.surfaceLimit;
|
|
2881
|
-
if (typeof data.autoExport === "boolean") merged.autoExport = data.autoExport;
|
|
2882
|
-
if (Array.isArray(data.redactPatterns)) merged.redactPatterns = data.redactPatterns;
|
|
2883
|
-
if (typeof data.classifier === "object" && data.classifier !== null) {
|
|
2884
|
-
merged.classifier = { ...merged.classifier, ...data.classifier };
|
|
2885
|
-
}
|
|
2886
|
-
if (typeof data.embedding === "object" && data.embedding !== null) {
|
|
2887
|
-
merged.embedding = { ...merged.embedding, ...data.embedding };
|
|
2888
|
-
}
|
|
2889
|
-
if (typeof data.retention === "object" && data.retention !== null) {
|
|
2890
|
-
merged.retention = { ...merged.retention, ...data.retention };
|
|
2891
|
-
}
|
|
2892
|
-
if (typeof data.ranking === "object" && data.ranking !== null) {
|
|
2893
|
-
merged.ranking = { ...merged.ranking, ...data.ranking };
|
|
2894
|
-
}
|
|
2895
|
-
engine.saveMemoryConfig(ctx.projectDir, merged);
|
|
2896
|
-
jsonResponse(ctx.res, merged, 200, ctx.req);
|
|
2897
|
-
} catch (err) {
|
|
2898
|
-
const message = err instanceof Error ? err.message : "Failed to update memory config";
|
|
2899
|
-
jsonResponse(ctx.res, { error: message }, 500, ctx.req);
|
|
2900
|
-
}
|
|
2901
|
-
}
|
|
2902
|
-
async function getDepgraph() {
|
|
2903
|
-
return await import("./dist-YA2BWZB2.js");
|
|
2904
|
-
}
|
|
2905
|
-
async function getBranch(projectDir) {
|
|
2906
|
-
const { getCurrentBranch: getCurrentBranch2 } = await import("./dist-RKOGLK7R.js");
|
|
2907
|
-
return getCurrentBranch2(projectDir);
|
|
2908
|
-
}
|
|
2909
|
-
// GET handler: report whether a dependency graph exists for the current
// branch, plus aggregate counts and the top-10 files by PageRank score.
async function depgraphStatusHandler(ctx) {
  try {
    const depgraph = await getDepgraph();
    const branch = await getBranch(ctx.projectDir);
    const graph = depgraph.loadGraph(ctx.projectDir, branch);
    if (!graph) {
      // No graph on disk for this branch: return an "empty" status payload
      // with 200 rather than an error.
      jsonResponse(ctx.res, {
        available: false,
        nodeCount: 0,
        edgeCount: 0,
        definitionCount: 0,
        referenceCount: 0,
        cycleCount: 0,
        topFiles: []
      }, 200, ctx.req);
      return;
    }
    const pageRank = depgraph.loadPageRank(ctx.projectDir, branch);
    const metrics = depgraph.loadMetrics(ctx.projectDir, branch);
    // Sum per-node definition/reference counts across the whole graph.
    let definitionCount = 0;
    let referenceCount = 0;
    for (const node of Object.values(graph.nodes)) {
      definitionCount += node.definitionCount;
      referenceCount += node.referenceCount;
    }
    // Top 10 files by PageRank (descending), only when rank data is present.
    const topFiles = [];
    if (pageRank) {
      const sorted = Object.entries(pageRank.ranks).sort((a, b) => b[1] - a[1]).slice(0, 10);
      for (const [filePath, rank] of sorted) {
        topFiles.push({ filePath, rank });
      }
    }
    jsonResponse(ctx.res, {
      available: true,
      nodeCount: Object.keys(graph.nodes).length,
      edgeCount: graph.edges.length,
      definitionCount,
      referenceCount,
      // cycleCount comes from cached metrics; falls back to 0 when absent.
      cycleCount: metrics?.cycles?.length ?? 0,
      topFiles
    }, 200, ctx.req);
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to get depgraph status";
    jsonResponse(ctx.res, { error: message }, 500, ctx.req);
  }
}
|
|
2955
|
-
// POST handler: look up dependencies or dependents of a single file.
// Body: { filePath, direction: "deps"|"dependents", includeTransitive?, maxDepth? }.
async function depgraphSearchHandler(ctx) {
  const { data, error } = await parseJsonBody(ctx.req);
  if (error || !data) {
    jsonResponse(ctx.res, { error: error ?? "Invalid request body" }, 400, ctx.req);
    return;
  }
  const filePath = data.filePath;
  if (typeof filePath !== "string" || !filePath.trim()) {
    jsonResponse(ctx.res, { error: "Missing or empty 'filePath' field" }, 400, ctx.req);
    return;
  }
  const direction = data.direction;
  if (direction !== "deps" && direction !== "dependents") {
    jsonResponse(ctx.res, { error: "Field 'direction' must be 'deps' or 'dependents'" }, 400, ctx.req);
    return;
  }
  const includeTransitive = data.includeTransitive === true;
  // Clamp maxDepth to [1, 50]; default to 10 when not a number.
  const maxDepth = typeof data.maxDepth === "number" ? Math.max(1, Math.min(data.maxDepth, 50)) : 10;
  try {
    const depgraph = await getDepgraph();
    const branch = await getBranch(ctx.projectDir);
    const graph = depgraph.loadGraph(ctx.projectDir, branch);
    if (!graph) {
      // No graph for this branch: empty result set, not an error.
      jsonResponse(ctx.res, {
        filePath: filePath.trim(),
        direction,
        results: []
      }, 200, ctx.req);
      return;
    }
    let results;
    if (includeTransitive) {
      // Transitive walk returns bare file paths only.
      const files = direction === "deps" ? depgraph.getTransitiveDeps(graph, filePath.trim(), maxDepth) : depgraph.getTransitiveRdeps(graph, filePath.trim(), maxDepth);
      results = files.map((f) => ({ filePath: f }));
    } else {
      // Direct edges additionally carry the symbols crossing each edge.
      const edges = direction === "deps" ? depgraph.getOutgoingEdges(graph, filePath.trim()) : depgraph.getIncomingEdges(graph, filePath.trim());
      results = edges.map((e) => ({
        filePath: direction === "deps" ? e.target : e.source,
        symbols: e.symbols
      }));
    }
    jsonResponse(ctx.res, {
      filePath: filePath.trim(),
      direction,
      results
    }, 200, ctx.req);
  } catch (err) {
    const message = err instanceof Error ? err.message : "Depgraph search failed";
    jsonResponse(ctx.res, { error: message }, 500, ctx.req);
  }
}
|
|
3006
|
-
// GET handler: return files ranked by PageRank, highest first.
// ?limit is clamped to [1, 500]; defaults to 20 (also on unparseable input).
async function depgraphRankingHandler(ctx) {
  try {
    const depgraph = await getDepgraph();
    const branch = await getBranch(ctx.projectDir);
    const pageRank = depgraph.loadPageRank(ctx.projectDir, branch);
    if (!pageRank) {
      // No rank data computed yet: empty payload with 200.
      jsonResponse(ctx.res, { totalFiles: 0, rankings: [] }, 200, ctx.req);
      return;
    }
    const limitParam = ctx.url.searchParams.get("limit");
    // `|| 20` restores the default when parseInt yields NaN/0, then clamp.
    const limit = limitParam ? Math.max(1, Math.min(parseInt(limitParam, 10) || 20, 500)) : 20;
    const sorted = Object.entries(pageRank.ranks).sort((a, b) => b[1] - a[1]);
    const rankings = sorted.slice(0, limit).map(([filePath, rank], i) => ({
      filePath,
      rank,
      position: i + 1
    }));
    jsonResponse(ctx.res, {
      totalFiles: sorted.length,
      rankings
    }, 200, ctx.req);
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to get depgraph ranking";
    jsonResponse(ctx.res, { error: message }, 500, ctx.req);
  }
}
|
|
3032
|
-
// GET handler: detect dependency cycles in the current branch's graph.
// ?limit caps the number of cycles returned (clamped to [1, 500], default 20);
// totalCycles always reflects the full count.
async function depgraphCyclesHandler(ctx) {
  try {
    const depgraph = await getDepgraph();
    const branch = await getBranch(ctx.projectDir);
    const graph = depgraph.loadGraph(ctx.projectDir, branch);
    if (!graph) {
      jsonResponse(ctx.res, { totalCycles: 0, cycles: [] }, 200, ctx.req);
      return;
    }
    const limitParam = ctx.url.searchParams.get("limit");
    // `|| 20` restores the default when parseInt yields NaN/0, then clamp.
    const limit = limitParam ? Math.max(1, Math.min(parseInt(limitParam, 10) || 20, 500)) : 20;
    // Cycles are recomputed on every request rather than read from cache.
    const cycles = depgraph.detectCycles(graph);
    jsonResponse(ctx.res, {
      totalCycles: cycles.length,
      cycles: cycles.slice(0, limit).map((c) => ({
        files: c.files,
        edgeCount: c.edgeCount
      }))
    }, 200, ctx.req);
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to detect cycles";
    jsonResponse(ctx.res, { error: message }, 500, ctx.req);
  }
}
|
|
3056
|
-
// POST handler: coupling metrics for files under a module path prefix.
// Body: { modulePath? } — empty/missing prefix matches every file.
async function depgraphMetricsHandler(ctx) {
  const { data, error } = await parseJsonBody(ctx.req);
  if (error || !data) {
    jsonResponse(ctx.res, { error: error ?? "Invalid request body" }, 400, ctx.req);
    return;
  }
  const modulePath = typeof data.modulePath === "string" ? data.modulePath : "";
  try {
    const depgraph = await getDepgraph();
    const branch = await getBranch(ctx.projectDir);
    const graph = depgraph.loadGraph(ctx.projectDir, branch);
    if (!graph) {
      // No graph: empty metrics list, not an error.
      jsonResponse(ctx.res, {
        modulePath: modulePath.trim(),
        metrics: []
      }, 200, ctx.req);
      return;
    }
    // Coupling is computed for the whole graph, then filtered by prefix.
    const coupling = depgraph.computeCoupling(graph);
    const prefix = modulePath.trim();
    const filtered = coupling.filter((c) => c.filePath.startsWith(prefix));
    jsonResponse(ctx.res, {
      modulePath: prefix,
      metrics: filtered.map((c) => ({
        filePath: c.filePath,
        afferentCoupling: c.afferentCoupling,
        efferentCoupling: c.efferentCoupling,
        instability: c.instability
      }))
    }, 200, ctx.req);
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to compute metrics";
    jsonResponse(ctx.res, { error: message }, 500, ctx.req);
  }
}
|
|
3091
|
-
// GET handler: proxy the ULPI models listing endpoint using the configured
// API key. Maps upstream 401/403 to 401, other upstream failures to 502.
async function ulpiModelsHandler(ctx) {
  const apiKey = resolveApiKey("ulpi");
  const baseUrl = resolveUlpiUrl();
  if (!apiKey) {
    jsonResponse(
      ctx.res,
      { error: "No ULPI API key configured. Set it with: ulpi config set ulpi-key <key>" },
      400,
      ctx.req
    );
    return;
  }
  try {
    // 10s request timeout via AbortSignal.
    const response = await fetch(`${baseUrl}/api/v1/models`, {
      headers: { Authorization: `Bearer ${apiKey}` },
      signal: AbortSignal.timeout(1e4)
    });
    if (!response.ok) {
      // Best-effort read of the upstream error body (truncated to 200 chars).
      const text = await response.text().catch(() => "");
      jsonResponse(
        ctx.res,
        { error: `ULPI API returned ${response.status}: ${text.slice(0, 200)}` },
        response.status === 401 || response.status === 403 ? 401 : 502,
        ctx.req
      );
      return;
    }
    // Pass the upstream JSON payload through unchanged.
    const data = await response.json();
    jsonResponse(ctx.res, data, 200, ctx.req);
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to fetch ULPI models";
    jsonResponse(ctx.res, { error: message }, 502, ctx.req);
  }
}
|
|
3125
|
-
// Locate the `claude` CLI binary. Tries `which` first, then a fixed list of
// well-known install locations. Returns the path string, or null if not found.
function resolveClaudePath2() {
  try {
    const fromPath = execFileSync2("which", ["claude"], { stdio: "pipe", timeout: 3e3 }).toString().trim();
    if (fromPath) return fromPath;
  } catch {
    // `which` unavailable or claude not on PATH -- fall through to candidates.
  }
  const home = os2.homedir();
  const candidates = [
    path7.join(home, ".local", "bin", "claude"),
    path7.join(home, ".claude", "bin", "claude"),
    "/usr/local/bin/claude"
  ];
  for (const candidate of candidates) {
    try {
      if (fs10.existsSync(candidate)) {
        return candidate;
      }
    } catch {
      // Ignore filesystem errors and keep probing remaining candidates.
    }
  }
  return null;
}
|
|
3145
|
-
// Memoized claude binary path: undefined = not yet resolved, null = resolved
// but not found, string = resolved path.
var cachedClaudePath2;
function getClaudePath2() {
  if (cachedClaudePath2 !== void 0) {
    return cachedClaudePath2;
  }
  cachedClaudePath2 = resolveClaudePath2();
  return cachedClaudePath2;
}
|
|
3152
|
-
// True when the claude CLI exists AND responds to `--version` within 5s.
function isClaudeCliAvailable2() {
  const claudeBin = getClaudePath2();
  if (!claudeBin) {
    return false;
  }
  try {
    execFileSync2(claudeBin, ["--version"], { stdio: "pipe", timeout: 5e3 });
    return true;
  } catch {
    // Binary present but not runnable (or timed out) counts as unavailable.
    return false;
  }
}
|
|
3162
|
-
// Derive "anti-pattern" signals from a session's raw event stream:
// - fileEditCounts: files written/edited 3+ times (possible churn)
// - postcondFailRates: per-command { success, fail } tallies
//   (NOTE(review): every "postcondition_run" bumps `success`, including runs
//   that also emit "postcondition_failed" -- `success` may really mean "runs")
// - blockRetrySequences: runs of 2+ identical consecutive tool_blocked events
// - commandRepeatCounts: Bash commands issued 5+ times
function computeAntiPatternMetrics(events) {
  // --- file churn ---
  const editTally = {};
  for (const ev of events) {
    if ((ev.toolName === "Write" || ev.toolName === "Edit") && ev.filePath) {
      editTally[ev.filePath] = (editTally[ev.filePath] ?? 0) + 1;
    }
  }
  const fileEditCounts = Object.fromEntries(
    Object.entries(editTally).filter(([, n]) => n >= 3)
  );

  // --- postcondition outcomes ---
  const postcondFailRates = {};
  const bucketFor = (cmd) => postcondFailRates[cmd] ?? (postcondFailRates[cmd] = { success: 0, fail: 0 });
  for (const ev of events) {
    if (ev.event === "postcondition_run" && ev.command) {
      bucketFor(ev.command).success++;
    }
    if (ev.event === "postcondition_failed" && ev.command) {
      bucketFor(ev.command).fail++;
    }
  }

  // --- block-retry runs ---
  // A run is a maximal streak of consecutive tool_blocked events sharing the
  // same tool|file|rule key; any other event ends the streak. Only streaks of
  // length >= 2 are reported.
  const blockRetrySequences = [];
  let streak = null;
  const flushStreak = () => {
    if (streak && streak.count >= 2) {
      blockRetrySequences.push({ ...streak });
    }
  };
  const keyOf = (tool, file, rule) => `${tool}|${file}|${rule}`;
  for (const ev of events) {
    if (ev.event === "tool_blocked" && ev.toolName) {
      const tool = ev.toolName;
      const file = ev.filePath ?? "";
      const rule = ev.ruleName ?? "";
      if (streak && keyOf(streak.tool, streak.file, streak.rule) === keyOf(tool, file, rule)) {
        streak.count++;
      } else {
        flushStreak();
        streak = { tool, file, rule, count: 1 };
      }
    } else {
      flushStreak();
      streak = null;
    }
  }
  flushStreak();

  // --- repeated commands ---
  const cmdTally = {};
  for (const ev of events) {
    if (ev.toolName === "Bash" && ev.command) {
      cmdTally[ev.command] = (cmdTally[ev.command] ?? 0) + 1;
    }
  }
  const commandRepeatCounts = Object.fromEntries(
    Object.entries(cmdTally).filter(([, n]) => n >= 5)
  );

  return { fileEditCounts, postcondFailRates, blockRetrySequences, commandRepeatCounts };
}
|
|
3228
|
-
// Assemble the text prompt sent to the claude CLI for commit enrichment.
// Sections (in order): instructions, commit metadata, diff stats, optional
// session summary, optional anti-pattern metrics, optional raw diff excerpt,
// closing instruction. Returns one newline-joined string.
function buildEnrichmentPrompt(entry, rawEvents) {
  const lines = [];
  const add = (...chunks) => {
    lines.push(...chunks);
  };

  // Instructions for the structured-output fields.
  add(
    "You are analyzing a commit captured by ULPI, a rules engine for AI coding agents.",
    "Fill in the structured output fields:",
    "- summary: A concise 1-2 sentence description of what this commit accomplishes.",
    "- intent: The high-level goal or motivation behind the changes.",
    "- challenges: Specific difficulties encountered during this session (e.g. test failures, blocked tools, repeated edits). Be concrete, not vague.",
    "- ruleInsights: Observations about which guardian rules helped, which were too strict, or what rules could be added. Reference specific rule names when possible.",
    ""
  );

  // Commit metadata; the full message is included only when it adds to the subject.
  const commit = entry.commit;
  add(
    "## Commit",
    `SHA: ${commit.shortSha}`,
    `Subject: ${commit.subject}`,
    `Author: ${commit.authorName}`,
    `Date: ${commit.authorDate}`,
    `Branch: ${commit.branch}`
  );
  if (commit.message !== commit.subject) {
    add(`Message:\n${commit.message}`);
  }
  add("");

  // Diff statistics; file list is capped at 30 entries.
  add(
    "## Diff Stats",
    `Files changed: ${entry.diff.filesChanged}`,
    `Insertions: +${entry.diff.insertions}`,
    `Deletions: -${entry.diff.deletions}`
  );
  if (entry.diff.files.length > 0) {
    add("Files:");
    for (const f of entry.diff.files.slice(0, 30)) {
      add(`  ${f.status} ${f.path} (+${f.additions}/-${f.deletions})`);
    }
    if (entry.diff.files.length > 30) {
      add(`  ... and ${entry.diff.files.length - 30} more files`);
    }
  }
  add("");

  // Session summary, when a matching ULPI session was found for this commit.
  if (entry.session) {
    const s = entry.session;
    add(
      "## Session Summary",
      `Session: ${s.sessionId}${s.sessionName ? ` (${s.sessionName})` : ""}`,
      `Total events: ${s.totalEvents}`,
      `Rules enforced: ${s.stats.rulesEnforced}`,
      `Actions blocked: ${s.stats.actionsBlocked}`,
      `Auto-approved: ${s.stats.autoActionsRun}`,
      `Files read: ${s.stats.filesRead}, written: ${s.stats.filesWritten}, deleted: ${s.stats.filesDeleted}`,
      `Commands run: ${s.stats.commandsRun}`,
      `Tests: ${s.stats.testsRun ? s.stats.testsPassed ? "passed" : "failed/unknown" : "not run"}`,
      `Lint: ${s.stats.lintRun ? "run" : "not run"}, Build: ${s.stats.buildRun ? "run" : "not run"}`
    );
    if (s.blockedTools.length > 0) {
      add(`Blocked tools: ${s.blockedTools.join(", ")}`);
    }
    if (s.preconditionsFired.length > 0) {
      add(`Preconditions fired: ${s.preconditionsFired.join(", ")}`);
    }
    if (s.postconditionsRun.length > 0) {
      add(`Postconditions run: ${s.postconditionsRun.join(", ")}`);
    }
    if (s.skillsInjected.length > 0) {
      add(`Skills injected: ${s.skillsInjected.join(", ")}`);
    }
    add("");
  }

  // Anti-pattern metrics derived from raw events, only when any signal fired.
  if (rawEvents && rawEvents.length > 0) {
    const metrics = computeAntiPatternMetrics(rawEvents);
    const hasMetrics = Object.keys(metrics.fileEditCounts).length > 0 || Object.keys(metrics.postcondFailRates).length > 0 || metrics.blockRetrySequences.length > 0 || Object.keys(metrics.commandRepeatCounts).length > 0;
    if (hasMetrics) {
      add("## Anti-Pattern Metrics");
      if (Object.keys(metrics.fileEditCounts).length > 0) {
        add("Files edited 3+ times (possible churn):");
        for (const [file, count] of Object.entries(metrics.fileEditCounts)) {
          add(`  ${file}: ${count} edits`);
        }
      }
      if (metrics.blockRetrySequences.length > 0) {
        add("Block-retry sequences (tool blocked then retried):");
        for (const seq of metrics.blockRetrySequences) {
          add(`  ${seq.tool}${seq.file ? ` on ${seq.file}` : ""}${seq.rule ? ` by ${seq.rule}` : ""}: ${seq.count}x`);
        }
      }
      const failedPostconds = Object.entries(metrics.postcondFailRates).filter(
        ([, v]) => v.fail > 0
      );
      if (failedPostconds.length > 0) {
        add("Postcondition failure rates:");
        for (const [cmd, rates] of failedPostconds) {
          add(`  ${cmd}: ${rates.success} pass, ${rates.fail} fail`);
        }
      }
      if (Object.keys(metrics.commandRepeatCounts).length > 0) {
        add("Commands run 5+ times:");
        for (const [cmd, count] of Object.entries(metrics.commandRepeatCounts)) {
          add(`  ${cmd}: ${count}x`);
        }
      }
      add("");
    }
  }

  // Raw diff excerpt, truncated to 10k characters for the AI prompt.
  if (entry.rawDiff) {
    const maxAiDiff = 1e4;
    const diffExcerpt = entry.rawDiff.length > maxAiDiff ? entry.rawDiff.slice(0, maxAiDiff) + "\n... (truncated)" : entry.rawDiff;
    add("## Raw Diff (excerpt)", diffExcerpt, "");
  }

  add("Fill in all fields based on the data above. Be specific and concrete.");
  return lines.join("\n");
}
|
|
3333
|
-
// Normalize a parsed AI response into an enrichment record. Wrong-typed
// fields are dropped (undefined); a missing summary becomes "".
function toAiEnrichment(parsed, model, generatedAt) {
  const asString = (v) => (typeof v === "string" ? v : void 0);
  const asArray = (v) => (Array.isArray(v) ? v : void 0);
  return {
    summary: asString(parsed.summary) ?? "",
    intent: asString(parsed.intent),
    challenges: asArray(parsed.challenges),
    ruleInsights: asArray(parsed.ruleInsights),
    model,
    generatedAt
  };
}
|
|
3343
|
-
// Parse the claude CLI's stdout into an enrichment record, trying formats in
// order: (1) a JSON envelope (structured_output, or a JSON string in .result,
// or the fields at top level), (2) a fenced ```json block, (3) bare JSON,
// (4) plain text truncated to 500 chars as the summary.
function parseEnrichmentResponse(response, model) {
  const generatedAt = new Date().toISOString();
  const trimmed = response.trim();

  // 1) Whole response is a JSON envelope from the CLI.
  try {
    const envelope = JSON.parse(trimmed);
    if (envelope.structured_output && typeof envelope.structured_output === "object") {
      return toAiEnrichment(envelope.structured_output, model, generatedAt);
    }
    if (typeof envelope.result === "string" && envelope.result.trim().startsWith("{")) {
      return toAiEnrichment(JSON.parse(envelope.result), model, generatedAt);
    }
    if (typeof envelope.summary === "string") {
      return toAiEnrichment(envelope, model, generatedAt);
    }
  } catch {
    // Not valid JSON (or malformed inner JSON): try the other formats.
  }

  // 2) JSON inside a fenced ```json code block.
  const fenced = response.match(/```json\s*\n?([\s\S]*?)\n?\s*```/);
  if (fenced) {
    try {
      return toAiEnrichment(JSON.parse(fenced[1]), model, generatedAt);
    } catch {
      // Fence contents were not valid JSON; keep going.
    }
  }

  // 3) Bare JSON object.
  try {
    return toAiEnrichment(JSON.parse(trimmed), model, generatedAt);
  } catch {
    // Not JSON at all.
  }

  // 4) Plain-text fallback: first 500 chars become the summary.
  return {
    summary: trimmed.slice(0, 500),
    model,
    generatedAt
  };
}
|
|
3378
|
-
// JSON Schema (as a string) passed to the claude CLI via --json-schema to
// constrain its structured output; only `summary` is required.
var ENRICHMENT_SCHEMA = JSON.stringify({
  type: "object",
  properties: {
    summary: { type: "string", description: "Concise 1-2 sentence description of what this commit accomplishes" },
    intent: { type: "string", description: "High-level goal or motivation behind the changes" },
    challenges: {
      type: "array",
      items: { type: "string" },
      description: "Specific difficulties encountered (test failures, blocked tools, repeated edits)"
    },
    ruleInsights: {
      type: "array",
      items: { type: "string" },
      description: "Observations about which guardian rules helped, which were too strict, or what rules could be added"
    }
  },
  required: ["summary"]
});
|
|
3396
|
-
// Run the claude CLI to enrich a history entry with an AI-generated summary.
// Spawns `claude --print` with a JSON schema, writes the prompt on stdin, and
// resolves with the parsed enrichment. Rejects when the CLI is missing, exits
// non-zero, fails to spawn, or exceeds the 2-minute timeout.
function enrichEntry(entry, rawEvents, model = DEFAULT_AI_MODEL) {
  const prompt = buildEnrichmentPrompt(entry, rawEvents);
  return new Promise((resolve7, reject) => {
    const timeout = 2 * 60 * 1e3;
    const claudePath = getClaudePath2();
    if (!claudePath) {
      reject(new Error("Claude CLI not found"));
      return;
    }
    const proc = spawn3(claudePath, [
      "--print",
      "--model",
      model,
      "--output-format",
      "json",
      "--json-schema",
      ENRICHMENT_SCHEMA,
      "--permission-mode",
      "bypassPermissions"
    ], {
      stdio: ["pipe", "pipe", "pipe"]
    });
    // Accumulate full stdout/stderr; output is parsed only after exit.
    let stdout = "";
    let stderr = "";
    proc.stdout.on("data", (data) => {
      stdout += data.toString();
    });
    proc.stderr.on("data", (data) => {
      stderr += data.toString();
    });
    // The prompt is delivered via stdin, then stdin is closed.
    proc.stdin.write(prompt);
    proc.stdin.end();
    // Kill the process and reject if it runs past the timeout. Note the later
    // 'close' event then calls reject again, which is a harmless no-op on an
    // already-settled promise.
    const timer = setTimeout(() => {
      proc.kill("SIGTERM");
      reject(new Error("AI enrichment timed out after 2 minutes"));
    }, timeout);
    proc.on("close", (code) => {
      clearTimeout(timer);
      if (code !== 0) {
        reject(new Error(`claude CLI exited with code ${code}: ${stderr.slice(0, 500)}`));
      } else {
        resolve7(parseEnrichmentResponse(stdout, model));
      }
    });
    proc.on("error", (err) => {
      clearTimeout(timer);
      reject(new Error(`Failed to run claude CLI: ${err.message}`));
    });
  });
}
|
|
3446
|
-
// Allowed history-config keys mapped to their expected typeof values;
// presumably used to validate config updates -- confirm against the
// config PUT handler.
var HISTORY_CONFIG_FIELDS = {
  enabled: "boolean",
  branchName: "string",
  aiEnrichment: "boolean",
  aiModel: "string",
  maxDiffSize: "number",
  maxAiDiffSize: "number",
  collectReviewPlans: "boolean",
  captureTranscript: "boolean",
  maxTranscriptSize: "number",
  captureStrategy: "string"
};
|
|
3458
|
-
// Report the ULPI version injected at build time on globalThis, defaulting
// to "0.0.0" when unset or when the global lookup throws.
function getUlpiVersion() {
  try {
    const injected = globalThis.__ULPI_API_VERSION__;
    return injected ?? "0.0.0";
  } catch {
    return "0.0.0";
  }
}
|
|
3465
|
-
// GET handler: report whether the history branch exists, its entry count,
// and whether review-plan collection is enabled.
async function historyStatusHandler(ctx) {
  const { projectDir, req, res } = ctx;
  const exists = historyBranchExists(projectDir);
  let totalEntries = 0;
  let collectReviewPlans = false;
  if (exists) {
    // Branch metadata may be unreadable; defaults above stand in that case.
    const meta = readBranchMeta(projectDir);
    if (meta) {
      totalEntries = meta.totalEntries;
      collectReviewPlans = meta.config?.collectReviewPlans ?? false;
    }
  }
  jsonResponse(res, { exists, totalEntries, collectReviewPlans }, 200, req);
}
|
|
3479
|
-
// POST handler: initialize the history branch (idempotent), optionally enable
// review-plan collection, and best-effort install git hooks.
// Responds 201 on first init, 200 on re-init.
async function historyInitHandler(ctx) {
  const { projectDir, req, res } = ctx;
  const alreadyExists = historyBranchExists(projectDir);
  try {
    if (!alreadyExists) {
      const projectName = path8.basename(projectDir);
      initHistoryBranch(projectDir, projectName, getUlpiVersion());
    }
    // Body is optional; a malformed body falls back to defaults.
    const body = await readBody(req);
    let options = {};
    try {
      options = JSON.parse(body);
    } catch {
    }
    if (options.collectReviewPlans) {
      const meta = readBranchMeta(projectDir);
      // Flip the flag only when it is currently off, committing the updated
      // meta.json on the history branch via a temporary worktree.
      if (meta && !meta.config.collectReviewPlans) {
        meta.config.collectReviewPlans = true;
        await withWorktree(projectDir, getHistoryBranch(), (worktreeDir) => {
          writeAndStage(worktreeDir, "meta.json", JSON.stringify(meta, null, 2) + "\n");
          commitInWorktree(worktreeDir, "Enable review plan collection");
        });
      }
    }
    // Git hook installation is best-effort; failures are swallowed and the
    // empty result is reported instead.
    let gitHooksResult = { installed: [], skipped: [] };
    try {
      const { installGitHooks } = await import("./dist-QAU3LGJN.js");
      const binaryPath = getBinaryPath();
      gitHooksResult = installGitHooks(projectDir, binaryPath);
    } catch {
    }
    // Re-read metadata so the response reflects any changes made above.
    const status = readBranchMeta(projectDir);
    const totalEntries = status?.totalEntries ?? 0;
    jsonResponse(
      res,
      {
        success: true,
        totalEntries,
        config: status?.config,
        gitHooks: gitHooksResult,
        reinitialized: alreadyExists
      },
      alreadyExists ? 200 : 201,
      req
    );
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to initialize history";
    jsonResponse(res, { error: message }, 500, req);
  }
}
|
|
3529
|
-
// POST handler: backfill history entries from existing git commits.
// Body: { limit? (capped at 100, default 20), branchOnly? }. Commits that
// already have an entry are skipped; per-commit capture failures are silently
// skipped (best-effort backfill). Responds with { captured, skipped, total }.
async function historyBackfillHandler(ctx) {
  const { projectDir, req, res } = ctx;
  const exists = historyBranchExists(projectDir);
  if (!exists) {
    jsonResponse(res, { error: "History branch not initialized" }, 400, req);
    return;
  }
  // Body is optional; a malformed body falls back to defaults.
  const body = await readBody(req);
  let options = {};
  try {
    options = JSON.parse(body);
  } catch {
  }
  const limit = Math.min(options.limit ?? 20, 100);
  const branchOnly = options.branchOnly ?? false;
  // Lazy-load the heavy history/git helper module.
  const {
    listRecentCommits,
    listBranchOnlyCommits,
    getCommitMetadata,
    getCommitDiffStats,
    getCommitRawDiff,
    loadActiveGuards,
    findSessionForCommit,
    buildSessionSummary,
    entryExists,
    writeHistoryEntry,
    DEFAULT_HISTORY_CONFIG
  } = await import("./dist-QAU3LGJN.js");
  const commits = branchOnly ? listBranchOnlyCommits(projectDir, limit) : listRecentCommits(projectDir, limit);
  if (commits.length === 0) {
    jsonResponse(res, { captured: 0, skipped: 0, total: 0 }, 200, req);
    return;
  }
  const meta = readBranchMeta(projectDir);
  const config = meta?.config ?? DEFAULT_HISTORY_CONFIG;
  let captured = 0;
  let skipped = 0;
  for (const sha of commits) {
    if (entryExists(projectDir, sha)) {
      skipped++;
      continue;
    }
    try {
      const metadata = getCommitMetadata(projectDir, sha);
      const diffStats = getCommitDiffStats(projectDir, sha);
      const rawDiffResult = getCommitRawDiff(projectDir, sha, config.maxDiffSize);
      const guardsYaml = loadActiveGuards(projectDir);
      // Try to associate a recorded ULPI session with this commit by its
      // author date; entries without a session get nulls.
      let sessionSummary = null;
      let sessionState = null;
      let sessionEvents = [];
      const sessionMatch = findSessionForCommit(projectDir, metadata.authorDate);
      if (sessionMatch) {
        sessionSummary = buildSessionSummary(sessionMatch.state, sessionMatch.events);
        sessionState = sessionMatch.state;
        sessionEvents = sessionMatch.events;
      }
      const entry = {
        version: 1,
        capturedAt: (/* @__PURE__ */ new Date()).toISOString(),
        commit: metadata,
        diff: diffStats,
        // Empty diff / false truncation flags are normalized to undefined so
        // they are omitted from the serialized entry.
        rawDiff: rawDiffResult.diff || void 0,
        diffTruncated: rawDiffResult.truncated || void 0,
        session: sessionSummary,
        enrichment: null,
        reviewPlans: null
      };
      await writeHistoryEntry(projectDir, entry, {
        state: sessionState,
        events: sessionEvents,
        guardsYaml
      });
      captured++;
    } catch {
      // Best-effort: a commit that fails to capture is neither counted as
      // captured nor as skipped.
    }
  }
  jsonResponse(res, { captured, skipped, total: commits.length }, 200, req);
}
|
|
3607
|
-
// POST handler: rewind the working tree to a commit recorded in the history
// timeline. Body: { sha, confirm? }. Without confirm:true, returns a preview
// (with a diffstat) instead of acting. Refuses when there are uncommitted
// changes (409). The actual rewind is a destructive `git reset --hard`.
async function historyRewindHandler(ctx) {
  const { projectDir, req, res } = ctx;
  const exists = historyBranchExists(projectDir);
  if (!exists) {
    jsonResponse(res, { error: "History branch not initialized" }, 400, req);
    return;
  }
  const body = await readBody(req);
  let payload = {};
  try {
    payload = JSON.parse(body);
  } catch {
    jsonResponse(res, { error: "Invalid JSON body" }, 400, req);
    return;
  }
  if (!payload.sha) {
    jsonResponse(res, { error: "sha is required" }, 400, req);
    return;
  }
  // Accept abbreviated (>= 4 hex chars) through full 40-char SHAs.
  if (!/^[0-9a-f]{4,40}$/i.test(payload.sha)) {
    jsonResponse(res, { error: "Invalid SHA format" }, 400, req);
    return;
  }
  const timeline = readTimeline(projectDir);
  if (!timeline) {
    jsonResponse(res, { error: "No timeline found" }, 400, req);
    return;
  }
  // Match on full SHA, short SHA, or unique prefix (first match wins).
  const match = timeline.entries.find(
    (e) => e.sha === payload.sha || e.shortSha === payload.sha || e.sha.startsWith(payload.sha)
  );
  if (!match) {
    jsonResponse(res, { error: `No history entry found for SHA: ${payload.sha}` }, 404, req);
    return;
  }
  // `git diff --quiet` exits non-zero when there are changes, so a throw here
  // means the tree or index is dirty.
  try {
    execFileSync3("git", ["diff", "--quiet"], {
      cwd: projectDir,
      encoding: "utf-8",
      timeout: 5e3,
      stdio: "pipe"
    });
    execFileSync3("git", ["diff", "--cached", "--quiet"], {
      cwd: projectDir,
      encoding: "utf-8",
      timeout: 5e3,
      stdio: "pipe"
    });
  } catch {
    jsonResponse(res, { error: "You have uncommitted changes. Commit or stash before rewinding." }, 409, req);
    return;
  }
  if (!payload.confirm) {
    // Preview mode: include a best-effort diffstat between HEAD and the target.
    let diffStat = "";
    try {
      diffStat = execFileSync3("git", ["diff", "--stat", `HEAD...${match.sha}`], {
        cwd: projectDir,
        encoding: "utf-8",
        timeout: 1e4,
        stdio: "pipe"
      }).trim();
    } catch {
    }
    jsonResponse(res, {
      preview: true,
      sha: match.sha,
      shortSha: match.shortSha,
      subject: match.subject,
      date: match.date,
      diffStat
    }, 200, req);
    return;
  }
  try {
    // Destructive: discards history after the target commit on this branch.
    execFileSync3("git", ["reset", "--hard", match.sha], {
      cwd: projectDir,
      encoding: "utf-8",
      timeout: 1e4,
      stdio: "pipe"
    });
    jsonResponse(res, { success: true, sha: match.sha, shortSha: match.shortSha }, 200, req);
  } catch (err) {
    const message = err instanceof Error ? err.message : "Rewind failed";
    jsonResponse(res, { error: message }, 500, req);
  }
}
|
|
3693
|
-
/**
 * GET /api/history/config — return the effective history configuration.
 *
 * When no history branch exists yet, answers 200 with the built-in defaults
 * (enabled: false) so the UI can render a sensible settings form. Otherwise
 * reads the config out of the branch metadata; a metadata read failure is a 500.
 */
async function historyConfigGetHandler(ctx) {
  const { projectDir, req, res } = ctx;
  if (!historyBranchExists(projectDir)) {
    // No branch yet: report defaults rather than an error.
    const defaults = {
      enabled: false,
      branchName: getHistoryBranch(),
      aiEnrichment: true,
      aiModel: DEFAULT_AI_MODEL,
      maxDiffSize: 5e4,
      maxAiDiffSize: 1e4,
      collectReviewPlans: false
    };
    jsonResponse(res, defaults, 200, req);
    return;
  }
  const meta = readBranchMeta(projectDir);
  if (meta) {
    jsonResponse(res, meta.config, 200, req);
  } else {
    jsonResponse(res, { error: "Failed to read history branch metadata" }, 500, req);
  }
}
|
|
3715
|
-
/**
 * PUT /api/history/config — merge whitelisted config fields into the history
 * branch metadata and commit the change on the history branch.
 *
 * Only keys present in HISTORY_CONFIG_FIELDS with a matching typeof are
 * accepted; everything else in the payload is silently dropped. Responds with
 * the freshly re-read config (or, if re-reading fails, echoes the updates).
 *
 * Fix: JSON.parse happily returns null / primitives for bodies like "null" or
 * "42"; the original then crashed in Object.entries(updates) and surfaced as a
 * 500. Reject non-object payloads with a 400 instead.
 */
async function historyConfigPutHandler(ctx) {
  const { projectDir, req, res } = ctx;
  const exists = historyBranchExists(projectDir);
  if (!exists) {
    jsonResponse(res, { error: `History branch not initialized. Run '${CLI_BIN_NAME} history init' first.` }, 400, req);
    return;
  }
  const body = await readBody(req);
  let updates;
  try {
    updates = JSON.parse(body);
  } catch {
    jsonResponse(res, { error: "Invalid JSON body" }, 400, req);
    return;
  }
  // Guard: valid JSON is not necessarily a plain object ("null", "42", "[]").
  if (updates === null || typeof updates !== "object" || Array.isArray(updates)) {
    jsonResponse(res, { error: "Invalid JSON body" }, 400, req);
    return;
  }
  // Keep only known fields whose runtime type matches the declared one.
  const filtered = {};
  for (const [key, value] of Object.entries(updates)) {
    const expectedType = HISTORY_CONFIG_FIELDS[key];
    if (expectedType && typeof value === expectedType) {
      filtered[key] = value;
    }
  }
  if (Object.keys(filtered).length === 0) {
    jsonResponse(res, { error: "No valid config fields provided" }, 400, req);
    return;
  }
  try {
    // Apply the merge inside a temporary worktree of the history branch and
    // commit it, so config changes are versioned like every other entry.
    await withWorktree(projectDir, getHistoryBranch(), (worktreeDir) => {
      const metaPath = path8.join(worktreeDir, "meta.json");
      const raw = fs11.readFileSync(metaPath, "utf-8");
      const meta2 = JSON.parse(raw);
      meta2.config = { ...meta2.config, ...filtered };
      writeAndStage(worktreeDir, "meta.json", JSON.stringify(meta2, null, 2));
      commitInWorktree(worktreeDir, "chore(history): update config");
    });
    const meta = readBranchMeta(projectDir);
    jsonResponse(res, meta?.config ?? updates, 200, req);
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to update config";
    jsonResponse(res, { error: message }, 500, req);
  }
}
|
|
3757
|
-
/**
 * GET /api/history/stats — aggregate statistics over the recorded timeline:
 * entry count, date range, enrichment coverage (%), average diff sizes, the
 * 20 most-touched files, tag frequencies, and commits-per-day buckets.
 */
async function historyStatsHandler(ctx) {
  const { projectDir, req, res } = ctx;
  if (!historyBranchExists(projectDir)) {
    jsonResponse(res, { error: "History branch not initialized" }, 400, req);
    return;
  }
  const timeline = readTimeline(projectDir);
  if (!timeline || timeline.entries.length === 0) {
    // Nothing recorded yet: answer with an all-zero stats payload.
    jsonResponse(res, {
      totalEntries: 0,
      dateRange: null,
      enrichmentCoverage: 0,
      avgFilesChanged: 0,
      avgInsertions: 0,
      avgDeletions: 0,
      topFiles: [],
      tagCounts: {},
      commitsByDay: {}
    }, 200, req);
    return;
  }
  const { entries } = timeline;
  const totalEntries = entries.length;
  // Date range from parseable entry dates only.
  const timestamps = entries.map((entry) => new Date(entry.date).getTime()).filter((t) => !isNaN(t));
  let dateRange = null;
  if (timestamps.length > 0) {
    dateRange = {
      from: new Date(Math.min(...timestamps)).toISOString(),
      to: new Date(Math.max(...timestamps)).toISOString()
    };
  }
  const enrichedCount = entries.filter((entry) => entry.hasEnrichment).length;
  const enrichmentCoverage = Math.round(enrichedCount / totalEntries * 100);
  let filesChangedSum = 0;
  let insertionsSum = 0;
  let deletionsSum = 0;
  const perFileCounts = /* @__PURE__ */ new Map();
  const tagCounts = {};
  const commitsByDay = {};
  for (const entry of entries) {
    filesChangedSum += entry.filesChanged;
    try {
      // ISO date prefix (YYYY-MM-DD) buckets the commit by day.
      const day = entry.date.slice(0, 10);
      commitsByDay[day] = (commitsByDay[day] ?? 0) + 1;
    } catch {
      // entry.date may be absent; skip the per-day bucket for this entry.
    }
    if (entry.tags) {
      for (const tag of entry.tags) {
        tagCounts[tag] = (tagCounts[tag] ?? 0) + 1;
      }
    }
    // Diff totals and per-file churn require the full entry payload.
    const detail = readHistoryEntry(projectDir, entry.sha);
    if (detail) {
      insertionsSum += detail.diff.insertions;
      deletionsSum += detail.diff.deletions;
      for (const changed of detail.diff.files) {
        perFileCounts.set(changed.path, (perFileCounts.get(changed.path) ?? 0) + 1);
      }
    }
  }
  // Top 20 files by number of commits touching them.
  const topFiles = [...perFileCounts.entries()]
    .sort((a, b) => b[1] - a[1])
    .slice(0, 20)
    .map(([file, count]) => ({ file, count }));
  jsonResponse(res, {
    totalEntries,
    dateRange,
    enrichmentCoverage,
    avgFilesChanged: Math.round(filesChangedSum / totalEntries),
    avgInsertions: Math.round(insertionsSum / totalEntries),
    avgDeletions: Math.round(deletionsSum / totalEntries),
    topFiles,
    tagCounts,
    commitsByDay
  }, 200, req);
}
|
|
3825
|
-
/**
 * GET /api/history/entries — list timeline entries, optionally filtered by a
 * `?tag=` query parameter. A missing timeline is an empty list, not an error.
 */
async function historyEntriesListHandler(ctx) {
  const { projectDir, url, req, res } = ctx;
  if (!historyBranchExists(projectDir)) {
    jsonResponse(res, { error: "History branch not initialized" }, 400, req);
    return;
  }
  const timeline = readTimeline(projectDir);
  if (!timeline) {
    jsonResponse(res, [], 200, req);
    return;
  }
  const tagFilter = url.searchParams.get("tag");
  const result = tagFilter
    ? timeline.entries.filter((entry) => entry.tags?.includes(tagFilter))
    : timeline.entries;
  jsonResponse(res, result, 200, req);
}
|
|
3844
|
-
/**
 * GET /api/history/entries/:sha — return the full stored entry for one commit.
 * The sha comes from either a named route group or positional group "1".
 */
async function historyEntryDetailHandler(ctx) {
  const { projectDir, params, req, res } = ctx;
  const sha = params.sha ?? params["1"] ?? "";
  if (!historyBranchExists(projectDir)) {
    jsonResponse(res, { error: "History branch not initialized" }, 400, req);
    return;
  }
  const entry = readHistoryEntry(projectDir, sha);
  if (entry) {
    jsonResponse(res, entry, 200, req);
  } else {
    jsonResponse(res, { error: `Entry not found: ${sha}` }, 404, req);
  }
}
|
|
3859
|
-
/**
 * PUT /api/history/entries/:sha/tags — replace the tag list on one entry.
 * Expects a JSON body of shape { tags: string[] }; delegates the write to the
 * lazily-imported updateEntryTags helper.
 *
 * Fix: JSON.parse returns null / primitives for bodies like "null"; the
 * original then threw on payload.tags and surfaced as a 500. Reject non-object
 * payloads with a 400 instead.
 */
async function historyEntryTagsHandler(ctx) {
  const { projectDir, params, req, res } = ctx;
  const sha = params.sha ?? params["1"] ?? "";
  const exists = historyBranchExists(projectDir);
  if (!exists) {
    jsonResponse(res, { error: "History branch not initialized" }, 400, req);
    return;
  }
  const body = await readBody(req);
  let payload;
  try {
    payload = JSON.parse(body);
  } catch {
    jsonResponse(res, { error: "Invalid JSON body" }, 400, req);
    return;
  }
  // Guard: valid JSON is not necessarily an object ("null", "42", '"x"').
  if (payload === null || typeof payload !== "object") {
    jsonResponse(res, { error: "Invalid JSON body" }, 400, req);
    return;
  }
  if (!Array.isArray(payload.tags) || !payload.tags.every((t) => typeof t === "string")) {
    jsonResponse(res, { error: "tags must be a string array" }, 400, req);
    return;
  }
  try {
    // Lazy import keeps the tag-mutation code out of the server's hot path.
    const { updateEntryTags } = await import("./dist-QAU3LGJN.js");
    await updateEntryTags(projectDir, sha, payload.tags);
    jsonResponse(res, { success: true, sha, tags: payload.tags }, 200, req);
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to update tags";
    jsonResponse(res, { error: message }, 500, req);
  }
}
|
|
3888
|
-
/**
 * GET /api/history/entries/:sha/transcript — return the saved session
 * transcript for one entry, or 404 if none was captured.
 */
async function historyEntryTranscriptHandler(ctx) {
  const { projectDir, params, req, res } = ctx;
  const sha = params.sha ?? params["1"] ?? "";
  if (!historyBranchExists(projectDir)) {
    jsonResponse(res, { error: "History branch not initialized" }, 400, req);
    return;
  }
  // Lazy import keeps transcript reading out of the server's hot path.
  const { readEntryTranscript } = await import("./dist-QAU3LGJN.js");
  const transcript = readEntryTranscript(projectDir, sha);
  if (transcript) {
    jsonResponse(res, { sha, transcript }, 200, req);
  } else {
    jsonResponse(res, { error: "No transcript for this entry" }, 404, req);
  }
}
|
|
3904
|
-
/**
 * GET /api/history/export — dump the whole history dataset in one JSON blob:
 * branch metadata, the timeline, and each entry's full payload (entry is null
 * for SHAs whose detail file cannot be read).
 */
async function historyExportHandler(ctx) {
  const { projectDir, req, res } = ctx;
  if (!historyBranchExists(projectDir)) {
    jsonResponse(res, { error: "History branch not initialized" }, 400, req);
    return;
  }
  const timeline = readTimeline(projectDir);
  const meta = readBranchMeta(projectDir);
  if (!timeline) {
    jsonResponse(res, { error: "No timeline data found" }, 404, req);
    return;
  }
  // Pair every timeline row with its full on-disk entry.
  const fullEntries = timeline.entries.map((te) => ({
    timeline: te,
    entry: readHistoryEntry(projectDir, te.sha)
  }));
  jsonResponse(res, {
    exportedAt: (/* @__PURE__ */ new Date()).toISOString(),
    meta,
    timeline: timeline.entries,
    entries: fullEntries
  }, 200, req);
}
|
|
3929
|
-
/**
 * GET /api/history/enrich/status — report the in-memory enrichment progress
 * for this project; an absent record means no run has started ("idle").
 */
async function historyEnrichStatusGetHandler(ctx) {
  const { projectDir, req, res } = ctx;
  const idle = { status: "idle", total: 0, completed: 0, failed: 0 };
  jsonResponse(res, enrichmentStatus.get(projectDir) ?? idle, 200, req);
}
|
|
3938
|
-
/**
 * DELETE /api/history/enrich/status — discard any recorded enrichment
 * progress for this project (also cancels a running loop, which checks the
 * map on every iteration). Always succeeds.
 */
async function historyEnrichStatusDeleteHandler(ctx) {
  enrichmentStatus.delete(ctx.projectDir);
  jsonResponse(ctx.res, { success: true }, 200, ctx.req);
}
|
|
3943
|
-
/**
 * POST /api/history/enrich — kick off AI enrichment of history entries.
 *
 * Validates preconditions (history branch exists, no run already in flight,
 * Claude CLI installed), picks the target set (a single SHA from the body, or
 * every un-enriched entry), replies 202 immediately, then runs the enrichment
 * loop as a detached background task whose progress is published through the
 * shared in-memory `enrichmentStatus` map (polled via the status handler).
 */
async function historyEnrichHandler(ctx) {
  const { projectDir, req, res } = ctx;
  const exists = historyBranchExists(projectDir);
  if (!exists) {
    jsonResponse(res, { error: "History branch not initialized" }, 400, req);
    return;
  }
  // Only one enrichment run per project at a time.
  const current = enrichmentStatus.get(projectDir);
  if (current?.status === "enriching") {
    jsonResponse(res, { error: "Enrichment already in progress" }, 409, req);
    return;
  }
  if (!isClaudeCliAvailable2()) {
    jsonResponse(res, { error: "Claude CLI not found. Install it from https://claude.ai/download" }, 400, req);
    return;
  }
  const body = await readBody(req);
  let targetSha;
  let model = DEFAULT_AI_MODEL;
  if (body) {
    // Body is optional; a malformed body is treated as empty (best-effort).
    try {
      const payload = JSON.parse(body);
      if (payload.sha && typeof payload.sha === "string") targetSha = payload.sha;
      if (payload.model && typeof payload.model === "string") model = payload.model;
    } catch {
    }
  }
  const timeline = readTimeline(projectDir);
  if (!timeline || timeline.entries.length === 0) {
    jsonResponse(res, { error: "No history entries to enrich" }, 400, req);
    return;
  }
  // Target set: one explicit SHA, or everything still lacking enrichment.
  let toEnrich;
  if (targetSha) {
    toEnrich = [targetSha];
  } else {
    toEnrich = timeline.entries.filter((e) => !e.hasEnrichment).map((e) => e.sha);
  }
  if (toEnrich.length === 0) {
    jsonResponse(res, { status: "done", total: 0, completed: 0, failed: 0 }, 200, req);
    return;
  }
  // Publish the run before answering so status polls see it immediately.
  enrichmentStatus.set(projectDir, {
    status: "enriching",
    total: toEnrich.length,
    completed: 0,
    failed: 0
  });
  // 202: work continues after the response below.
  jsonResponse(res, { success: true, total: toEnrich.length }, 202, req);
  (async () => {
    let completed = 0;
    let failed = 0;
    for (const sha of toEnrich) {
      // Re-read status each iteration: deleting/replacing the map entry
      // (e.g. via the DELETE status endpoint) cancels the run.
      const statusEntry = enrichmentStatus.get(projectDir);
      if (!statusEntry || statusEntry.status !== "enriching") break;
      statusEntry.current = sha.slice(0, 7);
      try {
        const entry = readHistoryEntry(projectDir, sha);
        if (!entry) {
          console.error(`[enrich] Entry not found for sha ${sha.slice(0, 7)}`);
          failed++;
          // NOTE(review): this `continue` skips the progress write at the
          // bottom of the loop, so the failure count is published one
          // iteration late — confirm whether that is intended.
          continue;
        }
        // Session events are optional context for the enrichment prompt.
        let rawEvents;
        if (entry.session?.sessionId) {
          try {
            rawEvents = readEvents(entry.session.sessionId, projectDir);
          } catch {
          }
        }
        console.error(`[enrich] Enriching ${sha.slice(0, 7)} with model ${model}...`);
        const enrichment = await enrichEntry(entry, rawEvents, model);
        console.error(`[enrich] Got enrichment for ${sha.slice(0, 7)}, writing...`);
        await updateEntryEnrichment(projectDir, sha, enrichment);
        console.error(`[enrich] Successfully enriched ${sha.slice(0, 7)}`);
        completed++;
      } catch (err) {
        // Per-entry failure: record the message but keep going.
        const msg = err instanceof Error ? err.message : String(err);
        console.error(`[enrich] Failed to enrich ${sha.slice(0, 7)}: ${msg}`);
        failed++;
        const s = enrichmentStatus.get(projectDir);
        if (s) s.error = msg;
      }
      // Publish cumulative progress after each entry.
      const progress = enrichmentStatus.get(projectDir);
      if (progress) {
        progress.completed = completed;
        progress.failed = failed;
      }
    }
    enrichmentStatus.set(projectDir, {
      status: "done",
      total: toEnrich.length,
      completed,
      failed,
      current: void 0
    });
  })().catch((err) => {
    // Unexpected failure of the loop itself (per-entry errors are handled above).
    enrichmentStatus.set(projectDir, {
      status: "error",
      total: toEnrich.length,
      completed: 0,
      failed: 0,
      error: err instanceof Error ? err.message : String(err)
    });
  });
}
|
|
4049
|
-
/**
 * Mount every /api/history/* endpoint on the router.
 *
 * Registration order matters: the regex entry routes (transcript, tags) must
 * precede the catch-all entry-detail route, which must precede the plain list
 * route, so the table below is iterated in order.
 */
function registerHistoryRoutes(router) {
  const routes = [
    ["get", "/api/history/status", historyStatusHandler],
    ["post", "/api/history/init", historyInitHandler],
    ["post", "/api/history/backfill", historyBackfillHandler],
    ["post", "/api/history/rewind", historyRewindHandler],
    ["get", "/api/history/config", historyConfigGetHandler],
    ["put", "/api/history/config", historyConfigPutHandler],
    ["get", "/api/history/stats", historyStatsHandler],
    ["get", /^\/api\/history\/entries\/(?<sha>[^/]+)\/transcript$/, historyEntryTranscriptHandler],
    ["put", /^\/api\/history\/entries\/(?<sha>[^/]+)\/tags$/, historyEntryTagsHandler],
    ["get", /^\/api\/history\/entries\/(?<sha>[^/]+)$/, historyEntryDetailHandler],
    ["get", "/api/history/entries", historyEntriesListHandler],
    ["get", "/api/history/export", historyExportHandler],
    ["get", "/api/history/enrich/status", historyEnrichStatusGetHandler],
    ["delete", "/api/history/enrich/status", historyEnrichStatusDeleteHandler],
    ["post", "/api/history/enrich", historyEnrichHandler]
  ];
  for (const [method, matcher, handler] of routes) {
    router[method](matcher, handler);
  }
}
|
|
4075
|
-
/**
 * Locate the bundled UI HTML file on disk.
 *
 * Builds an ordered candidate list — next to the CLI entry script, next to its
 * symlink-resolved real path, next to a shim's dynamic-import target, and next
 * to this module — checking each for both the packaged `ui.html` and the
 * monorepo `web-ui/dist/index.html` layout. Returns the first path that
 * exists, or null when none do.
 */
function resolveHtmlPath() {
  const candidates = [];
  if (process.argv[1]) {
    // 1) Relative to the invoked entry script.
    const entryDir = path9.dirname(path9.resolve(process.argv[1]));
    candidates.push(path9.join(entryDir, "ui.html"));
    candidates.push(path9.join(entryDir, "../../web-ui/dist/index.html"));
    try {
      // 2) Relative to the symlink-resolved entry (npm/pnpm bin links).
      const realEntry = path9.dirname(fs12.realpathSync(process.argv[1]));
      if (realEntry !== entryDir) {
        candidates.push(path9.join(realEntry, "ui.html"));
        candidates.push(path9.join(realEntry, "../../web-ui/dist/index.html"));
      }
    } catch {
    }
    try {
      // 3) If the entry is a wrapper shim, follow its `import("...")` target.
      const shimContent = fs12.readFileSync(process.argv[1], "utf-8");
      const importMatch = shimContent.match(/import\(["']([^"']+)["']\)/);
      if (importMatch) {
        const shimTarget = path9.dirname(importMatch[1]);
        candidates.push(path9.join(shimTarget, "ui.html"));
        candidates.push(path9.join(shimTarget, "../../web-ui/dist/index.html"));
      }
    } catch {
    }
  }
  // 4) Relative to this module itself.
  // NOTE(review): URL.pathname is not decoded and keeps a leading slash before
  // drive letters on Windows — fileURLToPath would be safer; confirm whether
  // Windows is a supported target here.
  const thisDir = path9.dirname(new URL(import.meta.url).pathname);
  candidates.push(path9.join(thisDir, "ui.html"));
  candidates.push(path9.join(thisDir, "../../cli/dist/ui.html"));
  candidates.push(path9.join(thisDir, "../../web-ui/dist/index.html"));
  for (const p of candidates) {
    if (fs12.existsSync(p)) return p;
  }
  return null;
}
|
|
4109
|
-
/**
 * Build the local HTTP server: registers every API route, then wires a request
 * pipeline of CORS preflight -> rate limit -> (for /api/*) auth + content-type
 * validation + per-request project resolution -> router dispatch, with
 * non-API GETs falling through to serving the bundled UI HTML.
 *
 * @param projectDir default project directory used when no `?project=` query
 *        parameter overrides it.
 * @returns the configured (not yet listening) http.Server.
 */
function createApiServer(projectDir) {
  const router = createRouter();
  // --- route table; regex routes must be registered before their broader
  // catch-all siblings (e.g. /projects/:id/init before /projects/:id). ---
  router.get("/api/health", healthHandler);
  router.get("/api/version", versionHandler);
  router.get("/api/paths", pathsHandler);
  router.get("/api/rules", listRulesHandler);
  router.post("/api/rules", createRuleHandler);
  router.get(/^\/api\/rules\/([^/]+)$/, getRuleHandler);
  router.post(/^\/api\/rules\/([^/]+)\/toggle$/, toggleRuleHandler);
  router.put(/^\/api\/rules\/([^/]+)$/, updateRuleHandler);
  router.delete(/^\/api\/rules\/([^/]+)$/, deleteRuleHandler);
  router.get("/api/responses", getResponsesHandler);
  router.put("/api/responses", updateResponsesHandler);
  router.get("/api/templates", listTemplatesHandler);
  router.get("/api/skills", listSkillsHandler);
  router.get("/api/projects", listProjectsHandler);
  router.post("/api/projects", registerProjectHandler);
  router.post("/api/projects/default", setDefaultProjectHandler);
  router.post("/api/projects/scan", scanProjectsHandler);
  router.post(/^\/api\/projects\/([^/]+)\/init$/, initProjectHandler);
  router.get(/^\/api\/projects\/([^/]+)$/, getProjectHandler);
  router.put(/^\/api\/projects\/([^/]+)$/, updateProjectHandler);
  router.delete(/^\/api\/projects\/([^/]+)$/, deleteProjectHandler);
  router.get("/api/sessions", sessionsListHandler);
  router.get("/api/session", sessionHandler);
  router.get("/api/events", eventsHandler);
  router.get("/api/stack", stackHandler);
  router.post("/api/init", initHandler);
  router.post("/api/generate", generateHandler);
  router.get("/api/generate/status", generateStatusHandler);
  router.delete("/api/generate/status", generateStatusClearHandler);
  router.get("/api/review/config", getReviewConfig);
  router.put("/api/review/config", putReviewConfig);
  router.get("/api/review/plans", listPlans);
  router.get("/api/review/plans/pending-all", listPendingPlansAll);
  router.get(/^\/api\/review\/plans\/([^/]+)$/, getPlan);
  router.post(/^\/api\/review\/plans\/([^/]+)\/decision$/, planDecisionGone);
  router.post(/^\/api\/review\/plans\/([^/]+)\/save$/, savePlanVersion);
  router.post("/api/review/hub/register", hubRegister);
  router.get("/api/review/hub/sessions", hubListSessions);
  router.get("/api/review/hub/events", hubEvents);
  router.get("/api/review/hub/health", hubHealth);
  router.post(/^\/api\/review\/hub\/session\/([^/]+)\/decision$/, hubSubmitDecision);
  router.get(/^\/api\/review\/hub\/session\/([^/]+)\/await$/, hubAwaitDecision);
  router.get(/^\/api\/review\/hub\/session\/([^/]+)\/token$/, hubGetToken);
  router.post(/^\/api\/review\/hub\/session\/([^/]+)\/save$/, hubSaveVersion);
  router.get(/^\/api\/review\/hub\/session\/([^/]+)$/, hubGetSession);
  router.get("/api/review/quality/ai-available", getAiAvailable);
  router.post("/api/review/quality", postQualityScore);
  router.post("/api/review/images", uploadImage);
  router.get(/^\/api\/review\/images\/([^/]+)$/, getImage);
  router.post("/api/review/export/github", exportGithub);
  router.post("/api/review/export/webhook", exportWebhook);
  router.post("/api/review/export/obsidian", exportObsidian);
  router.get("/api/settings/username", getUsernameSettings);
  router.put("/api/settings/username", putUsernameSettings);
  router.get("/api/codemap/status", codemapStatusHandler);
  router.get("/api/codemap/config", codemapConfigHandler);
  router.put("/api/codemap/config", codemapConfigUpdateHandler);
  router.post("/api/codemap/search", codemapSearchHandler);
  router.post("/api/codemap/action", codemapActionHandler);
  router.get("/api/codemap/depgraph/status", depgraphStatusHandler);
  router.post("/api/codemap/depgraph/search", depgraphSearchHandler);
  router.get("/api/codemap/depgraph/ranking", depgraphRankingHandler);
  router.get("/api/codemap/depgraph/cycles", depgraphCyclesHandler);
  router.post("/api/codemap/depgraph/metrics", depgraphMetricsHandler);
  router.get("/api/memory/status", memoryStatusHandler);
  router.post("/api/memory/search", memorySearchHandler);
  router.post("/api/memory/remember", memoryRememberHandler);
  router.get("/api/memory/timeline", memoryTimelineHandler);
  router.get("/api/memory/config", memoryConfigHandler);
  router.put("/api/memory/config", memoryConfigUpdateHandler);
  router.post("/api/memory/action", memoryActionHandler);
  router.get(/^\/api\/memory\/entries\/([^/]+)$/, memoryEntryHandler);
  router.post(/^\/api\/memory\/entries\/([^/]+)\/forget$/, memoryForgetHandler);
  router.get("/api/ulpi/models", ulpiModelsHandler);
  registerHistoryRoutes(router);
  const server = http.createServer(async (req, res) => {
    const url = new URL(req.url ?? "/", `http://${req.headers.host}`);
    const method = req.method ?? "GET";
    // CORS preflight is answered before any auth/rate-limit checks.
    if (handleCorsPreFlight(req, res)) return;
    // Rate limiting is keyed on the remote socket address.
    const clientIp = req.socket.remoteAddress ?? "unknown";
    if (!checkRateLimit(clientIp)) {
      jsonResponse(res, { error: "Too many requests" }, 429, req);
      return;
    }
    if (url.pathname.startsWith("/api/")) {
      // Every API route except the health probe requires auth.
      if (url.pathname !== "/api/health" && !validateAuth(req)) {
        jsonResponse(res, { error: "Unauthorized: only local connections allowed" }, 401, req);
        return;
      }
      // Reject writes whose declared body type the handlers cannot parse.
      if ((method === "POST" || method === "PUT") && req.headers["content-length"] && req.headers["content-length"] !== "0") {
        const ct = req.headers["content-type"] ?? "";
        if (!ct.includes("application/json") && !ct.includes("multipart/form-data") && !ct.includes("text/plain")) {
          jsonResponse(res, { error: "Unsupported Content-Type" }, 415, req);
          return;
        }
      }
      // A `?project=` query parameter switches the target project directory.
      let resolvedProjectDir = projectDir;
      const projectParam = url.searchParams.get("project");
      if (projectParam) {
        const entry = getProject(projectParam);
        if (!entry) {
          jsonResponse(res, { error: `Unknown project: ${projectParam}` }, 404, req);
          return;
        }
        resolvedProjectDir = entry.path;
      }
      try {
        const matched = await router.handle({
          req,
          res,
          pathname: url.pathname,
          method,
          url,
          projectDir: resolvedProjectDir
        });
        if (!matched) {
          notFound(res, `Unknown API route: ${method} ${url.pathname}`, req);
        }
      } catch (err) {
        // Handler errors become a 500 with the message capped at 200 chars.
        const rawMessage = err instanceof Error ? err.message : "Internal server error";
        const safeMessage = rawMessage.length > 200 ? rawMessage.slice(0, 200) : rawMessage;
        jsonResponse(res, { error: safeMessage }, 500, req);
      }
      return;
    }
    // Non-API traffic (the UI) is restricted to loopback connections.
    if (!validateLoopback(req)) {
      jsonResponse(res, { error: "Unauthorized: only local connections allowed" }, 401, req);
      return;
    }
    if (method === "GET") {
      // Basic traversal guard before touching the filesystem.
      const normalized = path9.normalize(url.pathname);
      if (normalized.includes("..")) {
        notFound(res, "Not found", req);
        return;
      }
      const htmlPath = resolveHtmlPath();
      if (!htmlPath) {
        res.writeHead(404, { "Content-Type": "text/plain" });
        res.end("UI HTML file not found. Build the web-ui package first.");
        return;
      }
      let html = fs12.readFileSync(htmlPath, "utf-8");
      // Inject the API secret into the page so the UI can authenticate.
      const secret = getApiSecret();
      if (secret) {
        const injection = `<script>window.__ULPI_API_SECRET__="${secret}";</script>`;
        html = html.replace("</head>", `${injection}</head>`);
      }
      res.writeHead(200, {
        "Content-Type": "text/html; charset=utf-8",
        "Cache-Control": "no-cache"
      });
      res.end(html);
      return;
    }
    notFound(res, "Not found", req);
  });
  return server;
}
|
|
4269
|
-
|
|
4270
|
-
export {
|
|
4271
|
-
setUiServerPort,
|
|
4272
|
-
createApiServer
|
|
4273
|
-
};
|