grepmax 0.13.6 → 0.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +58 -0
- package/dist/commands/mcp.js +77 -4
- package/dist/commands/review.js +237 -0
- package/dist/index.js +2 -0
- package/dist/lib/daemon/daemon.js +113 -2
- package/dist/lib/daemon/ipc-handler.js +8 -0
- package/dist/lib/llm/diff.js +139 -0
- package/dist/lib/llm/report.js +141 -0
- package/dist/lib/llm/review.js +403 -0
- package/package.json +1 -1
- package/plugins/grepmax/.claude-plugin/plugin.json +1 -1
- package/plugins/grepmax/skills/grepmax/SKILL.md +9 -0
package/README.md
CHANGED
|
@@ -135,6 +135,9 @@ Plugins auto-update when you run `npm install -g grepmax@latest` — no need to
|
|
|
135
135
|
| `impact_analysis` | Dependents + affected tests for a symbol or file. |
|
|
136
136
|
| `find_similar` | Vector similarity search. |
|
|
137
137
|
| `build_context` | Token-budgeted topic summary. |
|
|
138
|
+
| `investigate` | Agentic codebase Q&A using local LLM + gmax tools. |
|
|
139
|
+
| `review_commit` | Review a git commit for bugs, security issues, and breaking changes. |
|
|
140
|
+
| `review_report` | Get accumulated code review findings for the current project. |
|
|
138
141
|
|
|
139
142
|
## Search Options
|
|
140
143
|
|
|
@@ -174,6 +177,61 @@ gmax status # See all projects + watcher status
|
|
|
174
177
|
|
|
175
178
|
The daemon auto-starts when you run `gmax add`, `gmax index`, `gmax remove`, or `gmax summarize`. It shuts down after 30 minutes of inactivity.
|
|
176
179
|
|
|
180
|
+
## Local LLM (optional)
|
|
181
|
+
|
|
182
|
+
gmax can use a local LLM (via llama-server) for agentic codebase investigation. This is entirely opt-in and disabled by default — gmax works fine without it.
|
|
183
|
+
|
|
184
|
+
```bash
|
|
185
|
+
gmax llm on # Enable LLM features (persists to config)
|
|
186
|
+
gmax llm start # Start llama-server (auto-starts daemon too)
|
|
187
|
+
gmax llm status # Check server status
|
|
188
|
+
gmax llm stop # Stop llama-server
|
|
189
|
+
gmax llm off # Disable LLM + stop server
|
|
190
|
+
```
|
|
191
|
+
|
|
192
|
+
### Investigate
|
|
193
|
+
|
|
194
|
+
Ask questions about your codebase — the LLM autonomously uses gmax tools (search, trace, peek, impact, related) to gather evidence and synthesize an answer.
|
|
195
|
+
|
|
196
|
+
```bash
|
|
197
|
+
gmax investigate "how does authentication work?"
|
|
198
|
+
gmax investigate "what would break if I changed VectorDB?" -v
|
|
199
|
+
gmax investigate "where are API routes defined?" --root ~/project
|
|
200
|
+
```
|
|
201
|
+
|
|
202
|
+
### Review
|
|
203
|
+
|
|
204
|
+
Automatic code review on git commits. Extracts the diff, gathers codebase context (callers, dependents, related files), and prompts the LLM for structured findings.
|
|
205
|
+
|
|
206
|
+
```bash
|
|
207
|
+
gmax review # Review HEAD
|
|
208
|
+
gmax review --commit abc1234 # Review specific commit
|
|
209
|
+
gmax review --commit HEAD~3 -v # Verbose — shows context gathering + LLM progress
|
|
210
|
+
gmax review report # Show accumulated findings
|
|
211
|
+
gmax review report --json # Raw JSON output
|
|
212
|
+
gmax review clear # Clear report
|
|
213
|
+
```
|
|
214
|
+
|
|
215
|
+
#### Post-commit hook
|
|
216
|
+
|
|
217
|
+
Install a git hook that automatically reviews every commit in the background via the daemon:
|
|
218
|
+
|
|
219
|
+
```bash
|
|
220
|
+
gmax review install # Install in current repo
|
|
221
|
+
gmax review install ~/other-repo # Install in another repo
|
|
222
|
+
```
|
|
223
|
+
|
|
224
|
+
The hook sends an IPC message to the daemon and returns instantly — it never blocks `git commit`. Findings accumulate in the report.
|
|
225
|
+
|
|
226
|
+
### LLM Configuration
|
|
227
|
+
|
|
228
|
+
| Variable | Description | Default |
|
|
229
|
+
| --- | --- | --- |
|
|
230
|
+
| `GMAX_LLM_MODEL` | Path to GGUF model file | (none) |
|
|
231
|
+
| `GMAX_LLM_BINARY` | llama-server binary | `llama-server` |
|
|
232
|
+
| `GMAX_LLM_PORT` | Server port | `8079` |
|
|
233
|
+
| `GMAX_LLM_IDLE_TIMEOUT` | Minutes before auto-stop | `30` |
|
|
234
|
+
|
|
177
235
|
## Architecture
|
|
178
236
|
|
|
179
237
|
All data lives in `~/.gmax/`:
|
package/dist/commands/mcp.js
CHANGED
|
@@ -305,6 +305,28 @@ const TOOLS = [
|
|
|
305
305
|
required: ["question"],
|
|
306
306
|
},
|
|
307
307
|
},
|
|
308
|
+
{
|
|
309
|
+
name: "review_commit",
|
|
310
|
+
description: "Review a git commit for bugs, breaking changes, and security issues using local LLM + codebase context. Returns structured findings. Requires LLM to be enabled (gmax llm on).",
|
|
311
|
+
inputSchema: {
|
|
312
|
+
type: "object",
|
|
313
|
+
properties: {
|
|
314
|
+
commit: { type: "string", description: "Git ref to review (default: HEAD)" },
|
|
315
|
+
},
|
|
316
|
+
required: [],
|
|
317
|
+
},
|
|
318
|
+
},
|
|
319
|
+
{
|
|
320
|
+
name: "review_report",
|
|
321
|
+
description: "Get the accumulated code review report for the current project. Returns findings from all reviewed commits.",
|
|
322
|
+
inputSchema: {
|
|
323
|
+
type: "object",
|
|
324
|
+
properties: {
|
|
325
|
+
json: { type: "boolean", description: "Return raw JSON instead of text (default: false)" },
|
|
326
|
+
},
|
|
327
|
+
required: [],
|
|
328
|
+
},
|
|
329
|
+
},
|
|
308
330
|
];
|
|
309
331
|
// ---------------------------------------------------------------------------
|
|
310
332
|
// Helpers
|
|
@@ -1833,7 +1855,7 @@ exports.mcp = new commander_1.Command("mcp")
|
|
|
1833
1855
|
return { tools: TOOLS };
|
|
1834
1856
|
}));
|
|
1835
1857
|
server.setRequestHandler(types_js_1.CallToolRequestSchema, (request) => __awaiter(void 0, void 0, void 0, function* () {
|
|
1836
|
-
var _a, _b, _c, _d, _e, _f;
|
|
1858
|
+
var _a, _b, _c, _d, _e, _f, _g;
|
|
1837
1859
|
const { name, arguments: args } = request.params;
|
|
1838
1860
|
const toolArgs = (args !== null && args !== void 0 ? args : {});
|
|
1839
1861
|
const startMs = Date.now();
|
|
@@ -1919,26 +1941,77 @@ exports.mcp = new commander_1.Command("mcp")
|
|
|
1919
1941
|
}
|
|
1920
1942
|
break;
|
|
1921
1943
|
}
|
|
1944
|
+
case "review_commit": {
|
|
1945
|
+
const commitRef = String(toolArgs.commit || "HEAD");
|
|
1946
|
+
try {
|
|
1947
|
+
const { isDaemonRunning, sendDaemonCommand } = yield Promise.resolve().then(() => __importStar(require("../lib/utils/daemon-client")));
|
|
1948
|
+
if (yield isDaemonRunning()) {
|
|
1949
|
+
const llmResp = yield sendDaemonCommand({ cmd: "llm-start" }, { timeoutMs: 90000 });
|
|
1950
|
+
if (!llmResp.ok) {
|
|
1951
|
+
result = err(`LLM server not available: ${llmResp.error}. Run \`gmax llm on && gmax llm start\`.`);
|
|
1952
|
+
break;
|
|
1953
|
+
}
|
|
1954
|
+
}
|
|
1955
|
+
else {
|
|
1956
|
+
result = err("LLM server not available. Run `gmax llm on && gmax llm start`.");
|
|
1957
|
+
break;
|
|
1958
|
+
}
|
|
1959
|
+
const { reviewCommit } = yield Promise.resolve().then(() => __importStar(require("../lib/llm/review")));
|
|
1960
|
+
const rev = yield reviewCommit({ commitRef, projectRoot });
|
|
1961
|
+
if (rev.clean) {
|
|
1962
|
+
result = ok(`Clean commit (${rev.commit}) — no issues found in ${rev.duration}s.`);
|
|
1963
|
+
}
|
|
1964
|
+
else {
|
|
1965
|
+
const { readReport } = yield Promise.resolve().then(() => __importStar(require("../lib/llm/report")));
|
|
1966
|
+
const report = readReport(projectRoot);
|
|
1967
|
+
const entry = report === null || report === void 0 ? void 0 : report.reviews.find((r) => r.commit === rev.commit);
|
|
1968
|
+
result = ok(JSON.stringify({ commit: rev.commit, findings: (_a = entry === null || entry === void 0 ? void 0 : entry.findings) !== null && _a !== void 0 ? _a : [], duration: rev.duration }, null, 2));
|
|
1969
|
+
}
|
|
1970
|
+
}
|
|
1971
|
+
catch (e) {
|
|
1972
|
+
result = err(`Review failed: ${e instanceof Error ? e.message : String(e)}`);
|
|
1973
|
+
}
|
|
1974
|
+
break;
|
|
1975
|
+
}
|
|
1976
|
+
case "review_report": {
|
|
1977
|
+
try {
|
|
1978
|
+
const { readReport, formatReportText } = yield Promise.resolve().then(() => __importStar(require("../lib/llm/report")));
|
|
1979
|
+
const report = readReport(projectRoot);
|
|
1980
|
+
if (!report || report.reviews.length === 0) {
|
|
1981
|
+
result = ok("No review findings yet.");
|
|
1982
|
+
}
|
|
1983
|
+
else if (toolArgs.json) {
|
|
1984
|
+
result = ok(JSON.stringify(report, null, 2));
|
|
1985
|
+
}
|
|
1986
|
+
else {
|
|
1987
|
+
result = ok(formatReportText(report));
|
|
1988
|
+
}
|
|
1989
|
+
}
|
|
1990
|
+
catch (e) {
|
|
1991
|
+
result = err(`Report failed: ${e instanceof Error ? e.message : String(e)}`);
|
|
1992
|
+
}
|
|
1993
|
+
break;
|
|
1994
|
+
}
|
|
1922
1995
|
default:
|
|
1923
1996
|
return err(`Unknown tool: ${name}`);
|
|
1924
1997
|
}
|
|
1925
1998
|
// Best-effort query logging
|
|
1926
1999
|
try {
|
|
1927
2000
|
const { logQuery } = yield Promise.resolve().then(() => __importStar(require("../lib/utils/query-log")));
|
|
1928
|
-
const text = (
|
|
2001
|
+
const text = (_d = (_c = (_b = result.content) === null || _b === void 0 ? void 0 : _b[0]) === null || _c === void 0 ? void 0 : _c.text) !== null && _d !== void 0 ? _d : "";
|
|
1929
2002
|
const resultLines = text.split("\n").filter((l) => l.trim()).length;
|
|
1930
2003
|
logQuery({
|
|
1931
2004
|
ts: new Date().toISOString(),
|
|
1932
2005
|
source: "mcp",
|
|
1933
2006
|
tool: name,
|
|
1934
|
-
query: String((
|
|
2007
|
+
query: String((_g = (_f = (_e = toolArgs.query) !== null && _e !== void 0 ? _e : toolArgs.symbol) !== null && _f !== void 0 ? _f : toolArgs.target) !== null && _g !== void 0 ? _g : ""),
|
|
1935
2008
|
project: projectRoot,
|
|
1936
2009
|
results: resultLines,
|
|
1937
2010
|
ms: Date.now() - startMs,
|
|
1938
2011
|
error: result.isError ? text.slice(0, 200) : undefined,
|
|
1939
2012
|
});
|
|
1940
2013
|
}
|
|
1941
|
-
catch (
|
|
2014
|
+
catch (_h) { }
|
|
1942
2015
|
return result;
|
|
1943
2016
|
}));
|
|
1944
2017
|
yield server.connect(transport);
|
|
@@ -0,0 +1,237 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
36
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
37
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
38
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
39
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
40
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
41
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
42
|
+
});
|
|
43
|
+
};
|
|
44
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
45
|
+
exports.review = void 0;
|
|
46
|
+
const node_child_process_1 = require("node:child_process");
|
|
47
|
+
const fs = __importStar(require("node:fs"));
|
|
48
|
+
const path = __importStar(require("node:path"));
|
|
49
|
+
const commander_1 = require("commander");
|
|
50
|
+
const exit_1 = require("../lib/utils/exit");
|
|
51
|
+
const project_root_1 = require("../lib/utils/project-root");
|
|
52
|
+
exports.review = new commander_1.Command("review")
|
|
53
|
+
.description("Review code changes using local LLM + codebase context")
|
|
54
|
+
.option("--commit <ref>", "Commit to review", "HEAD")
|
|
55
|
+
.option("--root <dir>", "Project root directory")
|
|
56
|
+
.option("--background", "Run review asynchronously via daemon", false)
|
|
57
|
+
.option("-v, --verbose", "Print progress to stderr", false)
|
|
58
|
+
.addHelpText("after", `
|
|
59
|
+
Examples:
|
|
60
|
+
gmax review Review HEAD
|
|
61
|
+
gmax review --commit abc1234 Review specific commit
|
|
62
|
+
gmax review --background Run async via daemon
|
|
63
|
+
|
|
64
|
+
Subcommands:
|
|
65
|
+
gmax review report [--json] Show accumulated findings
|
|
66
|
+
gmax review clear Clear report
|
|
67
|
+
gmax review install [DIR] Install post-commit hook
|
|
68
|
+
`)
|
|
69
|
+
.action((opts) => __awaiter(void 0, void 0, void 0, function* () {
|
|
70
|
+
var _a;
|
|
71
|
+
try {
|
|
72
|
+
const root = opts.root ? path.resolve(opts.root) : process.cwd();
|
|
73
|
+
const projectRoot = (_a = (0, project_root_1.findProjectRoot)(root)) !== null && _a !== void 0 ? _a : root;
|
|
74
|
+
const commitRef = opts.commit;
|
|
75
|
+
if (opts.background) {
|
|
76
|
+
// Fire-and-forget via daemon
|
|
77
|
+
const { ensureDaemonRunning, sendDaemonCommand } = yield Promise.resolve().then(() => __importStar(require("../lib/utils/daemon-client")));
|
|
78
|
+
if (!(yield ensureDaemonRunning())) {
|
|
79
|
+
console.error("Failed to start daemon");
|
|
80
|
+
process.exitCode = 1;
|
|
81
|
+
return;
|
|
82
|
+
}
|
|
83
|
+
const resp = yield sendDaemonCommand({ cmd: "review", root: projectRoot, commitRef }, { timeoutMs: 5000 });
|
|
84
|
+
if (!resp.ok) {
|
|
85
|
+
console.error(`Review failed: ${resp.error}`);
|
|
86
|
+
process.exitCode = 1;
|
|
87
|
+
return;
|
|
88
|
+
}
|
|
89
|
+
console.log(`Review queued for ${commitRef}`);
|
|
90
|
+
return;
|
|
91
|
+
}
|
|
92
|
+
// Foreground: ensure LLM server is running
|
|
93
|
+
const { ensureDaemonRunning, sendDaemonCommand } = yield Promise.resolve().then(() => __importStar(require("../lib/utils/daemon-client")));
|
|
94
|
+
if (!(yield ensureDaemonRunning())) {
|
|
95
|
+
console.error("Failed to start daemon");
|
|
96
|
+
process.exitCode = 1;
|
|
97
|
+
return;
|
|
98
|
+
}
|
|
99
|
+
const llmResp = yield sendDaemonCommand({ cmd: "llm-start" }, { timeoutMs: 90000 });
|
|
100
|
+
if (!llmResp.ok) {
|
|
101
|
+
console.error(`LLM server error: ${llmResp.error}`);
|
|
102
|
+
console.error("Run `gmax llm on` to enable the LLM server.");
|
|
103
|
+
process.exitCode = 1;
|
|
104
|
+
return;
|
|
105
|
+
}
|
|
106
|
+
const { reviewCommit } = yield Promise.resolve().then(() => __importStar(require("../lib/llm/review")));
|
|
107
|
+
const result = yield reviewCommit({
|
|
108
|
+
commitRef,
|
|
109
|
+
projectRoot,
|
|
110
|
+
verbose: opts.verbose,
|
|
111
|
+
});
|
|
112
|
+
if (result.clean) {
|
|
113
|
+
console.log(`${result.commit} — clean (${result.duration}s)`);
|
|
114
|
+
}
|
|
115
|
+
else {
|
|
116
|
+
console.log(`${result.commit} — ${result.findingCount} finding(s) (${result.duration}s)`);
|
|
117
|
+
console.log("Run `gmax review report` to see details.");
|
|
118
|
+
}
|
|
119
|
+
}
|
|
120
|
+
catch (err) {
|
|
121
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
122
|
+
console.error(`Review failed: ${msg}`);
|
|
123
|
+
process.exitCode = 1;
|
|
124
|
+
}
|
|
125
|
+
finally {
|
|
126
|
+
yield (0, exit_1.gracefulExit)();
|
|
127
|
+
}
|
|
128
|
+
}));
|
|
129
|
+
// --- Subcommands ---
|
|
130
|
+
exports.review
|
|
131
|
+
.command("report")
|
|
132
|
+
.description("Show accumulated review findings")
|
|
133
|
+
.option("--json", "Output raw JSON", false)
|
|
134
|
+
.option("--root <dir>", "Project root directory")
|
|
135
|
+
.action((opts) => __awaiter(void 0, void 0, void 0, function* () {
|
|
136
|
+
var _a;
|
|
137
|
+
try {
|
|
138
|
+
const root = opts.root ? path.resolve(opts.root) : process.cwd();
|
|
139
|
+
const projectRoot = (_a = (0, project_root_1.findProjectRoot)(root)) !== null && _a !== void 0 ? _a : root;
|
|
140
|
+
const { readReport, formatReportText } = yield Promise.resolve().then(() => __importStar(require("../lib/llm/report")));
|
|
141
|
+
const report = readReport(projectRoot);
|
|
142
|
+
if (!report || report.reviews.length === 0) {
|
|
143
|
+
console.log("No review findings yet.");
|
|
144
|
+
return;
|
|
145
|
+
}
|
|
146
|
+
if (opts.json) {
|
|
147
|
+
console.log(JSON.stringify(report, null, 2));
|
|
148
|
+
}
|
|
149
|
+
else {
|
|
150
|
+
console.log(formatReportText(report));
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
catch (err) {
|
|
154
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
155
|
+
console.error(`Report failed: ${msg}`);
|
|
156
|
+
process.exitCode = 1;
|
|
157
|
+
}
|
|
158
|
+
finally {
|
|
159
|
+
yield (0, exit_1.gracefulExit)();
|
|
160
|
+
}
|
|
161
|
+
}));
|
|
162
|
+
exports.review
|
|
163
|
+
.command("clear")
|
|
164
|
+
.description("Clear the review report")
|
|
165
|
+
.option("--root <dir>", "Project root directory")
|
|
166
|
+
.action((opts) => __awaiter(void 0, void 0, void 0, function* () {
|
|
167
|
+
var _a;
|
|
168
|
+
try {
|
|
169
|
+
const root = opts.root ? path.resolve(opts.root) : process.cwd();
|
|
170
|
+
const projectRoot = (_a = (0, project_root_1.findProjectRoot)(root)) !== null && _a !== void 0 ? _a : root;
|
|
171
|
+
const { clearReport } = yield Promise.resolve().then(() => __importStar(require("../lib/llm/report")));
|
|
172
|
+
clearReport(projectRoot);
|
|
173
|
+
console.log("Report cleared.");
|
|
174
|
+
}
|
|
175
|
+
finally {
|
|
176
|
+
yield (0, exit_1.gracefulExit)();
|
|
177
|
+
}
|
|
178
|
+
}));
|
|
179
|
+
exports.review
|
|
180
|
+
.command("install [dir]")
|
|
181
|
+
.description("Install post-commit hook for automatic review")
|
|
182
|
+
.action((dir) => __awaiter(void 0, void 0, void 0, function* () {
|
|
183
|
+
try {
|
|
184
|
+
let targetDir;
|
|
185
|
+
if (dir) {
|
|
186
|
+
targetDir = path.resolve(dir);
|
|
187
|
+
}
|
|
188
|
+
else {
|
|
189
|
+
try {
|
|
190
|
+
targetDir = (0, node_child_process_1.execFileSync)("git", ["rev-parse", "--show-toplevel"], {
|
|
191
|
+
encoding: "utf-8",
|
|
192
|
+
}).trim();
|
|
193
|
+
}
|
|
194
|
+
catch (_a) {
|
|
195
|
+
console.error("Not in a git repo and no directory specified.");
|
|
196
|
+
process.exitCode = 1;
|
|
197
|
+
return;
|
|
198
|
+
}
|
|
199
|
+
}
|
|
200
|
+
const hooksDir = path.join(targetDir, ".git", "hooks");
|
|
201
|
+
if (!fs.existsSync(hooksDir)) {
|
|
202
|
+
console.error(`Not a git repo: ${targetDir}`);
|
|
203
|
+
process.exitCode = 1;
|
|
204
|
+
return;
|
|
205
|
+
}
|
|
206
|
+
const hookFile = path.join(hooksDir, "post-commit");
|
|
207
|
+
// Backup existing hook if it doesn't mention gmax
|
|
208
|
+
if (fs.existsSync(hookFile)) {
|
|
209
|
+
const existing = fs.readFileSync(hookFile, "utf-8");
|
|
210
|
+
if (!existing.includes("gmax review")) {
|
|
211
|
+
fs.copyFileSync(hookFile, `${hookFile}.gmax-backup`);
|
|
212
|
+
console.log("Backed up existing post-commit hook.");
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
// Resolve gmax binary path
|
|
216
|
+
let gmaxBin = "gmax";
|
|
217
|
+
try {
|
|
218
|
+
gmaxBin = (0, node_child_process_1.execFileSync)("which", ["gmax"], { encoding: "utf-8" }).trim();
|
|
219
|
+
}
|
|
220
|
+
catch (_b) { }
|
|
221
|
+
const hookContent = `#!/usr/bin/env bash
|
|
222
|
+
# gmax review — async code review on commit
|
|
223
|
+
# Always exits 0 to never block git
|
|
224
|
+
"${gmaxBin}" review --commit HEAD --background --root "${targetDir}" || true
|
|
225
|
+
`;
|
|
226
|
+
fs.writeFileSync(hookFile, hookContent, { mode: 0o755 });
|
|
227
|
+
console.log(`Installed post-commit hook in ${targetDir}`);
|
|
228
|
+
}
|
|
229
|
+
catch (err) {
|
|
230
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
231
|
+
console.error(`Install failed: ${msg}`);
|
|
232
|
+
process.exitCode = 1;
|
|
233
|
+
}
|
|
234
|
+
finally {
|
|
235
|
+
yield (0, exit_1.gracefulExit)();
|
|
236
|
+
}
|
|
237
|
+
}));
|
package/dist/index.js
CHANGED
|
@@ -57,6 +57,7 @@ const peek_1 = require("./commands/peek");
|
|
|
57
57
|
const project_1 = require("./commands/project");
|
|
58
58
|
const recent_1 = require("./commands/recent");
|
|
59
59
|
const related_1 = require("./commands/related");
|
|
60
|
+
const review_1 = require("./commands/review");
|
|
60
61
|
const opencode_1 = require("./commands/opencode");
|
|
61
62
|
const plugin_1 = require("./commands/plugin");
|
|
62
63
|
const remove_1 = require("./commands/remove");
|
|
@@ -114,6 +115,7 @@ commander_1.program.addCommand(mcp_1.mcp);
|
|
|
114
115
|
commander_1.program.addCommand(summarize_1.summarize);
|
|
115
116
|
commander_1.program.addCommand(llm_1.llm);
|
|
116
117
|
commander_1.program.addCommand(investigate_1.investigateCmd);
|
|
118
|
+
commander_1.program.addCommand(review_1.review);
|
|
117
119
|
// Setup & diagnostics
|
|
118
120
|
commander_1.program.addCommand(setup_1.setup);
|
|
119
121
|
commander_1.program.addCommand(config_1.config);
|
|
@@ -41,6 +41,13 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|
|
41
41
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
42
42
|
});
|
|
43
43
|
};
|
|
44
|
+
var __asyncValues = (this && this.__asyncValues) || function (o) {
|
|
45
|
+
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
|
46
|
+
var m = o[Symbol.asyncIterator], i;
|
|
47
|
+
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
|
48
|
+
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
|
49
|
+
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
|
50
|
+
};
|
|
44
51
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
45
52
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
46
53
|
};
|
|
@@ -131,8 +138,9 @@ class Daemon {
|
|
|
131
138
|
// 7. Register daemon (only after resources are open)
|
|
132
139
|
(0, watcher_store_1.registerDaemon)(process.pid);
|
|
133
140
|
// 7. Subscribe to all registered projects (skip missing directories)
|
|
134
|
-
const
|
|
135
|
-
|
|
141
|
+
const allProjects = (0, project_registry_1.listProjects)();
|
|
142
|
+
const indexed = allProjects.filter((p) => p.status === "indexed");
|
|
143
|
+
for (const p of indexed) {
|
|
136
144
|
if (!fs.existsSync(p.root)) {
|
|
137
145
|
console.log(`[daemon] Skipping ${path.basename(p.root)} — directory not found`);
|
|
138
146
|
continue;
|
|
@@ -144,6 +152,13 @@ class Daemon {
|
|
|
144
152
|
console.error(`[daemon] Failed to watch ${path.basename(p.root)}:`, err);
|
|
145
153
|
}
|
|
146
154
|
}
|
|
155
|
+
// 7b. Index pending projects in the background
|
|
156
|
+
const pending = allProjects.filter((p) => p.status === "pending" && fs.existsSync(p.root));
|
|
157
|
+
for (const p of pending) {
|
|
158
|
+
this.indexPendingProject(p.root).catch((err) => {
|
|
159
|
+
console.error(`[daemon] Failed to index pending ${path.basename(p.root)}:`, err);
|
|
160
|
+
});
|
|
161
|
+
}
|
|
147
162
|
// 8. Heartbeat
|
|
148
163
|
this.heartbeatInterval = setInterval(() => {
|
|
149
164
|
(0, watcher_store_1.heartbeat)(process.pid);
|
|
@@ -265,10 +280,88 @@ class Daemon {
|
|
|
265
280
|
status: "watching",
|
|
266
281
|
lastHeartbeat: Date.now(),
|
|
267
282
|
});
|
|
283
|
+
// Catchup scan — find files changed while daemon was offline
|
|
284
|
+
this.catchupScan(root, processor).catch((err) => {
|
|
285
|
+
console.error(`[daemon:${path.basename(root)}] Catchup scan failed:`, err);
|
|
286
|
+
});
|
|
268
287
|
this.pendingOps.delete(root);
|
|
269
288
|
console.log(`[daemon] Watching ${root}`);
|
|
270
289
|
});
|
|
271
290
|
}
|
|
291
|
+
catchupScan(root, processor) {
|
|
292
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
293
|
+
var _a, e_1, _b, _c;
|
|
294
|
+
const { walk } = yield Promise.resolve().then(() => __importStar(require("../index/walker")));
|
|
295
|
+
const { INDEXABLE_EXTENSIONS } = yield Promise.resolve().then(() => __importStar(require("../../config")));
|
|
296
|
+
const { isFileCached } = yield Promise.resolve().then(() => __importStar(require("../utils/cache-check")));
|
|
297
|
+
let queued = 0;
|
|
298
|
+
try {
|
|
299
|
+
for (var _d = true, _e = __asyncValues(walk(root, {
|
|
300
|
+
additionalPatterns: ["**/.git/**", "**/.gmax/**", "**/.osgrep/**"],
|
|
301
|
+
})), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) {
|
|
302
|
+
_c = _f.value;
|
|
303
|
+
_d = false;
|
|
304
|
+
const relPath = _c;
|
|
305
|
+
const absPath = path.join(root, relPath);
|
|
306
|
+
const ext = path.extname(absPath).toLowerCase();
|
|
307
|
+
const bn = path.basename(absPath).toLowerCase();
|
|
308
|
+
if (!INDEXABLE_EXTENSIONS.has(ext) && !INDEXABLE_EXTENSIONS.has(bn))
|
|
309
|
+
continue;
|
|
310
|
+
try {
|
|
311
|
+
const stats = yield fs.promises.stat(absPath);
|
|
312
|
+
const cached = this.metaCache.get(absPath);
|
|
313
|
+
if (!isFileCached(cached, stats)) {
|
|
314
|
+
processor.handleFileEvent("change", absPath);
|
|
315
|
+
queued++;
|
|
316
|
+
}
|
|
317
|
+
}
|
|
318
|
+
catch (_g) { }
|
|
319
|
+
}
|
|
320
|
+
}
|
|
321
|
+
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
|
322
|
+
finally {
|
|
323
|
+
try {
|
|
324
|
+
if (!_d && !_a && (_b = _e.return)) yield _b.call(_e);
|
|
325
|
+
}
|
|
326
|
+
finally { if (e_1) throw e_1.error; }
|
|
327
|
+
}
|
|
328
|
+
if (queued > 0) {
|
|
329
|
+
console.log(`[daemon:${path.basename(root)}] Catchup: ${queued} file(s) changed while offline`);
|
|
330
|
+
}
|
|
331
|
+
});
|
|
332
|
+
}
|
|
333
|
+
indexPendingProject(root) {
|
|
334
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
335
|
+
yield this.withProjectLock(root, () => __awaiter(this, void 0, void 0, function* () {
|
|
336
|
+
var _a;
|
|
337
|
+
if (!this.vectorDb || !this.metaCache)
|
|
338
|
+
return;
|
|
339
|
+
console.log(`[daemon] Indexing pending project: ${path.basename(root)}`);
|
|
340
|
+
this.vectorDb.pauseMaintenanceLoop();
|
|
341
|
+
try {
|
|
342
|
+
const result = yield (0, syncer_1.initialSync)({
|
|
343
|
+
projectRoot: root,
|
|
344
|
+
vectorDb: this.vectorDb,
|
|
345
|
+
metaCache: this.metaCache,
|
|
346
|
+
onProgress: () => { this.resetActivity(); },
|
|
347
|
+
});
|
|
348
|
+
const proj = (0, project_registry_1.getProject)(root);
|
|
349
|
+
if (proj) {
|
|
350
|
+
(0, project_registry_1.registerProject)(Object.assign(Object.assign({}, proj), { lastIndexed: new Date().toISOString(), chunkCount: result.indexed, status: "indexed" }));
|
|
351
|
+
}
|
|
352
|
+
yield this.watchProject(root);
|
|
353
|
+
console.log(`[daemon] Indexed ${path.basename(root)} (${result.total} files, ${result.indexed} chunks)`);
|
|
354
|
+
}
|
|
355
|
+
catch (err) {
|
|
356
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
357
|
+
console.error(`[daemon] indexPendingProject failed for ${path.basename(root)}: ${msg}`);
|
|
358
|
+
}
|
|
359
|
+
finally {
|
|
360
|
+
(_a = this.vectorDb) === null || _a === void 0 ? void 0 : _a.resumeMaintenanceLoop();
|
|
361
|
+
}
|
|
362
|
+
}));
|
|
363
|
+
});
|
|
364
|
+
}
|
|
272
365
|
unwatchProject(root) {
|
|
273
366
|
return __awaiter(this, void 0, void 0, function* () {
|
|
274
367
|
const processor = this.processors.get(root);
|
|
@@ -539,6 +632,24 @@ class Daemon {
|
|
|
539
632
|
var _a;
|
|
540
633
|
(_a = this.llmServer) === null || _a === void 0 ? void 0 : _a.touchIdle();
|
|
541
634
|
}
|
|
635
|
+
reviewCommit(root, commitRef) {
|
|
636
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
637
|
+
this.resetActivity();
|
|
638
|
+
try {
|
|
639
|
+
if (!this.llmServer) {
|
|
640
|
+
console.log("[review] daemon not initialized, skipping");
|
|
641
|
+
return;
|
|
642
|
+
}
|
|
643
|
+
yield this.llmServer.ensure();
|
|
644
|
+
const { reviewCommit } = yield Promise.resolve().then(() => __importStar(require("../llm/review")));
|
|
645
|
+
const result = yield reviewCommit({ commitRef, projectRoot: root });
|
|
646
|
+
console.log(`[review] ${result.commit} — ${result.findingCount} finding(s) in ${result.duration}s`);
|
|
647
|
+
}
|
|
648
|
+
catch (err) {
|
|
649
|
+
console.error(`[review] failed: ${err instanceof Error ? err.message : String(err)}`);
|
|
650
|
+
}
|
|
651
|
+
});
|
|
652
|
+
}
|
|
542
653
|
shutdown() {
|
|
543
654
|
return __awaiter(this, void 0, void 0, function* () {
|
|
544
655
|
var _a, _b, _c, _d;
|
|
@@ -102,6 +102,14 @@ function handleCommand(daemon, cmd, conn) {
|
|
|
102
102
|
return null;
|
|
103
103
|
}
|
|
104
104
|
// --- LLM server management ---
|
|
105
|
+
case "review": {
|
|
106
|
+
const root = String(cmd.root || "");
|
|
107
|
+
const commitRef = String(cmd.commitRef || "HEAD");
|
|
108
|
+
if (!root)
|
|
109
|
+
return { ok: false, error: "missing root" };
|
|
110
|
+
setImmediate(() => daemon.reviewCommit(root, commitRef));
|
|
111
|
+
return { ok: true };
|
|
112
|
+
}
|
|
105
113
|
case "llm-start":
|
|
106
114
|
return yield daemon.llmStart();
|
|
107
115
|
case "llm-stop":
|
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.SYMBOL_MAX = exports.DIFF_MAX_LINES = void 0;
|
|
4
|
+
exports.extractDiff = extractDiff;
|
|
5
|
+
exports.readCommitInfo = readCommitInfo;
|
|
6
|
+
exports.extractChangedFiles = extractChangedFiles;
|
|
7
|
+
exports.extractSymbols = extractSymbols;
|
|
8
|
+
exports.detectLanguages = detectLanguages;
|
|
9
|
+
const node_child_process_1 = require("node:child_process");
|
|
10
|
+
exports.DIFF_MAX_LINES = 500;
|
|
11
|
+
exports.SYMBOL_MAX = 10;
|
|
12
|
+
const KEYWORD_SKIP = new Set([
|
|
13
|
+
"public", "private", "internal", "protected", "open", "final", "static",
|
|
14
|
+
"override", "class", "struct", "enum", "func", "function", "def", "const",
|
|
15
|
+
"let", "var", "export", "async", "await", "import", "return", "if", "else",
|
|
16
|
+
"for", "while", "switch", "case", "guard", "interface", "abstract", "sealed",
|
|
17
|
+
"data", "suspend", "inline", "typealias", "extension", "protocol", "throws",
|
|
18
|
+
"mutating", "nonmutating", "convenience", "required", "weak", "unowned",
|
|
19
|
+
"lazy", "dynamic", "optional", "objc", "nonisolated", "isolated",
|
|
20
|
+
"consuming", "borrowing",
|
|
21
|
+
]);
|
|
22
|
+
const DECL_RE = /(?:function|class|struct|enum|interface|func|def)\s+([a-zA-Z_][a-zA-Z0-9_]*)/g;
|
|
23
|
+
const IDENT_RE = /[a-zA-Z_][a-zA-Z0-9_]*/g;
|
|
24
|
+
const LANG_MAP = [
|
|
25
|
+
[/\.(ts|tsx|js|jsx)$/, "typescript"],
|
|
26
|
+
[/\.swift$/, "swift"],
|
|
27
|
+
[/\.(kt|kts)$/, "kotlin"],
|
|
28
|
+
[/\.py$/, "python"],
|
|
29
|
+
[/\.go$/, "go"],
|
|
30
|
+
[/\.rs$/, "rust"],
|
|
31
|
+
];
|
|
32
|
+
// Run a git subcommand in `root` and return its stdout as a UTF-8 string.
// Throws (from execFileSync) on non-zero exit or after a 10-second timeout.
function git(args, root) {
    const options = {
        cwd: root,
        encoding: "utf-8",
        timeout: 10000,
        stdio: ["pipe", "pipe", "pipe"],
    };
    return (0, node_child_process_1.execFileSync)("git", args, options);
}
|
|
40
|
+
/**
 * Extract unified diff from a commit. Returns null for empty diffs (merges, amends).
 * Truncates to DIFF_MAX_LINES if needed.
 */
function extractDiff(ref, root) {
    let raw = null;
    try {
        raw = git(["diff-tree", "-p", "--no-commit-id", ref], root);
    }
    catch (_err) {
        // Invalid ref or not a git repository — treat as nothing to review.
        return null;
    }
    if (raw.trim() === "")
        return null;
    const lines = raw.split("\n");
    if (lines.length <= exports.DIFF_MAX_LINES)
        return raw;
    // Over budget: keep the head and append an explicit truncation marker.
    const head = lines.slice(0, exports.DIFF_MAX_LINES);
    head.push("", `... [truncated — ${lines.length} total lines, showing first ${exports.DIFF_MAX_LINES}]`);
    return head.join("\n");
}
|
|
61
|
+
/**
 * Read commit metadata. Throws if ref is invalid.
 */
function readCommitInfo(ref, root) {
    const line = git(["log", "-1", "--format=%H|%h|%s|%an|%ai", ref], root).trim();
    const fields = line.split("|");
    const count = fields.length;
    return {
        hash: fields[0],
        short: fields[1],
        // The subject sits between the two leading hashes and the trailing
        // author/date pair; rejoin because it may itself contain "|".
        message: fields.slice(2, count - 2).join("|"),
        author: fields[count - 2],
        date: fields[count - 1],
    };
}
|
|
75
|
+
/**
 * List files changed in a commit.
 */
function extractChangedFiles(ref, root) {
    let output;
    try {
        output = git(["diff-tree", "--no-commit-id", "--name-only", "-r", ref], root);
    }
    catch (_err) {
        // Bad ref / not a repository — report no changed files.
        return [];
    }
    return output.trim().split("\n").filter(Boolean);
}
|
|
87
|
+
/**
 * Extract symbol names from a unified diff.
 * Pass 1: hunk headers (git auto-detects enclosing function/class).
 * Pass 2: added-line declaration patterns.
 */
function extractSymbols(diff) {
    const found = new Set();
    for (const raw of diff.split("\n")) {
        if (raw.startsWith("@@")) {
            // Hunk header: the text after the second "@@" names the enclosing
            // declaration; keep its last non-keyword identifier.
            const context = raw.replace(/^@@[^@]*@@\s*/, "");
            if (!context)
                continue;
            IDENT_RE.lastIndex = 0;
            let last = null;
            let match;
            while ((match = IDENT_RE.exec(context)) !== null) {
                if (!KEYWORD_SKIP.has(match[0]))
                    last = match[0];
            }
            if (last !== null)
                found.add(last);
        }
        else if (raw.startsWith("+") && !raw.startsWith("+++")) {
            // Added line: harvest declaration names (function/class/def/...).
            DECL_RE.lastIndex = 0;
            let match;
            while ((match = DECL_RE.exec(raw)) !== null) {
                found.add(match[1]);
            }
        }
    }
    // Drop one-character identifiers and cap the list at SYMBOL_MAX.
    return [...found].filter((name) => name.length >= 2).slice(0, exports.SYMBOL_MAX);
}
|
|
125
|
+
/**
 * Detect languages from file extensions.
 */
function detectLanguages(files) {
    const detected = new Set();
    for (const filename of files) {
        // First matching extension pattern wins for each file.
        const hit = LANG_MAP.find(([pattern]) => pattern.test(filename));
        if (hit)
            detected.add(hit[1]);
    }
    return [...detected];
}
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.getReportPath = getReportPath;
|
|
37
|
+
exports.readReport = readReport;
|
|
38
|
+
exports.appendReview = appendReview;
|
|
39
|
+
exports.clearReport = clearReport;
|
|
40
|
+
exports.formatReportText = formatReportText;
|
|
41
|
+
const node_crypto_1 = require("node:crypto");
|
|
42
|
+
const fs = __importStar(require("node:fs"));
|
|
43
|
+
const path = __importStar(require("node:path"));
|
|
44
|
+
const config_1 = require("../../config");
|
|
45
|
+
// Report file path for a project: <cacheDir>/review-<hash>.json, where
// <hash> is the first 16 hex chars of SHA-256 of the project root path.
function getReportPath(projectRoot) {
    const digest = (0, node_crypto_1.createHash)("sha256")
        .update(projectRoot)
        .digest("hex");
    const name = `review-${digest.slice(0, 16)}.json`;
    return path.join(config_1.PATHS.cacheDir, name);
}
|
|
52
|
+
// Fresh, zeroed report structure for a new review session.
function emptyReport() {
    const startedAt = new Date().toISOString();
    return {
        session_start: startedAt,
        reviews: [],
        summary: { commits_reviewed: 0, total_findings: 0, errors: 0, warnings: 0 },
    };
}
|
|
59
|
+
// Load the persisted report for a project; null when the file is missing
// or unparsable.
function readReport(projectRoot) {
    const reportPath = getReportPath(projectRoot);
    try {
        return JSON.parse(fs.readFileSync(reportPath, "utf-8"));
    }
    catch (_err) {
        return null;
    }
}
|
|
69
|
+
// Append one review entry and atomically rewrite the report file
// (write to <path>.tmp, then rename over the target). The rollup summary is
// recomputed from scratch on every append. Errors are logged, never thrown.
function appendReview(projectRoot, entry) {
    const target = getReportPath(projectRoot);
    const tmp = `${target}.tmp`;
    try {
        fs.mkdirSync(path.dirname(target), { recursive: true });
        const report = readReport(projectRoot) ?? emptyReport();
        report.reviews.push(entry);
        // Recompute the summary over all accumulated reviews.
        let errors = 0;
        let warnings = 0;
        let total = 0;
        for (const review of report.reviews) {
            for (const finding of review.findings) {
                total += 1;
                if (finding.severity === "error") {
                    errors += 1;
                }
                else {
                    warnings += 1;
                }
            }
        }
        report.summary = {
            commits_reviewed: report.reviews.length,
            total_findings: total,
            errors,
            warnings,
        };
        fs.writeFileSync(tmp, JSON.stringify(report, null, 2));
        fs.renameSync(tmp, target);
    }
    catch (err) {
        // Best-effort cleanup of the temp file, then log and carry on.
        try {
            fs.unlinkSync(tmp);
        }
        catch (_cleanupErr) { }
        console.error(`[review] failed to write report: ${err instanceof Error ? err.message : String(err)}`);
    }
}
|
|
108
|
+
// Delete a project's report file; a missing file is not an error.
function clearReport(projectRoot) {
    const reportPath = getReportPath(projectRoot);
    try {
        fs.unlinkSync(reportPath);
    }
    catch (_err) {
        // Already absent — nothing to do.
    }
}
|
|
114
|
+
// Render a report as human-readable plain text: a header line with totals,
// then one section per reviewed commit listing its findings (or "clean").
function formatReportText(report) {
    const { summary } = report;
    const out = [];
    out.push("=== Review Report ===");
    out.push(`${summary.commits_reviewed} commit(s) reviewed — ${summary.total_findings} finding(s) (${summary.errors} error, ${summary.warnings} warning)`);
    out.push("");
    for (const review of report.reviews) {
        out.push(`--- ${review.commit} — ${review.message} (${review.duration_seconds}s) ---`);
        if (review.findings.length === 0) {
            out.push("  clean");
            out.push("");
            continue;
        }
        for (const finding of review.findings) {
            // "WARN " is padded so file paths align with "ERROR" rows.
            const tag = finding.severity === "error" ? "ERROR" : "WARN ";
            out.push(`  ${tag} ${finding.file}:${finding.line} [${finding.category}]`);
            out.push(`       ${finding.message}`);
            for (const item of finding.evidence) {
                out.push(`      > ${item}`);
            }
            if (finding.suggestion) {
                out.push(`    fix: ${finding.suggestion}`);
            }
        }
        out.push("");
    }
    return out.join("\n");
}
|
|
@@ -0,0 +1,403 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
36
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
37
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
38
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
39
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
40
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
41
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
42
|
+
});
|
|
43
|
+
};
|
|
44
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
45
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
46
|
+
};
|
|
47
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
48
|
+
exports.reviewCommit = reviewCommit;
|
|
49
|
+
const path = __importStar(require("node:path"));
|
|
50
|
+
const openai_1 = __importDefault(require("openai"));
|
|
51
|
+
const graph_builder_1 = require("../graph/graph-builder");
|
|
52
|
+
const searcher_1 = require("../search/searcher");
|
|
53
|
+
const vector_db_1 = require("../store/vector-db");
|
|
54
|
+
const project_root_1 = require("../utils/project-root");
|
|
55
|
+
const config_1 = require("./config");
|
|
56
|
+
const diff_1 = require("./diff");
|
|
57
|
+
const report_1 = require("./report");
|
|
58
|
+
const tools_1 = require("./tools");
|
|
59
|
+
/**
 * Review a single git commit with the local LLM and append the findings to
 * the project's review report.
 *
 * Pipeline: extract diff → read commit metadata → collect changed files /
 * symbols / languages → gather codebase context via gmax tools → build
 * prompts → one chat completion against the local llama-server → parse and
 * validate findings → persist via appendReview.
 *
 * Never throws for empty diffs or LLM failures; those paths return a
 * "clean" result so the daemon's background loop keeps running.
 *
 * @param {{commitRef: string, projectRoot: string, verbose?: boolean}} opts
 * @returns {Promise<{commit: string, findingCount: number, duration: number, clean: boolean}>}
 */
function reviewCommit(opts) {
    return __awaiter(this, void 0, void 0, function* () {
        var _a, _b, _c, _d;
        const { commitRef, projectRoot, verbose = false } = opts;
        const wallStart = Date.now();
        // 1. Extract diff (null for merges/empty commits — nothing to review)
        const diff = (0, diff_1.extractDiff)(commitRef, projectRoot);
        if (!diff) {
            if (verbose)
                process.stderr.write("[review] empty diff, skipping\n");
            return { commit: commitRef, findingCount: 0, duration: 0, clean: true };
        }
        // 2. Commit metadata (readCommitInfo throws only on an invalid ref)
        const info = (0, diff_1.readCommitInfo)(commitRef, projectRoot);
        if (verbose)
            process.stderr.write(`[review] ${info.short} — ${info.message}\n`);
        // 3. Changed files & symbols
        const changedFiles = (0, diff_1.extractChangedFiles)(commitRef, projectRoot);
        const symbols = (0, diff_1.extractSymbols)(diff);
        const languages = (0, diff_1.detectLanguages)(changedFiles);
        if (verbose) {
            process.stderr.write(`[review] files: ${changedFiles.length}, symbols: ${symbols.length}, langs: ${languages.join(", ")}\n`);
        }
        // 4. Gather context via gmax internal APIs. Failures here are
        // non-fatal: the review proceeds with an empty context string.
        let contextStr = "";
        const paths = (0, project_root_1.ensureProjectPaths)(projectRoot);
        const vectorDb = new vector_db_1.VectorDB(paths.lancedbDir);
        try {
            const searcher = new searcher_1.Searcher(vectorDb);
            const graphBuilder = new graph_builder_1.GraphBuilder(vectorDb, projectRoot);
            const ctx = { vectorDb, searcher, graphBuilder, projectRoot };
            contextStr = yield gatherContext(symbols, changedFiles, ctx, verbose);
        }
        catch (err) {
            if (verbose) {
                process.stderr.write(`[review] context gathering failed: ${err instanceof Error ? err.message : String(err)}\n`);
            }
        }
        finally {
            // Always release the DB handle, even when context gathering fails.
            yield vectorDb.close();
        }
        // 5. Build prompts
        const systemPrompt = buildSystemPrompt(languages);
        const userPrompt = buildUserPrompt(info, diff, symbols, contextStr);
        // 6. Call LLM (single shot; temperature 0 for determinism)
        const config = (0, config_1.getLlmConfig)();
        // llama-server identifies the model by file basename, no extension.
        const modelName = path.basename(config.model, path.extname(config.model));
        const client = new openai_1.default({
            baseURL: `http://${config.host}:${config.port}/v1`,
            apiKey: "local", // local server ignores the key; SDK requires one
        });
        let content;
        try {
            if (verbose)
                process.stderr.write("[review] calling LLM...\n");
            const response = yield client.chat.completions.create({
                model: modelName,
                messages: [
                    { role: "system", content: systemPrompt },
                    { role: "user", content: userPrompt },
                ],
                max_tokens: config.maxTokens,
                temperature: 0,
            });
            // Optional-chain down to choices[0].message.content; "" if absent.
            content = (_d = (_c = (_b = (_a = response.choices) === null || _a === void 0 ? void 0 : _a[0]) === null || _b === void 0 ? void 0 : _b.message) === null || _c === void 0 ? void 0 : _c.content) !== null && _d !== void 0 ? _d : "";
            if (!content) {
                if (verbose)
                    process.stderr.write("[review] empty LLM response\n");
                content = "";
            }
        }
        catch (err) {
            // LLM unreachable/failed: report "clean" rather than crash the daemon.
            const msg = err instanceof Error ? err.message : String(err);
            if (verbose)
                process.stderr.write(`[review] LLM call failed: ${msg}\n`);
            const duration = Math.round((Date.now() - wallStart) / 1000);
            return { commit: info.short, findingCount: 0, duration, clean: true };
        }
        // 7. Parse response (parseFindings tolerates fences/think tags)
        const { findings, summary } = parseFindings(content);
        const duration = Math.round((Date.now() - wallStart) / 1000);
        if (verbose) {
            process.stderr.write(`[review] ${findings.length} finding(s) in ${duration}s\n`);
        }
        // 8. Append to report (appendReview logs failures, never throws)
        const entry = {
            commit: info.short,
            message: info.message,
            timestamp: new Date().toISOString(),
            duration_seconds: duration,
            findings,
            summary,
            clean: findings.length === 0,
        };
        (0, report_1.appendReview)(projectRoot, entry);
        return {
            commit: info.short,
            findingCount: findings.length,
            duration,
            clean: findings.length === 0,
        };
    });
}
|
|
162
|
+
// ---------------------------------------------------------------------------
// Context gathering
// ---------------------------------------------------------------------------
// Cap on how many characters of gathered context go into the prompt.
const CONTEXT_CHAR_BUDGET = 12000;
// Hard wall-clock limit for the whole batch of tool calls.
const CONTEXT_TIMEOUT_MS = 15000;
/**
 * Gather codebase context for the review prompt by running gmax tools
 * (peek/impact per changed symbol, related per changed file) in parallel,
 * bounded by CONTEXT_TIMEOUT_MS and assembled under CONTEXT_CHAR_BUDGET.
 *
 * Fix vs. previous version: the timeout timer is now cleared when the race
 * settles. Previously the pending 15s setTimeout was never cancelled, so it
 * kept the daemon's event loop alive for the full timeout even when every
 * tool call finished in milliseconds.
 *
 * @param {string[]} symbols      changed symbol names
 * @param {string[]} changedFiles files touched by the commit
 * @param {object}   ctx          tool-execution context (vectorDb, searcher, …)
 * @param {boolean}  verbose      log progress to stderr
 * @returns {Promise<string>}     markdown-ish context sections (may be "")
 */
async function gatherContext(symbols, changedFiles, ctx, verbose) {
    // Launch all tool calls in parallel, tagging each result with its kind.
    const allPromises = [
        ...symbols.map((s) => (0, tools_1.executeTool)("peek", { symbol: s }, ctx).then((r) => ({ type: "peek", result: r }))),
        ...symbols.map((s) => (0, tools_1.executeTool)("impact", { target: s, depth: 2 }, ctx).then((r) => ({ type: "impact", result: r }))),
        ...changedFiles.map((f) => (0, tools_1.executeTool)("related", { file: f }, ctx).then((r) => ({ type: "related", result: r }))),
    ];
    // Race the batch against a timeout, keeping the timer handle so it can
    // be cleared once the race settles.
    let timer;
    const timeout = new Promise((resolve) => {
        timer = setTimeout(() => resolve("timeout"), CONTEXT_TIMEOUT_MS);
    });
    let settled;
    try {
        settled = await Promise.race([Promise.allSettled(allPromises), timeout]);
    }
    finally {
        clearTimeout(timer);
    }
    const peekResults = [];
    const impactResults = [];
    const relatedResults = [];
    if (settled !== "timeout") {
        for (const s of settled) {
            if (s.status !== "fulfilled")
                continue;
            const { type, result } = s.value;
            // Skip placeholder results like "(not found)" / "(error: …)".
            if (result.startsWith("(") && (result.includes("not found") || result.includes("error") || result.includes("not indexed") || result.includes("none")))
                continue;
            if (type === "peek")
                peekResults.push(result);
            else if (type === "impact")
                impactResults.push(result);
            else
                relatedResults.push(result);
        }
    }
    else if (verbose) {
        process.stderr.write("[review] context gathering timed out\n");
    }
    // Assemble sections under the character budget (peek is always included;
    // impact/related are added only while still under budget).
    let chars = 0;
    const sections = [];
    if (peekResults.length > 0) {
        const section = `### Callers & Dependents\n${peekResults.join("\n")}\n`;
        sections.push(section);
        chars += section.length;
    }
    if (impactResults.length > 0 && chars < CONTEXT_CHAR_BUDGET) {
        const section = `### Impact Analysis\n${impactResults.join("\n")}\n`;
        sections.push(section);
        chars += section.length;
    }
    if (relatedResults.length > 0 && chars < CONTEXT_CHAR_BUDGET) {
        const section = `### Related Files\n${relatedResults.join("\n")}\n`;
        sections.push(section);
    }
    if (verbose) {
        process.stderr.write(`[review] context: ${peekResults.length} peek, ${impactResults.length} impact, ${relatedResults.length} related\n`);
    }
    return sections.join("\n");
}
|
|
228
|
+
// ---------------------------------------------------------------------------
// Prompt construction (ported from sentinel/src/lib/prompt.sh)
// ---------------------------------------------------------------------------
/**
 * Build the reviewer system prompt: core instructions, optional per-language
 * checklists for the languages present in the commit, and the strict JSON
 * output contract that parseFindings expects.
 * @param {string[]} languages language tags from detectLanguages()
 * @returns {string}
 */
function buildSystemPrompt(languages) {
    let prompt = `You are Sentinel, a senior code reviewer analyzing git commits. You review diffs alongside codebase context (call graphs, dependents, related files) provided by a semantic search tool.

Your job is to find issues that could cause bugs, crashes, security vulnerabilities, or breaking changes at runtime. You are not a linter — ignore style, formatting, naming conventions, and minor nitpicks.

Focus on:
- Changes that break callers or dependents (evidence provided)
- Logic errors, off-by-one, incorrect conditions
- Missing error handling that could crash at runtime
- Security issues: injection, auth bypass, secrets, unsafe input
- State mutations with unprotected concurrent access
- Resource leaks: unclosed connections, missing cleanup
- API contract violations: return type changes, removed fields, changed semantics

Do not flag:
- Style or formatting
- Missing comments or documentation
- Test coverage
- Performance unless it's a clear regression (N+1, unbounded loop)
- Things the compiler/type system already catches`;
    // Language checklists are appended only for languages in this commit.
    if (languages.includes("typescript")) {
        prompt += `

## TypeScript Checks
- Missing \`await\` on async calls
- \`any\` type hiding real type errors
- Non-exhaustive switch on discriminated unions
- Promise fire-and-forget without error handling
- Optional chaining masking bugs where value should never be null`;
    }
    if (languages.includes("swift")) {
        prompt += `

## Swift Checks
- Force unwraps (\`!\`) outside test files
- Missing \`[weak self]\` in escaping closures
- Missing \`@MainActor\` on UI state mutations
- \`try!\` or \`try?\` silently swallowing errors
- Sendable violations in concurrent code`;
    }
    if (languages.includes("kotlin")) {
        prompt += `

## Kotlin Checks
- Coroutine scope leaks (GlobalScope, unstructured)
- Missing null checks on Java interop boundaries
- Fragment lifecycle violations
- Hardcoded strings that should be resources`;
    }
    // Closing section: reasoning checklist plus the JSON output contract.
    prompt += `

## Thinking

Before issuing your verdict, reason through:
1. What changed in this diff?
2. What is the blast radius? (use the provided caller/dependent evidence)
3. Could this break something at runtime that the compiler won't catch?
4. Is there a security implication?

## Output Format

Respond with ONLY valid JSON. No markdown, no explanation outside the JSON.

If no issues found:
{"findings": [], "summary": "Clean commit — no runtime risks identified."}

If issues found:
{
"findings": [
{
"file": "path/to/file.ts",
"line": 42,
"severity": "error|warning",
"category": "breaking_change|logic_error|security|resource_leak|concurrency|missing_error_handling",
"symbol": "functionName",
"message": "Brief description of the issue",
"evidence": ["CallerA.swift:34 still expects Optional<User>"],
"suggestion": "One-line fix suggestion"
}
],
"summary": "Brief summary of all findings."
}

Severity guide:
- error: will break at runtime, security vulnerability, data loss
- warning: could break under certain conditions, smells risky

Be concise. One sentence per message. Evidence from the codebase context, not speculation.`;
    return prompt;
}
|
|
321
|
+
// Assemble the user prompt: commit header, diff, then the optional changed
// symbols and codebase-context sections, closing with the task instruction.
function buildUserPrompt(info, diff, symbols, context) {
    const parts = [];
    parts.push(`## Commit\n${info.short} — ${info.message}\n\n## Diff\n${diff}\n\n## Codebase Context\n`);
    if (symbols.length > 0) {
        parts.push(`### Changed Symbols\n${symbols.join("\n")}\n\n`);
    }
    if (context) {
        parts.push(context);
    }
    parts.push(`\nReview this commit. Think through the blast radius using the provided context, then output your findings as JSON.`);
    return parts.join("");
}
|
|
339
|
+
// ---------------------------------------------------------------------------
// Response parsing
// ---------------------------------------------------------------------------
/** Remove <think>/<thinking> blocks emitted by reasoning models, then trim. */
function stripThinkTags(text) {
    return text
        .replace(/<think(?:ing)?>[\s\S]*?<\/think(?:ing)?>/g, "")
        .trim();
}
/** Try to parse `text` as a findings payload; returns null when it isn't one. */
function tryParseFindings(text) {
    try {
        const parsed = JSON.parse(text);
        if (parsed && Array.isArray(parsed.findings)) {
            return {
                findings: validateFindings(parsed.findings),
                summary: String(parsed.summary || "No summary."),
            };
        }
    }
    catch (_a) { }
    return null;
}
/**
 * Parse the model's raw completion into validated findings.
 * Strips think tags and markdown code fences, then attempts a direct JSON
 * parse, then falls back to the substring between the first "{" and the
 * last "}".
 *
 * Fix vs. previous version: the opening-fence regex only matched ```json,
 * so bare ``` or otherwise-labeled fences were left in place; it now
 * accepts any alphabetic fence label (```json, ```JSON, bare ```).
 *
 * @param {string} content raw LLM completion (may be empty)
 * @returns {{findings: Array<object>, summary: string}}
 */
function parseFindings(content) {
    const empty = { findings: [], summary: "Parse error — could not extract findings from model output." };
    if (!content)
        return empty;
    let cleaned = stripThinkTags(content);
    cleaned = cleaned.replace(/^```[a-zA-Z]*\s*\n?/, "").replace(/\n?```\s*$/, "");
    // Try direct parse first.
    const direct = tryParseFindings(cleaned);
    if (direct)
        return direct;
    // Fallback: extract JSON between first { and last }.
    const firstBrace = cleaned.indexOf("{");
    const lastBrace = cleaned.lastIndexOf("}");
    if (firstBrace >= 0 && lastBrace > firstBrace) {
        const rescued = tryParseFindings(cleaned.slice(firstBrace, lastBrace + 1));
        if (rescued)
            return rescued;
    }
    return empty;
}
/**
 * Coerce raw model findings into well-typed entries; items without a file
 * or message are dropped, and all other fields get safe defaults.
 */
function validateFindings(raw) {
    const findings = [];
    for (const item of raw) {
        if (!item || typeof item !== "object")
            continue;
        const f = item;
        if (!f.file || !f.message)
            continue;
        findings.push({
            file: String(f.file),
            line: Number(f.line) || 0,
            severity: f.severity === "error" ? "error" : "warning",
            category: String(f.category || "logic_error"),
            symbol: String(f.symbol || ""),
            message: String(f.message),
            evidence: Array.isArray(f.evidence) ? f.evidence.map(String) : [],
            suggestion: String(f.suggestion || ""),
        });
    }
    return findings;
}
|
package/package.json
CHANGED
|
@@ -175,6 +175,14 @@ gmax context "payment flow" --budget 8000
|
|
|
175
175
|
gmax context src/lib/auth/ --budget 3000
|
|
176
176
|
```
|
|
177
177
|
|
|
178
|
+
### Investigate — `gmax investigate "question"` (requires LLM)
|
|
179
|
+
```
|
|
180
|
+
gmax investigate "how does authentication work?"
|
|
181
|
+
gmax investigate "what would break if I changed VectorDB?" -v
|
|
182
|
+
gmax investigate "where are API routes defined?" --root ~/project --rounds 5
|
|
183
|
+
```
|
|
184
|
+
Agentic Q&A: a local LLM autonomously uses gmax tools (search, trace, peek, impact, related) to gather evidence and answer. Requires `gmax llm on && gmax llm start`. Use `-v` to see tool calls and reasoning.
|
|
185
|
+
|
|
178
186
|
### Other
|
|
179
187
|
```
|
|
180
188
|
gmax status # show all indexed projects
|
|
@@ -185,6 +193,7 @@ gmax project --agent # compact: key\tvalue pairs
|
|
|
185
193
|
gmax index # reindex current directory
|
|
186
194
|
gmax config # view/change settings
|
|
187
195
|
gmax doctor # health check
|
|
196
|
+
gmax llm on/off/start/stop/status # manage local LLM server
|
|
188
197
|
```
|
|
189
198
|
|
|
190
199
|
## Workflow
|