@hasna/terminal 0.7.5 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +29 -0
- package/dist/file-index.js +62 -0
- package/dist/line-dedup.js +59 -0
- package/dist/mcp/server.js +54 -0
- package/dist/session-boot.js +59 -0
- package/package.json +1 -1
- package/src/cli.tsx +23 -0
- package/src/file-index.ts +93 -0
- package/src/line-dedup.ts +66 -0
- package/src/mcp/server.ts +75 -0
- package/src/session-boot.ts +64 -0
package/dist/cli.js
CHANGED
|
@@ -458,6 +458,35 @@ else if (args[0] === "sessions") {
|
|
|
458
458
|
}
|
|
459
459
|
}
|
|
460
460
|
}
|
|
461
|
+
// ── Overview command ─────────────────────────────────────────────────────────
|
|
462
|
+
else if (args[0] === "overview") {
|
|
463
|
+
const { existsSync, readFileSync } = await import("fs");
|
|
464
|
+
const { execSync } = await import("child_process");
|
|
465
|
+
const run = (cmd) => { try {
|
|
466
|
+
return execSync(cmd, { encoding: "utf8", cwd: process.cwd() }).trim();
|
|
467
|
+
}
|
|
468
|
+
catch {
|
|
469
|
+
return "";
|
|
470
|
+
} };
|
|
471
|
+
let pkg = null;
|
|
472
|
+
try {
|
|
473
|
+
pkg = JSON.parse(readFileSync("package.json", "utf8"));
|
|
474
|
+
}
|
|
475
|
+
catch { }
|
|
476
|
+
if (pkg) {
|
|
477
|
+
console.log(`${pkg.name}@${pkg.version}`);
|
|
478
|
+
if (pkg.scripts) {
|
|
479
|
+
console.log("\nScripts:");
|
|
480
|
+
for (const [k, v] of Object.entries(pkg.scripts).slice(0, 10))
|
|
481
|
+
console.log(` ${k}: ${v}`);
|
|
482
|
+
}
|
|
483
|
+
if (pkg.dependencies)
|
|
484
|
+
console.log(`\nDeps: ${Object.keys(pkg.dependencies).join(", ")}`);
|
|
485
|
+
}
|
|
486
|
+
const src = run("ls -1 src/ 2>/dev/null || ls -1 lib/ 2>/dev/null");
|
|
487
|
+
if (src)
|
|
488
|
+
console.log(`\nSource:\n${src.split("\n").map(f => " " + f).join("\n")}`);
|
|
489
|
+
}
|
|
461
490
|
// ── Repo command ─────────────────────────────────────────────────────────────
|
|
462
491
|
else if (args[0] === "repo") {
|
|
463
492
|
const { execSync } = await import("child_process");
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
// Pre-computed file index — build once, serve search from memory
|
|
2
|
+
// Eliminates subprocess spawning for repeat file queries
|
|
3
|
+
import { spawn } from "child_process";
|
|
4
|
+
let index = null;
|
|
5
|
+
let indexCwd = "";
|
|
6
|
+
let indexTime = 0;
|
|
7
|
+
let watcher = null;
|
|
8
|
+
const INDEX_TTL = 30_000; // 30 seconds
|
|
9
|
+
function exec(command, cwd) {
|
|
10
|
+
return new Promise((resolve) => {
|
|
11
|
+
const proc = spawn("/bin/zsh", ["-c", command], { cwd, stdio: ["ignore", "pipe", "pipe"] });
|
|
12
|
+
let out = "";
|
|
13
|
+
proc.stdout?.on("data", (d) => { out += d.toString(); });
|
|
14
|
+
proc.on("close", () => resolve(out));
|
|
15
|
+
});
|
|
16
|
+
}
|
|
17
|
+
/** Build or return cached file index */
|
|
18
|
+
export async function getFileIndex(cwd) {
|
|
19
|
+
// Return cached if fresh
|
|
20
|
+
if (index && indexCwd === cwd && Date.now() - indexTime < INDEX_TTL) {
|
|
21
|
+
return index;
|
|
22
|
+
}
|
|
23
|
+
const raw = await exec("find . -type f -not -path '*/node_modules/*' -not -path '*/.git/*' -not -path '*/dist/*' -not -path '*/.next/*' -not -path '*/build/*' 2>/dev/null", cwd);
|
|
24
|
+
index = raw.split("\n").filter(l => l.trim()).map(p => {
|
|
25
|
+
const path = p.trim();
|
|
26
|
+
const parts = path.split("/");
|
|
27
|
+
const name = parts[parts.length - 1] ?? path;
|
|
28
|
+
const dir = parts.slice(0, -1).join("/") || ".";
|
|
29
|
+
const ext = name.includes(".") ? "." + name.split(".").pop() : "";
|
|
30
|
+
return { path, dir, name, ext };
|
|
31
|
+
});
|
|
32
|
+
indexCwd = cwd;
|
|
33
|
+
indexTime = Date.now();
|
|
34
|
+
return index;
|
|
35
|
+
}
|
|
36
|
+
/** Search file index by glob pattern (in-memory, no subprocess) */
|
|
37
|
+
export async function searchIndex(cwd, pattern) {
|
|
38
|
+
const idx = await getFileIndex(cwd);
|
|
39
|
+
// Convert glob to regex
|
|
40
|
+
const regex = new RegExp("^" + pattern
|
|
41
|
+
.replace(/\./g, "\\.")
|
|
42
|
+
.replace(/\*/g, ".*")
|
|
43
|
+
.replace(/\?/g, ".")
|
|
44
|
+
+ "$", "i");
|
|
45
|
+
return idx.filter(e => regex.test(e.name) || regex.test(e.path)).map(e => e.path);
|
|
46
|
+
}
|
|
47
|
+
/** Get file index stats */
|
|
48
|
+
export async function indexStats(cwd) {
|
|
49
|
+
const idx = await getFileIndex(cwd);
|
|
50
|
+
const byExt = {};
|
|
51
|
+
const byDir = {};
|
|
52
|
+
for (const e of idx) {
|
|
53
|
+
byExt[e.ext || "(none)"] = (byExt[e.ext || "(none)"] ?? 0) + 1;
|
|
54
|
+
const topDir = e.dir.split("/").slice(0, 2).join("/");
|
|
55
|
+
byDir[topDir] = (byDir[topDir] ?? 0) + 1;
|
|
56
|
+
}
|
|
57
|
+
return { totalFiles: idx.length, byExtension: byExt, byDir };
|
|
58
|
+
}
|
|
59
|
+
/** Invalidate index */
|
|
60
|
+
export function invalidateIndex() {
|
|
61
|
+
index = null;
|
|
62
|
+
}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
// Cross-command line deduplication — track lines already shown to agent
|
|
2
|
+
// When new output contains >50% already-seen lines, suppress them
|
|
3
|
+
const seenLines = new Set();
|
|
4
|
+
const MAX_SEEN = 5000;
|
|
5
|
+
function normalize(line) {
|
|
6
|
+
return line.trim().toLowerCase();
|
|
7
|
+
}
|
|
8
|
+
/** Deduplicate output lines against session history */
|
|
9
|
+
export function dedup(output) {
|
|
10
|
+
const lines = output.split("\n");
|
|
11
|
+
if (lines.length < 5) {
|
|
12
|
+
// Short output — add to seen, don't dedup
|
|
13
|
+
for (const l of lines) {
|
|
14
|
+
if (l.trim())
|
|
15
|
+
seenLines.add(normalize(l));
|
|
16
|
+
}
|
|
17
|
+
return { output, novelCount: lines.length, seenCount: 0, deduplicated: false };
|
|
18
|
+
}
|
|
19
|
+
let novelCount = 0;
|
|
20
|
+
let seenCount = 0;
|
|
21
|
+
const novel = [];
|
|
22
|
+
for (const line of lines) {
|
|
23
|
+
const norm = normalize(line);
|
|
24
|
+
if (!norm) {
|
|
25
|
+
novel.push(line);
|
|
26
|
+
continue;
|
|
27
|
+
}
|
|
28
|
+
if (seenLines.has(norm)) {
|
|
29
|
+
seenCount++;
|
|
30
|
+
}
|
|
31
|
+
else {
|
|
32
|
+
novelCount++;
|
|
33
|
+
novel.push(line);
|
|
34
|
+
seenLines.add(norm);
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
// Evict oldest if too large
|
|
38
|
+
if (seenLines.size > MAX_SEEN) {
|
|
39
|
+
const entries = [...seenLines];
|
|
40
|
+
for (let i = 0; i < entries.length - MAX_SEEN; i++) {
|
|
41
|
+
seenLines.delete(entries[i]);
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
// Only dedup if >50% were already seen
|
|
45
|
+
if (seenCount > lines.length * 0.5) {
|
|
46
|
+
const result = novel.join("\n");
|
|
47
|
+
return { output: result + `\n(${seenCount} lines already shown, omitted)`, novelCount, seenCount, deduplicated: true };
|
|
48
|
+
}
|
|
49
|
+
// Add all to seen but return full output
|
|
50
|
+
for (const l of lines) {
|
|
51
|
+
if (l.trim())
|
|
52
|
+
seenLines.add(normalize(l));
|
|
53
|
+
}
|
|
54
|
+
return { output, novelCount: lines.length, seenCount: 0, deduplicated: false };
|
|
55
|
+
}
|
|
56
|
+
/** Clear dedup history */
|
|
57
|
+
export function clearDedup() {
|
|
58
|
+
seenLines.clear();
|
|
59
|
+
}
|
package/dist/mcp/server.js
CHANGED
|
@@ -15,6 +15,7 @@ import { diffOutput } from "../diff-cache.js";
|
|
|
15
15
|
import { processOutput } from "../output-processor.js";
|
|
16
16
|
import { listSessions, getSessionInteractions, getSessionStats } from "../sessions-db.js";
|
|
17
17
|
import { cachedRead } from "../file-cache.js";
|
|
18
|
+
import { getBootContext } from "../session-boot.js";
|
|
18
19
|
import { storeOutput, expandOutput } from "../expand-store.js";
|
|
19
20
|
import { rewriteCommand } from "../command-rewriter.js";
|
|
20
21
|
import { shouldBeLazy, toLazy } from "../lazy-executor.js";
|
|
@@ -416,6 +417,59 @@ export function createServer() {
|
|
|
416
417
|
const sessions = listSessions(limit ?? 20);
|
|
417
418
|
return { content: [{ type: "text", text: JSON.stringify(sessions) }] };
|
|
418
419
|
});
|
|
420
|
+
// ── boot: session start context (replaces first 5 agent commands) ──────────
|
|
421
|
+
server.tool("boot", "Get everything an agent needs on session start in ONE call — git state, project info, source structure. Replaces: git status + git log + cat package.json + ls src/. Cached for the session.", async () => {
|
|
422
|
+
const ctx = await getBootContext(process.cwd());
|
|
423
|
+
return { content: [{ type: "text", text: JSON.stringify(ctx) }] };
|
|
424
|
+
});
|
|
425
|
+
// ── project_overview: orient agent in one call ─────────────────────────────
|
|
426
|
+
server.tool("project_overview", "Get project overview in one call — package.json info, source structure, config files. Replaces: cat package.json + ls src/ + cat tsconfig.json.", {
|
|
427
|
+
path: z.string().optional().describe("Project root (default: cwd)"),
|
|
428
|
+
}, async ({ path }) => {
|
|
429
|
+
const cwd = path ?? process.cwd();
|
|
430
|
+
const [pkgResult, srcResult, configResult] = await Promise.all([
|
|
431
|
+
exec("cat package.json 2>/dev/null", cwd),
|
|
432
|
+
exec("ls -1 src/ 2>/dev/null || ls -1 lib/ 2>/dev/null || ls -1 app/ 2>/dev/null", cwd),
|
|
433
|
+
exec("ls -1 *.json *.config.* .env* tsconfig* 2>/dev/null", cwd),
|
|
434
|
+
]);
|
|
435
|
+
let pkg = null;
|
|
436
|
+
try {
|
|
437
|
+
pkg = JSON.parse(pkgResult.stdout);
|
|
438
|
+
}
|
|
439
|
+
catch { }
|
|
440
|
+
return {
|
|
441
|
+
content: [{ type: "text", text: JSON.stringify({
|
|
442
|
+
name: pkg?.name,
|
|
443
|
+
version: pkg?.version,
|
|
444
|
+
scripts: pkg?.scripts,
|
|
445
|
+
dependencies: pkg?.dependencies ? Object.keys(pkg.dependencies) : [],
|
|
446
|
+
devDependencies: pkg?.devDependencies ? Object.keys(pkg.devDependencies) : [],
|
|
447
|
+
sourceFiles: srcResult.stdout.split("\n").filter(l => l.trim()),
|
|
448
|
+
configFiles: configResult.stdout.split("\n").filter(l => l.trim()),
|
|
449
|
+
}) }],
|
|
450
|
+
};
|
|
451
|
+
});
|
|
452
|
+
// ── last_commit: what just happened ───────────────────────────────────────
|
|
453
|
+
server.tool("last_commit", "Get details of the last commit — hash, message, files changed, diff stats. Replaces: git log -1 + git show --stat + git diff HEAD~1.", {
|
|
454
|
+
path: z.string().optional().describe("Repo path (default: cwd)"),
|
|
455
|
+
}, async ({ path }) => {
|
|
456
|
+
const cwd = path ?? process.cwd();
|
|
457
|
+
const [logResult, statResult] = await Promise.all([
|
|
458
|
+
exec("git log -1 --format='%H%n%s%n%an%n%ai'", cwd),
|
|
459
|
+
exec("git show --stat --format='' HEAD", cwd),
|
|
460
|
+
]);
|
|
461
|
+
const [hash, message, author, date] = logResult.stdout.split("\n");
|
|
462
|
+
const filesChanged = statResult.stdout.split("\n").filter(l => l.trim() && !l.includes("changed"));
|
|
463
|
+
return {
|
|
464
|
+
content: [{ type: "text", text: JSON.stringify({
|
|
465
|
+
hash: hash?.trim(),
|
|
466
|
+
message: message?.trim(),
|
|
467
|
+
author: author?.trim(),
|
|
468
|
+
date: date?.trim(),
|
|
469
|
+
filesChanged,
|
|
470
|
+
}) }],
|
|
471
|
+
};
|
|
472
|
+
});
|
|
419
473
|
// ── read_file: cached file reading ─────────────────────────────────────────
|
|
420
474
|
server.tool("read_file", "Read a file with session caching. Second read of unchanged file returns instantly from cache. Supports offset/limit for pagination without re-reading.", {
|
|
421
475
|
path: z.string().describe("File path"),
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
// Session boot cache — precompute common data on first MCP call
|
|
2
|
+
// Agents always start with git status + file tree + package.json — do it once
|
|
3
|
+
import { spawn } from "child_process";
|
|
4
|
+
import { existsSync, readFileSync } from "fs";
|
|
5
|
+
import { join } from "path";
|
|
6
|
+
let bootCache = null;
|
|
7
|
+
let bootCwd = "";
|
|
8
|
+
function exec(command, cwd) {
|
|
9
|
+
return new Promise((resolve) => {
|
|
10
|
+
const proc = spawn("/bin/zsh", ["-c", command], { cwd, stdio: ["ignore", "pipe", "pipe"] });
|
|
11
|
+
let out = "";
|
|
12
|
+
proc.stdout?.on("data", (d) => { out += d.toString(); });
|
|
13
|
+
proc.on("close", () => resolve(out.trim()));
|
|
14
|
+
});
|
|
15
|
+
}
|
|
16
|
+
/** Get or build session boot context */
|
|
17
|
+
export async function getBootContext(cwd) {
|
|
18
|
+
if (bootCache && bootCwd === cwd)
|
|
19
|
+
return bootCache;
|
|
20
|
+
const [branch, status, log, srcLs] = await Promise.all([
|
|
21
|
+
exec("git branch --show-current 2>/dev/null", cwd),
|
|
22
|
+
exec("git status --porcelain 2>/dev/null", cwd),
|
|
23
|
+
exec("git log --oneline -8 2>/dev/null", cwd),
|
|
24
|
+
exec("ls -1 src/ 2>/dev/null || ls -1 lib/ 2>/dev/null || echo ''", cwd),
|
|
25
|
+
]);
|
|
26
|
+
let pkg = null;
|
|
27
|
+
const pkgPath = join(cwd, "package.json");
|
|
28
|
+
if (existsSync(pkgPath)) {
|
|
29
|
+
try {
|
|
30
|
+
pkg = JSON.parse(readFileSync(pkgPath, "utf8"));
|
|
31
|
+
}
|
|
32
|
+
catch { }
|
|
33
|
+
}
|
|
34
|
+
bootCache = {
|
|
35
|
+
cwd,
|
|
36
|
+
git: {
|
|
37
|
+
branch: branch || null,
|
|
38
|
+
dirty: status.length > 0,
|
|
39
|
+
changedFiles: status.split("\n").filter(l => l.trim()).length,
|
|
40
|
+
recentCommits: log.split("\n").filter(l => l.trim()).slice(0, 5).map(l => {
|
|
41
|
+
const m = l.match(/^([a-f0-9]+)\s+(.+)$/);
|
|
42
|
+
return m ? { hash: m[1], message: m[2] } : null;
|
|
43
|
+
}).filter(Boolean),
|
|
44
|
+
},
|
|
45
|
+
project: pkg ? {
|
|
46
|
+
name: pkg.name,
|
|
47
|
+
version: pkg.version,
|
|
48
|
+
scripts: pkg.scripts ? Object.keys(pkg.scripts) : [],
|
|
49
|
+
deps: pkg.dependencies ? Object.keys(pkg.dependencies).length : 0,
|
|
50
|
+
} : null,
|
|
51
|
+
sourceFiles: srcLs.split("\n").filter(l => l.trim()),
|
|
52
|
+
};
|
|
53
|
+
bootCwd = cwd;
|
|
54
|
+
return bootCache;
|
|
55
|
+
}
|
|
56
|
+
/** Invalidate boot cache (call after git operations or file changes) */
|
|
57
|
+
export function invalidateBootCache() {
|
|
58
|
+
bootCache = null;
|
|
59
|
+
}
|
package/package.json
CHANGED
package/src/cli.tsx
CHANGED
|
@@ -443,6 +443,29 @@ else if (args[0] === "sessions") {
|
|
|
443
443
|
}
|
|
444
444
|
}
|
|
445
445
|
|
|
446
|
+
// ── Overview command ─────────────────────────────────────────────────────────
|
|
447
|
+
|
|
448
|
+
else if (args[0] === "overview") {
|
|
449
|
+
const { existsSync, readFileSync } = await import("fs");
|
|
450
|
+
const { execSync } = await import("child_process");
|
|
451
|
+
const run = (cmd: string) => { try { return execSync(cmd, { encoding: "utf8", cwd: process.cwd() }).trim(); } catch { return ""; } };
|
|
452
|
+
|
|
453
|
+
let pkg: any = null;
|
|
454
|
+
try { pkg = JSON.parse(readFileSync("package.json", "utf8")); } catch {}
|
|
455
|
+
|
|
456
|
+
if (pkg) {
|
|
457
|
+
console.log(`${pkg.name}@${pkg.version}`);
|
|
458
|
+
if (pkg.scripts) {
|
|
459
|
+
console.log("\nScripts:");
|
|
460
|
+
for (const [k, v] of Object.entries(pkg.scripts).slice(0, 10)) console.log(` ${k}: ${v}`);
|
|
461
|
+
}
|
|
462
|
+
if (pkg.dependencies) console.log(`\nDeps: ${Object.keys(pkg.dependencies).join(", ")}`);
|
|
463
|
+
}
|
|
464
|
+
|
|
465
|
+
const src = run("ls -1 src/ 2>/dev/null || ls -1 lib/ 2>/dev/null");
|
|
466
|
+
if (src) console.log(`\nSource:\n${src.split("\n").map(f => " " + f).join("\n")}`);
|
|
467
|
+
}
|
|
468
|
+
|
|
446
469
|
// ── Repo command ─────────────────────────────────────────────────────────────
|
|
447
470
|
|
|
448
471
|
else if (args[0] === "repo") {
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
// Pre-computed file index — build once, serve search from memory
// Eliminates subprocess spawning for repeat file queries

import { spawn } from "child_process";
import { watch, type FSWatcher } from "fs";

/** One indexed file: full relative path plus pre-split components. */
interface FileIndexEntry {
  path: string;   // path as reported by `find` (e.g. "./src/cli.tsx")
  dir: string;    // directory portion, "." for top-level files
  name: string;   // basename
  ext: string;    // extension including the dot, "" when the name has no dot
}

// Module-level cache: a single index per process, tied to the cwd it was built for.
let index: FileIndexEntry[] | null = null;
let indexCwd: string = "";
let indexTime: number = 0;
// NOTE(review): `watcher` (and the `watch`/`FSWatcher` import above) are never
// used anywhere in this module — dead code? confirm before removing.
let watcher: FSWatcher | null = null;

const INDEX_TTL = 30_000; // 30 seconds

/**
 * Run a shell command and resolve with its raw (untrimmed) stdout.
 * Never rejects: stderr is piped but discarded, and a non-zero exit still
 * resolves with whatever stdout was produced.
 * NOTE(review): hard-codes /bin/zsh — fails on systems without zsh (most
 * Linux distros); confirm whether /bin/sh would suffice for these commands.
 */
function exec(command: string, cwd: string): Promise<string> {
  return new Promise((resolve) => {
    const proc = spawn("/bin/zsh", ["-c", command], { cwd, stdio: ["ignore", "pipe", "pipe"] });
    let out = "";
    // Accumulate stdout chunks; "close" fires once all stdio streams have ended.
    proc.stdout?.on("data", (d: Buffer) => { out += d.toString(); });
    proc.on("close", () => resolve(out));
  });
}
|
|
29
|
+
|
|
30
|
+
/** Build or return cached file index */
|
|
31
|
+
export async function getFileIndex(cwd: string): Promise<FileIndexEntry[]> {
|
|
32
|
+
// Return cached if fresh
|
|
33
|
+
if (index && indexCwd === cwd && Date.now() - indexTime < INDEX_TTL) {
|
|
34
|
+
return index;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
const raw = await exec(
|
|
38
|
+
"find . -type f -not -path '*/node_modules/*' -not -path '*/.git/*' -not -path '*/dist/*' -not -path '*/.next/*' -not -path '*/build/*' 2>/dev/null",
|
|
39
|
+
cwd
|
|
40
|
+
);
|
|
41
|
+
|
|
42
|
+
index = raw.split("\n").filter(l => l.trim()).map(p => {
|
|
43
|
+
const path = p.trim();
|
|
44
|
+
const parts = path.split("/");
|
|
45
|
+
const name = parts[parts.length - 1] ?? path;
|
|
46
|
+
const dir = parts.slice(0, -1).join("/") || ".";
|
|
47
|
+
const ext = name.includes(".") ? "." + name.split(".").pop() : "";
|
|
48
|
+
return { path, dir, name, ext };
|
|
49
|
+
});
|
|
50
|
+
|
|
51
|
+
indexCwd = cwd;
|
|
52
|
+
indexTime = Date.now();
|
|
53
|
+
|
|
54
|
+
return index;
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
/** Search file index by glob pattern (in-memory, no subprocess) */
|
|
58
|
+
export async function searchIndex(cwd: string, pattern: string): Promise<string[]> {
|
|
59
|
+
const idx = await getFileIndex(cwd);
|
|
60
|
+
|
|
61
|
+
// Convert glob to regex
|
|
62
|
+
const regex = new RegExp(
|
|
63
|
+
"^" + pattern
|
|
64
|
+
.replace(/\./g, "\\.")
|
|
65
|
+
.replace(/\*/g, ".*")
|
|
66
|
+
.replace(/\?/g, ".")
|
|
67
|
+
+ "$",
|
|
68
|
+
"i"
|
|
69
|
+
);
|
|
70
|
+
|
|
71
|
+
return idx.filter(e => regex.test(e.name) || regex.test(e.path)).map(e => e.path);
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
/** Get file index stats */
|
|
75
|
+
export async function indexStats(cwd: string): Promise<{ totalFiles: number; byExtension: Record<string, number>; byDir: Record<string, number> }> {
|
|
76
|
+
const idx = await getFileIndex(cwd);
|
|
77
|
+
|
|
78
|
+
const byExt: Record<string, number> = {};
|
|
79
|
+
const byDir: Record<string, number> = {};
|
|
80
|
+
|
|
81
|
+
for (const e of idx) {
|
|
82
|
+
byExt[e.ext || "(none)"] = (byExt[e.ext || "(none)"] ?? 0) + 1;
|
|
83
|
+
const topDir = e.dir.split("/").slice(0, 2).join("/");
|
|
84
|
+
byDir[topDir] = (byDir[topDir] ?? 0) + 1;
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
return { totalFiles: idx.length, byExtension: byExt, byDir };
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
/** Invalidate index — the next getFileIndex call will rebuild it from disk. */
export function invalidateIndex(): void {
  index = null;
}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
// Cross-command line deduplication — track lines already shown to agent
|
|
2
|
+
// When new output contains >50% already-seen lines, suppress them
|
|
3
|
+
|
|
4
|
+
const seenLines = new Set<string>();
|
|
5
|
+
const MAX_SEEN = 5000;
|
|
6
|
+
|
|
7
|
+
function normalize(line: string): string {
|
|
8
|
+
return line.trim().toLowerCase();
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
export interface DedupResult {
|
|
12
|
+
output: string;
|
|
13
|
+
novelCount: number;
|
|
14
|
+
seenCount: number;
|
|
15
|
+
deduplicated: boolean;
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
/** Deduplicate output lines against session history */
|
|
19
|
+
export function dedup(output: string): DedupResult {
|
|
20
|
+
const lines = output.split("\n");
|
|
21
|
+
if (lines.length < 5) {
|
|
22
|
+
// Short output — add to seen, don't dedup
|
|
23
|
+
for (const l of lines) { if (l.trim()) seenLines.add(normalize(l)); }
|
|
24
|
+
return { output, novelCount: lines.length, seenCount: 0, deduplicated: false };
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
let novelCount = 0;
|
|
28
|
+
let seenCount = 0;
|
|
29
|
+
const novel: string[] = [];
|
|
30
|
+
|
|
31
|
+
for (const line of lines) {
|
|
32
|
+
const norm = normalize(line);
|
|
33
|
+
if (!norm) { novel.push(line); continue; }
|
|
34
|
+
|
|
35
|
+
if (seenLines.has(norm)) {
|
|
36
|
+
seenCount++;
|
|
37
|
+
} else {
|
|
38
|
+
novelCount++;
|
|
39
|
+
novel.push(line);
|
|
40
|
+
seenLines.add(norm);
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
// Evict oldest if too large
|
|
45
|
+
if (seenLines.size > MAX_SEEN) {
|
|
46
|
+
const entries = [...seenLines];
|
|
47
|
+
for (let i = 0; i < entries.length - MAX_SEEN; i++) {
|
|
48
|
+
seenLines.delete(entries[i]);
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
// Only dedup if >50% were already seen
|
|
53
|
+
if (seenCount > lines.length * 0.5) {
|
|
54
|
+
const result = novel.join("\n");
|
|
55
|
+
return { output: result + `\n(${seenCount} lines already shown, omitted)`, novelCount, seenCount, deduplicated: true };
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
// Add all to seen but return full output
|
|
59
|
+
for (const l of lines) { if (l.trim()) seenLines.add(normalize(l)); }
|
|
60
|
+
return { output, novelCount: lines.length, seenCount: 0, deduplicated: false };
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
/** Clear dedup history */
|
|
64
|
+
export function clearDedup(): void {
|
|
65
|
+
seenLines.clear();
|
|
66
|
+
}
|
package/src/mcp/server.ts
CHANGED
|
@@ -16,6 +16,7 @@ import { diffOutput } from "../diff-cache.js";
|
|
|
16
16
|
import { processOutput } from "../output-processor.js";
|
|
17
17
|
import { listSessions, getSessionInteractions, getSessionStats } from "../sessions-db.js";
|
|
18
18
|
import { cachedRead, cacheStats } from "../file-cache.js";
|
|
19
|
+
import { getBootContext } from "../session-boot.js";
|
|
19
20
|
import { storeOutput, expandOutput } from "../expand-store.js";
|
|
20
21
|
import { rewriteCommand } from "../command-rewriter.js";
|
|
21
22
|
import { shouldBeLazy, toLazy } from "../lazy-executor.js";
|
|
@@ -587,6 +588,80 @@ export function createServer(): McpServer {
|
|
|
587
588
|
}
|
|
588
589
|
);
|
|
589
590
|
|
|
591
|
+
// ── boot: session start context (replaces first 5 agent commands) ──────────
|
|
592
|
+
|
|
593
|
+
server.tool(
|
|
594
|
+
"boot",
|
|
595
|
+
"Get everything an agent needs on session start in ONE call — git state, project info, source structure. Replaces: git status + git log + cat package.json + ls src/. Cached for the session.",
|
|
596
|
+
async () => {
|
|
597
|
+
const ctx = await getBootContext(process.cwd());
|
|
598
|
+
return { content: [{ type: "text" as const, text: JSON.stringify(ctx) }] };
|
|
599
|
+
}
|
|
600
|
+
);
|
|
601
|
+
|
|
602
|
+
// ── project_overview: orient agent in one call ─────────────────────────────
|
|
603
|
+
|
|
604
|
+
server.tool(
|
|
605
|
+
"project_overview",
|
|
606
|
+
"Get project overview in one call — package.json info, source structure, config files. Replaces: cat package.json + ls src/ + cat tsconfig.json.",
|
|
607
|
+
{
|
|
608
|
+
path: z.string().optional().describe("Project root (default: cwd)"),
|
|
609
|
+
},
|
|
610
|
+
async ({ path }) => {
|
|
611
|
+
const cwd = path ?? process.cwd();
|
|
612
|
+
const [pkgResult, srcResult, configResult] = await Promise.all([
|
|
613
|
+
exec("cat package.json 2>/dev/null", cwd),
|
|
614
|
+
exec("ls -1 src/ 2>/dev/null || ls -1 lib/ 2>/dev/null || ls -1 app/ 2>/dev/null", cwd),
|
|
615
|
+
exec("ls -1 *.json *.config.* .env* tsconfig* 2>/dev/null", cwd),
|
|
616
|
+
]);
|
|
617
|
+
|
|
618
|
+
let pkg: any = null;
|
|
619
|
+
try { pkg = JSON.parse(pkgResult.stdout); } catch {}
|
|
620
|
+
|
|
621
|
+
return {
|
|
622
|
+
content: [{ type: "text" as const, text: JSON.stringify({
|
|
623
|
+
name: pkg?.name,
|
|
624
|
+
version: pkg?.version,
|
|
625
|
+
scripts: pkg?.scripts,
|
|
626
|
+
dependencies: pkg?.dependencies ? Object.keys(pkg.dependencies) : [],
|
|
627
|
+
devDependencies: pkg?.devDependencies ? Object.keys(pkg.devDependencies) : [],
|
|
628
|
+
sourceFiles: srcResult.stdout.split("\n").filter(l => l.trim()),
|
|
629
|
+
configFiles: configResult.stdout.split("\n").filter(l => l.trim()),
|
|
630
|
+
}) }],
|
|
631
|
+
};
|
|
632
|
+
}
|
|
633
|
+
);
|
|
634
|
+
|
|
635
|
+
// ── last_commit: what just happened ───────────────────────────────────────
|
|
636
|
+
|
|
637
|
+
server.tool(
|
|
638
|
+
"last_commit",
|
|
639
|
+
"Get details of the last commit — hash, message, files changed, diff stats. Replaces: git log -1 + git show --stat + git diff HEAD~1.",
|
|
640
|
+
{
|
|
641
|
+
path: z.string().optional().describe("Repo path (default: cwd)"),
|
|
642
|
+
},
|
|
643
|
+
async ({ path }) => {
|
|
644
|
+
const cwd = path ?? process.cwd();
|
|
645
|
+
const [logResult, statResult] = await Promise.all([
|
|
646
|
+
exec("git log -1 --format='%H%n%s%n%an%n%ai'", cwd),
|
|
647
|
+
exec("git show --stat --format='' HEAD", cwd),
|
|
648
|
+
]);
|
|
649
|
+
|
|
650
|
+
const [hash, message, author, date] = logResult.stdout.split("\n");
|
|
651
|
+
const filesChanged = statResult.stdout.split("\n").filter(l => l.trim() && !l.includes("changed"));
|
|
652
|
+
|
|
653
|
+
return {
|
|
654
|
+
content: [{ type: "text" as const, text: JSON.stringify({
|
|
655
|
+
hash: hash?.trim(),
|
|
656
|
+
message: message?.trim(),
|
|
657
|
+
author: author?.trim(),
|
|
658
|
+
date: date?.trim(),
|
|
659
|
+
filesChanged,
|
|
660
|
+
}) }],
|
|
661
|
+
};
|
|
662
|
+
}
|
|
663
|
+
);
|
|
664
|
+
|
|
590
665
|
// ── read_file: cached file reading ─────────────────────────────────────────
|
|
591
666
|
|
|
592
667
|
server.tool(
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
// Session boot cache — precompute common data on first MCP call
// Agents always start with git status + file tree + package.json — do it once

import { spawn } from "child_process";
import { existsSync, readFileSync } from "fs";
import { join } from "path";

// Module-level memo: one boot context per process, keyed by the cwd it was built for.
let bootCache: Record<string, unknown> | null = null;
let bootCwd: string = "";

/**
 * Run a shell command and resolve with its trimmed stdout.
 * Never rejects: stderr is piped but discarded, and a non-zero exit still
 * resolves with whatever stdout was produced.
 * NOTE(review): hard-codes /bin/zsh — fails on systems without zsh (most
 * Linux distros); confirm whether /bin/sh is acceptable for these commands.
 */
function exec(command: string, cwd: string): Promise<string> {
  return new Promise((resolve) => {
    const proc = spawn("/bin/zsh", ["-c", command], { cwd, stdio: ["ignore", "pipe", "pipe"] });
    let out = "";
    // Accumulate stdout chunks; "close" fires once all stdio streams have ended.
    proc.stdout?.on("data", (d: Buffer) => { out += d.toString(); });
    proc.on("close", () => resolve(out.trim()));
  });
}
|
|
19
|
+
|
|
20
|
+
/**
 * Get or build session boot context.
 *
 * Gathers in one call what agents typically query at session start: git
 * branch / dirty state / recent commits, package.json name, version, script
 * names and dependency count, and a top-level source directory listing.
 * The result is memoized per cwd for the life of the process; call
 * invalidateBootCache after git operations or file changes to force a rebuild.
 */
export async function getBootContext(cwd: string): Promise<Record<string, unknown>> {
  if (bootCache && bootCwd === cwd) return bootCache;

  // The four probes are independent — run them in parallel.
  const [branch, status, log, srcLs] = await Promise.all([
    exec("git branch --show-current 2>/dev/null", cwd),
    exec("git status --porcelain 2>/dev/null", cwd),
    exec("git log --oneline -8 2>/dev/null", cwd),
    exec("ls -1 src/ 2>/dev/null || ls -1 lib/ 2>/dev/null || echo ''", cwd),
  ]);

  // package.json is read directly (no subprocess); parse failures are
  // deliberately swallowed and leave pkg null.
  let pkg: any = null;
  const pkgPath = join(cwd, "package.json");
  if (existsSync(pkgPath)) {
    try { pkg = JSON.parse(readFileSync(pkgPath, "utf8")); } catch {}
  }

  bootCache = {
    cwd,
    git: {
      // Empty branch output (no repo, or detached HEAD) becomes null.
      branch: branch || null,
      dirty: status.length > 0,
      changedFiles: status.split("\n").filter(l => l.trim()).length,
      // Keep at most 5 of the 8 fetched commits, parsed into {hash, message};
      // lines that don't match the oneline format are dropped.
      recentCommits: log.split("\n").filter(l => l.trim()).slice(0, 5).map(l => {
        const m = l.match(/^([a-f0-9]+)\s+(.+)$/);
        return m ? { hash: m[1], message: m[2] } : null;
      }).filter(Boolean),
    },
    project: pkg ? {
      name: pkg.name,
      version: pkg.version,
      scripts: pkg.scripts ? Object.keys(pkg.scripts) : [],
      deps: pkg.dependencies ? Object.keys(pkg.dependencies).length : 0,
    } : null,
    sourceFiles: srcLs.split("\n").filter(l => l.trim()),
  };
  bootCwd = cwd;

  return bootCache;
}
|
|
60
|
+
|
|
61
|
+
/**
 * Invalidate boot cache (call after git operations or file changes).
 * The next getBootContext call will re-run all probes.
 */
export function invalidateBootCache(): void {
  bootCache = null;
}
|