@hasna/terminal 0.7.6 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/file-index.js +62 -0
- package/dist/line-dedup.js +59 -0
- package/dist/mcp/server.js +6 -0
- package/dist/session-boot.js +59 -0
- package/package.json +1 -1
- package/src/file-index.ts +93 -0
- package/src/line-dedup.ts +66 -0
- package/src/mcp/server.ts +12 -0
- package/src/session-boot.ts +64 -0
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
// Pre-computed file index — build once, serve search from memory
|
|
2
|
+
// Eliminates subprocess spawning for repeat file queries
|
|
3
|
+
import { spawn } from "child_process";
|
|
4
|
+
// ── Module-level index cache state ──
// Parsed file entries for `indexCwd`, or null when no index has been built yet.
let index = null;
// Working directory the current index was built for.
let indexCwd = "";
// Epoch ms of the last index build; compared against INDEX_TTL for freshness.
let indexTime = 0;
// NOTE(review): never used in this file — presumably reserved for fs.watch
// based invalidation; confirm before removing.
let watcher = null;
// Rebuild the index when it is older than this.
const INDEX_TTL = 30_000; // 30 seconds
|
|
9
|
+
/**
 * Run a shell command and resolve with its raw stdout.
 *
 * Never rejects: a failing command resolves with whatever stdout was
 * produced, and a failed spawn resolves with "". stderr is piped but
 * intentionally discarded.
 */
function exec(command, cwd) {
    return new Promise((resolve) => {
        const proc = spawn("/bin/zsh", ["-c", command], { cwd, stdio: ["ignore", "pipe", "pipe"] });
        let out = "";
        proc.stdout?.on("data", (d) => { out += d.toString(); });
        proc.on("close", () => resolve(out));
        // Fix: without this handler, a missing /bin/zsh emits an unhandled
        // 'error' event on the ChildProcess and crashes the whole process
        // instead of degrading to empty output.
        proc.on("error", () => resolve(out));
    });
}
|
|
17
|
+
/**
 * Build or return cached file index.
 *
 * Serves the in-memory index when it was built for the same `cwd` within
 * the last INDEX_TTL ms; otherwise rebuilds it with a single `find` pass
 * that skips node_modules/.git/dist/.next/build directories.
 */
export async function getFileIndex(cwd) {
    // Return cached if fresh
    if (index && indexCwd === cwd && Date.now() - indexTime < INDEX_TTL) {
        return index;
    }
    const raw = await exec("find . -type f -not -path '*/node_modules/*' -not -path '*/.git/*' -not -path '*/dist/*' -not -path '*/.next/*' -not -path '*/build/*' 2>/dev/null", cwd);
    // One entry per non-blank line of `find` output.
    index = raw.split("\n").filter(l => l.trim()).map(p => {
        const path = p.trim();
        const parts = path.split("/");
        const name = parts[parts.length - 1] ?? path;
        const dir = parts.slice(0, -1).join("/") || ".";
        // NOTE(review): dotfiles like ".gitignore" yield ext ".gitignore" — confirm intended.
        const ext = name.includes(".") ? "." + name.split(".").pop() : "";
        return { path, dir, name, ext };
    });
    indexCwd = cwd;
    indexTime = Date.now();
    return index;
}
|
|
36
|
+
/**
 * Search file index by glob pattern (in-memory, no subprocess).
 *
 * Supports `*` (any run of characters) and `?` (one character); matching is
 * case-insensitive and anchored, tested against both name and full path.
 */
export async function searchIndex(cwd, pattern) {
    const idx = await getFileIndex(cwd);
    const regex = globToRegExp(pattern);
    return idx.filter(e => regex.test(e.name) || regex.test(e.path)).map(e => e.path);
}
/** Convert a glob pattern to an anchored, case-insensitive RegExp. */
function globToRegExp(pattern) {
    // Fix: escape every regex metacharacter (except the glob wildcards we
    // translate below). The original escaped only ".", so a pattern such as
    // "util(v2).ts" threw a SyntaxError and "a+b.js" matched unintended names.
    const escaped = pattern.replace(/[.+^${}()|[\]\\]/g, "\\$&");
    return new RegExp("^" + escaped.replace(/\*/g, ".*").replace(/\?/g, ".") + "$", "i");
}
|
|
47
|
+
/**
 * Get file index stats: total file count plus counts grouped by file
 * extension and by top-level directory (first two path segments).
 */
export async function indexStats(cwd) {
    const idx = await getFileIndex(cwd);
    const byExt = {};
    const byDir = {};
    for (const e of idx) {
        // Files without an extension are grouped under "(none)".
        byExt[e.ext || "(none)"] = (byExt[e.ext || "(none)"] ?? 0) + 1;
        // e.g. dir "./src/mcp" → topDir "./src"
        const topDir = e.dir.split("/").slice(0, 2).join("/");
        byDir[topDir] = (byDir[topDir] ?? 0) + 1;
    }
    return { totalFiles: idx.length, byExtension: byExt, byDir };
}
|
|
59
|
+
/** Invalidate index — the next getFileIndex() call rebuilds it from `find`. */
export function invalidateIndex() {
    index = null;
}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
// Cross-command line deduplication — track lines already shown to agent
// When new output contains >50% already-seen lines, suppress them
// Normalized (trimmed, lowercased) lines shown so far this session.
const seenLines = new Set();
// Soft cap on history size; oldest insertions are evicted once exceeded.
const MAX_SEEN = 5000;
/** Canonical comparison form: whitespace-trimmed, lowercased. */
function normalize(line) {
    return line.trim().toLowerCase();
}
/**
 * Deduplicate output lines against session history.
 *
 * Short outputs (<5 lines) are recorded but never suppressed. For longer
 * outputs, already-seen lines are stripped ONLY when they make up more than
 * half of the output; otherwise the full text is returned. Blank lines are
 * always preserved and never counted.
 */
export function dedup(output) {
    const lines = output.split("\n");
    if (lines.length < 5) {
        // Short output — add to seen, don't dedup
        for (const l of lines) {
            if (l.trim())
                seenLines.add(normalize(l));
        }
        return { output, novelCount: lines.length, seenCount: 0, deduplicated: false };
    }
    let novelCount = 0;
    let seenCount = 0;
    const novel = [];
    for (const line of lines) {
        const norm = normalize(line);
        if (!norm) {
            // Keep blank lines so suppressed output preserves its spacing.
            novel.push(line);
            continue;
        }
        if (seenLines.has(norm)) {
            seenCount++;
        }
        else {
            novelCount++;
            novel.push(line);
            seenLines.add(norm);
        }
    }
    // Evict oldest if too large (Set iterates in insertion order).
    if (seenLines.size > MAX_SEEN) {
        const entries = [...seenLines];
        for (let i = 0; i < entries.length - MAX_SEEN; i++) {
            seenLines.delete(entries[i]);
        }
    }
    // Only dedup if >50% were already seen
    if (seenCount > lines.length * 0.5) {
        const result = novel.join("\n");
        return { output: result + `\n(${seenCount} lines already shown, omitted)`, novelCount, seenCount, deduplicated: true };
    }
    // Return the full output. Every non-blank line was already recorded in the
    // loop above, so the original's second "add all to seen" pass was redundant
    // and has been dropped. Bug fix: report the counts actually measured — the
    // original returned { novelCount: lines.length, seenCount: 0 } here even
    // when up to half the lines were repeats.
    return { output, novelCount, seenCount, deduplicated: false };
}
/** Clear dedup history (e.g. at the start of a new session). */
export function clearDedup() {
    seenLines.clear();
}
|
package/dist/mcp/server.js
CHANGED
|
@@ -15,6 +15,7 @@ import { diffOutput } from "../diff-cache.js";
|
|
|
15
15
|
import { processOutput } from "../output-processor.js";
|
|
16
16
|
import { listSessions, getSessionInteractions, getSessionStats } from "../sessions-db.js";
|
|
17
17
|
import { cachedRead } from "../file-cache.js";
|
|
18
|
+
import { getBootContext } from "../session-boot.js";
|
|
18
19
|
import { storeOutput, expandOutput } from "../expand-store.js";
|
|
19
20
|
import { rewriteCommand } from "../command-rewriter.js";
|
|
20
21
|
import { shouldBeLazy, toLazy } from "../lazy-executor.js";
|
|
@@ -416,6 +417,11 @@ export function createServer() {
|
|
|
416
417
|
const sessions = listSessions(limit ?? 20);
|
|
417
418
|
return { content: [{ type: "text", text: JSON.stringify(sessions) }] };
|
|
418
419
|
});
|
|
420
|
+
// ── boot: session start context (replaces first 5 agent commands) ──────────
|
|
421
|
+
server.tool("boot", "Get everything an agent needs on session start in ONE call — git state, project info, source structure. Replaces: git status + git log + cat package.json + ls src/. Cached for the session.", async () => {
|
|
422
|
+
const ctx = await getBootContext(process.cwd());
|
|
423
|
+
return { content: [{ type: "text", text: JSON.stringify(ctx) }] };
|
|
424
|
+
});
|
|
419
425
|
// ── project_overview: orient agent in one call ─────────────────────────────
|
|
420
426
|
server.tool("project_overview", "Get project overview in one call — package.json info, source structure, config files. Replaces: cat package.json + ls src/ + cat tsconfig.json.", {
|
|
421
427
|
path: z.string().optional().describe("Project root (default: cwd)"),
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
// Session boot cache — precompute common data on first MCP call
|
|
2
|
+
// Agents always start with git status + file tree + package.json — do it once
|
|
3
|
+
import { spawn } from "child_process";
|
|
4
|
+
import { existsSync, readFileSync } from "fs";
|
|
5
|
+
import { join } from "path";
|
|
6
|
+
// Cached boot context, built at most once per working directory per session.
let bootCache = null;
// Directory the cached context belongs to.
let bootCwd = "";
|
|
8
|
+
/**
 * Run a shell command and resolve with its trimmed stdout.
 *
 * Never rejects: a failing command resolves with whatever stdout was
 * produced, and a failed spawn resolves with "". stderr is discarded.
 */
function exec(command, cwd) {
    return new Promise((resolve) => {
        const proc = spawn("/bin/zsh", ["-c", command], { cwd, stdio: ["ignore", "pipe", "pipe"] });
        let out = "";
        proc.stdout?.on("data", (d) => { out += d.toString(); });
        proc.on("close", () => resolve(out.trim()));
        // Fix: without this handler, a missing /bin/zsh emits an unhandled
        // 'error' event on the ChildProcess and crashes the whole process.
        proc.on("error", () => resolve(out.trim()));
    });
}
|
|
16
|
+
/**
 * Get or build session boot context.
 *
 * Gathers git state, a summarized package.json, and a top-level source
 * listing in one parallel pass; the result is cached for the session
 * (per working directory) until invalidateBootCache() is called.
 */
export async function getBootContext(cwd) {
    if (bootCache && bootCwd === cwd)
        return bootCache;
    // Each command degrades to "" outside a git repo / without src|lib dirs.
    const [branch, status, log, srcLs] = await Promise.all([
        exec("git branch --show-current 2>/dev/null", cwd),
        exec("git status --porcelain 2>/dev/null", cwd),
        exec("git log --oneline -8 2>/dev/null", cwd),
        exec("ls -1 src/ 2>/dev/null || ls -1 lib/ 2>/dev/null || echo ''", cwd),
    ]);
    let pkg = null;
    const pkgPath = join(cwd, "package.json");
    if (existsSync(pkgPath)) {
        try {
            pkg = JSON.parse(readFileSync(pkgPath, "utf8"));
        }
        // Malformed package.json is deliberately treated as "no package".
        catch { }
    }
    bootCache = {
        cwd,
        git: {
            // "" → null when not inside a git repository.
            branch: branch || null,
            dirty: status.length > 0,
            changedFiles: status.split("\n").filter(l => l.trim()).length,
            // Parse "abc1234 message" oneline entries; keep at most 5.
            recentCommits: log.split("\n").filter(l => l.trim()).slice(0, 5).map(l => {
                const m = l.match(/^([a-f0-9]+)\s+(.+)$/);
                return m ? { hash: m[1], message: m[2] } : null;
            }).filter(Boolean),
        },
        project: pkg ? {
            name: pkg.name,
            version: pkg.version,
            scripts: pkg.scripts ? Object.keys(pkg.scripts) : [],
            deps: pkg.dependencies ? Object.keys(pkg.dependencies).length : 0,
        } : null,
        sourceFiles: srcLs.split("\n").filter(l => l.trim()),
    };
    bootCwd = cwd;
    return bootCache;
}
|
|
56
|
+
/** Invalidate boot cache (call after git operations or file changes). */
export function invalidateBootCache() {
    bootCache = null;
}
|
package/package.json
CHANGED
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
// Pre-computed file index — build once, serve search from memory
|
|
2
|
+
// Eliminates subprocess spawning for repeat file queries
|
|
3
|
+
|
|
4
|
+
import { spawn } from "child_process";
|
|
5
|
+
import { watch, type FSWatcher } from "fs";
|
|
6
|
+
|
|
7
|
+
/** One indexed file: full relative path plus precomputed lookup fields. */
interface FileIndexEntry {
  path: string; // relative path as emitted by `find` (e.g. "./src/a.ts")
  dir: string;  // containing directory, "." for the root
  name: string; // basename
  ext: string;  // extension including leading ".", "" when none
}

// Module-level cache: entries for `indexCwd`, rebuilt when stale.
let index: FileIndexEntry[] | null = null;
// Directory the current index was built for.
let indexCwd: string = "";
// Epoch ms of the last build; compared against INDEX_TTL for freshness.
let indexTime: number = 0;
// NOTE(review): never used in this file — presumably reserved for fs.watch
// based invalidation; confirm before removing (the FSWatcher import likewise).
let watcher: FSWatcher | null = null;

// Rebuild the index when it is older than this.
const INDEX_TTL = 30_000; // 30 seconds
|
|
20
|
+
|
|
21
|
+
function exec(command: string, cwd: string): Promise<string> {
|
|
22
|
+
return new Promise((resolve) => {
|
|
23
|
+
const proc = spawn("/bin/zsh", ["-c", command], { cwd, stdio: ["ignore", "pipe", "pipe"] });
|
|
24
|
+
let out = "";
|
|
25
|
+
proc.stdout?.on("data", (d: Buffer) => { out += d.toString(); });
|
|
26
|
+
proc.on("close", () => resolve(out));
|
|
27
|
+
});
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
/** Build or return cached file index */
|
|
31
|
+
export async function getFileIndex(cwd: string): Promise<FileIndexEntry[]> {
|
|
32
|
+
// Return cached if fresh
|
|
33
|
+
if (index && indexCwd === cwd && Date.now() - indexTime < INDEX_TTL) {
|
|
34
|
+
return index;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
const raw = await exec(
|
|
38
|
+
"find . -type f -not -path '*/node_modules/*' -not -path '*/.git/*' -not -path '*/dist/*' -not -path '*/.next/*' -not -path '*/build/*' 2>/dev/null",
|
|
39
|
+
cwd
|
|
40
|
+
);
|
|
41
|
+
|
|
42
|
+
index = raw.split("\n").filter(l => l.trim()).map(p => {
|
|
43
|
+
const path = p.trim();
|
|
44
|
+
const parts = path.split("/");
|
|
45
|
+
const name = parts[parts.length - 1] ?? path;
|
|
46
|
+
const dir = parts.slice(0, -1).join("/") || ".";
|
|
47
|
+
const ext = name.includes(".") ? "." + name.split(".").pop() : "";
|
|
48
|
+
return { path, dir, name, ext };
|
|
49
|
+
});
|
|
50
|
+
|
|
51
|
+
indexCwd = cwd;
|
|
52
|
+
indexTime = Date.now();
|
|
53
|
+
|
|
54
|
+
return index;
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
/** Search file index by glob pattern (in-memory, no subprocess) */
|
|
58
|
+
export async function searchIndex(cwd: string, pattern: string): Promise<string[]> {
|
|
59
|
+
const idx = await getFileIndex(cwd);
|
|
60
|
+
|
|
61
|
+
// Convert glob to regex
|
|
62
|
+
const regex = new RegExp(
|
|
63
|
+
"^" + pattern
|
|
64
|
+
.replace(/\./g, "\\.")
|
|
65
|
+
.replace(/\*/g, ".*")
|
|
66
|
+
.replace(/\?/g, ".")
|
|
67
|
+
+ "$",
|
|
68
|
+
"i"
|
|
69
|
+
);
|
|
70
|
+
|
|
71
|
+
return idx.filter(e => regex.test(e.name) || regex.test(e.path)).map(e => e.path);
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
/** Get file index stats */
|
|
75
|
+
export async function indexStats(cwd: string): Promise<{ totalFiles: number; byExtension: Record<string, number>; byDir: Record<string, number> }> {
|
|
76
|
+
const idx = await getFileIndex(cwd);
|
|
77
|
+
|
|
78
|
+
const byExt: Record<string, number> = {};
|
|
79
|
+
const byDir: Record<string, number> = {};
|
|
80
|
+
|
|
81
|
+
for (const e of idx) {
|
|
82
|
+
byExt[e.ext || "(none)"] = (byExt[e.ext || "(none)"] ?? 0) + 1;
|
|
83
|
+
const topDir = e.dir.split("/").slice(0, 2).join("/");
|
|
84
|
+
byDir[topDir] = (byDir[topDir] ?? 0) + 1;
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
return { totalFiles: idx.length, byExtension: byExt, byDir };
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
/** Invalidate index — the next getFileIndex() call rebuilds it from `find`. */
export function invalidateIndex(): void {
  index = null;
}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
// Cross-command line deduplication — track lines already shown to agent
|
|
2
|
+
// When new output contains >50% already-seen lines, suppress them
|
|
3
|
+
|
|
4
|
+
const seenLines = new Set<string>();
|
|
5
|
+
const MAX_SEEN = 5000;
|
|
6
|
+
|
|
7
|
+
function normalize(line: string): string {
|
|
8
|
+
return line.trim().toLowerCase();
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
export interface DedupResult {
|
|
12
|
+
output: string;
|
|
13
|
+
novelCount: number;
|
|
14
|
+
seenCount: number;
|
|
15
|
+
deduplicated: boolean;
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
/** Deduplicate output lines against session history */
|
|
19
|
+
export function dedup(output: string): DedupResult {
|
|
20
|
+
const lines = output.split("\n");
|
|
21
|
+
if (lines.length < 5) {
|
|
22
|
+
// Short output — add to seen, don't dedup
|
|
23
|
+
for (const l of lines) { if (l.trim()) seenLines.add(normalize(l)); }
|
|
24
|
+
return { output, novelCount: lines.length, seenCount: 0, deduplicated: false };
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
let novelCount = 0;
|
|
28
|
+
let seenCount = 0;
|
|
29
|
+
const novel: string[] = [];
|
|
30
|
+
|
|
31
|
+
for (const line of lines) {
|
|
32
|
+
const norm = normalize(line);
|
|
33
|
+
if (!norm) { novel.push(line); continue; }
|
|
34
|
+
|
|
35
|
+
if (seenLines.has(norm)) {
|
|
36
|
+
seenCount++;
|
|
37
|
+
} else {
|
|
38
|
+
novelCount++;
|
|
39
|
+
novel.push(line);
|
|
40
|
+
seenLines.add(norm);
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
// Evict oldest if too large
|
|
45
|
+
if (seenLines.size > MAX_SEEN) {
|
|
46
|
+
const entries = [...seenLines];
|
|
47
|
+
for (let i = 0; i < entries.length - MAX_SEEN; i++) {
|
|
48
|
+
seenLines.delete(entries[i]);
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
// Only dedup if >50% were already seen
|
|
53
|
+
if (seenCount > lines.length * 0.5) {
|
|
54
|
+
const result = novel.join("\n");
|
|
55
|
+
return { output: result + `\n(${seenCount} lines already shown, omitted)`, novelCount, seenCount, deduplicated: true };
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
// Add all to seen but return full output
|
|
59
|
+
for (const l of lines) { if (l.trim()) seenLines.add(normalize(l)); }
|
|
60
|
+
return { output, novelCount: lines.length, seenCount: 0, deduplicated: false };
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
/** Clear dedup history */
|
|
64
|
+
export function clearDedup(): void {
|
|
65
|
+
seenLines.clear();
|
|
66
|
+
}
|
package/src/mcp/server.ts
CHANGED
|
@@ -16,6 +16,7 @@ import { diffOutput } from "../diff-cache.js";
|
|
|
16
16
|
import { processOutput } from "../output-processor.js";
|
|
17
17
|
import { listSessions, getSessionInteractions, getSessionStats } from "../sessions-db.js";
|
|
18
18
|
import { cachedRead, cacheStats } from "../file-cache.js";
|
|
19
|
+
import { getBootContext } from "../session-boot.js";
|
|
19
20
|
import { storeOutput, expandOutput } from "../expand-store.js";
|
|
20
21
|
import { rewriteCommand } from "../command-rewriter.js";
|
|
21
22
|
import { shouldBeLazy, toLazy } from "../lazy-executor.js";
|
|
@@ -587,6 +588,17 @@ export function createServer(): McpServer {
|
|
|
587
588
|
}
|
|
588
589
|
);
|
|
589
590
|
|
|
591
|
+
// ── boot: session start context (replaces first 5 agent commands) ──────────
|
|
592
|
+
|
|
593
|
+
server.tool(
|
|
594
|
+
"boot",
|
|
595
|
+
"Get everything an agent needs on session start in ONE call — git state, project info, source structure. Replaces: git status + git log + cat package.json + ls src/. Cached for the session.",
|
|
596
|
+
async () => {
|
|
597
|
+
const ctx = await getBootContext(process.cwd());
|
|
598
|
+
return { content: [{ type: "text" as const, text: JSON.stringify(ctx) }] };
|
|
599
|
+
}
|
|
600
|
+
);
|
|
601
|
+
|
|
590
602
|
// ── project_overview: orient agent in one call ─────────────────────────────
|
|
591
603
|
|
|
592
604
|
server.tool(
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
// Session boot cache — precompute common data on first MCP call
|
|
2
|
+
// Agents always start with git status + file tree + package.json — do it once
|
|
3
|
+
|
|
4
|
+
import { spawn } from "child_process";
|
|
5
|
+
import { existsSync, readFileSync } from "fs";
|
|
6
|
+
import { join } from "path";
|
|
7
|
+
|
|
8
|
+
// Cached boot context, built at most once per working directory per session.
let bootCache: Record<string, unknown> | null = null;
// Directory the cached context belongs to.
let bootCwd: string = "";
|
|
10
|
+
|
|
11
|
+
function exec(command: string, cwd: string): Promise<string> {
|
|
12
|
+
return new Promise((resolve) => {
|
|
13
|
+
const proc = spawn("/bin/zsh", ["-c", command], { cwd, stdio: ["ignore", "pipe", "pipe"] });
|
|
14
|
+
let out = "";
|
|
15
|
+
proc.stdout?.on("data", (d: Buffer) => { out += d.toString(); });
|
|
16
|
+
proc.on("close", () => resolve(out.trim()));
|
|
17
|
+
});
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
/** Get or build session boot context */
|
|
21
|
+
export async function getBootContext(cwd: string): Promise<Record<string, unknown>> {
|
|
22
|
+
if (bootCache && bootCwd === cwd) return bootCache;
|
|
23
|
+
|
|
24
|
+
const [branch, status, log, srcLs] = await Promise.all([
|
|
25
|
+
exec("git branch --show-current 2>/dev/null", cwd),
|
|
26
|
+
exec("git status --porcelain 2>/dev/null", cwd),
|
|
27
|
+
exec("git log --oneline -8 2>/dev/null", cwd),
|
|
28
|
+
exec("ls -1 src/ 2>/dev/null || ls -1 lib/ 2>/dev/null || echo ''", cwd),
|
|
29
|
+
]);
|
|
30
|
+
|
|
31
|
+
let pkg: any = null;
|
|
32
|
+
const pkgPath = join(cwd, "package.json");
|
|
33
|
+
if (existsSync(pkgPath)) {
|
|
34
|
+
try { pkg = JSON.parse(readFileSync(pkgPath, "utf8")); } catch {}
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
bootCache = {
|
|
38
|
+
cwd,
|
|
39
|
+
git: {
|
|
40
|
+
branch: branch || null,
|
|
41
|
+
dirty: status.length > 0,
|
|
42
|
+
changedFiles: status.split("\n").filter(l => l.trim()).length,
|
|
43
|
+
recentCommits: log.split("\n").filter(l => l.trim()).slice(0, 5).map(l => {
|
|
44
|
+
const m = l.match(/^([a-f0-9]+)\s+(.+)$/);
|
|
45
|
+
return m ? { hash: m[1], message: m[2] } : null;
|
|
46
|
+
}).filter(Boolean),
|
|
47
|
+
},
|
|
48
|
+
project: pkg ? {
|
|
49
|
+
name: pkg.name,
|
|
50
|
+
version: pkg.version,
|
|
51
|
+
scripts: pkg.scripts ? Object.keys(pkg.scripts) : [],
|
|
52
|
+
deps: pkg.dependencies ? Object.keys(pkg.dependencies).length : 0,
|
|
53
|
+
} : null,
|
|
54
|
+
sourceFiles: srcLs.split("\n").filter(l => l.trim()),
|
|
55
|
+
};
|
|
56
|
+
bootCwd = cwd;
|
|
57
|
+
|
|
58
|
+
return bootCache;
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
/** Invalidate boot cache (call after git operations or file changes). */
export function invalidateBootCache(): void {
  bootCache = null;
}
|