@hasna/terminal 0.5.2 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/scheduled_tasks.lock +1 -1
- package/dist/cli.js +83 -0
- package/dist/command-rewriter.js +64 -0
- package/dist/compression.js +2 -2
- package/dist/diff-cache.js +21 -1
- package/dist/expand-store.js +38 -0
- package/dist/lazy-executor.js +41 -0
- package/dist/mcp/server.js +70 -10
- package/dist/noise-filter.js +70 -0
- package/dist/output-processor.js +26 -0
- package/dist/search/semantic.js +39 -1
- package/package.json +1 -1
- package/src/cli.tsx +84 -0
- package/src/command-rewriter.ts +80 -0
- package/src/compression.test.ts +2 -3
- package/src/compression.ts +2 -2
- package/src/diff-cache.ts +25 -2
- package/src/expand-store.ts +51 -0
- package/src/hooks/claude-hook.sh +52 -0
- package/src/lazy-executor.ts +57 -0
- package/src/mcp/server.ts +86 -11
- package/src/noise-filter.ts +83 -0
- package/src/output-processor.ts +39 -1
- package/src/search/semantic.ts +35 -1
- package/dist/compression.test.js +0 -42
- package/dist/diff-cache.test.js +0 -27
- package/dist/economy.test.js +0 -13
- package/dist/parsers/parsers.test.js +0 -136
- package/dist/providers/providers.test.js +0 -14
- package/dist/recipes/recipes.test.js +0 -36
- package/dist/search/search.test.js +0 -22
|
@@ -1 +1 @@
|
|
|
1
|
-
{"sessionId":"c1e414c7-f1a5-4b9e-bcc4-64c451584cb8","pid":
|
|
1
|
+
{"sessionId":"c1e414c7-f1a5-4b9e-bcc4-64c451584cb8","pid":1236,"acquiredAt":1773584959902}
|
package/dist/cli.js
CHANGED
|
@@ -19,6 +19,89 @@ if (args[0] === "mcp") {
|
|
|
19
19
|
console.log("Usage: t mcp [serve|install]");
|
|
20
20
|
}
|
|
21
21
|
}
|
|
22
|
+
// ── Hook commands ────────────────────────────────────────────────────────────
|
|
23
|
+
else if (args[0] === "hook") {
|
|
24
|
+
const { existsSync, mkdirSync, writeFileSync, readFileSync } = await import("fs");
|
|
25
|
+
const { join, dirname } = await import("path");
|
|
26
|
+
const { execSync } = await import("child_process");
|
|
27
|
+
const sub = args[1];
|
|
28
|
+
const target = args[2]; // --claude, --codex
|
|
29
|
+
if (sub === "install" && (target === "--claude" || target === "claude")) {
|
|
30
|
+
// Find the hook script
|
|
31
|
+
const hookSrc = join(dirname(new URL(import.meta.url).pathname), "hooks", "claude-hook.sh");
|
|
32
|
+
const hookDest = join(process.env.HOME ?? "~", ".claude", "hooks", "PostToolUse-open-terminal.sh");
|
|
33
|
+
// Copy hook script
|
|
34
|
+
const destDir = dirname(hookDest);
|
|
35
|
+
if (!existsSync(destDir))
|
|
36
|
+
mkdirSync(destDir, { recursive: true });
|
|
37
|
+
// Generate hook with resolved paths
|
|
38
|
+
const terminalBin = execSync("which terminal", { encoding: "utf8" }).trim();
|
|
39
|
+
const hookScript = `#!/usr/bin/env bash
|
|
40
|
+
# open-terminal PostToolUse hook — compresses Bash output
|
|
41
|
+
# Installed by: t hook install --claude
|
|
42
|
+
|
|
43
|
+
if [ "$TOOL_NAME" != "Bash" ]; then exit 0; fi
|
|
44
|
+
OUTPUT=$(cat)
|
|
45
|
+
if [ \${#OUTPUT} -lt 500 ]; then echo "$OUTPUT"; exit 0; fi
|
|
46
|
+
|
|
47
|
+
LINE_COUNT=$(echo "$OUTPUT" | wc -l | tr -d ' ')
|
|
48
|
+
if [ "$LINE_COUNT" -gt 15 ]; then
|
|
49
|
+
COMPRESSED=$(echo "$OUTPUT" | bun -e "
|
|
50
|
+
import{compress,stripAnsi}from'${dirname(terminalBin)}/../lib/node_modules/@hasna/terminal/dist/compression.js';
|
|
51
|
+
import{stripNoise}from'${dirname(terminalBin)}/../lib/node_modules/@hasna/terminal/dist/noise-filter.js';
|
|
52
|
+
let i='';process.stdin.on('data',d=>i+=d);process.stdin.on('end',()=>{
|
|
53
|
+
const c=stripNoise(stripAnsi(i)).cleaned;
|
|
54
|
+
const r=compress('bash',c,{maxTokens:500});
|
|
55
|
+
console.log(r.tokensSaved>50?r.content:c);
|
|
56
|
+
});
|
|
57
|
+
" 2>/dev/null)
|
|
58
|
+
if [ $? -eq 0 ] && [ -n "$COMPRESSED" ]; then echo "$COMPRESSED"; exit 0; fi
|
|
59
|
+
fi
|
|
60
|
+
echo "$OUTPUT"
|
|
61
|
+
`;
|
|
62
|
+
writeFileSync(hookDest, hookScript, { mode: 0o755 });
|
|
63
|
+
// Register in Claude settings
|
|
64
|
+
const settingsPath = join(process.env.HOME ?? "~", ".claude", "settings.json");
|
|
65
|
+
let settings = {};
|
|
66
|
+
if (existsSync(settingsPath)) {
|
|
67
|
+
try {
|
|
68
|
+
settings = JSON.parse(readFileSync(settingsPath, "utf8"));
|
|
69
|
+
}
|
|
70
|
+
catch { }
|
|
71
|
+
}
|
|
72
|
+
if (!settings.hooks)
|
|
73
|
+
settings.hooks = {};
|
|
74
|
+
if (!settings.hooks.PostToolUse)
|
|
75
|
+
settings.hooks.PostToolUse = [];
|
|
76
|
+
const hookEntry = { command: hookDest, event: "PostToolUse", tools: ["Bash"] };
|
|
77
|
+
const exists = settings.hooks.PostToolUse.some((h) => h.command?.includes("open-terminal"));
|
|
78
|
+
if (!exists) {
|
|
79
|
+
settings.hooks.PostToolUse.push(hookEntry);
|
|
80
|
+
writeFileSync(settingsPath, JSON.stringify(settings, null, 2));
|
|
81
|
+
}
|
|
82
|
+
console.log("✓ Installed open-terminal PostToolUse hook for Claude Code");
|
|
83
|
+
console.log(" Hook: " + hookDest);
|
|
84
|
+
console.log(" Bash output >15 lines will be auto-compressed");
|
|
85
|
+
}
|
|
86
|
+
else if (sub === "uninstall") {
|
|
87
|
+
const settingsPath = join(process.env.HOME ?? "~", ".claude", "settings.json");
|
|
88
|
+
if (existsSync(settingsPath)) {
|
|
89
|
+
try {
|
|
90
|
+
const settings = JSON.parse(readFileSync(settingsPath, "utf8"));
|
|
91
|
+
if (settings.hooks?.PostToolUse) {
|
|
92
|
+
settings.hooks.PostToolUse = settings.hooks.PostToolUse.filter((h) => !h.command?.includes("open-terminal"));
|
|
93
|
+
writeFileSync(settingsPath, JSON.stringify(settings, null, 2));
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
catch { }
|
|
97
|
+
}
|
|
98
|
+
console.log("✓ Uninstalled open-terminal hook");
|
|
99
|
+
}
|
|
100
|
+
else {
|
|
101
|
+
console.log("Usage: t hook install --claude");
|
|
102
|
+
console.log(" t hook uninstall");
|
|
103
|
+
}
|
|
104
|
+
}
|
|
22
105
|
// ── Recipe commands ──────────────────────────────────────────────────────────
|
|
23
106
|
else if (args[0] === "recipe") {
|
|
24
107
|
const { listRecipes, getRecipe, createRecipe, deleteRecipe, listCollections, createCollection } = await import("./recipes/storage.js");
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
// Command rewriter — auto-optimize commands to produce less output.
// A rule only fires when the rewritten command is semantically identical
// (or strictly narrower in output) to the original; when a safe rewrite is
// not possible the rule returns the command unchanged and is skipped.
//
// Each rule is { pattern, rewrite(match, cmd), reason }. The first rule whose
// rewrite actually changes the command wins.
const rules = [
    // find … | grep -v node_modules → find … -not -path (filter in find itself, no pipe)
    {
        pattern: /find\s+(\S+)\s+(.*?)\|\s*grep\s+-v\s+node_modules/,
        rewrite: (m, cmd) => {
            // Join the pieces explicitly so "-not" is never glued onto the
            // previous argument when there is no space before the pipe.
            const middle = m[2].trim();
            const replacement = ["find", m[1], middle, "-not -path '*/node_modules/*'"]
                .filter(Boolean)
                .join(" ");
            return cmd.replace(m[0], replacement);
        },
        reason: "avoid pipe, filter in-kernel",
    },
    // cat file | grep X → grep X file (useless use of cat)
    {
        pattern: /cat\s+(\S+)\s*\|\s*grep\s+(.*)$/,
        rewrite: (m, cmd) => {
            // A pipe inside the grep arguments means either a further pipeline
            // stage (… | head) or a quoted alternation pattern ('a|b'); a naive
            // rewrite would mangle both, so skip and leave the command alone.
            if (m[2].includes("|"))
                return cmd;
            return `grep ${m[2]} ${m[1]}`;
        },
        reason: "useless cat",
    },
    // find in cwd without an exclusion → auto-exclude node_modules and .git.
    // (The exclusion guard lives in rewrite(); a negative lookahead placed
    // after a greedy ".*" always succeeds, so it is not used in the pattern.)
    {
        pattern: /^find\s+\.\s+(.*)/,
        rewrite: (m, cmd) => {
            if (cmd.includes("node_modules") || cmd.includes("-not -path"))
                return cmd;
            return cmd.replace(/^find\s+\.\s+/, "find . -not -path '*/node_modules/*' -not -path '*/.git/*' ");
        },
        reason: "auto-exclude node_modules and .git",
    },
    // Bare "git log" → bounded one-line log
    {
        pattern: /^git\s+log\s*$/,
        rewrite: () => "git log --oneline -20",
        reason: "prevent unbounded log output",
    },
    // Bare "git diff" → stat overview
    {
        pattern: /^git\s+diff\s*$/,
        rewrite: () => "git diff --stat",
        reason: "stat overview is usually sufficient",
    },
    // Bare "npm ls" → top-level only
    {
        pattern: /^npm\s+ls\s*$/,
        rewrite: () => "npm ls --depth=0",
        reason: "full tree is massive, top-level usually enough",
    },
    // Bare "ps aux" → top memory consumers only.
    // NOTE(review): --sort is procps (GNU) specific and may fail on BSD/macOS
    // ps — confirm target platforms.
    {
        pattern: /^ps\s+aux\s*$/,
        rewrite: () => "ps aux --sort=-%mem | head -20",
        reason: "full process list is noise, show top consumers",
    },
];
/**
 * Rewrite a shell command so it produces less output, when a rule applies.
 * @param {string} cmd - Raw shell command line.
 * @returns {{original: string, rewritten: string, changed: boolean, reason?: string}}
 *   `changed` is true only when some rule produced a different command;
 *   otherwise `rewritten` equals the trimmed original.
 */
export function rewriteCommand(cmd) {
    const trimmed = cmd.trim();
    for (const rule of rules) {
        const match = trimmed.match(rule.pattern);
        if (match) {
            const rewritten = rule.rewrite(match, trimmed);
            if (rewritten !== trimmed) {
                return { original: trimmed, rewritten, changed: true, reason: rule.reason };
            }
        }
    }
    return { original: trimmed, rewritten: trimmed, changed: false };
}
|
package/dist/compression.js
CHANGED
|
@@ -74,8 +74,8 @@ export function compress(command, output, options = {}) {
|
|
|
74
74
|
const json = JSON.stringify(parsed.data, null, format === "summary" ? 0 : 2);
|
|
75
75
|
const savings = tokenSavings(output, parsed.data);
|
|
76
76
|
const compressedTokens = estimateTokens(json);
|
|
77
|
-
//
|
|
78
|
-
if (!maxTokens || compressedTokens <= maxTokens) {
|
|
77
|
+
// ONLY use JSON if it actually saves tokens (never return larger output)
|
|
78
|
+
if (savings.saved > 0 && (!maxTokens || compressedTokens <= maxTokens)) {
|
|
79
79
|
return {
|
|
80
80
|
content: json,
|
|
81
81
|
format: "json",
|
package/dist/diff-cache.js
CHANGED
|
@@ -65,11 +65,31 @@ export function diffOutput(command, cwd, output) {
|
|
|
65
65
|
};
|
|
66
66
|
}
|
|
67
67
|
const diff = lineDiff(prev.output, output);
|
|
68
|
+
const total = diff.added.length + diff.removed.length + diff.unchanged;
|
|
69
|
+
const similarity = total > 0 ? diff.unchanged / total : 0;
|
|
70
|
+
// Fuzzy threshold: if >80% similar, return diff-only (massive token savings)
|
|
71
|
+
const fullTokens = estimateTokens(output);
|
|
72
|
+
if (similarity > 0.8 && diff.added.length + diff.removed.length > 0) {
|
|
73
|
+
const diffContent = [
|
|
74
|
+
...diff.added.map(l => `+ ${l}`),
|
|
75
|
+
...diff.removed.map(l => `- ${l}`),
|
|
76
|
+
].join("\n");
|
|
77
|
+
const diffTokens = estimateTokens(diffContent);
|
|
78
|
+
return {
|
|
79
|
+
full: output,
|
|
80
|
+
hasPrevious: true,
|
|
81
|
+
added: diff.added,
|
|
82
|
+
removed: diff.removed,
|
|
83
|
+
diffSummary: `${Math.round(similarity * 100)}% similar — ${summarizeDiff(diff)}`,
|
|
84
|
+
unchanged: false,
|
|
85
|
+
tokensSaved: Math.max(0, fullTokens - diffTokens),
|
|
86
|
+
};
|
|
87
|
+
}
|
|
88
|
+
// Less than 80% similar — return full output with diff info
|
|
68
89
|
const diffContent = [
|
|
69
90
|
...diff.added.map(l => `+ ${l}`),
|
|
70
91
|
...diff.removed.map(l => `- ${l}`),
|
|
71
92
|
].join("\n");
|
|
72
|
-
const fullTokens = estimateTokens(output);
|
|
73
93
|
const diffTokens = estimateTokens(diffContent);
|
|
74
94
|
return {
|
|
75
95
|
full: output,
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
// Expand store — retains full command output for progressive disclosure.
// Agents receive a summary first and only call expand(key) when they
// actually need the complete text.

// Hard cap on retained outputs; the oldest entry is evicted past this.
const CAPACITY = 50;
// Insertion-ordered map of key → { command, output, timestamp }.
const cache = new Map();
// Monotonic counter used to mint unique retrieval keys.
let nextId = 0;

/** Store full output under a fresh key and return that key. */
export function storeOutput(command, output) {
    nextId += 1;
    const key = `out_${nextId}`;
    // Map iterates in insertion order, so the first key is the oldest entry.
    if (cache.size >= CAPACITY) {
        for (const oldest of cache.keys()) {
            cache.delete(oldest);
            break;
        }
    }
    cache.set(key, { command, output, timestamp: Date.now() });
    return key;
}

/**
 * Retrieve full output by key.
 * When `grep` is given it is treated as a case-insensitive regular
 * expression and only matching lines are returned.
 */
export function expandOutput(key, grep) {
    const entry = cache.get(key);
    if (entry === undefined)
        return { found: false };
    let text = entry.output;
    if (grep) {
        const matcher = new RegExp(grep, "i");
        const matching = [];
        for (const line of text.split("\n")) {
            if (matcher.test(line))
                matching.push(line);
        }
        text = matching.join("\n");
    }
    return { found: true, output: text, lines: text.split("\n").length };
}

/** List metadata for every stored output: key, truncated command, line count, age (ms). */
export function listStored() {
    const summaries = [];
    for (const [key, entry] of cache) {
        summaries.push({
            key,
            command: entry.command.slice(0, 60),
            lines: entry.output.split("\n").length,
            age: Date.now() - entry.timestamp,
        });
    }
    return summaries;
}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
// Lazy execution — for large result sets, return count + sample + categories
// instead of full output. The agent then requests slices on demand.
import { dirname } from "path";

// Number of non-blank lines above which output switches to lazy mode.
const LAZY_THRESHOLD = 100;

/** Split text into lines, dropping blank/whitespace-only ones. */
function nonBlankLines(text) {
    const kept = [];
    for (const line of text.split("\n")) {
        if (line.trim() !== "")
            kept.push(line);
    }
    return kept;
}

/** True when the output is large enough to warrant lazy mode. */
export function shouldBeLazy(output) {
    return nonBlankLines(output).length > LAZY_THRESHOLD;
}

/**
 * Convert large output to lazy form: total count, first-20 sample, and —
 * when most lines look like file paths — counts grouped by top-level
 * directory. `command` is accepted for interface compatibility but unused.
 */
export function toLazy(output, command) {
    const lines = nonBlankLines(output);
    const sample = lines.slice(0, 20);
    const categories = {};
    // Heuristic: treat the output as file paths when >50% of lines contain "/".
    let pathLike = 0;
    for (const line of lines) {
        if (line.includes("/"))
            pathLike += 1;
    }
    if (pathLike > lines.length * 0.5) {
        for (const line of lines) {
            const dir = dirname(line.trim()) || ".";
            // Group by the first two path segments.
            const topDir = dir.split("/").slice(0, 2).join("/");
            categories[topDir] = (categories[topDir] ?? 0) + 1;
        }
    }
    const hasCategories = Object.keys(categories).length > 1;
    return {
        lazy: true,
        count: lines.length,
        sample,
        categories: hasCategories ? categories : undefined,
        hint: `${lines.length} results. Showing first 20. Use offset/limit to paginate, or narrow your search.`,
    };
}

/** Return the window [offset, offset+limit) of non-blank lines plus paging info. */
export function getSlice(output, offset, limit) {
    const everything = nonBlankLines(output);
    return {
        lines: everything.slice(offset, offset + limit),
        total: everything.length,
        hasMore: offset + limit < everything.length,
    };
}
|
package/dist/mcp/server.js
CHANGED
|
@@ -4,6 +4,7 @@ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"
|
|
|
4
4
|
import { z } from "zod";
|
|
5
5
|
import { spawn } from "child_process";
|
|
6
6
|
import { compress, stripAnsi } from "../compression.js";
|
|
7
|
+
import { stripNoise } from "../noise-filter.js";
|
|
7
8
|
import { parseOutput, tokenSavings, estimateTokens } from "../parsers/index.js";
|
|
8
9
|
import { summarizeOutput } from "../ai.js";
|
|
9
10
|
import { searchFiles, searchContent, semanticSearch } from "../search/index.js";
|
|
@@ -14,13 +15,19 @@ import { diffOutput } from "../diff-cache.js";
|
|
|
14
15
|
import { processOutput } from "../output-processor.js";
|
|
15
16
|
import { listSessions, getSessionInteractions, getSessionStats } from "../sessions-db.js";
|
|
16
17
|
import { cachedRead } from "../file-cache.js";
|
|
18
|
+
import { storeOutput, expandOutput } from "../expand-store.js";
|
|
19
|
+
import { rewriteCommand } from "../command-rewriter.js";
|
|
20
|
+
import { shouldBeLazy, toLazy } from "../lazy-executor.js";
|
|
17
21
|
import { getEconomyStats, recordSaving } from "../economy.js";
|
|
18
22
|
import { captureSnapshot } from "../snapshots.js";
|
|
19
23
|
// ── helpers ──────────────────────────────────────────────────────────────────
|
|
20
24
|
function exec(command, cwd, timeout) {
|
|
25
|
+
// Auto-optimize command before execution
|
|
26
|
+
const rw = rewriteCommand(command);
|
|
27
|
+
const actualCommand = rw.changed ? rw.rewritten : command;
|
|
21
28
|
return new Promise((resolve) => {
|
|
22
29
|
const start = Date.now();
|
|
23
|
-
const proc = spawn("/bin/zsh", ["-c",
|
|
30
|
+
const proc = spawn("/bin/zsh", ["-c", actualCommand], {
|
|
24
31
|
cwd: cwd ?? process.cwd(),
|
|
25
32
|
stdio: ["ignore", "pipe", "pipe"],
|
|
26
33
|
});
|
|
@@ -35,7 +42,10 @@ function exec(command, cwd, timeout) {
|
|
|
35
42
|
proc.on("close", (code) => {
|
|
36
43
|
if (timer)
|
|
37
44
|
clearTimeout(timer);
|
|
38
|
-
|
|
45
|
+
// Strip noise before returning (npm fund, progress bars, etc.)
|
|
46
|
+
const cleanStdout = stripNoise(stdout).cleaned;
|
|
47
|
+
const cleanStderr = stripNoise(stderr).cleaned;
|
|
48
|
+
resolve({ exitCode: code ?? 0, stdout: cleanStdout, stderr: cleanStderr, duration: Date.now() - start, rewritten: rw.changed ? rw.rewritten : undefined });
|
|
39
49
|
});
|
|
40
50
|
});
|
|
41
51
|
}
|
|
@@ -55,26 +65,41 @@ export function createServer() {
|
|
|
55
65
|
}, async ({ command, cwd, timeout, format, maxTokens }) => {
|
|
56
66
|
const result = await exec(command, cwd, timeout ?? 30000);
|
|
57
67
|
const output = (result.stdout + result.stderr).trim();
|
|
58
|
-
// Raw mode
|
|
68
|
+
// Raw mode — with lazy execution for large results
|
|
59
69
|
if (!format || format === "raw") {
|
|
60
70
|
const clean = stripAnsi(output);
|
|
71
|
+
// Lazy mode: if >100 lines, return count + sample instead of full output
|
|
72
|
+
if (shouldBeLazy(clean)) {
|
|
73
|
+
const lazy = toLazy(clean, command);
|
|
74
|
+
const detailKey = storeOutput(command, clean);
|
|
75
|
+
return {
|
|
76
|
+
content: [{ type: "text", text: JSON.stringify({
|
|
77
|
+
exitCode: result.exitCode, ...lazy, detailKey, duration: result.duration,
|
|
78
|
+
...(result.rewritten ? { rewrittenFrom: command } : {}),
|
|
79
|
+
}) }],
|
|
80
|
+
};
|
|
81
|
+
}
|
|
61
82
|
return {
|
|
62
83
|
content: [{ type: "text", text: JSON.stringify({
|
|
63
84
|
exitCode: result.exitCode, output: clean, duration: result.duration, tokens: estimateTokens(clean),
|
|
85
|
+
...(result.rewritten ? { rewrittenFrom: command } : {}),
|
|
64
86
|
}) }],
|
|
65
87
|
};
|
|
66
88
|
}
|
|
67
|
-
// JSON mode — structured parsing
|
|
89
|
+
// JSON mode — structured parsing (only if it actually saves tokens)
|
|
68
90
|
if (format === "json") {
|
|
69
91
|
const parsed = parseOutput(command, output);
|
|
70
92
|
if (parsed) {
|
|
71
93
|
const savings = tokenSavings(output, parsed.data);
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
94
|
+
if (savings.saved > 0) {
|
|
95
|
+
return {
|
|
96
|
+
content: [{ type: "text", text: JSON.stringify({
|
|
97
|
+
exitCode: result.exitCode, parsed: parsed.data, parser: parsed.parser,
|
|
98
|
+
duration: result.duration, tokensSaved: savings.saved, savingsPercent: savings.percent,
|
|
99
|
+
}) }],
|
|
100
|
+
};
|
|
101
|
+
}
|
|
102
|
+
// JSON was larger — fall through to compression
|
|
78
103
|
}
|
|
79
104
|
}
|
|
80
105
|
// Compressed mode (also fallback for json when no parser matches)
|
|
@@ -121,6 +146,8 @@ export function createServer() {
|
|
|
121
146
|
const result = await exec(command, cwd, timeout ?? 30000);
|
|
122
147
|
const output = (result.stdout + result.stderr).trim();
|
|
123
148
|
const processed = await processOutput(command, output);
|
|
149
|
+
// Progressive disclosure: store full output, return summary + expand key
|
|
150
|
+
const detailKey = output.split("\n").length > 15 ? storeOutput(command, output) : undefined;
|
|
124
151
|
return {
|
|
125
152
|
content: [{ type: "text", text: JSON.stringify({
|
|
126
153
|
exitCode: result.exitCode,
|
|
@@ -130,9 +157,21 @@ export function createServer() {
|
|
|
130
157
|
totalLines: output.split("\n").length,
|
|
131
158
|
tokensSaved: processed.tokensSaved,
|
|
132
159
|
aiProcessed: processed.aiProcessed,
|
|
160
|
+
...(detailKey ? { detailKey, expandable: true } : {}),
|
|
133
161
|
}) }],
|
|
134
162
|
};
|
|
135
163
|
});
|
|
164
|
+
// ── expand: retrieve full output on demand ────────────────────────────────
|
|
165
|
+
server.tool("expand", "Retrieve full output from a previous execute_smart call. Only call this when you need details (e.g., to see failing test errors). Use the detailKey from execute_smart response.", {
|
|
166
|
+
key: z.string().describe("The detailKey from a previous execute_smart response"),
|
|
167
|
+
grep: z.string().optional().describe("Filter output lines by pattern (e.g., 'FAIL', 'error')"),
|
|
168
|
+
}, async ({ key, grep }) => {
|
|
169
|
+
const result = expandOutput(key, grep);
|
|
170
|
+
if (!result.found) {
|
|
171
|
+
return { content: [{ type: "text", text: JSON.stringify({ error: "Output expired or not found" }) }] };
|
|
172
|
+
}
|
|
173
|
+
return { content: [{ type: "text", text: JSON.stringify({ output: result.output, lines: result.lines }) }] };
|
|
174
|
+
});
|
|
136
175
|
// ── browse: list files/dirs as structured JSON ────────────────────────────
|
|
137
176
|
server.tool("browse", "List files and directories as structured JSON. Auto-filters node_modules, .git, dist by default.", {
|
|
138
177
|
path: z.string().optional().describe("Directory path (default: cwd)"),
|
|
@@ -444,6 +483,27 @@ export function createServer() {
|
|
|
444
483
|
content: [{ type: "text", text: JSON.stringify(fileSymbols) }],
|
|
445
484
|
};
|
|
446
485
|
});
|
|
486
|
+
// ── read_symbol: read a function/class by name ─────────────────────────────
|
|
487
|
+
server.tool("read_symbol", "Read a specific function, class, or interface by name from a source file. Returns only the code block — not the entire file. Saves 70-85% tokens vs reading the whole file.", {
|
|
488
|
+
path: z.string().describe("Source file path"),
|
|
489
|
+
name: z.string().describe("Symbol name (function, class, interface)"),
|
|
490
|
+
}, async ({ path: filePath, name }) => {
|
|
491
|
+
const { extractBlock, extractSymbolsFromFile } = await import("../search/semantic.js");
|
|
492
|
+
const block = extractBlock(filePath, name);
|
|
493
|
+
if (!block) {
|
|
494
|
+
// Return available symbols so the agent can pick the right one
|
|
495
|
+
const symbols = extractSymbolsFromFile(filePath);
|
|
496
|
+
const names = symbols.filter(s => s.kind !== "import").map(s => `${s.kind}: ${s.name} (L${s.line})`);
|
|
497
|
+
return { content: [{ type: "text", text: JSON.stringify({
|
|
498
|
+
error: `Symbol '${name}' not found`,
|
|
499
|
+
available: names.slice(0, 20),
|
|
500
|
+
}) }] };
|
|
501
|
+
}
|
|
502
|
+
return { content: [{ type: "text", text: JSON.stringify({
|
|
503
|
+
name, code: block.code, startLine: block.startLine, endLine: block.endLine,
|
|
504
|
+
lines: block.endLine - block.startLine + 1,
|
|
505
|
+
}) }] };
|
|
506
|
+
});
|
|
447
507
|
return server;
|
|
448
508
|
}
|
|
449
509
|
// ── main: start MCP server via stdio ─────────────────────────────────────────
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
// Noise filter — strips output that is NEVER useful for AI agents or humans.
// Runs before any parsing/compression so every downstream feature benefits.

// Patterns are tested against the trimmed line; a match removes the line.
const NOISE_PATTERNS = [
    // npm noise
    /^\d+ packages? are looking for funding/,
    /^\s*run [`']?npm fund[`']? for details/,
    /^found 0 vulnerabilities/,
    /^npm warn deprecated\b/,
    /^npm warn ERESOLVE\b/,
    /^npm warn old lockfile/,
    /^npm notice\b/,
    // Progress bars and spinners
    /[█▓▒░⣾⣽⣻⢿⡿⣟⣯⣷]{3,}/,
    /\[\s*[=>#-]{5,}\s*\]\s*\d+%/, // e.g. [=====>   ] 45%
    /^\s*[\\/|/-]{1}\s*$/, // a lone spinner character on its own line
    /Downloading\s.*\d+%/,
    /Progress:\s*\d+%/i,
    // Build noise
    /^gyp info\b/,
    /^gyp warn\b/,
    /^TSFILE:/,
    /^\s*hmr update\s/i,
    // Python noise
    /^Requirement already satisfied:/,
    // Docker noise
    /^Pulling fs layer/,
    /^Waiting$/,
    /^Downloading\s+\[/,
    /^Extracting\s+\[/,
    // Git LFS
    /^Filtering content:/,
    /^Git LFS:/,
    // Generic download/upload progress (e.g. "1.2MB / 5MB")
    /^\s*\d+(\.\d+)?\s*[KMG]?B\s*\/\s*\d+(\.\d+)?\s*[KMG]?B\b/,
];

/** True when a trimmed line matches any known-noise pattern. */
function isNoise(trimmedLine) {
    for (const pattern of NOISE_PATTERNS) {
        if (pattern.test(trimmedLine))
            return true;
    }
    return false;
}

/**
 * Strip noise lines from output.
 * Also collapses runs of consecutive blank lines down to one, and for lines
 * containing carriage returns (in-place spinner/progress overwrites) keeps
 * only the text after the final \r — what would actually remain on screen.
 * @param {string} output - Raw command output.
 * @returns {{cleaned: string, linesRemoved: number}} Cleaned text plus the
 *   number of lines dropped (noise lines and collapsed blanks).
 */
export function stripNoise(output) {
    const kept = [];
    let removed = 0;
    let consecutiveBlanks = 0;
    for (const line of output.split("\n")) {
        const trimmed = line.trim();
        if (trimmed === "") {
            // Keep only the first blank in any run of blanks.
            consecutiveBlanks += 1;
            if (consecutiveBlanks > 1) {
                removed += 1;
            } else {
                kept.push(line);
            }
            continue;
        }
        consecutiveBlanks = 0;
        if (isNoise(trimmed)) {
            removed += 1;
            continue;
        }
        if (line.includes("\r") && !line.endsWith("\r")) {
            // \r rewinds the cursor: only the segment after the last \r survives.
            const segments = line.split("\r");
            kept.push(segments[segments.length - 1]);
            continue;
        }
        kept.push(line);
    }
    return { cleaned: kept.join("\n"), linesRemoved: removed };
}
|
package/dist/output-processor.js
CHANGED
|
@@ -32,7 +32,11 @@ export async function processOutput(command, output) {
|
|
|
32
32
|
summary: output,
|
|
33
33
|
full: output,
|
|
34
34
|
tokensSaved: 0,
|
|
35
|
+
aiTokensUsed: 0,
|
|
35
36
|
aiProcessed: false,
|
|
37
|
+
aiCostUsd: 0,
|
|
38
|
+
savingsValueUsd: 0,
|
|
39
|
+
netSavingsUsd: 0,
|
|
36
40
|
};
|
|
37
41
|
}
|
|
38
42
|
// Truncate very long output before sending to AI
|
|
@@ -65,12 +69,30 @@ export async function processOutput(command, output) {
|
|
|
65
69
|
}
|
|
66
70
|
}
|
|
67
71
|
catch { /* not JSON, that's fine */ }
|
|
72
|
+
// Cost calculation
|
|
73
|
+
// AI input: system prompt (~200 tokens) + command + output sent to AI
|
|
74
|
+
const aiInputTokens = estimateTokens(SUMMARIZE_PROMPT) + estimateTokens(toSummarize) + 20;
|
|
75
|
+
const aiOutputTokens = summaryTokens;
|
|
76
|
+
const aiTokensUsed = aiInputTokens + aiOutputTokens;
|
|
77
|
+
// Cerebras qwen-3-235b pricing: $0.60/M input, $1.20/M output
|
|
78
|
+
const aiCostUsd = (aiInputTokens * 0.60 + aiOutputTokens * 1.20) / 1_000_000;
|
|
79
|
+
// Value of tokens saved (at Claude Sonnet $3/M input — what the agent would pay)
|
|
80
|
+
const savingsValueUsd = (saved * 3.0) / 1_000_000;
|
|
81
|
+
const netSavingsUsd = savingsValueUsd - aiCostUsd;
|
|
82
|
+
// Only record savings if net positive (AI cost < token savings value)
|
|
83
|
+
if (netSavingsUsd > 0 && saved > 0) {
|
|
84
|
+
recordSaving("compressed", saved);
|
|
85
|
+
}
|
|
68
86
|
return {
|
|
69
87
|
summary,
|
|
70
88
|
full: output,
|
|
71
89
|
structured,
|
|
72
90
|
tokensSaved: saved,
|
|
91
|
+
aiTokensUsed,
|
|
73
92
|
aiProcessed: true,
|
|
93
|
+
aiCostUsd,
|
|
94
|
+
savingsValueUsd,
|
|
95
|
+
netSavingsUsd,
|
|
74
96
|
};
|
|
75
97
|
}
|
|
76
98
|
catch {
|
|
@@ -82,7 +104,11 @@ export async function processOutput(command, output) {
|
|
|
82
104
|
summary: fallback,
|
|
83
105
|
full: output,
|
|
84
106
|
tokensSaved: Math.max(0, estimateTokens(output) - estimateTokens(fallback)),
|
|
107
|
+
aiTokensUsed: 0,
|
|
85
108
|
aiProcessed: false,
|
|
109
|
+
aiCostUsd: 0,
|
|
110
|
+
savingsValueUsd: 0,
|
|
111
|
+
netSavingsUsd: 0,
|
|
86
112
|
};
|
|
87
113
|
}
|
|
88
114
|
}
|
package/dist/search/semantic.js
CHANGED
|
@@ -16,6 +16,44 @@ function exec(command, cwd) {
|
|
|
16
16
|
export function extractSymbolsFromFile(filePath) {
|
|
17
17
|
return extractSymbols(filePath);
|
|
18
18
|
}
|
|
19
|
+
/**
 * Extract the complete code block for a symbol by name.
 *
 * Locates the symbol via extractSymbols() and then scans forward from its
 * declaration line, counting `{`/`}` pairs until the block closes. Returns
 * null when the file does not exist or the symbol is not found.
 *
 * NOTE(review): the brace counter is a plain character scan — braces inside
 * string literals, template literals, regexes, or comments are counted too
 * and can end the block early or late. Acceptable for typical source, but
 * not a real parser; confirm against pathological inputs.
 *
 * @returns {{code: string, startLine: number, endLine: number} | null}
 *   startLine/endLine are 1-indexed and inclusive.
 */
export function extractBlock(filePath, symbolName) {
    if (!existsSync(filePath))
        return null;
    const content = readFileSync(filePath, "utf8");
    const lines = content.split("\n");
    const symbols = extractSymbols(filePath);
    // Imports share names with real definitions, so explicitly skip them.
    const symbol = symbols.find(s => s.name === symbolName && s.kind !== "import");
    if (!symbol)
        return null;
    const startLine = symbol.line - 1; // 0-indexed
    let braceDepth = 0;
    let foundOpen = false;
    let endLine = startLine;
    for (let i = startLine; i < lines.length; i++) {
        const line = lines[i];
        // Count every brace on the line; depth returning to 0 after the first
        // "{" marks the end of the block.
        for (const ch of line) {
            if (ch === "{") {
                braceDepth++;
                foundOpen = true;
            }
            if (ch === "}") {
                braceDepth--;
            }
        }
        endLine = i;
        if (foundOpen && braceDepth <= 0)
            break;
        // For single-line arrow functions without braces
        if (i === startLine && !line.includes("{") && line.includes("=>"))
            break;
        // NOTE(review): if no "{" is ever seen (and the arrow shortcut does
        // not apply), the loop runs to EOF and the whole file tail is
        // returned — verify whether that is intended for brace-less symbols.
    }
    return {
        code: lines.slice(startLine, endLine + 1).join("\n"),
        startLine: startLine + 1, // 1-indexed
        endLine: endLine + 1,
    };
}
|
|
19
57
|
function extractSymbols(filePath) {
|
|
20
58
|
if (!existsSync(filePath))
|
|
21
59
|
return [];
|
|
@@ -28,7 +66,7 @@ function extractSymbols(filePath) {
|
|
|
28
66
|
const lineNum = i + 1;
|
|
29
67
|
const isExported = line.trimStart().startsWith("export");
|
|
30
68
|
// Functions: export function X(...) or export const X = (...) =>
|
|
31
|
-
const funcMatch = line.match(/(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(
|
|
69
|
+
const funcMatch = line.match(/(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(/);
|
|
32
70
|
if (funcMatch) {
|
|
33
71
|
const prevLine = i > 0 ? lines[i - 1] : "";
|
|
34
72
|
const doc = prevLine.trim().startsWith("/**") || prevLine.trim().startsWith("//")
|
package/package.json
CHANGED