@hasna/terminal 4.3.1 → 4.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/App.js +404 -0
- package/dist/Browse.js +79 -0
- package/dist/FuzzyPicker.js +47 -0
- package/dist/Onboarding.js +51 -0
- package/dist/Spinner.js +12 -0
- package/dist/StatusBar.js +49 -0
- package/dist/ai.js +316 -0
- package/dist/cache.js +42 -0
- package/dist/cli.js +778 -0
- package/dist/command-rewriter.js +64 -0
- package/dist/command-validator.js +86 -0
- package/dist/compression.js +91 -0
- package/dist/context-hints.js +285 -0
- package/dist/diff-cache.js +107 -0
- package/dist/discover.js +212 -0
- package/dist/economy.js +155 -0
- package/dist/expand-store.js +44 -0
- package/dist/file-cache.js +72 -0
- package/dist/file-index.js +62 -0
- package/dist/history.js +62 -0
- package/dist/lazy-executor.js +54 -0
- package/dist/line-dedup.js +59 -0
- package/dist/loop-detector.js +75 -0
- package/dist/mcp/install.js +189 -0
- package/dist/mcp/server.js +56 -0
- package/dist/mcp/tools/batch.js +111 -0
- package/dist/mcp/tools/execute.js +194 -0
- package/dist/mcp/tools/files.js +290 -0
- package/dist/mcp/tools/git.js +233 -0
- package/dist/mcp/tools/helpers.js +63 -0
- package/dist/mcp/tools/memory.js +151 -0
- package/dist/mcp/tools/meta.js +138 -0
- package/dist/mcp/tools/process.js +50 -0
- package/dist/mcp/tools/project.js +251 -0
- package/dist/mcp/tools/search.js +86 -0
- package/dist/noise-filter.js +94 -0
- package/dist/output-processor.js +233 -0
- package/dist/output-store.js +112 -0
- package/dist/paths.js +28 -0
- package/dist/providers/anthropic.js +43 -0
- package/dist/providers/base.js +4 -0
- package/dist/providers/cerebras.js +8 -0
- package/dist/providers/groq.js +8 -0
- package/dist/providers/index.js +142 -0
- package/dist/providers/openai-compat.js +93 -0
- package/dist/providers/xai.js +8 -0
- package/dist/recipes/model.js +20 -0
- package/dist/recipes/storage.js +153 -0
- package/dist/search/content-search.js +70 -0
- package/dist/search/file-search.js +61 -0
- package/dist/search/filters.js +34 -0
- package/dist/search/index.js +5 -0
- package/dist/search/semantic.js +346 -0
- package/dist/session-boot.js +59 -0
- package/dist/session-context.js +55 -0
- package/dist/sessions-db.js +240 -0
- package/dist/smart-display.js +286 -0
- package/dist/snapshots.js +51 -0
- package/dist/supervisor.js +112 -0
- package/dist/test-watchlist.js +131 -0
- package/dist/tokens.js +17 -0
- package/dist/tool-profiles.js +130 -0
- package/dist/tree.js +94 -0
- package/dist/usage-cache.js +65 -0
- package/package.json +2 -1
- package/src/Onboarding.tsx +1 -1
- package/src/ai.ts +5 -4
- package/src/cache.ts +2 -2
- package/src/economy.ts +3 -3
- package/src/history.ts +2 -2
- package/src/mcp/server.ts +2 -0
- package/src/mcp/tools/memory.ts +4 -2
- package/src/output-store.ts +2 -1
- package/src/paths.ts +32 -0
- package/src/recipes/storage.ts +3 -3
- package/src/session-context.ts +2 -2
- package/src/sessions-db.ts +15 -4
- package/src/tool-profiles.ts +4 -3
- package/src/usage-cache.ts +2 -2
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
// Git tools: commit, bulk_commit, smart_commit, diff, repo_state, last_commit
|
|
2
|
+
import { z } from "./helpers.js";
|
|
3
|
+
import { stripAnsi } from "../../compression.js";
|
|
4
|
+
import { estimateTokens } from "../../tokens.js";
|
|
5
|
+
import { processOutput } from "../../output-processor.js";
|
|
6
|
+
import { getOutputProvider } from "../../providers/index.js";
|
|
7
|
+
import { invalidateBootCache } from "../../session-boot.js";
|
|
8
|
+
export function registerGitTools(server, h) {
    // Quote a value for safe interpolation into a zsh command line.
    // Single quotes suppress $-expansion, command substitution, and globbing;
    // embedded single quotes use the standard '\'' close-escape-reopen idiom.
    // BUG FIX: the original interpolated bare double quotes (file names) and
    // JSON.stringify (commit messages), both of which still let the shell
    // expand `$VAR`, `$(cmd)` and backticks inside untrusted strings.
    const sh = (s) => `'${String(s).replace(/'/g, `'\\''`)}'`;
    // ── commit ────────────────────────────────────────────────────────────────
    server.tool("commit", "Commit and optionally push. Agent says what to commit, we handle git add/commit/push. Saves ~400 tokens vs raw git commands.", {
        message: z.string().describe("Commit message"),
        files: z.array(z.string()).optional().describe("Files to stage (default: all changed)"),
        push: z.boolean().optional().describe("Push after commit (default: false)"),
        cwd: z.string().optional().describe("Working directory"),
    }, async ({ message, files, push, cwd }) => {
        const start = Date.now();
        const workDir = cwd ?? process.cwd();
        const addCmd = files && files.length > 0 ? `git add ${files.map(sh).join(" ")}` : "git add -A";
        const commitCmd = `${addCmd} && git commit -m ${sh(message)}`;
        const fullCmd = push ? `${commitCmd} && git push` : commitCmd;
        const result = await h.exec(fullCmd, workDir, 30000);
        const output = (result.stdout + result.stderr).trim();
        h.logCall("commit", { command: `commit: ${message.slice(0, 80)}`, durationMs: Date.now() - start, exitCode: result.exitCode });
        invalidateBootCache();
        // Return only the first 5 non-empty output lines to keep tokens low.
        return { content: [{ type: "text", text: JSON.stringify({
                        exitCode: result.exitCode,
                        output: stripAnsi(output).split("\n").filter(l => l.trim()).slice(0, 5).join("\n"),
                        pushed: push ?? false,
                    }) }] };
    });
    // ── bulk_commit ───────────────────────────────────────────────────────────
    server.tool("bulk_commit", "Multiple logical commits in one call. Agent decides which files go in which commit, we handle all git commands. No AI cost. Use smart_commit instead if you want AI to decide the grouping.", {
        commits: z.array(z.object({
            message: z.string().describe("Commit message"),
            files: z.array(z.string()).describe("Files to stage for this commit"),
        })).describe("Array of logical commits"),
        push: z.boolean().optional().describe("Push after all commits (default: true)"),
        cwd: z.string().optional().describe("Working directory"),
    }, async ({ commits, push, cwd }) => {
        const start = Date.now();
        const workDir = cwd ?? process.cwd();
        const results = [];
        // Sequential on purpose: each `git add` depends on the index state the
        // previous commit left behind.
        for (const c of commits) {
            const fileArgs = c.files.map(sh).join(" ");
            const cmd = `git add ${fileArgs} && git commit -m ${sh(c.message)}`;
            const r = await h.exec(cmd, workDir, 15000);
            results.push({ message: c.message, files: c.files.length, ok: r.exitCode === 0 });
        }
        let pushed = false;
        if (push !== false) {
            const pushResult = await h.exec("git push", workDir, 30000);
            pushed = pushResult.exitCode === 0;
        }
        invalidateBootCache();
        h.logCall("bulk_commit", { command: `${commits.length} commits`, durationMs: Date.now() - start });
        return { content: [{ type: "text", text: JSON.stringify({ commits: results, pushed, total: results.length }) }] };
    });
    // ── smart_commit ──────────────────────────────────────────────────────────
    server.tool("smart_commit", "AI-powered git commit. Analyzes all changes, groups into logical commits with generated messages, stages and commits each group, optionally pushes. One call replaces the entire git workflow. Agent just says 'commit my work'.", {
        push: z.boolean().optional().describe("Push after all commits (default: true)"),
        hint: z.string().optional().describe("Optional context about the changes (e.g., 'fixed auth + added users endpoint')"),
        cwd: z.string().optional().describe("Working directory"),
    }, async ({ push, hint, cwd }) => {
        const start = Date.now();
        const workDir = cwd ?? process.cwd();
        // 1. Get all changed files
        const status = await h.exec("git status --porcelain", workDir, 10000);
        const diffStat = await h.exec("git diff --stat", workDir, 10000);
        const untrackedDiff = await h.exec("git diff HEAD --stat", workDir, 10000);
        const changedFiles = status.stdout.trim();
        if (!changedFiles) {
            return { content: [{ type: "text", text: JSON.stringify({ message: "Nothing to commit — working tree clean" }) }] };
        }
        // 2. AI groups changes into logical commits
        const provider = getOutputProvider();
        const outputModel = provider.name === "groq" ? "llama-3.1-8b-instant" : undefined;
        const grouping = await provider.complete(`Changed files:\n${changedFiles}\n\nDiff stats:\n${diffStat.stdout}\n${untrackedDiff.stdout}${hint ? `\n\nContext: ${hint}` : ""}`, {
            model: outputModel,
            system: `You are a git commit assistant. Group these changed files into logical commits. Return ONLY a JSON array:

[{"message": "conventional commit message", "files": ["file1.ts", "file2.ts"]}]

Rules:
- Group related changes (same feature, same fix, same refactor)
- Use conventional commits: feat:, fix:, refactor:, test:, docs:, chore:
- Message should explain WHY, not WHAT (the diff shows what)
- Each file appears in exactly one group
- If all changes are related, use a single commit
- Extract file paths from the status output (skip the status prefix like M, A, ??)`,
            maxTokens: 1000,
            temperature: 0,
        });
        let commits = [];
        try {
            // Extract the first JSON-array-looking span from the model reply.
            const jsonMatch = grouping.match(/\[[\s\S]*\]/);
            if (jsonMatch)
                commits = JSON.parse(jsonMatch[0]);
        }
        catch { }
        if (commits.length === 0) {
            // Fallback: single commit with all files (porcelain lines are "XY <path>").
            commits = [{ message: hint ?? "chore: update files", files: changedFiles.split("\n").map(l => l.slice(3).trim()) }];
        }
        // 3. Execute each commit
        const results = [];
        for (const c of commits) {
            const fileArgs = c.files.map(sh).join(" ");
            const cmd = `git add ${fileArgs} && git commit -m ${sh(c.message)}`;
            const r = await h.exec(cmd, workDir, 15000);
            results.push({ message: c.message, files: c.files.length, ok: r.exitCode === 0 });
        }
        // 4. Push if requested
        let pushed = false;
        if (push !== false) {
            const pushResult = await h.exec("git push", workDir, 30000);
            pushed = pushResult.exitCode === 0;
        }
        invalidateBootCache();
        h.logCall("smart_commit", { command: `${commits.length} commits`, durationMs: Date.now() - start, aiProcessed: true });
        return { content: [{ type: "text", text: JSON.stringify({
                        commits: results,
                        pushed,
                        total: results.length,
                        ok: results.every(r => r.ok),
                    }) }] };
    });
    // ── diff ──────────────────────────────────────────────────────────────────
    server.tool("diff", "Show what changed — git diff with AI summary. One call replaces constructing git diff commands.", {
        ref: z.string().optional().describe("Diff against this ref (default: unstaged changes). Examples: HEAD~1, main, abc123"),
        file: z.string().optional().describe("Diff a specific file only"),
        stat: z.boolean().optional().describe("Show file-level stats only, not full diff (default: false)"),
        cwd: z.string().optional().describe("Working directory"),
    }, async ({ ref, file, stat, cwd }) => {
        const start = Date.now();
        const workDir = cwd ?? process.cwd();
        let cmd = "git diff";
        // BUG FIX: ref/file were interpolated unquoted, allowing shell
        // metacharacters in either argument to run arbitrary commands.
        if (ref)
            cmd += ` ${sh(ref)}`;
        if (stat)
            cmd += " --stat";
        if (file)
            cmd += ` -- ${sh(file)}`;
        const result = await h.exec(cmd, workDir, 15000);
        const output = (result.stdout + result.stderr).trim();
        if (!output) {
            return { content: [{ type: "text", text: JSON.stringify({ clean: true, message: "No changes" }) }] };
        }
        const processed = await processOutput(cmd, output);
        h.logCall("diff", { command: cmd, outputTokens: estimateTokens(output), tokensSaved: processed.tokensSaved, durationMs: Date.now() - start, aiProcessed: processed.aiProcessed });
        return { content: [{ type: "text", text: JSON.stringify({
                        summary: processed.summary,
                        lines: output.split("\n").length,
                        tokensSaved: processed.tokensSaved,
                    }) }] };
    });
    // ── repo_state ────────────────────────────────────────────────────────────
    server.tool("repo_state", "Get full repository state in one call — branch, status, staged/unstaged files, recent commits. Replaces the common 3-command pattern: git status + git diff --stat + git log.", {
        path: z.string().optional().describe("Repo path (default: cwd)"),
    }, async ({ path }) => {
        const cwd = path ?? process.cwd();
        const [statusResult, diffResult, logResult] = await Promise.all([
            h.exec("git status --porcelain", cwd),
            h.exec("git diff --stat", cwd),
            h.exec("git log --oneline -12 --decorate", cwd),
        ]);
        const branchResult = await h.exec("git branch --show-current", cwd);
        const staged = [];
        const unstaged = [];
        const untracked = [];
        // Porcelain format: column 0 = index (staged) state, column 1 = worktree
        // state, path from column 3. A file can be both staged and unstaged.
        for (const line of statusResult.stdout.split("\n").filter(l => l.trim())) {
            const x = line[0], y = line[1], file = line.slice(3);
            if (x === "?" && y === "?")
                untracked.push(file);
            else if (x !== " " && x !== "?")
                staged.push(file);
            if (y !== " " && y !== "?")
                unstaged.push(file);
        }
        const commits = logResult.stdout.split("\n").filter(l => l.trim()).map(l => {
            const match = l.match(/^([a-f0-9]+)\s+(.+)$/);
            return match ? { hash: match[1], message: match[2] } : { hash: "", message: l };
        });
        return {
            content: [{ type: "text", text: JSON.stringify({
                        branch: branchResult.stdout.trim(),
                        dirty: staged.length + unstaged.length + untracked.length > 0,
                        staged, unstaged, untracked,
                        diffSummary: diffResult.stdout.trim() || "no changes",
                        recentCommits: commits,
                    }) }],
        };
    });
    // ── last_commit ───────────────────────────────────────────────────────────
    server.tool("last_commit", "Get details of the last commit — hash, message, files changed, diff stats. Replaces: git log -1 + git show --stat + git diff HEAD~1.", {
        path: z.string().optional().describe("Repo path (default: cwd)"),
    }, async ({ path }) => {
        const cwd = path ?? process.cwd();
        const [logResult, statResult] = await Promise.all([
            h.exec("git log -1 --format='%H%n%s%n%an%n%ai'", cwd),
            h.exec("git show --stat --format='' HEAD", cwd),
        ]);
        // Format string above emits hash/subject/author/date on separate lines.
        const [hash, message, author, date] = logResult.stdout.split("\n");
        const filesChanged = statResult.stdout.split("\n").filter(l => l.trim() && !l.includes("changed"));
        return {
            content: [{ type: "text", text: JSON.stringify({
                        hash: hash?.trim(),
                        message: message?.trim(),
                        author: author?.trim(),
                        date: date?.trim(),
                        filesChanged,
                    }) }],
        };
    });
    // ── git_init ────────────────────────────────────────────────────────────
    server.tool("git_init", "Initialize a new git repo, optionally with .gitignore and initial commit.", {
        cwd: z.string().optional().describe("Directory to init (default: cwd)"),
        gitignore: z.string().optional().describe("Content for .gitignore file"),
        initialCommit: z.boolean().optional().describe("Create initial commit (default: true)"),
    }, async ({ cwd, gitignore, initialCommit }) => {
        const workDir = cwd ?? process.cwd();
        await h.exec("git init", workDir, 5000);
        if (gitignore) {
            const { writeFileSync } = await import("fs");
            const { join } = await import("path");
            writeFileSync(join(workDir, ".gitignore"), gitignore);
        }
        if (initialCommit !== false) {
            await h.exec("git add -A && git commit -m 'init' --allow-empty", workDir, 10000);
        }
        h.logCall("git_init", { command: "git init" });
        return { content: [{ type: "text", text: JSON.stringify({ initialized: true, cwd: workDir }) }] };
    });
}
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
// Shared helpers for all MCP tools
|
|
2
|
+
import { spawn } from "child_process";
|
|
3
|
+
import { stripNoise } from "../../noise-filter.js";
|
|
4
|
+
import { rewriteCommand } from "../../command-rewriter.js";
|
|
5
|
+
import { invalidateBootCache } from "../../session-boot.js";
|
|
6
|
+
import { logInteraction } from "../../sessions-db.js";
|
|
7
|
+
import { join } from "path";
|
|
8
|
+
export { z } from "zod";
|
|
9
|
+
/** Create shared helpers for tool modules */
|
|
10
|
+
/**
 * Create shared helpers for tool modules.
 * @param {string} sessionId - current terminal session id, used for logging.
 * @returns {{exec: Function, resolvePath: Function, logCall: Function, sessionId: string}}
 */
export function createHelpers(sessionId) {
    /**
     * Execute a shell command and resolve with cleaned output.
     * Never rejects: spawn failures and timeouts resolve with a non-zero
     * exitCode instead of rejecting or hanging.
     * @param {string} command - command line to execute
     * @param {string} [cwd] - working directory (default: process.cwd())
     * @param {number} [timeout] - kill the process after this many ms
     * @param {boolean} [allowRewrite] - let the command-rewriter optimize the command
     */
    function exec(command, cwd, timeout, allowRewrite = false) {
        const rw = allowRewrite ? rewriteCommand(command) : { changed: false, rewritten: command };
        const actualCommand = rw.changed ? rw.rewritten : command;
        return new Promise((resolve) => {
            const start = Date.now();
            let settled = false;
            let timedOut = false;
            let stdout = "";
            let stderr = "";
            // NOTE(review): hard-coded zsh assumes a macOS-like host — confirm
            // whether a /bin/sh fallback is wanted on other platforms.
            const proc = spawn("/bin/zsh", ["-c", actualCommand], {
                cwd: cwd ?? process.cwd(),
                stdio: ["ignore", "pipe", "pipe"],
            });
            proc.stdout?.on("data", (d) => { stdout += d.toString(); });
            proc.stderr?.on("data", (d) => { stderr += d.toString(); });
            const timer = timeout ? setTimeout(() => {
                timedOut = true;
                try {
                    proc.kill("SIGTERM");
                }
                catch { }
            }, timeout) : null;
            const finish = (exitCode) => {
                if (settled)
                    return;
                settled = true;
                if (timer)
                    clearTimeout(timer);
                // State-changing git commands invalidate the cached session-boot context.
                if (/\bgit\s+(commit|checkout|branch|merge|reset|push|pull|rebase|stash)\b/.test(actualCommand)) {
                    invalidateBootCache();
                }
                resolve({
                    exitCode,
                    stdout: stripNoise(stdout).cleaned,
                    stderr: stripNoise(stderr).cleaned,
                    duration: Date.now() - start,
                    rewritten: rw.changed ? rw.rewritten : undefined,
                });
            };
            // BUG FIX: without an "error" handler a failed spawn (e.g. missing
            // shell binary, EACCES) left this promise pending forever.
            proc.on("error", (err) => {
                stderr += String(err?.message ?? err);
                finish(-1);
            });
            proc.on("close", (code) => {
                // BUG FIX: the original mapped a null exit code (process killed
                // by a signal — including our own timeout) to 0, so callers that
                // check `exitCode === 0` saw success for commands that never
                // completed. Report 124 (conventional timeout status) or -1.
                finish(code ?? (timedOut ? 124 : -1));
            });
        });
    }
    /**
     * Resolve a possibly-relative path against cwd. Absolute ("/...") and
     * home-prefixed ("~...") paths pass through untouched (the shell expands ~).
     */
    function resolvePath(p, cwd) {
        if (!p)
            return cwd ?? process.cwd();
        if (p.startsWith("/") || p.startsWith("~"))
            return p;
        return join(cwd ?? process.cwd(), p);
    }
    /**
     * Record one tool invocation in the sessions DB. Best-effort: logging must
     * never break a tool call, so all errors are deliberately swallowed.
     */
    function logCall(tool, data) {
        try {
            logInteraction(sessionId, {
                nl: `[mcp:${tool}]${data.command ? ` ${data.command.slice(0, 200)}` : ""}`,
                command: data.command?.slice(0, 500),
                exitCode: data.exitCode,
                // Only AI-processed calls consume tokens; raw passthrough is free.
                tokensUsed: data.aiProcessed ? (data.outputTokens ?? 0) : 0,
                tokensSaved: data.tokensSaved ?? 0,
                durationMs: data.durationMs,
                model: data.model,
                cached: false,
            });
        }
        catch { }
    }
    return { exec, resolvePath, logCall, sessionId };
}
|
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
// Memory tools: remember, recall, project_note, store_secret, list_secrets
|
|
2
|
+
import { z } from "./helpers.js";
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
import { dirname, join } from "path";
|
|
3
|
+
// Fallback memory store when mementos SDK not available
|
|
4
|
+
// Path of the per-project local memory file: <cwd>/.terminal/memories.json.
// BUG FIX: this dist file is an ES module, where CommonJS `require` is
// undefined — the original `require("path")` threw a ReferenceError at call
// time. Uses the static `path` import instead.
function getLocalMemoryFile() {
    return join(process.cwd(), ".terminal", "memories.json");
}
|
|
8
|
+
// Read all locally stored memories. Returns [] when the file is missing or
// holds invalid JSON — the local store is best-effort and never throws here.
// BUG FIX: the original called `require("fs")` in an ES module (ReferenceError),
// and the call sat outside the try block so it was not even caught.
function loadLocalMemories() {
    const file = getLocalMemoryFile();
    if (!existsSync(file))
        return [];
    try {
        return JSON.parse(readFileSync(file, "utf8"));
    }
    catch {
        return [];
    }
}
|
|
20
|
+
// Persist one memory locally, deduplicating by key (last write wins).
// Creates the .terminal directory on first use.
// BUG FIX: the original called `require("fs")`/`require("path")` in an ES
// module, which threw a ReferenceError before anything was saved.
function saveLocalMemory(key, value, importance) {
    const file = getLocalMemoryFile();
    const dir = dirname(file);
    if (!existsSync(dir))
        mkdirSync(dir, { recursive: true });
    // Replace any existing entry with the same key before appending.
    const memories = loadLocalMemories().filter(m => m.key !== key);
    memories.push({ key, value, importance });
    writeFileSync(file, JSON.stringify(memories, null, 2));
}
|
|
31
|
+
export function registerMemoryTools(server, h) {
    // Load the optional @hasna/mementos SDK, or return null when unavailable.
    // BUG FIX: this dist file is an ES module, where CommonJS `require` is
    // undefined — the original `require("@hasna/mementos")` always threw a
    // ReferenceError and silently forced the local-file fallback even when the
    // SDK was installed. Dynamic import() is the ESM-correct way to load an
    // optional dependency; `default ?? mod` covers CJS interop.
    async function loadMementos() {
        try {
            const mod = await import("@hasna/mementos");
            return mod.default ?? mod;
        }
        catch {
            return null;
        }
    }
    // ── remember ──────────────────────────────────────────────────────────────
    server.tool("remember", "Save a learning about this project for future sessions. Persists across restarts. Use for: project patterns, conventions, toolchain quirks, architectural decisions.", {
        key: z.string().describe("Short key (e.g., 'test-command', 'deploy-process', 'auth-pattern')"),
        value: z.string().describe("What to remember"),
        importance: z.number().optional().describe("1-10, default 7"),
    }, async ({ key, value, importance }) => {
        const imp = importance ?? 7;
        // Try mementos SDK first, fall back to local file.
        try {
            const mementos = await loadMementos();
            if (!mementos)
                throw new Error("mementos SDK unavailable");
            mementos.createMemory({ key, value, scope: "shared", category: "knowledge", importance: imp });
        }
        catch {
            // Best-effort: persistence failures must not crash the tool call.
            try {
                saveLocalMemory(key, value, imp);
            }
            catch { }
        }
        h.logCall("remember", { command: `remember: ${key}` });
        return { content: [{ type: "text", text: JSON.stringify({ saved: key }) }] };
    });
    // ── recall ────────────────────────────────────────────────────────────────
    server.tool("recall", "Recall project memories from previous sessions. Returns all saved learnings, patterns, and decisions for this project.", {
        search: z.string().optional().describe("Search query to filter memories"),
        limit: z.number().optional().describe("Max memories to return (default: 20)"),
    }, async ({ search, limit }) => {
        let items = [];
        // Try mementos SDK first, fall back to local file.
        try {
            const mementos = await loadMementos();
            if (!mementos)
                throw new Error("mementos SDK unavailable");
            const memories = search
                ? mementos.searchMemories(search, { limit: limit ?? 20 })
                : mementos.listMemories({ scope: "shared", limit: limit ?? 20 });
            items = (memories ?? []).map((m) => ({ key: m.key, value: m.value, importance: m.importance }));
        }
        catch {
            let local;
            try {
                local = loadLocalMemories();
            }
            catch {
                local = [];
            }
            if (search) {
                const q = search.toLowerCase();
                local = local.filter(m => m.key.toLowerCase().includes(q) || m.value.toLowerCase().includes(q));
            }
            items = local.slice(0, limit ?? 20);
        }
        h.logCall("recall", { command: `recall${search ? `: ${search}` : ""}` });
        return { content: [{ type: "text", text: JSON.stringify({ memories: items, total: items.length }) }] };
    });
    // ── project_note ──────────────────────────────────────────────────────────
    server.tool("project_note", "Save or recall notes about the current project. Persists across sessions. Agents pick up where they left off.", {
        save: z.string().optional().describe("Note to save"),
        recall: z.boolean().optional().describe("Return all saved notes"),
        clear: z.boolean().optional().describe("Clear all notes"),
    }, async ({ save, recall, clear }) => {
        const { existsSync, readFileSync, writeFileSync, mkdirSync } = await import("fs");
        const { join } = await import("path");
        const notesDir = join(process.cwd(), ".terminal");
        const notesFile = join(notesDir, "notes.json");
        let notes = [];
        if (existsSync(notesFile)) {
            try {
                notes = JSON.parse(readFileSync(notesFile, "utf8"));
            }
            catch { }
        }
        // clear > save > recall precedence; recalling is the default action.
        if (clear) {
            notes = [];
            if (!existsSync(notesDir))
                mkdirSync(notesDir, { recursive: true });
            writeFileSync(notesFile, "[]");
            return { content: [{ type: "text", text: JSON.stringify({ cleared: true }) }] };
        }
        if (save) {
            notes.push({ text: save, timestamp: new Date().toISOString() });
            if (!existsSync(notesDir))
                mkdirSync(notesDir, { recursive: true });
            writeFileSync(notesFile, JSON.stringify(notes, null, 2));
            h.logCall("project_note", { command: `save: ${save.slice(0, 80)}` });
            return { content: [{ type: "text", text: JSON.stringify({ saved: true, total: notes.length }) }] };
        }
        return { content: [{ type: "text", text: JSON.stringify({ notes, total: notes.length }) }] };
    });
    // ── store_secret ──────────────────────────────────────────────────────────
    server.tool("store_secret", "Store a secret for use in commands. Agent uses $NAME in commands, we resolve at execution and redact in output.", {
        name: z.string().describe("Secret name (e.g., JIRA_TOKEN)"),
        value: z.string().describe("Secret value"),
    }, async ({ name, value }) => {
        const { existsSync, readFileSync, writeFileSync, chmodSync } = await import("fs");
        const { join } = await import("path");
        const { getTerminalDir } = await import("../../paths.js");
        const secretsFile = join(getTerminalDir(), "secrets.json");
        let secrets = {};
        if (existsSync(secretsFile)) {
            try {
                secrets = JSON.parse(readFileSync(secretsFile, "utf8"));
            }
            catch { }
        }
        secrets[name] = value;
        writeFileSync(secretsFile, JSON.stringify(secrets, null, 2));
        // Restrict the secrets file to the owner; best-effort on non-POSIX.
        try {
            chmodSync(secretsFile, 0o600);
        }
        catch { }
        h.logCall("store_secret", { command: `store ${name}` });
        return { content: [{ type: "text", text: JSON.stringify({ stored: name, hint: `Use $${name} in commands. Value will be resolved at execution and redacted in output.` }) }] };
    });
    // ── list_secrets ──────────────────────────────────────────────────────────
    server.tool("list_secrets", "List stored secret names (never values).", async () => {
        const { existsSync, readFileSync } = await import("fs");
        const { join } = await import("path");
        const { getTerminalDir } = await import("../../paths.js");
        const secretsFile = join(getTerminalDir(), "secrets.json");
        let names = [];
        if (existsSync(secretsFile)) {
            try {
                names = Object.keys(JSON.parse(readFileSync(secretsFile, "utf8")));
            }
            catch { }
        }
        // Also show env vars that look like secrets
        const envSecrets = Object.keys(process.env).filter(k => /API_KEY|TOKEN|SECRET|PASSWORD/i.test(k));
        return { content: [{ type: "text", text: JSON.stringify({ stored: names, environment: envSecrets }) }] };
    });
}
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
// Meta tools: token_stats, session_history, snapshot, watch, list_recipes, run_recipe, save_recipe, list_collections
|
|
2
|
+
import { z } from "./helpers.js";
|
|
3
|
+
import { stripAnsi } from "../../compression.js";
|
|
4
|
+
import { processOutput } from "../../output-processor.js";
|
|
5
|
+
import { listRecipes, listCollections, getRecipe, createRecipe } from "../../recipes/storage.js";
|
|
6
|
+
import { substituteVariables } from "../../recipes/model.js";
|
|
7
|
+
import { listSessions, getSessionInteractions, getSessionStats, getSessionEconomy } from "../../sessions-db.js";
|
|
8
|
+
import { getEconomyStats } from "../../economy.js";
|
|
9
|
+
import { captureSnapshot } from "../../snapshots.js";
|
|
10
|
+
import { storeOutput } from "../../expand-store.js";
|
|
11
|
+
export function registerMetaTools(server, h) {
|
|
12
|
+
// ── token_stats ───────────────────────────────────────────────────────────
|
|
13
|
+
server.tool("token_stats", "Get full token economy — savings, costs, ROI. Includes round-trip multiplier (saved tokens repeated across ~5 turns).", async () => {
|
|
14
|
+
const stats = getEconomyStats();
|
|
15
|
+
const { estimateSavingsUsd } = await import("../../economy.js");
|
|
16
|
+
const opus = estimateSavingsUsd(stats.totalTokensSaved, "anthropic-opus");
|
|
17
|
+
const sonnet = estimateSavingsUsd(stats.totalTokensSaved, "anthropic-sonnet");
|
|
18
|
+
const haiku = estimateSavingsUsd(stats.totalTokensSaved, "anthropic");
|
|
19
|
+
return { content: [{ type: "text", text: JSON.stringify({
|
|
20
|
+
...stats,
|
|
21
|
+
roundTrip: {
|
|
22
|
+
multiplier: 5,
|
|
23
|
+
billableTokensSaved: stats.totalTokensSaved * 5,
|
|
24
|
+
savingsUsd: { opus: opus.savingsUsd, sonnet: sonnet.savingsUsd, haiku: haiku.savingsUsd },
|
|
25
|
+
},
|
|
26
|
+
ratio: stats.totalTokensUsed > 0 ? Math.round((stats.totalTokensSaved / stats.totalTokensUsed) * 10) / 10 : 0,
|
|
27
|
+
}) }] };
|
|
28
|
+
});
|
|
29
|
+
// ── session_history ───────────────────────────────────────────────────────
|
|
30
|
+
server.tool("session_history", "Query terminal session history — recent sessions, specific session details, or aggregate stats.", {
|
|
31
|
+
action: z.enum(["list", "detail", "stats"]).describe("list=recent sessions, detail=specific session, stats=aggregates"),
|
|
32
|
+
sessionId: z.string().optional().describe("Session ID (for detail action)"),
|
|
33
|
+
limit: z.number().optional().describe("Max sessions to return (for list, default: 20)"),
|
|
34
|
+
}, async ({ action, sessionId, limit }) => {
|
|
35
|
+
if (action === "stats") {
|
|
36
|
+
return { content: [{ type: "text", text: JSON.stringify(getSessionStats()) }] };
|
|
37
|
+
}
|
|
38
|
+
if (action === "detail" && sessionId) {
|
|
39
|
+
const interactions = getSessionInteractions(sessionId);
|
|
40
|
+
const economy = getSessionEconomy(sessionId);
|
|
41
|
+
return { content: [{ type: "text", text: JSON.stringify({ interactions, economy }) }] };
|
|
42
|
+
}
|
|
43
|
+
const sessions = listSessions(limit ?? 20);
|
|
44
|
+
return { content: [{ type: "text", text: JSON.stringify(sessions) }] };
|
|
45
|
+
});
|
|
46
|
+
// ── snapshot ──────────────────────────────────────────────────────────────
|
|
47
|
+
server.tool("snapshot", "Capture a compact snapshot of terminal state (cwd, env, running processes, recent commands, recipes). Useful for agent context handoff.", async () => {
|
|
48
|
+
const snap = captureSnapshot();
|
|
49
|
+
return { content: [{ type: "text", text: JSON.stringify(snap) }] };
|
|
50
|
+
});
|
|
51
|
+
// ── watch ─────────────────────────────────────────────────────────────────
// Run a task (test/build/lint/typecheck) immediately and register a watch so
// subsequent runs can be diffed via execute_diff. Runner is auto-detected
// from lockfiles/manifests in the working directory.
server.tool("watch", "Run a task (test/build/lint/typecheck) on file change. Returns diff from last run. Agent stops polling — we push on change. Call watch_stop to end.", {
  task: z.enum(["test", "build", "lint", "typecheck"]).describe("Task to run on change"),
  path: z.string().optional().describe("File or directory to watch (default: src/)"),
  cwd: z.string().optional().describe("Working directory"),
}, async ({ task, path: watchPath, cwd }) => {
  const workDir = cwd ?? process.cwd();
  const target = h.resolvePath(watchPath ?? "src/", workDir);
  const watchId = `${task}:${target}`;
  // Pick the runner: bun if a bun lockfile exists, cargo for Rust crates,
  // otherwise fall back to npm scripts.
  const { existsSync } = await import("fs");
  const { join } = await import("path");
  let runner = "npm run";
  if (existsSync(join(workDir, "bun.lockb")) || existsSync(join(workDir, "bun.lock")))
    runner = "bun run";
  else if (existsSync(join(workDir, "Cargo.toml")))
    runner = "cargo";
  // Run once immediately so the agent gets a baseline result.
  const cmd = runner === "cargo" ? `cargo ${task}` : `${runner} ${task}`;
  const result = await h.exec(cmd, workDir, 60000);
  const output = (result.stdout + result.stderr).trim();
  const processed = await processOutput(cmd, output);
  // Store the initial output so execute_diff can report changes next run.
  // (Fix: the returned key was previously bound to an unused local.)
  storeOutput(`watch:${task}`, output);
  // Fix: log the real execution duration (was hard-coded to 0, inconsistent
  // with how run_recipe reports result.duration).
  h.logCall("watch", { command: `watch ${task} ${target}`, exitCode: result.exitCode, durationMs: result.duration, aiProcessed: processed.aiProcessed });
  return { content: [{ type: "text", text: JSON.stringify({
    watchId,
    task,
    watching: target,
    initialRun: { exitCode: result.exitCode, summary: processed.summary, tokensSaved: processed.tokensSaved },
    hint: "File watching active. Call execute_diff with the same command to get changes on next run.",
  }) }] };
});
|
|
83
|
+
// ── list_recipes ──────────────────────────────────────────────────────────
// Enumerate saved recipes for a project scope, optionally narrowed to one
// collection.
server.tool("list_recipes", "List saved command recipes. Optionally filter by collection or project.", {
  collection: z.string().optional().describe("Filter by collection name"),
  project: z.string().optional().describe("Project path for project-scoped recipes"),
}, async ({ collection, project }) => {
  const all = listRecipes(project);
  const recipes = collection ? all.filter((r) => r.collection === collection) : all;
  return { content: [{ type: "text", text: JSON.stringify(recipes) }] };
});
|
|
93
|
+
// ── run_recipe ────────────────────────────────────────────────────────────
// Execute a saved recipe by name. {var} placeholders are substituted from
// `variables`; output is returned raw (ANSI-stripped) or AI-processed when
// format is "json"/"compressed".
server.tool("run_recipe", "Run a saved recipe by name with optional variable substitution.", {
  name: z.string().describe("Recipe name"),
  variables: z.record(z.string(), z.string()).optional().describe("Variable values: {port: '3000'}"),
  cwd: z.string().optional().describe("Working directory"),
  format: z.enum(["raw", "json", "compressed"]).optional().describe("Output format"),
}, async ({ name, variables, cwd, format }) => {
  // Every response is a single JSON text payload.
  const reply = (payload) => ({ content: [{ type: "text", text: JSON.stringify(payload) }] });
  const recipe = getRecipe(name, cwd);
  if (!recipe) {
    return reply({ error: `Recipe '${name}' not found` });
  }
  // Expand {var} placeholders only when the caller supplied values.
  let command = recipe.command;
  if (variables) {
    command = substituteVariables(command, variables);
  }
  const result = await h.exec(command, cwd, 30000);
  const output = (result.stdout + result.stderr).trim();
  const wantsProcessed = format === "json" || format === "compressed";
  if (!wantsProcessed) {
    // Raw mode: verbatim output minus ANSI escape sequences.
    return reply({ recipe: name, exitCode: result.exitCode, output: stripAnsi(output), duration: result.duration });
  }
  const processed = await processOutput(command, output);
  return reply({
    recipe: name, exitCode: result.exitCode, summary: processed.summary,
    structured: processed.structured, duration: result.duration,
    tokensSaved: processed.tokensSaved, aiProcessed: processed.aiProcessed,
  });
});
|
|
119
|
+
// ── save_recipe ───────────────────────────────────────────────────────────
// Persist a reusable command template; {name} placeholders in the command
// become substitutable variables at run time.
server.tool("save_recipe", "Save a reusable command recipe. Variables in commands use {name} syntax.", {
  name: z.string().describe("Recipe name"),
  command: z.string().describe("Shell command (use {var} for variables)"),
  description: z.string().optional().describe("Description"),
  collection: z.string().optional().describe("Collection to add to"),
  project: z.string().optional().describe("Project path (for project-scoped recipe)"),
  tags: z.array(z.string()).optional().describe("Tags"),
}, async ({ name, command, description, collection, project, tags }) => {
  const saved = createRecipe({ name, command, description, collection, project, tags });
  return { content: [{ type: "text", text: JSON.stringify(saved) }] };
});
|
|
131
|
+
// ── list_collections ──────────────────────────────────────────────────────
// Enumerate recipe collections, optionally scoped to a project path.
server.tool("list_collections", "List recipe collections.", {
  project: z.string().optional().describe("Project path"),
}, async ({ project }) => ({
  content: [{ type: "text", text: JSON.stringify(listCollections(project)) }],
}));
|
|
138
|
+
}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
// Process tools: bg_start, bg_stop, bg_status, bg_logs, bg_wait_port
|
|
2
|
+
import { z } from "./helpers.js";
|
|
3
|
+
import { bgStart, bgStatus, bgStop, bgLogs, bgWaitPort } from "../../supervisor.js";
|
|
4
|
+
export function registerProcessTools(server, h) {
  // Every tool reply is a single JSON text payload; build it once here.
  const json = (payload) => ({ content: [{ type: "text", text: JSON.stringify(payload) }] });

  // ── bg_start ──────────────────────────────────────────────────────────────
  // Launch a long-running process (e.g. a dev server) under the supervisor.
  server.tool("bg_start", "Start a background process (e.g., dev server). Auto-detects port from command.", {
    command: z.string().describe("Command to run in background"),
    cwd: z.string().optional().describe("Working directory"),
  }, async ({ command, cwd }) => json(bgStart(command, cwd)));

  // ── bg_status ─────────────────────────────────────────────────────────────
  server.tool("bg_status", "List all managed background processes with status, ports, and recent output.", async () => json(bgStatus()));

  // ── bg_stop ───────────────────────────────────────────────────────────────
  server.tool("bg_stop", "Stop a managed background process by PID.", { pid: z.number().describe("Process ID to stop") }, async ({ pid }) => json({ stopped: bgStop(pid), pid }));

  // ── bg_logs ───────────────────────────────────────────────────────────────
  server.tool("bg_logs", "Get recent output lines from a background process.", {
    pid: z.number().describe("Process ID"),
    tail: z.number().optional().describe("Number of lines (default: 20)"),
  }, async ({ pid, tail }) => json({ pid, lines: bgLogs(pid, tail) }));

  // ── bg_wait_port ──────────────────────────────────────────────────────────
  server.tool("bg_wait_port", "Wait for a port to start accepting connections. Useful after starting a dev server.", {
    port: z.number().describe("Port number to wait for"),
    timeout: z.number().optional().describe("Timeout in ms (default: 30000)"),
  }, async ({ port, timeout }) => json({ port, ready: await bgWaitPort(port, timeout) }));

  // ── port_check ──────────────────────────────────────────────────────────
  server.tool("port_check", "Check if a port is in use and what process is using it.", {
    port: z.number().describe("Port number to check"),
  }, async ({ port }) => {
    // lsof prints a header row plus one row per process on the port; cap at 5.
    const result = await h.exec(`lsof -i :${port} -P -n 2>/dev/null | head -5`, undefined, 5000);
    const output = result.stdout.trim();
    if (!output || result.exitCode !== 0) {
      return json({ port, inUse: false });
    }
    const rows = output.split("\n").filter((l) => l.trim());
    // Drop the header row; keep the COMMAND/PID/USER columns of each hit.
    const processes = rows.slice(1).map((l) => l.split(/\s+/).slice(0, 3).join(" "));
    return json({ port, inUse: true, processes });
  });
}
|