@hasna/terminal 4.3.0 → 4.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Onboarding.js +1 -1
- package/dist/ai.js +9 -8
- package/dist/cache.js +2 -2
- package/dist/cli.js +0 -0
- package/dist/economy.js +3 -3
- package/dist/history.js +2 -2
- package/dist/mcp/server.js +26 -1345
- package/dist/mcp/tools/batch.js +111 -0
- package/dist/mcp/tools/execute.js +194 -0
- package/dist/mcp/tools/files.js +290 -0
- package/dist/mcp/tools/git.js +233 -0
- package/dist/mcp/tools/helpers.js +63 -0
- package/dist/mcp/tools/memory.js +151 -0
- package/dist/mcp/tools/meta.js +138 -0
- package/dist/mcp/tools/process.js +50 -0
- package/dist/mcp/tools/project.js +251 -0
- package/dist/mcp/tools/search.js +86 -0
- package/dist/output-store.js +2 -1
- package/dist/paths.js +28 -0
- package/dist/recipes/storage.js +3 -3
- package/dist/session-context.js +2 -2
- package/dist/sessions-db.js +15 -6
- package/dist/snapshots.js +2 -2
- package/dist/tool-profiles.js +4 -3
- package/dist/usage-cache.js +2 -2
- package/package.json +5 -3
- package/src/Onboarding.tsx +1 -1
- package/src/ai.ts +9 -8
- package/src/cache.ts +2 -2
- package/src/economy.ts +3 -3
- package/src/history.ts +2 -2
- package/src/mcp/server.ts +28 -1704
- package/src/mcp/tools/batch.ts +106 -0
- package/src/mcp/tools/execute.ts +248 -0
- package/src/mcp/tools/files.ts +369 -0
- package/src/mcp/tools/git.ts +306 -0
- package/src/mcp/tools/helpers.ts +92 -0
- package/src/mcp/tools/memory.ts +172 -0
- package/src/mcp/tools/meta.ts +202 -0
- package/src/mcp/tools/process.ts +94 -0
- package/src/mcp/tools/project.ts +297 -0
- package/src/mcp/tools/search.ts +118 -0
- package/src/output-store.ts +2 -1
- package/src/paths.ts +32 -0
- package/src/recipes/storage.ts +3 -3
- package/src/session-context.ts +2 -2
- package/src/sessions-db.ts +15 -4
- package/src/snapshots.ts +2 -2
- package/src/tool-profiles.ts +4 -3
- package/src/usage-cache.ts +2 -2
- package/dist/output-router.js +0 -41
- package/dist/parsers/base.js +0 -2
- package/dist/parsers/build.js +0 -64
- package/dist/parsers/errors.js +0 -101
- package/dist/parsers/files.js +0 -78
- package/dist/parsers/git.js +0 -99
- package/dist/parsers/index.js +0 -48
- package/dist/parsers/tests.js +0 -89
|
@@ -0,0 +1,251 @@
|
|
|
1
|
+
// Project tools: boot, project_overview, run, install, status, help
import { existsSync, readFileSync, readdirSync } from "fs";
import { join } from "path";
import { z } from "./helpers.js";
import { estimateTokens } from "../../tokens.js";
import { processOutput } from "../../output-processor.js";
import { getOutputProvider } from "../../providers/index.js";
import { getBootContext } from "../../session-boot.js";
|
|
7
|
+
/** Detect project toolchain from filesystem */
|
|
8
|
+
function detectToolchain(workDir) {
|
|
9
|
+
const { existsSync, readFileSync } = require("fs");
|
|
10
|
+
const { join } = require("path");
|
|
11
|
+
// JS/TS: bun > pnpm > yarn > npm
|
|
12
|
+
const hasBun = existsSync(join(workDir, "bun.lockb")) || existsSync(join(workDir, "bun.lock")) || (() => {
|
|
13
|
+
try {
|
|
14
|
+
return !!JSON.parse(readFileSync(join(workDir, "package.json"), "utf8")).engines?.bun;
|
|
15
|
+
}
|
|
16
|
+
catch {
|
|
17
|
+
return false;
|
|
18
|
+
}
|
|
19
|
+
})();
|
|
20
|
+
if (hasBun)
|
|
21
|
+
return { runner: "bun", ecosystem: "js" };
|
|
22
|
+
if (existsSync(join(workDir, "pnpm-lock.yaml")))
|
|
23
|
+
return { runner: "pnpm", ecosystem: "js" };
|
|
24
|
+
if (existsSync(join(workDir, "yarn.lock")))
|
|
25
|
+
return { runner: "yarn", ecosystem: "js" };
|
|
26
|
+
if (existsSync(join(workDir, "deno.json")) || existsSync(join(workDir, "deno.jsonc")))
|
|
27
|
+
return { runner: "deno", ecosystem: "js" };
|
|
28
|
+
// Rust
|
|
29
|
+
if (existsSync(join(workDir, "Cargo.toml")))
|
|
30
|
+
return { runner: "cargo", ecosystem: "rust" };
|
|
31
|
+
// Go
|
|
32
|
+
if (existsSync(join(workDir, "go.mod")))
|
|
33
|
+
return { runner: "go", ecosystem: "go" };
|
|
34
|
+
// Python: poetry > pip
|
|
35
|
+
if (existsSync(join(workDir, "poetry.lock")))
|
|
36
|
+
return { runner: "poetry", ecosystem: "python" };
|
|
37
|
+
if (existsSync(join(workDir, "Pipfile")))
|
|
38
|
+
return { runner: "pipenv", ecosystem: "python" };
|
|
39
|
+
if (existsSync(join(workDir, "pyproject.toml")) || existsSync(join(workDir, "requirements.txt")))
|
|
40
|
+
return { runner: "pip", ecosystem: "python" };
|
|
41
|
+
// Ruby
|
|
42
|
+
if (existsSync(join(workDir, "Gemfile")))
|
|
43
|
+
return { runner: "bundle", ecosystem: "ruby" };
|
|
44
|
+
// PHP
|
|
45
|
+
if (existsSync(join(workDir, "composer.json")))
|
|
46
|
+
return { runner: "composer", ecosystem: "php" };
|
|
47
|
+
// Elixir
|
|
48
|
+
if (existsSync(join(workDir, "mix.exs")))
|
|
49
|
+
return { runner: "mix", ecosystem: "elixir" };
|
|
50
|
+
// .NET
|
|
51
|
+
if (existsSync(join(workDir, "*.csproj")) || existsSync(join(workDir, "*.fsproj")) || existsSync(join(workDir, "Directory.Build.props")))
|
|
52
|
+
return { runner: "dotnet", ecosystem: "dotnet" };
|
|
53
|
+
// Dart/Flutter
|
|
54
|
+
if (existsSync(join(workDir, "pubspec.yaml")))
|
|
55
|
+
return { runner: "dart", ecosystem: "dart" };
|
|
56
|
+
// Swift
|
|
57
|
+
if (existsSync(join(workDir, "Package.swift")))
|
|
58
|
+
return { runner: "swift", ecosystem: "swift" };
|
|
59
|
+
// Zig
|
|
60
|
+
if (existsSync(join(workDir, "build.zig")))
|
|
61
|
+
return { runner: "zig", ecosystem: "zig" };
|
|
62
|
+
// Make (generic)
|
|
63
|
+
if (existsSync(join(workDir, "Makefile")))
|
|
64
|
+
return { runner: "make", ecosystem: "make" };
|
|
65
|
+
// Fallback: npm if package.json exists
|
|
66
|
+
if (existsSync(join(workDir, "package.json")))
|
|
67
|
+
return { runner: "npm", ecosystem: "js" };
|
|
68
|
+
return { runner: "npm", ecosystem: "unknown" };
|
|
69
|
+
}
|
|
70
|
+
export function registerProjectTools(server, h) {
|
|
71
|
+
// ── boot ──────────────────────────────────────────────────────────────────
|
|
72
|
+
server.tool("boot", "Get everything an agent needs on session start in ONE call — git state, project info, source structure. Replaces: git status + git log + cat package.json + ls src/. Cached for the session.", async () => {
|
|
73
|
+
const ctx = await getBootContext(process.cwd());
|
|
74
|
+
return { content: [{ type: "text", text: JSON.stringify({
|
|
75
|
+
...ctx,
|
|
76
|
+
hints: {
|
|
77
|
+
cwd: process.cwd(),
|
|
78
|
+
tip: "All terminal tools support relative paths. Use 'src/foo.ts' not the full absolute path. Use commit({message, push:true}) instead of raw git commands. Use run({task:'test'}) instead of bun/npm test. Use lookup({file, items}) instead of grep pipelines.",
|
|
79
|
+
},
|
|
80
|
+
}) }] };
|
|
81
|
+
});
|
|
82
|
+
// ── project_overview ──────────────────────────────────────────────────────
|
|
83
|
+
server.tool("project_overview", "Get project overview in one call — package.json info, source structure, config files. Replaces: cat package.json + ls src/ + cat tsconfig.json.", {
|
|
84
|
+
path: z.string().optional().describe("Project root (default: cwd)"),
|
|
85
|
+
}, async ({ path }) => {
|
|
86
|
+
const cwd = path ?? process.cwd();
|
|
87
|
+
const [pkgResult, srcResult, configResult] = await Promise.all([
|
|
88
|
+
h.exec("cat package.json 2>/dev/null", cwd),
|
|
89
|
+
h.exec("ls -1 src/ 2>/dev/null || ls -1 lib/ 2>/dev/null || ls -1 app/ 2>/dev/null", cwd),
|
|
90
|
+
h.exec("ls -1 *.json *.config.* .env* tsconfig* 2>/dev/null", cwd),
|
|
91
|
+
]);
|
|
92
|
+
let pkg = null;
|
|
93
|
+
try {
|
|
94
|
+
pkg = JSON.parse(pkgResult.stdout);
|
|
95
|
+
}
|
|
96
|
+
catch { }
|
|
97
|
+
return {
|
|
98
|
+
content: [{ type: "text", text: JSON.stringify({
|
|
99
|
+
name: pkg?.name,
|
|
100
|
+
version: pkg?.version,
|
|
101
|
+
scripts: pkg?.scripts,
|
|
102
|
+
dependencies: pkg?.dependencies ? Object.keys(pkg.dependencies) : [],
|
|
103
|
+
devDependencies: pkg?.devDependencies ? Object.keys(pkg.devDependencies) : [],
|
|
104
|
+
sourceFiles: srcResult.stdout.split("\n").filter(l => l.trim()),
|
|
105
|
+
configFiles: configResult.stdout.split("\n").filter(l => l.trim()),
|
|
106
|
+
}) }],
|
|
107
|
+
};
|
|
108
|
+
});
|
|
109
|
+
// ── run ───────────────────────────────────────────────────────────────────
|
|
110
|
+
server.tool("run", "Run a project task by intent — test, build, lint, dev, typecheck, format. Auto-detects toolchain (bun/npm/pnpm/yarn/cargo/go/make). Saves ~100 tokens vs raw commands.", {
|
|
111
|
+
task: z.string().describe("Task to run: test, build, lint, dev, start, typecheck, format, check — or any custom script name from package.json"),
|
|
112
|
+
args: z.string().optional().describe("Extra arguments (e.g., '--watch', 'src/foo.test.ts')"),
|
|
113
|
+
cwd: z.string().optional().describe("Working directory"),
|
|
114
|
+
}, async ({ task, args, cwd }) => {
|
|
115
|
+
const start = Date.now();
|
|
116
|
+
const workDir = cwd ?? process.cwd();
|
|
117
|
+
const { runner, ecosystem } = detectToolchain(workDir);
|
|
118
|
+
const extra = args ? ` ${args}` : "";
|
|
119
|
+
// Map intent to command per ecosystem
|
|
120
|
+
const taskMap = {
|
|
121
|
+
rust: { test: "cargo test", build: "cargo build", lint: "cargo clippy", format: "cargo fmt", check: "cargo check" },
|
|
122
|
+
go: { test: "go test ./...", build: "go build ./...", lint: "golangci-lint run", format: "gofmt -w .", check: "go vet ./..." },
|
|
123
|
+
python: { test: "pytest", build: "python -m build", lint: "ruff check .", format: "ruff format .", check: "mypy .", typecheck: "mypy ." },
|
|
124
|
+
ruby: { test: "bundle exec rake test", build: "bundle exec rake build", lint: "bundle exec rubocop", format: "bundle exec rubocop -a" },
|
|
125
|
+
php: { test: "composer test", build: "composer build", lint: "composer lint", format: "composer format" },
|
|
126
|
+
elixir: { test: "mix test", build: "mix compile", lint: "mix credo", format: "mix format", check: "mix dialyzer" },
|
|
127
|
+
dotnet: { test: "dotnet test", build: "dotnet build", lint: "dotnet format --verify-no-changes", format: "dotnet format", check: "dotnet build --no-incremental" },
|
|
128
|
+
dart: { test: "dart test", build: "dart compile exe", lint: "dart analyze", format: "dart format ." },
|
|
129
|
+
swift: { test: "swift test", build: "swift build", lint: "swiftlint", format: "swiftformat ." },
|
|
130
|
+
zig: { test: "zig build test", build: "zig build" },
|
|
131
|
+
make: { test: "make test", build: "make build", lint: "make lint", format: "make format", check: "make check" },
|
|
132
|
+
};
|
|
133
|
+
let cmd;
|
|
134
|
+
if (ecosystem === "js") {
|
|
135
|
+
const prefix = runner === "yarn" ? "yarn" : `${runner} run`;
|
|
136
|
+
cmd = `${prefix} ${task}${extra}`;
|
|
137
|
+
}
|
|
138
|
+
else if (taskMap[ecosystem]?.[task]) {
|
|
139
|
+
cmd = `${taskMap[ecosystem][task]}${extra}`;
|
|
140
|
+
}
|
|
141
|
+
else {
|
|
142
|
+
cmd = `${runner} ${task}${extra}`;
|
|
143
|
+
}
|
|
144
|
+
const result = await h.exec(cmd, workDir, 120000);
|
|
145
|
+
const output = (result.stdout + result.stderr).trim();
|
|
146
|
+
const processed = await processOutput(cmd, output);
|
|
147
|
+
h.logCall("run", { command: `${task}${args ? ` ${args}` : ""}`, outputTokens: estimateTokens(output), tokensSaved: processed.tokensSaved, durationMs: Date.now() - start, exitCode: result.exitCode, aiProcessed: processed.aiProcessed });
|
|
148
|
+
return { content: [{ type: "text", text: JSON.stringify({
|
|
149
|
+
exitCode: result.exitCode,
|
|
150
|
+
task,
|
|
151
|
+
runner,
|
|
152
|
+
summary: processed.summary,
|
|
153
|
+
tokensSaved: processed.tokensSaved,
|
|
154
|
+
}) }] };
|
|
155
|
+
});
|
|
156
|
+
// ── install ───────────────────────────────────────────────────────────────
|
|
157
|
+
server.tool("install", "Install packages — auto-detects toolchain for any language. Agent says what to install, we figure out how.", {
|
|
158
|
+
packages: z.array(z.string()).describe("Package names to install"),
|
|
159
|
+
dev: z.boolean().optional().describe("Install as dev dependency (default: false)"),
|
|
160
|
+
cwd: z.string().optional().describe("Working directory"),
|
|
161
|
+
}, async ({ packages, dev, cwd }) => {
|
|
162
|
+
const start = Date.now();
|
|
163
|
+
const workDir = cwd ?? process.cwd();
|
|
164
|
+
const { runner, ecosystem } = detectToolchain(workDir);
|
|
165
|
+
const pkgs = packages.join(" ");
|
|
166
|
+
const installMap = {
|
|
167
|
+
bun: { cmd: `bun add ${pkgs}`, devCmd: `bun add -D ${pkgs}` },
|
|
168
|
+
pnpm: { cmd: `pnpm add ${pkgs}`, devCmd: `pnpm add -D ${pkgs}` },
|
|
169
|
+
yarn: { cmd: `yarn add ${pkgs}`, devCmd: `yarn add --dev ${pkgs}` },
|
|
170
|
+
npm: { cmd: `npm install ${pkgs}`, devCmd: `npm install --save-dev ${pkgs}` },
|
|
171
|
+
deno: { cmd: `deno add ${pkgs}`, devCmd: `deno add --dev ${pkgs}` },
|
|
172
|
+
cargo: { cmd: `cargo add ${pkgs}`, devCmd: `cargo add --dev ${pkgs}` },
|
|
173
|
+
go: { cmd: `go get ${pkgs}`, devCmd: `go get ${pkgs}` },
|
|
174
|
+
pip: { cmd: `pip install ${pkgs}`, devCmd: `pip install ${pkgs}` },
|
|
175
|
+
poetry: { cmd: `poetry add ${pkgs}`, devCmd: `poetry add --group dev ${pkgs}` },
|
|
176
|
+
pipenv: { cmd: `pipenv install ${pkgs}`, devCmd: `pipenv install --dev ${pkgs}` },
|
|
177
|
+
bundle: { cmd: `bundle add ${pkgs}`, devCmd: `bundle add ${pkgs} --group development` },
|
|
178
|
+
composer: { cmd: `composer require ${pkgs}`, devCmd: `composer require --dev ${pkgs}` },
|
|
179
|
+
mix: { cmd: `mix deps.get`, devCmd: `mix deps.get` },
|
|
180
|
+
dotnet: { cmd: `dotnet add package ${pkgs}`, devCmd: `dotnet add package ${pkgs}` },
|
|
181
|
+
dart: { cmd: `dart pub add ${pkgs}`, devCmd: `dart pub add --dev ${pkgs}` },
|
|
182
|
+
swift: { cmd: `swift package add ${pkgs}`, devCmd: `swift package add ${pkgs}` },
|
|
183
|
+
};
|
|
184
|
+
const entry = installMap[runner] ?? installMap.npm;
|
|
185
|
+
const cmd = dev ? entry.devCmd : entry.cmd;
|
|
186
|
+
const result = await h.exec(cmd, workDir, 60000);
|
|
187
|
+
const output = (result.stdout + result.stderr).trim();
|
|
188
|
+
const processed = await processOutput(cmd, output);
|
|
189
|
+
h.logCall("install", { command: cmd, exitCode: result.exitCode, durationMs: Date.now() - start, aiProcessed: processed.aiProcessed });
|
|
190
|
+
return { content: [{ type: "text", text: JSON.stringify({
|
|
191
|
+
exitCode: result.exitCode,
|
|
192
|
+
command: cmd,
|
|
193
|
+
summary: processed.summary,
|
|
194
|
+
}) }] };
|
|
195
|
+
});
|
|
196
|
+
// ── status ────────────────────────────────────────────────────────────────
|
|
197
|
+
server.tool("status", "Get terminal server status, capabilities, and available parsers.", async () => {
|
|
198
|
+
return {
|
|
199
|
+
content: [{ type: "text", text: JSON.stringify({
|
|
200
|
+
name: "terminal", version: "3.3.0", cwd: process.cwd(),
|
|
201
|
+
features: ["ai-output-processing", "token-compression", "noise-filtering", "diff-caching", "lazy-execution", "progressive-disclosure"],
|
|
202
|
+
}) }],
|
|
203
|
+
};
|
|
204
|
+
});
|
|
205
|
+
// ── help ──────────────────────────────────────────────────────────────────
|
|
206
|
+
server.tool("help", "Get recommendations for which terminal tool to use. Describe what you want to do and get the best tool + usage example.", {
|
|
207
|
+
goal: z.string().optional().describe("What you're trying to do (e.g., 'run tests', 'find where login is defined', 'commit my changes')"),
|
|
208
|
+
}, async ({ goal }) => {
|
|
209
|
+
if (!goal) {
|
|
210
|
+
return { content: [{ type: "text", text: JSON.stringify({
|
|
211
|
+
tools: {
|
|
212
|
+
"execute / execute_smart": "Run any command. Smart = AI summary (80% fewer tokens)",
|
|
213
|
+
"run({task})": "Run test/build/lint — auto-detects toolchain",
|
|
214
|
+
"commit / bulk_commit / smart_commit": "Git commit — single, multi, or AI-grouped",
|
|
215
|
+
"diff({ref})": "Show what changed with AI summary",
|
|
216
|
+
"install({packages})": "Add packages — auto-detects bun/npm/pip/cargo",
|
|
217
|
+
"search_content({pattern})": "Grep with structured results",
|
|
218
|
+
"search_files({pattern})": "Find files by glob",
|
|
219
|
+
"symbols({path})": "AI file outline — any language",
|
|
220
|
+
"read_symbol({path, name})": "Read one function/class by name",
|
|
221
|
+
"read_file({path, summarize})": "Read or AI-summarize a file",
|
|
222
|
+
"read_files({files, summarize})": "Multi-file read in one call",
|
|
223
|
+
"symbols_dir({path})": "Symbols for entire directory",
|
|
224
|
+
"review({since})": "AI code review",
|
|
225
|
+
"lookup({file, items})": "Find items in a file by name",
|
|
226
|
+
"edit({file, find, replace})": "Find-replace in file",
|
|
227
|
+
"repo_state": "Git branch + status + log in one call",
|
|
228
|
+
"boot": "Full project context on session start",
|
|
229
|
+
"watch({task})": "Run task on file change",
|
|
230
|
+
"store_secret / list_secrets": "Secrets vault",
|
|
231
|
+
"project_note({save/recall})": "Persistent project notes",
|
|
232
|
+
},
|
|
233
|
+
tips: [
|
|
234
|
+
"Use relative paths — 'src/foo.ts' not '/Users/.../src/foo.ts'",
|
|
235
|
+
"Use your native Read/Write/Edit for file operations when you don't need AI summary",
|
|
236
|
+
"Use search_content for text patterns, symbols for code structure",
|
|
237
|
+
"Use commit for single, bulk_commit for multiple, smart_commit for AI-grouped",
|
|
238
|
+
],
|
|
239
|
+
}) }] };
|
|
240
|
+
}
|
|
241
|
+
// AI recommends the best tool for the goal
|
|
242
|
+
const provider = getOutputProvider();
|
|
243
|
+
const outputModel = provider.name === "groq" ? "llama-3.1-8b-instant" : undefined;
|
|
244
|
+
const recommendation = await provider.complete(`Agent wants to: ${goal}\n\nAvailable tools: execute, execute_smart, run, commit, bulk_commit, smart_commit, diff, install, search_content, search_files, symbols, read_symbol, read_file, read_files, symbols_dir, review, lookup, edit, repo_state, boot, watch, store_secret, list_secrets, project_note, help`, {
|
|
245
|
+
model: outputModel,
|
|
246
|
+
system: `Recommend the best terminal MCP tool for this goal. Return JSON: {"tool": "name", "example": {params}, "why": "one line"}. If multiple tools work, list top 2.`,
|
|
247
|
+
maxTokens: 200, temperature: 0,
|
|
248
|
+
});
|
|
249
|
+
return { content: [{ type: "text", text: recommendation }] };
|
|
250
|
+
});
|
|
251
|
+
}
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
// Search tools: search_content, search_files, search_semantic, lookup
|
|
2
|
+
import { z } from "./helpers.js";
|
|
3
|
+
import { searchFiles, searchContent, semanticSearch } from "../../search/index.js";
|
|
4
|
+
export function registerSearchTools(server, h) {
|
|
5
|
+
// ── search_files ──────────────────────────────────────────────────────────
|
|
6
|
+
server.tool("search_files", "Search for files by name pattern. Auto-filters node_modules, .git, dist. Returns categorized results (source, config, other) with token savings.", {
|
|
7
|
+
pattern: z.string().describe("Glob pattern (e.g., '*hooks*', '*.test.ts')"),
|
|
8
|
+
path: z.string().optional().describe("Search root (default: cwd)"),
|
|
9
|
+
includeNodeModules: z.boolean().optional().describe("Include node_modules (default: false)"),
|
|
10
|
+
maxResults: z.number().optional().describe("Max results per category (default: 50)"),
|
|
11
|
+
}, async ({ pattern, path, includeNodeModules, maxResults }) => {
|
|
12
|
+
const start = Date.now();
|
|
13
|
+
const result = await searchFiles(pattern, path ?? process.cwd(), { includeNodeModules, maxResults });
|
|
14
|
+
h.logCall("search_files", { command: `search_files ${pattern}`, tokensSaved: result.tokensSaved ?? 0, durationMs: Date.now() - start });
|
|
15
|
+
return { content: [{ type: "text", text: JSON.stringify(result) }] };
|
|
16
|
+
});
|
|
17
|
+
// ── search_content ────────────────────────────────────────────────────────
|
|
18
|
+
server.tool("search_content", "Search file contents by regex pattern. Groups matches by file, sorted by relevance. Use offset for pagination when results are truncated.", {
|
|
19
|
+
pattern: z.string().describe("Search pattern (regex)"),
|
|
20
|
+
path: z.string().optional().describe("Search root (default: cwd)"),
|
|
21
|
+
fileType: z.string().optional().describe("File type filter (e.g., 'ts', 'py')"),
|
|
22
|
+
maxResults: z.number().optional().describe("Max files to return (default: 30)"),
|
|
23
|
+
offset: z.number().optional().describe("Skip first N files (for pagination, default: 0)"),
|
|
24
|
+
contextLines: z.number().optional().describe("Context lines around matches (default: 0)"),
|
|
25
|
+
}, async ({ pattern, path, fileType, maxResults, offset, contextLines }) => {
|
|
26
|
+
const start = Date.now();
|
|
27
|
+
// Fetch more than needed to support offset
|
|
28
|
+
const fetchLimit = (maxResults ?? 30) + (offset ?? 0);
|
|
29
|
+
const result = await searchContent(pattern, path ?? process.cwd(), { fileType, maxResults: fetchLimit, contextLines });
|
|
30
|
+
// Apply offset
|
|
31
|
+
if (offset && offset > 0 && result.files) {
|
|
32
|
+
result.files = result.files.slice(offset);
|
|
33
|
+
}
|
|
34
|
+
h.logCall("search_content", { command: `grep ${pattern}`, tokensSaved: result.tokensSaved ?? 0, durationMs: Date.now() - start });
|
|
35
|
+
return { content: [{ type: "text", text: JSON.stringify(result) }] };
|
|
36
|
+
});
|
|
37
|
+
// ── search_semantic ───────────────────────────────────────────────────────
|
|
38
|
+
server.tool("search_semantic", "Find functions, classes, components, hooks, types by NAME or SIGNATURE. Searches symbol declarations, NOT code behavior or content. Use search_content (grep) instead for pattern matching inside code (e.g., security audits, string searches, imports).", {
|
|
39
|
+
query: z.string().describe("Symbol name to search for (e.g., 'auth', 'login', 'UserService'). Matches function/class/type names, not code content."),
|
|
40
|
+
path: z.string().optional().describe("Search root (default: cwd)"),
|
|
41
|
+
kinds: z.array(z.enum(["function", "class", "interface", "type", "variable", "export", "import", "component", "hook"])).optional().describe("Filter by symbol kind"),
|
|
42
|
+
exportedOnly: z.boolean().optional().describe("Only show exported symbols (default: false)"),
|
|
43
|
+
maxResults: z.number().optional().describe("Max results (default: 30)"),
|
|
44
|
+
}, async ({ query, path, kinds, exportedOnly, maxResults }) => {
|
|
45
|
+
const result = await semanticSearch(query, path ?? process.cwd(), {
|
|
46
|
+
kinds: kinds,
|
|
47
|
+
exportedOnly,
|
|
48
|
+
maxResults,
|
|
49
|
+
});
|
|
50
|
+
return { content: [{ type: "text", text: JSON.stringify(result) }] };
|
|
51
|
+
});
|
|
52
|
+
// ── lookup ────────────────────────────────────────────────────────────────
|
|
53
|
+
server.tool("lookup", "Search for specific items in a file by name or pattern. Agent says what to find, not how to grep. Saves ~300 tokens vs constructing grep pipelines.", {
|
|
54
|
+
file: z.string().describe("File path to search in"),
|
|
55
|
+
items: z.array(z.string()).describe("Names or patterns to look up"),
|
|
56
|
+
context: z.number().optional().describe("Lines of context around each match (default: 3)"),
|
|
57
|
+
}, async ({ file: rawFile, items, context }) => {
|
|
58
|
+
const start = Date.now();
|
|
59
|
+
const file = h.resolvePath(rawFile);
|
|
60
|
+
const { readFileSync } = await import("fs");
|
|
61
|
+
try {
|
|
62
|
+
const content = readFileSync(file, "utf8");
|
|
63
|
+
const lines = content.split("\n");
|
|
64
|
+
const ctx = context ?? 3;
|
|
65
|
+
const results = {};
|
|
66
|
+
for (const item of items) {
|
|
67
|
+
results[item] = [];
|
|
68
|
+
const pattern = new RegExp(item.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), "i");
|
|
69
|
+
for (let i = 0; i < lines.length; i++) {
|
|
70
|
+
if (pattern.test(lines[i])) {
|
|
71
|
+
results[item].push({
|
|
72
|
+
line: i + 1,
|
|
73
|
+
text: lines[i].trim(),
|
|
74
|
+
context: lines.slice(Math.max(0, i - ctx), i + ctx + 1).map(l => l.trimEnd()),
|
|
75
|
+
});
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
}
|
|
79
|
+
h.logCall("lookup", { command: `lookup ${file} [${items.join(",")}]`, durationMs: Date.now() - start });
|
|
80
|
+
return { content: [{ type: "text", text: JSON.stringify(results) }] };
|
|
81
|
+
}
|
|
82
|
+
catch (e) {
|
|
83
|
+
return { content: [{ type: "text", text: JSON.stringify({ error: e.message }) }] };
|
|
84
|
+
}
|
|
85
|
+
});
|
|
86
|
+
}
|
package/dist/output-store.js
CHANGED
|
@@ -3,7 +3,8 @@
|
|
|
3
3
|
import { existsSync, mkdirSync, writeFileSync, readdirSync, statSync, unlinkSync } from "fs";
|
|
4
4
|
import { join } from "path";
|
|
5
5
|
import { createHash } from "crypto";
|
|
6
|
-
|
|
6
|
+
import { getTerminalDir } from "./paths.js";
|
|
7
|
+
const OUTPUTS_DIR = join(getTerminalDir(), "outputs");
|
|
7
8
|
/** Ensure outputs directory exists */
|
|
8
9
|
function ensureDir() {
|
|
9
10
|
if (!existsSync(OUTPUTS_DIR))
|
package/dist/paths.js
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
// Centralized path resolution for open-terminal global data directory.
|
|
2
|
+
// Migrated from ~/.terminal/ to ~/.hasna/terminal/ with backward compat.
|
|
3
|
+
import { existsSync, mkdirSync } from "fs";
|
|
4
|
+
import { homedir } from "os";
|
|
5
|
+
import { join } from "path";
|
|
6
|
+
/**
|
|
7
|
+
* Get the global terminal data directory.
|
|
8
|
+
* New default: ~/.hasna/terminal/
|
|
9
|
+
* Legacy fallback: ~/.terminal/ (if it exists and new dir doesn't)
|
|
10
|
+
* Env override: HASNA_TERMINAL_DIR or TERMINAL_DIR
|
|
11
|
+
*/
|
|
12
|
+
export function getTerminalDir() {
|
|
13
|
+
if (process.env.HASNA_TERMINAL_DIR)
|
|
14
|
+
return process.env.HASNA_TERMINAL_DIR;
|
|
15
|
+
if (process.env.TERMINAL_DIR)
|
|
16
|
+
return process.env.TERMINAL_DIR;
|
|
17
|
+
const home = homedir();
|
|
18
|
+
const newDir = join(home, ".hasna", "terminal");
|
|
19
|
+
const legacyDir = join(home, ".terminal");
|
|
20
|
+
// Use legacy dir if it exists and new one doesn't yet (backward compat)
|
|
21
|
+
if (!existsSync(newDir) && existsSync(legacyDir)) {
|
|
22
|
+
return legacyDir;
|
|
23
|
+
}
|
|
24
|
+
if (!existsSync(newDir)) {
|
|
25
|
+
mkdirSync(newDir, { recursive: true });
|
|
26
|
+
}
|
|
27
|
+
return newDir;
|
|
28
|
+
}
|
package/dist/recipes/storage.js
CHANGED
|
@@ -1,9 +1,9 @@
|
|
|
1
|
-
// Recipes storage — global (~/.terminal/recipes.json) + per-project (.terminal/recipes.json)
|
|
1
|
+
// Recipes storage — global (~/.hasna/terminal/recipes.json) + per-project (.terminal/recipes.json)
|
|
2
2
|
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
|
|
3
|
-
import { homedir } from "os";
|
|
4
3
|
import { join } from "path";
|
|
5
4
|
import { genId, extractVariables } from "./model.js";
|
|
6
|
-
|
|
5
|
+
import { getTerminalDir } from "../paths.js";
|
|
6
|
+
const GLOBAL_DIR = getTerminalDir();
|
|
7
7
|
const GLOBAL_FILE = join(GLOBAL_DIR, "recipes.json");
|
|
8
8
|
function projectFile(projectPath) {
|
|
9
9
|
return join(projectPath, ".terminal", "recipes.json");
|
package/dist/session-context.js
CHANGED
|
@@ -1,9 +1,9 @@
|
|
|
1
1
|
// Session context — stores last N command+output pairs for follow-up queries
|
|
2
2
|
// Enables: terminal "show auth code" → terminal "explain that function"
|
|
3
3
|
import { existsSync, readFileSync, writeFileSync, mkdirSync } from "fs";
|
|
4
|
-
import { homedir } from "os";
|
|
5
4
|
import { join } from "path";
|
|
6
|
-
|
|
5
|
+
import { getTerminalDir } from "./paths.js";
|
|
6
|
+
const DIR = getTerminalDir();
|
|
7
7
|
const CTX_FILE = join(DIR, "session-context.json");
|
|
8
8
|
const MAX_ENTRIES = 5;
|
|
9
9
|
function ensureDir() {
|
package/dist/sessions-db.js
CHANGED
|
@@ -1,19 +1,18 @@
|
|
|
1
1
|
// SQLite session database — tracks every terminal interaction
|
|
2
|
-
|
|
3
|
-
import { Database } from "bun:sqlite";
|
|
2
|
+
import { SqliteAdapter } from "@hasna/cloud";
|
|
4
3
|
import { existsSync, mkdirSync } from "fs";
|
|
5
|
-
import { homedir } from "os";
|
|
6
4
|
import { join } from "path";
|
|
7
5
|
import { randomUUID } from "crypto";
|
|
8
|
-
|
|
9
|
-
const
|
|
6
|
+
import { getTerminalDir } from "./paths.js";
|
|
7
|
+
const DIR = getTerminalDir();
|
|
8
|
+
const DB_PATH = process.env.HASNA_TERMINAL_DB_PATH ?? process.env.TERMINAL_DB_PATH ?? join(DIR, "sessions.db");
|
|
10
9
|
let db = null;
|
|
11
10
|
function getDb() {
|
|
12
11
|
if (db)
|
|
13
12
|
return db;
|
|
14
13
|
if (!existsSync(DIR))
|
|
15
14
|
mkdirSync(DIR, { recursive: true });
|
|
16
|
-
db = new
|
|
15
|
+
db = new SqliteAdapter(DB_PATH);
|
|
17
16
|
db.exec("PRAGMA journal_mode = WAL");
|
|
18
17
|
db.exec(`
|
|
19
18
|
CREATE TABLE IF NOT EXISTS sessions (
|
|
@@ -68,6 +67,16 @@ function getDb() {
|
|
|
68
67
|
);
|
|
69
68
|
|
|
70
69
|
CREATE INDEX IF NOT EXISTS idx_corrections_prompt ON corrections(prompt);
|
|
70
|
+
|
|
71
|
+
CREATE TABLE IF NOT EXISTS feedback (
|
|
72
|
+
id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(16)))),
|
|
73
|
+
message TEXT NOT NULL,
|
|
74
|
+
email TEXT,
|
|
75
|
+
category TEXT DEFAULT 'general',
|
|
76
|
+
version TEXT,
|
|
77
|
+
machine_id TEXT,
|
|
78
|
+
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
|
79
|
+
);
|
|
71
80
|
`);
|
|
72
81
|
return db;
|
|
73
82
|
}
|
package/dist/snapshots.js
CHANGED
|
@@ -22,12 +22,12 @@ export function captureSnapshot() {
|
|
|
22
22
|
port: p.port,
|
|
23
23
|
uptime: Date.now() - p.startedAt,
|
|
24
24
|
}));
|
|
25
|
-
// Recent commands (last 10
|
|
25
|
+
// Recent commands (last 10)
|
|
26
26
|
const history = loadHistory().slice(-10);
|
|
27
27
|
const recentCommands = history.map(h => ({
|
|
28
28
|
cmd: h.cmd,
|
|
29
29
|
exitCode: h.error,
|
|
30
|
-
|
|
30
|
+
intent: h.nl !== h.cmd ? h.nl : undefined, // user's original NL intent, not AI-generated
|
|
31
31
|
}));
|
|
32
32
|
// Project recipes
|
|
33
33
|
const recipes = listRecipes(process.cwd()).slice(0, 10).map(r => ({
|
package/dist/tool-profiles.js
CHANGED
|
@@ -1,9 +1,10 @@
|
|
|
1
1
|
// Tool profiles — config-driven AI enhancement for specific command categories
|
|
2
|
-
// Profiles are loaded from ~/.terminal/profiles/ (user-customizable)
|
|
2
|
+
// Profiles are loaded from ~/.hasna/terminal/profiles/ (user-customizable)
|
|
3
3
|
// Each profile tells the AI how to handle a specific tool's output
|
|
4
4
|
import { existsSync, readFileSync, readdirSync } from "fs";
|
|
5
5
|
import { join } from "path";
|
|
6
|
-
|
|
6
|
+
import { getTerminalDir } from "./paths.js";
|
|
7
|
+
const PROFILES_DIR = join(getTerminalDir(), "profiles");
|
|
7
8
|
/** Built-in profiles — sensible defaults, user can override */
|
|
8
9
|
const BUILTIN_PROFILES = [
|
|
9
10
|
{
|
|
@@ -68,7 +69,7 @@ const BUILTIN_PROFILES = [
|
|
|
68
69
|
},
|
|
69
70
|
},
|
|
70
71
|
];
|
|
71
|
-
/** Load user profiles from ~/.terminal/profiles/ */
|
|
72
|
+
/** Load user profiles from ~/.hasna/terminal/profiles/ */
|
|
72
73
|
function loadUserProfiles() {
|
|
73
74
|
if (!existsSync(PROFILES_DIR))
|
|
74
75
|
return [];
|
package/dist/usage-cache.js
CHANGED
|
@@ -1,10 +1,10 @@
|
|
|
1
1
|
// Usage learning cache — zero-cost repeated queries
|
|
2
2
|
// After 3 identical prompt→command mappings, cache locally
|
|
3
3
|
import { existsSync, readFileSync, writeFileSync, mkdirSync } from "fs";
|
|
4
|
-
import { homedir } from "os";
|
|
5
4
|
import { join } from "path";
|
|
6
5
|
import { createHash } from "crypto";
|
|
7
|
-
|
|
6
|
+
import { getTerminalDir } from "./paths.js";
|
|
7
|
+
const DIR = getTerminalDir();
|
|
8
8
|
const CACHE_FILE = join(DIR, "learned.json");
|
|
9
9
|
function ensureDir() {
|
|
10
10
|
if (!existsSync(DIR))
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@hasna/terminal",
|
|
3
|
-
"version": "4.3.
|
|
3
|
+
"version": "4.3.2",
|
|
4
4
|
"description": "Smart terminal wrapper for AI agents and humans — structured output, token compression, MCP server, natural language",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"files": [
|
|
@@ -22,6 +22,7 @@
|
|
|
22
22
|
},
|
|
23
23
|
"dependencies": {
|
|
24
24
|
"@anthropic-ai/sdk": "^0.39.0",
|
|
25
|
+
"@hasna/cloud": "^0.1.0",
|
|
25
26
|
"@hasna/mementos": "^0.10.0",
|
|
26
27
|
"@modelcontextprotocol/sdk": "^1.27.1",
|
|
27
28
|
"@typescript/vfs": "^1.6.4",
|
|
@@ -29,9 +30,10 @@
|
|
|
29
30
|
"react": "^18.2.0",
|
|
30
31
|
"zod": "^4.3.6"
|
|
31
32
|
},
|
|
33
|
+
"license": "Apache-2.0",
|
|
32
34
|
"publishConfig": {
|
|
33
|
-
"
|
|
34
|
-
"
|
|
35
|
+
"registry": "https://registry.npmjs.org",
|
|
36
|
+
"access": "public"
|
|
35
37
|
},
|
|
36
38
|
"repository": {
|
|
37
39
|
"type": "git",
|
package/src/Onboarding.tsx
CHANGED
|
@@ -76,7 +76,7 @@ export default function Onboarding({ onDone }: Props) {
|
|
|
76
76
|
})}
|
|
77
77
|
</Box>
|
|
78
78
|
<Box marginTop={1}><Text dimColor>space toggle · enter confirm</Text></Box>
|
|
79
|
-
<Text dimColor>edit later: ~/.terminal/config.json</Text>
|
|
79
|
+
<Text dimColor>edit later: ~/.hasna/terminal/config.json</Text>
|
|
80
80
|
</Box>
|
|
81
81
|
);
|
|
82
82
|
}
|
package/src/ai.ts
CHANGED
|
@@ -3,10 +3,11 @@ import { cacheGet, cacheSet } from "./cache.js";
|
|
|
3
3
|
import { getProvider } from "./providers/index.js";
|
|
4
4
|
import { existsSync, readFileSync } from "fs";
|
|
5
5
|
import { join } from "path";
|
|
6
|
+
import { getTerminalDir } from "./paths.js";
|
|
6
7
|
import { discoverProjectHints, discoverSafetyHints, formatHints } from "./context-hints.js";
|
|
7
8
|
|
|
8
9
|
// ── model routing ─────────────────────────────────────────────────────────────
|
|
9
|
-
// Config-driven model selection. Defaults per provider, user can override in ~/.terminal/config.json
|
|
10
|
+
// Config-driven model selection. Defaults per provider, user can override in ~/.hasna/terminal/config.json
|
|
10
11
|
|
|
11
12
|
const COMPLEX_SIGNALS = [
|
|
12
13
|
/\b(undo|revert|rollback|previous|last)\b/i,
|
|
@@ -18,7 +19,7 @@ const COMPLEX_SIGNALS = [
|
|
|
18
19
|
/[|&;]{2}/,
|
|
19
20
|
];
|
|
20
21
|
|
|
21
|
-
/** Default models per provider — user can override in ~/.terminal/config.json under "models" */
|
|
22
|
+
/** Default models per provider — user can override in ~/.hasna/terminal/config.json under "models" */
|
|
22
23
|
const MODEL_DEFAULTS: Record<string, { fast: string; smart: string }> = {
|
|
23
24
|
cerebras: { fast: "qwen-3-235b-a22b-instruct-2507", smart: "qwen-3-235b-a22b-instruct-2507" },
|
|
24
25
|
groq: { fast: "openai/gpt-oss-120b", smart: "moonshotai/kimi-k2-instruct" },
|
|
@@ -26,7 +27,7 @@ const MODEL_DEFAULTS: Record<string, { fast: string; smart: string }> = {
|
|
|
26
27
|
anthropic: { fast: "claude-haiku-4-5-20251001", smart: "claude-sonnet-4-6" },
|
|
27
28
|
};
|
|
28
29
|
|
|
29
|
-
/** Load user model overrides from ~/.terminal/config.json (cached 30s) */
|
|
30
|
+
/** Load user model overrides from ~/.hasna/terminal/config.json (cached 30s) */
|
|
30
31
|
let _modelOverrides: Record<string, { fast?: string; smart?: string }> | null = null;
|
|
31
32
|
let _modelOverridesAt = 0;
|
|
32
33
|
|
|
@@ -34,7 +35,7 @@ function loadModelOverrides(): Record<string, { fast?: string; smart?: string }>
|
|
|
34
35
|
const now = Date.now();
|
|
35
36
|
if (_modelOverrides && now - _modelOverridesAt < 30_000) return _modelOverrides;
|
|
36
37
|
try {
|
|
37
|
-
const configPath = join(
|
|
38
|
+
const configPath = join(getTerminalDir(), "config.json");
|
|
38
39
|
if (existsSync(configPath)) {
|
|
39
40
|
const config = JSON.parse(readFileSync(configPath, "utf8"));
|
|
40
41
|
_modelOverrides = config.models ?? {};
|
|
@@ -215,9 +216,9 @@ function buildSystemPrompt(perms: Permissions, sessionEntries: SessionEntry[], c
|
|
|
215
216
|
list files in current directory → ls
|
|
216
217
|
list all files including hidden → ls -a
|
|
217
218
|
show open files → lsof
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
219
|
+
show file size → du -sh file
|
|
220
|
+
show file type → file filename
|
|
221
|
+
show file permissions → ls -la file
|
|
221
222
|
display routing table → route
|
|
222
223
|
show last logged in users → last
|
|
223
224
|
show file stats → stat file
|
|
@@ -235,7 +236,7 @@ system utilization stats → vmstat
|
|
|
235
236
|
DNS servers → cat /etc/resolv.conf | grep nameserver
|
|
236
237
|
long integer size → getconf LONG_BIT
|
|
237
238
|
base64 decode string → echo 'str' | base64 -d
|
|
238
|
-
|
|
239
|
+
show file owner → ls -la file
|
|
239
240
|
unique lines in file → uniq file
|
|
240
241
|
max cpu time → ulimit -t
|
|
241
242
|
memory info → lsmem
|
package/src/cache.ts
CHANGED
|
@@ -1,10 +1,10 @@
|
|
|
1
1
|
// In-memory LRU cache + disk persistence for command translations
|
|
2
2
|
|
|
3
3
|
import { existsSync, readFileSync, writeFileSync } from "fs";
|
|
4
|
-
import { homedir } from "os";
|
|
5
4
|
import { join } from "path";
|
|
5
|
+
import { getTerminalDir } from "./paths.js";
|
|
6
6
|
|
|
7
|
-
const CACHE_FILE = join(
|
|
7
|
+
const CACHE_FILE = join(getTerminalDir(), "cache.json");
|
|
8
8
|
const MAX_ENTRIES = 500;
|
|
9
9
|
|
|
10
10
|
type CacheMap = Record<string, string>;
|
package/src/economy.ts
CHANGED
|
@@ -1,10 +1,10 @@
|
|
|
1
1
|
// Token economy tracker — tracks token savings across all interactions
|
|
2
2
|
|
|
3
3
|
import { existsSync, readFileSync, writeFileSync, mkdirSync } from "fs";
|
|
4
|
-
import { homedir } from "os";
|
|
5
4
|
import { join } from "path";
|
|
5
|
+
import { getTerminalDir } from "./paths.js";
|
|
6
6
|
|
|
7
|
-
const DIR =
|
|
7
|
+
const DIR = getTerminalDir();
|
|
8
8
|
const ECONOMY_FILE = join(DIR, "economy.json");
|
|
9
9
|
|
|
10
10
|
export interface EconomyStats {
|
|
@@ -129,7 +129,7 @@ const PROVIDER_PRICING: Record<string, { input: number; output: number }> = {
|
|
|
129
129
|
"anthropic-opus": { input: 5.00, output: 25.00 },
|
|
130
130
|
};
|
|
131
131
|
|
|
132
|
-
/** Load configurable turns-before-compaction from ~/.terminal/config.json */
|
|
132
|
+
/** Load configurable turns-before-compaction from ~/.hasna/terminal/config.json */
|
|
133
133
|
function loadTurnsMultiplier(): number {
|
|
134
134
|
try {
|
|
135
135
|
const configPath = join(DIR, "config.json");
|