terry-core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audit.d.ts +3 -0
- package/dist/audit.js +17 -0
- package/dist/config.d.ts +13 -0
- package/dist/config.js +107 -0
- package/dist/explain.d.ts +3 -0
- package/dist/explain.js +11 -0
- package/dist/health.d.ts +2 -0
- package/dist/health.js +105 -0
- package/dist/index.d.ts +17 -0
- package/dist/index.js +17 -0
- package/dist/orchestrator.d.ts +2 -0
- package/dist/orchestrator.js +58 -0
- package/dist/patches.d.ts +12 -0
- package/dist/patches.js +126 -0
- package/dist/permissions.d.ts +6 -0
- package/dist/permissions.js +20 -0
- package/dist/policy.d.ts +9 -0
- package/dist/policy.js +33 -0
- package/dist/repo.d.ts +6 -0
- package/dist/repo.js +214 -0
- package/dist/sandbox.d.ts +1 -0
- package/dist/sandbox.js +9 -0
- package/dist/sessions.d.ts +6 -0
- package/dist/sessions.js +57 -0
- package/dist/status.d.ts +2 -0
- package/dist/status.js +22 -0
- package/dist/templates.d.ts +7 -0
- package/dist/templates.js +44 -0
- package/dist/tools.d.ts +7 -0
- package/dist/tools.js +148 -0
- package/dist/types.d.ts +199 -0
- package/dist/types.js +1 -0
- package/dist/usage.d.ts +3 -0
- package/dist/usage.js +41 -0
- package/dist/workspace-data.d.ts +8 -0
- package/dist/workspace-data.js +45 -0
- package/package.json +52 -0
package/dist/repo.js
ADDED
|
@@ -0,0 +1,214 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import fg from "fast-glob";
|
|
4
|
+
import { ensureWorkspaceDataDirs, readJsonFile, workspaceDataFile, writeJsonFile } from "./workspace-data.js";
|
|
5
|
+
// File (inside the workspace data dir) caching the dependency graph
// produced by scanWorkspaceGraph.
const INDEX_FILE = "repo-index.json";
// Companion file tracking the separately generated semantic index;
// initialized to a {state: "pending"} stub by scanWorkspaceGraph.
const SEMANTIC_FILE = "semantic-index.json";
|
|
7
|
+
/**
 * Infer a coarse technology profile from a flat list of workspace-relative
 * file paths: primary language, framework, build system, test runner and
 * package manager. Each category takes the FIRST matching rule (rule order
 * encodes precedence); categories with no match report "unknown".
 */
function detectProfile(files) {
  // Exact match against a top-level filename.
  const hasExact = (name) => files.includes(name);
  // True when any file path matches any of the given patterns.
  const matchesAny = (patterns) => files.some((candidate) => patterns.some((pattern) => pattern.test(candidate)));
  // Return the label of the first rule whose predicate passes.
  const pick = (rules) => {
    for (const [label, predicate] of rules) {
      if (predicate())
        return label;
    }
    return "unknown";
  };

  const language = pick([
    ["typescript", () => matchesAny([/\.ts$/, /\.tsx$/])],
    ["javascript", () => matchesAny([/\.js$/, /\.jsx$/])],
    ["python", () => matchesAny([/\.py$/])],
    ["go", () => matchesAny([/\.go$/])]
  ]);
  const framework = pick([
    ["nextjs", () => hasExact("next.config.js") || hasExact("next.config.ts")],
    ["vite", () => hasExact("vite.config.ts") || hasExact("vite.config.js")],
    ["angular", () => hasExact("angular.json")],
    ["django", () => matchesAny([/manage\.py$/])],
    ["go-mod", () => matchesAny([/go\.mod$/])]
  ]);
  const buildSystem = pick([
    ["turbo", () => matchesAny([/turbo\.json$/])],
    ["nx", () => matchesAny([/nx\.json$/])],
    ["vite", () => matchesAny([/vite\.config\.(ts|js)$/])],
    ["webpack", () => matchesAny([/webpack\.config\.(ts|js)$/])]
  ]);
  const testRunner = pick([
    ["vitest", () => matchesAny([/vitest\.config\.(ts|js)$/])],
    ["jest", () => matchesAny([/jest\.config\.(ts|js|mjs|cjs)$/])],
    ["pytest", () => matchesAny([/pytest\.ini$/, /conftest\.py$/])],
    ["go-test", () => matchesAny([/_test\.go$/])]
  ]);
  const packageManager = pick([
    ["pnpm", () => hasExact("pnpm-lock.yaml")],
    ["npm", () => hasExact("package-lock.json")],
    ["yarn", () => hasExact("yarn.lock")],
    ["go", () => hasExact("go.mod")],
    ["pip", () => hasExact("requirements.txt")]
  ]);

  return { language, framework, buildSystem, testRunner, packageManager };
}
|
|
61
|
+
/**
 * Wrap a workspace-relative file path as a dependency-graph node.
 * The path doubles as the node id, and every scanned entry is a "file".
 */
function fileToNode(filePath) {
  const node = { id: filePath, path: filePath, kind: "file" };
  return node;
}
|
|
64
|
+
/**
 * Resolve a relative import specifier against the importing file.
 *
 * Returns an ordered list of candidate workspace-relative paths: the bare
 * joined path first, then the path with common source extensions appended,
 * then directory-index modules (e.g. "./utils" -> "utils/index.ts"), which
 * is how Node and bundlers resolve directory imports. The caller picks the
 * first candidate that exists in the scanned file set.
 *
 * Non-relative specifiers (bare package names, absolute paths) return null
 * since they cannot refer to files inside the workspace.
 *
 * @param {string} sourceFile workspace-relative path of the importing file
 * @param {string} specifier  raw import specifier as written in the source
 * @returns {string[] | null} ordered candidate paths, or null
 */
function normalizeImportPath(sourceFile, specifier) {
  if (!specifier.startsWith("."))
    return null;
  // Normalize to posix separators so candidates match the scanned file set.
  const sourceDir = path.posix.dirname(sourceFile.replace(/\\/g, "/"));
  const base = path.posix.normalize(path.posix.join(sourceDir, specifier));
  const extensions = [".ts", ".tsx", ".js", ".jsx", ".py", ".go"];
  const candidates = [base, ...extensions.map((ext) => `${base}${ext}`)];
  // Directory imports: try index files last so direct file matches win.
  for (const ext of [".ts", ".tsx", ".js", ".jsx"]) {
    candidates.push(`${base}/index${ext}`);
  }
  return candidates;
}
|
|
72
|
+
/**
 * Pull import/require module specifiers out of a source file's text.
 *
 * JS/TS files: handles `import ... from "x"`, CommonJS `require("x")`,
 * dynamic `import("x")`, and bare side-effect imports (`import "x"`) —
 * the last two were previously missed, dropping real graph edges.
 * Python files: handles `import pkg` and `from pkg import name` at line
 * starts. Other file types yield no specifiers; empty matches are dropped.
 *
 * @param {string} filePath workspace-relative path (drives language choice)
 * @param {string} content  full file text
 * @returns {string[]} raw specifiers in order of appearance
 */
function extractImportSpecifiers(filePath, content) {
  const specifiers = [];
  if (/\.(ts|tsx|js|jsx)$/.test(filePath)) {
    const jsImports = /\bfrom\s+["']([^"']+)["']|\brequire\(\s*["']([^"']+)["']\s*\)|\bimport\s*\(\s*["']([^"']+)["']\s*\)|\bimport\s+["']([^"']+)["']/g;
    for (const match of content.matchAll(jsImports)) {
      specifiers.push((match[1] ?? match[2] ?? match[3] ?? match[4] ?? "").trim());
    }
  }
  else if (/\.py$/.test(filePath)) {
    const pyImports = /^\s*(?:from\s+([a-zA-Z0-9_\.]+)\s+import|import\s+([a-zA-Z0-9_\.]+))/gm;
    for (const match of content.matchAll(pyImports)) {
      specifiers.push((match[1] ?? match[2] ?? "").trim());
    }
  }
  return specifiers.filter(Boolean);
}
|
|
92
|
+
/**
 * Scan a workspace and build its import dependency graph.
 *
 * Globs all files (skipping vendored/build dirs), creates one node per
 * file, and for the first 5000 files parses import specifiers and adds an
 * edge to each relative import that resolves to a scanned file. Persists
 * the graph to the repo index file, seeds a "pending" semantic-index stub
 * if none exists, and returns the graph.
 *
 * @param {string} workspacePath absolute workspace root
 * @returns {Promise<object>} the freshly built graph
 */
export async function scanWorkspaceGraph(workspacePath) {
  await ensureWorkspaceDataDirs(workspacePath);
  const files = await fg(["**/*"], {
    cwd: workspacePath,
    onlyFiles: true,
    ignore: ["node_modules/**", ".git/**", "dist/**", ".next/**", "coverage/**"]
  });
  const nodes = files.map(fileToNode);
  // Posix-normalized path set used to resolve import candidates.
  const fileSet = new Set(files.map((f) => f.replace(/\\/g, "/")));
  const edges = [];
  // Cap edge extraction at 5000 files to bound scan time on huge repos.
  for (const file of files.slice(0, 5000)) {
    const full = path.join(workspacePath, file);
    // Unreadable/binary-failing files are treated as empty and skipped.
    const content = await fs.readFile(full, "utf8").catch(() => "");
    if (!content)
      continue;
    const imports = extractImportSpecifiers(file, content);
    for (const specifier of imports) {
      // null means a non-relative (package) import — not a workspace edge.
      const candidates = normalizeImportPath(file, specifier);
      if (!candidates)
        continue;
      // First candidate that exists wins (extension priority order).
      const target = candidates.find((candidate) => fileSet.has(candidate));
      if (!target)
        continue;
      edges.push({ from: file.replace(/\\/g, "/"), to: target, type: "import" });
    }
  }
  const profile = detectProfile(files);
  const graph = {
    nodes,
    edges,
    metadata: {
      generatedAt: new Date().toISOString(),
      workspacePath,
      profile
    }
  };
  await writeJsonFile(workspaceDataFile(workspacePath, INDEX_FILE), graph);
  // Seed the semantic index marker once; never overwrite an existing one.
  const semantic = await readJsonFile(workspaceDataFile(workspacePath, SEMANTIC_FILE), null);
  if (!semantic) {
    await writeJsonFile(workspaceDataFile(workspacePath, SEMANTIC_FILE), {
      state: "pending",
      generatedAt: null
    });
  }
  return graph;
}
|
|
138
|
+
/**
 * Load the cached repo index graph for a workspace, or null when no scan
 * has been persisted yet. Ensures the data directory exists first so a
 * first-time read does not fail on a missing folder.
 */
export async function getWorkspaceGraph(workspacePath) {
  await ensureWorkspaceDataDirs(workspacePath);
  const indexPath = workspaceDataFile(workspacePath, INDEX_FILE);
  const graph = await readJsonFile(indexPath, null);
  return graph;
}
|
|
142
|
+
/**
 * Render a graph's file nodes as a flat directory listing: one line per
 * directory (sorted), each followed by ` - <basename>` lines for its
 * files (sorted). Returns the joined multi-line string.
 */
export function graphToTree(graph) {
  // Bucket basenames by their posix directory.
  const byDirectory = new Map();
  for (const { path: nodePath } of graph.nodes) {
    const directory = path.posix.dirname(nodePath);
    if (!byDirectory.has(directory))
      byDirectory.set(directory, []);
    byDirectory.get(directory).push(path.posix.basename(nodePath));
  }
  const output = [];
  for (const directory of [...byDirectory.keys()].sort()) {
    output.push(directory);
    for (const baseName of byDirectory.get(directory).sort())
      output.push(` - ${baseName}`);
  }
  return output.join("\n");
}
|
|
160
|
+
/**
 * Suggest the most central file as a likely next work target: the node
 * with the most inbound import edges. Ties go to the first target seen in
 * edge order. Returns undefined for an empty graph or one with no edges.
 */
export function suggestTarget(graph) {
  if (graph.nodes.length === 0)
    return undefined;
  const inboundCounts = new Map();
  for (const { to } of graph.edges) {
    inboundCounts.set(to, (inboundCounts.get(to) ?? 0) + 1);
  }
  let best;
  let bestCount = 0;
  for (const [candidate, count] of inboundCounts) {
    if (count > bestCount) {
      best = candidate;
      bestCount = count;
    }
  }
  return best;
}
|
|
169
|
+
/**
 * Report the blast radius of changing `target`: every file that directly
 * or transitively imports it, found by BFS over reversed import edges.
 * Confidence scales with the number of affected files (capped at 1) and
 * degrades gracefully when the index is missing or the target is unknown.
 */
export async function calculateImpact(workspacePath, target) {
  const graph = await getWorkspaceGraph(workspacePath);
  if (!graph) {
    return {
      target,
      affectedFiles: [],
      confidence: 0,
      rationale: "No repo index found. Run `terry repo scan` first."
    };
  }
  const normalized = target.replace(/\\/g, "/");
  // First node whose path equals or ends with the normalized target.
  const match = graph.nodes.find((node) => node.path === normalized || node.path.endsWith(normalized));
  if (!match) {
    return {
      target,
      affectedFiles: [],
      confidence: 0.2,
      rationale: "Target not found in graph index."
    };
  }
  const root = match.path;
  // Invert the edge list: imported file -> list of importers.
  const importersOf = new Map();
  for (const { from, to } of graph.edges) {
    if (!importersOf.has(to))
      importersOf.set(to, []);
    importersOf.get(to).push(from);
  }
  // BFS with an index cursor (no O(n) shift). The root itself only joins
  // the affected set if a cycle leads back to it.
  const affected = new Set();
  const queue = [root];
  for (let head = 0; head < queue.length; head += 1) {
    for (const importer of importersOf.get(queue[head]) ?? []) {
      if (!affected.has(importer)) {
        affected.add(importer);
        queue.push(importer);
      }
    }
  }
  return {
    target: root,
    affectedFiles: [...affected].sort(),
    confidence: Math.min(1, 0.4 + affected.size * 0.02),
    rationale: `Computed reverse dependency traversal from ${root}.`
  };
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Resolves `relPath` inside `workspacePath` and returns the absolute path;
// the implementation (sandbox.js) throws when the path escapes the workspace.
export declare function resolveInWorkspace(workspacePath: string, relPath: string): string;
|
package/dist/sandbox.js
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
/**
 * Resolve `relPath` inside `workspacePath`, rejecting any path that would
 * escape the workspace root (e.g. `..` segments or an absolute path).
 * The workspace root itself is allowed.
 *
 * @returns {string} the absolute resolved path
 * @throws {Error} when the resolved path lies outside the workspace
 */
export function resolveInWorkspace(workspacePath, relPath) {
  const root = path.resolve(workspacePath);
  // An empty/falsy relPath means the workspace root.
  const candidate = path.resolve(root, relPath || ".");
  // Separator-suffixed prefix check avoids matching siblings like "ws2".
  const insideRoot = candidate === root || candidate.startsWith(`${root}${path.sep}`);
  if (!insideRoot) {
    throw new Error("Path escapes workspace sandbox.");
  }
  return candidate;
}
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import type { AgentMode, ExecutionMode, SessionRecord, SessionStep } from "./types.js";
// Create and persist a new session record for `task`; returns it.
export declare function startSession(workspacePath: string, task: string, mode: AgentMode, executionMode: ExecutionMode): Promise<SessionRecord>;
// Load a session by id; null when no record exists.
export declare function getSession(workspacePath: string, id: string): Promise<SessionRecord | null>;
// Append one step to an existing session; throws if the id is unknown.
export declare function appendSessionStep(workspacePath: string, id: string, step: SessionStep): Promise<SessionRecord>;
// Record a session's final result and finish timestamp; throws if unknown.
export declare function finishSession(workspacePath: string, id: string, result: SessionRecord["result"]): Promise<SessionRecord>;
// List sessions newest-first, capped at `limit` (implementation default 50).
export declare function listSessions(workspacePath: string, limit?: number): Promise<SessionRecord[]>;
|
package/dist/sessions.js
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import { workspaceHash, workspaceSubdir, writeJsonFile, readJsonFile, listJsonFiles, ensureWorkspaceDataDirs } from "./workspace-data.js";
|
|
3
|
+
/** Directory holding this workspace's session JSON records. */
function sessionsDir(workspacePath) {
  const dir = workspaceSubdir(workspacePath, "sessions");
  return dir;
}
|
|
6
|
+
/** Absolute path of a single session's JSON record file. */
function sessionPath(workspacePath, id) {
  const fileName = `${id}.json`;
  return path.join(sessionsDir(workspacePath), fileName);
}
|
|
9
|
+
/**
 * Generate a unique session id: millisecond timestamp plus a random
 * suffix. Prefers crypto.randomUUID (global on modern Node) because a
 * 6-char Math.random suffix can collide for sessions started in the same
 * millisecond; falls back to the original base36 scheme on old runtimes.
 *
 * @returns {string} e.g. "1700000000000-9f2c4a1b"
 */
function sessionId() {
  const uuid = globalThis.crypto?.randomUUID?.();
  const suffix = uuid ? uuid.slice(0, 8) : Math.random().toString(36).slice(2, 8);
  return `${Date.now()}-${suffix}`;
}
|
|
12
|
+
/**
 * Create and persist a fresh session record for a task, returning it.
 * The record starts with an empty step list; steps and the final result
 * are added later via appendSessionStep / finishSession.
 */
export async function startSession(workspacePath, task, mode, executionMode) {
  await ensureWorkspaceDataDirs(workspacePath);
  const startedAt = new Date().toISOString();
  const record = {
    id: sessionId(),
    workspaceHash: workspaceHash(workspacePath),
    workspacePath,
    task,
    startedAt,
    mode,
    executionMode,
    steps: []
  };
  await writeJsonFile(sessionPath(workspacePath, record.id), record);
  return record;
}
|
|
27
|
+
/** Fetch a session record by id; resolves to null when it does not exist. */
export async function getSession(workspacePath, id) {
  const record = await readJsonFile(sessionPath(workspacePath, id), null);
  return record;
}
|
|
30
|
+
/**
 * Append one step to an existing session and persist the updated record.
 * @throws {Error} when no session with the given id exists.
 */
export async function appendSessionStep(workspacePath, id, step) {
  const session = await getSession(workspacePath, id);
  if (!session) {
    throw new Error(`Session not found: ${id}`);
  }
  session.steps = [...session.steps, step];
  await writeJsonFile(sessionPath(workspacePath, id), session);
  return session;
}
|
|
38
|
+
/**
 * Mark a session finished: store its result and completion timestamp,
 * persist the record, and return it.
 * @throws {Error} when no session with the given id exists.
 */
export async function finishSession(workspacePath, id, result) {
  const session = await getSession(workspacePath, id);
  if (!session) {
    throw new Error(`Session not found: ${id}`);
  }
  session.result = result;
  session.finishedAt = new Date().toISOString();
  await writeJsonFile(sessionPath(workspacePath, id), session);
  return session;
}
|
|
47
|
+
/**
 * List a workspace's sessions, newest first (by ISO start timestamp),
 * capped at `limit` (default 50).
 *
 * The per-file reads are independent, so they now run in parallel via
 * Promise.all instead of being awaited one at a time in a loop.
 * Unreadable or empty records are dropped, as before.
 */
export async function listSessions(workspacePath, limit = 50) {
  await ensureWorkspaceDataDirs(workspacePath);
  const dir = sessionsDir(workspacePath);
  const files = await listJsonFiles(dir);
  const records = await Promise.all(files.map((file) => readJsonFile(path.join(dir, file), null)));
  return records
    .filter(Boolean)
    .sort((a, b) => b.startedAt.localeCompare(a.startedAt))
    .slice(0, limit);
}
|
package/dist/status.d.ts
ADDED
package/dist/status.js
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { getWorkspaceSettings } from "./config.js";
|
|
2
|
+
import { getLastExplanation } from "./explain.js";
|
|
3
|
+
import { listPatchProposals } from "./patches.js";
|
|
4
|
+
import { getWorkspaceGraph, suggestTarget } from "./repo.js";
|
|
5
|
+
/**
 * Aggregate a workspace status snapshot: configured modes, pending patch
 * proposals and their unapproved hunk count, the most recent explanation,
 * and repo-index freshness plus a suggested next target.
 *
 * The four underlying reads are independent, so they run in parallel via
 * Promise.all instead of being awaited sequentially.
 */
export async function getWorkspaceStatus(workspacePath) {
  const [settings, pending, last, graph] = await Promise.all([
    getWorkspaceSettings(workspacePath),
    listPatchProposals(workspacePath, "pending"),
    getLastExplanation(workspacePath),
    getWorkspaceGraph(workspacePath)
  ]);
  // Hunks still awaiting human approval across all pending proposals.
  const unresolvedApprovals = pending.reduce(
    (count, proposal) => count + proposal.hunks.filter((hunk) => !hunk.approved).length,
    0
  );
  return {
    workspacePath,
    mode: settings.mode,
    executionMode: settings.executionMode,
    pendingPatchCount: pending.length,
    unresolvedApprovals,
    lastActionAt: last?.timestamp,
    lastSummary: last?.summary,
    repoProfile: graph?.metadata.profile,
    repoIndexFreshAt: graph?.metadata.generatedAt,
    nextSuggestedTarget: graph ? suggestTarget(graph) : undefined
  };
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
// Built-in task templates. Each entry has a unique `name` (the lookup key
// used by getTemplate), a human-readable `description`, and an ordered
// list of prompts that walk an agent through the task.
const BUILTIN_TEMPLATES = [
  {
    name: "add-feature",
    description: "Scaffold and implement a new feature with tests and docs updates.",
    prompts: [
      "Scan this repository and identify where a new feature should be added.",
      "Implement the feature with minimal regressions and include tests.",
      "Summarize changed files and migration notes."
    ]
  },
  {
    name: "add-tests",
    description: "Identify missing tests and add targeted coverage.",
    prompts: [
      "Find critical paths lacking test coverage.",
      "Add tests for the highest-risk path first.",
      "Summarize test coverage changes and remaining gaps."
    ]
  },
  {
    name: "refactor-module",
    description: "Refactor a module while preserving external behavior.",
    prompts: [
      "Analyze module boundaries and propose a safe refactor plan.",
      "Apply the refactor with minimal API changes.",
      "Validate with tests and summarize behavioral invariants."
    ]
  },
  {
    name: "upgrade-dependency",
    description: "Safely upgrade dependencies and verify compatibility.",
    prompts: [
      "List outdated dependencies and rank upgrade risk.",
      "Upgrade the selected dependency with changelog-aware updates.",
      "Run checks and summarize compatibility impact."
    ]
  }
];
|
|
39
|
+
/**
 * Return the built-in task templates.
 * Returns a shallow copy so callers cannot mutate the module's template
 * registry in place (the template objects themselves are still shared).
 */
export function listTemplates() {
  return [...BUILTIN_TEMPLATES];
}
|
|
42
|
+
/** Look up a built-in template by name; undefined when no template matches. */
export function getTemplate(name) {
  for (const template of BUILTIN_TEMPLATES) {
    if (template.name === name)
      return template;
  }
  return undefined;
}
|
package/dist/tools.d.ts
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import type { ToolArgsMap, ToolName, ToolResult, WorkspaceSettings } from "./types.js";
// Workspace root and settings every tool invocation runs under.
type ToolContext = {
    workspacePath: string;
    settings: WorkspaceSettings;
};
// Execute the named tool with args typed per-tool via ToolArgsMap.
export declare function executeTool<T extends ToolName>(context: ToolContext, tool: T, args: ToolArgsMap[T]): Promise<ToolResult>;
export {};
|
package/dist/tools.js
ADDED
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import { execFile } from "node:child_process";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { promisify } from "node:util";
|
|
5
|
+
import fg from "fast-glob";
|
|
6
|
+
import { resolveInWorkspace } from "./sandbox.js";
|
|
7
|
+
import { appendAudit } from "./audit.js";
|
|
8
|
+
import { applyPatchProposal, createPatchProposal } from "./patches.js";
|
|
9
|
+
// Promisified execFile: await-able child-process execution without a shell.
const execFileAsync = promisify(execFile);
|
|
10
|
+
/**
 * Build the standard successful ToolResult for a tool skipped under
 * dry-run mode, optionally noting the files it would have touched.
 */
function dryRunResult(tool, files = []) {
  const output = `[dry-run] ${tool} skipped execution.`;
  return { ok: true, output, files };
}
|
|
13
|
+
/**
 * Run a tool and record an audit entry for the attempt.
 *
 * On success the runner's own ok/error fields are logged with its result;
 * on a thrown error the failure is logged and converted into a
 * `{ ok: false, error }` result, so tool execution never throws past this
 * wrapper. The audit-record shape was previously duplicated verbatim in
 * both paths; it is now built by a single helper.
 */
async function withAudit(context, tool, args, affectedFiles, runner) {
  // Fields shared by both outcomes; only ok/error differ. Built lazily so
  // the timestamp still reflects when the entry is written.
  const baseEntry = () => ({
    timestamp: new Date().toISOString(),
    workspace: context.workspacePath,
    tool,
    args,
    affectedFiles: affectedFiles.map((f) => path.relative(context.workspacePath, f)),
    dryRun: context.settings.dryRun
  });
  try {
    const result = await runner();
    await appendAudit({ ...baseEntry(), ok: result.ok, error: result.error });
    return result;
  }
  catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    await appendAudit({ ...baseEntry(), ok: false, error: message });
    return { ok: false, error: message };
  }
}
|
|
43
|
+
/**
 * Dispatch and execute a single tool call inside a workspace sandbox.
 *
 * Every branch is wrapped in withAudit (so each attempt is logged) and
 * honors settings.dryRun by returning a placeholder result before doing
 * real work. File paths are confined via resolveInWorkspace, which throws
 * on sandbox escapes. Unknown tool names yield `{ ok: false }`.
 *
 * NOTE(review): every branch passes [] as withAudit's affectedFiles, so
 * audit entries never list touched files even when they are known —
 * confirm whether that is intentional.
 */
export async function executeTool(context, tool, args) {
  switch (tool) {
    case "listDirectory":
      return withAudit(context, tool, args, [], async () => {
        const parsed = args;
        // Default to the workspace root when no path is given.
        const rel = parsed.pathRelativeToWorkspace ?? ".";
        const full = resolveInWorkspace(context.workspacePath, rel);
        if (context.settings.dryRun)
          return dryRunResult(tool, [full]);
        const entries = await fs.readdir(full, { withFileTypes: true });
        // One line per entry, prefixed "d" (directory) or "f" (file).
        const lines = entries.map((e) => `${e.isDirectory() ? "d" : "f"} ${e.name}`);
        return { ok: true, output: lines.join("\n"), files: [full] };
      });
    case "readFile":
      return withAudit(context, tool, args, [], async () => {
        const parsed = args;
        const full = resolveInWorkspace(context.workspacePath, parsed.pathRelativeToWorkspace);
        if (context.settings.dryRun)
          return dryRunResult(tool, [full]);
        const content = await fs.readFile(full, "utf8");
        return { ok: true, output: content, files: [full] };
      });
    case "writeFile":
      return withAudit(context, tool, args, [], async () => {
        const parsed = args;
        const full = resolveInWorkspace(context.workspacePath, parsed.pathRelativeToWorkspace);
        // A proposal is always created (not persisted under dry-run);
        // autonomous mode pre-approves all hunks.
        const proposal = await createPatchProposal(context.workspacePath, parsed.pathRelativeToWorkspace, parsed.content, {
          persist: !context.settings.dryRun,
          preApproveAll: context.settings.executionMode === "autonomous"
        });
        if (context.settings.dryRun) {
          return {
            ok: true,
            output: `[dry-run] writeFile generated patch preview for ${parsed.pathRelativeToWorkspace}.`,
            files: [full]
          };
        }
        // Safe mode stops at the proposal; a human applies it later.
        if (context.settings.executionMode === "safe") {
          return {
            ok: true,
            output: `Patch proposal created: ${proposal.id}`,
            files: [full],
            patchProposalId: proposal.id
          };
        }
        // Non-safe, non-dry-run: apply the proposal immediately.
        await applyPatchProposal(context.workspacePath, proposal.id);
        return {
          ok: true,
          output: `Applied patch ${proposal.id} to ${parsed.pathRelativeToWorkspace}`,
          files: [full],
          patchProposalId: proposal.id
        };
      });
    case "searchInFiles":
      return withAudit(context, tool, args, [], async () => {
        const parsed = args;
        if (context.settings.dryRun)
          return dryRunResult(tool);
        const files = await fg(["**/*"], {
          cwd: context.workspacePath,
          onlyFiles: true,
          ignore: ["node_modules/**", ".git/**", "dist/**"]
        });
        const hits = [];
        // Bound the scan: at most 2000 files examined, 200 hits returned.
        for (const file of files.slice(0, 2000)) {
          const full = resolveInWorkspace(context.workspacePath, file);
          // Unreadable files are treated as empty (no match).
          const content = await fs.readFile(full, "utf8").catch(() => "");
          if (content.includes(parsed.query))
            hits.push(file);
          if (hits.length >= 200)
            break;
        }
        return { ok: true, output: hits.join("\n") || "No matches.", files: hits };
      });
    case "gitStatus":
      return withAudit(context, tool, args, [], async () => {
        if (context.settings.dryRun)
          return dryRunResult(tool);
        const { stdout, stderr } = await execFileAsync("git", ["status", "--short"], { cwd: context.workspacePath });
        return { ok: true, output: `${stdout}${stderr}`.trim() || "Clean working tree." };
      });
    case "gitDiff":
      return withAudit(context, tool, args, [], async () => {
        const parsed = args;
        // Optional path filter; validated against the sandbox when present.
        const relPath = parsed.path ? resolveInWorkspace(context.workspacePath, parsed.path) : undefined;
        if (context.settings.dryRun)
          return dryRunResult(tool, relPath ? [relPath] : []);
        const diffArgs = ["diff"];
        if (parsed.path)
          diffArgs.push("--", path.relative(context.workspacePath, relPath));
        const { stdout, stderr } = await execFileAsync("git", diffArgs, { cwd: context.workspacePath });
        return { ok: true, output: `${stdout}${stderr}`.trim() || "No diff." };
      });
    case "runCommand":
      return withAudit(context, tool, args, [], async () => {
        const parsed = args;
        const cwd = resolveInWorkspace(context.workspacePath, parsed.cwdRelativeToWorkspace ?? ".");
        if (context.settings.dryRun)
          return dryRunResult(tool, [cwd]);
        // execFile (no shell) with an args array avoids shell injection.
        const { stdout, stderr } = await execFileAsync(parsed.cmd, parsed.args ?? [], { cwd });
        return { ok: true, output: `${stdout}${stderr}`.trim() };
      });
    default:
      return { ok: false, error: "Unknown tool." };
  }
}
|