opencode-autognosis 1.0.0 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +5 -3
- package/dist/index.js +6 -233
- package/dist/services/logger.d.ts +1 -0
- package/dist/services/logger.js +17 -0
- package/dist/system-tools.d.ts +1 -208
- package/dist/system-tools.js +132 -116
- package/package.json +5 -4
package/dist/index.d.ts
CHANGED
package/dist/index.js
CHANGED
@@ -1,235 +1,8 @@
- import {
-
- import * as fs from "node:fs/promises";
- import * as fsSync from "node:fs";
- import * as path from "node:path";
- import { promisify } from "node:util";
- import * as crypto from "node:crypto";
- const execAsync = promisify(exec);
- const PROJECT_ROOT = process.cwd();
- const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
- const CACHE_DIR = path.join(OPENCODE_DIR, "cache");
- async function runCmd(cmd, cwd = PROJECT_ROOT, timeoutMs = 30000) {
- try {
- const { stdout, stderr } = await execAsync(cmd, {
- cwd,
- maxBuffer: 10 * 1024 * 1024,
- timeout: timeoutMs
- });
- return { stdout: stdout.trim(), stderr: stderr.trim() };
- }
- catch (error) {
- if (error.signal === 'SIGTERM' && error.code === undefined) {
- return { stdout: "", stderr: `Command timed out after ${timeoutMs}ms`, error, timedOut: true };
- }
- return { stdout: "", stderr: error.message, error };
- }
- }
- async function checkBinary(bin) {
- const { error } = await runCmd(`${bin} --version`, PROJECT_ROOT, 5000);
- return !error;
- }
- async function ensureCache() {
- await fs.mkdir(CACHE_DIR, { recursive: true });
- }
- async function cleanCache() {
- try {
- const files = await fs.readdir(CACHE_DIR);
- const now = Date.now();
- const MAX_AGE = 7 * 24 * 60 * 60 * 1000;
- let deleted = 0;
- for (const file of files) {
- const filePath = path.join(CACHE_DIR, file);
- const stats = await fs.stat(filePath);
- if (now - stats.mtimeMs > MAX_AGE) {
- await fs.unlink(filePath);
- deleted++;
- }
- }
- return deleted;
- }
- catch (e) {
- return 0;
- }
- }
- async function maintainSymbolIndex() {
- await ensureCache();
- if (!(await checkBinary("ctags"))) {
- return { rebuilt: false, status: "unavailable", reason: "ctags binary missing" };
- }
- const tagsFile = path.join(CACHE_DIR, "tags");
- const fingerprintFile = path.join(CACHE_DIR, "tags.fingerprint");
- const { stdout: head } = await runCmd("git rev-parse HEAD");
- const { stdout: status } = await runCmd("git status --porcelain");
- const currentFingerprint = `${head}\n${status}`;
- let storedFingerprint = "";
- try {
- storedFingerprint = await fs.readFile(fingerprintFile, "utf-8");
- }
- catch (e) { }
- if (currentFingerprint !== storedFingerprint || !fsSync.existsSync(tagsFile)) {
- const { error, stderr } = await runCmd(`ctags -R -f ${tagsFile} --languages=TypeScript,JavaScript,Python,Go,Rust,C++,C .`, PROJECT_ROOT);
- if (error) {
- return { rebuilt: false, status: "failed", reason: stderr };
- }
- await fs.writeFile(fingerprintFile, currentFingerprint);
- return { rebuilt: true, status: "ok" };
- }
- return { rebuilt: false, status: "ok" };
- }
- export const AutognosisPlugin = async (_ctx) => {
- let pendingInitToken = null;
+ import { systemTools } from "./system-tools.js";
+ export default function plugin() {
  return {
-
-
-
- args: {
- mode: tool.schema.enum(["plan", "apply"]).optional().default("plan"),
- token: tool.schema.string().optional()
- },
- async execute({ mode, token }) {
- if (mode === "plan") {
- const checks = { rg: await checkBinary("rg"), fd: await checkBinary("fd"), sg: await checkBinary("sg"), ctags: await checkBinary("ctags"), git: await checkBinary("git") };
- const actions = [];
- if (!fsSync.existsSync(CACHE_DIR))
- actions.push(`Create cache directory: ${CACHE_DIR}`);
- pendingInitToken = crypto.randomBytes(4).toString("hex");
- return JSON.stringify({
- status: "PLAN_READY",
- system_checks: checks,
- planned_actions: actions,
- confirm_token: pendingInitToken,
- instruction: "Call autognosis_init with mode='apply' and the confirm_token."
- }, null, 2);
- }
- else {
- if (!pendingInitToken || token !== pendingInitToken) {
- return JSON.stringify({ status: "ERROR", message: "Invalid token." });
- }
- await ensureCache();
- pendingInitToken = null;
- return JSON.stringify({ status: "SUCCESS", message: "Autognosis initialized." });
- }
- }
- }),
- fast_search: tool({
- description: "Fast search using rg and fd.",
- args: {
- query: tool.schema.string(),
- mode: tool.schema.enum(["filename", "content"]).optional().default("filename"),
- path: tool.schema.string().optional().default(".")
- },
- async execute({ query, mode, path: searchPath }) {
- if (mode === "content") {
- const { stdout } = await runCmd(`rg -n --column "${query}" "${searchPath}"`);
- return stdout.split('\n').slice(0, 50).join('\n') || "No matches.";
- }
- else {
- const { stdout } = await runCmd(`fd "${query}" "${searchPath}"`);
- return stdout.split('\n').slice(0, 50).join('\n') || "No files.";
- }
- }
- }),
- read_slice: tool({
- description: "Read a specific slice of a file.",
- args: {
- file: tool.schema.string(),
- start_line: tool.schema.number(),
- end_line: tool.schema.number()
- },
- async execute({ file, start_line, end_line }) {
- const { stdout, stderr } = await runCmd(`sed -n '${start_line},${end_line}p;${end_line + 1}q' "${file}"`);
- if (stderr)
- return `Error: ${stderr}`;
- return JSON.stringify({ file, start_line, end_line, content: stdout }, null, 2);
- }
- }),
- symbol_query: tool({
- description: "Query the symbol index.",
- args: {
- symbol: tool.schema.string()
- },
- async execute({ symbol }) {
- const maint = await maintainSymbolIndex();
- if (maint.status === "unavailable")
- return JSON.stringify({ error: maint.reason });
- const tagsFile = path.join(CACHE_DIR, "tags");
- const { stdout: grepOut } = await runCmd(`grep -P "^${symbol}\t" "${tagsFile}"`);
- return JSON.stringify({ matches: grepOut.split('\n').filter(Boolean), metadata: maint }, null, 2);
- }
- }),
- jump_to_symbol: tool({
- description: "Jump to a symbol definition.",
- args: {
- symbol: tool.schema.string()
- },
- async execute({ symbol }) {
- const maint = await maintainSymbolIndex();
- if (maint.status !== "ok")
- return JSON.stringify({ error: maint.reason });
- const tagsFile = path.join(CACHE_DIR, "tags");
- const { stdout: tagLine } = await runCmd(`grep -P "^${symbol}\t" "${tagsFile}" | head -n 1`);
- if (!tagLine)
- return JSON.stringify({ found: false, symbol });
- const file = tagLine.split('\t')[1];
- const { stdout: grepLine } = await runCmd(`grep -n "${symbol}" "${file}" | head -n 1`);
- const line = grepLine ? parseInt(grepLine.split(':')[0], 10) : 1;
- const start = Math.max(1, line - 5);
- const end = line + 15;
- const { stdout: slice } = await runCmd(`sed -n '${start},${end}p;${end + 1}q' "${file}"`);
- return JSON.stringify({ symbol, resolved_location: { file, line }, slice: { start, end, content: slice } }, null, 2);
- }
- }),
- brief_fix_loop: tool({
- description: "Action Planner.",
- args: {
- symbol: tool.schema.string(),
- intent: tool.schema.string()
- },
- async execute({ symbol, intent }) {
- return JSON.stringify({ plan_id: `plan-${Date.now()}`, symbol, intent }, null, 2);
- }
- }),
- prepare_patch: tool({
- description: "Generate a .diff artifact.",
- args: {
- message: tool.schema.string()
- },
- async execute({ message }) {
- await ensureCache();
- const patchPath = path.join(CACHE_DIR, `patch-${Date.now()}.diff`);
- const { stdout } = await runCmd("git diff");
- if (!stdout)
- return "No changes.";
- await fs.writeFile(patchPath, `// MSG: ${message}\n\n${stdout}`);
- return `Patch saved to ${patchPath}`;
- }
- }),
- validate_patch: tool({
- description: "Validate a patch.",
- args: {
- patch_path: tool.schema.string()
- },
- async execute({ patch_path }) {
- const { error } = await runCmd(`git apply --check "${patch_path}"`);
- return error ? `FAILED: ${error.message}` : "SUCCESS.";
- }
- }),
- finalize_plan: tool({
- description: "Finalize a plan.",
- args: {
- plan_id: tool.schema.string(),
- outcome: tool.schema.string()
- },
- async execute({ plan_id, outcome }) {
- await ensureCache();
- const report = { plan_id, outcome, time: new Date().toISOString() };
- await fs.appendFile(path.join(CACHE_DIR, "gaps.jsonl"), JSON.stringify(report) + "\n");
- const deleted = await cleanCache();
- return `Finalized. Deleted ${deleted} items.`;
- }
- })
- }
+ tools: {
+ ...systemTools(),
+ },
  };
- }
- export default AutognosisPlugin;
+ }
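For orientation: the removed index.js above (and the rewritten system-tools.js further down) implement autognosis_init as a two-phase handshake, where mode='plan' mints a confirm_token and mode='apply' must echo it back before the cache is created. The sketch below is a standalone rendering of that flow for reference only; initHandshake is a hypothetical name, and the package actually exposes this behavior through its tool() wrapper rather than a plain function.

import * as crypto from "node:crypto";

// Hypothetical standalone version of the plan/apply flow shown in the diff.
let pendingInitToken = null;

function initHandshake(mode = "plan", token) {
  if (mode === "plan") {
    // "plan" mints a short-lived token the caller must echo back.
    pendingInitToken = crypto.randomBytes(4).toString("hex");
    return { status: "PLAN_READY", confirm_token: pendingInitToken };
  }
  if (!pendingInitToken || token !== pendingInitToken) {
    return { status: "ERROR", message: "Invalid token. Run mode='plan' first." };
  }
  pendingInitToken = null; // the token is single-use
  return { status: "SUCCESS", message: "Autognosis initialized." };
}

// Expected call sequence:
// const plan = initHandshake("plan");          -> { status: "PLAN_READY", confirm_token: "..." }
// initHandshake("apply", plan.confirm_token);  -> { status: "SUCCESS", ... }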
package/dist/services/logger.d.ts
ADDED

@@ -0,0 +1 @@
+ export declare function log(message: string, data?: unknown): void;

package/dist/services/logger.js
ADDED

@@ -0,0 +1,17 @@
+ import { appendFileSync, writeFileSync } from "node:fs";
+ import { homedir } from "node:os";
+ import { join } from "node:path";
+ const LOG_FILE = join(homedir(), ".opencode-autognosis.log");
+ // Start a fresh session block
+ writeFileSync(LOG_FILE, `
+ --- Autognosis Session started: ${new Date().toISOString()} ---
+ `, { flag: "a" });
+ export function log(message, data) {
+ const timestamp = new Date().toISOString();
+ const line = data
+ ? `[${timestamp}] ${message}: ${JSON.stringify(data)}
+ `
+ : `[${timestamp}] ${message}
+ `;
+ appendFileSync(LOG_FILE, line);
+ }
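A minimal usage sketch for the new file-based logger follows. The relative import specifier is an assumption about how a sibling module inside dist/ would reference it; the diff itself only shows the logger's implementation.

// usage-sketch.js (hypothetical, not part of the package)
import { log } from "./services/logger.js";

log("plugin loaded");
log("Tool call: fast_search", { query: "TODO", mode: "content" });
// Each call appends a timestamped line to ~/.opencode-autognosis.log, for example:
// [2025-01-01T12:00:00.000Z] Tool call: fast_search: {"query":"TODO","mode":"content"}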
package/dist/system-tools.d.ts
CHANGED
@@ -1,210 +1,3 @@
  export declare function systemTools(): {
-
- description: string;
- parameters: {
- type: string;
- properties: {
- mode: {
- type: string;
- enum: string[];
- default: string;
- };
- token: {
- type: string;
- };
- };
- required: string[];
- };
- execute: ({ mode, token }: {
- mode: string;
- token?: string;
- }) => Promise<string | undefined>;
- };
- fast_search: {
- description: string;
- parameters: {
- type: string;
- properties: {
- query: {
- type: string;
- };
- mode: {
- type: string;
- enum: string[];
- default: string;
- };
- path: {
- type: string;
- };
- };
- required: string[];
- };
- execute: ({ query, mode, path: searchPath }: {
- query: string;
- mode?: string;
- path?: string;
- }) => Promise<string>;
- };
- structural_search: {
- description: string;
- parameters: {
- type: string;
- properties: {
- pattern: {
- type: string;
- };
- path: {
- type: string;
- default: string;
- };
- plan_id: {
- type: string;
- };
- };
- required: string[];
- };
- execute: ({ pattern, path: searchPath, plan_id }: {
- pattern: string;
- path?: string;
- plan_id?: string;
- }) => Promise<string>;
- };
- read_slice: {
- description: string;
- parameters: {
- type: string;
- properties: {
- file: {
- type: string;
- };
- start_line: {
- type: string;
- };
- end_line: {
- type: string;
- };
- plan_id: {
- type: string;
- };
- };
- required: string[];
- };
- execute: ({ file, start_line, end_line, plan_id }: {
- file: string;
- start_line: number;
- end_line: number;
- plan_id?: string;
- }) => Promise<string>;
- };
- symbol_query: {
- description: string;
- parameters: {
- type: string;
- properties: {
- symbol: {
- type: string;
- };
- };
- required: string[];
- };
- execute: ({ symbol }: {
- symbol: string;
- }) => Promise<string>;
- };
- jump_to_symbol: {
- description: string;
- parameters: {
- type: string;
- properties: {
- symbol: {
- type: string;
- };
- plan_id: {
- type: string;
- };
- };
- required: string[];
- };
- execute: ({ symbol, plan_id }: {
- symbol: string;
- plan_id?: string;
- }) => Promise<string>;
- };
- brief_fix_loop: {
- description: string;
- parameters: {
- type: string;
- properties: {
- symbol: {
- type: string;
- };
- intent: {
- type: string;
- };
- };
- required: string[];
- };
- execute: ({ symbol, intent }: {
- symbol: string;
- intent: string;
- }) => Promise<string>;
- };
- prepare_patch: {
- description: string;
- parameters: {
- type: string;
- properties: {
- plan_id: {
- type: string;
- };
- message: {
- type: string;
- };
- };
- required: string[];
- };
- execute: ({ plan_id, message }: {
- plan_id?: string;
- message: string;
- }) => Promise<string>;
- };
- validate_patch: {
- description: string;
- parameters: {
- type: string;
- properties: {
- patch_path: {
- type: string;
- };
- timeout_ms: {
- type: string;
- default: number;
- };
- };
- required: string[];
- };
- execute: ({ patch_path, timeout_ms }: {
- patch_path: string;
- timeout_ms?: number;
- }) => Promise<string>;
- };
- finalize_plan: {
- description: string;
- parameters: {
- type: string;
- properties: {
- plan_id: {
- type: string;
- };
- outcome: {
- type: string;
- };
- };
- required: string[];
- };
- execute: ({ plan_id, outcome }: {
- plan_id: string;
- outcome: string;
- }) => Promise<string>;
- };
+ [key: string]: any;
  };
package/dist/system-tools.js
CHANGED
@@ -1,21 +1,29 @@
+ import { tool } from "@opencode-ai/plugin";
  import { exec } from "node:child_process";
  import * as fs from "node:fs/promises";
  import * as fsSync from "node:fs";
  import * as path from "node:path";
  import { promisify } from "node:util";
- import { fileURLToPath } from "node:url";
  import * as crypto from "node:crypto";
  const execAsync = promisify(exec);
- const __dirname = path.dirname(fileURLToPath(import.meta.url));
  const PROJECT_ROOT = process.cwd();
  const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
  const CACHE_DIR = path.join(OPENCODE_DIR, "cache");
-
-
-
+ // Internal logging
+ function log(message, data) {
+ // console.error is used here for internal debugging as standard console.log may interfere with tool outputs
+ console.error(`[Autognosis] ${message}`, data || '');
+ }
+ // =============================================================================
+ // HELPERS
+ // =============================================================================
  async function runCmd(cmd, cwd = PROJECT_ROOT, timeoutMs = 30000) {
  try {
- const { stdout, stderr } = await execAsync(cmd, {
+ const { stdout, stderr } = await execAsync(cmd, {
+ cwd,
+ maxBuffer: 10 * 1024 * 1024,
+ timeout: timeoutMs
+ });
  return { stdout: stdout.trim(), stderr: stderr.trim() };
  }
  catch (error) {

@@ -26,17 +34,12 @@ async function runCmd(cmd, cwd = PROJECT_ROOT, timeoutMs = 30000) {
  }
  }
  async function checkBinary(bin) {
- const { error } = await runCmd(`${bin} --version`, PROJECT_ROOT, 5000);
- return !error;
- }
- async function getPatterns() {
- const patternsPath = path.join(ASSETS_DIR, "patterns.json");
  try {
- const
- return
+ const { error } = await runCmd(`${bin} --version`, PROJECT_ROOT, 5000);
+ return !error;
  }
  catch (e) {
- return
+ return false;
  }
  }
  async function ensureCache() {

@@ -87,170 +90,183 @@ async function maintainSymbolIndex() {
  }
  return { rebuilt: false, status: "ok" };
  }
+ // =============================================================================
+ // TOOLS
+ // =============================================================================
  export function systemTools() {
+ let pendingInitToken = null; // State for init token
  return {
- autognosis_init: {
+ autognosis_init: tool({
  description: "Initialize or check the Autognosis environment. Two-phase: 'plan' (default) generates a token, 'apply' executes it.",
-
-
+ args: {
+ mode: tool.schema.enum(["plan", "apply"]).optional().default("plan"),
+ token: tool.schema.string().optional()
+ },
+ async execute({ mode, token }) {
+ log("Tool call: autognosis_init", { mode });
  if (mode === "plan") {
- const checks = {
+ const checks = {
+ rg: await checkBinary("rg"),
+ fd: await checkBinary("fd"),
+ sg: await checkBinary("sg"),
+ ctags: await checkBinary("ctags"),
+ git: await checkBinary("git")
+ };
  const actions = [];
  if (!fsSync.existsSync(CACHE_DIR))
  actions.push(`Create cache directory: ${CACHE_DIR}`);
-
-
-
-
-
-
+ pendingInitToken = crypto.randomBytes(4).toString("hex");
+ return JSON.stringify({
+ status: "PLAN_READY",
+ system_checks: checks,
+ planned_actions: actions,
+ confirm_token: pendingInitToken,
+ instruction: `Call autognosis_init(mode='apply', token='${pendingInitToken}')`
+ }, null, 2);
  }
- else
- if (!pendingInitToken ||
- return JSON.stringify({ status: "ERROR", message: "Invalid
+ else {
+ if (!pendingInitToken || token !== pendingInitToken) {
+ return JSON.stringify({ status: "ERROR", message: "Invalid token. Run mode='plan' first." });
+ }
  await ensureCache();
  pendingInitToken = null;
-
- return JSON.stringify({ status: "SUCCESS", message: "Autognosis initialized.", cache_dir: CACHE_DIR });
+ return JSON.stringify({ status: "SUCCESS", message: "Autognosis initialized." });
  }
  }
- },
- fast_search: {
+ }),
+ fast_search: tool({
  description: "Fast content or filename search using ripgrep (rg) and fd.",
-
-
+ args: {
+ query: tool.schema.string(),
+ mode: tool.schema.enum(["filename", "content"]).optional().default("filename"),
+ path: tool.schema.string().optional().default(".")
+ },
+ async execute({ query, mode, path: searchPath }) {
+ log("Tool call: fast_search", { query, mode, searchPath });
  if (mode === "content") {
  if (!(await checkBinary("rg")))
- return "Error: 'rg'
+ return "Error: 'rg' not installed.";
  const { stdout } = await runCmd(`rg -n --column "${query}" "${searchPath}"`);
  if (!stdout)
  return "No matches found.";
- return stdout.split('\n').slice(0, 50).join('\n')
+ return stdout.split('\n').slice(0, 50).join('\n');
  }
  else {
  if (!(await checkBinary("fd")))
- return "Error: 'fd'
+ return "Error: 'fd' not installed.";
  const { stdout } = await runCmd(`fd "${query}" "${searchPath}"`);
  if (!stdout)
  return "No files found.";
- return stdout.split('\n').slice(0, 50).join('\n')
- }
- }
- },
- structural_search: {
- description: "Search code using ast-grep patterns or patterns.json IDs.",
- parameters: { type: "object", properties: { pattern: { type: "string" }, path: { type: "string", default: "." }, plan_id: { type: "string" } }, required: ["pattern"] },
- execute: async ({ pattern, path: searchPath = ".", plan_id }) => {
- if (!(await checkBinary("sg")))
- return JSON.stringify({ error: "Degraded Mode: 'sg' (ast-grep) not found.", results: [], plan_id: plan_id || "OFF-PLAN" });
- const knownPatterns = await getPatterns();
- const known = knownPatterns.find((p) => p.name === pattern);
- const cmd = known ? `sg scan -p "${known.pattern}" "${searchPath}" --json` : `sg scan -p "${pattern}" "${searchPath}" --json`;
- const { stdout } = await runCmd(cmd);
- let results = [];
- try {
- results = JSON.parse(stdout);
+ return stdout.split('\n').slice(0, 50).join('\n');
  }
- catch (e) { }
- return JSON.stringify({ results: results.slice(0, 50), truncated: results.length > 50, plan_id: plan_id || "OFF-PLAN" }, null, 2);
  }
- },
- read_slice: {
+ }),
+ read_slice: tool({
  description: "Read a specific slice of a file.",
-
-
+ args: {
+ file: tool.schema.string(),
+ start_line: tool.schema.number(),
+ end_line: tool.schema.number()
+ },
+ async execute({ file, start_line, end_line }) {
+ log("Tool call: read_slice", { file, start_line, end_line });
  const { stdout, stderr } = await runCmd(`sed -n '${start_line},${end_line}p;${end_line + 1}q' "${file}"`);
  if (stderr)
- return `Error
- return JSON.stringify({ file, start_line, end_line, content: stdout
+ return `Error: ${stderr}`;
+ return JSON.stringify({ file, start_line, end_line, content: stdout }, null, 2);
  }
- },
- symbol_query: {
+ }),
+ symbol_query: tool({
  description: "Query the symbol index. Rebuilds automatically if stale.",
-
-
+ args: {
+ symbol: tool.schema.string()
+ },
+ async execute({ symbol }) {
+ log("Tool call: symbol_query", { symbol });
  const maint = await maintainSymbolIndex();
  if (maint.status === "unavailable")
- return JSON.stringify({ error:
+ return JSON.stringify({ error: maint.reason });
  const tagsFile = path.join(CACHE_DIR, "tags");
-
-
- return JSON.stringify({ matches: stdout.split('\n').filter(Boolean), metadata: maint }, null, 2);
- }
- else {
- const { stdout: grepOut } = await runCmd(`grep -P "^${symbol}\t" "${tagsFile}"`);
- return JSON.stringify({ matches: grepOut.split('\n').filter(Boolean), metadata: maint, note: "using_grep_fallback" }, null, 2);
- }
+ const { stdout: grepOut } = await runCmd(`grep -P "^${symbol}\t" "${tagsFile}"`);
+ return JSON.stringify({ matches: grepOut.split('\n').filter(Boolean), metadata: maint }, null, 2);
  }
- },
- jump_to_symbol: {
+ }),
+ jump_to_symbol: tool({
  description: "Jump to a symbol's definition by querying the index and reading the slice.",
-
-
+ args: {
+ symbol: tool.schema.string()
+ },
+ async execute({ symbol }) {
+ log("Tool call: jump_to_symbol", { symbol });
  const maint = await maintainSymbolIndex();
  if (maint.status !== "ok")
- return JSON.stringify({ error:
+ return JSON.stringify({ error: maint.reason });
  const tagsFile = path.join(CACHE_DIR, "tags");
  const { stdout: tagLine } = await runCmd(`grep -P "^${symbol}\t" "${tagsFile}" | head -n 1`);
  if (!tagLine)
  return JSON.stringify({ found: false, symbol });
- const
- const file = parts[1];
- let line = 1;
+ const file = tagLine.split('\t')[1];
  const { stdout: grepLine } = await runCmd(`grep -n "${symbol}" "${file}" | head -n 1`);
-
- line = parseInt(grepLine.split(':')[0], 10);
+ const line = grepLine ? parseInt(grepLine.split(':')[0], 10) : 1;
  const start = Math.max(1, line - 5);
  const end = line + 15;
  const { stdout: slice } = await runCmd(`sed -n '${start},${end}p;${end + 1}q' "${file}"`);
- return JSON.stringify({ symbol, resolved_location: { file, line }, slice: { start, end, content: slice }
+ return JSON.stringify({ symbol, resolved_location: { file, line }, slice: { start, end, content: slice } }, null, 2);
  }
- },
- brief_fix_loop: {
+ }),
+ brief_fix_loop: tool({
  description: "The Action Planner. Generates a bounded worklist for a symbol and intent.",
-
-
-
-
+ args: {
+ symbol: tool.schema.string(),
+ intent: tool.schema.string()
+ },
+ async execute({ symbol, intent }) {
+ log("Tool call: brief_fix_loop", { symbol, intent });
+ const planId = `plan-${Date.now()}`;
+ return JSON.stringify({ plan_id: planId, symbol, intent, status: "PLAN_GENERATED" }, null, 2);
  }
- },
- prepare_patch: {
- description: "Generate a .diff artifact for the current changes
-
-
+ }),
+ prepare_patch: tool({
+ description: "Generate a .diff artifact for the current changes.",
+ args: {
+ message: tool.schema.string()
+ },
+ async execute({ message }) {
+ log("Tool call: prepare_patch", { message });
  await ensureCache();
  const patchPath = path.join(CACHE_DIR, `patch-${Date.now()}.diff`);
  const { stdout } = await runCmd("git diff");
  if (!stdout)
  return "No changes to patch.";
-
- await fs.writeFile(patchPath, content);
+ await fs.writeFile(patchPath, `// MSG: ${message}\n\n${stdout}`);
  return `Patch saved to ${patchPath}`;
  }
- },
- validate_patch: {
- description: "Validate a patch by applying it in a fresh worktree.
-
-
-
-
-
-
-
- return JSON.stringify({ status: "FAILED", checks: { git_apply_check: "failed" }, error: error.message });
- return JSON.stringify({ status: "SUCCESS", checks: { git_apply_check: "passed" }, duration: Date.now() - start, note: "Patch is valid against current HEAD." }, null, 2);
+ }),
+ validate_patch: tool({
+ description: "Validate a patch by applying it in a fresh worktree.",
+ args: {
+ patch_path: tool.schema.string()
+ },
+ async execute({ patch_path }) {
+ log("Tool call: validate_patch", { patch_path });
+ const { error } = await runCmd(`git apply --check "${patch_path}"`);
+ return error ? `FAILED: ${error.message}` : "SUCCESS: Patch is valid against current HEAD.";
  }
- },
- finalize_plan: {
+ }),
+ finalize_plan: tool({
  description: "Finalize a plan, logging metrics and cleaning cache.",
-
-
+ args: {
+ plan_id: tool.schema.string(),
+ outcome: tool.schema.string()
+ },
+ async execute({ plan_id, outcome }) {
+ log("Tool call: finalize_plan", { plan_id, outcome });
  await ensureCache();
  const report = { plan_id, outcome, time: new Date().toISOString() };
  await fs.appendFile(path.join(CACHE_DIR, "gaps.jsonl"), JSON.stringify(report) + "\n");
  const deleted = await cleanCache();
- return `
+ return `Finalized. Cache hygiene: deleted ${deleted} old items.`;
  }
- }
+ })
  };
  }
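The recurring shape in the rewritten system-tools.js is the SDK's tool() helper: a description, a tool.schema-typed args object, and an async execute. The sketch below mirrors that declaration pattern with a hypothetical echo tool; the echo tool and extraTools() are not part of the package, and only schema methods already visible in the diff (string, enum, optional, default) are used.

import { tool } from "@opencode-ai/plugin";

// Hypothetical example following the same declaration pattern as the diff above.
const echo = tool({
  description: "Echo a message back (illustrative only).",
  args: {
    message: tool.schema.string(),
    casing: tool.schema.enum(["keep", "upper"]).optional().default("keep")
  },
  async execute({ message, casing }) {
    const text = casing === "upper" ? message.toUpperCase() : message;
    return JSON.stringify({ echoed: text }, null, 2);
  }
});

// Tools declared this way can be spread into the plugin's tools map,
// exactly as index.js now does with systemTools().
export function extraTools() {
  return { echo };
}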
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "opencode-autognosis",
- "version": "1.0.0",
+ "version": "1.0.1",
  "description": "Transforms OpenCode agents into 'miniature engineers' with deep codebase awareness. Includes fast structural search (ast-grep), instant symbol navigation (ctags), and a disciplined 'Plan → Execute → Patch' workflow.",
  "type": "module",
  "main": "dist/index.js",

@@ -19,10 +19,11 @@
  "type": "plugin",
  "hooks": []
  },
- "dependencies": {},
  "devDependencies": {
  "@opencode-ai/plugin": "^1.0.162",
+ "@opencode-ai/sdk": "^1.1.40",
  "@types/node": "^20.0.0",
- "typescript": "^5.0.0"
+ "typescript": "^5.0.0",
+ "zod": "^4.3.6"
  }
- }
+ }