opencode-autognosis 0.1.5 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +5 -1
- package/dist/index.js +5 -77
- package/dist/services/logger.d.ts +1 -0
- package/dist/services/logger.js +17 -0
- package/dist/system-tools.d.ts +1 -208
- package/dist/system-tools.js +132 -116
- package/package.json +11 -12
- package/commands/autognosis/init.toml +0 -13
- package/gemini-extension.json +0 -12
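The headline change in 1.0.x is architectural: the package no longer boots a standalone MCP stdio server, it default-exports an OpenCode plugin whose tools come from systemTools(). A minimal sketch of the new entry point, reconstructed from the compiled dist/index.js shown below (the source-level TypeScript shape is an assumption; only the emitted JavaScript is visible in this diff):

    import { systemTools } from "./system-tools.js";

    // 1.0.x entry point: a default-exported plugin factory.
    // The 0.1.x entry point instead instantiated an McpServer, registered each
    // tool with a zod schema, and connected a StdioServerTransport.
    export default function plugin() {
        return {
            tools: {
                ...systemTools(), // each tool is built with tool({ description, args, execute })
            },
        };
    }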
package/dist/index.d.ts
CHANGED
package/dist/index.js
CHANGED
@@ -1,80 +1,8 @@
-import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
-import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
-import { z } from "zod";
 import { systemTools } from "./system-tools.js";
-
-
-
-
-
-const tools = systemTools();
-const register = (toolName, zodSchema) => {
-    const toolDef = tools[toolName];
-    server.registerTool(toolName, {
-        description: toolDef.description,
-        inputSchema: zodSchema,
-    }, async (args) => {
-        try {
-            const result = await toolDef.execute(args);
-            return {
-                content: [{ type: "text", text: typeof result === 'string' ? result : JSON.stringify(result, null, 2) }],
-            };
-        }
-        catch (error) {
-            return {
-                content: [{ type: "text", text: `Error: ${error.message}` }],
-                isError: true,
-            };
-        }
-    });
+export default function plugin() {
+    return {
+        tools: {
+            ...systemTools(),
+        },
 };
-register("fast_search", z.object({
-    query: z.string(),
-    mode: z.enum(["filename", "content"]).optional().default("filename"),
-    path: z.string().optional().default(".")
-}).shape);
-register("structural_search", z.object({
-    pattern: z.string(),
-    path: z.string().optional().default("."),
-    plan_id: z.string().optional()
-}).shape);
-register("read_slice", z.object({
-    file: z.string(),
-    start_line: z.number(),
-    end_line: z.number(),
-    plan_id: z.string().optional()
-}).shape);
-register("symbol_query", z.object({
-    symbol: z.string()
-}).shape);
-register("jump_to_symbol", z.object({
-    symbol: z.string(),
-    plan_id: z.string().optional()
-}).shape);
-register("autognosis_init", z.object({
-    mode: z.enum(["plan", "apply"]).optional().default("plan"),
-    token: z.string().optional()
-}).shape);
-register("brief_fix_loop", z.object({
-    symbol: z.string(),
-    intent: z.string()
-}).shape);
-register("prepare_patch", z.object({
-    plan_id: z.string().optional(),
-    message: z.string()
-}).shape);
-register("validate_patch", z.object({
-    patch_path: z.string(),
-    timeout_ms: z.number().optional().default(30000)
-}).shape);
-register("finalize_plan", z.object({
-    plan_id: z.string(),
-    outcome: z.string()
-}).shape);
-const transport = new StdioServerTransport();
-await server.connect(transport);
 }
-main().catch((error) => {
-    console.error("Fatal error in Autognosis MCP Server:", error);
-    process.exit(1);
-});
package/dist/services/logger.d.ts
ADDED
@@ -0,0 +1 @@
+export declare function log(message: string, data?: unknown): void;
package/dist/services/logger.js
ADDED
@@ -0,0 +1,17 @@
+import { appendFileSync, writeFileSync } from "node:fs";
+import { homedir } from "node:os";
+import { join } from "node:path";
+const LOG_FILE = join(homedir(), ".opencode-autognosis.log");
+// Start a fresh session block
+writeFileSync(LOG_FILE, `
+--- Autognosis Session started: ${new Date().toISOString()} ---
+`, { flag: "a" });
+export function log(message, data) {
+    const timestamp = new Date().toISOString();
+    const line = data
+        ? `[${timestamp}] ${message}: ${JSON.stringify(data)}
+`
+        : `[${timestamp}] ${message}
+`;
+    appendFileSync(LOG_FILE, line);
+}
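The new services/logger.js writes a session header when the module is first loaded and then appends timestamped lines to ~/.opencode-autognosis.log. A small usage sketch, assuming a caller imports it from another dist module (no call sites are visible in this diff, so the importing module is hypothetical):

    import { log } from "./services/logger.js";

    // Appends e.g. "[2025-01-01T00:00:00.000Z] Tool call: fast_search: {"query":"foo"}"
    // to ~/.opencode-autognosis.log (the file is written with { flag: "a" }, i.e. append).
    log("Tool call: fast_search", { query: "foo" });
    log("Cache initialized"); // the data argument is optional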
package/dist/system-tools.d.ts
CHANGED
@@ -1,210 +1,3 @@
 export declare function systemTools(): {
-    autognosis_init: {
-        description: string;
-        parameters: {
-            type: string;
-            properties: {
-                mode: {
-                    type: string;
-                    enum: string[];
-                    default: string;
-                };
-                token: {
-                    type: string;
-                };
-            };
-            required: string[];
-        };
-        execute: ({ mode, token }: {
-            mode: string;
-            token?: string;
-        }) => Promise<string | undefined>;
-    };
-    fast_search: {
-        description: string;
-        parameters: {
-            type: string;
-            properties: {
-                query: {
-                    type: string;
-                };
-                mode: {
-                    type: string;
-                    enum: string[];
-                    default: string;
-                };
-                path: {
-                    type: string;
-                };
-            };
-            required: string[];
-        };
-        execute: ({ query, mode, path: searchPath }: {
-            query: string;
-            mode?: string;
-            path?: string;
-        }) => Promise<string>;
-    };
-    structural_search: {
-        description: string;
-        parameters: {
-            type: string;
-            properties: {
-                pattern: {
-                    type: string;
-                };
-                path: {
-                    type: string;
-                    default: string;
-                };
-                plan_id: {
-                    type: string;
-                };
-            };
-            required: string[];
-        };
-        execute: ({ pattern, path: searchPath, plan_id }: {
-            pattern: string;
-            path?: string;
-            plan_id?: string;
-        }) => Promise<string>;
-    };
-    read_slice: {
-        description: string;
-        parameters: {
-            type: string;
-            properties: {
-                file: {
-                    type: string;
-                };
-                start_line: {
-                    type: string;
-                };
-                end_line: {
-                    type: string;
-                };
-                plan_id: {
-                    type: string;
-                };
-            };
-            required: string[];
-        };
-        execute: ({ file, start_line, end_line, plan_id }: {
-            file: string;
-            start_line: number;
-            end_line: number;
-            plan_id?: string;
-        }) => Promise<string>;
-    };
-    symbol_query: {
-        description: string;
-        parameters: {
-            type: string;
-            properties: {
-                symbol: {
-                    type: string;
-                };
-            };
-            required: string[];
-        };
-        execute: ({ symbol }: {
-            symbol: string;
-        }) => Promise<string>;
-    };
-    jump_to_symbol: {
-        description: string;
-        parameters: {
-            type: string;
-            properties: {
-                symbol: {
-                    type: string;
-                };
-                plan_id: {
-                    type: string;
-                };
-            };
-            required: string[];
-        };
-        execute: ({ symbol, plan_id }: {
-            symbol: string;
-            plan_id?: string;
-        }) => Promise<string>;
-    };
-    brief_fix_loop: {
-        description: string;
-        parameters: {
-            type: string;
-            properties: {
-                symbol: {
-                    type: string;
-                };
-                intent: {
-                    type: string;
-                };
-            };
-            required: string[];
-        };
-        execute: ({ symbol, intent }: {
-            symbol: string;
-            intent: string;
-        }) => Promise<string>;
-    };
-    prepare_patch: {
-        description: string;
-        parameters: {
-            type: string;
-            properties: {
-                plan_id: {
-                    type: string;
-                };
-                message: {
-                    type: string;
-                };
-            };
-            required: string[];
-        };
-        execute: ({ plan_id, message }: {
-            plan_id?: string;
-            message: string;
-        }) => Promise<string>;
-    };
-    validate_patch: {
-        description: string;
-        parameters: {
-            type: string;
-            properties: {
-                patch_path: {
-                    type: string;
-                };
-                timeout_ms: {
-                    type: string;
-                    default: number;
-                };
-            };
-            required: string[];
-        };
-        execute: ({ patch_path, timeout_ms }: {
-            patch_path: string;
-            timeout_ms?: number;
-        }) => Promise<string>;
-    };
-    finalize_plan: {
-        description: string;
-        parameters: {
-            type: string;
-            properties: {
-                plan_id: {
-                    type: string;
-                };
-                outcome: {
-                    type: string;
-                };
-            };
-            required: string[];
-        };
-        execute: ({ plan_id, outcome }: {
-            plan_id: string;
-            outcome: string;
-        }) => Promise<string>;
-    };
+    [key: string]: any;
 };
package/dist/system-tools.js
CHANGED
@@ -1,21 +1,29 @@
+import { tool } from "@opencode-ai/plugin";
 import { exec } from "node:child_process";
 import * as fs from "node:fs/promises";
 import * as fsSync from "node:fs";
 import * as path from "node:path";
 import { promisify } from "node:util";
-import { fileURLToPath } from "node:url";
 import * as crypto from "node:crypto";
 const execAsync = promisify(exec);
-const __dirname = path.dirname(fileURLToPath(import.meta.url));
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
 const CACHE_DIR = path.join(OPENCODE_DIR, "cache");
-
-
-
+// Internal logging
+function log(message, data) {
+    // console.error is used here for internal debugging as standard console.log may interfere with tool outputs
+    console.error(`[Autognosis] ${message}`, data || '');
+}
+// =============================================================================
+// HELPERS
+// =============================================================================
 async function runCmd(cmd, cwd = PROJECT_ROOT, timeoutMs = 30000) {
     try {
-        const { stdout, stderr } = await execAsync(cmd, {
+        const { stdout, stderr } = await execAsync(cmd, {
+            cwd,
+            maxBuffer: 10 * 1024 * 1024,
+            timeout: timeoutMs
+        });
         return { stdout: stdout.trim(), stderr: stderr.trim() };
     }
     catch (error) {
@@ -26,17 +34,12 @@ async function runCmd(cmd, cwd = PROJECT_ROOT, timeoutMs = 30000) {
     }
 }
 async function checkBinary(bin) {
-    const { error } = await runCmd(`${bin} --version`, PROJECT_ROOT, 5000);
-    return !error;
-}
-async function getPatterns() {
-    const patternsPath = path.join(ASSETS_DIR, "patterns.json");
     try {
-        const
-        return
+        const { error } = await runCmd(`${bin} --version`, PROJECT_ROOT, 5000);
+        return !error;
    }
    catch (e) {
-        return
+        return false;
    }
 }
 async function ensureCache() {
@@ -87,170 +90,183 @@ async function maintainSymbolIndex() {
     }
     return { rebuilt: false, status: "ok" };
 }
+// =============================================================================
+// TOOLS
+// =============================================================================
 export function systemTools() {
+    let pendingInitToken = null; // State for init token
     return {
-        autognosis_init: {
+        autognosis_init: tool({
             description: "Initialize or check the Autognosis environment. Two-phase: 'plan' (default) generates a token, 'apply' executes it.",
-
-
+            args: {
+                mode: tool.schema.enum(["plan", "apply"]).optional().default("plan"),
+                token: tool.schema.string().optional()
+            },
+            async execute({ mode, token }) {
+                log("Tool call: autognosis_init", { mode });
                 if (mode === "plan") {
-                    const checks = {
+                    const checks = {
+                        rg: await checkBinary("rg"),
+                        fd: await checkBinary("fd"),
+                        sg: await checkBinary("sg"),
+                        ctags: await checkBinary("ctags"),
+                        git: await checkBinary("git")
+                    };
                     const actions = [];
                     if (!fsSync.existsSync(CACHE_DIR))
                         actions.push(`Create cache directory: ${CACHE_DIR}`);
-
-
-
-
-
-
+                    pendingInitToken = crypto.randomBytes(4).toString("hex");
+                    return JSON.stringify({
+                        status: "PLAN_READY",
+                        system_checks: checks,
+                        planned_actions: actions,
+                        confirm_token: pendingInitToken,
+                        instruction: `Call autognosis_init(mode='apply', token='${pendingInitToken}')`
+                    }, null, 2);
                 }
-                else
-                    if (!pendingInitToken ||
-                        return JSON.stringify({ status: "ERROR", message: "Invalid
+                else {
+                    if (!pendingInitToken || token !== pendingInitToken) {
+                        return JSON.stringify({ status: "ERROR", message: "Invalid token. Run mode='plan' first." });
+                    }
                     await ensureCache();
                     pendingInitToken = null;
-
-                    return JSON.stringify({ status: "SUCCESS", message: "Autognosis initialized.", cache_dir: CACHE_DIR });
+                    return JSON.stringify({ status: "SUCCESS", message: "Autognosis initialized." });
                 }
             }
-        },
-        fast_search: {
+        }),
+        fast_search: tool({
            description: "Fast content or filename search using ripgrep (rg) and fd.",
-
-
+            args: {
+                query: tool.schema.string(),
+                mode: tool.schema.enum(["filename", "content"]).optional().default("filename"),
+                path: tool.schema.string().optional().default(".")
+            },
+            async execute({ query, mode, path: searchPath }) {
+                log("Tool call: fast_search", { query, mode, searchPath });
                 if (mode === "content") {
                     if (!(await checkBinary("rg")))
-                        return "Error: 'rg'
+                        return "Error: 'rg' not installed.";
                     const { stdout } = await runCmd(`rg -n --column "${query}" "${searchPath}"`);
                     if (!stdout)
                         return "No matches found.";
-                    return stdout.split('\n').slice(0, 50).join('\n')
                 }
                 else {
                     if (!(await checkBinary("fd")))
-                        return "Error: 'fd'
+                        return "Error: 'fd' not installed.";
                     const { stdout } = await runCmd(`fd "${query}" "${searchPath}"`);
                     if (!stdout)
                         return "No files found.";
-                    return stdout.split('\n').slice(0, 50).join('\n')
-                }
-            }
-        },
-        structural_search: {
-            description: "Search code using ast-grep patterns or patterns.json IDs.",
-            parameters: { type: "object", properties: { pattern: { type: "string" }, path: { type: "string", default: "." }, plan_id: { type: "string" } }, required: ["pattern"] },
-            execute: async ({ pattern, path: searchPath = ".", plan_id }) => {
-                if (!(await checkBinary("sg")))
-                    return JSON.stringify({ error: "Degraded Mode: 'sg' (ast-grep) not found.", results: [], plan_id: plan_id || "OFF-PLAN" });
-                const knownPatterns = await getPatterns();
-                const known = knownPatterns.find((p) => p.name === pattern);
-                const cmd = known ? `sg scan -p "${known.pattern}" "${searchPath}" --json` : `sg scan -p "${pattern}" "${searchPath}" --json`;
-                const { stdout } = await runCmd(cmd);
-                let results = [];
-                try {
-                    results = JSON.parse(stdout);
+                    return stdout.split('\n').slice(0, 50).join('\n');
                 }
-                catch (e) { }
-                return JSON.stringify({ results: results.slice(0, 50), truncated: results.length > 50, plan_id: plan_id || "OFF-PLAN" }, null, 2);
             }
-        },
-        read_slice: {
+        }),
+        read_slice: tool({
             description: "Read a specific slice of a file.",
-
-
+            args: {
+                file: tool.schema.string(),
+                start_line: tool.schema.number(),
+                end_line: tool.schema.number()
+            },
+            async execute({ file, start_line, end_line }) {
+                log("Tool call: read_slice", { file, start_line, end_line });
                 const { stdout, stderr } = await runCmd(`sed -n '${start_line},${end_line}p;${end_line + 1}q' "${file}"`);
                 if (stderr)
-                    return `Error
-                return JSON.stringify({ file, start_line, end_line, content: stdout
+                    return `Error: ${stderr}`;
+                return JSON.stringify({ file, start_line, end_line, content: stdout }, null, 2);
             }
-        },
-        symbol_query: {
+        }),
+        symbol_query: tool({
             description: "Query the symbol index. Rebuilds automatically if stale.",
-
-
+            args: {
+                symbol: tool.schema.string()
+            },
+            async execute({ symbol }) {
+                log("Tool call: symbol_query", { symbol });
                 const maint = await maintainSymbolIndex();
                 if (maint.status === "unavailable")
-                    return JSON.stringify({ error:
+                    return JSON.stringify({ error: maint.reason });
                 const tagsFile = path.join(CACHE_DIR, "tags");
-
-
-                    return JSON.stringify({ matches: stdout.split('\n').filter(Boolean), metadata: maint }, null, 2);
-                }
-                else {
-                    const { stdout: grepOut } = await runCmd(`grep -P "^${symbol}\t" "${tagsFile}"`);
-                    return JSON.stringify({ matches: grepOut.split('\n').filter(Boolean), metadata: maint, note: "using_grep_fallback" }, null, 2);
-                }
+                const { stdout: grepOut } = await runCmd(`grep -P "^${symbol}\t" "${tagsFile}"`);
+                return JSON.stringify({ matches: grepOut.split('\n').filter(Boolean), metadata: maint }, null, 2);
             }
-        },
-        jump_to_symbol: {
+        }),
+        jump_to_symbol: tool({
             description: "Jump to a symbol's definition by querying the index and reading the slice.",
-
-
+            args: {
+                symbol: tool.schema.string()
+            },
+            async execute({ symbol }) {
+                log("Tool call: jump_to_symbol", { symbol });
                 const maint = await maintainSymbolIndex();
                 if (maint.status !== "ok")
-                    return JSON.stringify({ error:
+                    return JSON.stringify({ error: maint.reason });
                 const tagsFile = path.join(CACHE_DIR, "tags");
                 const { stdout: tagLine } = await runCmd(`grep -P "^${symbol}\t" "${tagsFile}" | head -n 1`);
                 if (!tagLine)
                     return JSON.stringify({ found: false, symbol });
-                const
-                const file = parts[1];
-                let line = 1;
+                const file = tagLine.split('\t')[1];
                 const { stdout: grepLine } = await runCmd(`grep -n "${symbol}" "${file}" | head -n 1`);
-
-                    line = parseInt(grepLine.split(':')[0], 10);
+                const line = grepLine ? parseInt(grepLine.split(':')[0], 10) : 1;
                 const start = Math.max(1, line - 5);
                 const end = line + 15;
                 const { stdout: slice } = await runCmd(`sed -n '${start},${end}p;${end + 1}q' "${file}"`);
-                return JSON.stringify({ symbol, resolved_location: { file, line }, slice: { start, end, content: slice }
+                return JSON.stringify({ symbol, resolved_location: { file, line }, slice: { start, end, content: slice } }, null, 2);
             }
-        },
-        brief_fix_loop: {
+        }),
+        brief_fix_loop: tool({
             description: "The Action Planner. Generates a bounded worklist for a symbol and intent.",
-
-
-
-
+            args: {
+                symbol: tool.schema.string(),
+                intent: tool.schema.string()
+            },
+            async execute({ symbol, intent }) {
+                log("Tool call: brief_fix_loop", { symbol, intent });
+                const planId = `plan-${Date.now()}`;
+                return JSON.stringify({ plan_id: planId, symbol, intent, status: "PLAN_GENERATED" }, null, 2);
             }
-        },
-        prepare_patch: {
-            description: "Generate a .diff artifact for the current changes
-
-
+        }),
+        prepare_patch: tool({
+            description: "Generate a .diff artifact for the current changes.",
+            args: {
+                message: tool.schema.string()
+            },
+            async execute({ message }) {
+                log("Tool call: prepare_patch", { message });
                 await ensureCache();
                 const patchPath = path.join(CACHE_DIR, `patch-${Date.now()}.diff`);
                 const { stdout } = await runCmd("git diff");
                 if (!stdout)
                     return "No changes to patch.";
-
-                await fs.writeFile(patchPath, content);
+                await fs.writeFile(patchPath, `// MSG: ${message}\n\n${stdout}`);
                 return `Patch saved to ${patchPath}`;
             }
-        },
-        validate_patch: {
-            description: "Validate a patch by applying it in a fresh worktree.
-
-
-
-
-
-
-
-                return JSON.stringify({ status: "FAILED", checks: { git_apply_check: "failed" }, error: error.message });
-                return JSON.stringify({ status: "SUCCESS", checks: { git_apply_check: "passed" }, duration: Date.now() - start, note: "Patch is valid against current HEAD." }, null, 2);
+        }),
+        validate_patch: tool({
+            description: "Validate a patch by applying it in a fresh worktree.",
+            args: {
+                patch_path: tool.schema.string()
+            },
+            async execute({ patch_path }) {
+                log("Tool call: validate_patch", { patch_path });
+                const { error } = await runCmd(`git apply --check "${patch_path}"`);
+                return error ? `FAILED: ${error.message}` : "SUCCESS: Patch is valid against current HEAD.";
             }
-        },
-        finalize_plan: {
+        }),
+        finalize_plan: tool({
             description: "Finalize a plan, logging metrics and cleaning cache.",
-
-
+            args: {
+                plan_id: tool.schema.string(),
+                outcome: tool.schema.string()
+            },
+            async execute({ plan_id, outcome }) {
+                log("Tool call: finalize_plan", { plan_id, outcome });
                 await ensureCache();
                 const report = { plan_id, outcome, time: new Date().toISOString() };
                 await fs.appendFile(path.join(CACHE_DIR, "gaps.jsonl"), JSON.stringify(report) + "\n");
                 const deleted = await cleanCache();
-                return `
+                return `Finalized. Cache hygiene: deleted ${deleted} old items.`;
             }
-        }
+        })
     };
 }
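For reference, every tool in the rewritten system-tools.js follows the same pattern: a tool({ ... }) call from @opencode-ai/plugin with a description, an args map built from tool.schema validators, and an async execute that returns a string. A stripped-down sketch of that shape (the tool name and body here are illustrative, not part of the package; chaining .optional().default() on tool.schema.number() is an assumption modeled on the enum/string usage above):

    import { tool } from "@opencode-ai/plugin";

    // Hypothetical example following the same structure as fast_search / read_slice above.
    const example_tool = tool({
        description: "Echo a message back to the agent.",
        args: {
            message: tool.schema.string(),
            times: tool.schema.number().optional().default(1)
        },
        async execute({ message, times }) {
            // Tools in this package return plain strings (or JSON-stringified objects).
            return Array(times).fill(message).join("\n");
        }
    });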
package/package.json
CHANGED
@@ -1,30 +1,29 @@
 {
   "name": "opencode-autognosis",
-  "version": "0.1.5",
+  "version": "1.0.1",
   "description": "Transforms OpenCode agents into 'miniature engineers' with deep codebase awareness. Includes fast structural search (ast-grep), instant symbol navigation (ctags), and a disciplined 'Plan → Execute → Patch' workflow.",
   "type": "module",
-  "
-
-  },
-  "types": "./dist/index.d.ts",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
   "files": [
     "dist",
     "assets",
-    "commands",
     "LICENSE",
-    "README.md"
-    "gemini-extension.json"
+    "README.md"
   ],
   "scripts": {
     "build": "tsc -p tsconfig.json",
     "prepublishOnly": "npm run build"
   },
-  "
-  "
-  "
+  "opencode": {
+    "type": "plugin",
+    "hooks": []
   },
   "devDependencies": {
+    "@opencode-ai/plugin": "^1.0.162",
+    "@opencode-ai/sdk": "^1.1.40",
     "@types/node": "^20.0.0",
-    "typescript": "^5.0.0"
+    "typescript": "^5.0.0",
+    "zod": "^4.3.6"
   }
 }
package/commands/autognosis/init.toml
DELETED
@@ -1,13 +0,0 @@
-description = "Initialize or check the Autognosis environment"
-
-# The command executes the tool via the agent
-# Or it can just be a prompt that calls the tool
-prompt = """
-Please run the autognosis_init tool with the following arguments:
-mode = "{{mode}}"
-token = "{{token}}"
-"""
-
-[parameters]
-mode = { type = "string", description = "The mode to run in (plan or apply)", default = "plan" }
-token = { type = "string", description = "The confirmation token for apply mode" }
package/gemini-extension.json
DELETED
@@ -1,12 +0,0 @@
-{
-  "name": "opencode-autognosis",
-  "version": "0.1.3",
-  "description": "Autognosis extension for Gemini CLI",
-  "mcpServers": {
-    "autognosis": {
-      "command": "node",
-      "args": ["${extensionPath}/dist/index.js"],
-      "cwd": "${extensionPath}"
-    }
-  }
-}