opencode-autognosis 2.0.2 → 2.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/database.d.ts +24 -4
- package/dist/database.js +234 -98
- package/dist/index.d.ts +1 -1
- package/dist/index.js +2 -16
- package/dist/performance-optimization.js +18 -7
- package/dist/system-tools.js +137 -31
- package/dist/unified-api.d.ts +3 -0
- package/dist/unified-api.js +160 -0
- package/package.json +1 -1
package/dist/database.d.ts
CHANGED
@@ -4,17 +4,37 @@ export declare class CodeGraphDB {
     private workerRunning;
     constructor();
     private initialize;
+    /**
+     * Background Job Management
+     */
+    createJob(id: string, type: string, metadata?: any): void;
+    updateJob(id: string, updates: {
+        status?: string;
+        progress?: number;
+        result?: string;
+        error?: string;
+    }): void;
+    getJob(id: string): unknown;
+    listJobs(type?: string, limit?: number): unknown[];
     private startWorker;
     private processEmbeddingQueue;
     /**
      * Syncs a ChunkCard (JSON) into the SQLite Index.
-     * This is an "Upsert" operation.
      */
     ingestChunkCard(card: ChunkCard): void;
-    /**
-     * Remove a card from the index
-     */
     deleteChunkCard(cardId: string): void;
+    recordExecution(planId: string | undefined, toolName: string, args: any, isOnPlan: boolean): void;
+    ingestCommits(commits: any[]): void;
+    getHotFiles(pathPrefix?: string, limit?: number): {
+        path: string;
+        count: number;
+    }[];
+    getPlanMetrics(planId: string): {
+        total: number;
+        on_plan: number;
+        off_plan: number;
+        compliance: number;
+    };
     findDependents(filePath: string): string[];
     searchSymbols(query: string): any[];
     semanticSearch(query: string, limit?: number): Promise<any[]>;
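The job-management surface declared above is polled rather than event-driven: a caller creates a row, mutates it as work proceeds, and reads it back. A minimal sketch of that lifecycle against the declared signatures; `JobStore` and `JobRow` are illustrative names inferred from the `background_jobs` schema in `database.js` below, not types the package exports:

```ts
// Assumed row shape, inferred from the background_jobs table (illustrative).
interface JobRow {
    id: string;
    type: string;      // 'validation' | 'setup' | 'indexing'
    status: string;    // 'pending' | 'running' | 'completed' | 'failed'
    progress: number;  // 0-100
    result: string | null;
    error: string | null;
}

// Structural subset of the CodeGraphDB surface declared above.
interface JobStore {
    createJob(id: string, type: string, metadata?: any): void;
    updateJob(id: string, updates: { status?: string; progress?: number; result?: string; error?: string }): void;
    getJob(id: string): unknown;
}

// Drive one job through its states; getJob returns unknown, so the caller
// narrows the row shape at the edge.
function runDemo(db: JobStore): JobRow | undefined {
    const id = `job-demo-${Date.now()}`;
    db.createJob(id, "indexing", { root: "." });
    db.updateJob(id, { status: "running", progress: 50 });
    db.updateJob(id, { status: "completed", progress: 100, result: "ok" });
    return db.getJob(id) as JobRow | undefined;
}
```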
package/dist/database.js
CHANGED
@@ -62,10 +62,40 @@ export class CodeGraphDB {
       FOREIGN KEY(source_chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
     );
 
+    CREATE TABLE IF NOT EXISTS commits (
+      hash TEXT PRIMARY KEY,
+      author TEXT,
+      date DATETIME,
+      message TEXT,
+      files_touched TEXT -- JSON array of paths
+    );
+
+    CREATE TABLE IF NOT EXISTS plan_ledger (
+      id INTEGER PRIMARY KEY AUTOINCREMENT,
+      plan_id TEXT,
+      tool_name TEXT,
+      args TEXT,
+      is_on_plan BOOLEAN,
+      timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
+    );
+
+    CREATE TABLE IF NOT EXISTS background_jobs (
+      id TEXT PRIMARY KEY,
+      type TEXT, -- 'validation', 'setup', 'indexing'
+      status TEXT DEFAULT 'pending', -- pending, running, completed, failed
+      progress INTEGER DEFAULT 0,
+      result TEXT,
+      error TEXT,
+      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
+    );
+
     -- Indexes for performance
     CREATE INDEX IF NOT EXISTS idx_files_path ON files(path);
     CREATE INDEX IF NOT EXISTS idx_symbols_name ON symbols(name);
     CREATE INDEX IF NOT EXISTS idx_dependencies_target ON dependencies(target_path);
+    CREATE INDEX IF NOT EXISTS idx_ledger_plan ON plan_ledger(plan_id);
+    CREATE INDEX IF NOT EXISTS idx_jobs_status ON background_jobs(status);
     `);
     // Migrations
     try {
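The `plan_ledger` table is what turns recorded tool executions into a compliance score: `getPlanMetrics` (added further down in this file) computes `round(on_plan / total * 100)`, defaulting to 100 for an empty ledger. A worked example with three illustrative rows:

```ts
// Compliance for one plan, mirroring the getPlanMetrics arithmetic.
// The rows are illustrative values, not real ledger contents.
const rows = [
    { plan_id: "plan-1", is_on_plan: 1 }, // fast_search called with plan_id
    { plan_id: "plan-1", is_on_plan: 1 }, // read_slice called with plan_id
    { plan_id: "plan-1", is_on_plan: 0 }, // call recorded against the plan without it
];
const total = rows.length;                              // 3
const onPlan = rows.filter(r => r.is_on_plan).length;   // 2
const compliance = total > 0 ? Math.round((onPlan / total) * 100) : 100; // 67
console.log({ total, on_plan: onPlan, off_plan: total - onPlan, compliance });
```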
@@ -73,6 +103,47 @@ export class CodeGraphDB {
         }
         catch { }
     }
+    /**
+     * Background Job Management
+     */
+    createJob(id, type, metadata) {
+        this.db.prepare(`
+      INSERT INTO background_jobs (id, type, status, progress, result)
+      VALUES (?, ?, 'pending', 0, ?)
+    `).run(id, type, metadata ? JSON.stringify(metadata) : null);
+    }
+    updateJob(id, updates) {
+        const sets = [];
+        const params = [];
+        if (updates.status) {
+            sets.push("status = ?");
+            params.push(updates.status);
+        }
+        if (updates.progress !== undefined) {
+            sets.push("progress = ?");
+            params.push(updates.progress);
+        }
+        if (updates.result) {
+            sets.push("result = ?");
+            params.push(updates.result);
+        }
+        if (updates.error) {
+            sets.push("error = ?");
+            params.push(updates.error);
+        }
+        sets.push("updated_at = CURRENT_TIMESTAMP");
+        params.push(id);
+        this.db.prepare(`UPDATE background_jobs SET ${sets.join(", ")} WHERE id = ?`).run(...params);
+    }
+    getJob(id) {
+        return this.db.prepare("SELECT * FROM background_jobs WHERE id = ?").get(id);
+    }
+    listJobs(type, limit = 10) {
+        if (type) {
+            return this.db.prepare("SELECT * FROM background_jobs WHERE type = ? ORDER BY created_at DESC LIMIT ?").all(type, limit);
+        }
+        return this.db.prepare("SELECT * FROM background_jobs ORDER BY created_at DESC LIMIT ?").all(limit);
+    }
     async startWorker() {
         if (this.workerRunning)
             return;
@@ -83,7 +154,7 @@ export class CodeGraphDB {
                 await this.processEmbeddingQueue();
             }
             catch (e) {
-                //
+                // Log to file if needed, but avoid console to protect TUI
             }
         }, 5000); // Check every 5s
     }
@@ -123,18 +194,15 @@ export class CodeGraphDB {
         }
         catch (error) {
             if (task.retries > 3) {
-                // Give up
                 this.db.prepare("UPDATE embedding_queue SET status = 'failed' WHERE chunk_id = ?").run(task.chunk_id);
             }
             else {
-                // Retry
                 this.db.prepare("UPDATE embedding_queue SET status = 'pending', retries = retries + 1 WHERE chunk_id = ?").run(task.chunk_id);
             }
         }
     }
     /**
      * Syncs a ChunkCard (JSON) into the SQLite Index.
-     * This is an "Upsert" operation.
      */
     ingestChunkCard(card) {
         const insertFile = this.db.prepare(`
@@ -169,22 +237,17 @@ export class CodeGraphDB {
         const deleteOldSymbols = this.db.prepare('DELETE FROM symbols WHERE chunk_id = ?');
         const deleteOldDeps = this.db.prepare('DELETE FROM dependencies WHERE source_chunk_id = ?');
         const transaction = this.db.transaction(() => {
-            // 1. Upsert File
             const fileRes = insertFile.get(card.file_path, card.metadata.hash);
             const fileId = fileRes.id;
-
-
-
-
-            // Use the summary or content as the text to embed
-            const textToEmbed = `${card.chunk_type.toUpperCase()} for ${path.basename(card.file_path)}\n\n${card.content.slice(0, 2000)}`;
+            insertChunk.run(card.id, fileId, card.chunk_type, card.metadata.complexity_score, card.content.slice(0, 500));
+            const textToEmbed = `${card.chunk_type.toUpperCase()} for ${path.basename(card.file_path)}
+
+${card.content.slice(0, 2000)}`;
             queueEmbedding.run(card.id, textToEmbed);
-            // 4. Replace Symbols
             deleteOldSymbols.run(card.id);
             for (const sym of card.metadata.symbols) {
                 insertSymbol.run(card.id, sym);
             }
-            // 5. Replace Dependencies
             deleteOldDeps.run(card.id);
             for (const dep of card.metadata.dependencies) {
                 insertDep.run(card.id, dep);
@@ -192,18 +255,61 @@ export class CodeGraphDB {
         });
         transaction();
     }
-    /**
-     * Remove a card from the index
-     */
     deleteChunkCard(cardId) {
         this.db.prepare('DELETE FROM chunks WHERE id = ?').run(cardId);
     }
-
-
-
+    recordExecution(planId, toolName, args, isOnPlan) {
+        this.db.prepare(`
+      INSERT INTO plan_ledger (plan_id, tool_name, args, is_on_plan)
+      VALUES (?, ?, ?, ?)
+    `).run(planId || 'no-plan', toolName, JSON.stringify(args), isOnPlan ? 1 : 0);
+    }
+    ingestCommits(commits) {
+        const insert = this.db.prepare(`
+      INSERT INTO commits (hash, author, date, message, files_touched)
+      VALUES (?, ?, ?, ?, ?)
+      ON CONFLICT(hash) DO NOTHING
+    `);
+        const transaction = this.db.transaction((data) => {
+            for (const c of data) {
+                insert.run(c.hash, c.author, c.date, c.message, JSON.stringify(c.files));
+            }
+        });
+        transaction(commits);
+    }
+    getHotFiles(pathPrefix = '', limit = 10) {
+        const recent = this.db.prepare(`
+      SELECT files_touched FROM commits ORDER BY date DESC LIMIT 100
+    `).all();
+        const counts = {};
+        for (const r of recent) {
+            try {
+                const files = JSON.parse(r.files_touched);
+                for (const f of files) {
+                    if (f.startsWith(pathPrefix)) {
+                        counts[f] = (counts[f] || 0) + 1;
+                    }
+                }
+            }
+            catch { }
+        }
+        return Object.entries(counts)
+            .map(([path, count]) => ({ path, count }))
+            .sort((a, b) => b.count - a.count)
+            .slice(0, limit);
+    }
+    getPlanMetrics(planId) {
+        const total = this.db.prepare("SELECT COUNT(*) as c FROM plan_ledger WHERE plan_id = ?").get(planId);
+        const onPlan = this.db.prepare("SELECT COUNT(*) as c FROM plan_ledger WHERE plan_id = ? AND is_on_plan = 1").get(planId);
+        const offPlan = this.db.prepare("SELECT COUNT(*) as c FROM plan_ledger WHERE plan_id = ? AND is_on_plan = 0").get(planId);
+        return {
+            total: total.c,
+            on_plan: onPlan.c,
+            off_plan: offPlan.c,
+            compliance: total.c > 0 ? Math.round((onPlan.c / total.c) * 100) : 100
+        };
+    }
     findDependents(filePath) {
-        // Find all chunks that depend on this file path
-        // Note: dependency paths might be relative or absolute, simplistic matching for now
         const query = this.db.prepare(`
            SELECT DISTINCT f.path
            FROM files f
@@ -211,7 +317,6 @@ export class CodeGraphDB {
            JOIN dependencies d ON c.id = d.source_chunk_id
            WHERE d.target_path LIKE ? OR d.target_path = ?
        `);
-        // Attempt to match exact path or likely relative imports (simplistic)
         const basename = path.basename(filePath);
         const results = query.all(`%/${basename}%`, basename);
         return results.map(r => r.path);
@@ -234,9 +339,6 @@ export class CodeGraphDB {
         const queryVec = await ollama.getEmbedding(query);
         if (queryVec.length === 0)
             return [];
-        // Get all embeddings from DB
-        // SQLite doesn't have vector math, so we fetch all and sort in JS
-        // Optimizations: In future, use sqlite-vec or filter by complexity/type first
         const chunks = this.db.prepare(`
            SELECT c.id, c.content_summary, c.type, f.path, c.embedding
            FROM chunks c
@@ -246,15 +348,13 @@ export class CodeGraphDB {
         const results = chunks.map(chunk => {
             const vector = new Float32Array(chunk.embedding.buffer, chunk.embedding.byteOffset, chunk.embedding.byteLength / 4);
             const similarity = this.cosineSimilarity(queryVec, vector);
-            return { ...chunk, similarity, embedding: undefined };
+            return { ...chunk, similarity, embedding: undefined };
         });
         results.sort((a, b) => b.similarity - a.similarity);
         return results.slice(0, limit);
     }
     cosineSimilarity(vecA, vecB) {
-        let dot = 0;
-        let normA = 0;
-        let normB = 0;
+        let dot = 0, normA = 0, normB = 0;
         for (let i = 0; i < vecA.length; i++) {
             dot += vecA[i] * vecB[i];
             normA += vecA[i] * vecA[i];
@@ -270,69 +370,60 @@ export class CodeGraphDB {
         const embedded = this.db.prepare('SELECT COUNT(*) as c FROM chunks WHERE embedding IS NOT NULL').get();
         const queue = this.db.prepare("SELECT COUNT(*) as c FROM embedding_queue WHERE status = 'pending'").get();
         return {
-            files: files.c,
-
-            symbols: symbols.c,
-            dependencies: deps.c,
-            embeddings: {
-                completed: embedded.c,
-                pending: queue.c
-            }
+            files: files.c, chunks: chunks.c, symbols: symbols.c, dependencies: deps.c,
+            embeddings: { completed: embedded.c, pending: queue.c }
         };
     }
 }
-// Singleton instance for the plugin
 let dbInstance = null;
 export function getDb() {
-    if (!dbInstance) {
+    if (!dbInstance)
         dbInstance = new CodeGraphDB();
-    }
     return dbInstance;
 }
 export function graphTools() {
     return {
         autognosis_setup_ai: tool({
-            description: "Configure local AI capabilities (Ollama)
-            args: {
-                model: tool.schema.string().optional().default(DEFAULT_EMBEDDING_MODEL).describe("Embedding model to pull")
-            },
+            description: "Configure local AI capabilities (Ollama) in the background.",
+            args: { model: tool.schema.string().optional().default(DEFAULT_EMBEDDING_MODEL) },
             async execute({ model }) {
-
-
-
-
-
-
-
-                await ollama.pullModel(model);
-                return JSON.stringify({
-                    status: "SUCCESS",
-                    message: `${statusMsg} Server is running. Model ${model} is ready.`,
-                    config: {
-                        model,
-                        base_url: "http://127.0.0.1:11434"
+                const jobId = `job-setup-ai-${Date.now()}`;
+                getDb().createJob(jobId, "setup", { model });
+                (async () => {
+                    try {
+                        getDb().updateJob(jobId, { status: "running", progress: 10 });
+                        if (!(await ollama.isInstalled())) {
+                            await ollama.install();
                     }
-
-
-
-
-
+                        getDb().updateJob(jobId, { progress: 40 });
+                        await ollama.startServer();
+                        getDb().updateJob(jobId, { progress: 60 });
+                        await ollama.pullModel(model);
+                        getDb().updateJob(jobId, {
+                            status: "completed",
+                            progress: 100,
+                            result: `Model ${model} is ready.`
+                        });
+                    }
+                    catch (error) {
+                        getDb().updateJob(jobId, { status: "failed", error: error.message });
+                    }
+                })();
+                return JSON.stringify({
+                    status: "STARTED",
+                    message: "AI Setup started in background.",
+                    job_id: jobId,
+                    instruction: "Use graph_background_status to check progress."
+                }, null, 2);
             }
         }),
         graph_semantic_search: tool({
-            description: "Search the codebase using natural language (Vector/Semantic Search).
-            args: {
-                query: tool.schema.string().describe("Natural language query"),
-                limit: tool.schema.number().optional().default(10).describe("Max results")
-            },
+            description: "Search the codebase using natural language (Vector/Semantic Search).",
+            args: { query: tool.schema.string(), limit: tool.schema.number().optional().default(10) },
             async execute({ query, limit }) {
                 try {
                     const results = await getDb().semanticSearch(query, limit);
-                    return JSON.stringify({
-                        status: "SUCCESS",
-                        query,
-                        results
-                    }, null, 2);
+                    return JSON.stringify({ status: "SUCCESS", query, results }, null, 2);
                 }
                 catch (error) {
                     return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
@@ -340,19 +431,12 @@ export function graphTools() {
                 }
             }
         }),
         graph_query_dependents: tool({
-            description: "Find all files that depend on a specific file
-            args: {
-                file_path: tool.schema.string().describe("File path to analyze"),
-            },
+            description: "Find all files that depend on a specific file.",
+            args: { file_path: tool.schema.string() },
             async execute({ file_path }) {
                 try {
                     const dependents = getDb().findDependents(file_path);
-                    return JSON.stringify({
-                        status: "SUCCESS",
-                        file_path,
-                        dependents,
-                        count: dependents.length
-                    }, null, 2);
+                    return JSON.stringify({ status: "SUCCESS", file_path, dependents, count: dependents.length }, null, 2);
                 }
                 catch (error) {
                     return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
@@ -360,19 +444,12 @@ export function graphTools() {
                 }
             }
         }),
         graph_search_symbols: tool({
-            description: "Fast fuzzy search for symbols
-            args: {
-                query: tool.schema.string().describe("Symbol name query"),
-            },
+            description: "Fast fuzzy search for symbols across the entire codebase index.",
+            args: { query: tool.schema.string() },
             async execute({ query }) {
                 try {
                     const results = getDb().searchSymbols(query);
-                    return JSON.stringify({
-                        status: "SUCCESS",
-                        query,
-                        results,
-                        count: results.length
-                    }, null, 2);
+                    return JSON.stringify({ status: "SUCCESS", query, results, count: results.length }, null, 2);
                 }
                 catch (error) {
                     return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
@@ -384,11 +461,70 @@ export function graphTools() {
             args: {},
             async execute() {
                 try {
-
-
-
-
-
+                    return JSON.stringify({ status: "SUCCESS", stats: getDb().getStats() }, null, 2);
+                }
+                catch (error) {
+                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
+                }
+            }
+        }),
+        journal_build: tool({
+            description: "Scan git history and populate the Change Journal.",
+            args: { limit: tool.schema.number().optional().default(100) },
+            async execute({ limit }) {
+                try {
+                    const { execSync } = await import("node:child_process");
+                    const logOut = execSync(`git log -n ${limit} --pretty=format:"%H|%an|%ad|%s" --date=iso`, { encoding: 'utf-8' });
+                    const commits = logOut.split('\n').filter(Boolean).map(line => {
+                        const [hash, author, date, message] = line.split('|');
+                        const files = execSync(`git show --name-only --pretty="" ${hash}`, { encoding: 'utf-8' }).split('\n').filter(Boolean);
+                        return { hash, author, date, message, files };
+                    });
+                    getDb().ingestCommits(commits);
+                    return JSON.stringify({ status: "SUCCESS", message: `Ingested ${commits.length} commits.` }, null, 2);
+                }
+                catch (error) {
+                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
+                }
+            }
+        }),
+        journal_query_hot_files: tool({
+            description: "Query the Change Journal for frequently changed files.",
+            args: { path_prefix: tool.schema.string().optional().default(""), limit: tool.schema.number().optional().default(10) },
+            async execute({ path_prefix, limit }) {
+                try {
+                    return JSON.stringify({ status: "SUCCESS", hot_files: getDb().getHotFiles(path_prefix, limit) }, null, 2);
+                }
+                catch (error) {
+                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
+                }
+            }
+        }),
+        graph_get_plan_metrics: tool({
+            description: "Retrieve execution metrics for a specific plan ID.",
+            args: { plan_id: tool.schema.string() },
+            async execute({ plan_id }) {
+                try {
+                    return JSON.stringify({ status: "SUCCESS", plan_id, metrics: getDb().getPlanMetrics(plan_id) }, null, 2);
+                }
+                catch (error) {
+                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
+                }
+            }
+        }),
+        graph_background_status: tool({
+            description: "Check status of background tasks (validation, setup, indexing).",
+            args: {
+                job_id: tool.schema.string().optional(),
+                type: tool.schema.enum(["validation", "setup", "indexing"]).optional(),
+                limit: tool.schema.number().optional().default(5)
+            },
+            async execute({ job_id, type, limit }) {
+                try {
+                    if (job_id) {
+                        return JSON.stringify({ status: "SUCCESS", job: getDb().getJob(job_id) }, null, 2);
+                    }
+                    return JSON.stringify({ status: "SUCCESS", jobs: getDb().listJobs(type, limit) }, null, 2);
                 }
                 catch (error) {
                     return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
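`semanticSearch` remains a brute-force scan: every stored embedding BLOB is decoded into a `Float32Array` view and ranked by cosine similarity in JS (the removed comments noted `sqlite-vec` as a future optimization). A standalone sketch of that decode-and-rank pattern; the zero-norm guard here is an addition for safety, not necessarily present in the shipped `cosineSimilarity`:

```ts
// Same arithmetic as the class method, with a zero-denominator guard added.
function cosineSimilarity(a: Float32Array, b: Float32Array): number {
    let dot = 0, normA = 0, normB = 0;
    for (let i = 0; i < a.length; i++) {
        dot += a[i] * b[i];
        normA += a[i] * a[i];
        normB += b[i] * b[i];
    }
    const denom = Math.sqrt(normA) * Math.sqrt(normB);
    return denom === 0 ? 0 : dot / denom;
}

// A better-sqlite3 BLOB arrives as a Node Buffer; view it as float32 without copying.
function decode(embedding: Buffer): Float32Array {
    return new Float32Array(embedding.buffer, embedding.byteOffset, embedding.byteLength / 4);
}

const query = new Float32Array([1, 0]);
const rows = [
    { id: "a", embedding: Buffer.from(new Float32Array([1, 0]).buffer) }, // similarity 1
    { id: "b", embedding: Buffer.from(new Float32Array([0, 1]).buffer) }, // similarity 0
];
const ranked = rows
    .map(r => ({ id: r.id, similarity: cosineSimilarity(query, decode(r.embedding)) }))
    .sort((x, y) => y.similarity - x.similarity);
console.log(ranked); // [ { id: 'a', similarity: 1 }, { id: 'b', similarity: 0 } ]
```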
package/dist/index.d.ts
CHANGED
package/dist/index.js
CHANGED
@@ -1,22 +1,8 @@
-import {
-import { gitWorktreeTools } from "./git-worktree.js";
-import { testingTools } from "./testing-infrastructure.js";
-import { chunkCardsTools } from "./chunk-cards.js";
-import { activeSetTools } from "./activeset.js";
-import { moduleSummariesTools } from "./module-summaries.js";
-import { performanceTools } from "./performance-optimization.js";
-import { graphTools } from "./database.js";
+import { unifiedTools } from "./unified-api.js";
 export const AutognosisPlugin = async () => {
     return {
         tool: {
-            ...
-            ...gitWorktreeTools(),
-            ...testingTools(),
-            ...chunkCardsTools(),
-            ...activeSetTools(),
-            ...moduleSummariesTools(),
-            ...performanceTools(),
-            ...graphTools(),
+            ...unifiedTools(),
         },
     };
 };
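The entry point collapses eight spread factories into one: each factory returns a map of named tools, and the plugin now spreads a single aggregate. A small sketch of that composition shape, with `exampleTools` standing in for `unifiedTools()` and the plugin host:

```ts
// Each factory returns a map of named tools; the plugin spreads one aggregate map.
type ToolMap = Record<string, { execute(args: any): Promise<string> }>;

function exampleTools(): ToolMap {
    return {
        hello: { async execute({ name }: { name: string }) { return `hi ${name}`; } },
    };
}

// 2.0.4 spreads only unifiedTools(); 2.0.2 spread eight separate factories here.
const ExamplePlugin = async () => ({
    tool: {
        ...exampleTools(),
    },
});

ExamplePlugin().then(p => p.tool.hello.execute({ name: "world" }).then(console.log));
```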
package/dist/performance-optimization.js
CHANGED
@@ -433,29 +433,40 @@ export function performanceTools() {
             description: "Check status of background tasks and operations.",
             args: {
                 task_id: tool.schema.string().optional().describe("Specific task ID to check"),
-                task_type: tool.schema.enum(["indexing", "caching", "cleanup", "analysis"]).optional().describe("Filter by task type")
+                task_type: tool.schema.enum(["indexing", "caching", "cleanup", "analysis", "validation", "setup"]).optional().describe("Filter by task type")
             },
             async execute({ task_id, task_type }) {
                 log("Tool call: perf_background_status", { task_id, task_type });
                 try {
+                    const tasks = [];
+                    // 1. Check DB Jobs
+                    if (task_id) {
+                        const job = getDb().getJob(task_id);
+                        if (job)
+                            tasks.push(job);
+                    }
+                    else {
+                        const dbJobs = getDb().listJobs(task_type, 10);
+                        tasks.push(...dbJobs);
+                    }
+                    // 2. Check File-based tasks
                     await ensurePerfDirs();
                     const files = await fs.readdir(PERF_DIR);
-                    const tasks = [];
                     for (const file of files) {
                         if (file.startsWith('task-') && file.endsWith('.json')) {
                             try {
                                 const taskPath = path.join(PERF_DIR, file);
                                 const task = JSON.parse(await fs.readFile(taskPath, 'utf-8'));
-                                // Apply filters
                                 if (task_id && task.id !== task_id)
                                     continue;
                                 if (task_type && task.type !== task_type)
                                     continue;
-
-
-
-
+                                // Avoid duplication if already in DB (shouldn't happen with new ID scheme)
+                                if (!tasks.some(t => t.id === task.id)) {
+                                    tasks.push(task);
+                                }
                             }
+                            catch (error) { }
                         }
                     }
                     return JSON.stringify({
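`perf_background_status` now reports from two sources: SQLite-backed jobs first, then the older file-based `task-*.json` records, skipping any id already seen. The merge in isolation, with illustrative data:

```ts
// De-duplicating merge: DB jobs take precedence; file tasks fill in the rest.
interface Task { id: string; status: string; }

function mergeTasks(dbJobs: Task[], fileTasks: Task[]): Task[] {
    const tasks: Task[] = [...dbJobs];
    for (const t of fileTasks) {
        if (!tasks.some(existing => existing.id === t.id)) {
            tasks.push(t);
        }
    }
    return tasks;
}

const merged = mergeTasks(
    [{ id: "job-validate-1", status: "running" }],
    [{ id: "task-index-7", status: "completed" }, { id: "job-validate-1", status: "stale" }],
);
console.log(merged.map(t => t.id)); // [ 'job-validate-1', 'task-index-7' ]
```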
package/dist/system-tools.js
CHANGED
@@ -6,6 +6,7 @@ import * as path from "node:path";
 import { promisify } from "node:util";
 import * as crypto from "node:crypto";
 import { Logger } from "./services/logger.js";
+import { getDb } from "./database.js";
 const execAsync = promisify(exec);
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
@@ -94,7 +95,10 @@ async function maintainSymbolIndex() {
 // TOOLS
 // =============================================================================
 export function systemTools() {
-    let pendingInitToken = null;
+    let pendingInitToken = null;
+    const record = (planId, tool, args) => {
+        getDb().recordExecution(planId, tool, args, !!planId);
+    };
     return {
         autognosis_init: tool({
             description: "Initialize or check the Autognosis environment. Two-phase: 'plan' (default) generates a token, 'apply' executes it.",
@@ -104,6 +108,7 @@ export function systemTools() {
             },
             async execute({ mode, token }) {
                 log("Tool call: autognosis_init", { mode });
+                record(undefined, "autognosis_init", { mode });
                 if (mode === "plan") {
                     const checks = {
                         rg: await checkBinary("rg"),
@@ -139,10 +144,12 @@ export function systemTools() {
             args: {
                 query: tool.schema.string(),
                 mode: tool.schema.enum(["filename", "content"]).optional().default("filename"),
-                path: tool.schema.string().optional().default(".")
+                path: tool.schema.string().optional().default("."),
+                plan_id: tool.schema.string().optional().describe("Associated Plan ID")
             },
-            async execute({ query, mode, path: searchPath }) {
-                log("Tool call: fast_search", { query, mode, searchPath });
+            async execute({ query, mode, path: searchPath, plan_id }) {
+                log("Tool call: fast_search", { query, mode, searchPath, plan_id });
+                record(plan_id, "fast_search", { query, mode, searchPath });
                 if (mode === "content") {
                     if (!(await checkBinary("rg")))
                         return "Error: 'rg' not installed.";
@@ -166,10 +173,12 @@ export function systemTools() {
             args: {
                 file: tool.schema.string(),
                 start_line: tool.schema.number(),
-                end_line: tool.schema.number()
+                end_line: tool.schema.number(),
+                plan_id: tool.schema.string().optional().describe("Associated Plan ID")
             },
-            async execute({ file, start_line, end_line }) {
-                log("Tool call: read_slice", { file, start_line, end_line });
+            async execute({ file, start_line, end_line, plan_id }) {
+                log("Tool call: read_slice", { file, start_line, end_line, plan_id });
+                record(plan_id, "read_slice", { file, start_line, end_line });
                 const { stdout, stderr } = await runCmd(`sed -n '${start_line},${end_line}p;${end_line + 1}q' "${file}"`);
                 if (stderr)
                     return `Error: ${stderr}`;
@@ -179,10 +188,12 @@ export function systemTools() {
         symbol_query: tool({
             description: "Query the symbol index. Rebuilds automatically if stale.",
             args: {
-                symbol: tool.schema.string()
+                symbol: tool.schema.string(),
+                plan_id: tool.schema.string().optional().describe("Associated Plan ID")
             },
-            async execute({ symbol }) {
-                log("Tool call: symbol_query", { symbol });
+            async execute({ symbol, plan_id }) {
+                log("Tool call: symbol_query", { symbol, plan_id });
+                record(plan_id, "symbol_query", { symbol });
                 const maint = await maintainSymbolIndex();
                 if (maint.status === "unavailable")
                     return JSON.stringify({ error: maint.reason });
@@ -194,10 +205,12 @@ export function systemTools() {
         jump_to_symbol: tool({
             description: "Jump to a symbol's definition by querying the index and reading the slice.",
             args: {
-                symbol: tool.schema.string()
+                symbol: tool.schema.string(),
+                plan_id: tool.schema.string().optional().describe("Associated Plan ID")
             },
-            async execute({ symbol }) {
-                log("Tool call: jump_to_symbol", { symbol });
+            async execute({ symbol, plan_id }) {
+                log("Tool call: jump_to_symbol", { symbol, plan_id });
+                record(plan_id, "jump_to_symbol", { symbol });
                 const maint = await maintainSymbolIndex();
                 if (maint.status !== "ok")
                     return JSON.stringify({ error: maint.reason });
@@ -223,34 +236,121 @@ export function systemTools() {
             async execute({ symbol, intent }) {
                 log("Tool call: brief_fix_loop", { symbol, intent });
                 const planId = `plan-${Date.now()}`;
-
+                record(planId, "brief_fix_loop", { symbol, intent });
+                const maint = await maintainSymbolIndex();
+                const tagsFile = path.join(CACHE_DIR, "tags");
+                const { stdout: tagLine } = await runCmd(`grep -P "^${symbol}\t" "${tagsFile}" | head -n 1`);
+                const locusFile = tagLine ? tagLine.split('\t')[1] : null;
+                let dependents = [];
+                let hotFiles = [];
+                if (locusFile) {
+                    dependents = getDb().findDependents(locusFile);
+                    hotFiles = getDb().getHotFiles('', 20);
+                }
+                const worklist = dependents.map(d => ({
+                    file: d,
+                    is_hot: hotFiles.some(h => h.path === d),
+                    reason: "Dependency impact"
+                }));
+                return JSON.stringify({
+                    plan_id: planId,
+                    symbol,
+                    intent,
+                    locus: { file: locusFile },
+                    worklist,
+                    status: "PLAN_GENERATED",
+                    metadata: {
+                        fingerprint: maint.status,
+                        generated_at: new Date().toISOString()
+                    }
+                }, null, 2);
             }
         }),
         prepare_patch: tool({
             description: "Generate a .diff artifact for the current changes.",
             args: {
-                message: tool.schema.string()
+                message: tool.schema.string(),
+                plan_id: tool.schema.string().optional().describe("Associated Plan ID")
             },
-            async execute({ message }) {
-                log("Tool call: prepare_patch", { message });
+            async execute({ message, plan_id }) {
+                log("Tool call: prepare_patch", { message, plan_id });
+                record(plan_id, "prepare_patch", { message });
                 await ensureCache();
-                const
-                const
-
+                const patchId = `patch-${Date.now()}`;
+                const patchPath = path.join(CACHE_DIR, `${patchId}.diff`);
+                const { stdout: diff } = await runCmd("git diff");
+                if (!diff)
                     return "No changes to patch.";
-
-
+                const header = {
+                    patch_id: patchId,
+                    plan_id: plan_id || "adhoc",
+                    message,
+                    created_at: new Date().toISOString()
+                };
+                await fs.writeFile(patchPath, `// PATCH_METADATA: ${JSON.stringify(header)}\n\n${diff}`);
+                return JSON.stringify({ status: "SUCCESS", patch_id: patchId, path: patchPath }, null, 2);
             }
         }),
         validate_patch: tool({
-            description: "Validate a patch by applying it in a fresh worktree.",
+            description: "Validate a patch by applying it in a fresh worktree and running build (Background Job).",
             args: {
-                patch_path: tool.schema.string()
+                patch_path: tool.schema.string(),
+                plan_id: tool.schema.string().optional().describe("Associated Plan ID")
             },
-            async execute({ patch_path }) {
-                log("Tool call: validate_patch", { patch_path });
-
-
+            async execute({ patch_path, plan_id }) {
+                log("Tool call: validate_patch (background)", { patch_path, plan_id });
+                record(plan_id, "validate_patch", { patch_path });
+                const jobId = `job-validate-${Date.now()}`;
+                getDb().createJob(jobId, "validation", { patch_path, plan_id });
+                // Spawn background worker
+                (async () => {
+                    getDb().updateJob(jobId, { status: "running", progress: 10 });
+                    const tempWorktree = path.join(PROJECT_ROOT, ".opencode", "temp-" + jobId);
+                    try {
+                        await runCmd(`git worktree add -d "${tempWorktree}"`);
+                        getDb().updateJob(jobId, { progress: 30 });
+                        const content = await fs.readFile(patch_path, "utf-8");
+                        const parts = content.split('\n\n');
+                        const diffOnly = parts.length > 1 ? parts.slice(1).join('\n\n') : content;
+                        const tempDiff = path.join(tempWorktree, "valid.diff");
+                        await fs.writeFile(tempDiff, diffOnly);
+                        const { error: applyError } = await runCmd(`git apply "${tempDiff}"`, tempWorktree);
+                        if (applyError)
+                            throw new Error(`Apply failed: ${applyError.message}`);
+                        getDb().updateJob(jobId, { progress: 60 });
+                        let buildStatus = "SKIPPED";
+                        if (fsSync.existsSync(path.join(tempWorktree, "package.json"))) {
+                            const { error: buildError } = await runCmd("npm run build", tempWorktree);
+                            buildStatus = buildError ? "FAILED" : "SUCCESS";
+                        }
+                        else if (fsSync.existsSync(path.join(tempWorktree, "Package.swift"))) {
+                            const { error: buildError } = await runCmd("swift build", tempWorktree);
+                            buildStatus = buildError ? "FAILED" : "SUCCESS";
+                        }
+                        getDb().updateJob(jobId, {
+                            status: "completed",
+                            progress: 100,
+                            result: JSON.stringify({ apply: "OK", build: buildStatus })
+                        });
+                    }
+                    catch (error) {
+                        getDb().updateJob(jobId, { status: "failed", error: error.message });
+                    }
+                    finally {
+                        try {
+                            await runCmd(`git worktree remove -f "${tempWorktree}"`);
+                            if (fsSync.existsSync(tempWorktree))
+                                await fs.rm(tempWorktree, { recursive: true, force: true });
+                        }
+                        catch (e) { }
+                    }
+                })();
+                return JSON.stringify({
+                    status: "STARTED",
+                    message: "Validation started in background.",
+                    job_id: jobId,
+                    instruction: "Use perf_background_status to check progress."
+                }, null, 2);
             }
         }),
         finalize_plan: tool({
@@ -261,11 +361,17 @@ export function systemTools() {
             },
             async execute({ plan_id, outcome }) {
                 log("Tool call: finalize_plan", { plan_id, outcome });
-
-                const
+                record(plan_id, "finalize_plan", { outcome });
+                const metrics = getDb().getPlanMetrics(plan_id);
+                const report = {
+                    plan_id,
+                    outcome,
+                    metrics,
+                    finished_at: new Date().toISOString()
+                };
                 await fs.appendFile(path.join(CACHE_DIR, "gaps.jsonl"), JSON.stringify(report) + "\n");
                 const deleted = await cleanCache();
-                return
+                return JSON.stringify({ status: "FINALIZED", report, cache_cleared: deleted }, null, 2);
             }
         })
     };
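Both `autognosis_setup_ai` and `validate_patch` now share a fire-and-forget shape: create a job row, launch an unawaited async IIFE that writes progress into it, and return a `STARTED` payload immediately. A minimal sketch of that pattern, using an in-memory stand-in for the SQLite-backed `createJob`/`updateJob`:

```ts
// In-memory stand-in for the background_jobs table (illustrative only).
type JobUpdate = { status?: string; progress?: number; result?: string; error?: string };
const jobs = new Map<string, JobUpdate & { id: string }>();

function createJob(id: string) { jobs.set(id, { id, status: "pending", progress: 0 }); }
function updateJob(id: string, u: JobUpdate) { jobs.set(id, { ...jobs.get(id)!, ...u }); }

// Kick off work without awaiting it; the caller gets a job id to poll.
function startValidation(work: () => Promise<string>): string {
    const jobId = `job-validate-${Date.now()}`;
    createJob(jobId);
    (async () => {
        try {
            updateJob(jobId, { status: "running", progress: 10 });
            const result = await work();
            updateJob(jobId, { status: "completed", progress: 100, result });
        }
        catch (error: any) {
            updateJob(jobId, { status: "failed", error: String(error?.message ?? error) });
        }
    })();
    return JSON.stringify({ status: "STARTED", job_id: jobId });
}

console.log(startValidation(async () => "apply OK, build SUCCESS"));
setTimeout(() => console.log(jobs), 50); // poll later, as graph_background_status would
```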
package/dist/unified-api.js
ADDED
@@ -0,0 +1,160 @@
+import { tool } from "@opencode-ai/plugin";
+import { systemTools } from "./system-tools.js";
+import { activeSetTools } from "./activeset.js";
+import { chunkCardsTools } from "./chunk-cards.js";
+import { moduleSummariesTools } from "./module-summaries.js";
+import { performanceTools } from "./performance-optimization.js";
+import { graphTools } from "./database.js";
+// Aggregate all internal tools
+const internal = {
+    ...systemTools(),
+    ...activeSetTools(),
+    ...chunkCardsTools(),
+    ...moduleSummariesTools(),
+    ...performanceTools(),
+    ...graphTools(),
+};
+export function unifiedTools() {
+    return {
+        code_search: tool({
+            description: "Search the codebase using various engines (filename, content, symbol, or semantic/vector).",
+            args: {
+                query: tool.schema.string().describe("Search query"),
+                mode: tool.schema.enum(["filename", "content", "symbol", "semantic"]).optional().default("filename").describe("Search strategy"),
+                path: tool.schema.string().optional().default(".").describe("Root path for search"),
+                limit: tool.schema.number().optional().default(10).describe("Max results"),
+                plan_id: tool.schema.string().optional()
+            },
+            async execute(args) {
+                switch (args.mode) {
+                    case "content": return internal.fast_search.execute({ ...args, mode: "content" });
+                    case "symbol": return internal.graph_search_symbols.execute({ query: args.query });
+                    case "semantic": return internal.graph_semantic_search.execute({ query: args.query, limit: args.limit });
+                    default: return internal.fast_search.execute({ ...args, mode: "filename" });
+                }
+            }
+        }),
+        code_analyze: tool({
+            description: "Perform structural analysis on files or modules. Generates summaries, API maps, and impact reports.",
+            args: {
+                target: tool.schema.string().describe("File path or module ID"),
+                mode: tool.schema.enum(["summary", "api", "invariant", "module", "impact", "reasoning"]).optional().default("summary"),
+                force: tool.schema.boolean().optional().default(false),
+                plan_id: tool.schema.string().optional()
+            },
+            async execute(args) {
+                switch (args.mode) {
+                    case "module": return internal.module_synthesize.execute({ file_path: args.target, force_resynthesize: args.force });
+                    case "impact": return internal.brief_fix_loop.execute({ symbol: args.target, intent: "impact_analysis" });
+                    case "reasoning": return internal.module_hierarchical_reasoning.execute({ module_id: args.target });
+                    default: return internal.chunk_create_card.execute({ file_path: args.target, chunk_type: args.mode, force_recreate: args.force });
+                }
+            }
+        }),
+        code_context: tool({
+            description: "Manage working memory (ActiveSets). Limits context window usage by loading/unloading specific chunks.",
+            args: {
+                action: tool.schema.enum(["create", "load", "add", "remove", "status", "list", "close"]),
+                target: tool.schema.string().optional().describe("ActiveSet ID or Chunk IDs (comma separated)"),
+                name: tool.schema.string().optional().describe("Name for new ActiveSet"),
+                plan_id: tool.schema.string().optional()
+            },
+            async execute(args) {
+                const chunk_ids = args.target?.split(',').map(s => s.trim());
+                switch (args.action) {
+                    case "create": return internal.activeset_create.execute({ name: args.name || "Context", chunk_ids });
+                    case "load": return internal.activeset_load.execute({ set_id: args.target });
+                    case "add": return internal.activeset_add_chunks.execute({ chunk_ids: chunk_ids });
+                    case "remove": return internal.activeset_remove_chunks.execute({ chunk_ids: chunk_ids });
+                    case "list": return internal.activeset_list.execute({});
+                    case "close": return internal.activeset_close.execute({});
+                    default: return internal.activeset_get_current.execute({});
+                }
+            }
+        }),
+        code_read: tool({
+            description: "Precise reading of symbols or file slices. Follows the current plan.",
+            args: {
+                symbol: tool.schema.string().optional().describe("Symbol to jump to"),
+                file: tool.schema.string().optional().describe("File path to read"),
+                start_line: tool.schema.number().optional(),
+                end_line: tool.schema.number().optional(),
+                plan_id: tool.schema.string().optional()
+            },
+            async execute(args) {
+                if (args.symbol)
+                    return internal.jump_to_symbol.execute({ symbol: args.symbol, plan_id: args.plan_id });
+                if (args.file && args.start_line && args.end_line) {
+                    return internal.read_slice.execute({ file: args.file, start_line: args.start_line, end_line: args.end_line, plan_id: args.plan_id });
+                }
+                throw new Error("Either 'symbol' or 'file' with line range must be provided.");
+            }
+        }),
+        code_propose: tool({
+            description: "Plan and propose changes. Generates worklists, diffs, and validates them.",
+            args: {
+                action: tool.schema.enum(["plan", "patch", "validate", "finalize"]),
+                symbol: tool.schema.string().optional().describe("Locus symbol for plan"),
+                intent: tool.schema.string().optional().describe("Work intent (e.g. refactor)"),
+                message: tool.schema.string().optional().describe("Commit message for patch"),
+                patch_path: tool.schema.string().optional().describe("Path to .diff file"),
+                plan_id: tool.schema.string().optional(),
+                outcome: tool.schema.string().optional()
+            },
+            async execute(args) {
+                switch (args.action) {
+                    case "plan": return internal.brief_fix_loop.execute({ symbol: args.symbol, intent: args.intent });
+                    case "patch": return internal.prepare_patch.execute({ message: args.message, plan_id: args.plan_id });
+                    case "validate": return internal.validate_patch.execute({ patch_path: args.patch_path, plan_id: args.plan_id });
+                    case "finalize": return internal.finalize_plan.execute({ plan_id: args.plan_id, outcome: args.outcome });
+                }
+            }
+        }),
+        code_status: tool({
+            description: "Monitor system health, background jobs, and plan metrics.",
+            args: {
+                mode: tool.schema.enum(["stats", "hot_files", "jobs", "plan"]).optional().default("stats"),
+                job_id: tool.schema.string().optional(),
+                plan_id: tool.schema.string().optional(),
+                path: tool.schema.string().optional().default("")
+            },
+            async execute(args) {
+                switch (args.mode) {
+                    case "hot_files": return internal.journal_query_hot_files.execute({ path_prefix: args.path });
+                    case "jobs": return internal.graph_background_status.execute({ job_id: args.job_id });
+                    case "plan": return internal.graph_get_plan_metrics.execute({ plan_id: args.plan_id });
+                    default: return internal.graph_stats.execute({});
+                }
+            }
+        }),
+        code_setup: tool({
+            description: "One-time setup and maintenance tasks (AI, Git Journal, Indexing).",
+            args: {
+                action: tool.schema.enum(["init", "ai", "index", "journal"]),
+                model: tool.schema.string().optional().describe("AI Model name"),
+                limit: tool.schema.number().optional().describe("History limit")
+            },
+            async execute(args) {
+                switch (args.action) {
+                    case "ai": return internal.autognosis_setup_ai.execute({ model: args.model });
+                    case "index": return internal.perf_incremental_index.execute({ background: true });
+                    case "journal": return internal.journal_build.execute({ limit: args.limit });
+                    default: return internal.autognosis_init.execute({ mode: "apply", token: "adhoc" }); // Simplified
+                }
+            }
+        }),
+        internal_call: tool({
+            description: "Advanced access to specialized internal tools. Use only when unified tools are insufficient.",
+            args: {
+                tool_name: tool.schema.string().describe("Internal tool name"),
+                args: tool.schema.any().describe("Arguments for the internal tool")
+            },
+            async execute({ tool_name, args }) {
+                const target = internal[tool_name];
+                if (!target)
+                    throw new Error(`Internal tool '${tool_name}' not found.`);
+                return target.execute(args);
+            }
+        })
+    };
+}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "opencode-autognosis",
-  "version": "2.0.2",
+  "version": "2.0.4",
   "description": "Advanced RAG-powered codebase awareness for OpenCode agents. Features Chunk Cards synthesis, hierarchical reasoning, ActiveSet working memory, and performance optimization for enterprise-scale repositories.",
   "type": "module",
   "main": "dist/index.js",