opencode-autognosis 2.0.1 → 2.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/activeset.js +2 -1
- package/dist/chunk-cards.js +7 -1
- package/dist/database.d.ts +36 -0
- package/dist/database.js +399 -0
- package/dist/git-worktree.js +5 -4
- package/dist/index.js +2 -0
- package/dist/module-summaries.js +2 -1
- package/dist/performance-optimization.js +6 -2
- package/dist/services/logger.d.ts +4 -1
- package/dist/services/logger.js +39 -16
- package/dist/services/ollama.d.ts +11 -0
- package/dist/services/ollama.js +132 -0
- package/dist/system-tools.js +2 -2
- package/dist/testing-infrastructure.js +2 -1
- package/package.json +5 -1
package/dist/activeset.js
CHANGED
@@ -3,12 +3,13 @@ import * as fs from "node:fs/promises";
 import * as fsSync from "node:fs";
 import * as path from "node:path";
 import * as crypto from "node:crypto";
+import { Logger } from "./services/logger.js";
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
 const ACTIVESET_DIR = path.join(OPENCODE_DIR, "activesets");
 // Internal logging
 function log(message, data) {
-
+    Logger.log("ActiveSet", message, data);
 }
 // =============================================================================
 // HELPERS
package/dist/chunk-cards.js
CHANGED
@@ -6,6 +6,8 @@ import * as path from "node:path";
 import { promisify } from "node:util";
 import * as crypto from "node:crypto";
 import ts from "typescript";
+import { getDb } from "./database.js";
+import { Logger } from "./services/logger.js";
 const execAsync = promisify(exec);
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
@@ -13,7 +15,7 @@ export const CHUNK_DIR = path.join(OPENCODE_DIR, "chunks");
 const CACHE_DIR = path.join(OPENCODE_DIR, "cache");
 // Internal logging
 function log(message, data) {
-
+    Logger.log("ChunkCards", message, data);
 }
 // =============================================================================
 // HELPERS
@@ -158,6 +160,8 @@ export function chunkCardsTools() {
         };
         // Save chunk card
         await fs.writeFile(cardPath, JSON.stringify(chunkCard, null, 2));
+        // Sync to SQLite Index
+        getDb().ingestChunkCard(chunkCard);
         return JSON.stringify({
             status: "SUCCESS",
             card: chunkCard,
@@ -297,6 +301,8 @@ export function chunkCardsTools() {
             }, null, 2);
         }
         await fs.unlink(cardPath);
+        // Remove from SQLite Index
+        getDb().deleteChunkCard(card_id);
         return JSON.stringify({
             status: "SUCCESS",
             message: `Card deleted: ${card_id}`
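
The save and delete paths above now dual-write: the JSON card on disk remains the source of truth, while getDb().ingestChunkCard() mirrors it into SQLite (and getDb().deleteChunkCard() removes it). One consequence is that the index can in principle be rebuilt by replaying the on-disk cards. A hypothetical helper, not part of the package, sketching that under the assumption that every .json file in the chunk directory parses to a ChunkCard:

import * as fs from "node:fs/promises";
import * as path from "node:path";
import { getDb } from "./database.js";

// Hypothetical rebuild helper: replay every on-disk chunk card through the
// same upsert the save path uses. Assumes each .json file is a ChunkCard.
async function reindexFromCards(chunkDir: string): Promise<number> {
    let count = 0;
    for (const name of await fs.readdir(chunkDir)) {
        if (!name.endsWith(".json")) continue;
        const raw = await fs.readFile(path.join(chunkDir, name), "utf8");
        getDb().ingestChunkCard(JSON.parse(raw)); // upsert + re-queue embedding
        count++;
    }
    return count;
}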
package/dist/database.d.ts
ADDED
@@ -0,0 +1,36 @@
+import type { ChunkCard } from "./chunk-cards.js";
+export declare class CodeGraphDB {
+    private db;
+    private workerRunning;
+    constructor();
+    private initialize;
+    private startWorker;
+    private processEmbeddingQueue;
+    /**
+     * Syncs a ChunkCard (JSON) into the SQLite Index.
+     * This is an "Upsert" operation.
+     */
+    ingestChunkCard(card: ChunkCard): void;
+    /**
+     * Remove a card from the index
+     */
+    deleteChunkCard(cardId: string): void;
+    findDependents(filePath: string): string[];
+    searchSymbols(query: string): any[];
+    semanticSearch(query: string, limit?: number): Promise<any[]>;
+    private cosineSimilarity;
+    getStats(): {
+        files: number;
+        chunks: number;
+        symbols: number;
+        dependencies: number;
+        embeddings: {
+            completed: number;
+            pending: number;
+        };
+    };
+}
+export declare function getDb(): CodeGraphDB;
+export declare function graphTools(): {
+    [key: string]: any;
+};
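
A minimal usage sketch of this new surface (assuming the index is already populated and Ollama is set up; result rows are the loosely typed any[] declared above):

import { getDb } from "./database.js";

async function demo(): Promise<void> {
    const db = getDb(); // lazy singleton; opens .opencode/autognosis.db
    console.log(db.getStats()); // { files, chunks, symbols, dependencies, embeddings: { completed, pending } }
    console.log(db.searchSymbols("ingest")); // LIKE-based match, capped at 20 rows
    console.log(db.findDependents("src/services/logger.ts")); // basename-based matching
    const hits = await db.semanticSearch("where are embeddings stored?", 5);
    console.log(hits.map(h => h.path)); // throws if Ollama is not running
}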
package/dist/database.js
ADDED
@@ -0,0 +1,399 @@
+import Database from "better-sqlite3";
+import * as path from "node:path";
+import * as fs from "node:fs";
+import { tool } from "@opencode-ai/plugin";
+import { ollama, DEFAULT_EMBEDDING_MODEL } from "./services/ollama.js";
+const PROJECT_ROOT = process.cwd();
+const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
+const DB_PATH = path.join(OPENCODE_DIR, "autognosis.db");
+export class CodeGraphDB {
+    db;
+    workerRunning = false;
+    constructor() {
+        // Ensure directory exists
+        if (!fs.existsSync(OPENCODE_DIR)) {
+            fs.mkdirSync(OPENCODE_DIR, { recursive: true });
+        }
+        this.db = new Database(DB_PATH);
+        this.initialize();
+        // Start background worker
+        this.startWorker();
+    }
+    initialize() {
+        // Enable WAL mode for concurrency and performance
+        this.db.pragma('journal_mode = WAL');
+        this.db.exec(`
+      CREATE TABLE IF NOT EXISTS files (
+        id INTEGER PRIMARY KEY AUTOINCREMENT,
+        path TEXT UNIQUE NOT NULL,
+        hash TEXT,
+        last_indexed DATETIME DEFAULT CURRENT_TIMESTAMP
+      );
+
+      CREATE TABLE IF NOT EXISTS chunks (
+        id TEXT PRIMARY KEY,
+        file_id INTEGER,
+        type TEXT,
+        complexity_score REAL,
+        content_summary TEXT,
+        embedding BLOB,
+        FOREIGN KEY(file_id) REFERENCES files(id) ON DELETE CASCADE
+      );
+
+      CREATE TABLE IF NOT EXISTS embedding_queue (
+        chunk_id TEXT PRIMARY KEY,
+        text_to_embed TEXT,
+        status TEXT DEFAULT 'pending', -- pending, processing, failed
+        retries INTEGER DEFAULT 0,
+        FOREIGN KEY(chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
+      );
+
+      CREATE TABLE IF NOT EXISTS symbols (
+        id INTEGER PRIMARY KEY AUTOINCREMENT,
+        chunk_id TEXT,
+        name TEXT NOT NULL,
+        kind TEXT, -- 'function', 'class', 'interface', etc.
+        FOREIGN KEY(chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
+      );
+
+      CREATE TABLE IF NOT EXISTS dependencies (
+        source_chunk_id TEXT,
+        target_path TEXT,
+        FOREIGN KEY(source_chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
+      );
+
+      -- Indexes for performance
+      CREATE INDEX IF NOT EXISTS idx_files_path ON files(path);
+      CREATE INDEX IF NOT EXISTS idx_symbols_name ON symbols(name);
+      CREATE INDEX IF NOT EXISTS idx_dependencies_target ON dependencies(target_path);
+    `);
+        // Migrations
+        try {
+            this.db.exec("ALTER TABLE chunks ADD COLUMN embedding BLOB");
+        }
+        catch { }
+    }
+    async startWorker() {
+        if (this.workerRunning)
+            return;
+        this.workerRunning = true;
+        // Run periodically
+        setInterval(async () => {
+            try {
+                await this.processEmbeddingQueue();
+            }
+            catch (e) {
+                // console.error("Worker error:", e);
+            }
+        }, 5000); // Check every 5s
+    }
+    async processEmbeddingQueue() {
+        // Check if Ollama is ready
+        if (!(await ollama.isRunning()))
+            return;
+        // Get next task
+        const task = this.db.prepare(`
+      SELECT chunk_id, text_to_embed, retries
+      FROM embedding_queue
+      WHERE status = 'pending'
+      ORDER BY rowid ASC
+      LIMIT 1
+    `).get();
+        if (!task)
+            return;
+        // Mark processing
+        this.db.prepare("UPDATE embedding_queue SET status = 'processing' WHERE chunk_id = ?").run(task.chunk_id);
+        try {
+            // Generate embedding
+            const vector = await ollama.getEmbedding(task.text_to_embed);
+            if (vector.length > 0) {
+                // Store blob (Float32Array to Buffer)
+                const buffer = Buffer.from(new Float32Array(vector).buffer);
+                const updateChunk = this.db.prepare("UPDATE chunks SET embedding = ? WHERE id = ?");
+                const deleteQueue = this.db.prepare("DELETE FROM embedding_queue WHERE chunk_id = ?");
+                const txn = this.db.transaction(() => {
+                    updateChunk.run(buffer, task.chunk_id);
+                    deleteQueue.run(task.chunk_id);
+                });
+                txn();
+            }
+            else {
+                throw new Error("Empty vector returned");
+            }
+        }
+        catch (error) {
+            if (task.retries > 3) {
+                // Give up
+                this.db.prepare("UPDATE embedding_queue SET status = 'failed' WHERE chunk_id = ?").run(task.chunk_id);
+            }
+            else {
+                // Retry
+                this.db.prepare("UPDATE embedding_queue SET status = 'pending', retries = retries + 1 WHERE chunk_id = ?").run(task.chunk_id);
+            }
+        }
+    }
+    /**
+     * Syncs a ChunkCard (JSON) into the SQLite Index.
+     * This is an "Upsert" operation.
+     */
+    ingestChunkCard(card) {
+        const insertFile = this.db.prepare(`
+      INSERT INTO files (path, hash, last_indexed)
+      VALUES (?, ?, CURRENT_TIMESTAMP)
+      ON CONFLICT(path) DO UPDATE SET
+        hash = excluded.hash,
+        last_indexed = CURRENT_TIMESTAMP
+      RETURNING id
+    `);
+        const insertChunk = this.db.prepare(`
+      INSERT INTO chunks (id, file_id, type, complexity_score, content_summary)
+      VALUES (?, ?, ?, ?, ?)
+      ON CONFLICT(id) DO UPDATE SET
+        complexity_score = excluded.complexity_score,
+        content_summary = excluded.content_summary
+    `);
+        const queueEmbedding = this.db.prepare(`
+      INSERT INTO embedding_queue (chunk_id, text_to_embed)
+      VALUES (?, ?)
+      ON CONFLICT(chunk_id) DO UPDATE SET
+        text_to_embed = excluded.text_to_embed,
+        status = 'pending',
+        retries = 0
+    `);
+        const insertSymbol = this.db.prepare(`
+      INSERT INTO symbols (chunk_id, name, kind) VALUES (?, ?, 'unknown')
+    `);
+        const insertDep = this.db.prepare(`
+      INSERT INTO dependencies (source_chunk_id, target_path) VALUES (?, ?)
+    `);
+        const deleteOldSymbols = this.db.prepare('DELETE FROM symbols WHERE chunk_id = ?');
+        const deleteOldDeps = this.db.prepare('DELETE FROM dependencies WHERE source_chunk_id = ?');
+        const transaction = this.db.transaction(() => {
+            // 1. Upsert File
+            const fileRes = insertFile.get(card.file_path, card.metadata.hash);
+            const fileId = fileRes.id;
+            // 2. Upsert Chunk
+            insertChunk.run(card.id, fileId, card.chunk_type, card.metadata.complexity_score, card.content.slice(0, 500) // Store preview
+            );
+            // 3. Queue for Embedding
+            // Use the summary or content as the text to embed
+            const textToEmbed = `${card.chunk_type.toUpperCase()} for ${path.basename(card.file_path)}\n\n${card.content.slice(0, 2000)}`;
+            queueEmbedding.run(card.id, textToEmbed);
+            // 4. Replace Symbols
+            deleteOldSymbols.run(card.id);
+            for (const sym of card.metadata.symbols) {
+                insertSymbol.run(card.id, sym);
+            }
+            // 5. Replace Dependencies
+            deleteOldDeps.run(card.id);
+            for (const dep of card.metadata.dependencies) {
+                insertDep.run(card.id, dep);
+            }
+        });
+        transaction();
+    }
+    /**
+     * Remove a card from the index
+     */
+    deleteChunkCard(cardId) {
+        this.db.prepare('DELETE FROM chunks WHERE id = ?').run(cardId);
+    }
+    // ===========================================================================
+    // QUERY METHODS
+    // ===========================================================================
+    findDependents(filePath) {
+        // Find all chunks that depend on this file path
+        // Note: dependency paths might be relative or absolute, simplistic matching for now
+        const query = this.db.prepare(`
+      SELECT DISTINCT f.path
+      FROM files f
+      JOIN chunks c ON f.id = c.file_id
+      JOIN dependencies d ON c.id = d.source_chunk_id
+      WHERE d.target_path LIKE ? OR d.target_path = ?
+    `);
+        // Attempt to match exact path or likely relative imports (simplistic)
+        const basename = path.basename(filePath);
+        const results = query.all(`%/${basename}%`, basename);
+        return results.map(r => r.path);
+    }
+    searchSymbols(query) {
+        const stmt = this.db.prepare(`
+      SELECT s.name, c.type, f.path
+      FROM symbols s
+      JOIN chunks c ON s.chunk_id = c.id
+      JOIN files f ON c.file_id = f.id
+      WHERE s.name LIKE ?
+      LIMIT 20
+    `);
+        return stmt.all(`%${query}%`);
+    }
+    async semanticSearch(query, limit = 10) {
+        if (!(await ollama.isRunning())) {
+            throw new Error("Ollama is not running. Please run 'autognosis_setup_ai' first.");
+        }
+        const queryVec = await ollama.getEmbedding(query);
+        if (queryVec.length === 0)
+            return [];
+        // Get all embeddings from DB
+        // SQLite doesn't have vector math, so we fetch all and sort in JS
+        // Optimizations: In future, use sqlite-vec or filter by complexity/type first
+        const chunks = this.db.prepare(`
+      SELECT c.id, c.content_summary, c.type, f.path, c.embedding
+      FROM chunks c
+      JOIN files f ON c.file_id = f.id
+      WHERE c.embedding IS NOT NULL
+    `).all();
+        const results = chunks.map(chunk => {
+            const vector = new Float32Array(chunk.embedding.buffer, chunk.embedding.byteOffset, chunk.embedding.byteLength / 4);
+            const similarity = this.cosineSimilarity(queryVec, vector);
+            return { ...chunk, similarity, embedding: undefined }; // Don't return blob
+        });
+        results.sort((a, b) => b.similarity - a.similarity);
+        return results.slice(0, limit);
+    }
+    cosineSimilarity(vecA, vecB) {
+        let dot = 0;
+        let normA = 0;
+        let normB = 0;
+        for (let i = 0; i < vecA.length; i++) {
+            dot += vecA[i] * vecB[i];
+            normA += vecA[i] * vecA[i];
+            normB += vecB[i] * vecB[i];
+        }
+        return dot / (Math.sqrt(normA) * Math.sqrt(normB));
+    }
+    getStats() {
+        const files = this.db.prepare('SELECT COUNT(*) as c FROM files').get();
+        const symbols = this.db.prepare('SELECT COUNT(*) as c FROM symbols').get();
+        const deps = this.db.prepare('SELECT COUNT(*) as c FROM dependencies').get();
+        const chunks = this.db.prepare('SELECT COUNT(*) as c FROM chunks').get();
+        const embedded = this.db.prepare('SELECT COUNT(*) as c FROM chunks WHERE embedding IS NOT NULL').get();
+        const queue = this.db.prepare("SELECT COUNT(*) as c FROM embedding_queue WHERE status = 'pending'").get();
+        return {
+            files: files.c,
+            chunks: chunks.c,
+            symbols: symbols.c,
+            dependencies: deps.c,
+            embeddings: {
+                completed: embedded.c,
+                pending: queue.c
+            }
+        };
+    }
+}
+// Singleton instance for the plugin
+let dbInstance = null;
+export function getDb() {
+    if (!dbInstance) {
+        dbInstance = new CodeGraphDB();
+    }
+    return dbInstance;
+}
+export function graphTools() {
+    return {
+        autognosis_setup_ai: tool({
+            description: "Configure local AI capabilities (Ollama). Checks installation, installs if needed, and pulls the embedding model.",
+            args: {
+                model: tool.schema.string().optional().default(DEFAULT_EMBEDDING_MODEL).describe("Embedding model to pull")
+            },
+            async execute({ model }) {
+                try {
+                    const installed = await ollama.isInstalled();
+                    let statusMsg = "Ollama is installed.";
+                    if (!installed) {
+                        statusMsg = await ollama.install();
+                    }
+                    await ollama.startServer();
+                    await ollama.pullModel(model);
+                    return JSON.stringify({
+                        status: "SUCCESS",
+                        message: `${statusMsg} Server is running. Model ${model} is ready.`,
+                        config: {
+                            model,
+                            base_url: "http://127.0.0.1:11434"
+                        }
+                    }, null, 2);
+                }
+                catch (error) {
+                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
+                }
+            }
+        }),
+        graph_semantic_search: tool({
+            description: "Search the codebase using natural language (Vector/Semantic Search). Requires AI setup.",
+            args: {
+                query: tool.schema.string().describe("Natural language query"),
+                limit: tool.schema.number().optional().default(10).describe("Max results")
+            },
+            async execute({ query, limit }) {
+                try {
+                    const results = await getDb().semanticSearch(query, limit);
+                    return JSON.stringify({
+                        status: "SUCCESS",
+                        query,
+                        results
+                    }, null, 2);
+                }
+                catch (error) {
+                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
+                }
+            }
+        }),
+        graph_query_dependents: tool({
+            description: "Find all files that depend on a specific file (upstream impact analysis).",
+            args: {
+                file_path: tool.schema.string().describe("File path to analyze"),
+            },
+            async execute({ file_path }) {
+                try {
+                    const dependents = getDb().findDependents(file_path);
+                    return JSON.stringify({
+                        status: "SUCCESS",
+                        file_path,
+                        dependents,
+                        count: dependents.length
+                    }, null, 2);
+                }
+                catch (error) {
+                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
+                }
+            }
+        }),
+        graph_search_symbols: tool({
+            description: "Fast fuzzy search for symbols (functions, classes) across the entire codebase index.",
+            args: {
+                query: tool.schema.string().describe("Symbol name query"),
+            },
+            async execute({ query }) {
+                try {
+                    const results = getDb().searchSymbols(query);
+                    return JSON.stringify({
+                        status: "SUCCESS",
+                        query,
+                        results,
+                        count: results.length
+                    }, null, 2);
+                }
+                catch (error) {
+                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
+                }
+            }
+        }),
+        graph_stats: tool({
+            description: "Get statistics about the Code Graph Index.",
+            args: {},
+            async execute() {
+                try {
+                    const stats = getDb().getStats();
+                    return JSON.stringify({
+                        status: "SUCCESS",
+                        stats
+                    }, null, 2);
+                }
+                catch (error) {
+                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
+                }
+            }
+        })
+    };
+}
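
One subtlety in semanticSearch above: better-sqlite3 returns the embedding BLOB as a Node Buffer, which can be a view into a larger pooled ArrayBuffer, so wrapping chunk.embedding.buffer alone could read unrelated bytes; passing byteOffset and an element count is what makes the decode correct. A standalone sketch of the round-trip the worker and the search rely on:

// Encode: what processEmbeddingQueue stores in chunks.embedding.
const vector = [0.1, -0.25, 0.5]; // embedding vector from Ollama
const blob = Buffer.from(new Float32Array(vector).buffer);

// Decode: the same view semanticSearch constructs over the returned Buffer.
const decoded = new Float32Array(blob.buffer, blob.byteOffset, blob.byteLength / 4);
console.log(Array.from(decoded)); // [0.1, -0.25, 0.5] up to float32 precision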
package/dist/git-worktree.js
CHANGED
@@ -5,13 +5,14 @@ import * as fsSync from "node:fs";
 import * as path from "node:path";
 import { promisify } from "node:util";
 import * as crypto from "node:crypto";
+import { Logger } from "./services/logger.js";
 const execAsync = promisify(exec);
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
 const WORKTREE_DIR = path.join(OPENCODE_DIR, "worktrees");
 // Internal logging
 function log(message, data) {
-
+    Logger.log("GitWorktree", message, data);
 }
 // =============================================================================
 // HELPERS
@@ -202,8 +203,8 @@ export function gitWorktreeTools() {
             }, null, 2);
         }
         // Create initial commit if needed
-        const { stdout: …
-        if (! …
+        const { stdout: gitLog } = await runCmd("git log --oneline -1");
+        if (!gitLog || gitLog.includes("Initial commit")) {
             await runCmd(`git commit --allow-empty -m "${message}"`);
         }
         let worktreePath = null;
@@ -214,7 +215,7 @@ export function gitWorktreeTools() {
         if (fsSync.existsSync(worktreePath)) {
             const { error: removeError } = await runCmd(`git worktree remove ${worktreePath}`);
             if (removeError) {
-
+                log("Warning: Failed to remove existing worktree", removeError);
             }
         }
         // Create new worktree
package/dist/index.js
CHANGED
@@ -5,6 +5,7 @@ import { chunkCardsTools } from "./chunk-cards.js";
 import { activeSetTools } from "./activeset.js";
 import { moduleSummariesTools } from "./module-summaries.js";
 import { performanceTools } from "./performance-optimization.js";
+import { graphTools } from "./database.js";
 export const AutognosisPlugin = async () => {
     return {
         tool: {
@@ -15,6 +16,7 @@ export const AutognosisPlugin = async () => {
             ...activeSetTools(),
             ...moduleSummariesTools(),
             ...performanceTools(),
+            ...graphTools(),
         },
     };
 };
package/dist/module-summaries.js
CHANGED
@@ -3,13 +3,14 @@ import * as fs from "node:fs/promises";
 import * as fsSync from "node:fs";
 import * as path from "node:path";
 import * as crypto from "node:crypto";
+import { Logger } from "./services/logger.js";
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
 const CHUNK_DIR = path.join(OPENCODE_DIR, "chunks");
 const MODULE_DIR = path.join(OPENCODE_DIR, "modules");
 // Internal logging
 function log(message, data) {
-
+    Logger.log("ModuleSummaries", message, data);
 }
 // =============================================================================
 // HELPERS
package/dist/performance-optimization.js
CHANGED
@@ -5,7 +5,9 @@ import * as fsSync from "node:fs";
 import * as path from "node:path";
 import { promisify } from "node:util";
 import * as crypto from "node:crypto";
+import { getDb } from "./database.js";
 import { CHUNK_DIR, ensureChunkDir, calculateHash, calculateComplexity, parseFileAST, generateSummaryChunk, generateApiChunk, generateInvariantChunk, extractDependencies, extractSymbolsFromAST, extractSymbols } from "./chunk-cards.js";
+import { Logger } from "./services/logger.js";
 const execAsync = promisify(exec);
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
@@ -14,7 +16,7 @@ const PERF_DIR = path.join(OPENCODE_DIR, "performance");
 const METRICS_DIR = path.join(OPENCODE_DIR, "metrics");
 // Internal logging
 function log(message, data) {
-
+    Logger.log("Performance", message, data);
 }
 // =============================================================================
 // HELPERS
@@ -626,6 +628,8 @@ async function indexFile(filePath) {
             }
         };
         await fs.writeFile(cardPath, JSON.stringify(chunkCard, null, 2));
+        // Sync to SQLite Index
+        getDb().ingestChunkCard(chunkCard);
         }
     }
     catch (error) {
@@ -719,7 +723,7 @@ async function runBackgroundIndexing(taskId, indexingState) {
         }
     }
     catch (writeError) {
-
+        log("Failed to update task error state", writeError);
     }
 }
 }
package/dist/services/logger.js
CHANGED
@@ -1,17 +1,40 @@
-import …
-import …
-
-const …
-
-
-
-
-
-
-
-
-
-
-
-
+import * as fs from "node:fs";
+import * as path from "node:path";
+const PROJECT_ROOT = process.cwd();
+const LOG_DIR = path.join(PROJECT_ROOT, ".opencode", "logs");
+const LOG_FILE = path.join(LOG_DIR, "autognosis.log");
+// Ensure log directory exists
+try {
+    if (!fs.existsSync(LOG_DIR)) {
+        fs.mkdirSync(LOG_DIR, { recursive: true });
+    }
+}
+catch (e) {
+    // Ignore error if we can't create directory (e.g. read-only fs)
+}
+export class Logger {
+    static formatMessage(module, message, data) {
+        const timestamp = new Date().toISOString();
+        let dataStr = "";
+        if (data) {
+            try {
+                dataStr = typeof data === "string" ? data : JSON.stringify(data);
+            }
+            catch {
+                dataStr = "[Circular/Unserializable]";
+            }
+        }
+        return `[${timestamp}] [${module}] ${message} ${dataStr}\n`;
+    }
+    static log(module, message, data) {
+        const line = this.formatMessage(module, message, data);
+        try {
+            // Append to log file synchronously to ensure write
+            fs.appendFileSync(LOG_FILE, line);
+        }
+        catch (e) {
+            // Fallback: strictly avoid console.log/error to prevent TUI breakage.
+            // We essentially swallow the log if file write fails.
+        }
+    }
 }
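
The practical effect of this rewrite is that every module's local log() helper now funnels into Logger.log, which appends one line per call to .opencode/logs/autognosis.log and never writes to stdout/stderr (so the OpenCode TUI is not disturbed). A usage sketch with an illustrative timestamp:

import { Logger } from "./services/logger.js";

Logger.log("ChunkCards", "Card saved", { id: "abc123" });
// Appends a line like:
// [2025-01-01T12:00:00.000Z] [ChunkCards] Card saved {"id":"abc123"}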
package/dist/services/ollama.d.ts
ADDED
@@ -0,0 +1,11 @@
+export declare const DEFAULT_EMBEDDING_MODEL = "nomic-embed-text";
+export declare const OLLAMA_BASE_URL = "http://127.0.0.1:11434";
+export declare class OllamaService {
+    isInstalled(): Promise<boolean>;
+    isRunning(): Promise<boolean>;
+    install(): Promise<string>;
+    startServer(): Promise<void>;
+    pullModel(model?: string): Promise<void>;
+    getEmbedding(text: string, model?: string): Promise<number[]>;
+}
+export declare const ollama: OllamaService;
package/dist/services/ollama.js
ADDED
@@ -0,0 +1,132 @@
+import { exec, spawn } from "node:child_process";
+import { promisify } from "node:util";
+import * as fs from "node:fs";
+import * as path from "node:path";
+import { Logger } from "./logger.js";
+const execAsync = promisify(exec);
+export const DEFAULT_EMBEDDING_MODEL = "nomic-embed-text";
+export const OLLAMA_BASE_URL = "http://127.0.0.1:11434";
+export class OllamaService {
+    async isInstalled() {
+        try {
+            await execAsync("which ollama");
+            return true;
+        }
+        catch {
+            return false;
+        }
+    }
+    async isRunning() {
+        try {
+            const controller = new AbortController();
+            const timeoutId = setTimeout(() => controller.abort(), 1000);
+            const res = await fetch(`${OLLAMA_BASE_URL}/api/version`, { signal: controller.signal });
+            clearTimeout(timeoutId);
+            return res.ok;
+        }
+        catch {
+            return false;
+        }
+    }
+    async install() {
+        const platform = process.platform;
+        try {
+            if (platform === "darwin") {
+                // Try Homebrew first
+                try {
+                    await execAsync("which brew");
+                    await execAsync("brew install ollama");
+                    return "Installed via Homebrew";
+                }
+                catch {
+                    // Fallback to script
+                    await execAsync("curl -fsSL https://ollama.com/install.sh | sh");
+                    return "Installed via official script";
+                }
+            }
+            else if (platform === "linux") {
+                await execAsync("curl -fsSL https://ollama.com/install.sh | sh");
+                return "Installed via official script";
+            }
+            else {
+                throw new Error("Automatic installation only supported on macOS and Linux. Please install Ollama manually.");
+            }
+        }
+        catch (error) {
+            throw new Error(`Installation failed: ${error.message}`);
+        }
+    }
+    async startServer() {
+        if (await this.isRunning())
+            return;
+        // Start in background
+        const logFile = fs.openSync(path.join(process.cwd(), ".opencode", "ollama.log"), "a");
+        const child = spawn("ollama", ["serve"], {
+            detached: true,
+            stdio: ["ignore", logFile, logFile]
+        });
+        child.unref();
+        // Wait for it to come up
+        let attempts = 0;
+        while (attempts < 10) {
+            await new Promise(r => setTimeout(r, 1000));
+            if (await this.isRunning())
+                return;
+            attempts++;
+        }
+        throw new Error("Ollama server failed to start within 10 seconds");
+    }
+    async pullModel(model = DEFAULT_EMBEDDING_MODEL) {
+        // Check if exists
+        try {
+            const res = await fetch(`${OLLAMA_BASE_URL}/api/tags`);
+            const data = await res.json();
+            const models = data.models || [];
+            if (models.some((m) => m.name.includes(model))) {
+                return; // Already exists
+            }
+        }
+        catch { }
+        // Pull model (this blocks, usually handled via CLI)
+        // We'll use the API to pull so we can await it
+        const res = await fetch(`${OLLAMA_BASE_URL}/api/pull`, {
+            method: "POST",
+            body: JSON.stringify({ name: model }),
+        });
+        if (!res.ok)
+            throw new Error(`Failed to pull model ${model}`);
+        // Read stream to completion to ensure it's done
+        const reader = res.body?.getReader();
+        if (reader) {
+            while (true) {
+                const { done } = await reader.read();
+                if (done)
+                    break;
+            }
+        }
+    }
+    async getEmbedding(text, model = DEFAULT_EMBEDDING_MODEL) {
+        if (!text || !text.trim())
+            return [];
+        try {
+            const res = await fetch(`${OLLAMA_BASE_URL}/api/embeddings`, {
+                method: "POST",
+                body: JSON.stringify({
+                    model,
+                    prompt: text
+                })
+            });
+            if (!res.ok) {
+                const errText = await res.text();
+                throw new Error(`Ollama API error: ${res.status} ${errText}`);
+            }
+            const data = await res.json();
+            return data.embedding;
+        }
+        catch (error) {
+            Logger.log("Ollama", "Embedding failed", error);
+            return [];
+        }
+    }
+}
+export const ollama = new OllamaService();
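
A sketch of how these pieces compose, mirroring the call order that autognosis_setup_ai in database.js uses (the wrapper function itself is hypothetical):

import { ollama, DEFAULT_EMBEDDING_MODEL } from "./services/ollama.js";

async function ensureEmbeddingBackend(): Promise<number[]> {
    if (!(await ollama.isInstalled())) {
        await ollama.install(); // Homebrew on macOS, official script on Linux
    }
    await ollama.startServer(); // no-op if /api/version already responds
    await ollama.pullModel(DEFAULT_EMBEDDING_MODEL); // no-op if already pulled
    return ollama.getEmbedding("hello world"); // resolves to [] on failure
}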
package/dist/system-tools.js
CHANGED
@@ -5,14 +5,14 @@ import * as fsSync from "node:fs";
 import * as path from "node:path";
 import { promisify } from "node:util";
 import * as crypto from "node:crypto";
+import { Logger } from "./services/logger.js";
 const execAsync = promisify(exec);
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
 const CACHE_DIR = path.join(OPENCODE_DIR, "cache");
 // Internal logging
 function log(message, data) {
-
-    console.error(`[Autognosis] ${message}`, data || '');
+    Logger.log("Autognosis", message, data);
 }
 // =============================================================================
 // HELPERS
package/dist/testing-infrastructure.js
CHANGED
@@ -5,6 +5,7 @@ import * as fsSync from "node:fs";
 import * as path from "node:path";
 import { promisify } from "node:util";
 import * as crypto from "node:crypto";
+import { Logger } from "./services/logger.js";
 const execAsync = promisify(exec);
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
@@ -12,7 +13,7 @@ const TEST_DIR = path.join(OPENCODE_DIR, "tests");
 const BENCHMARK_DIR = path.join(OPENCODE_DIR, "benchmarks");
 // Internal logging
 function log(message, data) {
-
+    Logger.log("Testing", message, data);
 }
 // =============================================================================
 // HELPERS
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "opencode-autognosis",
-  "version": "2.0.1",
+  "version": "2.0.2",
   "description": "Advanced RAG-powered codebase awareness for OpenCode agents. Features Chunk Cards synthesis, hierarchical reasoning, ActiveSet working memory, and performance optimization for enterprise-scale repositories.",
   "type": "module",
   "main": "dist/index.js",
@@ -46,8 +46,12 @@
   "devDependencies": {
     "@opencode-ai/plugin": "^1.0.162",
     "@opencode-ai/sdk": "^1.1.40",
+    "@types/better-sqlite3": "^7.6.13",
     "@types/node": "^20.0.0",
     "typescript": "^5.0.0",
     "zod": "^4.3.6"
+  },
+  "dependencies": {
+    "better-sqlite3": "^12.6.2"
   }
 }