opencode-autognosis 2.0.4 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/activeset.d.ts +28 -0
- package/dist/activeset.js +2 -2
- package/dist/chunk-cards.d.ts +4 -0
- package/dist/chunk-cards.js +40 -0
- package/dist/database.d.ts +16 -8
- package/dist/database.js +239 -119
- package/dist/index.d.ts +6 -1
- package/dist/index.js +32 -1
- package/dist/performance-optimization.d.ts +1 -0
- package/dist/performance-optimization.js +7 -2
- package/dist/services/mlx.d.ts +8 -0
- package/dist/services/mlx.js +53 -0
- package/dist/services/policy.d.ts +12 -0
- package/dist/services/policy.js +59 -0
- package/dist/services/tui.d.ts +8 -0
- package/dist/services/tui.js +43 -0
- package/dist/services/watcher.d.ts +8 -0
- package/dist/services/watcher.js +50 -0
- package/dist/system-tools.js +6 -0
- package/dist/unified-api.js +156 -31
- package/package.json +4 -2
package/dist/database.js
CHANGED
@@ -1,26 +1,66 @@
-import Database from "better-sqlite3";
 import * as path from "node:path";
 import * as fs from "node:fs";
+import { createRequire } from "node:module";
 import { tool } from "@opencode-ai/plugin";
 import { ollama, DEFAULT_EMBEDDING_MODEL } from "./services/ollama.js";
+import { mlxService } from "./services/mlx.js";
+import { tui } from "./services/tui.js";
+const require = createRequire(import.meta.url);
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
 const DB_PATH = path.join(OPENCODE_DIR, "autognosis.db");
+/**
+ * Multi-Runtime SQLite Adapter
+ * Detects Bun vs Node and provides a unified interface.
+ */
+class DatabaseAdapter {
+    inner;
+    isBun;
+    constructor(path) {
+        this.isBun = !!globalThis.Bun;
+        if (this.isBun) {
+            const { Database } = require("bun:sqlite");
+            this.inner = new Database(path, { create: true });
+        }
+        else {
+            const Database = require("better-sqlite3");
+            this.inner = new Database(path);
+        }
+    }
+    exec(sql) {
+        return this.inner.exec(sql);
+    }
+    pragma(sql) {
+        if (this.isBun)
+            return this.inner.exec(`PRAGMA ${sql}`);
+        return this.inner.pragma(sql);
+    }
+    prepare(sql) {
+        const stmt = this.inner.prepare(sql);
+        // Unify APIs: Bun uses .get()/.all() on statement, Better-SQLite3 does too.
+        // However, Better-SQLite3 returns 'info' from .run(), Bun returns nothing or different.
+        return {
+            run: (...args) => stmt.run(...args),
+            get: (...args) => stmt.get(...args),
+            all: (...args) => stmt.all(...args)
+        };
+    }
+    transaction(fn) {
+        return this.inner.transaction(fn);
+    }
+}
 export class CodeGraphDB {
     db;
     workerRunning = false;
     constructor() {
-        // Ensure directory exists
         if (!fs.existsSync(OPENCODE_DIR)) {
             fs.mkdirSync(OPENCODE_DIR, { recursive: true });
         }
-        this.db = new Database(DB_PATH);
+        this.db = new DatabaseAdapter(DB_PATH);
         this.initialize();
-        // Start background worker
        this.startWorker();
    }
    initialize() {
-        // Enable WAL mode for concurrency and performance
        this.db.pragma('journal_mode = WAL');
        this.db.exec(`
            CREATE TABLE IF NOT EXISTS files (
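
The DatabaseAdapter above keys off globalThis.Bun, which only the Bun runtime defines, and uses createRequire so an ES module can lazily load whichever CommonJS driver applies; note that on Bun, pragma() is routed through exec() because the adapter treats bun:sqlite as having no pragma() helper. A minimal standalone sketch of the same detection pattern, assuming only the two drivers named in the diff (openAny is an illustrative name, not part of the package):

    import { createRequire } from "node:module";

    const require = createRequire(import.meta.url);

    // Pick a SQLite driver for the current runtime and return the raw handle.
    // Both drivers expose compatible exec()/prepare() entry points, which is
    // what lets the thin wrapper in the diff present one interface.
    function openAny(dbPath: string): any {
        if ((globalThis as any).Bun) {
            const { Database } = require("bun:sqlite");
            return new Database(dbPath, { create: true });
        }
        const Database = require("better-sqlite3");
        return new Database(dbPath);
    }

    const db = openAny(".opencode/autognosis.db");
    db.exec("CREATE TABLE IF NOT EXISTS kv (k TEXT PRIMARY KEY, v TEXT)");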
@@ -43,7 +83,7 @@ export class CodeGraphDB {
             CREATE TABLE IF NOT EXISTS embedding_queue (
                 chunk_id TEXT PRIMARY KEY,
                 text_to_embed TEXT,
-                status TEXT DEFAULT 'pending',
+                status TEXT DEFAULT 'pending',
                 retries INTEGER DEFAULT 0,
                 FOREIGN KEY(chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
             );
@@ -52,7 +92,7 @@ export class CodeGraphDB {
                 id INTEGER PRIMARY KEY AUTOINCREMENT,
                 chunk_id TEXT,
                 name TEXT NOT NULL,
-                kind TEXT,
+                kind TEXT,
                 FOREIGN KEY(chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
             );

@@ -62,12 +102,34 @@ export class CodeGraphDB {
                 FOREIGN KEY(source_chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
             );

+            CREATE TABLE IF NOT EXISTS calls (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                caller_chunk_id TEXT,
+                callee_name TEXT,
+                line_number INTEGER,
+                FOREIGN KEY(caller_chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
+            );
+
+            CREATE TABLE IF NOT EXISTS policies (
+                id TEXT PRIMARY KEY,
+                name TEXT,
+                pattern TEXT,
+                severity TEXT, -- 'error', 'warning'
+                description TEXT
+            );
+
+            CREATE TABLE IF NOT EXISTS context_access_log (
+                chunk_id TEXT,
+                plan_id TEXT,
+                accessed_at DATETIME DEFAULT CURRENT_TIMESTAMP
+            );
+
             CREATE TABLE IF NOT EXISTS commits (
                 hash TEXT PRIMARY KEY,
                 author TEXT,
                 date DATETIME,
                 message TEXT,
-                files_touched TEXT
+                files_touched TEXT
             );

             CREATE TABLE IF NOT EXISTS plan_ledger (
@@ -79,10 +141,31 @@ export class CodeGraphDB {
                 timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
             );

+            CREATE TABLE IF NOT EXISTS blackboard (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                author TEXT,
+                message TEXT,
+                topic TEXT,
+                timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
+            );
+
+            CREATE TABLE IF NOT EXISTS intents (
+                patch_id TEXT PRIMARY KEY,
+                reasoning TEXT,
+                plan_id TEXT
+            );
+
+            CREATE TABLE IF NOT EXISTS arch_rules (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                source_pattern TEXT,
+                target_pattern TEXT,
+                restriction TEXT DEFAULT 'forbidden'
+            );
+
             CREATE TABLE IF NOT EXISTS background_jobs (
                 id TEXT PRIMARY KEY,
-                type TEXT,
-                status TEXT DEFAULT 'pending',
+                type TEXT,
+                status TEXT DEFAULT 'pending',
                 progress INTEGER DEFAULT 0,
                 result TEXT,
                 error TEXT,
@@ -90,22 +173,17 @@ export class CodeGraphDB {
                 updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
             );

-            -- Indexes for performance
             CREATE INDEX IF NOT EXISTS idx_files_path ON files(path);
             CREATE INDEX IF NOT EXISTS idx_symbols_name ON symbols(name);
             CREATE INDEX IF NOT EXISTS idx_dependencies_target ON dependencies(target_path);
             CREATE INDEX IF NOT EXISTS idx_ledger_plan ON plan_ledger(plan_id);
             CREATE INDEX IF NOT EXISTS idx_jobs_status ON background_jobs(status);
         `);
-        // Migrations
         try {
             this.db.exec("ALTER TABLE chunks ADD COLUMN embedding BLOB");
         }
         catch { }
     }
-    /**
-     * Background Job Management
-     */
     createJob(id, type, metadata) {
         this.db.prepare(`
             INSERT INTO background_jobs (id, type, status, progress, result)
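
The try/exec around ALTER TABLE is the additive-migration idiom: on a second run SQLite raises a duplicate-column error, which the empty catch swallows. A sketch of the same idea with an explicit column check instead of a swallowed error, assuming the Node/better-sqlite3 path (ensureColumn is our name, not the package's):

    import Database from "better-sqlite3";

    // Add a column only if it is not already present, using SQLite's
    // pragma_table_info table-valued function instead of catching the error.
    function ensureColumn(db: any, table: string, column: string, ddl: string): void {
        const cols = db.prepare("SELECT name FROM pragma_table_info(?)").all(table) as { name: string }[];
        if (!cols.some((c) => c.name === column)) {
            db.exec(`ALTER TABLE ${table} ADD COLUMN ${column} ${ddl}`);
        }
    }

    const db = new Database(":memory:");
    db.exec("CREATE TABLE chunks (id TEXT PRIMARY KEY)");
    ensureColumn(db, "chunks", "embedding", "BLOB"); // first run: adds the column
    ensureColumn(db, "chunks", "embedding", "BLOB"); // second run: no-op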
@@ -144,25 +222,65 @@ export class CodeGraphDB {
         }
         return this.db.prepare("SELECT * FROM background_jobs ORDER BY created_at DESC LIMIT ?").all(limit);
     }
+    postToBlackboard(author, message, topic = 'general') {
+        this.db.prepare(`
+            INSERT INTO blackboard (author, message, topic)
+            VALUES (?, ?, ?)
+        `).run(author, message, topic);
+    }
+    readBlackboard(topic, limit = 10) {
+        if (topic) {
+            return this.db.prepare(`
+                SELECT * FROM blackboard WHERE topic = ? ORDER BY timestamp DESC LIMIT ?
+            `).all(topic, limit);
+        }
+        return this.db.prepare(`
+            SELECT * FROM blackboard ORDER BY timestamp DESC LIMIT ?
+        `).all(limit);
+    }
+    storeIntent(patchId, reasoning, planId) {
+        this.db.prepare(`
+            INSERT INTO intents (patch_id, reasoning, plan_id)
+            VALUES (?, ?, ?)
+            ON CONFLICT(patch_id) DO UPDATE SET
+                reasoning = excluded.reasoning,
+                plan_id = excluded.plan_id
+        `).run(patchId, reasoning, planId);
+    }
+    getIntent(patchId) {
+        return this.db.prepare("SELECT * FROM intents WHERE patch_id = ?").get(patchId);
+    }
+    addArchRule(source, target) {
+        this.db.prepare(`
+            INSERT INTO arch_rules (source_pattern, target_pattern)
+            VALUES (?, ?)
+        `).run(source, target);
+    }
+    checkArchViolation(sourcePath, targetPath) {
+        const rules = this.db.prepare("SELECT * FROM arch_rules").all();
+        for (const rule of rules) {
+            if (sourcePath.includes(rule.source_pattern) && targetPath.includes(rule.target_pattern)) {
+                return rule;
+            }
+        }
+        return null;
+    }
     async startWorker() {
         if (this.workerRunning)
             return;
         this.workerRunning = true;
-        // Run periodically
         setInterval(async () => {
             try {
                 await this.processEmbeddingQueue();
             }
-            catch (e) {
-
-            }
-        }, 5000); // Check every 5s
+            catch (e) { }
+        }, 5000);
     }
     async processEmbeddingQueue() {
-
-        if (!(await ollama.isRunning()))
+        const useMLX = await mlxService.checkAvailability();
+        const useOllama = !useMLX && (await ollama.isRunning());
+        if (!useMLX && !useOllama)
             return;
-        // Get next task
         const task = this.db.prepare(`
             SELECT chunk_id, text_to_embed, retries
             FROM embedding_queue
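
checkArchViolation above matches by plain substring on both patterns; no globs or regexes are involved, and the first matching rule wins. The same logic lifted out of the class, with a usage example:

    interface ArchRule {
        source_pattern: string;
        target_pattern: string;
        restriction: string;
    }

    // Same semantics as checkArchViolation in the diff: a rule fires when the
    // source path contains source_pattern and the target path contains
    // target_pattern; direction matters.
    function checkArchViolation(rules: ArchRule[], sourcePath: string, targetPath: string): ArchRule | null {
        for (const rule of rules) {
            if (sourcePath.includes(rule.source_pattern) && targetPath.includes(rule.target_pattern)) {
                return rule;
            }
        }
        return null;
    }

    const rules: ArchRule[] = [{ source_pattern: "src/ui", target_pattern: "src/db", restriction: "forbidden" }];
    console.log(checkArchViolation(rules, "src/ui/Button.tsx", "src/db/client.ts")); // the rule
    console.log(checkArchViolation(rules, "src/db/client.ts", "src/ui/Button.tsx")); // null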
@@ -172,24 +290,22 @@ export class CodeGraphDB {
         `).get();
         if (!task)
             return;
-        // Mark processing
         this.db.prepare("UPDATE embedding_queue SET status = 'processing' WHERE chunk_id = ?").run(task.chunk_id);
         try {
-            const vector = await ollama.getEmbedding(task.text_to_embed);
-
+            const vector = useMLX
+                ? await mlxService.getEmbedding(task.text_to_embed)
+                : await ollama.getEmbedding(task.text_to_embed);
             if (vector.length > 0) {
-                // Store blob (Float32Array to Buffer)
                 const buffer = Buffer.from(new Float32Array(vector).buffer);
                 const updateChunk = this.db.prepare("UPDATE chunks SET embedding = ? WHERE id = ?");
                 const deleteQueue = this.db.prepare("DELETE FROM embedding_queue WHERE chunk_id = ?");
-                const txn = this.db.transaction(() => {
+                this.db.transaction(() => {
                     updateChunk.run(buffer, task.chunk_id);
                     deleteQueue.run(task.chunk_id);
-                });
-                txn();
+                })();
             }
             else {
-                throw new Error("Empty vector");
+                throw new Error("Empty vector");
             }
         }
         catch (error) {
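
The worker stores each embedding as a raw BLOB: Buffer.from(new Float32Array(vector).buffer) on the way in, and semanticSearch later rebuilds the Float32Array from the buffer's byteOffset and byteLength (both visible in this diff). The round trip in isolation:

    // number[] -> Float32Array -> Buffer (BLOB), then back without copying.
    // Passing byteOffset/byteLength matters because a Node Buffer may be a
    // view into a larger shared ArrayBuffer.
    function toBlob(vector: number[]): Buffer {
        return Buffer.from(new Float32Array(vector).buffer);
    }

    function fromBlob(blob: Buffer): Float32Array {
        return new Float32Array(blob.buffer, blob.byteOffset, blob.byteLength / 4);
    }

    const blob = toBlob([0.1, 0.2, 0.3]);
    const restored = fromBlob(blob);
    console.log(restored.length); // 3 (each float32 element costs 4 bytes)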
@@ -201,42 +317,30 @@ export class CodeGraphDB {
             }
         }
     }
-    /**
-     * Syncs a ChunkCard (JSON) into the SQLite Index.
-     */
     ingestChunkCard(card) {
         const insertFile = this.db.prepare(`
             INSERT INTO files (path, hash, last_indexed)
             VALUES (?, ?, CURRENT_TIMESTAMP)
-            ON CONFLICT(path) DO UPDATE SET
-                hash = excluded.hash,
-                last_indexed = CURRENT_TIMESTAMP
+            ON CONFLICT(path) DO UPDATE SET hash = excluded.hash, last_indexed = CURRENT_TIMESTAMP
             RETURNING id
         `);
         const insertChunk = this.db.prepare(`
             INSERT INTO chunks (id, file_id, type, complexity_score, content_summary)
             VALUES (?, ?, ?, ?, ?)
-            ON CONFLICT(id) DO UPDATE SET
-                complexity_score = excluded.complexity_score,
-                content_summary = excluded.content_summary
+            ON CONFLICT(id) DO UPDATE SET complexity_score = excluded.complexity_score, content_summary = excluded.content_summary
         `);
         const queueEmbedding = this.db.prepare(`
             INSERT INTO embedding_queue (chunk_id, text_to_embed)
             VALUES (?, ?)
-            ON CONFLICT(chunk_id) DO UPDATE SET
-                text_to_embed = excluded.text_to_embed,
-                status = 'pending',
-                retries = 0
-        `);
-        const insertSymbol = this.db.prepare(`
-            INSERT INTO symbols (chunk_id, name, kind) VALUES (?, ?, 'unknown')
-        `);
-        const insertDep = this.db.prepare(`
-            INSERT INTO dependencies (source_chunk_id, target_path) VALUES (?, ?)
+            ON CONFLICT(chunk_id) DO UPDATE SET text_to_embed = excluded.text_to_embed, status = 'pending', retries = 0
         `);
+        const insertSymbol = this.db.prepare(`INSERT INTO symbols (chunk_id, name, kind) VALUES (?, ?, 'unknown')`);
+        const insertDep = this.db.prepare(`INSERT INTO dependencies (source_chunk_id, target_path) VALUES (?, ?)`);
+        const insertCall = this.db.prepare(`INSERT INTO calls (caller_chunk_id, callee_name, line_number) VALUES (?, ?, ?)`);
         const deleteOldSymbols = this.db.prepare('DELETE FROM symbols WHERE chunk_id = ?');
         const deleteOldDeps = this.db.prepare('DELETE FROM dependencies WHERE source_chunk_id = ?');
-        const transaction = this.db.transaction(() => {
+        const deleteOldCalls = this.db.prepare('DELETE FROM calls WHERE caller_chunk_id = ?');
+        this.db.transaction(() => {
             const fileRes = insertFile.get(card.file_path, card.metadata.hash);
             const fileId = fileRes.id;
             insertChunk.run(card.id, fileId, card.chunk_type, card.metadata.complexity_score, card.content.slice(0, 500));
@@ -245,15 +349,26 @@
 ${card.content.slice(0, 2000)}`;
             queueEmbedding.run(card.id, textToEmbed);
             deleteOldSymbols.run(card.id);
-            for (const sym of card.metadata.symbols) {
+            for (const sym of card.metadata.symbols)
                 insertSymbol.run(card.id, sym);
-            }
             deleteOldDeps.run(card.id);
-            for (const dep of card.metadata.dependencies) {
+            for (const dep of card.metadata.dependencies)
                 insertDep.run(card.id, dep);
+            deleteOldCalls.run(card.id);
+            if (card.metadata.calls) {
+                for (const call of card.metadata.calls)
+                    insertCall.run(card.id, call.name, call.line);
             }
-        });
-        transaction();
+        })();
+    }
+    findCallers(functionName) {
+        return this.db.prepare(`
+            SELECT DISTINCT f.path, cl.line_number
+            FROM files f
+            JOIN chunks c ON f.id = c.file_id
+            JOIN calls cl ON c.id = cl.caller_chunk_id
+            WHERE cl.callee_name = ?
+        `).all(functionName);
     }
     deleteChunkCard(cardId) {
         this.db.prepare('DELETE FROM chunks WHERE id = ?').run(cardId);
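
findCallers resolves a function name to file paths and line numbers by joining files to chunks to the new calls table. A self-contained run of the same query against the schema from this diff, with in-memory fixture data (the Node/better-sqlite3 path is assumed):

    import Database from "better-sqlite3";

    // Mirrors findCallers() above; the fixture rows stand in for a real index.
    const db = new Database(":memory:");
    db.exec(`
        CREATE TABLE files (id INTEGER PRIMARY KEY, path TEXT);
        CREATE TABLE chunks (id TEXT PRIMARY KEY, file_id INTEGER);
        CREATE TABLE calls (caller_chunk_id TEXT, callee_name TEXT, line_number INTEGER);
        INSERT INTO files VALUES (1, 'src/api.ts');
        INSERT INTO chunks VALUES ('chunk-1', 1);
        INSERT INTO calls VALUES ('chunk-1', 'getDb', 42);
    `);
    const callers = db.prepare(`
        SELECT DISTINCT f.path, cl.line_number
        FROM files f
        JOIN chunks c ON f.id = c.file_id
        JOIN calls cl ON c.id = cl.caller_chunk_id
        WHERE cl.callee_name = ?
    `).all("getDb");
    console.log(callers); // [{ path: 'src/api.ts', line_number: 42 }]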
@@ -264,57 +379,55 @@ ${card.content.slice(0, 2000)}`;
             VALUES (?, ?, ?, ?)
         `).run(planId || 'no-plan', toolName, JSON.stringify(args), isOnPlan ? 1 : 0);
     }
+    logAccess(chunkId, planId) {
+        this.db.prepare(`
+            INSERT INTO context_access_log (chunk_id, plan_id)
+            VALUES (?, ?)
+        `).run(chunkId, planId || 'default');
+    }
+    getLruChunks(limit = 5) {
+        return this.db.prepare(`
+            SELECT chunk_id, MAX(accessed_at) as last_seen
+            FROM context_access_log
+            GROUP BY chunk_id
+            ORDER BY last_seen ASC
+            LIMIT ?
+        `).all(limit);
+    }
     ingestCommits(commits) {
         const insert = this.db.prepare(`
             INSERT INTO commits (hash, author, date, message, files_touched)
             VALUES (?, ?, ?, ?, ?)
             ON CONFLICT(hash) DO NOTHING
         `);
-        const transaction = this.db.transaction((data) => {
-            for (const c of data)
+        this.db.transaction((data) => {
+            for (const c of data)
                 insert.run(c.hash, c.author, c.date, c.message, JSON.stringify(c.files));
-
-        });
-        transaction(commits);
+        })(commits);
     }
     getHotFiles(pathPrefix = '', limit = 10) {
-        const recent = this.db.prepare(`
-            SELECT files_touched FROM commits ORDER BY date DESC LIMIT 100
-        `).all();
+        const recent = this.db.prepare(`SELECT files_touched FROM commits ORDER BY date DESC LIMIT 100`).all();
         const counts = {};
         for (const r of recent) {
             try {
                 const files = JSON.parse(r.files_touched);
-                for (const f of files) {
-                    if (f.startsWith(pathPrefix)) {
+                for (const f of files)
+                    if (f.startsWith(pathPrefix))
                         counts[f] = (counts[f] || 0) + 1;
-                    }
-                }
             }
             catch { }
         }
-        return Object.entries(counts)
-            .map(([path, count]) => ({ path, count }))
-            .sort((a, b) => b.count - a.count)
-            .slice(0, limit);
+        return Object.entries(counts).map(([path, count]) => ({ path, count })).sort((a, b) => b.count - a.count).slice(0, limit);
     }
     getPlanMetrics(planId) {
         const total = this.db.prepare("SELECT COUNT(*) as c FROM plan_ledger WHERE plan_id = ?").get(planId);
         const onPlan = this.db.prepare("SELECT COUNT(*) as c FROM plan_ledger WHERE plan_id = ? AND is_on_plan = 1").get(planId);
         const offPlan = this.db.prepare("SELECT COUNT(*) as c FROM plan_ledger WHERE plan_id = ? AND is_on_plan = 0").get(planId);
-        return {
-            total: total.c,
-            on_plan: onPlan.c,
-            off_plan: offPlan.c,
-            compliance: total.c > 0 ? Math.round((onPlan.c / total.c) * 100) : 100
-        };
+        return { total: total.c, on_plan: onPlan.c, off_plan: offPlan.c, compliance: total.c > 0 ? Math.round((onPlan.c / total.c) * 100) : 100 };
     }
     findDependents(filePath) {
         const query = this.db.prepare(`
-            SELECT DISTINCT f.path
-            FROM files f
-            JOIN chunks c ON f.id = c.file_id
-            JOIN dependencies d ON c.id = d.source_chunk_id
+            SELECT DISTINCT f.path FROM files f JOIN chunks c ON f.id = c.file_id JOIN dependencies d ON c.id = d.source_chunk_id
             WHERE d.target_path LIKE ? OR d.target_path = ?
         `);
         const basename = path.basename(filePath);
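
getHotFiles folds the files_touched JSON arrays of the 100 most recent commits into a frequency table. The aggregation on its own, detached from the database:

    // Standalone version of the getHotFiles() fold above: count how often each
    // file appears in recent commits' files_touched JSON arrays.
    function hotFiles(rows: { files_touched: string }[], pathPrefix = "", limit = 10) {
        const counts: Record<string, number> = {};
        for (const r of rows) {
            try {
                for (const f of JSON.parse(r.files_touched) as string[]) {
                    if (f.startsWith(pathPrefix)) counts[f] = (counts[f] || 0) + 1;
                }
            } catch { /* skip malformed rows, as the diff does */ }
        }
        return Object.entries(counts)
            .map(([path, count]) => ({ path, count }))
            .sort((a, b) => b.count - a.count)
            .slice(0, limit);
    }

    console.log(hotFiles([
        { files_touched: '["src/a.ts","src/b.ts"]' },
        { files_touched: '["src/a.ts"]' },
    ])); // [{ path: 'src/a.ts', count: 2 }, { path: 'src/b.ts', count: 1 }]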
@@ -323,32 +436,33 @@ ${card.content.slice(0, 2000)}`;
     }
     searchSymbols(query) {
         const stmt = this.db.prepare(`
-            SELECT s.name, c.type, f.path
-            FROM symbols s
-            JOIN chunks c ON s.chunk_id = c.id
-            JOIN files f ON c.file_id = f.id
-            WHERE s.name LIKE ?
-            LIMIT 20
+            SELECT s.name, c.type, f.path FROM symbols s JOIN chunks c ON s.chunk_id = c.id JOIN files f ON c.file_id = f.id
+            WHERE s.name LIKE ? LIMIT 20
         `);
         return stmt.all(`%${query}%`);
     }
     async semanticSearch(query, limit = 10) {
-        if (!(await ollama.isRunning())) {
-            throw new Error("Ollama is not running.");
-        }
+        if (!(await ollama.isRunning()))
+            throw new Error("Ollama is not running.");
         const queryVec = await ollama.getEmbedding(query);
         if (queryVec.length === 0)
             return [];
         const chunks = this.db.prepare(`
-            SELECT c.id, c.content_summary, c.type, f.path, c.embedding
-            FROM chunks c
-            JOIN files f ON c.file_id = f.id
+            SELECT c.id, c.content_summary, c.type, f.path, c.embedding FROM chunks c JOIN files f ON c.file_id = f.id
             WHERE c.embedding IS NOT NULL
         `).all();
         const results = chunks.map(chunk => {
             const vector = new Float32Array(chunk.embedding.buffer, chunk.embedding.byteOffset, chunk.embedding.byteLength / 4);
-            const similarity = this.cosineSimilarity(Array.from(queryVec), vector);
-            return { ...chunk, similarity, embedding: undefined };
+            const vectorSimilarity = this.cosineSimilarity(Array.from(queryVec), vector);
+            // Heuristic Reranking: Blended score with keyword overlap
+            const keywords = query.toLowerCase().split(/\s+/);
+            const text = chunk.content_summary.toLowerCase();
+            let keywordScore = 0;
+            for (const kw of keywords)
+                if (text.includes(kw))
+                    keywordScore += 0.1;
+            const similarity = (vectorSimilarity * 0.7) + (Math.min(0.3, keywordScore));
+            return { ...chunk, similarity, vectorSimilarity, keywordScore, embedding: undefined };
         });
         results.sort((a, b) => b.similarity - a.similarity);
         return results.slice(0, limit);
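
The reranking above blends the two signals as similarity = 0.7 x cosine + min(0.3, 0.1 x matched keywords), so keyword overlap can contribute at most 0.3. cosineSimilarity itself is not shown in this hunk, so the version below is the textbook formula rather than necessarily the package's:

    // Standard cosine similarity; an assumption, since the class method is
    // not part of this diff.
    function cosineSimilarity(a: number[], b: Float32Array): number {
        let dot = 0, na = 0, nb = 0;
        for (let i = 0; i < a.length; i++) {
            dot += a[i] * b[i];
            na += a[i] * a[i];
            nb += b[i] * b[i];
        }
        return dot / (Math.sqrt(na) * Math.sqrt(nb) || 1);
    }

    // The blend exactly as the diff computes it.
    function blendedScore(query: string, summary: string, queryVec: number[], chunkVec: Float32Array): number {
        const vectorSimilarity = cosineSimilarity(queryVec, chunkVec);
        let keywordScore = 0;
        for (const kw of query.toLowerCase().split(/\s+/)) {
            if (summary.toLowerCase().includes(kw)) keywordScore += 0.1;
        }
        return vectorSimilarity * 0.7 + Math.min(0.3, keywordScore);
    }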
@@ -384,37 +498,44 @@ export function getDb() {
 export function graphTools() {
     return {
         autognosis_setup_ai: tool({
-            description: "Configure local AI capabilities (Ollama) in the background.",
-            args: {
-            async execute({ model }) {
+            description: "Configure local AI capabilities (Ollama or MLX) in the background.",
+            args: {
+                provider: tool.schema.enum(["ollama", "mlx"]).optional().default("ollama").describe("AI Provider to use"),
+                model: tool.schema.string().optional().describe("Model name (optional override)")
+            },
+            async execute({ provider, model }) {
                 const jobId = `job-setup-ai-${Date.now()}`;
-                getDb().createJob(jobId, "setup", { model });
+                getDb().createJob(jobId, "setup", { provider, model });
                 (async () => {
                     try {
                         getDb().updateJob(jobId, { status: "running", progress: 10 });
-                        if (!(await ollama.isInstalled())) {
-                            await ollama.install();
+                        await tui.showProgress("AI Setup", 10, `Initializing ${provider}...`);
+                        if (provider === "mlx") {
+                            await mlxService.setup();
+                            getDb().updateJob(jobId, { status: "completed", progress: 100, result: "MLX is ready." });
+                            await tui.showSuccess("AI Setup Complete", "MLX backend is ready.");
+                        }
+                        else {
+                            if (!(await ollama.isInstalled())) {
+                                await tui.showProgress("AI Setup", 20, "Downloading Ollama...");
+                                await ollama.install();
+                            }
+                            getDb().updateJob(jobId, { progress: 40 });
+                            await tui.showProgress("AI Setup", 40, "Starting Ollama server...");
+                            await ollama.startServer();
+                            getDb().updateJob(jobId, { progress: 60 });
+                            await tui.showProgress("AI Setup", 60, `Pulling model: ${model || DEFAULT_EMBEDDING_MODEL}...`);
+                            await ollama.pullModel(model || DEFAULT_EMBEDDING_MODEL);
+                            getDb().updateJob(jobId, { status: "completed", progress: 100, result: `Ollama (${model || DEFAULT_EMBEDDING_MODEL}) is ready.` });
+                            await tui.showSuccess("AI Setup Complete", `Ollama (${model || DEFAULT_EMBEDDING_MODEL}) is ready.`);
                         }
-                        getDb().updateJob(jobId, { progress: 40 });
-                        await ollama.startServer();
-                        getDb().updateJob(jobId, { progress: 60 });
-                        await ollama.pullModel(model);
-                        getDb().updateJob(jobId, {
-                            status: "completed",
-                            progress: 100,
-                            result: `Model ${model} is ready.`
-                        });
                     }
                     catch (error) {
                         getDb().updateJob(jobId, { status: "failed", error: error.message });
+                        await tui.showError("AI Setup Failed", error.message);
                     }
                 })();
-                return JSON.stringify({
-                    status: "STARTED",
-                    message: "AI Setup started in background.",
-                    job_id: jobId,
-                    instruction: "Use graph_background_status to check progress."
-                }, null, 2);
+                return JSON.stringify({ status: "STARTED", message: `AI Setup (${provider}) started in background.`, job_id: jobId, instruction: "Use graph_background_status to check progress." }, null, 2);
             }
         }),
         graph_semantic_search: tool({
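
autognosis_setup_ai follows a fire-and-forget pattern: record a job row, detach an async IIFE that keeps updating it, and return the job id at once so the caller can poll graph_background_status. The shape of that pattern reduced to a sketch (the in-memory jobs map stands in for the background_jobs table):

    const jobs = new Map<string, { status: string; progress: number; error?: string }>();

    // Start work without awaiting it; the returned id is the polling handle.
    function startBackgroundJob(work: (report: (p: number) => void) => Promise<void>): string {
        const jobId = `job-${Date.now()}`;
        jobs.set(jobId, { status: "running", progress: 0 });
        (async () => {
            try {
                await work((p) => { jobs.get(jobId)!.progress = p; });
                jobs.set(jobId, { status: "completed", progress: 100 });
            }
            catch (error: any) {
                jobs.set(jobId, { status: "failed", progress: 0, error: error.message });
            }
        })();
        return jobId; // caller polls for status/progress, as graph_background_status does
    }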
@@ -521,9 +642,8 @@ export function graphTools() {
             },
             async execute({ job_id, type, limit }) {
                 try {
-                    if (job_id) {
+                    if (job_id)
                         return JSON.stringify({ status: "SUCCESS", job: getDb().getJob(job_id) }, null, 2);
-                    }
                     return JSON.stringify({ status: "SUCCESS", jobs: getDb().listJobs(type, limit) }, null, 2);
                 }
                 catch (error) {
package/dist/index.d.ts
CHANGED
@@ -1,6 +1,11 @@
-export declare const AutognosisPlugin: () => Promise<{
+export declare const AutognosisPlugin: ({ client }: any) => Promise<{
     tool: {
         [key: string]: any;
     };
+    "experimental.session.compacting": (input: {
+        sessionID: string;
+    }, output: {
+        context: string[];
+    }) => Promise<void>;
 }>;
 export default AutognosisPlugin;
package/dist/index.js
CHANGED
@@ -1,9 +1,40 @@
 import { unifiedTools } from "./unified-api.js";
-export const AutognosisPlugin = async () => {
+import { loadWorkingMemory, loadActiveSet } from "./activeset.js";
+import { tui } from "./services/tui.js";
+import { codeWatcher } from "./services/watcher.js";
+export const AutognosisPlugin = async ({ client }) => {
+    // Initialize TUI service for progress streaming
+    tui.setClient(client);
+    // Start live file watcher
+    codeWatcher.start();
     return {
         tool: {
             ...unifiedTools(),
         },
+        "experimental.session.compacting": async (input, output) => {
+            try {
+                const memory = await loadWorkingMemory();
+                if (memory.current_set) {
+                    const activeSet = await loadActiveSet(memory.current_set);
+                    if (activeSet) {
+                        const stateBlock = `
+[AUTOGNOSIS CONTEXT PRESERVATION]
+ActiveSet ID: ${activeSet.id}
+ActiveSet Name: ${activeSet.name}
+Priority: ${activeSet.priority}
+Loaded Chunks: ${activeSet.chunks.join(", ")}
+Metadata: ${JSON.stringify(activeSet.metadata)}
+
+The agent is currently focused on these files and symbols. Ensure the summary reflects this active working memory state.
+`;
+                        output.context.push(stateBlock);
+                    }
+                }
+            }
+            catch (error) {
+                // Fail silently during compaction to avoid breaking the core session
+            }
+        }
     };
 };
 export default AutognosisPlugin;
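
Per the updated index.d.ts, the experimental.session.compacting hook receives { sessionID } and appends strings to output.context; whatever is pushed is handed to the summarizer when the session is compacted. A minimal handler with the same contract (keepPinnedFiles and its message are a hypothetical example, not the plugin's):

    type CompactingHook = (input: { sessionID: string }, output: { context: string[] }) => Promise<void>;

    // Pin a note into every compacted summary; real state would come from
    // loadActiveSet(), as the plugin above does.
    const keepPinnedFiles: CompactingHook = async (_input, output) => {
        output.context.push("[PINNED] src/database.ts is under active refactor; keep its details in the summary.");
    };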
package/dist/unified-api.js
CHANGED

@@ -8,6 +8,7 @@ import * as crypto from "node:crypto";
 import { getDb } from "./database.js";
 import { CHUNK_DIR, ensureChunkDir, calculateHash, calculateComplexity, parseFileAST, generateSummaryChunk, generateApiChunk, generateInvariantChunk, extractDependencies, extractSymbolsFromAST, extractSymbols } from "./chunk-cards.js";
 import { Logger } from "./services/logger.js";
+import { tui } from "./services/tui.js";
 const execAsync = promisify(exec);
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
@@ -606,7 +607,7 @@ async function getAllSourceFiles() {
     await scanDirectory(PROJECT_ROOT);
     return sourceFiles;
 }
-async function indexFile(filePath) {
+export async function indexFile(filePath) {
     try {
         const content = await fs.readFile(filePath, 'utf-8');
         await ensureChunkDir();
@@ -711,8 +712,11 @@ async function runBackgroundIndexing(taskId, indexingState) {
             processed++;
             // Update progress periodically
             if (processed % 5 === 0 || processed === total) {
-                task.progress = Math.round((processed / total) * 100);
+                const progress = Math.round((processed / total) * 100);
+                task.progress = progress;
                 await fs.writeFile(taskPath, JSON.stringify(task, null, 2));
+                // Stream to TUI
+                await tui.showProgress("Codebase Indexing", progress, `Processing: ${file}`);
             }
         }
         // Complete task
@@ -720,6 +724,7 @@ async function runBackgroundIndexing(taskId, indexingState) {
         task.completed_at = new Date().toISOString();
         task.progress = 100;
         await fs.writeFile(taskPath, JSON.stringify(task, null, 2));
+        await tui.showSuccess("Indexing Complete", `Processed ${total} files.`);
     }
     catch (error) {
         // Update task with error
package/dist/services/mlx.d.ts
ADDED

@@ -0,0 +1,8 @@
+export declare const DEFAULT_MLX_MODEL = "sentence-transformers/all-MiniLM-L6-v2";
+export declare class MLXService {
+    private isAvailable;
+    checkAvailability(): Promise<boolean>;
+    setup(): Promise<string>;
+    getEmbedding(text: string, model?: string): Promise<number[]>;
+}
+export declare const mlxService: MLXService;