opencode-autognosis 2.0.5 → 2.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/database.js CHANGED
@@ -1,26 +1,66 @@
- import Database from "better-sqlite3";
  import * as path from "node:path";
  import * as fs from "node:fs";
+ import { createRequire } from "node:module";
  import { tool } from "@opencode-ai/plugin";
  import { ollama, DEFAULT_EMBEDDING_MODEL } from "./services/ollama.js";
+ import { mlxService } from "./services/mlx.js";
+ import { tui } from "./services/tui.js";
+ const require = createRequire(import.meta.url);
  const PROJECT_ROOT = process.cwd();
  const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
  const DB_PATH = path.join(OPENCODE_DIR, "autognosis.db");
+ /**
+ * Multi-Runtime SQLite Adapter
+ * Detects Bun vs Node and provides a unified interface.
+ */
+ class DatabaseAdapter {
+ inner;
+ isBun;
+ constructor(path) {
+ this.isBun = !!globalThis.Bun;
+ if (this.isBun) {
+ const { Database } = require("bun:sqlite");
+ this.inner = new Database(path, { create: true });
+ }
+ else {
+ const Database = require("better-sqlite3");
+ this.inner = new Database(path);
+ }
+ }
+ exec(sql) {
+ return this.inner.exec(sql);
+ }
+ pragma(sql) {
+ if (this.isBun)
+ return this.inner.exec(`PRAGMA ${sql}`);
+ return this.inner.pragma(sql);
+ }
+ prepare(sql) {
+ const stmt = this.inner.prepare(sql);
+ // Unify APIs: Bun uses .get()/.all() on statement, Better-SQLite3 does too.
+ // However, Better-SQLite3 returns 'info' from .run(), Bun returns nothing or different.
+ return {
+ run: (...args) => stmt.run(...args),
+ get: (...args) => stmt.get(...args),
+ all: (...args) => stmt.all(...args)
+ };
+ }
+ transaction(fn) {
+ return this.inner.transaction(fn);
+ }
+ }
  export class CodeGraphDB {
  db;
  workerRunning = false;
  constructor() {
- // Ensure directory exists
  if (!fs.existsSync(OPENCODE_DIR)) {
  fs.mkdirSync(OPENCODE_DIR, { recursive: true });
  }
- this.db = new Database(DB_PATH);
+ this.db = new DatabaseAdapter(DB_PATH);
  this.initialize();
- // Start background worker
  this.startWorker();
  }
  initialize() {
- // Enable WAL mode for concurrency and performance
  this.db.pragma('journal_mode = WAL');
  this.db.exec(`
  CREATE TABLE IF NOT EXISTS files (
@@ -43,7 +83,7 @@ export class CodeGraphDB {
  CREATE TABLE IF NOT EXISTS embedding_queue (
  chunk_id TEXT PRIMARY KEY,
  text_to_embed TEXT,
- status TEXT DEFAULT 'pending', -- pending, processing, failed
+ status TEXT DEFAULT 'pending',
  retries INTEGER DEFAULT 0,
  FOREIGN KEY(chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
  );
@@ -52,7 +92,7 @@ export class CodeGraphDB {
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  chunk_id TEXT,
  name TEXT NOT NULL,
- kind TEXT, -- 'function', 'class', 'interface', etc.
+ kind TEXT,
  FOREIGN KEY(chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
  );

@@ -62,12 +102,34 @@ export class CodeGraphDB {
  FOREIGN KEY(source_chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
  );

+ CREATE TABLE IF NOT EXISTS calls (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ caller_chunk_id TEXT,
+ callee_name TEXT,
+ line_number INTEGER,
+ FOREIGN KEY(caller_chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
+ );
+
+ CREATE TABLE IF NOT EXISTS policies (
+ id TEXT PRIMARY KEY,
+ name TEXT,
+ pattern TEXT,
+ severity TEXT, -- 'error', 'warning'
+ description TEXT
+ );
+
+ CREATE TABLE IF NOT EXISTS context_access_log (
+ chunk_id TEXT,
+ plan_id TEXT,
+ accessed_at DATETIME DEFAULT CURRENT_TIMESTAMP
+ );
+
  CREATE TABLE IF NOT EXISTS commits (
  hash TEXT PRIMARY KEY,
  author TEXT,
  date DATETIME,
  message TEXT,
- files_touched TEXT -- JSON array of paths
+ files_touched TEXT
  );

  CREATE TABLE IF NOT EXISTS plan_ledger (
@@ -79,10 +141,40 @@ export class CodeGraphDB {
  timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
  );

+ CREATE TABLE IF NOT EXISTS blackboard (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ author TEXT,
+ message TEXT,
+ topic TEXT,
+ symbol_id TEXT, -- Optional link to a code symbol
+ embedding BLOB, -- For semantic search on the blackboard
+ timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
+ );
+
+ CREATE TABLE IF NOT EXISTS locks (
+ resource_id TEXT PRIMARY KEY, -- file path or symbol name
+ owner_agent TEXT,
+ created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+ expires_at DATETIME
+ );
+
+ CREATE TABLE IF NOT EXISTS intents (
+ patch_id TEXT PRIMARY KEY,
+ reasoning TEXT,
+ plan_id TEXT
+ );
+
+ CREATE TABLE IF NOT EXISTS arch_rules (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ source_pattern TEXT,
+ target_pattern TEXT,
+ restriction TEXT DEFAULT 'forbidden'
+ );
+
  CREATE TABLE IF NOT EXISTS background_jobs (
  id TEXT PRIMARY KEY,
- type TEXT, -- 'validation', 'setup', 'indexing'
- status TEXT DEFAULT 'pending', -- pending, running, completed, failed
+ type TEXT,
+ status TEXT DEFAULT 'pending',
  progress INTEGER DEFAULT 0,
  result TEXT,
  error TEXT,
@@ -90,22 +182,17 @@ export class CodeGraphDB {
  updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
  );

- -- Indexes for performance
  CREATE INDEX IF NOT EXISTS idx_files_path ON files(path);
  CREATE INDEX IF NOT EXISTS idx_symbols_name ON symbols(name);
  CREATE INDEX IF NOT EXISTS idx_dependencies_target ON dependencies(target_path);
  CREATE INDEX IF NOT EXISTS idx_ledger_plan ON plan_ledger(plan_id);
  CREATE INDEX IF NOT EXISTS idx_jobs_status ON background_jobs(status);
  `);
- // Migrations
  try {
  this.db.exec("ALTER TABLE chunks ADD COLUMN embedding BLOB");
  }
  catch { }
  }
- /**
- * Background Job Management
- */
  createJob(id, type, metadata) {
  this.db.prepare(`
  INSERT INTO background_jobs (id, type, status, progress, result)
@@ -144,25 +231,109 @@ export class CodeGraphDB {
  }
  return this.db.prepare("SELECT * FROM background_jobs ORDER BY created_at DESC LIMIT ?").all(limit);
  }
+ postToBlackboard(author, message, topic = 'general', symbolId) {
+ const insert = this.db.prepare(`
+ INSERT INTO blackboard (author, message, topic, symbol_id)
+ VALUES (?, ?, ?, ?)
+ RETURNING id
+ `);
+ const res = insert.get(author, message, topic, symbolId || null);
+ // Queue for embedding (blackboard search)
+ this.db.prepare(`
+ INSERT INTO embedding_queue (chunk_id, text_to_embed)
+ VALUES (?, ?)
+ `).run(`blackboard-${res.id}`, `${topic.toUpperCase()}: ${message}`);
+ }
+ readBlackboard(topic, limit = 10) {
+ if (topic) {
+ return this.db.prepare(`
+ SELECT * FROM blackboard WHERE topic = ? ORDER BY timestamp DESC LIMIT ?
+ `).all(topic, limit);
+ }
+ return this.db.prepare(`
+ SELECT * FROM blackboard ORDER BY timestamp DESC LIMIT ?
+ `).all(limit);
+ }
+ getGraffiti(symbolId) {
+ return this.db.prepare(`
+ SELECT author, message, timestamp
+ FROM blackboard
+ WHERE symbol_id = ?
+ ORDER BY timestamp DESC
+ `).all(symbolId);
+ }
+ acquireLock(resourceId, agentName, ttlSeconds = 300) {
+ // Check if already locked by someone else
+ const current = this.isLocked(resourceId);
+ if (current && current.owner_agent !== agentName) {
+ throw new Error(`Resource ${resourceId} is already locked by ${current.owner_agent}`);
+ }
+ const expiresAt = new Date(Date.now() + ttlSeconds * 1000).toISOString();
+ this.db.prepare(`
+ INSERT INTO locks (resource_id, owner_agent, expires_at)
+ VALUES (?, ?, ?)
+ ON CONFLICT(resource_id) DO UPDATE SET
+ owner_agent = excluded.owner_agent,
+ expires_at = excluded.expires_at
+ `).run(resourceId, agentName, expiresAt);
+ }
+ releaseLock(resourceId, agentName) {
+ this.db.prepare(`
+ DELETE FROM locks
+ WHERE resource_id = ? AND owner_agent = ?
+ `).run(resourceId, agentName);
+ }
+ isLocked(resourceId) {
+ // Automatically prune expired locks
+ this.db.prepare("DELETE FROM locks WHERE expires_at < CURRENT_TIMESTAMP").run();
+ return this.db.prepare("SELECT * FROM locks WHERE resource_id = ?").get(resourceId);
+ }
+ listLocks() {
+ return this.db.prepare("SELECT * FROM locks").all();
+ }
+ storeIntent(patchId, reasoning, planId) {
+ this.db.prepare(`
+ INSERT INTO intents (patch_id, reasoning, plan_id)
+ VALUES (?, ?, ?)
+ ON CONFLICT(patch_id) DO UPDATE SET
+ reasoning = excluded.reasoning,
+ plan_id = excluded.plan_id
+ `).run(patchId, reasoning, planId);
+ }
+ getIntent(patchId) {
+ return this.db.prepare("SELECT * FROM intents WHERE patch_id = ?").get(patchId);
+ }
+ addArchRule(source, target) {
+ this.db.prepare(`
+ INSERT INTO arch_rules (source_pattern, target_pattern)
+ VALUES (?, ?)
+ `).run(source, target);
+ }
+ checkArchViolation(sourcePath, targetPath) {
+ const rules = this.db.prepare("SELECT * FROM arch_rules").all();
+ for (const rule of rules) {
+ if (sourcePath.includes(rule.source_pattern) && targetPath.includes(rule.target_pattern)) {
+ return rule;
+ }
+ }
+ return null;
+ }
  async startWorker() {
  if (this.workerRunning)
  return;
  this.workerRunning = true;
- // Run periodically
  setInterval(async () => {
  try {
  await this.processEmbeddingQueue();
  }
- catch (e) {
- // Log to file if needed, but avoid console to protect TUI
- }
- }, 5000); // Check every 5s
+ catch (e) { }
+ }, 5000);
  }
  async processEmbeddingQueue() {
- // Check if Ollama is ready
- if (!(await ollama.isRunning()))
+ const useMLX = await mlxService.checkAvailability();
+ const useOllama = !useMLX && (await ollama.isRunning());
+ if (!useMLX && !useOllama)
  return;
- // Get next task
  const task = this.db.prepare(`
  SELECT chunk_id, text_to_embed, retries
  FROM embedding_queue
@@ -172,24 +343,22 @@ export class CodeGraphDB {
  `).get();
  if (!task)
  return;
- // Mark processing
  this.db.prepare("UPDATE embedding_queue SET status = 'processing' WHERE chunk_id = ?").run(task.chunk_id);
  try {
- // Generate embedding
- const vector = await ollama.getEmbedding(task.text_to_embed);
+ const vector = useMLX
+ ? await mlxService.getEmbedding(task.text_to_embed)
+ : await ollama.getEmbedding(task.text_to_embed);
  if (vector.length > 0) {
- // Store blob (Float32Array to Buffer)
  const buffer = Buffer.from(new Float32Array(vector).buffer);
  const updateChunk = this.db.prepare("UPDATE chunks SET embedding = ? WHERE id = ?");
  const deleteQueue = this.db.prepare("DELETE FROM embedding_queue WHERE chunk_id = ?");
- const txn = this.db.transaction(() => {
+ this.db.transaction(() => {
  updateChunk.run(buffer, task.chunk_id);
  deleteQueue.run(task.chunk_id);
- });
- txn();
+ })();
  }
  else {
- throw new Error("Empty vector returned");
+ throw new Error("Empty vector");
  }
  }
  catch (error) {
@@ -201,42 +370,30 @@ export class CodeGraphDB {
  }
  }
  }
- /**
- * Syncs a ChunkCard (JSON) into the SQLite Index.
- */
  ingestChunkCard(card) {
  const insertFile = this.db.prepare(`
  INSERT INTO files (path, hash, last_indexed)
  VALUES (?, ?, CURRENT_TIMESTAMP)
- ON CONFLICT(path) DO UPDATE SET
- hash = excluded.hash,
- last_indexed = CURRENT_TIMESTAMP
+ ON CONFLICT(path) DO UPDATE SET hash = excluded.hash, last_indexed = CURRENT_TIMESTAMP
  RETURNING id
  `);
  const insertChunk = this.db.prepare(`
  INSERT INTO chunks (id, file_id, type, complexity_score, content_summary)
  VALUES (?, ?, ?, ?, ?)
- ON CONFLICT(id) DO UPDATE SET
- complexity_score = excluded.complexity_score,
- content_summary = excluded.content_summary
+ ON CONFLICT(id) DO UPDATE SET complexity_score = excluded.complexity_score, content_summary = excluded.content_summary
  `);
  const queueEmbedding = this.db.prepare(`
  INSERT INTO embedding_queue (chunk_id, text_to_embed)
  VALUES (?, ?)
- ON CONFLICT(chunk_id) DO UPDATE SET
- text_to_embed = excluded.text_to_embed,
- status = 'pending',
- retries = 0
- `);
- const insertSymbol = this.db.prepare(`
- INSERT INTO symbols (chunk_id, name, kind) VALUES (?, ?, 'unknown')
- `);
- const insertDep = this.db.prepare(`
- INSERT INTO dependencies (source_chunk_id, target_path) VALUES (?, ?)
+ ON CONFLICT(chunk_id) DO UPDATE SET text_to_embed = excluded.text_to_embed, status = 'pending', retries = 0
  `);
+ const insertSymbol = this.db.prepare(`INSERT INTO symbols (chunk_id, name, kind) VALUES (?, ?, 'unknown')`);
+ const insertDep = this.db.prepare(`INSERT INTO dependencies (source_chunk_id, target_path) VALUES (?, ?)`);
+ const insertCall = this.db.prepare(`INSERT INTO calls (caller_chunk_id, callee_name, line_number) VALUES (?, ?, ?)`);
  const deleteOldSymbols = this.db.prepare('DELETE FROM symbols WHERE chunk_id = ?');
  const deleteOldDeps = this.db.prepare('DELETE FROM dependencies WHERE source_chunk_id = ?');
- const transaction = this.db.transaction(() => {
+ const deleteOldCalls = this.db.prepare('DELETE FROM calls WHERE caller_chunk_id = ?');
+ this.db.transaction(() => {
  const fileRes = insertFile.get(card.file_path, card.metadata.hash);
  const fileId = fileRes.id;
  insertChunk.run(card.id, fileId, card.chunk_type, card.metadata.complexity_score, card.content.slice(0, 500));
@@ -245,15 +402,26 @@ export class CodeGraphDB {
  ${card.content.slice(0, 2000)}`;
  queueEmbedding.run(card.id, textToEmbed);
  deleteOldSymbols.run(card.id);
- for (const sym of card.metadata.symbols) {
+ for (const sym of card.metadata.symbols)
  insertSymbol.run(card.id, sym);
- }
  deleteOldDeps.run(card.id);
- for (const dep of card.metadata.dependencies) {
+ for (const dep of card.metadata.dependencies)
  insertDep.run(card.id, dep);
+ deleteOldCalls.run(card.id);
+ if (card.metadata.calls) {
+ for (const call of card.metadata.calls)
+ insertCall.run(card.id, call.name, call.line);
  }
- });
- transaction();
+ })();
+ }
+ findCallers(functionName) {
+ return this.db.prepare(`
+ SELECT DISTINCT f.path, cl.line_number
+ FROM files f
+ JOIN chunks c ON f.id = c.file_id
+ JOIN calls cl ON c.id = cl.caller_chunk_id
+ WHERE cl.callee_name = ?
+ `).all(functionName);
  }
  deleteChunkCard(cardId) {
  this.db.prepare('DELETE FROM chunks WHERE id = ?').run(cardId);
@@ -264,57 +432,55 @@ ${card.content.slice(0, 2000)}`;
  VALUES (?, ?, ?, ?)
  `).run(planId || 'no-plan', toolName, JSON.stringify(args), isOnPlan ? 1 : 0);
  }
+ logAccess(chunkId, planId) {
+ this.db.prepare(`
+ INSERT INTO context_access_log (chunk_id, plan_id)
+ VALUES (?, ?)
+ `).run(chunkId, planId || 'default');
+ }
+ getLruChunks(limit = 5) {
+ return this.db.prepare(`
+ SELECT chunk_id, MAX(accessed_at) as last_seen
+ FROM context_access_log
+ GROUP BY chunk_id
+ ORDER BY last_seen ASC
+ LIMIT ?
+ `).all(limit);
+ }
  ingestCommits(commits) {
  const insert = this.db.prepare(`
  INSERT INTO commits (hash, author, date, message, files_touched)
  VALUES (?, ?, ?, ?, ?)
  ON CONFLICT(hash) DO NOTHING
  `);
- const transaction = this.db.transaction((data) => {
- for (const c of data) {
+ this.db.transaction((data) => {
+ for (const c of data)
  insert.run(c.hash, c.author, c.date, c.message, JSON.stringify(c.files));
- }
- });
- transaction(commits);
+ })(commits);
  }
  getHotFiles(pathPrefix = '', limit = 10) {
- const recent = this.db.prepare(`
- SELECT files_touched FROM commits ORDER BY date DESC LIMIT 100
- `).all();
+ const recent = this.db.prepare(`SELECT files_touched FROM commits ORDER BY date DESC LIMIT 100`).all();
  const counts = {};
  for (const r of recent) {
  try {
  const files = JSON.parse(r.files_touched);
- for (const f of files) {
- if (f.startsWith(pathPrefix)) {
+ for (const f of files)
+ if (f.startsWith(pathPrefix))
  counts[f] = (counts[f] || 0) + 1;
- }
- }
  }
  catch { }
  }
- return Object.entries(counts)
- .map(([path, count]) => ({ path, count }))
- .sort((a, b) => b.count - a.count)
- .slice(0, limit);
+ return Object.entries(counts).map(([path, count]) => ({ path, count })).sort((a, b) => b.count - a.count).slice(0, limit);
  }
  getPlanMetrics(planId) {
  const total = this.db.prepare("SELECT COUNT(*) as c FROM plan_ledger WHERE plan_id = ?").get(planId);
  const onPlan = this.db.prepare("SELECT COUNT(*) as c FROM plan_ledger WHERE plan_id = ? AND is_on_plan = 1").get(planId);
  const offPlan = this.db.prepare("SELECT COUNT(*) as c FROM plan_ledger WHERE plan_id = ? AND is_on_plan = 0").get(planId);
- return {
- total: total.c,
- on_plan: onPlan.c,
- off_plan: offPlan.c,
- compliance: total.c > 0 ? Math.round((onPlan.c / total.c) * 100) : 100
- };
+ return { total: total.c, on_plan: onPlan.c, off_plan: offPlan.c, compliance: total.c > 0 ? Math.round((onPlan.c / total.c) * 100) : 100 };
  }
  findDependents(filePath) {
  const query = this.db.prepare(`
- SELECT DISTINCT f.path
- FROM files f
- JOIN chunks c ON f.id = c.file_id
- JOIN dependencies d ON c.id = d.source_chunk_id
+ SELECT DISTINCT f.path FROM files f JOIN chunks c ON f.id = c.file_id JOIN dependencies d ON c.id = d.source_chunk_id
  WHERE d.target_path LIKE ? OR d.target_path = ?
  `);
  const basename = path.basename(filePath);
@@ -323,32 +489,33 @@ ${card.content.slice(0, 2000)}`;
  }
  searchSymbols(query) {
  const stmt = this.db.prepare(`
- SELECT s.name, c.type, f.path
- FROM symbols s
- JOIN chunks c ON s.chunk_id = c.id
- JOIN files f ON c.file_id = f.id
- WHERE s.name LIKE ?
- LIMIT 20
+ SELECT s.name, c.type, f.path FROM symbols s JOIN chunks c ON s.chunk_id = c.id JOIN files f ON c.file_id = f.id
+ WHERE s.name LIKE ? LIMIT 20
  `);
  return stmt.all(`%${query}%`);
  }
  async semanticSearch(query, limit = 10) {
- if (!(await ollama.isRunning())) {
- throw new Error("Ollama is not running. Please run 'autognosis_setup_ai' first.");
- }
+ if (!(await ollama.isRunning()))
+ throw new Error("Ollama is not running.");
  const queryVec = await ollama.getEmbedding(query);
  if (queryVec.length === 0)
  return [];
  const chunks = this.db.prepare(`
- SELECT c.id, c.content_summary, c.type, f.path, c.embedding
- FROM chunks c
- JOIN files f ON c.file_id = f.id
+ SELECT c.id, c.content_summary, c.type, f.path, c.embedding FROM chunks c JOIN files f ON c.file_id = f.id
  WHERE c.embedding IS NOT NULL
  `).all();
  const results = chunks.map(chunk => {
  const vector = new Float32Array(chunk.embedding.buffer, chunk.embedding.byteOffset, chunk.embedding.byteLength / 4);
- const similarity = this.cosineSimilarity(queryVec, vector);
- return { ...chunk, similarity, embedding: undefined };
+ const vectorSimilarity = this.cosineSimilarity(Array.from(queryVec), vector);
+ // Heuristic Reranking: Blended score with keyword overlap
+ const keywords = query.toLowerCase().split(/\s+/);
+ const text = chunk.content_summary.toLowerCase();
+ let keywordScore = 0;
+ for (const kw of keywords)
+ if (text.includes(kw))
+ keywordScore += 0.1;
+ const similarity = (vectorSimilarity * 0.7) + (Math.min(0.3, keywordScore));
+ return { ...chunk, similarity, vectorSimilarity, keywordScore, embedding: undefined };
  });
  results.sort((a, b) => b.similarity - a.similarity);
  return results.slice(0, limit);
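Note on the reranking heuristic added in the hunk above: the final score blends cosine similarity (weight 0.7) with a keyword-overlap bonus of 0.1 per matched query term, capped at 0.3. As an illustrative calculation (numbers not taken from the package): a chunk with cosine similarity 0.8 whose summary contains three of the query's keywords scores 0.7 * 0.8 + Math.min(0.3, 3 * 0.1) = 0.86.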
@@ -384,37 +551,44 @@ export function getDb() {
  export function graphTools() {
  return {
  autognosis_setup_ai: tool({
- description: "Configure local AI capabilities (Ollama) in the background.",
- args: { model: tool.schema.string().optional().default(DEFAULT_EMBEDDING_MODEL) },
- async execute({ model }) {
+ description: "Configure local AI capabilities (Ollama or MLX) in the background.",
+ args: {
+ provider: tool.schema.enum(["ollama", "mlx"]).optional().default("ollama").describe("AI Provider to use"),
+ model: tool.schema.string().optional().describe("Model name (optional override)")
+ },
+ async execute({ provider, model }) {
  const jobId = `job-setup-ai-${Date.now()}`;
- getDb().createJob(jobId, "setup", { model });
+ getDb().createJob(jobId, "setup", { provider, model });
  (async () => {
  try {
  getDb().updateJob(jobId, { status: "running", progress: 10 });
- if (!(await ollama.isInstalled())) {
- await ollama.install();
+ await tui.showProgress("AI Setup", 10, `Initializing ${provider}...`);
+ if (provider === "mlx") {
+ await mlxService.setup();
+ getDb().updateJob(jobId, { status: "completed", progress: 100, result: "MLX is ready." });
+ await tui.showSuccess("AI Setup Complete", "MLX backend is ready.");
+ }
+ else {
+ if (!(await ollama.isInstalled())) {
+ await tui.showProgress("AI Setup", 20, "Downloading Ollama...");
+ await ollama.install();
+ }
+ getDb().updateJob(jobId, { progress: 40 });
+ await tui.showProgress("AI Setup", 40, "Starting Ollama server...");
+ await ollama.startServer();
+ getDb().updateJob(jobId, { progress: 60 });
+ await tui.showProgress("AI Setup", 60, `Pulling model: ${model || DEFAULT_EMBEDDING_MODEL}...`);
+ await ollama.pullModel(model || DEFAULT_EMBEDDING_MODEL);
+ getDb().updateJob(jobId, { status: "completed", progress: 100, result: `Ollama (${model || DEFAULT_EMBEDDING_MODEL}) is ready.` });
+ await tui.showSuccess("AI Setup Complete", `Ollama (${model || DEFAULT_EMBEDDING_MODEL}) is ready.`);
  }
- getDb().updateJob(jobId, { progress: 40 });
- await ollama.startServer();
- getDb().updateJob(jobId, { progress: 60 });
- await ollama.pullModel(model);
- getDb().updateJob(jobId, {
- status: "completed",
- progress: 100,
- result: `Model ${model} is ready.`
- });
  }
  catch (error) {
  getDb().updateJob(jobId, { status: "failed", error: error.message });
+ await tui.showError("AI Setup Failed", error.message);
  }
  })();
- return JSON.stringify({
- status: "STARTED",
- message: "AI Setup started in background.",
- job_id: jobId,
- instruction: "Use graph_background_status to check progress."
- }, null, 2);
+ return JSON.stringify({ status: "STARTED", message: `AI Setup (${provider}) started in background.`, job_id: jobId, instruction: "Use graph_background_status to check progress." }, null, 2);
  }
  }),
  graph_semantic_search: tool({
@@ -521,9 +695,8 @@ export function graphTools() {
  },
  async execute({ job_id, type, limit }) {
  try {
- if (job_id) {
+ if (job_id)
  return JSON.stringify({ status: "SUCCESS", job: getDb().getJob(job_id) }, null, 2);
- }
  return JSON.stringify({ status: "SUCCESS", jobs: getDb().listJobs(type, limit) }, null, 2);
  }
  catch (error) {
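The database.js changes above center on three things: a multi-runtime adapter that loads bun:sqlite under Bun and better-sqlite3 under Node, new multi-agent coordination tables (blackboard, locks, intents, arch_rules, calls, context_access_log) with matching CodeGraphDB methods, and an embedding worker that prefers MLX when available. A minimal, hypothetical usage sketch of the coordination methods follows; it assumes the caller can import getDb from this module, and the agent name and resource path are illustrative only.

// Sketch only: exercises the lock and blackboard APIs defined in the diff above.
import { getDb } from "./database.js";

const db = getDb();
try {
  // Advisory lock with a 120-second TTL; isLocked() prunes expired rows before reporting,
  // and acquireLock() throws if another agent currently holds the resource.
  db.acquireLock("src/services/payment.ts", "agent-alpha", 120);
  // Leave a note other agents can read via readBlackboard() or getGraffiti().
  db.postToBlackboard("agent-alpha", "Refactoring retry logic", "refactor", "src/services/payment.ts");
}
finally {
  // Only the row owned by this agent is deleted.
  db.releaseLock("src/services/payment.ts", "agent-alpha");
}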
package/dist/index.d.ts CHANGED
@@ -1,6 +1,11 @@
- export declare const AutognosisPlugin: () => Promise<{
+ export declare const AutognosisPlugin: ({ client }: any) => Promise<{
  tool: {
  [key: string]: any;
  };
+ "experimental.session.compacting": (input: {
+ sessionID: string;
+ }, output: {
+ context: string[];
+ }) => Promise<void>;
  }>;
  export default AutognosisPlugin;
package/dist/index.js CHANGED
@@ -1,9 +1,40 @@
  import { unifiedTools } from "./unified-api.js";
- export const AutognosisPlugin = async () => {
+ import { loadWorkingMemory, loadActiveSet } from "./activeset.js";
+ import { tui } from "./services/tui.js";
+ import { codeWatcher } from "./services/watcher.js";
+ export const AutognosisPlugin = async ({ client }) => {
+ // Initialize TUI service for progress streaming
+ tui.setClient(client);
+ // Start live file watcher
+ codeWatcher.start();
  return {
  tool: {
  ...unifiedTools(),
  },
+ "experimental.session.compacting": async (input, output) => {
+ try {
+ const memory = await loadWorkingMemory();
+ if (memory.current_set) {
+ const activeSet = await loadActiveSet(memory.current_set);
+ if (activeSet) {
+ const stateBlock = `
+ [AUTOGNOSIS CONTEXT PRESERVATION]
+ ActiveSet ID: ${activeSet.id}
+ ActiveSet Name: ${activeSet.name}
+ Priority: ${activeSet.priority}
+ Loaded Chunks: ${activeSet.chunks.join(", ")}
+ Metadata: ${JSON.stringify(activeSet.metadata)}
+
+ The agent is currently focused on these files and symbols. Ensure the summary reflects this active working memory state.
+ `;
+ output.context.push(stateBlock);
+ }
+ }
+ }
+ catch (error) {
+ // Fail silently during compaction to avoid breaking the core session
+ }
+ }
  };
  };
  export default AutognosisPlugin;
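Taken together with the index.d.ts change, the plugin now accepts a { client } argument and exposes an "experimental.session.compacting" handler typed as (input: { sessionID: string }, output: { context: string[] }) => Promise<void>. A hypothetical host-side invocation is sketched below to show the data flow; the client object and session id are placeholders, not values defined by this package.

// Sketch only: drives the compaction hook the way a host would, per the declared signature.
const plugin = await AutognosisPlugin({ client });
const output = { context: [] };
await plugin["experimental.session.compacting"]({ sessionID: "session-123" }, output);
// If a working-memory ActiveSet is loaded, output.context now contains the
// [AUTOGNOSIS CONTEXT PRESERVATION] block built in index.js above.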
@@ -1,3 +1,4 @@
  export declare function performanceTools(): {
  [key: string]: any;
  };
+ export declare function indexFile(filePath: string): Promise<void>;