opencode-autognosis 2.0.1 → 2.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/activeset.js CHANGED
@@ -3,12 +3,13 @@ import * as fs from "node:fs/promises";
3
3
  import * as fsSync from "node:fs";
4
4
  import * as path from "node:path";
5
5
  import * as crypto from "node:crypto";
6
+ import { Logger } from "./services/logger.js";
6
7
  const PROJECT_ROOT = process.cwd();
7
8
  const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
8
9
  const ACTIVESET_DIR = path.join(OPENCODE_DIR, "activesets");
9
10
  // Internal logging
10
11
  function log(message, data) {
11
- console.error(`[ActiveSet] ${message}`, data || '');
12
+ Logger.log("ActiveSet", message, data);
12
13
  }
13
14
  // =============================================================================
14
15
  // HELPERS
@@ -6,6 +6,8 @@ import * as path from "node:path";
6
6
  import { promisify } from "node:util";
7
7
  import * as crypto from "node:crypto";
8
8
  import ts from "typescript";
9
+ import { getDb } from "./database.js";
10
+ import { Logger } from "./services/logger.js";
9
11
  const execAsync = promisify(exec);
10
12
  const PROJECT_ROOT = process.cwd();
11
13
  const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
@@ -13,7 +15,7 @@ export const CHUNK_DIR = path.join(OPENCODE_DIR, "chunks");
13
15
  const CACHE_DIR = path.join(OPENCODE_DIR, "cache");
14
16
  // Internal logging
15
17
  function log(message, data) {
16
- console.error(`[ChunkCards] ${message}`, data || '');
18
+ Logger.log("ChunkCards", message, data);
17
19
  }
18
20
  // =============================================================================
19
21
  // HELPERS
@@ -158,6 +160,8 @@ export function chunkCardsTools() {
158
160
  };
159
161
  // Save chunk card
160
162
  await fs.writeFile(cardPath, JSON.stringify(chunkCard, null, 2));
163
+ // Sync to SQLite Index
164
+ getDb().ingestChunkCard(chunkCard);
161
165
  return JSON.stringify({
162
166
  status: "SUCCESS",
163
167
  card: chunkCard,
@@ -297,6 +301,8 @@ export function chunkCardsTools() {
297
301
  }, null, 2);
298
302
  }
299
303
  await fs.unlink(cardPath);
304
+ // Remove from SQLite Index
305
+ getDb().deleteChunkCard(card_id);
300
306
  return JSON.stringify({
301
307
  status: "SUCCESS",
302
308
  message: `Card deleted: ${card_id}`
@@ -0,0 +1,56 @@
1
import type { ChunkCard } from "./chunk-cards.js";
/**
 * SQLite-backed index of the code graph: indexed files, chunk cards,
 * symbols, dependencies, git commit history, a plan-execution ledger and
 * background jobs (including an embedding queue drained by a worker).
 */
export declare class CodeGraphDB {
    private db;
    private workerRunning;
    constructor();
    private initialize;
    /**
     * Background Job Management
     */
    createJob(id: string, type: string, metadata?: any): void;
    updateJob(id: string, updates: {
        status?: string;
        progress?: number;
        result?: string;
        error?: string;
    }): void;
    getJob(id: string): unknown;
    listJobs(type?: string, limit?: number): unknown[];
    private startWorker;
    private processEmbeddingQueue;
    /**
     * Syncs a ChunkCard (JSON) into the SQLite Index.
     */
    ingestChunkCard(card: ChunkCard): void;
    /** Removes a chunk row; dependent symbol/dependency/queue rows cascade. */
    deleteChunkCard(cardId: string): void;
    /** Appends one tool execution to the plan ledger (plan_id defaults to 'no-plan'). */
    recordExecution(planId: string | undefined, toolName: string, args: any, isOnPlan: boolean): void;
    /** Bulk-inserts commit records; existing hashes are skipped. */
    ingestCommits(commits: any[]): void;
    /** Counts file occurrences across the 100 most recent ingested commits. */
    getHotFiles(pathPrefix?: string, limit?: number): {
        path: string;
        count: number;
    }[];
    /** Ledger totals for a plan; compliance is on_plan/total as a 0-100 percentage. */
    getPlanMetrics(planId: string): {
        total: number;
        on_plan: number;
        off_plan: number;
        compliance: number;
    };
    /** Files whose chunks declare a dependency matching the given file's basename. */
    findDependents(filePath: string): string[];
    /** Fuzzy (LIKE) symbol-name search, capped at 20 rows. */
    searchSymbols(query: string): any[];
    /** Vector search over stored embeddings; requires a running Ollama server. */
    semanticSearch(query: string, limit?: number): Promise<any[]>;
    private cosineSimilarity;
    /** Row counts per table plus embedding completed/pending totals. */
    getStats(): {
        files: number;
        chunks: number;
        symbols: number;
        dependencies: number;
        embeddings: {
            completed: number;
            pending: number;
        };
    };
}
/** Returns the lazily-created process-wide CodeGraphDB singleton. */
export declare function getDb(): CodeGraphDB;
/** Tool map exposing graph/journal/job operations to the plugin host. */
export declare function graphTools(): {
    [key: string]: any;
};
@@ -0,0 +1,535 @@
1
+ import Database from "better-sqlite3";
2
+ import * as path from "node:path";
3
+ import * as fs from "node:fs";
4
+ import { tool } from "@opencode-ai/plugin";
5
+ import { ollama, DEFAULT_EMBEDDING_MODEL } from "./services/ollama.js";
6
+ const PROJECT_ROOT = process.cwd();
7
+ const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
8
+ const DB_PATH = path.join(OPENCODE_DIR, "autognosis.db");
9
/**
 * SQLite-backed index of the code graph.
 *
 * Owns a single better-sqlite3 connection (WAL mode) holding: indexed
 * files, chunk cards, symbols, dependencies, git commits, a plan-execution
 * ledger, background jobs, and an embedding queue that a periodic worker
 * drains through the local Ollama service.
 */
export class CodeGraphDB {
    db;
    workerRunning = false;
    constructor() {
        // Ensure directory exists
        if (!fs.existsSync(OPENCODE_DIR)) {
            fs.mkdirSync(OPENCODE_DIR, { recursive: true });
        }
        this.db = new Database(DB_PATH);
        this.initialize();
        // Start background worker
        this.startWorker();
    }
    /** Creates the schema (idempotent) and applies in-place migrations. */
    initialize() {
        // Enable WAL mode for concurrency and performance
        this.db.pragma('journal_mode = WAL');
        this.db.exec(`
      CREATE TABLE IF NOT EXISTS files (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        path TEXT UNIQUE NOT NULL,
        hash TEXT,
        last_indexed DATETIME DEFAULT CURRENT_TIMESTAMP
      );

      CREATE TABLE IF NOT EXISTS chunks (
        id TEXT PRIMARY KEY,
        file_id INTEGER,
        type TEXT,
        complexity_score REAL,
        content_summary TEXT,
        embedding BLOB,
        FOREIGN KEY(file_id) REFERENCES files(id) ON DELETE CASCADE
      );

      CREATE TABLE IF NOT EXISTS embedding_queue (
        chunk_id TEXT PRIMARY KEY,
        text_to_embed TEXT,
        status TEXT DEFAULT 'pending', -- pending, processing, failed
        retries INTEGER DEFAULT 0,
        FOREIGN KEY(chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
      );

      CREATE TABLE IF NOT EXISTS symbols (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        chunk_id TEXT,
        name TEXT NOT NULL,
        kind TEXT, -- 'function', 'class', 'interface', etc.
        FOREIGN KEY(chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
      );

      CREATE TABLE IF NOT EXISTS dependencies (
        source_chunk_id TEXT,
        target_path TEXT,
        FOREIGN KEY(source_chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
      );

      CREATE TABLE IF NOT EXISTS commits (
        hash TEXT PRIMARY KEY,
        author TEXT,
        date DATETIME,
        message TEXT,
        files_touched TEXT -- JSON array of paths
      );

      CREATE TABLE IF NOT EXISTS plan_ledger (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        plan_id TEXT,
        tool_name TEXT,
        args TEXT,
        is_on_plan BOOLEAN,
        timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
      );

      CREATE TABLE IF NOT EXISTS background_jobs (
        id TEXT PRIMARY KEY,
        type TEXT, -- 'validation', 'setup', 'indexing'
        status TEXT DEFAULT 'pending', -- pending, running, completed, failed
        progress INTEGER DEFAULT 0,
        result TEXT,
        error TEXT,
        created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
        updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
      );

      -- Indexes for performance
      CREATE INDEX IF NOT EXISTS idx_files_path ON files(path);
      CREATE INDEX IF NOT EXISTS idx_symbols_name ON symbols(name);
      CREATE INDEX IF NOT EXISTS idx_dependencies_target ON dependencies(target_path);
      CREATE INDEX IF NOT EXISTS idx_ledger_plan ON plan_ledger(plan_id);
      CREATE INDEX IF NOT EXISTS idx_jobs_status ON background_jobs(status);
    `);
        // Migrations: older databases pre-date the embedding column; the
        // ALTER fails harmlessly when the column already exists.
        try {
            this.db.exec("ALTER TABLE chunks ADD COLUMN embedding BLOB");
        }
        catch { }
    }
    /**
     * Background Job Management
     */
    // NOTE(review): metadata is serialized into the `result` column; it is
    // overwritten by the final result when the job completes.
    createJob(id, type, metadata) {
        this.db.prepare(`
      INSERT INTO background_jobs (id, type, status, progress, result)
      VALUES (?, ?, 'pending', 0, ?)
    `).run(id, type, metadata ? JSON.stringify(metadata) : null);
    }
    /** Partially updates a job row; always bumps updated_at. */
    updateJob(id, updates) {
        const sets = [];
        const params = [];
        if (updates.status) {
            sets.push("status = ?");
            params.push(updates.status);
        }
        // progress may legitimately be 0, so test against undefined, not truthiness
        if (updates.progress !== undefined) {
            sets.push("progress = ?");
            params.push(updates.progress);
        }
        if (updates.result) {
            sets.push("result = ?");
            params.push(updates.result);
        }
        if (updates.error) {
            sets.push("error = ?");
            params.push(updates.error);
        }
        sets.push("updated_at = CURRENT_TIMESTAMP");
        params.push(id);
        this.db.prepare(`UPDATE background_jobs SET ${sets.join(", ")} WHERE id = ?`).run(...params);
    }
    getJob(id) {
        return this.db.prepare("SELECT * FROM background_jobs WHERE id = ?").get(id);
    }
    listJobs(type, limit = 10) {
        if (type) {
            return this.db.prepare("SELECT * FROM background_jobs WHERE type = ? ORDER BY created_at DESC LIMIT ?").all(type, limit);
        }
        return this.db.prepare("SELECT * FROM background_jobs ORDER BY created_at DESC LIMIT ?").all(limit);
    }
    /** Starts the periodic embedding worker; safe to call more than once. */
    async startWorker() {
        if (this.workerRunning)
            return;
        this.workerRunning = true;
        // Run periodically. The interval is intentionally never cleared:
        // the worker lives for the lifetime of the process.
        setInterval(async () => {
            try {
                await this.processEmbeddingQueue();
            }
            catch (e) {
                // Log to file if needed, but avoid console to protect TUI
            }
        }, 5000); // Check every 5s
    }
    /**
     * Drains one pending embedding task per invocation. Rows are retried
     * up to 4 times before being marked 'failed'.
     * NOTE(review): a crash between 'processing' and completion leaves the
     * row stuck in 'processing' — confirm whether a sweeper is needed.
     */
    async processEmbeddingQueue() {
        // Check if Ollama is ready
        if (!(await ollama.isRunning()))
            return;
        // Get next task
        const task = this.db.prepare(`
      SELECT chunk_id, text_to_embed, retries
      FROM embedding_queue
      WHERE status = 'pending'
      ORDER BY rowid ASC
      LIMIT 1
    `).get();
        if (!task)
            return;
        // Mark processing
        this.db.prepare("UPDATE embedding_queue SET status = 'processing' WHERE chunk_id = ?").run(task.chunk_id);
        try {
            // Generate embedding
            const vector = await ollama.getEmbedding(task.text_to_embed);
            if (vector.length > 0) {
                // Store blob (Float32Array to Buffer)
                const buffer = Buffer.from(new Float32Array(vector).buffer);
                const updateChunk = this.db.prepare("UPDATE chunks SET embedding = ? WHERE id = ?");
                const deleteQueue = this.db.prepare("DELETE FROM embedding_queue WHERE chunk_id = ?");
                const txn = this.db.transaction(() => {
                    updateChunk.run(buffer, task.chunk_id);
                    deleteQueue.run(task.chunk_id);
                });
                txn();
            }
            else {
                throw new Error("Empty vector returned");
            }
        }
        catch (error) {
            if (task.retries > 3) {
                this.db.prepare("UPDATE embedding_queue SET status = 'failed' WHERE chunk_id = ?").run(task.chunk_id);
            }
            else {
                this.db.prepare("UPDATE embedding_queue SET status = 'pending', retries = retries + 1 WHERE chunk_id = ?").run(task.chunk_id);
            }
        }
    }
    /**
     * Syncs a ChunkCard (JSON) into the SQLite Index.
     */
    ingestChunkCard(card) {
        const insertFile = this.db.prepare(`
      INSERT INTO files (path, hash, last_indexed)
      VALUES (?, ?, CURRENT_TIMESTAMP)
      ON CONFLICT(path) DO UPDATE SET
        hash = excluded.hash,
        last_indexed = CURRENT_TIMESTAMP
      RETURNING id
    `);
        const insertChunk = this.db.prepare(`
      INSERT INTO chunks (id, file_id, type, complexity_score, content_summary)
      VALUES (?, ?, ?, ?, ?)
      ON CONFLICT(id) DO UPDATE SET
        complexity_score = excluded.complexity_score,
        content_summary = excluded.content_summary
    `);
        const queueEmbedding = this.db.prepare(`
      INSERT INTO embedding_queue (chunk_id, text_to_embed)
      VALUES (?, ?)
      ON CONFLICT(chunk_id) DO UPDATE SET
        text_to_embed = excluded.text_to_embed,
        status = 'pending',
        retries = 0
    `);
        const insertSymbol = this.db.prepare(`
      INSERT INTO symbols (chunk_id, name, kind) VALUES (?, ?, 'unknown')
    `);
        const insertDep = this.db.prepare(`
      INSERT INTO dependencies (source_chunk_id, target_path) VALUES (?, ?)
    `);
        const deleteOldSymbols = this.db.prepare('DELETE FROM symbols WHERE chunk_id = ?');
        const deleteOldDeps = this.db.prepare('DELETE FROM dependencies WHERE source_chunk_id = ?');
        // One transaction so a partially-ingested card never becomes visible.
        const transaction = this.db.transaction(() => {
            const fileRes = insertFile.get(card.file_path, card.metadata.hash);
            const fileId = fileRes.id;
            insertChunk.run(card.id, fileId, card.chunk_type, card.metadata.complexity_score, card.content.slice(0, 500));
            const textToEmbed = `${card.chunk_type.toUpperCase()} for ${path.basename(card.file_path)}

${card.content.slice(0, 2000)}`;
            queueEmbedding.run(card.id, textToEmbed);
            // Symbols/dependencies are replaced wholesale on re-ingest.
            deleteOldSymbols.run(card.id);
            for (const sym of card.metadata.symbols) {
                insertSymbol.run(card.id, sym);
            }
            deleteOldDeps.run(card.id);
            for (const dep of card.metadata.dependencies) {
                insertDep.run(card.id, dep);
            }
        });
        transaction();
    }
    /** Deletes a chunk; symbols/dependencies/queue rows cascade via FK. */
    deleteChunkCard(cardId) {
        this.db.prepare('DELETE FROM chunks WHERE id = ?').run(cardId);
    }
    /** Appends a tool execution to the plan ledger ('no-plan' when planId is absent). */
    recordExecution(planId, toolName, args, isOnPlan) {
        this.db.prepare(`
      INSERT INTO plan_ledger (plan_id, tool_name, args, is_on_plan)
      VALUES (?, ?, ?, ?)
    `).run(planId || 'no-plan', toolName, JSON.stringify(args), isOnPlan ? 1 : 0);
    }
    /** Bulk-inserts commit records; duplicate hashes are ignored. */
    ingestCommits(commits) {
        const insert = this.db.prepare(`
      INSERT INTO commits (hash, author, date, message, files_touched)
      VALUES (?, ?, ?, ?, ?)
      ON CONFLICT(hash) DO NOTHING
    `);
        const transaction = this.db.transaction((data) => {
            for (const c of data) {
                insert.run(c.hash, c.author, c.date, c.message, JSON.stringify(c.files));
            }
        });
        transaction(commits);
    }
    /** Counts file occurrences across the 100 most recent commits, sorted descending. */
    getHotFiles(pathPrefix = '', limit = 10) {
        const recent = this.db.prepare(`
      SELECT files_touched FROM commits ORDER BY date DESC LIMIT 100
    `).all();
        const counts = {};
        for (const r of recent) {
            try {
                const files = JSON.parse(r.files_touched);
                for (const f of files) {
                    if (f.startsWith(pathPrefix)) {
                        counts[f] = (counts[f] || 0) + 1;
                    }
                }
            }
            catch { }
        }
        return Object.entries(counts)
            .map(([path, count]) => ({ path, count }))
            .sort((a, b) => b.count - a.count)
            .slice(0, limit);
    }
    /** Ledger totals for one plan; compliance is on_plan/total as a 0-100 %. */
    getPlanMetrics(planId) {
        const total = this.db.prepare("SELECT COUNT(*) as c FROM plan_ledger WHERE plan_id = ?").get(planId);
        const onPlan = this.db.prepare("SELECT COUNT(*) as c FROM plan_ledger WHERE plan_id = ? AND is_on_plan = 1").get(planId);
        const offPlan = this.db.prepare("SELECT COUNT(*) as c FROM plan_ledger WHERE plan_id = ? AND is_on_plan = 0").get(planId);
        return {
            total: total.c,
            on_plan: onPlan.c,
            off_plan: offPlan.c,
            compliance: total.c > 0 ? Math.round((onPlan.c / total.c) * 100) : 100
        };
    }
    /** Files whose chunks depend on a path matching the given file's basename. */
    findDependents(filePath) {
        const query = this.db.prepare(`
      SELECT DISTINCT f.path
      FROM files f
      JOIN chunks c ON f.id = c.file_id
      JOIN dependencies d ON c.id = d.source_chunk_id
      WHERE d.target_path LIKE ? OR d.target_path = ?
    `);
        const basename = path.basename(filePath);
        const results = query.all(`%/${basename}%`, basename);
        return results.map(r => r.path);
    }
    /** Fuzzy (LIKE) symbol search, capped at 20 rows. */
    searchSymbols(query) {
        const stmt = this.db.prepare(`
      SELECT s.name, c.type, f.path
      FROM symbols s
      JOIN chunks c ON s.chunk_id = c.id
      JOIN files f ON c.file_id = f.id
      WHERE s.name LIKE ?
      LIMIT 20
    `);
        return stmt.all(`%${query}%`);
    }
    /**
     * Brute-force vector search: embeds the query via Ollama, scores every
     * stored embedding by cosine similarity, returns the top `limit` rows
     * (without the raw embedding blob).
     */
    async semanticSearch(query, limit = 10) {
        if (!(await ollama.isRunning())) {
            throw new Error("Ollama is not running. Please run 'autognosis_setup_ai' first.");
        }
        const queryVec = await ollama.getEmbedding(query);
        if (queryVec.length === 0)
            return [];
        const chunks = this.db.prepare(`
      SELECT c.id, c.content_summary, c.type, f.path, c.embedding
      FROM chunks c
      JOIN files f ON c.file_id = f.id
      WHERE c.embedding IS NOT NULL
    `).all();
        const results = chunks.map(chunk => {
            // Reinterpret the stored BLOB in place as float32 (4 bytes/element)
            const vector = new Float32Array(chunk.embedding.buffer, chunk.embedding.byteOffset, chunk.embedding.byteLength / 4);
            const similarity = this.cosineSimilarity(queryVec, vector);
            return { ...chunk, similarity, embedding: undefined };
        });
        results.sort((a, b) => b.similarity - a.similarity);
        return results.slice(0, limit);
    }
    /** Cosine similarity of two equal-length numeric vectors, in [-1, 1]. */
    cosineSimilarity(vecA, vecB) {
        let dot = 0, normA = 0, normB = 0;
        for (let i = 0; i < vecA.length; i++) {
            dot += vecA[i] * vecB[i];
            normA += vecA[i] * vecA[i];
            normB += vecB[i] * vecB[i];
        }
        const denom = Math.sqrt(normA) * Math.sqrt(normB);
        // Fix: the original divided unconditionally, so a zero vector (or an
        // empty one) produced NaN, which poisoned the similarity sort in
        // semanticSearch. A zero vector has no direction; score it 0.
        return denom === 0 ? 0 : dot / denom;
    }
    /** Row counts per table plus embedding completed/pending totals. */
    getStats() {
        const files = this.db.prepare('SELECT COUNT(*) as c FROM files').get();
        const symbols = this.db.prepare('SELECT COUNT(*) as c FROM symbols').get();
        const deps = this.db.prepare('SELECT COUNT(*) as c FROM dependencies').get();
        const chunks = this.db.prepare('SELECT COUNT(*) as c FROM chunks').get();
        const embedded = this.db.prepare('SELECT COUNT(*) as c FROM chunks WHERE embedding IS NOT NULL').get();
        const queue = this.db.prepare("SELECT COUNT(*) as c FROM embedding_queue WHERE status = 'pending'").get();
        return {
            files: files.c, chunks: chunks.c, symbols: symbols.c, dependencies: deps.c,
            embeddings: { completed: embedded.c, pending: queue.c }
        };
    }
}
378
// Process-wide singleton handle; created on first use so that merely
// importing this module does not open the database.
let dbInstance = null;
/** Returns the shared CodeGraphDB, constructing it on first call. */
export function getDb() {
    if (dbInstance === null) {
        dbInstance = new CodeGraphDB();
    }
    return dbInstance;
}
384
/**
 * Builds the tool map exposed to the plugin host: AI setup, semantic and
 * symbol search, dependency queries, git change-journal operations, plan
 * metrics and background-job status. Every tool returns a JSON string.
 */
export function graphTools() {
    return {
        autognosis_setup_ai: tool({
            description: "Configure local AI capabilities (Ollama) in the background.",
            args: { model: tool.schema.string().optional().default(DEFAULT_EMBEDDING_MODEL) },
            async execute({ model }) {
                const jobId = `job-setup-ai-${Date.now()}`;
                getDb().createJob(jobId, "setup", { model });
                // Fire-and-forget: progress is reported through the job row,
                // so the tool can return immediately.
                (async () => {
                    try {
                        getDb().updateJob(jobId, { status: "running", progress: 10 });
                        if (!(await ollama.isInstalled())) {
                            await ollama.install();
                        }
                        getDb().updateJob(jobId, { progress: 40 });
                        await ollama.startServer();
                        getDb().updateJob(jobId, { progress: 60 });
                        await ollama.pullModel(model);
                        getDb().updateJob(jobId, {
                            status: "completed",
                            progress: 100,
                            result: `Model ${model} is ready.`
                        });
                    }
                    catch (error) {
                        // Fix: `error.message` is undefined for non-Error throws,
                        // which recorded a failed job with no reason.
                        getDb().updateJob(jobId, {
                            status: "failed",
                            error: error instanceof Error ? error.message : String(error)
                        });
                    }
                })();
                return JSON.stringify({
                    status: "STARTED",
                    message: "AI Setup started in background.",
                    job_id: jobId,
                    instruction: "Use graph_background_status to check progress."
                }, null, 2);
            }
        }),
        graph_semantic_search: tool({
            description: "Search the codebase using natural language (Vector/Semantic Search).",
            args: { query: tool.schema.string(), limit: tool.schema.number().optional().default(10) },
            async execute({ query, limit }) {
                try {
                    const results = await getDb().semanticSearch(query, limit);
                    return JSON.stringify({ status: "SUCCESS", query, results }, null, 2);
                }
                catch (error) {
                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
                }
            }
        }),
        graph_query_dependents: tool({
            description: "Find all files that depend on a specific file.",
            args: { file_path: tool.schema.string() },
            async execute({ file_path }) {
                try {
                    const dependents = getDb().findDependents(file_path);
                    return JSON.stringify({ status: "SUCCESS", file_path, dependents, count: dependents.length }, null, 2);
                }
                catch (error) {
                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
                }
            }
        }),
        graph_search_symbols: tool({
            description: "Fast fuzzy search for symbols across the entire codebase index.",
            args: { query: tool.schema.string() },
            async execute({ query }) {
                try {
                    const results = getDb().searchSymbols(query);
                    return JSON.stringify({ status: "SUCCESS", query, results, count: results.length }, null, 2);
                }
                catch (error) {
                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
                }
            }
        }),
        graph_stats: tool({
            description: "Get statistics about the Code Graph Index.",
            args: {},
            async execute() {
                try {
                    return JSON.stringify({ status: "SUCCESS", stats: getDb().getStats() }, null, 2);
                }
                catch (error) {
                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
                }
            }
        }),
        journal_build: tool({
            description: "Scan git history and populate the Change Journal.",
            args: { limit: tool.schema.number().optional().default(100) },
            async execute({ limit }) {
                try {
                    const { execSync } = await import("node:child_process");
                    const logOut = execSync(`git log -n ${limit} --pretty=format:"%H|%an|%ad|%s" --date=iso`, { encoding: 'utf-8' });
                    // NOTE(review): one `git show` per commit (N+1 subprocesses);
                    // acceptable at the default limit of 100, revisit if raised.
                    const commits = logOut.split('\n').filter(Boolean).map(line => {
                        const [hash, author, date, message] = line.split('|');
                        const files = execSync(`git show --name-only --pretty="" ${hash}`, { encoding: 'utf-8' }).split('\n').filter(Boolean);
                        return { hash, author, date, message, files };
                    });
                    getDb().ingestCommits(commits);
                    return JSON.stringify({ status: "SUCCESS", message: `Ingested ${commits.length} commits.` }, null, 2);
                }
                catch (error) {
                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
                }
            }
        }),
        journal_query_hot_files: tool({
            description: "Query the Change Journal for frequently changed files.",
            args: { path_prefix: tool.schema.string().optional().default(""), limit: tool.schema.number().optional().default(10) },
            async execute({ path_prefix, limit }) {
                try {
                    return JSON.stringify({ status: "SUCCESS", hot_files: getDb().getHotFiles(path_prefix, limit) }, null, 2);
                }
                catch (error) {
                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
                }
            }
        }),
        graph_get_plan_metrics: tool({
            description: "Retrieve execution metrics for a specific plan ID.",
            args: { plan_id: tool.schema.string() },
            async execute({ plan_id }) {
                try {
                    return JSON.stringify({ status: "SUCCESS", plan_id, metrics: getDb().getPlanMetrics(plan_id) }, null, 2);
                }
                catch (error) {
                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
                }
            }
        }),
        graph_background_status: tool({
            description: "Check status of background tasks (validation, setup, indexing).",
            args: {
                job_id: tool.schema.string().optional(),
                type: tool.schema.enum(["validation", "setup", "indexing"]).optional(),
                limit: tool.schema.number().optional().default(5)
            },
            async execute({ job_id, type, limit }) {
                try {
                    if (job_id) {
                        return JSON.stringify({ status: "SUCCESS", job: getDb().getJob(job_id) }, null, 2);
                    }
                    return JSON.stringify({ status: "SUCCESS", jobs: getDb().listJobs(type, limit) }, null, 2);
                }
                catch (error) {
                    return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
                }
            }
        })
    };
}
@@ -5,13 +5,14 @@ import * as fsSync from "node:fs";
5
5
  import * as path from "node:path";
6
6
  import { promisify } from "node:util";
7
7
  import * as crypto from "node:crypto";
8
+ import { Logger } from "./services/logger.js";
8
9
  const execAsync = promisify(exec);
9
10
  const PROJECT_ROOT = process.cwd();
10
11
  const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
11
12
  const WORKTREE_DIR = path.join(OPENCODE_DIR, "worktrees");
12
13
  // Internal logging
13
14
  function log(message, data) {
14
- console.error(`[GitWorktree] ${message}`, data || '');
15
+ Logger.log("GitWorktree", message, data);
15
16
  }
16
17
  // =============================================================================
17
18
  // HELPERS
@@ -202,8 +203,8 @@ export function gitWorktreeTools() {
202
203
  }, null, 2);
203
204
  }
204
205
  // Create initial commit if needed
205
- const { stdout: log } = await runCmd("git log --oneline -1");
206
- if (!log || log.includes("Initial commit")) {
206
+ const { stdout: gitLog } = await runCmd("git log --oneline -1");
207
+ if (!gitLog || gitLog.includes("Initial commit")) {
207
208
  await runCmd(`git commit --allow-empty -m "${message}"`);
208
209
  }
209
210
  let worktreePath = null;
@@ -214,7 +215,7 @@ export function gitWorktreeTools() {
214
215
  if (fsSync.existsSync(worktreePath)) {
215
216
  const { error: removeError } = await runCmd(`git worktree remove ${worktreePath}`);
216
217
  if (removeError) {
217
- console.error("[GitWorktree] Warning: Failed to remove existing worktree", removeError);
218
+ log("Warning: Failed to remove existing worktree", removeError);
218
219
  }
219
220
  }
220
221
  // Create new worktree
package/dist/index.d.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  export declare const AutognosisPlugin: () => Promise<{
2
2
  tool: {
3
- [x: string]: any;
3
+ [key: string]: any;
4
4
  };
5
5
  }>;
6
6
  export default AutognosisPlugin;