opencode-autognosis 2.0.0 → 2.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/database.js ADDED
@@ -0,0 +1,399 @@
+ import Database from "better-sqlite3";
+ import * as path from "node:path";
+ import * as fs from "node:fs";
+ import { tool } from "@opencode-ai/plugin";
+ import { ollama, DEFAULT_EMBEDDING_MODEL } from "./services/ollama.js";
+ const PROJECT_ROOT = process.cwd();
+ const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
+ const DB_PATH = path.join(OPENCODE_DIR, "autognosis.db");
+ export class CodeGraphDB {
+     db;
+     workerRunning = false;
+     constructor() {
+         // Ensure directory exists
+         if (!fs.existsSync(OPENCODE_DIR)) {
+             fs.mkdirSync(OPENCODE_DIR, { recursive: true });
+         }
+         this.db = new Database(DB_PATH);
+         this.initialize();
+         // Start background worker
+         this.startWorker();
+     }
+     initialize() {
+         // Enable WAL mode for concurrency and performance
+         this.db.pragma('journal_mode = WAL');
+         this.db.exec(`
+             CREATE TABLE IF NOT EXISTS files (
+                 id INTEGER PRIMARY KEY AUTOINCREMENT,
+                 path TEXT UNIQUE NOT NULL,
+                 hash TEXT,
+                 last_indexed DATETIME DEFAULT CURRENT_TIMESTAMP
+             );
+
+             CREATE TABLE IF NOT EXISTS chunks (
+                 id TEXT PRIMARY KEY,
+                 file_id INTEGER,
+                 type TEXT,
+                 complexity_score REAL,
+                 content_summary TEXT,
+                 embedding BLOB,
+                 FOREIGN KEY(file_id) REFERENCES files(id) ON DELETE CASCADE
+             );
+
+             CREATE TABLE IF NOT EXISTS embedding_queue (
+                 chunk_id TEXT PRIMARY KEY,
+                 text_to_embed TEXT,
+                 status TEXT DEFAULT 'pending', -- pending, processing, failed
+                 retries INTEGER DEFAULT 0,
+                 FOREIGN KEY(chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
+             );
+
+             CREATE TABLE IF NOT EXISTS symbols (
+                 id INTEGER PRIMARY KEY AUTOINCREMENT,
+                 chunk_id TEXT,
+                 name TEXT NOT NULL,
+                 kind TEXT, -- 'function', 'class', 'interface', etc.
+                 FOREIGN KEY(chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
+             );
+
+             CREATE TABLE IF NOT EXISTS dependencies (
+                 source_chunk_id TEXT,
+                 target_path TEXT,
+                 FOREIGN KEY(source_chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
+             );
+
+             -- Indexes for performance
+             CREATE INDEX IF NOT EXISTS idx_files_path ON files(path);
+             CREATE INDEX IF NOT EXISTS idx_symbols_name ON symbols(name);
+             CREATE INDEX IF NOT EXISTS idx_dependencies_target ON dependencies(target_path);
+         `);
+         // Migrations
+         try {
+             this.db.exec("ALTER TABLE chunks ADD COLUMN embedding BLOB");
+         }
+         catch { }
+     }
+     async startWorker() {
+         if (this.workerRunning)
+             return;
+         this.workerRunning = true;
+         // Run periodically
+         setInterval(async () => {
+             try {
+                 await this.processEmbeddingQueue();
+             }
+             catch (e) {
+                 // console.error("Worker error:", e);
+             }
+         }, 5000); // Check every 5s
+     }
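
One operational note on the worker: setInterval here keeps an otherwise-idle Node process alive. A minimal variant, assuming the host process should be allowed to exit while a tick is pending (whether opencode needs this is an assumption, and db stands for a CodeGraphDB instance):

    // Hypothetical variant of the polling loop: unref() the timer so a pending
    // tick never blocks process exit. processEmbeddingQueue() handles one queue
    // row per tick, so each tick stays short.
    const timer = setInterval(() => {
        db.processEmbeddingQueue().catch(() => { /* swallowed, as above */ });
    }, 5000);
    timer.unref(); // Node-specific: the process may exit even with this timer pending
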
+     async processEmbeddingQueue() {
+         // Check if Ollama is ready
+         if (!(await ollama.isRunning()))
+             return;
+         // Get next task
+         const task = this.db.prepare(`
+             SELECT chunk_id, text_to_embed, retries
+             FROM embedding_queue
+             WHERE status = 'pending'
+             ORDER BY rowid ASC
+             LIMIT 1
+         `).get();
+         if (!task)
+             return;
+         // Mark processing
+         this.db.prepare("UPDATE embedding_queue SET status = 'processing' WHERE chunk_id = ?").run(task.chunk_id);
+         try {
+             // Generate embedding
+             const vector = await ollama.getEmbedding(task.text_to_embed);
+             if (vector.length > 0) {
+                 // Store blob (Float32Array to Buffer)
+                 const buffer = Buffer.from(new Float32Array(vector).buffer);
+                 const updateChunk = this.db.prepare("UPDATE chunks SET embedding = ? WHERE id = ?");
+                 const deleteQueue = this.db.prepare("DELETE FROM embedding_queue WHERE chunk_id = ?");
+                 const txn = this.db.transaction(() => {
+                     updateChunk.run(buffer, task.chunk_id);
+                     deleteQueue.run(task.chunk_id);
+                 });
+                 txn();
+             }
+             else {
+                 throw new Error("Empty vector returned");
+             }
+         }
+         catch (error) {
+             if (task.retries > 3) {
+                 // Give up
+                 this.db.prepare("UPDATE embedding_queue SET status = 'failed' WHERE chunk_id = ?").run(task.chunk_id);
+             }
+             else {
+                 // Retry
+                 this.db.prepare("UPDATE embedding_queue SET status = 'pending', retries = retries + 1 WHERE chunk_id = ?").run(task.chunk_id);
+             }
+         }
+     }
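
The BLOB encoding written here, and read back in semanticSearch() below, is just the raw bytes of a Float32Array. A standalone round-trip sketch (the vector values are made up):

    // Write side, as in processEmbeddingQueue():
    const vector = [0.1, 0.2, 0.3];                            // e.g. from ollama.getEmbedding()
    const blob = Buffer.from(new Float32Array(vector).buffer); // 12 bytes = 3 floats * 4 bytes
    // Read side, as in semanticSearch(): a zero-copy view over the Buffer's memory.
    const restored = new Float32Array(blob.buffer, blob.byteOffset, blob.byteLength / 4);
    console.log(Array.from(restored)); // ≈ [0.1, 0.2, 0.3] (float32 precision)
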
+     /**
+      * Syncs a ChunkCard (JSON) into the SQLite Index.
+      * This is an "Upsert" operation.
+      */
+     ingestChunkCard(card) {
+         const insertFile = this.db.prepare(`
+             INSERT INTO files (path, hash, last_indexed)
+             VALUES (?, ?, CURRENT_TIMESTAMP)
+             ON CONFLICT(path) DO UPDATE SET
+                 hash = excluded.hash,
+                 last_indexed = CURRENT_TIMESTAMP
+             RETURNING id
+         `);
+         const insertChunk = this.db.prepare(`
+             INSERT INTO chunks (id, file_id, type, complexity_score, content_summary)
+             VALUES (?, ?, ?, ?, ?)
+             ON CONFLICT(id) DO UPDATE SET
+                 complexity_score = excluded.complexity_score,
+                 content_summary = excluded.content_summary
+         `);
+         const queueEmbedding = this.db.prepare(`
+             INSERT INTO embedding_queue (chunk_id, text_to_embed)
+             VALUES (?, ?)
+             ON CONFLICT(chunk_id) DO UPDATE SET
+                 text_to_embed = excluded.text_to_embed,
+                 status = 'pending',
+                 retries = 0
+         `);
+         const insertSymbol = this.db.prepare(`
+             INSERT INTO symbols (chunk_id, name, kind) VALUES (?, ?, 'unknown')
+         `);
+         const insertDep = this.db.prepare(`
+             INSERT INTO dependencies (source_chunk_id, target_path) VALUES (?, ?)
+         `);
+         const deleteOldSymbols = this.db.prepare('DELETE FROM symbols WHERE chunk_id = ?');
+         const deleteOldDeps = this.db.prepare('DELETE FROM dependencies WHERE source_chunk_id = ?');
+         const transaction = this.db.transaction(() => {
+             // 1. Upsert File
+             const fileRes = insertFile.get(card.file_path, card.metadata.hash);
+             const fileId = fileRes.id;
+             // 2. Upsert Chunk
+             insertChunk.run(card.id, fileId, card.chunk_type, card.metadata.complexity_score, card.content.slice(0, 500) // Store preview
+             );
+             // 3. Queue for Embedding
+             // Use the summary or content as the text to embed
+             const textToEmbed = `${card.chunk_type.toUpperCase()} for ${path.basename(card.file_path)}\n\n${card.content.slice(0, 2000)}`;
+             queueEmbedding.run(card.id, textToEmbed);
+             // 4. Replace Symbols
+             deleteOldSymbols.run(card.id);
+             for (const sym of card.metadata.symbols) {
+                 insertSymbol.run(card.id, sym);
+             }
+             // 5. Replace Dependencies
+             deleteOldDeps.run(card.id);
+             for (const dep of card.metadata.dependencies) {
+                 insertDep.run(card.id, dep);
+             }
+         });
+         transaction();
+     }
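
For reference, this is the card shape ingestChunkCard() expects, reconstructed from the field accesses above and from indexFile() later in this diff; every value below is illustrative:

    const exampleCard = {
        id: "database.js-summary-1a2b3c4d",   // `${basename}-${chunkType}-${hash.slice(0, 8)}`
        file_path: "src/database.js",
        chunk_type: "summary",                 // "summary" | "api" | "invariant"
        content: "Maintains the SQLite-backed code graph index...",
        metadata: {
            hash: "1a2b3c4d5e6f",              // calculateHash(...) output, shortened here
            complexity_score: 3.2,
            symbols: ["CodeGraphDB", "getDb"],
            dependencies: ["better-sqlite3", "./services/ollama.js"]
        }
    };
    getDb().ingestChunkCard(exampleCard);
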
+     /**
+      * Remove a card from the index
+      */
+     deleteChunkCard(cardId) {
+         this.db.prepare('DELETE FROM chunks WHERE id = ?').run(cardId);
+     }
+     // ===========================================================================
+     // QUERY METHODS
+     // ===========================================================================
+     findDependents(filePath) {
+         // Find all chunks that depend on this file path
+         // Note: dependency paths might be relative or absolute, simplistic matching for now
+         const query = this.db.prepare(`
+             SELECT DISTINCT f.path
+             FROM files f
+             JOIN chunks c ON f.id = c.file_id
+             JOIN dependencies d ON c.id = d.source_chunk_id
+             WHERE d.target_path LIKE ? OR d.target_path = ?
+         `);
+         // Attempt to match exact path or likely relative imports (simplistic)
+         const basename = path.basename(filePath);
+         const results = query.all(`%/${basename}%`, basename);
+         return results.map(r => r.path);
+     }
+     searchSymbols(query) {
+         const stmt = this.db.prepare(`
+             SELECT s.name, c.type, f.path
+             FROM symbols s
+             JOIN chunks c ON s.chunk_id = c.id
+             JOIN files f ON c.file_id = f.id
+             WHERE s.name LIKE ?
+             LIMIT 20
+         `);
+         return stmt.all(`%${query}%`);
+     }
+     async semanticSearch(query, limit = 10) {
+         if (!(await ollama.isRunning())) {
+             throw new Error("Ollama is not running. Please run 'autognosis_setup_ai' first.");
+         }
+         const queryVec = await ollama.getEmbedding(query);
+         if (queryVec.length === 0)
+             return [];
+         // Get all embeddings from DB
+         // SQLite doesn't have vector math, so we fetch all and sort in JS
+         // Optimizations: In future, use sqlite-vec or filter by complexity/type first
+         const chunks = this.db.prepare(`
+             SELECT c.id, c.content_summary, c.type, f.path, c.embedding
+             FROM chunks c
+             JOIN files f ON c.file_id = f.id
+             WHERE c.embedding IS NOT NULL
+         `).all();
+         const results = chunks.map(chunk => {
+             const vector = new Float32Array(chunk.embedding.buffer, chunk.embedding.byteOffset, chunk.embedding.byteLength / 4);
+             const similarity = this.cosineSimilarity(queryVec, vector);
+             return { ...chunk, similarity, embedding: undefined }; // Don't return blob
+         });
+         results.sort((a, b) => b.similarity - a.similarity);
+         return results.slice(0, limit);
+     }
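
As the inline comment admits, this is a brute-force scan: every embedded chunk is deserialized and scored on every query. Until something like sqlite-vec is adopted, the cheapest mitigation the comment hints at is narrowing the SQL scan first, for example by chunk type (a sketch; restricting to summary chunks is an assumption about which chunks matter most):

    // Inside semanticSearch(): scan only one chunk type instead of every row.
    const chunks = this.db.prepare(`
        SELECT c.id, c.content_summary, c.type, f.path, c.embedding
        FROM chunks c
        JOIN files f ON c.file_id = f.id
        WHERE c.embedding IS NOT NULL AND c.type = ?
    `).all("summary");
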
+     cosineSimilarity(vecA, vecB) {
+         let dot = 0;
+         let normA = 0;
+         let normB = 0;
+         for (let i = 0; i < vecA.length; i++) {
+             dot += vecA[i] * vecB[i];
+             normA += vecA[i] * vecA[i];
+             normB += vecB[i] * vecB[i];
+         }
+         return dot / (Math.sqrt(normA) * Math.sqrt(normB));
+     }
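
A quick worked check of the formula, cos(theta) = (A.B) / (|A| |B|):

    // cosineSimilarity([1, 0], [1, 1])
    // dot    = 1*1 + 0*1       = 1
    // |A|    = sqrt(1*1 + 0*0) = 1
    // |B|    = sqrt(1*1 + 1*1) ≈ 1.4142
    // result = 1 / 1.4142      ≈ 0.7071  (vectors 45° apart; identical vectors give 1.0)
    // Caveat: an all-zero vector yields 0/0 = NaN, which makes the sort in
    // semanticSearch() unpredictable for that row.
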
+     getStats() {
+         const files = this.db.prepare('SELECT COUNT(*) as c FROM files').get();
+         const symbols = this.db.prepare('SELECT COUNT(*) as c FROM symbols').get();
+         const deps = this.db.prepare('SELECT COUNT(*) as c FROM dependencies').get();
+         const chunks = this.db.prepare('SELECT COUNT(*) as c FROM chunks').get();
+         const embedded = this.db.prepare('SELECT COUNT(*) as c FROM chunks WHERE embedding IS NOT NULL').get();
+         const queue = this.db.prepare("SELECT COUNT(*) as c FROM embedding_queue WHERE status = 'pending'").get();
+         return {
+             files: files.c,
+             chunks: chunks.c,
+             symbols: symbols.c,
+             dependencies: deps.c,
+             embeddings: {
+                 completed: embedded.c,
+                 pending: queue.c
+             }
+         };
+     }
+ }
+ // Singleton instance for the plugin
+ let dbInstance = null;
+ export function getDb() {
+     if (!dbInstance) {
+         dbInstance = new CodeGraphDB();
+     }
+     return dbInstance;
+ }
+ export function graphTools() {
+     return {
+         autognosis_setup_ai: tool({
+             description: "Configure local AI capabilities (Ollama). Checks installation, installs if needed, and pulls the embedding model.",
+             args: {
+                 model: tool.schema.string().optional().default(DEFAULT_EMBEDDING_MODEL).describe("Embedding model to pull")
+             },
+             async execute({ model }) {
+                 try {
+                     const installed = await ollama.isInstalled();
+                     let statusMsg = "Ollama is installed.";
+                     if (!installed) {
+                         statusMsg = await ollama.install();
+                     }
+                     await ollama.startServer();
+                     await ollama.pullModel(model);
+                     return JSON.stringify({
+                         status: "SUCCESS",
+                         message: `${statusMsg} Server is running. Model ${model} is ready.`,
+                         config: {
+                             model,
+                             base_url: "http://127.0.0.1:11434"
+                         }
+                     }, null, 2);
+                 }
+                 catch (error) {
+                     return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
+                 }
+             }
+         }),
+         graph_semantic_search: tool({
+             description: "Search the codebase using natural language (Vector/Semantic Search). Requires AI setup.",
+             args: {
+                 query: tool.schema.string().describe("Natural language query"),
+                 limit: tool.schema.number().optional().default(10).describe("Max results")
+             },
+             async execute({ query, limit }) {
+                 try {
+                     const results = await getDb().semanticSearch(query, limit);
+                     return JSON.stringify({
+                         status: "SUCCESS",
+                         query,
+                         results
+                     }, null, 2);
+                 }
+                 catch (error) {
+                     return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
+                 }
+             }
+         }),
+         graph_query_dependents: tool({
+             description: "Find all files that depend on a specific file (upstream impact analysis).",
+             args: {
+                 file_path: tool.schema.string().describe("File path to analyze"),
+             },
+             async execute({ file_path }) {
+                 try {
+                     const dependents = getDb().findDependents(file_path);
+                     return JSON.stringify({
+                         status: "SUCCESS",
+                         file_path,
+                         dependents,
+                         count: dependents.length
+                     }, null, 2);
+                 }
+                 catch (error) {
+                     return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
+                 }
+             }
+         }),
+         graph_search_symbols: tool({
+             description: "Fast fuzzy search for symbols (functions, classes) across the entire codebase index.",
+             args: {
+                 query: tool.schema.string().describe("Symbol name query"),
+             },
+             async execute({ query }) {
+                 try {
+                     const results = getDb().searchSymbols(query);
+                     return JSON.stringify({
+                         status: "SUCCESS",
+                         query,
+                         results,
+                         count: results.length
+                     }, null, 2);
+                 }
+                 catch (error) {
+                     return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
+                 }
+             }
+         }),
+         graph_stats: tool({
+             description: "Get statistics about the Code Graph Index.",
+             args: {},
+             async execute() {
+                 try {
+                     const stats = getDb().getStats();
+                     return JSON.stringify({
+                         status: "SUCCESS",
+                         stats
+                     }, null, 2);
+                 }
+                 catch (error) {
+                     return JSON.stringify({ status: "ERROR", message: String(error) }, null, 2);
+                 }
+             }
+         })
+     };
+ }
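
Driven directly, outside the opencode host (which normally applies the schema defaults and invokes execute itself), the tools compose like this; the query string is made up:

    import { graphTools } from "./database.js";
    import { DEFAULT_EMBEDDING_MODEL } from "./services/ollama.js";

    const tools = graphTools();
    // One-time: install/start Ollama and pull the embedding model.
    await tools.autognosis_setup_ai.execute({ model: DEFAULT_EMBEDDING_MODEL });
    // Then query the index in natural language.
    const out = await tools.graph_semantic_search.execute({
        query: "where are embeddings persisted?",
        limit: 5,
    });
    console.log(out); // JSON string: { status, query, results: [{ path, similarity, ... }] }
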
package/dist/git-worktree.js CHANGED
@@ -5,13 +5,14 @@ import * as fsSync from "node:fs";
  import * as path from "node:path";
  import { promisify } from "node:util";
  import * as crypto from "node:crypto";
+ import { Logger } from "./services/logger.js";
  const execAsync = promisify(exec);
  const PROJECT_ROOT = process.cwd();
  const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
  const WORKTREE_DIR = path.join(OPENCODE_DIR, "worktrees");
  // Internal logging
  function log(message, data) {
-     console.error(`[GitWorktree] ${message}`, data || '');
+     Logger.log("GitWorktree", message, data);
  }
  // =============================================================================
  // HELPERS
@@ -202,8 +203,8 @@ export function gitWorktreeTools() {
      }, null, 2);
  }
  // Create initial commit if needed
- const { stdout: log } = await runCmd("git log --oneline -1");
- if (!log || log.includes("Initial commit")) {
+ const { stdout: gitLog } = await runCmd("git log --oneline -1");
+ if (!gitLog || gitLog.includes("Initial commit")) {
      await runCmd(`git commit --allow-empty -m "${message}"`);
  }
  let worktreePath = null;
@@ -214,7 +215,7 @@ export function gitWorktreeTools() {
  if (fsSync.existsSync(worktreePath)) {
      const { error: removeError } = await runCmd(`git worktree remove ${worktreePath}`);
      if (removeError) {
-         console.error("[GitWorktree] Warning: Failed to remove existing worktree", removeError);
+         log("Warning: Failed to remove existing worktree", removeError);
      }
  }
  // Create new worktree
package/dist/index.d.ts CHANGED
@@ -1,5 +1,6 @@
- export default function plugin(): {
-     tools: {
-         [key: string]: any;
+ export declare const AutognosisPlugin: () => Promise<{
+     tool: {
+         [x: string]: any;
      };
- };
+ }>;
+ export default AutognosisPlugin;
package/dist/index.js CHANGED
@@ -5,9 +5,10 @@ import { chunkCardsTools } from "./chunk-cards.js";
  import { activeSetTools } from "./activeset.js";
  import { moduleSummariesTools } from "./module-summaries.js";
  import { performanceTools } from "./performance-optimization.js";
- export default function plugin() {
+ import { graphTools } from "./database.js";
+ export const AutognosisPlugin = async () => {
      return {
-         tools: {
+         tool: {
              ...systemTools(),
              ...gitWorktreeTools(),
              ...testingTools(),
@@ -15,6 +16,8 @@ export default function plugin() {
              ...activeSetTools(),
              ...moduleSummariesTools(),
              ...performanceTools(),
+             ...graphTools(),
          },
      };
- }
+ };
+ export default AutognosisPlugin;
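
The substantive change in this hunk is the plugin contract: the default export is now an async factory resolving to { tool: { ... } }, where 2.0.0 exported a synchronous function returning { tools: { ... } }. From a consumer's point of view (a sketch; opencode's own loader does this internally):

    import AutognosisPlugin from "opencode-autognosis";

    // 2.0.2 contract: await the factory, then read the `tool` map (named `tools` in 2.0.0).
    const { tool } = await AutognosisPlugin();
    console.log(Object.keys(tool)); // e.g. ["graph_semantic_search", "graph_stats", ...]
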
package/dist/module-summaries.js CHANGED
@@ -3,13 +3,14 @@ import * as fs from "node:fs/promises";
  import * as fsSync from "node:fs";
  import * as path from "node:path";
  import * as crypto from "node:crypto";
+ import { Logger } from "./services/logger.js";
  const PROJECT_ROOT = process.cwd();
  const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
  const CHUNK_DIR = path.join(OPENCODE_DIR, "chunks");
  const MODULE_DIR = path.join(OPENCODE_DIR, "modules");
  // Internal logging
  function log(message, data) {
-     console.error(`[ModuleSummaries] ${message}`, data || '');
+     Logger.log("ModuleSummaries", message, data);
  }
  // =============================================================================
  // HELPERS
package/dist/performance-optimization.js CHANGED
@@ -5,6 +5,9 @@ import * as fsSync from "node:fs";
  import * as path from "node:path";
  import { promisify } from "node:util";
  import * as crypto from "node:crypto";
+ import { getDb } from "./database.js";
+ import { CHUNK_DIR, ensureChunkDir, calculateHash, calculateComplexity, parseFileAST, generateSummaryChunk, generateApiChunk, generateInvariantChunk, extractDependencies, extractSymbolsFromAST, extractSymbols } from "./chunk-cards.js";
+ import { Logger } from "./services/logger.js";
  const execAsync = promisify(exec);
  const PROJECT_ROOT = process.cwd();
  const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
@@ -13,7 +16,7 @@ const PERF_DIR = path.join(OPENCODE_DIR, "performance");
  const METRICS_DIR = path.join(OPENCODE_DIR, "metrics");
  // Internal logging
  function log(message, data) {
-     console.error(`[Performance] ${message}`, data || '');
+     Logger.log("Performance", message, data);
  }
  // =============================================================================
  // HELPERS
@@ -593,10 +596,45 @@ async function getAllSourceFiles() {
      return sourceFiles;
  }
  async function indexFile(filePath) {
-     // Simplified indexing - would create chunk cards, analyze symbols, etc.
-     // For now, just touch the file to update its timestamp
-     const stats = await fs.stat(filePath);
-     // Indexing logic would go here
+     try {
+         const content = await fs.readFile(filePath, 'utf-8');
+         await ensureChunkDir();
+         const ast = parseFileAST(filePath, content);
+         const chunkTypes = ["summary", "api", "invariant"];
+         // Generate file hash for ID consistency
+         const fileHash = calculateHash(filePath);
+         for (const chunkType of chunkTypes) {
+             const cardId = `${path.basename(filePath)}-${chunkType}-${fileHash.slice(0, 8)}`;
+             const cardPath = path.join(CHUNK_DIR, `${cardId}.json`);
+             let chunkContent = "";
+             if (chunkType === "summary")
+                 chunkContent = await generateSummaryChunk(content, filePath, ast);
+             else if (chunkType === "api")
+                 chunkContent = await generateApiChunk(content, filePath, ast);
+             else if (chunkType === "invariant")
+                 chunkContent = await generateInvariantChunk(content, filePath, ast);
+             const chunkCard = {
+                 id: cardId,
+                 file_path: filePath,
+                 chunk_type: chunkType,
+                 content: chunkContent,
+                 metadata: {
+                     created_at: new Date().toISOString(),
+                     updated_at: new Date().toISOString(),
+                     hash: calculateHash(chunkContent),
+                     dependencies: await extractDependencies(content, ast, filePath),
+                     symbols: extractSymbolsFromAST(ast, content) || extractSymbols(content, filePath),
+                     complexity_score: calculateComplexity(content)
+                 }
+             };
+             await fs.writeFile(cardPath, JSON.stringify(chunkCard, null, 2));
+             // Sync to SQLite Index
+             getDb().ingestChunkCard(chunkCard);
+         }
+     }
+     catch (error) {
+         log(`Failed to index file ${filePath}`, error);
+     }
  }
  async function runBackgroundIndexing(taskId, indexingState) {
      try {
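
One subtlety in the card construction above: extractSymbolsFromAST(ast, content) || extractSymbols(content, filePath) only falls back when the AST extractor returns a falsy value, and an empty array is truthy in JavaScript, so "AST parsed but found nothing" never reaches the regex fallback. If a length check is the intended behavior, a sketch (assuming both helpers return arrays):

    // Hypothetical tightening: treat an empty AST result as a miss.
    const astSymbols = extractSymbolsFromAST(ast, content);
    const symbols = Array.isArray(astSymbols) && astSymbols.length > 0
        ? astSymbols
        : extractSymbols(content, filePath);
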
@@ -606,11 +644,65 @@ async function runBackgroundIndexing(taskId, indexingState) {
      task.status = "running";
      task.started_at = new Date().toISOString();
      await fs.writeFile(taskPath, JSON.stringify(task, null, 2));
-     // Simulate background indexing work
-     for (let i = 0; i < 10; i++) {
-         task.progress = (i + 1) * 10;
+     // Determine files to index
+     const force_full = task.metadata?.force_full || false;
+     let filesToIndex = [];
+     if (force_full) {
+         filesToIndex = await getAllSourceFiles();
+     }
+     else {
+         // For incremental indexing, try git first; if that fails or
+         // reports nothing, fall back to timestamp-based detection below.
+         try {
+             const { stdout: gitDiff } = await runCmd(`git diff --name-only --since="${indexingState.last_indexed}"`);
+             const changedFiles = gitDiff.split('\n').filter(Boolean);
+             if (changedFiles.length > 0) {
+                 filesToIndex = changedFiles;
+             }
+             else {
+                 // Git reported no changes, but the index may still be stale
+                 // (e.g. uncommitted edits), so don't stop here.
+                 // Instead of trusting git alone for the incremental pass,
+                 // use a simple timestamp check: compare every source file's
+                 // mtime against last_indexed and re-index the newer ones.
+                 const allFiles = await getAllSourceFiles();
+                 filesToIndex = [];
+                 for (const f of allFiles) {
+                     const fp = path.join(PROJECT_ROOT, f);
+                     if (fsSync.existsSync(fp)) {
+                         const stats = await fs.stat(fp);
+                         if (stats.mtime.toISOString() > indexingState.last_indexed) {
+                             filesToIndex.push(f);
+                         }
+                     }
+                 }
+             }
+         }
+         catch (e) {
+             // Fallback to full scan if git fails
+             filesToIndex = await getAllSourceFiles();
+         }
+     }
+     const total = filesToIndex.length;
+     let processed = 0;
+     if (total === 0) {
+         task.progress = 100;
+         task.status = "completed";
+         task.completed_at = new Date().toISOString();
          await fs.writeFile(taskPath, JSON.stringify(task, null, 2));
-         await new Promise(resolve => setTimeout(resolve, 1000));
+         return;
+     }
+     for (const file of filesToIndex) {
+         const filePath = path.join(PROJECT_ROOT, file);
+         if (fsSync.existsSync(filePath)) {
+             await indexFile(filePath);
+         }
+         processed++;
+         // Update progress periodically
+         if (processed % 5 === 0 || processed === total) {
+             task.progress = Math.round((processed / total) * 100);
+             await fs.writeFile(taskPath, JSON.stringify(task, null, 2));
+         }
      }
      // Complete task
      task.status = "completed";
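
Worth flagging: git diff has no --since option, so the incremental branch above can never get a change list from git; depending on how runCmd surfaces the failure, it either throws into the full-scan catch or falls through to the mtime comparison. git log does accept --since, so a working change list using the same runCmd helper might look like this (last_indexed is already an ISO timestamp):

    // Hypothetical fix: list files touched by commits since last_indexed.
    // An empty --pretty format suppresses commit header lines, leaving only paths.
    const { stdout } = await runCmd(`git log --since="${indexingState.last_indexed}" --name-only --pretty=format:`);
    const changedFiles = [...new Set(stdout.split('\n').filter(Boolean))];
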
@@ -621,11 +713,18 @@ async function runBackgroundIndexing(taskId, indexingState) {
      catch (error) {
          // Update task with error
          const taskPath = path.join(PERF_DIR, `${taskId}.json`);
-         const task = JSON.parse(await fs.readFile(taskPath, 'utf-8'));
-         task.status = "failed";
-         task.error = error instanceof Error ? error.message : `${error}`;
-         task.completed_at = new Date().toISOString();
-         await fs.writeFile(taskPath, JSON.stringify(task, null, 2));
+         try {
+             if (fsSync.existsSync(taskPath)) {
+                 const task = JSON.parse(await fs.readFile(taskPath, 'utf-8'));
+                 task.status = "failed";
+                 task.error = error instanceof Error ? error.message : `${error}`;
+                 task.completed_at = new Date().toISOString();
+                 await fs.writeFile(taskPath, JSON.stringify(task, null, 2));
+             }
+         }
+         catch (writeError) {
+             log("Failed to update task error state", writeError);
+         }
      }
  }
  function calculateMetricsSummary(metrics) {
package/dist/services/logger.d.ts CHANGED
@@ -1 +1,4 @@
- export declare function log(message: string, data?: unknown): void;
+ export declare class Logger {
+     private static formatMessage;
+     static log(module: string, message: string, data?: unknown): void;
+ }
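
logger.js itself is not part of this diff. A minimal implementation consistent with the declaration above and with the call sites (Logger.log("GitWorktree", message, data)), assuming it preserves the old stderr behavior, could look like:

    // Hypothetical services/logger.js matching the .d.ts; the published implementation may differ.
    export class Logger {
        static formatMessage(module, message) {
            return `[${module}] ${message}`;
        }
        static log(module, message, data) {
            console.error(Logger.formatMessage(module, message), data ?? '');
        }
    }
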