opencode-autognosis 1.0.1 → 2.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,748 @@
+ import { tool } from "@opencode-ai/plugin";
+ import { exec } from "node:child_process";
+ import * as fs from "node:fs/promises";
+ import * as fsSync from "node:fs";
+ import * as path from "node:path";
+ import { promisify } from "node:util";
+ import * as crypto from "node:crypto";
+ import { CHUNK_DIR, ensureChunkDir, calculateHash, calculateComplexity, parseFileAST, generateSummaryChunk, generateApiChunk, generateInvariantChunk, extractDependencies, extractSymbolsFromAST, extractSymbols } from "./chunk-cards.js";
+ const execAsync = promisify(exec);
+ const PROJECT_ROOT = process.cwd();
+ const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
+ const CACHE_DIR = path.join(OPENCODE_DIR, "cache");
+ const PERF_DIR = path.join(OPENCODE_DIR, "performance");
+ const METRICS_DIR = path.join(OPENCODE_DIR, "metrics");
+ // Internal logging
+ function log(message, data) {
+   console.error(`[Performance] ${message}`, data || '');
+ }
+ // =============================================================================
+ // HELPERS
+ // =============================================================================
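+ // runCmd wraps exec with a timeout and an enlarged output buffer; it never throws,
+ // returning { stdout, stderr } on success and adding error (plus timedOut) on failure.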
+ async function runCmd(cmd, cwd = PROJECT_ROOT, timeoutMs = 30000) {
+   try {
+     const { stdout, stderr } = await execAsync(cmd, {
+       cwd,
+       maxBuffer: 10 * 1024 * 1024,
+       timeout: timeoutMs
+     });
+     return { stdout: stdout.trim(), stderr: stderr.trim() };
+   }
+   catch (error) {
+     // exec reports a timeout kill with signal SIGTERM and a null exit code
+     if (error.signal === 'SIGTERM' && error.code == null) {
+       return { stdout: "", stderr: `Command timed out after ${timeoutMs}ms`, error, timedOut: true };
+     }
+     return { stdout: "", stderr: error.message, error };
+   }
+ }
+ async function ensurePerfDirs() {
+   await fs.mkdir(CACHE_DIR, { recursive: true });
+   await fs.mkdir(PERF_DIR, { recursive: true });
+   await fs.mkdir(METRICS_DIR, { recursive: true });
+ }
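+ // Cache keys combine the operation name with an MD5 digest of the JSON-serialized parameters.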
+ function generateCacheKey(operation, params) {
+   const paramHash = crypto.createHash('md5').update(JSON.stringify(params)).digest('hex');
+   return `${operation}-${paramHash}`;
+ }
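+ // Cache entries are stored as individual JSON files under .opencode/cache. Each read
+ // refreshes last_accessed and access_count; entries past their ttl_seconds are deleted on access.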
+ async function getCacheEntry(key) {
+   try {
+     const cachePath = path.join(CACHE_DIR, `${key}.json`);
+     if (!fsSync.existsSync(cachePath)) {
+       return null;
+     }
+     const entry = JSON.parse(await fs.readFile(cachePath, 'utf-8'));
+     // Check TTL if present
+     if (entry.metadata.ttl_seconds) {
+       const age = (Date.now() - new Date(entry.metadata.created_at).getTime()) / 1000;
+       if (age > entry.metadata.ttl_seconds) {
+         await fs.unlink(cachePath);
+         return null;
+       }
+     }
+     // Update access metadata
+     entry.metadata.last_accessed = new Date().toISOString();
+     entry.metadata.access_count++;
+     await fs.writeFile(cachePath, JSON.stringify(entry, null, 2));
+     return entry;
+   }
+   catch (error) {
+     return null;
+   }
+ }
+ async function setCacheEntry(key, value, ttlSeconds) {
+   try {
+     const entry = {
+       key,
+       value,
+       metadata: {
+         created_at: new Date().toISOString(),
+         last_accessed: new Date().toISOString(),
+         access_count: 1,
+         size_bytes: JSON.stringify(value).length,
+         ttl_seconds: ttlSeconds
+       }
+     };
+     const cachePath = path.join(CACHE_DIR, `${key}.json`);
+     await fs.writeFile(cachePath, JSON.stringify(entry, null, 2));
+   }
+   catch (error) {
+     // Fail silently for cache errors
+   }
+ }
+ async function recordMetrics(metrics) {
+   try {
+     const metricsPath = path.join(METRICS_DIR, `metrics-${Date.now()}.json`);
+     await fs.writeFile(metricsPath, JSON.stringify(metrics, null, 2));
+   }
+   catch (error) {
+     // Fail silently for metrics errors
+   }
+ }
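+ // Returns current V8 heap usage in MB; 0 when process.memoryUsage is unavailable.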
+ function measureMemoryUsage() {
+   if (typeof process !== 'undefined' && process.memoryUsage) {
+     return process.memoryUsage().heapUsed / 1024 / 1024; // MB
+   }
+   return 0;
+ }
+ // =============================================================================
+ // PERFORMANCE OPTIMIZATION TOOLS
+ // =============================================================================
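+ // Every tool below returns a pretty-printed JSON string with a top-level status field
+ // (e.g. "SUCCESS", "ERROR", "HIT"/"MISS" for cache reads, "BACKGROUND_STARTED" for background indexing).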
+ export function performanceTools() {
+   return {
+     perf_incremental_index: tool({
+       description: "Perform incremental re-indexing of the codebase. Only processes changed files since last index.",
+       args: {
+         force_full: tool.schema.boolean().optional().default(false).describe("Force full re-indexing instead of incremental"),
+         parallel_workers: tool.schema.number().optional().default(4).describe("Number of parallel workers for indexing"),
+         background: tool.schema.boolean().optional().default(false).describe("Run indexing in background")
+       },
+       async execute({ force_full, parallel_workers, background }) {
+         log("Tool call: perf_incremental_index", { force_full, parallel_workers, background });
+         const startTime = Date.now();
+         const metrics = {
+           operation: "incremental_index",
+           start_time: startTime,
+           end_time: 0,
+           duration_ms: 0,
+           memory_usage_mb: measureMemoryUsage(),
+           cache_hits: 0,
+           cache_misses: 0,
+           success: false
+         };
+         try {
+           await ensurePerfDirs();
+           // Load indexing state
+           const statePath = path.join(PERF_DIR, "indexing-state.json");
+           let indexingState = {
+             last_indexed: "1970-01-01T00:00:00.000Z",
+             files_processed: 0,
+             files_indexed: 0,
+             indexing_duration_ms: 0,
+             cache_status: "stale"
+           };
+           if (!force_full && fsSync.existsSync(statePath)) {
+             indexingState = JSON.parse(await fs.readFile(statePath, 'utf-8'));
+           }
+           if (background) {
+             // Create background task
+             const taskId = `task-index-${Date.now()}`;
+             const taskPath = path.join(PERF_DIR, `${taskId}.json`);
+             const backgroundTask = {
+               id: taskId,
+               type: "indexing",
+               status: "pending",
+               progress: 0,
+               metadata: {
+                 force_full,
+                 parallel_workers,
+                 started_at: new Date().toISOString()
+               }
+             };
+             await fs.writeFile(taskPath, JSON.stringify(backgroundTask, null, 2));
+             // Start background indexing (simplified for demo)
+             setTimeout(() => runBackgroundIndexing(taskId, indexingState), 100);
+             return JSON.stringify({
+               status: "BACKGROUND_STARTED",
+               task_id: taskId,
+               message: "Incremental indexing started in background"
+             }, null, 2);
+           }
+           // Get changed files (git diff has no --since option, so use git log for the time window)
+           const { stdout: gitStatus } = await runCmd("git status --porcelain");
+           const { stdout: gitDiff } = await runCmd(`git log --since="${indexingState.last_indexed}" --name-only --pretty=format:`);
+           const changedFiles = [...new Set(gitDiff.split('\n').filter(Boolean))];
+           const allFiles = await getAllSourceFiles();
+           const filesToIndex = force_full ? allFiles : changedFiles.length > 0 ? changedFiles : allFiles.slice(0, 10); // Fallback to a small sample when git reports no changes
+           // Process files
+           let filesProcessed = 0;
+           let filesIndexed = 0;
+           for (const file of filesToIndex) {
+             try {
+               filesProcessed++;
+               // Check if file needs indexing
+               const filePath = path.join(PROJECT_ROOT, file);
+               if (!fsSync.existsSync(filePath))
+                 continue;
+               const stats = await fs.stat(filePath);
+               const lastModified = stats.mtime.toISOString();
+               if (!force_full && lastModified < indexingState.last_indexed) {
+                 continue;
+               }
+               // Index the file (simplified - would create chunk cards, etc.)
+               await indexFile(filePath);
+               filesIndexed++;
+             }
+             catch (error) {
+               // Continue with other files
+             }
+           }
+           // Update indexing state
+           indexingState.last_indexed = new Date().toISOString();
+           indexingState.files_processed += filesProcessed;
+           indexingState.files_indexed += filesIndexed;
+           indexingState.indexing_duration_ms = Date.now() - startTime;
+           indexingState.cache_status = "fresh";
+           await fs.writeFile(statePath, JSON.stringify(indexingState, null, 2));
+           // Update metrics
+           metrics.end_time = Date.now();
+           metrics.duration_ms = metrics.end_time - metrics.start_time;
+           metrics.success = true;
+           await recordMetrics(metrics);
+           return JSON.stringify({
+             status: "SUCCESS",
+             indexing: {
+               mode: force_full ? "full" : "incremental",
+               files_processed: filesProcessed,
+               files_indexed: filesIndexed,
+               duration_ms: Date.now() - startTime,
+               cache_status: indexingState.cache_status
+             },
+             state: indexingState
+           }, null, 2);
+         }
+         catch (error) {
+           metrics.end_time = Date.now();
+           metrics.duration_ms = metrics.end_time - metrics.start_time;
+           metrics.success = false;
+           metrics.error = error instanceof Error ? error.message : `${error}`;
+           await recordMetrics(metrics);
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     }),
+     perf_cache_get: tool({
+       description: "Retrieve a value from the performance cache with automatic hit/miss tracking.",
+       args: {
+         operation: tool.schema.string().describe("Operation type for cache key"),
+         params: tool.schema.any().describe("Parameters for cache key generation")
+       },
+       async execute({ operation, params }) {
+         log("Tool call: perf_cache_get", { operation, params });
+         try {
+           const cacheKey = generateCacheKey(operation, params);
+           const entry = await getCacheEntry(cacheKey);
+           if (entry) {
+             return JSON.stringify({
+               status: "HIT",
+               cache_key: cacheKey,
+               value: entry.value,
+               metadata: entry.metadata
+             }, null, 2);
+           }
+           else {
+             return JSON.stringify({
+               status: "MISS",
+               cache_key: cacheKey,
+               message: "Cache entry not found"
+             }, null, 2);
+           }
+         }
+         catch (error) {
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     }),
+     perf_cache_set: tool({
+       description: "Store a value in the performance cache with optional TTL.",
+       args: {
+         operation: tool.schema.string().describe("Operation type for cache key"),
+         params: tool.schema.any().describe("Parameters for cache key generation"),
+         value: tool.schema.any().describe("Value to cache"),
+         ttl_seconds: tool.schema.number().optional().describe("Time-to-live in seconds")
+       },
+       async execute({ operation, params, value, ttl_seconds }) {
+         log("Tool call: perf_cache_set", { operation, params, ttl_seconds });
+         try {
+           const cacheKey = generateCacheKey(operation, params);
+           await setCacheEntry(cacheKey, value, ttl_seconds);
+           return JSON.stringify({
+             status: "SUCCESS",
+             cache_key: cacheKey,
+             ttl_seconds,
+             message: "Value cached successfully"
+           }, null, 2);
+         }
+         catch (error) {
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     }),
+     perf_cache_cleanup: tool({
+       description: "Clean up expired and stale cache entries to free memory.",
+       args: {
+         max_age_hours: tool.schema.number().optional().default(24).describe("Maximum age for cache entries"),
+         max_size_mb: tool.schema.number().optional().default(100).describe("Maximum cache size in MB"),
+         dry_run: tool.schema.boolean().optional().default(false).describe("Show what would be deleted without actually deleting")
+       },
+       async execute({ max_age_hours, max_size_mb, dry_run }) {
+         log("Tool call: perf_cache_cleanup", { max_age_hours, max_size_mb, dry_run });
+         try {
+           await ensurePerfDirs();
+           const files = await fs.readdir(CACHE_DIR);
+           const now = Date.now();
+           const maxAge = max_age_hours * 60 * 60 * 1000;
+           let totalSize = 0;
+           let expiredCount = 0;
+           let oversizedCount = 0;
+           const deletedFiles = [];
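+           // Entries are scanned in directory order; a file is deleted once it is older than
+           // max_age_hours or once the running size total has exceeded max_size_mb.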
+           for (const file of files) {
+             if (file.endsWith('.json')) {
+               const filePath = path.join(CACHE_DIR, file);
+               const stats = await fs.stat(filePath);
+               const age = now - stats.mtime.getTime();
+               try {
+                 const entry = JSON.parse(await fs.readFile(filePath, 'utf-8'));
+                 totalSize += entry.metadata.size_bytes || 0;
+                 const shouldDelete = age > maxAge || totalSize > (max_size_mb * 1024 * 1024);
+                 if (shouldDelete) {
+                   if (age > maxAge)
+                     expiredCount++;
+                   else
+                     oversizedCount++;
+                   deletedFiles.push({
+                     file,
+                     reason: age > maxAge ? "expired" : "oversized",
+                     size_bytes: entry.metadata.size_bytes || 0,
+                     age_hours: age / (1000 * 60 * 60)
+                   });
+                   if (!dry_run) {
+                     await fs.unlink(filePath);
+                   }
+                 }
+               }
+               catch (error) {
+                 // Remove corrupted files
+                 if (!dry_run) {
+                   await fs.unlink(filePath);
+                 }
+               }
+             }
+           }
+           return JSON.stringify({
+             status: "SUCCESS",
+             cleanup: {
+               dry_run,
+               max_age_hours,
+               max_size_mb,
+               total_files: files.length,
+               expired_files: expiredCount,
+               oversized_files: oversizedCount,
+               deleted_files: deletedFiles,
+               total_size_mb: Math.round(totalSize / 1024 / 1024 * 100) / 100
+             }
+           }, null, 2);
+         }
+         catch (error) {
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     }),
+     perf_metrics_get: tool({
+       description: "Retrieve performance metrics for analysis and monitoring.",
+       args: {
+         operation_filter: tool.schema.string().optional().describe("Filter by operation type (regex)"),
+         time_range_hours: tool.schema.number().optional().default(24).describe("Time range in hours"),
+         limit: tool.schema.number().optional().default(100).describe("Maximum number of metrics to return")
+       },
+       async execute({ operation_filter, time_range_hours, limit }) {
+         log("Tool call: perf_metrics_get", { operation_filter, time_range_hours, limit });
+         try {
+           await ensurePerfDirs();
+           const files = await fs.readdir(METRICS_DIR);
+           const cutoffTime = Date.now() - (time_range_hours * 60 * 60 * 1000);
+           const metrics = [];
+           for (const file of files) {
+             if (file.endsWith('.json')) {
+               const filePath = path.join(METRICS_DIR, file);
+               const stats = await fs.stat(filePath);
+               if (stats.mtime.getTime() > cutoffTime) {
+                 try {
+                   const metric = JSON.parse(await fs.readFile(filePath, 'utf-8'));
+                   // Apply operation filter if specified
+                   if (operation_filter && !new RegExp(operation_filter).test(metric.operation)) {
+                     continue;
+                   }
+                   metrics.push(metric);
+                 }
+                 catch (error) {
+                   // Skip corrupted files
+                 }
+               }
+             }
+           }
+           // Sort by start time (most recent first)
+           metrics.sort((a, b) => b.start_time - a.start_time);
+           // Apply limit
+           const limitedMetrics = metrics.slice(0, limit);
+           // Calculate summary statistics
+           const summary = calculateMetricsSummary(limitedMetrics);
+           return JSON.stringify({
+             status: "SUCCESS",
+             metrics: limitedMetrics,
+             summary,
+             filters: {
+               operation_filter,
+               time_range_hours,
+               limit
+             }
+           }, null, 2);
+         }
+         catch (error) {
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     }),
+     perf_background_status: tool({
+       description: "Check status of background tasks and operations.",
+       args: {
+         task_id: tool.schema.string().optional().describe("Specific task ID to check"),
+         task_type: tool.schema.enum(["indexing", "caching", "cleanup", "analysis"]).optional().describe("Filter by task type")
+       },
+       async execute({ task_id, task_type }) {
+         log("Tool call: perf_background_status", { task_id, task_type });
+         try {
+           await ensurePerfDirs();
+           const files = await fs.readdir(PERF_DIR);
+           const tasks = [];
+           for (const file of files) {
+             if (file.startsWith('task-') && file.endsWith('.json')) {
+               try {
+                 const taskPath = path.join(PERF_DIR, file);
+                 const task = JSON.parse(await fs.readFile(taskPath, 'utf-8'));
+                 // Apply filters
+                 if (task_id && task.id !== task_id)
+                   continue;
+                 if (task_type && task.type !== task_type)
+                   continue;
+                 tasks.push(task);
+               }
+               catch (error) {
+                 // Skip corrupted files
+               }
+             }
+           }
+           return JSON.stringify({
+             status: "SUCCESS",
+             tasks,
+             total_count: tasks.length,
+             filters: {
+               task_id,
+               task_type
+             }
+           }, null, 2);
+         }
+         catch (error) {
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     }),
+     perf_optimize_memory: tool({
+       description: "Optimize memory usage for large codebases with intelligent caching and cleanup.",
+       args: {
+         aggressive: tool.schema.boolean().optional().default(false).describe("Use aggressive memory optimization"),
+         target_memory_mb: tool.schema.number().optional().default(500).describe("Target memory usage in MB"),
+         preserve_recent: tool.schema.boolean().optional().default(true).describe("Preserve recently accessed cache entries")
+       },
+       async execute({ aggressive, target_memory_mb, preserve_recent }) {
+         log("Tool call: perf_optimize_memory", { aggressive, target_memory_mb, preserve_recent });
+         try {
+           await ensurePerfDirs();
+           const currentMemory = measureMemoryUsage();
+           const memoryReductionNeeded = Math.max(0, currentMemory - target_memory_mb);
+           if (memoryReductionNeeded <= 0) {
+             return JSON.stringify({
+               status: "SUCCESS",
+               message: "Memory usage already within target",
+               current_memory_mb: currentMemory,
+               target_memory_mb
+             }, null, 2);
+           }
+           // Get all cache entries
+           const cacheFiles = await fs.readdir(CACHE_DIR);
+           const cacheEntries = [];
+           for (const file of cacheFiles) {
+             if (file.endsWith('.json')) {
+               try {
+                 const filePath = path.join(CACHE_DIR, file);
+                 const entry = JSON.parse(await fs.readFile(filePath, 'utf-8'));
+                 cacheEntries.push({
+                   file,
+                   entry,
+                   size_bytes: entry.metadata.size_bytes || 0,
+                   last_accessed: new Date(entry.metadata.last_accessed).getTime(),
+                   access_count: entry.metadata.access_count || 0
+                 });
+               }
+               catch (error) {
+                 // Remove corrupted files
+                 await fs.unlink(path.join(CACHE_DIR, file));
+               }
+             }
+           }
+           // Sort by priority (keep recently accessed and frequently used)
+           cacheEntries.sort((a, b) => {
+             const scoreA = (preserve_recent ? a.last_accessed : 0) + (a.access_count * 1000);
+             const scoreB = (preserve_recent ? b.last_accessed : 0) + (b.access_count * 1000);
+             return scoreB - scoreA;
+           });
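+           // Highest-priority entries now sit at the front, so eviction below deletes from the end of the array first.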
+           // Remove entries until target is met
+           let removedSize = 0;
+           let removedCount = 0;
+           const targetRemovalSize = memoryReductionNeeded * 1024 * 1024; // Convert to bytes
+           for (let i = cacheEntries.length - 1; i >= 0; i--) {
+             if (removedSize >= targetRemovalSize)
+               break;
+             const entry = cacheEntries[i];
+             await fs.unlink(path.join(CACHE_DIR, entry.file));
+             removedSize += entry.size_bytes;
+             removedCount++;
+           }
+           // Force garbage collection if available
+           if (global.gc) {
+             global.gc();
+           }
+           const finalMemory = measureMemoryUsage();
+           return JSON.stringify({
+             status: "SUCCESS",
+             optimization: {
+               aggressive,
+               target_memory_mb,
+               preserve_recent,
+               initial_memory_mb: currentMemory,
+               final_memory_mb: finalMemory,
+               memory_freed_mb: Math.round((currentMemory - finalMemory) * 100) / 100,
+               cache_entries_removed: removedCount,
+               cache_size_freed_mb: Math.round(removedSize / 1024 / 1024 * 100) / 100
+             }
+           }, null, 2);
+         }
+         catch (error) {
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     })
+   };
+ }
+ // =============================================================================
+ // HELPER FUNCTIONS
+ // =============================================================================
+ async function getAllSourceFiles() {
+   const extensions = ['.ts', '.js', '.tsx', '.jsx', '.py', '.go', '.rs', '.cpp', '.c'];
+   const sourceFiles = [];
+   async function scanDirectory(dir) {
+     try {
+       const entries = await fs.readdir(dir, { withFileTypes: true });
+       for (const entry of entries) {
+         if (entry.name.startsWith('.') || entry.name === 'node_modules') {
+           continue;
+         }
+         const fullPath = path.join(dir, entry.name);
+         if (entry.isDirectory()) {
+           await scanDirectory(fullPath);
+         }
+         else if (extensions.includes(path.extname(entry.name))) {
+           sourceFiles.push(path.relative(PROJECT_ROOT, fullPath));
+         }
+       }
+     }
+     catch (error) {
+       // Skip directories we can't read
+     }
+   }
+   await scanDirectory(PROJECT_ROOT);
+   return sourceFiles;
+ }
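+ // Writes summary/api/invariant chunk cards for a single file into CHUNK_DIR.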
+ async function indexFile(filePath) {
+   try {
+     const content = await fs.readFile(filePath, 'utf-8');
+     await ensureChunkDir();
+     const ast = parseFileAST(filePath, content);
+     const chunkTypes = ["summary", "api", "invariant"];
+     // Generate file hash for ID consistency
+     const fileHash = calculateHash(filePath);
+     for (const chunkType of chunkTypes) {
+       const cardId = `${path.basename(filePath)}-${chunkType}-${fileHash.slice(0, 8)}`;
+       const cardPath = path.join(CHUNK_DIR, `${cardId}.json`);
+       let chunkContent = "";
+       if (chunkType === "summary")
+         chunkContent = await generateSummaryChunk(content, filePath, ast);
+       else if (chunkType === "api")
+         chunkContent = await generateApiChunk(content, filePath, ast);
+       else if (chunkType === "invariant")
+         chunkContent = await generateInvariantChunk(content, filePath, ast);
+       const chunkCard = {
+         id: cardId,
+         file_path: filePath,
+         chunk_type: chunkType,
+         content: chunkContent,
+         metadata: {
+           created_at: new Date().toISOString(),
+           updated_at: new Date().toISOString(),
+           hash: calculateHash(chunkContent),
+           dependencies: await extractDependencies(content, ast, filePath),
+           symbols: extractSymbolsFromAST(ast, content) || extractSymbols(content, filePath),
+           complexity_score: calculateComplexity(content)
+         }
+       };
+       await fs.writeFile(cardPath, JSON.stringify(chunkCard, null, 2));
+     }
+   }
+   catch (error) {
+     log(`Failed to index file ${filePath}`, error);
+   }
+ }
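+ // Background worker: marks its task file as running, indexes the selected files, and records
+ // progress, completion, or failure back into the same task file.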
+ async function runBackgroundIndexing(taskId, indexingState) {
+   try {
+     const taskPath = path.join(PERF_DIR, `${taskId}.json`);
+     let task = JSON.parse(await fs.readFile(taskPath, 'utf-8'));
+     // Update task status
+     task.status = "running";
+     task.started_at = new Date().toISOString();
+     await fs.writeFile(taskPath, JSON.stringify(task, null, 2));
+     // Determine files to index
+     const force_full = task.metadata?.force_full || false;
+     let filesToIndex = [];
+     if (force_full) {
+       filesToIndex = await getAllSourceFiles();
+     }
+     else {
+       // Incremental mode: ask git which files changed since the last index; if git reports
+       // nothing, fall back to mtime checks, and if git fails entirely, fall back to a full scan.
+       try {
+         const { stdout: gitDiff } = await runCmd(`git log --since="${indexingState.last_indexed}" --name-only --pretty=format:`);
+         const changedFiles = [...new Set(gitDiff.split('\n').filter(Boolean))];
+         if (changedFiles.length > 0) {
+           filesToIndex = changedFiles;
+         }
+         else {
+           // No changes reported by git: compare each source file's mtime against last_indexed.
+           const allFiles = await getAllSourceFiles();
+           filesToIndex = [];
+           for (const f of allFiles) {
+             const fp = path.join(PROJECT_ROOT, f);
+             if (fsSync.existsSync(fp)) {
+               const stats = await fs.stat(fp);
+               if (stats.mtime.toISOString() > indexingState.last_indexed) {
+                 filesToIndex.push(f);
+               }
+             }
+           }
+         }
+       }
+       catch (e) {
+         // Fallback to full scan if git fails
+         filesToIndex = await getAllSourceFiles();
+       }
+     }
+     const total = filesToIndex.length;
+     let processed = 0;
+     if (total === 0) {
+       task.progress = 100;
+       task.status = "completed";
+       task.completed_at = new Date().toISOString();
+       await fs.writeFile(taskPath, JSON.stringify(task, null, 2));
+       return;
+     }
+     for (const file of filesToIndex) {
+       const filePath = path.join(PROJECT_ROOT, file);
+       if (fsSync.existsSync(filePath)) {
+         await indexFile(filePath);
+       }
+       processed++;
+       // Update progress periodically
+       if (processed % 5 === 0 || processed === total) {
+         task.progress = Math.round((processed / total) * 100);
+         await fs.writeFile(taskPath, JSON.stringify(task, null, 2));
+       }
+     }
+     // Complete task
+     task.status = "completed";
+     task.completed_at = new Date().toISOString();
+     task.progress = 100;
+     await fs.writeFile(taskPath, JSON.stringify(task, null, 2));
+   }
+   catch (error) {
+     // Update task with error
+     const taskPath = path.join(PERF_DIR, `${taskId}.json`);
+     try {
+       if (fsSync.existsSync(taskPath)) {
+         const task = JSON.parse(await fs.readFile(taskPath, 'utf-8'));
+         task.status = "failed";
+         task.error = error instanceof Error ? error.message : `${error}`;
+         task.completed_at = new Date().toISOString();
+         await fs.writeFile(taskPath, JSON.stringify(task, null, 2));
+       }
+     }
+     catch (writeError) {
+       console.error("Failed to update task error state", writeError);
+     }
+   }
+ }
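+ // Aggregates raw metric records into success rate, average duration/memory, and per-operation counts.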
+ function calculateMetricsSummary(metrics) {
+   if (metrics.length === 0) {
+     return {
+       total_operations: 0,
+       success_rate: 0,
+       avg_duration_ms: 0,
+       avg_memory_mb: 0
+     };
+   }
+   const successCount = metrics.filter(m => m.success).length;
+   const totalDuration = metrics.reduce((sum, m) => sum + m.duration_ms, 0);
+   const totalMemory = metrics.reduce((sum, m) => sum + m.memory_usage_mb, 0);
+   return {
+     total_operations: metrics.length,
+     success_rate: Math.round((successCount / metrics.length) * 100),
+     avg_duration_ms: Math.round(totalDuration / metrics.length),
+     avg_memory_mb: Math.round(totalMemory / metrics.length * 100) / 100,
+     operations_by_type: metrics.reduce((acc, m) => {
+       acc[m.operation] = (acc[m.operation] || 0) + 1;
+       return acc;
+     }, {})
+   };
+ }