@vibecheck-ai/mcp 24.6.4 → 24.6.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/README.md +2 -20
  2. package/dist/index.js +1900 -1719
  3. package/package.json +5 -5
package/dist/index.js CHANGED
@@ -3,7 +3,7 @@ import { createRequire } from 'module';
3
3
  import { fileURLToPath, pathToFileURL } from 'url';
4
4
  import * as path2 from 'path';
5
5
  import path2__default, { dirname, join, extname, resolve, normalize } from 'path';
6
- import { SCAN_ENGINE_FOCUS_PRESET_NAMES, FEATURE_NAMES, DAILY_SCAN_LIMIT_UPGRADE_URL, formatDailyScanLimitMessage, dashboardFindingUrl, gateCanonicalScanReportFindings, getTrustScoreStatus, formatTrustScoreMcp, buildGatedScanResponse, formatFindingSeverityBreakdown, ENGINE_FOCUS_PRESETS, computeTrustScore, normalizeCanonicalScanReport, planHasApiSurface, canAccessFeature, getMinPlanForApiSurface, buildCliUpgradeBlock, getMinimumPlanForFeature, getQuotas, normalizePlanId, fetchCanonicalAccess } from './chunk-JIMU4YXW.js';
6
+ import { SCAN_ENGINE_FOCUS_PRESET_NAMES, FEATURE_NAMES, DAILY_SCAN_LIMIT_UPGRADE_URL, formatDailyScanLimitMessage, dashboardFindingUrl, gateCanonicalScanReportFindings, getTrustScoreStatus, formatTrustScoreMcp, buildGatedScanResponse, formatFindingSeverityBreakdown, ENGINE_FOCUS_PRESETS, computeTrustScore, normalizeCanonicalScanReport, planHasApiSurface, canAccessFeature, getMinPlanForApiSurface, buildCliUpgradeBlock, getMinimumPlanForFeature, getQuotas, normalizePlanId, fetchCanonicalAccess } from './chunk-XQUM7V7W.js';
7
7
  import './chunk-MUP4JXOF.js';
8
8
  import './chunk-DDTUTWRY.js';
9
9
  import { require_typescript } from './chunk-QFDZMUGO.js';
@@ -14,7 +14,7 @@ import './chunk-G3FQJC2H.js';
14
14
  import './chunk-NR36RTVO.js';
15
15
  import './chunk-JZSHXEYP.js';
16
16
  import './chunk-QYXENOVK.js';
17
- import './chunk-HSG3IFNQ.js';
17
+ import './chunk-LQSBUKYZ.js';
18
18
  import './chunk-5DADZJ3D.js';
19
19
  import './chunk-43XAAYST.js';
20
20
  import './chunk-F34MHA6A.js';
@@ -18048,1850 +18048,1850 @@ rules:
18048
18048
  target:
18049
18049
  match: "src/repositories/**"
18050
18050
  `;
18051
-
18052
- // ../context-engine/dist/chunk-HMKLYBWJ.js
18053
- var import_better_sqlite3 = __toESM(require_lib(), 1);
18054
- var import_fast_glob2 = __toESM(require_out4(), 1);
18055
- var SCHEMA_VERSION = 1;
18056
- var PersistentIndex = class {
18057
- db;
18058
- config;
18051
+ var ContextSynthesizer = class {
18059
18052
  rootPath;
18060
- constructor(config) {
18061
- this.rootPath = config.rootPath;
18062
- this.config = {
18063
- rootPath: config.rootPath,
18064
- dbPath: config.dbPath ?? join(config.rootPath, ".vibecheck", "index.db"),
18065
- includePatterns: config.includePatterns ?? ["**/*.{ts,tsx,js,jsx,py,rs,go,java,c,cpp,h,hpp,rb,swift,kt,lua,zig}"],
18066
- excludePatterns: config.excludePatterns ?? [
18067
- "**/node_modules/**",
18068
- "**/dist/**",
18069
- "**/build/**",
18070
- "**/.next/**",
18071
- "**/.git/**",
18072
- "**/coverage/**",
18073
- "**/.turbo/**",
18074
- "**/__pycache__/**",
18075
- "**/target/**",
18076
- "**/.mcp_data/**"
18077
- ],
18078
- maxFileSize: config.maxFileSize ?? 5e5,
18079
- maxFiles: config.maxFiles ?? 1e4,
18080
- storeContent: config.storeContent ?? true
18081
- };
18082
- const dbDir = join(this.config.dbPath, "..");
18083
- mkdirSync(dbDir, { recursive: true });
18084
- this.db = new import_better_sqlite3.default(this.config.dbPath);
18085
- this.db.pragma("journal_mode = WAL");
18086
- this.db.pragma("synchronous = NORMAL");
18087
- this.db.pragma("cache_size = -64000");
18088
- this.initSchema();
18089
- }
18090
- // ═══════════════════════════════════════════════════════════════════════════
18091
- // SCHEMA
18092
- // ═══════════════════════════════════════════════════════════════════════════
18093
- initSchema() {
18094
- const version = this.getSchemaVersion();
18095
- if (version === SCHEMA_VERSION) return;
18096
- this.db.exec(`
18097
- DROP TABLE IF EXISTS file_hashes;
18098
- DROP TABLE IF EXISTS files;
18099
- DROP TABLE IF EXISTS symbols;
18100
- DROP TABLE IF EXISTS imports;
18101
- DROP TABLE IF EXISTS call_edges;
18102
- DROP TABLE IF EXISTS routes;
18103
- DROP TABLE IF EXISTS services;
18104
- DROP TABLE IF EXISTS embeddings;
18105
- DROP TABLE IF EXISTS meta;
18106
-
18107
- CREATE TABLE meta (
18108
- key TEXT PRIMARY KEY,
18109
- value TEXT NOT NULL
18110
- );
18111
-
18112
- CREATE TABLE file_hashes (
18113
- relative_path TEXT PRIMARY KEY,
18114
- content_hash TEXT NOT NULL,
18115
- size_bytes INTEGER NOT NULL,
18116
- modified_ms INTEGER NOT NULL,
18117
- indexed_at INTEGER NOT NULL DEFAULT (unixepoch('now'))
18118
- );
18119
-
18120
- CREATE TABLE files (
18121
- id TEXT PRIMARY KEY,
18122
- path TEXT NOT NULL,
18123
- relative_path TEXT NOT NULL UNIQUE,
18124
- language TEXT NOT NULL,
18125
- line_count INTEGER NOT NULL,
18126
- exports TEXT NOT NULL DEFAULT '[]',
18127
- content TEXT
18128
- );
18129
-
18130
- CREATE TABLE symbols (
18131
- id TEXT PRIMARY KEY,
18132
- name TEXT NOT NULL,
18133
- kind TEXT NOT NULL,
18134
- file_path TEXT NOT NULL,
18135
- start_line INTEGER NOT NULL,
18136
- end_line INTEGER NOT NULL,
18137
- exported INTEGER NOT NULL DEFAULT 0,
18138
- async INTEGER NOT NULL DEFAULT 0,
18139
- params INTEGER,
18140
- branches INTEGER,
18141
- signature TEXT
18142
- );
18143
-
18144
- CREATE TABLE imports (
18145
- id INTEGER PRIMARY KEY AUTOINCREMENT,
18146
- file_id TEXT NOT NULL,
18147
- file_path TEXT NOT NULL,
18148
- source_path TEXT NOT NULL,
18149
- resolved_path TEXT NOT NULL DEFAULT '',
18150
- imported_symbols TEXT NOT NULL DEFAULT '[]',
18151
- is_type_only INTEGER NOT NULL DEFAULT 0,
18152
- is_dynamic INTEGER NOT NULL DEFAULT 0,
18153
- line INTEGER NOT NULL DEFAULT 0
18154
- );
18155
-
18156
- CREATE TABLE call_edges (
18157
- id INTEGER PRIMARY KEY AUTOINCREMENT,
18158
- caller_id TEXT NOT NULL,
18159
- callee_id TEXT NOT NULL,
18160
- caller_name TEXT NOT NULL,
18161
- callee_name TEXT NOT NULL,
18162
- caller_file TEXT NOT NULL,
18163
- callee_file TEXT NOT NULL
18164
- );
18165
-
18166
- CREATE TABLE routes (
18167
- id INTEGER PRIMARY KEY AUTOINCREMENT,
18168
- path TEXT NOT NULL,
18169
- method TEXT NOT NULL,
18170
- handler TEXT NOT NULL,
18171
- file TEXT NOT NULL,
18172
- line INTEGER NOT NULL DEFAULT 0,
18173
- middleware TEXT NOT NULL DEFAULT '[]',
18174
- auth INTEGER
18175
- );
18176
-
18177
- CREATE TABLE services (
18178
- id TEXT PRIMARY KEY,
18179
- name TEXT NOT NULL,
18180
- root_path TEXT NOT NULL DEFAULT ''
18181
- );
18182
-
18183
- CREATE TABLE embeddings (
18184
- path TEXT NOT NULL,
18185
- chunk_id TEXT NOT NULL,
18186
- chunk_type TEXT NOT NULL DEFAULT 'file',
18187
- content_hash TEXT NOT NULL,
18188
- vector BLOB NOT NULL,
18189
- metadata TEXT NOT NULL DEFAULT '{}',
18190
- PRIMARY KEY (path, chunk_id)
18191
- );
18192
-
18193
- -- Indexes for fast lookups
18194
- CREATE INDEX idx_symbols_file ON symbols(file_path);
18195
- CREATE INDEX idx_symbols_name ON symbols(name);
18196
- CREATE INDEX idx_symbols_kind ON symbols(kind);
18197
- CREATE INDEX idx_imports_file ON imports(file_path);
18198
- CREATE INDEX idx_imports_source ON imports(source_path);
18199
- CREATE INDEX idx_imports_resolved ON imports(resolved_path);
18200
- CREATE INDEX idx_call_edges_caller ON call_edges(caller_file);
18201
- CREATE INDEX idx_call_edges_callee ON call_edges(callee_file);
18202
- CREATE INDEX idx_embeddings_type ON embeddings(chunk_type);
18203
- `);
18204
- this.setMeta("schema_version", String(SCHEMA_VERSION));
18205
- this.setMeta("created_at", (/* @__PURE__ */ new Date()).toISOString());
18206
- }
18207
- getSchemaVersion() {
18208
- try {
18209
- const row = this.db.prepare("SELECT value FROM meta WHERE key = ?").get("schema_version");
18210
- return row ? Number.parseInt(row.value, 10) : 0;
18211
- } catch {
18212
- return 0;
18213
- }
18214
- }
18215
- setMeta(key, value) {
18216
- this.db.prepare("INSERT OR REPLACE INTO meta (key, value) VALUES (?, ?)").run(key, value);
18053
+ data;
18054
+ dna;
18055
+ graph;
18056
+ ruleResult;
18057
+ constructor(rootPath, data, dna, graph, ruleResult) {
18058
+ this.rootPath = rootPath;
18059
+ this.data = data;
18060
+ this.dna = dna;
18061
+ this.graph = graph;
18062
+ this.ruleResult = ruleResult || null;
18217
18063
  }
18218
- getMeta(key) {
18219
- const row = this.db.prepare("SELECT value FROM meta WHERE key = ?").get(key);
18220
- return row?.value ?? null;
18064
+ /**
18065
+ * Synthesize the full context — the complete brain dump for AI agents.
18066
+ */
18067
+ synthesize() {
18068
+ const projectIdentity = this.buildProjectIdentity();
18069
+ const archRules = this.buildArchRuleSummary();
18070
+ const activeViolations = this.ruleResult?.violations || [];
18071
+ const codebaseDNA = this.buildDNASummary();
18072
+ const fileContexts = this.buildFileContexts();
18073
+ const taskPlaybooks = this.buildTaskPlaybooks();
18074
+ const verificationSteps = this.buildVerificationSteps();
18075
+ const riskBriefing = this.buildRiskBriefing();
18076
+ return {
18077
+ version: "2.0.0",
18078
+ generatedAt: (/* @__PURE__ */ new Date()).toISOString(),
18079
+ projectIdentity,
18080
+ architecturalRules: archRules,
18081
+ activeViolations,
18082
+ codebaseDNA,
18083
+ fileContexts,
18084
+ taskPlaybooks,
18085
+ verificationSteps,
18086
+ riskBriefing
18087
+ };
18221
18088
  }
18222
- // ═══════════════════════════════════════════════════════════════════════════
18223
- // INCREMENTAL INDEXING
18224
- // ═══════════════════════════════════════════════════════════════════════════
18225
18089
  /**
18226
- * Discover files, diff against stored hashes, return only changed files.
18090
+ * Generate context for a specific file — what an AI agent needs to know
18091
+ * before editing this file.
18227
18092
  */
18228
- async diffFiles() {
18229
- const discoveredFiles = await this.discoverFiles();
18230
- const storedHashes = this.getStoredHashes();
18231
- const changed = [];
18232
- const unchanged = [];
18233
- const currentPaths = /* @__PURE__ */ new Set();
18234
- for (const file of discoveredFiles) {
18235
- currentPaths.add(file.relativePath);
18236
- const stored = storedHashes.get(file.relativePath);
18237
- if (!stored || stored.contentHash !== file.contentHash) {
18238
- changed.push(file.relativePath);
18239
- } else {
18240
- unchanged.push(file.relativePath);
18241
- }
18242
- }
18243
- const deleted = [];
18244
- for (const storedPath of storedHashes.keys()) {
18245
- if (!currentPaths.has(storedPath)) {
18246
- deleted.push(storedPath);
18247
- }
18248
- }
18249
- return { changed, deleted, unchanged };
18093
+ synthesizeForFile(filePath) {
18094
+ const rel = this.toRelative(filePath);
18095
+ const file = this.data.files.find((f) => f.relativePath === rel || f.path === filePath);
18096
+ if (!file) return null;
18097
+ return this.buildSingleFileContext(file);
18250
18098
  }
18251
18099
  /**
18252
- * Full reindex — scan all files and store data.
18253
- * Returns parsed CodebaseData + stats.
18100
+ * Generate a compact markdown context document for IDE rules.
18101
+ * This replaces the old static rule generation with intelligence-driven context.
18254
18102
  */
18255
- async reindex(parser4) {
18256
- const startMs = Date.now();
18257
- const { changed, deleted, unchanged } = await this.diffFiles();
18258
- if (deleted.length > 0) {
18259
- this.removeFiles(deleted);
18103
+ generateContextDocument() {
18104
+ const ctx = this.synthesize();
18105
+ const lines = [];
18106
+ lines.push(`# ${ctx.projectIdentity.name} \u2014 AI Context`);
18107
+ lines.push(`<!-- Generated by @repo/context-engine at ${ctx.generatedAt} -->`);
18108
+ lines.push("");
18109
+ lines.push("## Project Identity");
18110
+ lines.push(`- **Stack**: ${ctx.projectIdentity.stack}`);
18111
+ lines.push(`- **Architecture**: ${ctx.projectIdentity.architecture}`);
18112
+ if (ctx.projectIdentity.keyPatterns.length > 0) {
18113
+ lines.push(`- **Key Patterns**: ${ctx.projectIdentity.keyPatterns.join(", ")}`);
18260
18114
  }
18261
- const parsedFiles = [];
18262
- for (const relPath of changed) {
18263
- const absPath = join(this.rootPath, relPath);
18264
- try {
18265
- const parsed = await parser4.parseFile(absPath, relPath);
18266
- parsedFiles.push(parsed);
18267
- } catch {
18115
+ lines.push("");
18116
+ if (ctx.codebaseDNA.conventions.length > 0) {
18117
+ lines.push("## Conventions (Auto-Discovered)");
18118
+ for (const conv of ctx.codebaseDNA.conventions) {
18119
+ lines.push(`- ${conv}`);
18268
18120
  }
18121
+ lines.push("");
18269
18122
  }
18270
- this.storeFiles(parsedFiles);
18271
- const data = this.loadCodebaseData();
18272
- const stats = {
18273
- totalFiles: data.files.length,
18274
- totalSymbols: data.symbols.length,
18275
- totalImports: data.imports.length,
18276
- indexedAt: (/* @__PURE__ */ new Date()).toISOString(),
18277
- reindexedFiles: changed.length,
18278
- skippedFiles: unchanged.length,
18279
- durationMs: Date.now() - startMs
18280
- };
18281
- this.setMeta("last_index_at", stats.indexedAt);
18282
- this.setMeta("last_index_stats", JSON.stringify(stats));
18283
- return { data, stats };
18284
- }
18285
- /**
18286
- * Incremental update — only reindex specific files.
18287
- */
18288
- async reindexFiles(relativePaths, parser4) {
18289
- const startMs = Date.now();
18290
- this.removeFiles(relativePaths);
18291
- const parsedFiles = [];
18292
- for (const relPath of relativePaths) {
18293
- const absPath = join(this.rootPath, relPath);
18294
- try {
18295
- const parsed = await parser4.parseFile(absPath, relPath);
18296
- parsedFiles.push(parsed);
18297
- } catch {
18123
+ if (ctx.architecturalRules.length > 0) {
18124
+ lines.push("## Architecture Rules");
18125
+ for (const rule of ctx.architecturalRules) {
18126
+ const icon = rule.severity === "error" ? "MUST" : rule.severity === "warning" ? "SHOULD" : "MAY";
18127
+ lines.push(`- **[${icon}]** ${rule.name}: ${rule.description}`);
18128
+ if (rule.violationCount > 0) {
18129
+ lines.push(` - ${rule.violationCount} active violations`);
18130
+ }
18298
18131
  }
18132
+ lines.push("");
18299
18133
  }
18300
- this.storeFiles(parsedFiles);
18301
- const totalFiles = this.db.prepare("SELECT COUNT(*) as cnt FROM files").get();
18302
- const totalSymbols = this.db.prepare("SELECT COUNT(*) as cnt FROM symbols").get();
18303
- const totalImports = this.db.prepare("SELECT COUNT(*) as cnt FROM imports").get();
18304
- return {
18305
- totalFiles: totalFiles.cnt,
18306
- totalSymbols: totalSymbols.cnt,
18307
- totalImports: totalImports.cnt,
18308
- indexedAt: (/* @__PURE__ */ new Date()).toISOString(),
18309
- reindexedFiles: parsedFiles.length,
18310
- skippedFiles: 0,
18311
- durationMs: Date.now() - startMs
18312
- };
18134
+ if (ctx.codebaseDNA.boundaries.length > 0) {
18135
+ lines.push("## Module Boundaries");
18136
+ for (const boundary of ctx.codebaseDNA.boundaries) {
18137
+ lines.push(`- ${boundary}`);
18138
+ }
18139
+ lines.push("");
18140
+ }
18141
+ if (ctx.codebaseDNA.hotFiles.length > 0) {
18142
+ lines.push("## High-Impact Files (Edit With Care)");
18143
+ for (const file of ctx.codebaseDNA.hotFiles.slice(0, 10)) {
18144
+ lines.push(`- \`${file}\``);
18145
+ }
18146
+ lines.push("");
18147
+ }
18148
+ if (ctx.riskBriefing.securityConcerns.length > 0 || ctx.riskBriefing.testGaps.length > 0) {
18149
+ lines.push("## Risk Areas");
18150
+ for (const concern of ctx.riskBriefing.securityConcerns) {
18151
+ lines.push(`- ${concern}`);
18152
+ }
18153
+ for (const gap of ctx.riskBriefing.testGaps.slice(0, 5)) {
18154
+ lines.push(`- ${gap}`);
18155
+ }
18156
+ lines.push("");
18157
+ }
18158
+ if (ctx.projectIdentity.noGoZones.length > 0) {
18159
+ lines.push("## No-Go Zones");
18160
+ for (const zone of ctx.projectIdentity.noGoZones) {
18161
+ lines.push(`- ${zone}`);
18162
+ }
18163
+ lines.push("");
18164
+ }
18165
+ if (ctx.taskPlaybooks.length > 0) {
18166
+ lines.push("## Task Playbooks");
18167
+ for (const playbook of ctx.taskPlaybooks) {
18168
+ lines.push(`### ${playbook.taskType}`);
18169
+ for (const step of playbook.steps) {
18170
+ lines.push(`1. ${step}`);
18171
+ }
18172
+ if (playbook.mustVerify.length > 0) {
18173
+ lines.push(`**Verify**: ${playbook.mustVerify.join(", ")}`);
18174
+ }
18175
+ lines.push("");
18176
+ }
18177
+ }
18178
+ if (ctx.verificationSteps.length > 0) {
18179
+ lines.push("## Verification Protocol");
18180
+ for (const step of ctx.verificationSteps) {
18181
+ lines.push(`### On ${step.trigger}`);
18182
+ for (const check of step.checks) {
18183
+ lines.push(`- ${check}`);
18184
+ }
18185
+ if (step.commands.length > 0) {
18186
+ lines.push(`**Run**: \`${step.commands.join(" && ")}\``);
18187
+ }
18188
+ lines.push("");
18189
+ }
18190
+ }
18191
+ lines.push("## Codebase Health");
18192
+ lines.push(`- **Overall**: ${this.dna.healthScore.overall}/100`);
18193
+ const dims = this.dna.healthScore.dimensions;
18194
+ lines.push(`- Architecture: ${dims.architecture} | Tests: ${dims.testCoverage} | Conventions: ${dims.conventions} | Dependencies: ${dims.dependencies}`);
18195
+ lines.push("");
18196
+ lines.push("---");
18197
+ lines.push("<!-- context-engine:v2 -->");
18198
+ return lines.join("\n");
18313
18199
  }
18314
18200
  // ═══════════════════════════════════════════════════════════════════════════
18315
- // DATA LOADING (warm start)
18201
+ // IDENTITY
18316
18202
  // ═══════════════════════════════════════════════════════════════════════════
18317
- /**
18318
- * Load full CodebaseData from the persistent store.
18319
- * This is the warm-start path — sub-second for indexed repos.
18320
- */
18321
- loadCodebaseData() {
18322
- const files = this.loadFiles();
18323
- const symbols = this.loadSymbols();
18324
- const imports = this.loadImports();
18325
- const callEdges = this.loadCallEdges();
18326
- const routes = this.loadRoutes();
18327
- const services = this.loadServices();
18328
- return { files, symbols, imports, callEdges, routes, services };
18329
- }
18330
- /**
18331
- * Check if the index exists and has data.
18332
- */
18333
- isPopulated() {
18334
- try {
18335
- const row = this.db.prepare("SELECT COUNT(*) as cnt FROM files").get();
18336
- return row.cnt > 0;
18337
- } catch {
18338
- return false;
18203
+ buildProjectIdentity() {
18204
+ const fp = this.dna.fingerprint;
18205
+ const stack = [fp.framework, fp.language, fp.orm, fp.validator, fp.authLib, fp.router].filter(Boolean).join(" | ");
18206
+ const keyPatterns = this.dna.patterns.map((p) => p.name);
18207
+ const criticalPaths = this.dna.hotspots.slice(0, 5).map((h) => h.file);
18208
+ const noGoZones = [];
18209
+ if (this.ruleResult) {
18210
+ const errorRules = this.ruleResult.violations.filter((v) => v.severity === "error");
18211
+ const uniqueMessages = [...new Set(errorRules.map((v) => v.message))];
18212
+ noGoZones.push(...uniqueMessages.slice(0, 5));
18339
18213
  }
18340
- }
18341
- /**
18342
- * Get the last index timestamp.
18343
- */
18344
- getLastIndexedAt() {
18345
- return this.getMeta("last_index_at");
18346
- }
18347
- /**
18348
- * Get the last index stats.
18349
- */
18350
- getLastIndexStats() {
18351
- const raw = this.getMeta("last_index_stats");
18352
- if (!raw) return null;
18353
- try {
18354
- return JSON.parse(raw);
18355
- } catch {
18356
- return null;
18214
+ for (const cycle of this.graph.cycles) {
18215
+ noGoZones.push(`Circular dependency: ${cycle.nodes.slice(0, 3).join(" \u2192 ")}...`);
18357
18216
  }
18217
+ const architecture = this.dna.conventions.filter((c) => c.area === "structure").map((c) => c.description).join("; ") || fp.framework;
18218
+ return {
18219
+ name: fp.name,
18220
+ stack,
18221
+ architecture,
18222
+ keyPatterns,
18223
+ criticalPaths,
18224
+ noGoZones: noGoZones.slice(0, 10)
18225
+ };
18358
18226
  }
18359
18227
  // ═══════════════════════════════════════════════════════════════════════════
18360
- // EMBEDDING STORAGE
18228
+ // RULES SUMMARY
18361
18229
  // ═══════════════════════════════════════════════════════════════════════════
18362
- /**
18363
- * Store a chunk embedding (file-level, function-level, etc.)
18364
- */
18365
- storeEmbedding(path10, chunkId, chunkType, contentHash, vector, metadata) {
18366
- const vectorBuf = Buffer.from(new Float32Array(vector).buffer);
18367
- this.db.prepare(`
18368
- INSERT OR REPLACE INTO embeddings (path, chunk_id, chunk_type, content_hash, vector, metadata)
18369
- VALUES (?, ?, ?, ?, ?, ?)
18370
- `).run(path10, chunkId, chunkType, contentHash, vectorBuf, JSON.stringify(metadata ?? {}));
18230
+ buildArchRuleSummary() {
18231
+ if (!this.ruleResult) return [];
18232
+ const breakdown = this.ruleResult.ruleBreakdown;
18233
+ return Object.entries(breakdown).map(([ruleId, count]) => {
18234
+ const violation = this.ruleResult.violations.find((v) => v.ruleId === ruleId);
18235
+ return {
18236
+ id: ruleId,
18237
+ name: violation?.ruleName || ruleId,
18238
+ type: "import_forbidden",
18239
+ severity: violation?.severity || "warning",
18240
+ scope: violation?.sourceSymbol.filePath || "",
18241
+ description: violation?.message || "",
18242
+ violationCount: count
18243
+ };
18244
+ });
18371
18245
  }
18372
- /**
18373
- * Load embedding for a specific chunk.
18374
- */
18375
- loadEmbedding(path10, chunkId) {
18376
- const row = this.db.prepare("SELECT vector, content_hash, metadata FROM embeddings WHERE path = ? AND chunk_id = ?").get(path10, chunkId);
18377
- if (!row) return null;
18246
+ // ═══════════════════════════════════════════════════════════════════════════
18247
+ // DNA SUMMARY
18248
+ // ═══════════════════════════════════════════════════════════════════════════
18249
+ buildDNASummary() {
18378
18250
  return {
18379
- vector: Array.from(new Float32Array(row.vector.buffer, row.vector.byteOffset, row.vector.byteLength / 4)),
18380
- contentHash: row.content_hash,
18381
- metadata: JSON.parse(row.metadata)
18251
+ conventions: this.dna.conventions.filter((c) => c.confidence > 0.5).map((c) => c.description),
18252
+ patterns: this.dna.patterns.map((p) => `${p.name}: ${p.description}`),
18253
+ boundaries: this.dna.boundaries.filter((b) => b.importCount > 3).map((b) => `${b.from} \u2192 ${b.to} (${b.importCount} imports${b.isCircular ? ", CIRCULAR" : ""})`),
18254
+ hotFiles: this.dna.hotspots.slice(0, 10).map((h) => h.file),
18255
+ riskAreas: this.dna.riskMap.filter((r) => r.riskLevel === "critical" || r.riskLevel === "high").map((r) => `${r.file}: ${r.factors[0]}`)
18382
18256
  };
18383
18257
  }
18384
- /**
18385
- * Load all embeddings of a given type for vector search.
18386
- */
18387
- loadEmbeddingsByType(chunkType) {
18388
- const rows = this.db.prepare("SELECT path, chunk_id, vector, content_hash, metadata FROM embeddings WHERE chunk_type = ?").all(chunkType);
18389
- return rows.map((row) => ({
18390
- path: row.path,
18391
- chunkId: row.chunk_id,
18392
- vector: Array.from(new Float32Array(row.vector.buffer, row.vector.byteOffset, row.vector.byteLength / 4)),
18393
- contentHash: row.content_hash,
18394
- metadata: JSON.parse(row.metadata)
18395
- }));
18396
- }
18397
- /**
18398
- * Remove stale embeddings for files no longer in the index.
18399
- */
18400
- pruneStaleEmbeddings() {
18401
- const result = this.db.prepare(`
18402
- DELETE FROM embeddings WHERE path NOT IN (SELECT relative_path FROM files)
18403
- `).run();
18404
- return result.changes;
18405
- }
18406
18258
  // ═══════════════════════════════════════════════════════════════════════════
18407
- // QUERYING
18259
+ // FILE CONTEXTS
18408
18260
  // ═══════════════════════════════════════════════════════════════════════════
18409
- /**
18410
- * Get all symbols in a specific file.
18411
- */
18412
- getSymbolsForFile(filePath) {
18413
- return this.loadSymbolsWhere("file_path = ?", [filePath]);
18414
- }
18415
- /**
18416
- * Search symbols by name pattern.
18417
- */
18418
- searchSymbols(namePattern, limit = 50) {
18419
- return this.loadSymbolsWhere("name LIKE ?", [`%${namePattern}%`]).slice(0, limit);
18420
- }
18421
- /**
18422
- * Get files that import a given file.
18423
- */
18424
- getDependents(filePath) {
18425
- const rows = this.db.prepare("SELECT DISTINCT file_path FROM imports WHERE resolved_path = ?").all(filePath);
18426
- return rows.map((r) => r.file_path);
18261
+ buildFileContexts() {
18262
+ const contexts = /* @__PURE__ */ new Map();
18263
+ for (const file of this.data.files) {
18264
+ contexts.set(file.relativePath, this.buildSingleFileContext(file));
18265
+ }
18266
+ return contexts;
18427
18267
  }
18428
- /**
18429
- * Get files that a given file imports.
18430
- */
18431
- getDependencies(filePath) {
18432
- const rows = this.db.prepare('SELECT DISTINCT resolved_path FROM imports WHERE file_path = ? AND resolved_path != ""').all(filePath);
18433
- return rows.map((r) => r.resolved_path);
18268
+ buildSingleFileContext(file) {
18269
+ const rel = file.relativePath;
18270
+ const role = this.classifyRole(file);
18271
+ const graphNode = this.graph.nodes.find((n) => n.relativePath === rel);
18272
+ const layer = graphNode?.layer;
18273
+ const dependsOn = this.graph.edges.filter((e) => e.from === rel).map((e) => e.to);
18274
+ const dependedOnBy = this.graph.edges.filter((e) => e.to === rel).map((e) => e.from);
18275
+ const applicableRules = [];
18276
+ if (this.ruleResult) {
18277
+ for (const v of this.ruleResult.violations) {
18278
+ if (v.sourceSymbol.filePath.includes(rel) || v.targetSymbol && v.targetSymbol.filePath.includes(rel)) {
18279
+ if (!applicableRules.includes(v.ruleId)) applicableRules.push(v.ruleId);
18280
+ }
18281
+ }
18282
+ }
18283
+ const conventions = this.dna.conventions.filter((c) => this.conventionAppliesToFile(c.area, file)).map((c) => c.description);
18284
+ const patterns = this.dna.patterns.filter((p) => p.fileMatches.some((m) => m === rel)).map((p) => p.name);
18285
+ const riskEntry = this.dna.riskMap.find((r) => r.file === rel);
18286
+ const riskLevel = riskEntry?.riskLevel || "low";
18287
+ const relatedFiles = this.findRelatedFiles(file, role).slice(0, 8);
18288
+ const editGuidance = this.generateEditGuidance(file, role, layer, dependedOnBy, conventions);
18289
+ return {
18290
+ filePath: file.path,
18291
+ role,
18292
+ layer,
18293
+ dependsOn,
18294
+ dependedOnBy,
18295
+ applicableRules,
18296
+ conventions,
18297
+ patterns,
18298
+ riskLevel,
18299
+ relatedFiles,
18300
+ editGuidance
18301
+ };
18434
18302
  }
18435
- // ═══════════════════════════════════════════════════════════════════════════
18436
- // CLEANUP
18437
- // ═══════════════════════════════════════════════════════════════════════════
18438
- /**
18439
- * Close the database connection.
18440
- */
18441
- close() {
18442
- this.db.close();
18303
+ classifyRole(file) {
18304
+ const rel = file.relativePath.toLowerCase();
18305
+ if (rel.includes(".test.") || rel.includes(".spec.") || rel.includes("__tests__")) return "test";
18306
+ if (rel.includes("fixture") || rel.includes("mock")) return "fixture";
18307
+ if (rel.includes("migration")) return "migration";
18308
+ if (rel.match(/\.(css|scss|less|styl)$/)) return "style";
18309
+ if (rel.includes(".config.") || rel.includes("config/") || rel === "tsconfig.json") return "config";
18310
+ if (rel.includes("middleware")) return "middleware";
18311
+ if (rel.includes("/api/") || rel.includes("route")) return "route-handler";
18312
+ if (rel.includes("service") || rel.includes("Service")) return "service";
18313
+ if (rel.includes("repositor") || rel.includes("Repositor")) return "repository";
18314
+ if (rel.endsWith(".tsx") && !rel.includes("page.")) return "component";
18315
+ if (rel.includes("/types") || rel.endsWith(".d.ts")) return "type";
18316
+ if (rel.includes("util") || rel.includes("helper") || rel.includes("lib/")) return "util";
18317
+ if (rel.includes("script") || rel.includes("bin/")) return "script";
18318
+ if (rel.match(/^(src\/)?index\.|^(src\/)?main\.|^(src\/)?app\./)) return "entry";
18319
+ return "unknown";
18443
18320
  }
18444
- /**
18445
- * Wipe all data and rebuild schema.
18446
- */
18447
- reset() {
18448
- this.db.exec("DROP TABLE IF EXISTS file_hashes");
18449
- this.db.exec("DROP TABLE IF EXISTS files");
18450
- this.db.exec("DROP TABLE IF EXISTS symbols");
18451
- this.db.exec("DROP TABLE IF EXISTS imports");
18452
- this.db.exec("DROP TABLE IF EXISTS call_edges");
18453
- this.db.exec("DROP TABLE IF EXISTS routes");
18454
- this.db.exec("DROP TABLE IF EXISTS services");
18455
- this.db.exec("DROP TABLE IF EXISTS embeddings");
18456
- this.db.exec("DROP TABLE IF EXISTS meta");
18457
- this.initSchema();
18321
+ conventionAppliesToFile(area, file) {
18322
+ switch (area) {
18323
+ case "naming":
18324
+ return true;
18325
+ case "imports":
18326
+ return file.relativePath.endsWith(".ts") || file.relativePath.endsWith(".tsx");
18327
+ case "exports":
18328
+ return file.exports.length > 0;
18329
+ case "testing":
18330
+ return file.relativePath.includes(".test.") || file.relativePath.includes(".spec.");
18331
+ case "error-handling":
18332
+ return !file.relativePath.includes(".test.");
18333
+ case "types":
18334
+ return file.relativePath.endsWith(".ts") || file.relativePath.endsWith(".tsx");
18335
+ default:
18336
+ return true;
18337
+ }
18458
18338
  }
18459
- // ═══════════════════════════════════════════════════════════════════════════
18460
- // PRIVATE — File Discovery
18461
- // ═══════════════════════════════════════════════════════════════════════════
18462
- async discoverFiles() {
18463
- const files = await (0, import_fast_glob2.glob)(this.config.includePatterns, {
18464
- cwd: this.rootPath,
18465
- ignore: this.config.excludePatterns,
18466
- absolute: false,
18467
- dot: false,
18468
- onlyFiles: true
18469
- });
18470
- const hashes = [];
18471
- const limit = this.config.maxFiles;
18472
- for (const relPath of files.slice(0, limit)) {
18473
- const absPath = join(this.rootPath, relPath);
18474
- try {
18475
- const fileStat = await stat(absPath);
18476
- if (fileStat.size > this.config.maxFileSize) continue;
18477
- const content = await readFile(absPath, "utf-8");
18478
- hashes.push({
18479
- relativePath: relPath.replace(/\\/g, "/"),
18480
- contentHash: hashContent2(content),
18481
- sizeBytes: fileStat.size,
18482
- modifiedMs: Math.floor(fileStat.mtimeMs)
18483
- });
18484
- } catch {
18339
+ findRelatedFiles(file, role) {
18340
+ const related = [];
18341
+ const dir = path2.dirname(file.relativePath);
18342
+ for (const other of this.data.files) {
18343
+ if (other.path === file.path) continue;
18344
+ if (path2.dirname(other.relativePath) === dir) {
18345
+ related.push(other.relativePath);
18485
18346
  }
18486
18347
  }
18487
- return hashes;
18348
+ if (related.length < 5) {
18349
+ for (const other of this.data.files) {
18350
+ if (other.path === file.path) continue;
18351
+ if (related.includes(other.relativePath)) continue;
18352
+ if (this.classifyRole(other) === role) {
18353
+ related.push(other.relativePath);
18354
+ if (related.length >= 8) break;
18355
+ }
18356
+ }
18357
+ }
18358
+ return related;
18488
18359
  }
18489
- getStoredHashes() {
18490
- const rows = this.db.prepare("SELECT relative_path, content_hash, size_bytes, modified_ms FROM file_hashes").all();
18491
- const map = /* @__PURE__ */ new Map();
18492
- for (const row of rows) {
18493
- map.set(row.relative_path, {
18494
- relativePath: row.relative_path,
18495
- contentHash: row.content_hash,
18496
- sizeBytes: row.size_bytes,
18497
- modifiedMs: row.modified_ms
18498
- });
18360
+ generateEditGuidance(file, role, layer, dependedOnBy, conventions) {
18361
+ const guidance = [];
18362
+ if (dependedOnBy.length > 10) {
18363
+ guidance.push(`HIGH IMPACT: ${dependedOnBy.length} files depend on this. Changes have wide blast radius.`);
18499
18364
  }
18500
- return map;
18365
+ switch (role) {
18366
+ case "route-handler":
18367
+ guidance.push("Validate all inputs with schemas before processing.");
18368
+ guidance.push("Return consistent response shapes ({ success, data } or { success, error }).");
18369
+ guidance.push("Ensure authentication middleware is applied to protected endpoints.");
18370
+ break;
18371
+ case "service":
18372
+ guidance.push("Keep business logic here, not in controllers/routes.");
18373
+ guidance.push("Use dependency injection for testability.");
18374
+ if (layer) guidance.push(`This is in the ${layer} layer \u2014 only import from lower layers.`);
18375
+ break;
18376
+ case "repository":
18377
+ guidance.push("Only data access logic belongs here \u2014 no business rules.");
18378
+ guidance.push("Return domain objects, not raw database rows.");
18379
+ break;
18380
+ case "component":
18381
+ guidance.push("Keep components focused and composable.");
18382
+ guidance.push("Extract complex logic to custom hooks.");
18383
+ break;
18384
+ case "middleware":
18385
+ guidance.push("Middleware must call next() or return a response \u2014 never leave the request hanging.");
18386
+ guidance.push("Keep middleware focused on a single concern.");
18387
+ break;
18388
+ case "test":
18389
+ guidance.push("Follow Arrange-Act-Assert pattern.");
18390
+ guidance.push("Test edge cases and error conditions, not just happy path.");
18391
+ break;
18392
+ }
18393
+ for (const conv of conventions.slice(0, 3)) {
18394
+ guidance.push(`Convention: ${conv}`);
18395
+ }
18396
+ return guidance;
18501
18397
  }
18502
18398
  // ═══════════════════════════════════════════════════════════════════════════
18503
- // PRIVATE — Storage
18399
+ // TASK PLAYBOOKS
18504
18400
  // ═══════════════════════════════════════════════════════════════════════════
18505
- storeFiles(parsedFiles) {
18506
- if (parsedFiles.length === 0) return;
18507
- const insertHash = this.db.prepare(
18508
- "INSERT OR REPLACE INTO file_hashes (relative_path, content_hash, size_bytes, modified_ms) VALUES (?, ?, ?, ?)"
18509
- );
18510
- const insertFile = this.db.prepare(
18511
- "INSERT OR REPLACE INTO files (id, path, relative_path, language, line_count, exports, content) VALUES (?, ?, ?, ?, ?, ?, ?)"
18512
- );
18513
- const insertSymbol = this.db.prepare(
18514
- "INSERT OR REPLACE INTO symbols (id, name, kind, file_path, start_line, end_line, exported, async, params, branches, signature) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
18515
- );
18516
- const insertImport = this.db.prepare(
18517
- "INSERT INTO imports (file_id, file_path, source_path, resolved_path, imported_symbols, is_type_only, is_dynamic, line) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"
18518
- );
18519
- const insertCallEdge = this.db.prepare(
18520
- "INSERT INTO call_edges (caller_id, callee_id, caller_name, callee_name, caller_file, callee_file) VALUES (?, ?, ?, ?, ?, ?)"
18521
- );
18522
- const txn = this.db.transaction(() => {
18523
- for (const parsed of parsedFiles) {
18524
- const { file, symbols, imports, callEdges, contentHash } = parsed;
18525
- insertHash.run(file.relativePath, contentHash, 0, Date.now());
18526
- insertFile.run(
18527
- file.id,
18528
- file.path,
18529
- file.relativePath,
18530
- file.language,
18531
- file.lineCount,
18532
- JSON.stringify(file.exports),
18533
- this.config.storeContent ? file.content ?? null : null
18534
- );
18535
- for (const sym of symbols) {
18536
- insertSymbol.run(
18537
- sym.id,
18538
- sym.name,
18539
- sym.kind,
18540
- sym.filePath,
18541
- sym.startLine,
18542
- sym.endLine,
18543
- sym.exported ? 1 : 0,
18544
- sym.async ? 1 : 0,
18545
- sym.params ?? null,
18546
- sym.branches ?? null,
18547
- sym.signature ?? null
18548
- );
18549
- }
18550
- for (const imp of imports) {
18551
- insertImport.run(
18552
- imp.fileId,
18553
- imp.filePath,
18554
- imp.sourcePath,
18555
- imp.resolvedPath,
18556
- JSON.stringify(imp.importedSymbols),
18557
- imp.isTypeOnly ? 1 : 0,
18558
- imp.isDynamic ? 1 : 0,
18559
- imp.line
18560
- );
18561
- }
18562
- for (const edge of callEdges) {
18563
- insertCallEdge.run(
18564
- edge.callerId,
18565
- edge.calleeId,
18566
- edge.callerName,
18567
- edge.calleeName,
18568
- edge.callerFile,
18569
- edge.calleeFile
18570
- );
18571
- }
18572
- }
18401
+ buildTaskPlaybooks() {
18402
+ const fp = this.dna.fingerprint;
18403
+ const playbooks = [];
18404
+ playbooks.push({
18405
+ taskType: "Bug Fix",
18406
+ steps: [
18407
+ "Reproduce the bug and understand the expected vs actual behavior",
18408
+ "Identify the root cause file(s) using the dependency graph",
18409
+ "Write a failing test that reproduces the bug",
18410
+ "Apply the minimal fix at the root cause",
18411
+ "Verify the fix passes the test and does not break existing tests",
18412
+ "Check that the fix does not violate any architecture rules"
18413
+ ],
18414
+ mustRead: this.dna.hotspots.slice(0, 3).map((h) => h.file),
18415
+ mustUpdate: ["The buggy file", "Related test file"],
18416
+ mustVerify: ["All existing tests pass", "New regression test passes", "No new arch rule violations"],
18417
+ stopConditions: ["Never modify tests to make them pass \u2014 fix the code", "Do not change public API signatures without discussion"]
18573
18418
  });
18574
- txn();
18575
- }
18576
- removeFiles(relativePaths) {
18577
- if (relativePaths.length === 0) return;
18578
- const txn = this.db.transaction(() => {
18579
- for (const relPath of relativePaths) {
18580
- const absPath = join(this.rootPath, relPath);
18581
- this.db.prepare("DELETE FROM file_hashes WHERE relative_path = ?").run(relPath);
18582
- this.db.prepare("DELETE FROM files WHERE relative_path = ?").run(relPath);
18583
- this.db.prepare("DELETE FROM symbols WHERE file_path = ? OR file_path = ?").run(absPath, relPath);
18584
- this.db.prepare("DELETE FROM imports WHERE file_path = ? OR file_path = ?").run(absPath, relPath);
18585
- this.db.prepare("DELETE FROM call_edges WHERE caller_file = ? OR callee_file = ? OR caller_file = ? OR callee_file = ?").run(absPath, absPath, relPath, relPath);
18586
- this.db.prepare("DELETE FROM embeddings WHERE path = ?").run(relPath);
18587
- }
18419
+ if (fp.router) {
18420
+ playbooks.push({
18421
+ taskType: "Add API Endpoint",
18422
+ steps: [
18423
+ "Check existing routes in truthpack to avoid duplicates",
18424
+ "Create the route handler following existing patterns",
18425
+ "Add input validation using the project validator",
18426
+ "Add authentication middleware if the route is protected",
18427
+ "Write tests for success, validation failure, and auth failure",
18428
+ "Update the truthpack (run vibecheck scan)"
18429
+ ],
18430
+ mustRead: ["truthpack/routes.json", ...this.dna.patterns.filter((p) => p.category === "api").map((p) => p.exemplar)],
18431
+ mustUpdate: ["Route file", "Test file", "Truthpack"],
18432
+ mustVerify: ["Route responds correctly", "Input validation works", "Auth is enforced", "Test passes"],
18433
+ stopConditions: ["Do not create duplicate routes", "Do not hardcode mock data in handlers"]
18434
+ });
18435
+ }
18436
+ if (fp.framework.includes("Next") || fp.framework.includes("React")) {
18437
+ playbooks.push({
18438
+ taskType: "Add UI Component",
18439
+ steps: [
18440
+ "Check if a similar component already exists",
18441
+ "Create the component following existing naming and structure patterns",
18442
+ "Add TypeScript props interface",
18443
+ "Add unit test for the component",
18444
+ "If using state, determine if it should be a client component"
18445
+ ],
18446
+ mustRead: this.dna.patterns.filter((p) => p.category === "ui" || p.category === "state").map((p) => p.exemplar),
18447
+ mustUpdate: ["Component file", "Test file", "Parent component that uses it"],
18448
+ mustVerify: ["Component renders correctly", "Props are typed", "Test passes"],
18449
+ stopConditions: ['Do not use "any" type for props', 'Do not add useState in server components without "use client"']
18450
+ });
18451
+ }
18452
+ playbooks.push({
18453
+ taskType: "Refactor",
18454
+ steps: [
18455
+ "Identify all callers/dependents of the code being refactored",
18456
+ "Ensure comprehensive tests exist before refactoring",
18457
+ "Apply changes incrementally, testing after each step",
18458
+ "Update all dependents to use the new API",
18459
+ "Remove old code only after all dependents are migrated",
18460
+ "Verify no architecture rules are violated"
18461
+ ],
18462
+ mustRead: ["Dependency graph for affected files"],
18463
+ mustUpdate: ["Refactored file", "All dependent files", "Tests"],
18464
+ mustVerify: ["All tests pass", "No new violations", "No regressions"],
18465
+ stopConditions: ["Never break existing public APIs without migration path", "Do not refactor and add features in the same change"]
18588
18466
  });
18589
- txn();
18467
+ return playbooks;
18590
18468
  }
18591
18469
  // ═══════════════════════════════════════════════════════════════════════════
18592
- // PRIVATE — Loading
18470
+ // VERIFICATION STEPS
18593
18471
  // ═══════════════════════════════════════════════════════════════════════════
18594
- loadFiles() {
18595
- const rows = this.db.prepare("SELECT id, path, relative_path, language, line_count, exports, content FROM files").all();
18596
- return rows.map((row) => ({
18597
- id: row.id,
18598
- path: row.path,
18599
- relativePath: row.relative_path,
18600
- language: row.language,
18601
- lineCount: row.line_count,
18602
- exports: JSON.parse(row.exports),
18603
- content: row.content ?? void 0
18604
- }));
18605
- }
18606
- loadSymbols() {
18607
- const rows = this.db.prepare("SELECT id, name, kind, file_path, start_line, end_line, exported, async, params, branches FROM symbols").all();
18608
- return rows.map((row) => ({
18609
- id: row.id,
18610
- name: row.name,
18611
- kind: row.kind,
18612
- filePath: row.file_path,
18613
- startLine: row.start_line,
18614
- endLine: row.end_line,
18615
- exported: row.exported === 1,
18616
- async: row.async === 1,
18617
- params: row.params ?? void 0,
18618
- branches: row.branches ?? void 0
18619
- }));
18620
- }
18621
- loadSymbolsWhere(where, params) {
18622
- const rows = this.db.prepare(`SELECT id, name, kind, file_path, start_line, end_line, exported, async, params, branches FROM symbols WHERE ${where}`).all(...params);
18623
- return rows.map((row) => ({
18624
- id: row.id,
18625
- name: row.name,
18626
- kind: row.kind,
18627
- filePath: row.file_path,
18628
- startLine: row.start_line,
18629
- endLine: row.end_line,
18630
- exported: row.exported === 1,
18631
- async: row.async === 1,
18632
- params: row.params ?? void 0,
18633
- branches: row.branches ?? void 0
18634
- }));
18472
+ buildVerificationSteps() {
18473
+ const fp = this.dna.fingerprint;
18474
+ const steps = [];
18475
+ if (fp.language === "TypeScript") {
18476
+ steps.push({
18477
+ trigger: "Any TypeScript file change",
18478
+ checks: ["TypeScript compilation succeeds", "No new type errors introduced"],
18479
+ commands: [fp.packageManager === "pnpm" ? "pnpm run check-types" : "npm run check-types"],
18480
+ artifacts: []
18481
+ });
18482
+ }
18483
+ if (fp.testRunner) {
18484
+ steps.push({
18485
+ trigger: "Any source file change",
18486
+ checks: ["Related tests pass", "No test regressions"],
18487
+ commands: [`${fp.packageManager} run test`],
18488
+ artifacts: ["test-results.json"]
18489
+ });
18490
+ }
18491
+ if (fp.router) {
18492
+ steps.push({
18493
+ trigger: "Route handler added or modified",
18494
+ checks: ["Route responds with correct status", "Auth middleware is applied", "Input validation works"],
18495
+ commands: ["vibecheck scan"],
18496
+ artifacts: ["truthpack/routes.json"]
18497
+ });
18498
+ }
18499
+ steps.push({
18500
+ trigger: "Any source file change",
18501
+ checks: ["No new architecture rule violations", "No new circular dependencies"],
18502
+ commands: ["vibecheck arch-rules"],
18503
+ artifacts: []
18504
+ });
18505
+ return steps;
18635
18506
  }
18636
- loadImports() {
18637
- const rows = this.db.prepare("SELECT file_id, file_path, source_path, resolved_path, imported_symbols, is_type_only, is_dynamic, line FROM imports").all();
18638
- return rows.map((row) => ({
18639
- fileId: row.file_id,
18640
- filePath: row.file_path,
18641
- sourcePath: row.source_path,
18642
- resolvedPath: row.resolved_path,
18643
- importedSymbols: JSON.parse(row.imported_symbols),
18644
- isTypeOnly: row.is_type_only === 1,
18645
- isDynamic: row.is_dynamic === 1,
18646
- line: row.line
18647
- }));
18507
+ // ═══════════════════════════════════════════════════════════════════════════
18508
+ // RISK BRIEFING
18509
+ // ═══════════════════════════════════════════════════════════════════════════
18510
+ buildRiskBriefing() {
18511
+ const criticalFiles = this.dna.hotspots.filter((h) => h.score > 30).slice(0, 10).map((h) => h.file);
18512
+ const recentViolations = this.ruleResult?.violations.filter((v) => v.severity === "error").slice(0, 10) || [];
18513
+ const securityConcerns = [];
18514
+ for (const risk of this.dna.riskMap) {
18515
+ if (risk.riskLevel === "critical") {
18516
+ securityConcerns.push(`${risk.file}: ${risk.factors.join(", ")}`);
18517
+ }
18518
+ }
18519
+ const testGaps = [];
18520
+ const sourceFiles = this.data.files.filter(
18521
+ (f) => !f.relativePath.includes(".test.") && !f.relativePath.includes(".spec.") && (f.relativePath.endsWith(".ts") || f.relativePath.endsWith(".tsx")) && !f.relativePath.includes("config") && !f.relativePath.includes(".d.ts")
18522
+ );
18523
+ const testFiles = this.data.files.filter(
18524
+ (f) => f.relativePath.includes(".test.") || f.relativePath.includes(".spec.")
18525
+ );
18526
+ const testedBases = new Set(testFiles.map(
18527
+ (f) => path2.basename(f.relativePath).replace(/\.(test|spec)\.(ts|tsx|js|jsx)$/, "")
18528
+ ));
18529
+ for (const file of sourceFiles) {
18530
+ const baseName = path2.basename(file.relativePath).replace(/\.(ts|tsx|js|jsx)$/, "");
18531
+ if (!testedBases.has(baseName) && file.exports.length > 0) {
18532
+ testGaps.push(`${file.relativePath} has exports but no test file`);
18533
+ }
18534
+ }
18535
+ const driftWarnings = [];
18536
+ for (const cycle of this.graph.cycles) {
18537
+ driftWarnings.push(`Circular dependency: ${cycle.nodes.slice(0, 3).join(" \u2192 ")}${cycle.nodes.length > 3 ? "..." : ""}`);
18538
+ }
18539
+ return {
18540
+ criticalFiles,
18541
+ recentViolations,
18542
+ securityConcerns: securityConcerns.slice(0, 5),
18543
+ testGaps: testGaps.slice(0, 10),
18544
+ driftWarnings: driftWarnings.slice(0, 5)
18545
+ };
18648
18546
  }
18649
- loadCallEdges() {
18650
- const rows = this.db.prepare("SELECT caller_id, callee_id, caller_name, callee_name, caller_file, callee_file FROM call_edges").all();
18651
- return rows.map((row) => ({
18652
- callerId: row.caller_id,
18653
- calleeId: row.callee_id,
18654
- callerName: row.caller_name,
18655
- calleeName: row.callee_name,
18656
- callerFile: row.caller_file,
18657
- calleeFile: row.callee_file
18658
- }));
18547
+ // ═══════════════════════════════════════════════════════════════════════════
18548
+ // HELPERS
18549
+ // ═══════════════════════════════════════════════════════════════════════════
18550
+ toRelative(filePath) {
18551
+ if (filePath.startsWith(this.rootPath)) {
18552
+ return filePath.slice(this.rootPath.length + 1).replace(/\\/g, "/");
18553
+ }
18554
+ return filePath.replace(/\\/g, "/");
18659
18555
  }
18660
- loadRoutes() {
18661
- const rows = this.db.prepare("SELECT path, method, handler, file, line, middleware, auth FROM routes").all();
18662
- return rows.map((row) => ({
18663
- path: row.path,
18664
- method: row.method,
18665
- handler: row.handler,
18666
- file: row.file,
18667
- line: row.line,
18668
- middleware: JSON.parse(row.middleware),
18669
- auth: row.auth === null ? void 0 : row.auth === 1
18670
- }));
18671
- }
18672
- loadServices() {
18673
- const rows = this.db.prepare("SELECT id, name, root_path FROM services").all();
18674
- return rows.map((row) => ({
18675
- id: row.id,
18676
- name: row.name,
18677
- rootPath: row.root_path
18678
- }));
18679
- }
18680
- };
18681
- function hashContent2(content) {
18682
- return createHash("sha256").update(content).digest("hex").slice(0, 16);
18683
- }
18684
- var EXT_LANG = {
18685
- ".ts": "typescript",
18686
- ".tsx": "typescript",
18687
- ".js": "javascript",
18688
- ".jsx": "javascript",
18689
- ".mjs": "javascript",
18690
- ".cjs": "javascript",
18691
- ".py": "python",
18692
- ".rs": "rust",
18693
- ".go": "go",
18694
- ".java": "java",
18695
- ".c": "c",
18696
- ".h": "c",
18697
- ".cpp": "cpp",
18698
- ".hpp": "cpp",
18699
- ".rb": "ruby",
18700
- ".swift": "swift",
18701
- ".kt": "kotlin",
18702
- ".lua": "lua",
18703
- ".zig": "zig",
18704
- ".cs": "csharp"
18705
18556
  };
18706
- var TS_IMPORT_RE = /^import\s+(?:type\s+)?(?:\{[^}]*\}|[\w*]+(?:\s*,\s*\{[^}]*\})?)\s+from\s+['"]([^'"]+)['"]/gm;
18707
- var TS_IMPORT_TYPE_RE = /^import\s+type\s+/;
18708
- var TS_DYNAMIC_IMPORT_RE = /(?:import|require)\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
18709
- var PY_IMPORT_RE = /^(?:from\s+([\w.]+)\s+import|import\s+([\w.]+))/gm;
18710
- var GO_IMPORT_RE = /import\s+(?:\(\s*([\s\S]*?)\s*\)|"([^"]+)")/g;
18711
- var DefaultFileParser = class {
18712
- rootPath;
18713
- treeSitterParser = null;
18714
- treeSitterLoaded = false;
18715
- constructor(rootPath) {
18716
- this.rootPath = rootPath;
18717
- }
18718
- async parseFile(absolutePath, relativePath2) {
18719
- const content = await readFile(absolutePath, "utf-8");
18720
- const lines = content.split("\n");
18721
- const ext2 = extname(absolutePath).toLowerCase();
18722
- const language = EXT_LANG[ext2] ?? "unknown";
18723
- const contentHash = createHash("sha256").update(content).digest("hex").slice(0, 16);
18724
- const exports$1 = this.extractExports(content, language);
18725
- const symbols = await this.extractSymbols(content, lines, absolutePath, relativePath2, language);
18726
- const imports = this.extractImports(content, absolutePath, relativePath2, language);
18727
- const callEdges = this.extractCallEdges(content, symbols, absolutePath);
18728
- const fileId = `file:${relativePath2}`;
18729
- const file = {
18730
- id: fileId,
18731
- path: absolutePath,
18732
- relativePath: relativePath2,
18733
- language,
18734
- lineCount: lines.length,
18735
- exports: exports$1,
18736
- content
18737
- };
18738
- return { file, symbols, imports, callEdges, contentHash };
18739
- }
18740
- // ═══════════════════════════════════════════════════════════════════════════
18741
- // EXPORTS
18742
- // ═══════════════════════════════════════════════════════════════════════════
18743
- extractExports(content, language) {
18744
- if (language !== "typescript" && language !== "javascript") return [];
18745
- const exports$1 = [];
18746
- const exportRe = /export\s+(?:default\s+)?(?:async\s+)?(?:function|class|const|let|var|type|interface|enum)\s+(\w+)/g;
18747
- for (const match2 of content.matchAll(exportRe)) {
18748
- if (match2[1]) exports$1.push(match2[1]);
18749
- }
18750
- if (/export\s+default\s/.test(content) && !exports$1.includes("default")) {
18751
- exports$1.push("default");
18752
- }
18753
- const reExportRe = /export\s+\{([^}]+)\}\s+from/g;
18754
- for (const match2 of content.matchAll(reExportRe)) {
18755
- for (const sym of match2[1].split(",")) {
18756
- const name = sym.trim().split(/\s+as\s+/).pop()?.trim();
18757
- if (name) exports$1.push(name);
18557
+ var ContextExplainer = class {
18558
+ /**
18559
+ * Generate a rich explanation for a file.
18560
+ */
18561
+ explain(filePath, ctx) {
18562
+ const fc = ctx.fileContext;
18563
+ const paragraphs = [];
18564
+ const quickFacts = [];
18565
+ const warnings = [];
18566
+ const relatedFiles = [];
18567
+ const overlays = [];
18568
+ const role = fc?.role ?? "unknown";
18569
+ const roleSummary = this.buildRoleSummary(filePath, role, ctx);
18570
+ if (fc) {
18571
+ const depCount = fc.dependedOnBy.length;
18572
+ const depsOnCount = fc.dependsOn.length;
18573
+ if (depCount > 0 || depsOnCount > 0) {
18574
+ const parts = [];
18575
+ if (depCount > 0) {
18576
+ const critical = depCount > 10 ? " \u2014 changes here have wide blast radius" : "";
18577
+ parts.push(`${depCount} file${depCount > 1 ? "s" : ""} depend on this${critical}`);
18578
+ }
18579
+ if (depsOnCount > 0) {
18580
+ parts.push(`it imports from ${depsOnCount} file${depsOnCount > 1 ? "s" : ""}`);
18581
+ }
18582
+ paragraphs.push({
18583
+ heading: "Dependencies",
18584
+ text: parts.join(". ") + ".",
18585
+ importance: depCount > 10 ? "critical" : depCount > 5 ? "high" : "medium"
18586
+ });
18758
18587
  }
18759
- }
18760
- return [...new Set(exports$1)];
18761
- }
18762
- // ═══════════════════════════════════════════════════════════════════════════
18763
- // SYMBOLS
18764
- // ═══════════════════════════════════════════════════════════════════════════
18765
- async extractSymbols(content, lines, absolutePath, relativePath2, language) {
18766
- const tsSymbols = await this.tryTreeSitter(content, absolutePath);
18767
- if (tsSymbols) {
18768
- return tsSymbols.map((sym, i) => ({
18769
- id: `sym:${relativePath2}:${sym.name}:${sym.line}`,
18770
- name: sym.name,
18771
- kind: mapTreeSitterKind(sym.kind),
18772
- filePath: absolutePath,
18773
- startLine: sym.line,
18774
- endLine: sym.endLine,
18775
- exported: this.isExported(content, sym.name, language),
18776
- async: sym.signature.includes("async "),
18777
- params: this.countParams(sym.signature),
18778
- branches: void 0,
18779
- signature: sym.signature
18780
- }));
18781
- }
18782
- return this.extractSymbolsRegex(content, lines, absolutePath, relativePath2, language);
18783
- }
18784
- async tryTreeSitter(content, filePath) {
18785
- if (!this.treeSitterLoaded) {
18786
- this.treeSitterLoaded = true;
18787
- try {
18788
- const mod = await import('./tree-sitter-H5E7LKR4-MKO3NNLJ.js');
18789
- this.treeSitterParser = mod.parseWithTreeSitter;
18790
- } catch {
18791
- this.treeSitterParser = null;
18588
+ quickFacts.push({ label: "Role", value: role, icon: "layer" });
18589
+ if (fc.layer) quickFacts.push({ label: "Layer", value: fc.layer, icon: "layer" });
18590
+ quickFacts.push({ label: "Dependents", value: String(depCount), icon: "dependency" });
18591
+ quickFacts.push({ label: "Dependencies", value: String(depsOnCount), icon: "dependency" });
18592
+ if (fc.riskLevel === "critical" || fc.riskLevel === "high") {
18593
+ quickFacts.push({ label: "Risk", value: fc.riskLevel.toUpperCase(), icon: "warning" });
18594
+ warnings.push({
18595
+ message: `This file is classified as ${fc.riskLevel} risk`,
18596
+ severity: fc.riskLevel === "critical" ? "error" : "warning",
18597
+ action: "Add comprehensive tests and review carefully before merging changes"
18598
+ });
18599
+ }
18600
+ for (const dep of fc.dependedOnBy.slice(0, 5)) {
18601
+ relatedFiles.push({
18602
+ filePath: dep,
18603
+ reason: `Imports from this file`,
18604
+ relationship: "depended-by",
18605
+ confidence: 0.9
18606
+ });
18792
18607
  }
18608
+ overlays.push({
18609
+ type: "code-lens",
18610
+ line: 1,
18611
+ text: `${depCount} dependent${depCount !== 1 ? "s" : ""} \xB7 ${depsOnCount} import${depsOnCount !== 1 ? "s" : ""} \xB7 ${role}`,
18612
+ tooltip: `This ${role} file has ${depCount} files that depend on it and imports from ${depsOnCount} files`
18613
+ });
18793
18614
  }
18794
- if (!this.treeSitterParser) return null;
18795
- try {
18796
- const ext2 = extname(filePath).toLowerCase();
18797
- const symbols = await this.treeSitterParser(content, ext2);
18798
- if (!symbols || symbols.length === 0) return null;
18799
- return flattenCodeSymbols(symbols);
18800
- } catch {
18801
- return null;
18615
+ if (fc && fc.conventions.length > 0) {
18616
+ paragraphs.push({
18617
+ heading: "Conventions",
18618
+ text: `Follow these discovered conventions: ${fc.conventions.slice(0, 3).join("; ")}.`,
18619
+ importance: "medium"
18620
+ });
18802
18621
  }
18803
- }
18804
- extractSymbolsRegex(content, lines, absolutePath, relativePath2, language) {
18805
- const symbols = [];
18806
- if (language === "typescript" || language === "javascript") {
18807
- const patterns = [
18808
- { re: /^(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*(<[^>]*>)?\s*\(([^)]*)\)/gm, kind: "function" },
18809
- { re: /^(?:export\s+)?(?:abstract\s+)?class\s+(\w+)/gm, kind: "class" },
18810
- { re: /^(?:export\s+)?interface\s+(\w+)/gm, kind: "interface" },
18811
- { re: /^(?:export\s+)?type\s+(\w+)\s*(?:<[^>]*>)?\s*=/gm, kind: "type" },
18812
- { re: /^(?:export\s+)?(?:const\s+)?enum\s+(\w+)/gm, kind: "enum" },
18813
- { re: /^(?:export\s+)?const\s+(\w+)\s*(?::\s*[^=]+)?\s*=\s*(?:async\s+)?\(/gm, kind: "function" }
18814
- ];
18815
- for (const { re, kind } of patterns) {
18816
- for (const match2 of content.matchAll(re)) {
18817
- const name = match2[1];
18818
- if (!name) continue;
18819
- const line = content.slice(0, match2.index).split("\n").length;
18820
- const endLine = this.findBlockEnd(lines, line - 1);
18821
- symbols.push({
18822
- id: `sym:${relativePath2}:${name}:${line}`,
18823
- name,
18824
- kind,
18825
- filePath: absolutePath,
18826
- startLine: line,
18827
- endLine,
18828
- exported: this.isExported(content, name, language),
18829
- async: match2[0].includes("async"),
18830
- params: this.countParams(match2[0]),
18831
- branches: this.countBranches(lines, line - 1, endLine - 1),
18832
- signature: match2[0].trim().replace(/\s*\{?\s*$/, "")
18622
+ if (ctx.rules) {
18623
+ const fileViolations = ctx.rules.violations.filter(
18624
+ (v) => v.sourceSymbol.filePath.includes(shortName(filePath)) || v.targetSymbol && v.targetSymbol.filePath.includes(shortName(filePath))
18625
+ );
18626
+ if (fileViolations.length > 0) {
18627
+ const errors = fileViolations.filter((v) => v.severity === "error");
18628
+ const warns = fileViolations.filter((v) => v.severity === "warning");
18629
+ paragraphs.push({
18630
+ heading: "Architecture Violations",
18631
+ text: `${errors.length} error${errors.length !== 1 ? "s" : ""} and ${warns.length} warning${warns.length !== 1 ? "s" : ""} from architecture rules.`,
18632
+ importance: errors.length > 0 ? "critical" : "high"
18633
+ });
18634
+ for (const v of fileViolations.slice(0, 5)) {
18635
+ warnings.push({
18636
+ message: v.message,
18637
+ severity: v.severity === "error" ? "error" : "warning",
18638
+ action: v.suggestedFix ?? "Review and fix the violation",
18639
+ ruleId: v.ruleId
18640
+ });
18641
+ overlays.push({
18642
+ type: "diagnostic",
18643
+ line: v.sourceSymbol.line,
18644
+ text: v.message,
18645
+ severity: v.severity === "error" ? "error" : "warning",
18646
+ tooltip: v.suggestedFix
18833
18647
  });
18834
18648
  }
18835
18649
  }
18836
18650
  }
18837
- if (language === "python") {
18838
- const re = /^(?:async\s+)?(?:def|class)\s+(\w+)/gm;
18839
- for (const match2 of content.matchAll(re)) {
18840
- const name = match2[1];
18841
- const kind = match2[0].includes("class") ? "class" : "function";
18842
- const line = content.slice(0, match2.index).split("\n").length;
18843
- symbols.push({
18844
- id: `sym:${relativePath2}:${name}:${line}`,
18845
- name,
18846
- kind,
18847
- filePath: absolutePath,
18848
- startLine: line,
18849
- endLine: line + 10,
18850
- exported: true,
18851
- async: match2[0].includes("async"),
18852
- params: this.countParams(match2[0]),
18853
- branches: void 0,
18854
- signature: match2[0].trim()
18651
+ if (ctx.callGraph) {
18652
+ const fileNodes = ctx.callGraph.nodes.filter((n) => n.filePath.includes(shortName(filePath)));
18653
+ const hotFunctions = fileNodes.filter((n) => n.callerCount > 5);
18654
+ if (hotFunctions.length > 0) {
18655
+ paragraphs.push({
18656
+ heading: "Hot Functions",
18657
+ text: hotFunctions.map(
18658
+ (f) => `\`${f.name}\` is called by ${f.callerCount} function${f.callerCount !== 1 ? "s" : ""}${f.calleeCount > 0 ? ` and calls ${f.calleeCount}` : ""}`
18659
+ ).join(". ") + ".",
18660
+ importance: "high"
18855
18661
  });
18662
+ for (const fn of hotFunctions) {
18663
+ overlays.push({
18664
+ type: "code-lens",
18665
+ line: 0,
18666
+ // Would need symbol line mapping
18667
+ text: `${fn.callerCount} callers \xB7 ${fn.calleeCount} callees`,
18668
+ tooltip: `Function ${fn.name} has ${fn.callerCount} callers and ${fn.calleeCount} callees`
18669
+ });
18670
+ }
18671
+ }
18672
+ const deadInFile = ctx.callGraph.stats.deadFunctions.filter(
18673
+ (d) => d.filePath.includes(shortName(filePath))
18674
+ );
18675
+ if (deadInFile.length > 0) {
18676
+ for (const dead of deadInFile) {
18677
+ warnings.push({
18678
+ message: `\`${dead.name}\` appears to be dead code (exported but never called)`,
18679
+ severity: "info",
18680
+ action: "Verify this function is not called via dynamic dispatch or external consumers, then consider removing it"
18681
+ });
18682
+ }
18856
18683
  }
18857
18684
  }
18858
- return symbols;
18859
- }
18860
- // ═══════════════════════════════════════════════════════════════════════════
18861
- // IMPORTS
18862
- // ═══════════════════════════════════════════════════════════════════════════
18863
- extractImports(content, absolutePath, relativePath2, language) {
18864
- const imports = [];
18865
- const fileId = `file:${relativePath2}`;
18866
- if (language === "typescript" || language === "javascript") {
18867
- for (const match2 of content.matchAll(TS_IMPORT_RE)) {
18868
- const sourcePath = match2[1];
18869
- const line = content.slice(0, match2.index).split("\n").length;
18870
- const isTypeOnly = TS_IMPORT_TYPE_RE.test(match2[0]);
18871
- const importedSymbols = this.extractImportedSymbols(match2[0]);
18872
- const resolvedPath = this.resolveImportPath(sourcePath, absolutePath);
18873
- imports.push({
18874
- fileId,
18875
- filePath: absolutePath,
18876
- sourcePath,
18877
- resolvedPath,
18878
- importedSymbols,
18879
- isTypeOnly,
18880
- isDynamic: false,
18881
- line
18685
+ if (ctx.temporal) {
18686
+ const hotspot = ctx.temporal.changeHotspots.find((h) => filePath.includes(h.file) || h.file.includes(shortName(filePath)));
18687
+ if (hotspot) {
18688
+ const daysSince = Math.round((Date.now() - new Date(hotspot.lastChanged).getTime()) / 864e5);
18689
+ paragraphs.push({
18690
+ heading: "Recent Activity",
18691
+ text: `Changed ${hotspot.commits} times in the last ${ctx.temporal.stats.analysisWindowDays} days by ${hotspot.authors} author${hotspot.authors !== 1 ? "s" : ""}. Last modified ${daysSince} day${daysSince !== 1 ? "s" : ""} ago.`,
18692
+ importance: hotspot.commits > 10 ? "high" : "medium"
18882
18693
  });
18694
+ quickFacts.push({ label: "Recent commits", value: String(hotspot.commits), icon: "git" });
18695
+ quickFacts.push({ label: "Last changed", value: `${daysSince}d ago`, icon: "git" });
18696
+ quickFacts.push({ label: "Authors", value: String(hotspot.authors), icon: "git" });
18883
18697
  }
18884
- for (const match2 of content.matchAll(TS_DYNAMIC_IMPORT_RE)) {
18885
- const sourcePath = match2[1];
18886
- const line = content.slice(0, match2.index).split("\n").length;
18887
- imports.push({
18888
- fileId,
18889
- filePath: absolutePath,
18890
- sourcePath,
18891
- resolvedPath: this.resolveImportPath(sourcePath, absolutePath),
18892
- importedSymbols: [],
18893
- isTypeOnly: false,
18894
- isDynamic: true,
18895
- line
18698
+ const churn = ctx.temporal.churnFiles.find((c) => filePath.includes(c.file) || c.file.includes(shortName(filePath)));
18699
+ if (churn && churn.severity !== "low") {
18700
+ warnings.push({
18701
+ message: churn.reason,
18702
+ severity: churn.severity === "high" ? "warning" : "info",
18703
+ action: "Consider whether this file needs refactoring to reduce change frequency"
18896
18704
  });
18897
18705
  }
18898
- }
18899
- if (language === "python") {
18900
- for (const match2 of content.matchAll(PY_IMPORT_RE)) {
18901
- const sourcePath = match2[1] || match2[2];
18902
- if (!sourcePath) continue;
18903
- const line = content.slice(0, match2.index).split("\n").length;
18904
- imports.push({
18905
- fileId,
18906
- filePath: absolutePath,
18907
- sourcePath,
18908
- resolvedPath: "",
18909
- importedSymbols: [],
18910
- isTypeOnly: false,
18911
- isDynamic: false,
18912
- line
18706
+ const expertise = ctx.temporal.authorExpertise.find(
18707
+ (e) => filePath.startsWith(e.area) || filePath.includes(e.area)
18708
+ );
18709
+ if (expertise && expertise.busFactor === 1) {
18710
+ warnings.push({
18711
+ message: `Bus factor of 1 \u2014 ${expertise.primaryAuthor} has made ${expertise.authors[0]?.percentage}% of changes to this area`,
18712
+ severity: "info",
18713
+ action: "Consider knowledge sharing or pair programming for this area"
18913
18714
  });
18914
18715
  }
18915
18716
  }
18916
- if (language === "go") {
18917
- for (const match2 of content.matchAll(GO_IMPORT_RE)) {
18918
- const block = match2[1] || match2[2];
18919
- if (!block) continue;
18920
- const paths = block.match(/"([^"]+)"/g) ?? [];
18921
- for (const p of paths) {
18922
- const sourcePath = p.replace(/"/g, "");
18923
- imports.push({
18924
- fileId,
18925
- filePath: absolutePath,
18926
- sourcePath,
18927
- resolvedPath: "",
18928
- importedSymbols: [],
18929
- isTypeOnly: false,
18930
- isDynamic: false,
18931
- line: 0
18717
+ if (ctx.learned) {
18718
+ const coEdits = ctx.learned.coEdits.filter((p) => p.files[0].includes(shortName(filePath)) || p.files[1].includes(shortName(filePath))).slice(0, 3);
18719
+ if (coEdits.length > 0) {
18720
+ for (const pair of coEdits) {
18721
+ const other = pair.files[0].includes(shortName(filePath)) ? pair.files[1] : pair.files[0];
18722
+ relatedFiles.push({
18723
+ filePath: other,
18724
+ reason: `Often edited together (${pair.count} times)`,
18725
+ relationship: "co-edited",
18726
+ confidence: pair.weight
18932
18727
  });
18933
18728
  }
18934
18729
  }
18935
18730
  }
18936
- return imports;
18937
- }
18938
- extractImportedSymbols(importLine) {
18939
- const braceMatch = importLine.match(/\{([^}]+)\}/);
18940
- if (!braceMatch) {
18941
- const defaultMatch = importLine.match(/import\s+(?:type\s+)?(\w+)\s+from/);
18942
- return defaultMatch?.[1] ? [defaultMatch[1]] : [];
18731
+ if (fc && fc.editGuidance.length > 0) {
18732
+ paragraphs.push({
18733
+ heading: "Edit Guidance",
18734
+ text: fc.editGuidance.join(" "),
18735
+ importance: "medium"
18736
+ });
18943
18737
  }
18944
- return braceMatch[1].split(",").map((s) => {
18945
- const parts = s.trim().split(/\s+as\s+/);
18946
- return parts[parts.length - 1].trim();
18947
- }).filter(Boolean);
18738
+ return {
18739
+ filePath,
18740
+ roleSummary,
18741
+ paragraphs,
18742
+ quickFacts,
18743
+ warnings,
18744
+ relatedFiles,
18745
+ overlays
18746
+ };
18948
18747
  }
18949
- resolveImportPath(sourcePath, fromFile) {
18950
- if (!sourcePath.startsWith(".")) return sourcePath;
18951
- const dir = dirname(fromFile);
18952
- const resolved = resolve(dir, sourcePath);
18953
- const extensions = [".ts", ".tsx", ".js", ".jsx", "/index.ts", "/index.tsx", "/index.js"];
18954
- for (const ext2 of extensions) {
18955
- const candidate = resolved + ext2;
18956
- try {
18957
- accessSync(candidate);
18958
- return candidate;
18959
- } catch {
18748
+ /**
18749
+ * Generate a compact markdown explanation for AI agent consumption.
18750
+ */
18751
+ explainForAgent(filePath, ctx) {
18752
+ const explanation = this.explain(filePath, ctx);
18753
+ const lines = [];
18754
+ lines.push(`## ${shortName(filePath)}: ${explanation.roleSummary}`);
18755
+ lines.push("");
18756
+ if (explanation.quickFacts.length > 0) {
18757
+ lines.push(explanation.quickFacts.map((f) => `**${f.label}**: ${f.value}`).join(" \xB7 "));
18758
+ lines.push("");
18759
+ }
18760
+ for (const p of explanation.paragraphs.filter((p2) => p2.importance === "critical" || p2.importance === "high")) {
18761
+ lines.push(`### ${p.heading}`);
18762
+ lines.push(p.text);
18763
+ lines.push("");
18764
+ }
18765
+ if (explanation.warnings.length > 0) {
18766
+ lines.push("### Warnings");
18767
+ for (const w of explanation.warnings) {
18768
+ const icon = w.severity === "error" ? "MUST FIX" : w.severity === "warning" ? "SHOULD FIX" : "NOTE";
18769
+ lines.push(`- **[${icon}]** ${w.message} \u2014 ${w.action}`);
18960
18770
  }
18771
+ lines.push("");
18961
18772
  }
18962
- return resolved;
18963
- }
18964
- // ═══════════════════════════════════════════════════════════════════════════
18965
- // CALL EDGES (basic extraction)
18966
- // ═══════════════════════════════════════════════════════════════════════════
18967
- extractCallEdges(content, symbols, filePath) {
18968
- const edges = [];
18969
- const functionNames = new Set(symbols.filter((s) => s.kind === "function" || s.kind === "method").map((s) => s.name));
18970
- for (const caller of symbols) {
18971
- if (caller.kind !== "function" && caller.kind !== "method") continue;
18972
- const body = content.split("\n").slice(caller.startLine - 1, caller.endLine).join("\n");
18973
- for (const calleeName of functionNames) {
18974
- if (calleeName === caller.name) continue;
18975
- const callRe = new RegExp(`\\b${calleeName}\\s*\\(`, "g");
18976
- if (callRe.test(body)) {
18977
- const callee = symbols.find((s) => s.name === calleeName);
18978
- if (callee) {
18979
- edges.push({
18980
- callerId: caller.id,
18981
- calleeId: callee.id,
18982
- callerName: caller.name,
18983
- calleeName: callee.name,
18984
- callerFile: filePath,
18985
- calleeFile: filePath
18986
- });
18987
- }
18988
- }
18773
+ if (explanation.relatedFiles.length > 0) {
18774
+ lines.push("### Related Files");
18775
+ for (const rf of explanation.relatedFiles.slice(0, 5)) {
18776
+ lines.push(`- \`${rf.filePath}\` \u2014 ${rf.reason}`);
18989
18777
  }
18778
+ lines.push("");
18990
18779
  }
18991
- return edges;
18780
+ return lines.join("\n");
18992
18781
  }
18993
- // ═══════════════════════════════════════════════════════════════════════════
18994
- // HELPERS
18995
- // ═══════════════════════════════════════════════════════════════════════════
18996
- isExported(content, name, language) {
18997
- if (language === "python") return true;
18998
- const re = new RegExp(`export\\s+(?:default\\s+)?(?:async\\s+)?(?:function|class|const|let|var|type|interface|enum)\\s+${name}\\b`);
18999
- if (re.test(content)) return true;
19000
- const reExport = new RegExp(`export\\s+\\{[^}]*\\b${name}\\b[^}]*\\}`);
19001
- return reExport.test(content);
19002
- }
19003
- countParams(signature) {
19004
- const parenMatch = signature.match(/\(([^)]*)\)/);
19005
- if (!parenMatch || !parenMatch[1].trim()) return 0;
19006
- return parenMatch[1].split(",").length;
19007
- }
19008
- countBranches(lines, startIdx, endIdx) {
19009
- let branches = 0;
19010
- const branchRe = /\b(if|else if|case|for|while|catch|&&|\|\||\?\?)\b/g;
19011
- for (let i = startIdx; i < Math.min(endIdx, lines.length); i++) {
19012
- const matches = lines[i].match(branchRe);
19013
- if (matches) branches += matches.length;
19014
- }
19015
- return branches;
19016
- }
19017
- findBlockEnd(lines, startIdx) {
19018
- let depth = 0;
19019
- let seenOpen = false;
19020
- for (let i = startIdx; i < lines.length; i++) {
19021
- for (const ch of lines[i]) {
19022
- if (ch === "{") {
19023
- depth++;
19024
- seenOpen = true;
19025
- } else if (ch === "}" && seenOpen) {
19026
- depth--;
19027
- if (depth <= 0) return i + 1;
19028
- }
19029
- }
19030
- }
19031
- return startIdx + 1;
19032
- }
19033
- };
19034
- function mapTreeSitterKind(kind) {
19035
- const map = {
19036
- function: "function",
19037
- method: "method",
19038
- class: "class",
19039
- interface: "interface",
19040
- type: "type",
19041
- enum: "enum",
19042
- const: "variable",
19043
- variable: "variable",
19044
- struct: "class",
19045
- trait: "interface",
19046
- export: "variable"
19047
- };
19048
- return map[kind] ?? "function";
19049
- }
19050
- function flattenCodeSymbols(symbols) {
19051
- const flat = [];
19052
- for (const sym of symbols) {
19053
- flat.push({ name: sym.name, kind: sym.kind, line: sym.line, endLine: sym.endLine, signature: sym.signature });
19054
- if (sym.children && Array.isArray(sym.children)) {
19055
- flat.push(...flattenCodeSymbols(sym.children));
19056
- }
19057
- }
19058
- return flat;
19059
- }
19060
- var ContextSynthesizer = class {
19061
- rootPath;
19062
- data;
19063
- dna;
19064
- graph;
19065
- ruleResult;
19066
- constructor(rootPath, data, dna, graph, ruleResult) {
19067
- this.rootPath = rootPath;
19068
- this.data = data;
19069
- this.dna = dna;
19070
- this.graph = graph;
19071
- this.ruleResult = ruleResult || null;
19072
- }
19073
- /**
19074
- * Synthesize the full context — the complete brain dump for AI agents.
19075
- */
19076
- synthesize() {
19077
- const projectIdentity = this.buildProjectIdentity();
19078
- const archRules = this.buildArchRuleSummary();
19079
- const activeViolations = this.ruleResult?.violations || [];
19080
- const codebaseDNA = this.buildDNASummary();
19081
- const fileContexts = this.buildFileContexts();
19082
- const taskPlaybooks = this.buildTaskPlaybooks();
19083
- const verificationSteps = this.buildVerificationSteps();
19084
- const riskBriefing = this.buildRiskBriefing();
19085
- return {
19086
- version: "2.0.0",
19087
- generatedAt: (/* @__PURE__ */ new Date()).toISOString(),
19088
- projectIdentity,
19089
- architecturalRules: archRules,
19090
- activeViolations,
19091
- codebaseDNA,
19092
- fileContexts,
19093
- taskPlaybooks,
19094
- verificationSteps,
19095
- riskBriefing
19096
- };
19097
- }
19098
- /**
19099
- * Generate context for a specific file — what an AI agent needs to know
19100
- * before editing this file.
19101
- */
19102
- synthesizeForFile(filePath) {
19103
- const rel = this.toRelative(filePath);
19104
- const file = this.data.files.find((f) => f.relativePath === rel || f.path === filePath);
19105
- if (!file) return null;
19106
- return this.buildSingleFileContext(file);
18782
+ /**
18783
+ * Generate IDE overlay data for the VS Code extension to consume.
18784
+ */
18785
+ getIDEOverlays(filePath, ctx) {
18786
+ const explanation = this.explain(filePath, ctx);
18787
+ return explanation.overlays;
19107
18788
  }
19108
18789
  /**
19109
- * Generate a compact markdown context document for IDE rules.
19110
- * This replaces the old static rule generation with intelligence-driven context.
18790
+ * Generate a health report explanation.
19111
18791
  */
19112
- generateContextDocument() {
19113
- const ctx = this.synthesize();
18792
+ explainHealth(health, dna) {
19114
18793
  const lines = [];
19115
- lines.push(`# ${ctx.projectIdentity.name} \u2014 AI Context`);
19116
- lines.push(`<!-- Generated by @repo/context-engine at ${ctx.generatedAt} -->`);
19117
- lines.push("");
19118
- lines.push("## Project Identity");
19119
- lines.push(`- **Stack**: ${ctx.projectIdentity.stack}`);
19120
- lines.push(`- **Architecture**: ${ctx.projectIdentity.architecture}`);
19121
- if (ctx.projectIdentity.keyPatterns.length > 0) {
19122
- lines.push(`- **Key Patterns**: ${ctx.projectIdentity.keyPatterns.join(", ")}`);
19123
- }
18794
+ const dims = health.dimensions;
18795
+ lines.push(`## Codebase Health: ${health.overall}/100`);
19124
18796
  lines.push("");
19125
- if (ctx.codebaseDNA.conventions.length > 0) {
19126
- lines.push("## Conventions (Auto-Discovered)");
19127
- for (const conv of ctx.codebaseDNA.conventions) {
19128
- lines.push(`- ${conv}`);
19129
- }
19130
- lines.push("");
19131
- }
19132
- if (ctx.architecturalRules.length > 0) {
19133
- lines.push("## Architecture Rules");
19134
- for (const rule of ctx.architecturalRules) {
19135
- const icon = rule.severity === "error" ? "MUST" : rule.severity === "warning" ? "SHOULD" : "MAY";
19136
- lines.push(`- **[${icon}]** ${rule.name}: ${rule.description}`);
19137
- if (rule.violationCount > 0) {
19138
- lines.push(` - ${rule.violationCount} active violations`);
19139
- }
19140
- }
19141
- lines.push("");
19142
- }
19143
- if (ctx.codebaseDNA.boundaries.length > 0) {
19144
- lines.push("## Module Boundaries");
19145
- for (const boundary of ctx.codebaseDNA.boundaries) {
19146
- lines.push(`- ${boundary}`);
19147
- }
19148
- lines.push("");
19149
- }
19150
- if (ctx.codebaseDNA.hotFiles.length > 0) {
19151
- lines.push("## High-Impact Files (Edit With Care)");
19152
- for (const file of ctx.codebaseDNA.hotFiles.slice(0, 10)) {
19153
- lines.push(`- \`${file}\``);
19154
- }
19155
- lines.push("");
19156
- }
19157
- if (ctx.riskBriefing.securityConcerns.length > 0 || ctx.riskBriefing.testGaps.length > 0) {
19158
- lines.push("## Risk Areas");
19159
- for (const concern of ctx.riskBriefing.securityConcerns) {
19160
- lines.push(`- ${concern}`);
19161
- }
19162
- for (const gap of ctx.riskBriefing.testGaps.slice(0, 5)) {
19163
- lines.push(`- ${gap}`);
19164
- }
19165
- lines.push("");
19166
- }
19167
- if (ctx.projectIdentity.noGoZones.length > 0) {
19168
- lines.push("## No-Go Zones");
19169
- for (const zone of ctx.projectIdentity.noGoZones) {
19170
- lines.push(`- ${zone}`);
19171
- }
19172
- lines.push("");
19173
- }
19174
- if (ctx.taskPlaybooks.length > 0) {
19175
- lines.push("## Task Playbooks");
19176
- for (const playbook of ctx.taskPlaybooks) {
19177
- lines.push(`### ${playbook.taskType}`);
19178
- for (const step of playbook.steps) {
19179
- lines.push(`1. ${step}`);
19180
- }
19181
- if (playbook.mustVerify.length > 0) {
19182
- lines.push(`**Verify**: ${playbook.mustVerify.join(", ")}`);
19183
- }
19184
- lines.push("");
19185
- }
19186
- }
19187
- if (ctx.verificationSteps.length > 0) {
19188
- lines.push("## Verification Protocol");
19189
- for (const step of ctx.verificationSteps) {
19190
- lines.push(`### On ${step.trigger}`);
19191
- for (const check of step.checks) {
19192
- lines.push(`- ${check}`);
19193
- }
19194
- if (step.commands.length > 0) {
19195
- lines.push(`**Run**: \`${step.commands.join(" && ")}\``);
19196
- }
19197
- lines.push("");
19198
- }
18797
+ const entries = [
18798
+ { name: "Architecture", score: dims.architecture, explain: this.explainArchScore(dims.architecture, dna) },
18799
+ { name: "Test Coverage", score: dims.testCoverage, explain: this.explainTestScore(dims.testCoverage) },
18800
+ { name: "Conventions", score: dims.conventions, explain: this.explainConventionScore(dims.conventions, dna) },
18801
+ { name: "Dependencies", score: dims.dependencies, explain: this.explainDependencyScore(dims.dependencies, dna) },
18802
+ { name: "Security", score: dims.security, explain: dims.security >= 80 ? "No critical security concerns detected" : "Critical risk areas identified" },
18803
+ { name: "Complexity", score: dims.complexity, explain: dims.complexity >= 80 ? "Complexity is well-managed" : "High-complexity hotspots detected" }
18804
+ ];
18805
+ for (const entry of entries) {
18806
+ const bar = this.renderBar(entry.score);
18807
+ lines.push(`${bar} **${entry.name}**: ${entry.score}/100 \u2014 ${entry.explain}`);
19199
18808
  }
19200
- lines.push("## Codebase Health");
19201
- lines.push(`- **Overall**: ${this.dna.healthScore.overall}/100`);
19202
- const dims = this.dna.healthScore.dimensions;
19203
- lines.push(`- Architecture: ${dims.architecture} | Tests: ${dims.testCoverage} | Conventions: ${dims.conventions} | Dependencies: ${dims.dependencies}`);
19204
- lines.push("");
19205
- lines.push("---");
19206
- lines.push("<!-- context-engine:v2 -->");
19207
18809
  return lines.join("\n");
19208
18810
  }
19209
18811
  // ═══════════════════════════════════════════════════════════════════════════
19210
- // IDENTITY
19211
- // ═══════════════════════════════════════════════════════════════════════════
19212
- buildProjectIdentity() {
19213
- const fp = this.dna.fingerprint;
19214
- const stack = [fp.framework, fp.language, fp.orm, fp.validator, fp.authLib, fp.router].filter(Boolean).join(" | ");
19215
- const keyPatterns = this.dna.patterns.map((p) => p.name);
19216
- const criticalPaths = this.dna.hotspots.slice(0, 5).map((h) => h.file);
19217
- const noGoZones = [];
19218
- if (this.ruleResult) {
19219
- const errorRules = this.ruleResult.violations.filter((v) => v.severity === "error");
19220
- const uniqueMessages = [...new Set(errorRules.map((v) => v.message))];
19221
- noGoZones.push(...uniqueMessages.slice(0, 5));
19222
- }
19223
- for (const cycle of this.graph.cycles) {
19224
- noGoZones.push(`Circular dependency: ${cycle.nodes.slice(0, 3).join(" \u2192 ")}...`);
19225
- }
19226
- const architecture = this.dna.conventions.filter((c) => c.area === "structure").map((c) => c.description).join("; ") || fp.framework;
19227
- return {
19228
- name: fp.name,
19229
- stack,
19230
- architecture,
19231
- keyPatterns,
19232
- criticalPaths,
19233
- noGoZones: noGoZones.slice(0, 10)
19234
- };
19235
- }
19236
- // ═══════════════════════════════════════════════════════════════════════════
19237
- // RULES SUMMARY
18812
+ // PRIVATE
19238
18813
  // ═══════════════════════════════════════════════════════════════════════════
19239
- buildArchRuleSummary() {
19240
- if (!this.ruleResult) return [];
19241
- const breakdown = this.ruleResult.ruleBreakdown;
19242
- return Object.entries(breakdown).map(([ruleId, count]) => {
19243
- const violation = this.ruleResult.violations.find((v) => v.ruleId === ruleId);
19244
- return {
19245
- id: ruleId,
19246
- name: violation?.ruleName || ruleId,
19247
- type: "import_forbidden",
19248
- severity: violation?.severity || "warning",
19249
- scope: violation?.sourceSymbol.filePath || "",
19250
- description: violation?.message || "",
19251
- violationCount: count
19252
- };
19253
- });
19254
- }
19255
- // ═══════════════════════════════════════════════════════════════════════════
19256
- // DNA SUMMARY
19257
- // ═══════════════════════════════════════════════════════════════════════════
19258
- buildDNASummary() {
19259
- return {
19260
- conventions: this.dna.conventions.filter((c) => c.confidence > 0.5).map((c) => c.description),
19261
- patterns: this.dna.patterns.map((p) => `${p.name}: ${p.description}`),
19262
- boundaries: this.dna.boundaries.filter((b) => b.importCount > 3).map((b) => `${b.from} \u2192 ${b.to} (${b.importCount} imports${b.isCircular ? ", CIRCULAR" : ""})`),
19263
- hotFiles: this.dna.hotspots.slice(0, 10).map((h) => h.file),
19264
- riskAreas: this.dna.riskMap.filter((r) => r.riskLevel === "critical" || r.riskLevel === "high").map((r) => `${r.file}: ${r.factors[0]}`)
19265
- };
19266
- }
19267
- // ═══════════════════════════════════════════════════════════════════════════
19268
- // FILE CONTEXTS
19269
- // ═══════════════════════════════════════════════════════════════════════════
19270
- buildFileContexts() {
19271
- const contexts = /* @__PURE__ */ new Map();
19272
- for (const file of this.data.files) {
19273
- contexts.set(file.relativePath, this.buildSingleFileContext(file));
19274
- }
19275
- return contexts;
19276
- }
19277
- buildSingleFileContext(file) {
19278
- const rel = file.relativePath;
19279
- const role = this.classifyRole(file);
19280
- const graphNode = this.graph.nodes.find((n) => n.relativePath === rel);
19281
- const layer = graphNode?.layer;
19282
- const dependsOn = this.graph.edges.filter((e) => e.from === rel).map((e) => e.to);
19283
- const dependedOnBy = this.graph.edges.filter((e) => e.to === rel).map((e) => e.from);
19284
- const applicableRules = [];
19285
- if (this.ruleResult) {
19286
- for (const v of this.ruleResult.violations) {
19287
- if (v.sourceSymbol.filePath.includes(rel) || v.targetSymbol && v.targetSymbol.filePath.includes(rel)) {
19288
- if (!applicableRules.includes(v.ruleId)) applicableRules.push(v.ruleId);
19289
- }
19290
- }
19291
- }
19292
- const conventions = this.dna.conventions.filter((c) => this.conventionAppliesToFile(c.area, file)).map((c) => c.description);
19293
- const patterns = this.dna.patterns.filter((p) => p.fileMatches.some((m) => m === rel)).map((p) => p.name);
19294
- const riskEntry = this.dna.riskMap.find((r) => r.file === rel);
19295
- const riskLevel = riskEntry?.riskLevel || "low";
19296
- const relatedFiles = this.findRelatedFiles(file, role).slice(0, 8);
19297
- const editGuidance = this.generateEditGuidance(file, role, layer, dependedOnBy, conventions);
19298
- return {
19299
- filePath: file.path,
19300
- role,
19301
- layer,
19302
- dependsOn,
19303
- dependedOnBy,
19304
- applicableRules,
19305
- conventions,
19306
- patterns,
19307
- riskLevel,
19308
- relatedFiles,
19309
- editGuidance
19310
- };
19311
- }
19312
- classifyRole(file) {
19313
- const rel = file.relativePath.toLowerCase();
19314
- if (rel.includes(".test.") || rel.includes(".spec.") || rel.includes("__tests__")) return "test";
19315
- if (rel.includes("fixture") || rel.includes("mock")) return "fixture";
19316
- if (rel.includes("migration")) return "migration";
19317
- if (rel.match(/\.(css|scss|less|styl)$/)) return "style";
19318
- if (rel.includes(".config.") || rel.includes("config/") || rel === "tsconfig.json") return "config";
19319
- if (rel.includes("middleware")) return "middleware";
19320
- if (rel.includes("/api/") || rel.includes("route")) return "route-handler";
19321
- if (rel.includes("service") || rel.includes("Service")) return "service";
19322
- if (rel.includes("repositor") || rel.includes("Repositor")) return "repository";
19323
- if (rel.endsWith(".tsx") && !rel.includes("page.")) return "component";
19324
- if (rel.includes("/types") || rel.endsWith(".d.ts")) return "type";
19325
- if (rel.includes("util") || rel.includes("helper") || rel.includes("lib/")) return "util";
19326
- if (rel.includes("script") || rel.includes("bin/")) return "script";
19327
- if (rel.match(/^(src\/)?index\.|^(src\/)?main\.|^(src\/)?app\./)) return "entry";
19328
- return "unknown";
19329
- }
19330
- conventionAppliesToFile(area, file) {
19331
- switch (area) {
19332
- case "naming":
19333
- return true;
19334
- case "imports":
19335
- return file.relativePath.endsWith(".ts") || file.relativePath.endsWith(".tsx");
19336
- case "exports":
19337
- return file.exports.length > 0;
19338
- case "testing":
19339
- return file.relativePath.includes(".test.") || file.relativePath.includes(".spec.");
19340
- case "error-handling":
19341
- return !file.relativePath.includes(".test.");
19342
- case "types":
19343
- return file.relativePath.endsWith(".ts") || file.relativePath.endsWith(".tsx");
19344
- default:
19345
- return true;
19346
- }
19347
- }
19348
- findRelatedFiles(file, role) {
19349
- const related = [];
19350
- const dir = path2.dirname(file.relativePath);
19351
- for (const other of this.data.files) {
19352
- if (other.path === file.path) continue;
19353
- if (path2.dirname(other.relativePath) === dir) {
19354
- related.push(other.relativePath);
19355
- }
19356
- }
19357
- if (related.length < 5) {
19358
- for (const other of this.data.files) {
19359
- if (other.path === file.path) continue;
19360
- if (related.includes(other.relativePath)) continue;
19361
- if (this.classifyRole(other) === role) {
19362
- related.push(other.relativePath);
19363
- if (related.length >= 8) break;
19364
- }
19365
- }
19366
- }
19367
- return related;
19368
- }
19369
- generateEditGuidance(file, role, layer, dependedOnBy, conventions) {
19370
- const guidance = [];
19371
- if (dependedOnBy.length > 10) {
19372
- guidance.push(`HIGH IMPACT: ${dependedOnBy.length} files depend on this. Changes have wide blast radius.`);
19373
- }
18814
+ buildRoleSummary(filePath, role, ctx) {
18815
+ const parts = [];
19374
18816
  switch (role) {
19375
- case "route-handler":
19376
- guidance.push("Validate all inputs with schemas before processing.");
19377
- guidance.push("Return consistent response shapes ({ success, data } or { success, error }).");
19378
- guidance.push("Ensure authentication middleware is applied to protected endpoints.");
19379
- break;
19380
18817
  case "service":
19381
- guidance.push("Keep business logic here, not in controllers/routes.");
19382
- guidance.push("Use dependency injection for testability.");
19383
- if (layer) guidance.push(`This is in the ${layer} layer \u2014 only import from lower layers.`);
18818
+ parts.push("Business logic service");
19384
18819
  break;
19385
- case "repository":
19386
- guidance.push("Only data access logic belongs here \u2014 no business rules.");
19387
- guidance.push("Return domain objects, not raw database rows.");
18820
+ case "route-handler":
18821
+ parts.push("API route handler");
19388
18822
  break;
19389
18823
  case "component":
19390
- guidance.push("Keep components focused and composable.");
19391
- guidance.push("Extract complex logic to custom hooks.");
18824
+ parts.push("UI component");
18825
+ break;
18826
+ case "repository":
18827
+ parts.push("Data access layer");
19392
18828
  break;
19393
18829
  case "middleware":
19394
- guidance.push("Middleware must call next() or return a response \u2014 never leave the request hanging.");
19395
- guidance.push("Keep middleware focused on a single concern.");
18830
+ parts.push("Request middleware");
19396
18831
  break;
19397
18832
  case "test":
19398
- guidance.push("Follow Arrange-Act-Assert pattern.");
19399
- guidance.push("Test edge cases and error conditions, not just happy path.");
18833
+ parts.push("Test file");
18834
+ break;
18835
+ case "config":
18836
+ parts.push("Configuration");
18837
+ break;
18838
+ case "type":
18839
+ parts.push("Type definitions");
18840
+ break;
18841
+ case "util":
18842
+ parts.push("Utility module");
18843
+ break;
18844
+ case "entry":
18845
+ parts.push("Entry point");
19400
18846
  break;
18847
+ default:
18848
+ parts.push("Source file");
19401
18849
  }
19402
- for (const conv of conventions.slice(0, 3)) {
19403
- guidance.push(`Convention: ${conv}`);
18850
+ if (ctx.fileContext) {
18851
+ if (ctx.fileContext.layer) parts.push(`in ${ctx.fileContext.layer} layer`);
18852
+ if (ctx.fileContext.dependedOnBy.length > 10) parts.push("(high-impact)");
19404
18853
  }
19405
- return guidance;
18854
+ return parts.join(" ");
18855
+ }
18856
+ explainArchScore(score, dna) {
18857
+ if (score >= 80) return `Strong architecture with ${dna.patterns.length} recognized patterns`;
18858
+ if (score >= 50) return `Moderate architecture \u2014 ${dna.patterns.length} patterns detected, room to strengthen boundaries`;
18859
+ return "Architecture needs attention \u2014 few recognized patterns or boundaries";
18860
+ }
18861
+ explainTestScore(score) {
18862
+ if (score >= 80) return "Good test coverage across source files";
18863
+ if (score >= 50) return "Moderate coverage \u2014 some source files lack tests";
18864
+ return "Low test coverage \u2014 many exported modules have no test files";
18865
+ }
18866
+ explainConventionScore(score, dna) {
18867
+ const strong = dna.conventions.filter((c) => c.confidence > 0.6).length;
18868
+ if (score >= 80) return `${strong} strong conventions enforced consistently`;
18869
+ if (score >= 50) return `${strong} conventions detected but inconsistently applied`;
18870
+ return "Few consistent conventions \u2014 codebase style varies across files";
18871
+ }
18872
+ explainDependencyScore(score, dna) {
18873
+ const circular = dna.boundaries.filter((b) => b.isCircular).length;
18874
+ if (score >= 80) return "Clean dependency graph with no circular dependencies";
18875
+ if (circular > 0) return `${circular} circular dependency${circular > 1 ? "ies" : "y"} detected \u2014 these increase coupling and make code harder to reason about`;
18876
+ return "Dependency health needs improvement";
18877
+ }
18878
+ renderBar(score) {
18879
+ const filled = Math.round(score / 10);
18880
+ return "\u2588".repeat(filled) + "\u2591".repeat(10 - filled);
18881
+ }
18882
+ };
18883
+ function shortName(filePath) {
18884
+ const parts = filePath.split("/");
18885
+ return parts[parts.length - 1] ?? filePath;
18886
+ }
18887
+
18888
+ // ../context-engine/dist/chunk-HMKLYBWJ.js
18889
+ var import_better_sqlite3 = __toESM(require_lib(), 1);
18890
+ var import_fast_glob2 = __toESM(require_out4(), 1);
18891
+ var SCHEMA_VERSION = 1;
18892
+ var PersistentIndex = class {
18893
+ db;
18894
+ config;
18895
+ rootPath;
18896
+ constructor(config) {
18897
+ this.rootPath = config.rootPath;
18898
+ this.config = {
18899
+ rootPath: config.rootPath,
18900
+ dbPath: config.dbPath ?? join(config.rootPath, ".vibecheck", "index.db"),
18901
+ includePatterns: config.includePatterns ?? ["**/*.{ts,tsx,js,jsx,py,rs,go,java,c,cpp,h,hpp,rb,swift,kt,lua,zig}"],
18902
+ excludePatterns: config.excludePatterns ?? [
18903
+ "**/node_modules/**",
18904
+ "**/dist/**",
18905
+ "**/build/**",
18906
+ "**/.next/**",
18907
+ "**/.git/**",
18908
+ "**/coverage/**",
18909
+ "**/.turbo/**",
18910
+ "**/__pycache__/**",
18911
+ "**/target/**",
18912
+ "**/.mcp_data/**"
18913
+ ],
18914
+ maxFileSize: config.maxFileSize ?? 5e5,
18915
+ maxFiles: config.maxFiles ?? 1e4,
18916
+ storeContent: config.storeContent ?? true
18917
+ };
18918
+ const dbDir = join(this.config.dbPath, "..");
18919
+ mkdirSync(dbDir, { recursive: true });
18920
+ this.db = new import_better_sqlite3.default(this.config.dbPath);
18921
+ this.db.pragma("journal_mode = WAL");
18922
+ this.db.pragma("synchronous = NORMAL");
18923
+ this.db.pragma("cache_size = -64000");
18924
+ this.initSchema();
19406
18925
  }
19407
18926
  // ═══════════════════════════════════════════════════════════════════════════
19408
- // TASK PLAYBOOKS
18927
+ // SCHEMA
19409
18928
  // ═══════════════════════════════════════════════════════════════════════════
19410
- buildTaskPlaybooks() {
19411
- const fp = this.dna.fingerprint;
19412
- const playbooks = [];
19413
- playbooks.push({
19414
- taskType: "Bug Fix",
19415
- steps: [
19416
- "Reproduce the bug and understand the expected vs actual behavior",
19417
- "Identify the root cause file(s) using the dependency graph",
19418
- "Write a failing test that reproduces the bug",
19419
- "Apply the minimal fix at the root cause",
19420
- "Verify the fix passes the test and does not break existing tests",
19421
- "Check that the fix does not violate any architecture rules"
19422
- ],
19423
- mustRead: this.dna.hotspots.slice(0, 3).map((h) => h.file),
19424
- mustUpdate: ["The buggy file", "Related test file"],
19425
- mustVerify: ["All existing tests pass", "New regression test passes", "No new arch rule violations"],
19426
- stopConditions: ["Never modify tests to make them pass \u2014 fix the code", "Do not change public API signatures without discussion"]
18929
+ initSchema() {
18930
+ const version = this.getSchemaVersion();
18931
+ if (version === SCHEMA_VERSION) return;
18932
+ this.db.exec(`
18933
+ DROP TABLE IF EXISTS file_hashes;
18934
+ DROP TABLE IF EXISTS files;
18935
+ DROP TABLE IF EXISTS symbols;
18936
+ DROP TABLE IF EXISTS imports;
18937
+ DROP TABLE IF EXISTS call_edges;
18938
+ DROP TABLE IF EXISTS routes;
18939
+ DROP TABLE IF EXISTS services;
18940
+ DROP TABLE IF EXISTS embeddings;
18941
+ DROP TABLE IF EXISTS meta;
18942
+
18943
+ CREATE TABLE meta (
18944
+ key TEXT PRIMARY KEY,
18945
+ value TEXT NOT NULL
18946
+ );
18947
+
18948
+ CREATE TABLE file_hashes (
18949
+ relative_path TEXT PRIMARY KEY,
18950
+ content_hash TEXT NOT NULL,
18951
+ size_bytes INTEGER NOT NULL,
18952
+ modified_ms INTEGER NOT NULL,
18953
+ indexed_at INTEGER NOT NULL DEFAULT (unixepoch('now'))
18954
+ );
18955
+
18956
+ CREATE TABLE files (
18957
+ id TEXT PRIMARY KEY,
18958
+ path TEXT NOT NULL,
18959
+ relative_path TEXT NOT NULL UNIQUE,
18960
+ language TEXT NOT NULL,
18961
+ line_count INTEGER NOT NULL,
18962
+ exports TEXT NOT NULL DEFAULT '[]',
18963
+ content TEXT
18964
+ );
18965
+
18966
+ CREATE TABLE symbols (
18967
+ id TEXT PRIMARY KEY,
18968
+ name TEXT NOT NULL,
18969
+ kind TEXT NOT NULL,
18970
+ file_path TEXT NOT NULL,
18971
+ start_line INTEGER NOT NULL,
18972
+ end_line INTEGER NOT NULL,
18973
+ exported INTEGER NOT NULL DEFAULT 0,
18974
+ async INTEGER NOT NULL DEFAULT 0,
18975
+ params INTEGER,
18976
+ branches INTEGER,
18977
+ signature TEXT
18978
+ );
18979
+
18980
+ CREATE TABLE imports (
18981
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
18982
+ file_id TEXT NOT NULL,
18983
+ file_path TEXT NOT NULL,
18984
+ source_path TEXT NOT NULL,
18985
+ resolved_path TEXT NOT NULL DEFAULT '',
18986
+ imported_symbols TEXT NOT NULL DEFAULT '[]',
18987
+ is_type_only INTEGER NOT NULL DEFAULT 0,
18988
+ is_dynamic INTEGER NOT NULL DEFAULT 0,
18989
+ line INTEGER NOT NULL DEFAULT 0
18990
+ );
18991
+
18992
+ CREATE TABLE call_edges (
18993
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
18994
+ caller_id TEXT NOT NULL,
18995
+ callee_id TEXT NOT NULL,
18996
+ caller_name TEXT NOT NULL,
18997
+ callee_name TEXT NOT NULL,
18998
+ caller_file TEXT NOT NULL,
18999
+ callee_file TEXT NOT NULL
19000
+ );
19001
+
19002
+ CREATE TABLE routes (
19003
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
19004
+ path TEXT NOT NULL,
19005
+ method TEXT NOT NULL,
19006
+ handler TEXT NOT NULL,
19007
+ file TEXT NOT NULL,
19008
+ line INTEGER NOT NULL DEFAULT 0,
19009
+ middleware TEXT NOT NULL DEFAULT '[]',
19010
+ auth INTEGER
19011
+ );
19012
+
19013
+ CREATE TABLE services (
19014
+ id TEXT PRIMARY KEY,
19015
+ name TEXT NOT NULL,
19016
+ root_path TEXT NOT NULL DEFAULT ''
19017
+ );
19018
+
19019
+ CREATE TABLE embeddings (
19020
+ path TEXT NOT NULL,
19021
+ chunk_id TEXT NOT NULL,
19022
+ chunk_type TEXT NOT NULL DEFAULT 'file',
19023
+ content_hash TEXT NOT NULL,
19024
+ vector BLOB NOT NULL,
19025
+ metadata TEXT NOT NULL DEFAULT '{}',
19026
+ PRIMARY KEY (path, chunk_id)
19027
+ );
19028
+
19029
+ -- Indexes for fast lookups
19030
+ CREATE INDEX idx_symbols_file ON symbols(file_path);
19031
+ CREATE INDEX idx_symbols_name ON symbols(name);
19032
+ CREATE INDEX idx_symbols_kind ON symbols(kind);
19033
+ CREATE INDEX idx_imports_file ON imports(file_path);
19034
+ CREATE INDEX idx_imports_source ON imports(source_path);
19035
+ CREATE INDEX idx_imports_resolved ON imports(resolved_path);
19036
+ CREATE INDEX idx_call_edges_caller ON call_edges(caller_file);
19037
+ CREATE INDEX idx_call_edges_callee ON call_edges(callee_file);
19038
+ CREATE INDEX idx_embeddings_type ON embeddings(chunk_type);
19039
+ `);
19040
+ this.setMeta("schema_version", String(SCHEMA_VERSION));
19041
+ this.setMeta("created_at", (/* @__PURE__ */ new Date()).toISOString());
19042
+ }
19043
+ getSchemaVersion() {
19044
+ try {
19045
+ const row = this.db.prepare("SELECT value FROM meta WHERE key = ?").get("schema_version");
19046
+ return row ? Number.parseInt(row.value, 10) : 0;
19047
+ } catch {
19048
+ return 0;
19049
+ }
19050
+ }
19051
+ setMeta(key, value) {
19052
+ this.db.prepare("INSERT OR REPLACE INTO meta (key, value) VALUES (?, ?)").run(key, value);
19053
+ }
19054
+ getMeta(key) {
19055
+ const row = this.db.prepare("SELECT value FROM meta WHERE key = ?").get(key);
19056
+ return row?.value ?? null;
19057
+ }
19058
+ // ═══════════════════════════════════════════════════════════════════════════
19059
+ // INCREMENTAL INDEXING
19060
+ // ═══════════════════════════════════════════════════════════════════════════
19061
+ /**
19062
+ * Discover files, diff against stored hashes, return only changed files.
19063
+ */
19064
+ async diffFiles() {
19065
+ const discoveredFiles = await this.discoverFiles();
19066
+ const storedHashes = this.getStoredHashes();
19067
+ const changed = [];
19068
+ const unchanged = [];
19069
+ const currentPaths = /* @__PURE__ */ new Set();
19070
+ for (const file of discoveredFiles) {
19071
+ currentPaths.add(file.relativePath);
19072
+ const stored = storedHashes.get(file.relativePath);
19073
+ if (!stored || stored.contentHash !== file.contentHash) {
19074
+ changed.push(file.relativePath);
19075
+ } else {
19076
+ unchanged.push(file.relativePath);
19077
+ }
19078
+ }
19079
+ const deleted = [];
19080
+ for (const storedPath of storedHashes.keys()) {
19081
+ if (!currentPaths.has(storedPath)) {
19082
+ deleted.push(storedPath);
19083
+ }
19084
+ }
19085
+ return { changed, deleted, unchanged };
19086
+ }
19087
+ /**
19088
+ * Full reindex — scan all files and store data.
19089
+ * Returns parsed CodebaseData + stats.
19090
+ */
19091
+ async reindex(parser4) {
19092
+ const startMs = Date.now();
19093
+ const { changed, deleted, unchanged } = await this.diffFiles();
19094
+ if (deleted.length > 0) {
19095
+ this.removeFiles(deleted);
19096
+ }
19097
+ const parsedFiles = [];
19098
+ for (const relPath of changed) {
19099
+ const absPath = join(this.rootPath, relPath);
19100
+ try {
19101
+ const parsed = await parser4.parseFile(absPath, relPath);
19102
+ parsedFiles.push(parsed);
19103
+ } catch {
19104
+ }
19105
+ }
19106
+ this.storeFiles(parsedFiles);
19107
+ const data = this.loadCodebaseData();
19108
+ const stats = {
19109
+ totalFiles: data.files.length,
19110
+ totalSymbols: data.symbols.length,
19111
+ totalImports: data.imports.length,
19112
+ indexedAt: (/* @__PURE__ */ new Date()).toISOString(),
19113
+ reindexedFiles: changed.length,
19114
+ skippedFiles: unchanged.length,
19115
+ durationMs: Date.now() - startMs
19116
+ };
19117
+ this.setMeta("last_index_at", stats.indexedAt);
19118
+ this.setMeta("last_index_stats", JSON.stringify(stats));
19119
+ return { data, stats };
19120
+ }
19121
+ /**
19122
+ * Incremental update — only reindex specific files.
19123
+ */
19124
+ async reindexFiles(relativePaths, parser4) {
19125
+ const startMs = Date.now();
19126
+ this.removeFiles(relativePaths);
19127
+ const parsedFiles = [];
19128
+ for (const relPath of relativePaths) {
19129
+ const absPath = join(this.rootPath, relPath);
19130
+ try {
19131
+ const parsed = await parser4.parseFile(absPath, relPath);
19132
+ parsedFiles.push(parsed);
19133
+ } catch {
19134
+ }
19135
+ }
19136
+ this.storeFiles(parsedFiles);
19137
+ const totalFiles = this.db.prepare("SELECT COUNT(*) as cnt FROM files").get();
19138
+ const totalSymbols = this.db.prepare("SELECT COUNT(*) as cnt FROM symbols").get();
19139
+ const totalImports = this.db.prepare("SELECT COUNT(*) as cnt FROM imports").get();
19140
+ return {
19141
+ totalFiles: totalFiles.cnt,
19142
+ totalSymbols: totalSymbols.cnt,
19143
+ totalImports: totalImports.cnt,
19144
+ indexedAt: (/* @__PURE__ */ new Date()).toISOString(),
19145
+ reindexedFiles: parsedFiles.length,
19146
+ skippedFiles: 0,
19147
+ durationMs: Date.now() - startMs
19148
+ };
19149
+ }
19150
+ // ═══════════════════════════════════════════════════════════════════════════
19151
+ // DATA LOADING (warm start)
19152
+ // ═══════════════════════════════════════════════════════════════════════════
19153
+ /**
19154
+ * Load full CodebaseData from the persistent store.
19155
+ * This is the warm-start path — sub-second for indexed repos.
19156
+ */
19157
+ loadCodebaseData() {
19158
+ const files = this.loadFiles();
19159
+ const symbols = this.loadSymbols();
19160
+ const imports = this.loadImports();
19161
+ const callEdges = this.loadCallEdges();
19162
+ const routes = this.loadRoutes();
19163
+ const services = this.loadServices();
19164
+ return { files, symbols, imports, callEdges, routes, services };
19165
+ }
19166
+ /**
19167
+ * Check if the index exists and has data.
19168
+ */
19169
+ isPopulated() {
19170
+ try {
19171
+ const row = this.db.prepare("SELECT COUNT(*) as cnt FROM files").get();
19172
+ return row.cnt > 0;
19173
+ } catch {
19174
+ return false;
19175
+ }
19176
+ }
19177
+ /**
19178
+ * Get the last index timestamp.
19179
+ */
19180
+ getLastIndexedAt() {
19181
+ return this.getMeta("last_index_at");
19182
+ }
19183
+ /**
19184
+ * Get the last index stats.
19185
+ */
19186
+ getLastIndexStats() {
19187
+ const raw = this.getMeta("last_index_stats");
19188
+ if (!raw) return null;
19189
+ try {
19190
+ return JSON.parse(raw);
19191
+ } catch {
19192
+ return null;
19193
+ }
19194
+ }
19195
+ // ═══════════════════════════════════════════════════════════════════════════
19196
+ // EMBEDDING STORAGE
19197
+ // ═══════════════════════════════════════════════════════════════════════════
19198
+ /**
19199
+ * Store a chunk embedding (file-level, function-level, etc.)
19200
+ */
19201
+ storeEmbedding(path10, chunkId, chunkType, contentHash, vector, metadata) {
19202
+ const vectorBuf = Buffer.from(new Float32Array(vector).buffer);
19203
+ this.db.prepare(`
19204
+ INSERT OR REPLACE INTO embeddings (path, chunk_id, chunk_type, content_hash, vector, metadata)
19205
+ VALUES (?, ?, ?, ?, ?, ?)
19206
+ `).run(path10, chunkId, chunkType, contentHash, vectorBuf, JSON.stringify(metadata ?? {}));
19207
+ }
19208
+ /**
19209
+ * Load embedding for a specific chunk.
19210
+ */
19211
+ loadEmbedding(path10, chunkId) {
19212
+ const row = this.db.prepare("SELECT vector, content_hash, metadata FROM embeddings WHERE path = ? AND chunk_id = ?").get(path10, chunkId);
19213
+ if (!row) return null;
19214
+ return {
19215
+ vector: Array.from(new Float32Array(row.vector.buffer, row.vector.byteOffset, row.vector.byteLength / 4)),
19216
+ contentHash: row.content_hash,
19217
+ metadata: JSON.parse(row.metadata)
19218
+ };
19219
+ }
19220
+ /**
19221
+ * Load all embeddings of a given type for vector search.
19222
+ */
19223
+ loadEmbeddingsByType(chunkType) {
19224
+ const rows = this.db.prepare("SELECT path, chunk_id, vector, content_hash, metadata FROM embeddings WHERE chunk_type = ?").all(chunkType);
19225
+ return rows.map((row) => ({
19226
+ path: row.path,
19227
+ chunkId: row.chunk_id,
19228
+ vector: Array.from(new Float32Array(row.vector.buffer, row.vector.byteOffset, row.vector.byteLength / 4)),
19229
+ contentHash: row.content_hash,
19230
+ metadata: JSON.parse(row.metadata)
19231
+ }));
19232
+ }
19233
+ /**
19234
+ * Remove stale embeddings for files no longer in the index.
19235
+ */
19236
+ pruneStaleEmbeddings() {
19237
+ const result = this.db.prepare(`
19238
+ DELETE FROM embeddings WHERE path NOT IN (SELECT relative_path FROM files)
19239
+ `).run();
19240
+ return result.changes;
19241
+ }
19242
+ // ═══════════════════════════════════════════════════════════════════════════
19243
+ // QUERYING
19244
+ // ═══════════════════════════════════════════════════════════════════════════
19245
+ /**
19246
+ * Get all symbols in a specific file.
19247
+ */
19248
+ getSymbolsForFile(filePath) {
19249
+ return this.loadSymbolsWhere("file_path = ?", [filePath]);
19250
+ }
19251
+ /**
19252
+ * Search symbols by name pattern.
19253
+ */
19254
+ searchSymbols(namePattern, limit = 50) {
19255
+ return this.loadSymbolsWhere("name LIKE ?", [`%${namePattern}%`]).slice(0, limit);
19256
+ }
19257
+ /**
19258
+ * Get files that import a given file.
19259
+ */
19260
+ getDependents(filePath) {
19261
+ const rows = this.db.prepare("SELECT DISTINCT file_path FROM imports WHERE resolved_path = ?").all(filePath);
19262
+ return rows.map((r) => r.file_path);
19263
+ }
19264
+ /**
19265
+ * Get files that a given file imports.
19266
+ */
19267
+ getDependencies(filePath) {
19268
+ const rows = this.db.prepare('SELECT DISTINCT resolved_path FROM imports WHERE file_path = ? AND resolved_path != ""').all(filePath);
19269
+ return rows.map((r) => r.resolved_path);
19270
+ }
19271
+ // ═══════════════════════════════════════════════════════════════════════════
19272
+ // CLEANUP
19273
+ // ═══════════════════════════════════════════════════════════════════════════
19274
+ /**
19275
+ * Close the database connection.
19276
+ */
19277
+ close() {
19278
+ this.db.close();
19279
+ }
19280
+ /**
19281
+ * Wipe all data and rebuild schema.
19282
+ */
19283
+ reset() {
19284
+ this.db.exec("DROP TABLE IF EXISTS file_hashes");
19285
+ this.db.exec("DROP TABLE IF EXISTS files");
19286
+ this.db.exec("DROP TABLE IF EXISTS symbols");
19287
+ this.db.exec("DROP TABLE IF EXISTS imports");
19288
+ this.db.exec("DROP TABLE IF EXISTS call_edges");
19289
+ this.db.exec("DROP TABLE IF EXISTS routes");
19290
+ this.db.exec("DROP TABLE IF EXISTS services");
19291
+ this.db.exec("DROP TABLE IF EXISTS embeddings");
19292
+ this.db.exec("DROP TABLE IF EXISTS meta");
19293
+ this.initSchema();
19294
+ }
19295
+ // ═══════════════════════════════════════════════════════════════════════════
19296
+ // PRIVATE — File Discovery
19297
+ // ═══════════════════════════════════════════════════════════════════════════
19298
+ async discoverFiles() {
19299
+ const files = await (0, import_fast_glob2.glob)(this.config.includePatterns, {
19300
+ cwd: this.rootPath,
19301
+ ignore: this.config.excludePatterns,
19302
+ absolute: false,
19303
+ dot: false,
19304
+ onlyFiles: true
19427
19305
  });
19428
- if (fp.router) {
19429
- playbooks.push({
19430
- taskType: "Add API Endpoint",
19431
- steps: [
19432
- "Check existing routes in truthpack to avoid duplicates",
19433
- "Create the route handler following existing patterns",
19434
- "Add input validation using the project validator",
19435
- "Add authentication middleware if the route is protected",
19436
- "Write tests for success, validation failure, and auth failure",
19437
- "Update the truthpack (run vibecheck scan)"
19438
- ],
19439
- mustRead: ["truthpack/routes.json", ...this.dna.patterns.filter((p) => p.category === "api").map((p) => p.exemplar)],
19440
- mustUpdate: ["Route file", "Test file", "Truthpack"],
19441
- mustVerify: ["Route responds correctly", "Input validation works", "Auth is enforced", "Test passes"],
19442
- stopConditions: ["Do not create duplicate routes", "Do not hardcode mock data in handlers"]
19443
- });
19306
+ const hashes = [];
19307
+ const limit = this.config.maxFiles;
19308
+ for (const relPath of files.slice(0, limit)) {
19309
+ const absPath = join(this.rootPath, relPath);
19310
+ try {
19311
+ const fileStat = await stat(absPath);
19312
+ if (fileStat.size > this.config.maxFileSize) continue;
19313
+ const content = await readFile(absPath, "utf-8");
19314
+ hashes.push({
19315
+ relativePath: relPath.replace(/\\/g, "/"),
19316
+ contentHash: hashContent2(content),
19317
+ sizeBytes: fileStat.size,
19318
+ modifiedMs: Math.floor(fileStat.mtimeMs)
19319
+ });
19320
+ } catch {
19321
+ }
19444
19322
  }
19445
- if (fp.framework.includes("Next") || fp.framework.includes("React")) {
19446
- playbooks.push({
19447
- taskType: "Add UI Component",
19448
- steps: [
19449
- "Check if a similar component already exists",
19450
- "Create the component following existing naming and structure patterns",
19451
- "Add TypeScript props interface",
19452
- "Add unit test for the component",
19453
- "If using state, determine if it should be a client component"
19454
- ],
19455
- mustRead: this.dna.patterns.filter((p) => p.category === "ui" || p.category === "state").map((p) => p.exemplar),
19456
- mustUpdate: ["Component file", "Test file", "Parent component that uses it"],
19457
- mustVerify: ["Component renders correctly", "Props are typed", "Test passes"],
19458
- stopConditions: ['Do not use "any" type for props', 'Do not add useState in server components without "use client"']
19323
+ return hashes;
19324
+ }
19325
+ getStoredHashes() {
19326
+ const rows = this.db.prepare("SELECT relative_path, content_hash, size_bytes, modified_ms FROM file_hashes").all();
19327
+ const map = /* @__PURE__ */ new Map();
19328
+ for (const row of rows) {
19329
+ map.set(row.relative_path, {
19330
+ relativePath: row.relative_path,
19331
+ contentHash: row.content_hash,
19332
+ sizeBytes: row.size_bytes,
19333
+ modifiedMs: row.modified_ms
19459
19334
  });
19460
19335
  }
19461
- playbooks.push({
19462
- taskType: "Refactor",
19463
- steps: [
19464
- "Identify all callers/dependents of the code being refactored",
19465
- "Ensure comprehensive tests exist before refactoring",
19466
- "Apply changes incrementally, testing after each step",
19467
- "Update all dependents to use the new API",
19468
- "Remove old code only after all dependents are migrated",
19469
- "Verify no architecture rules are violated"
19470
- ],
19471
- mustRead: ["Dependency graph for affected files"],
19472
- mustUpdate: ["Refactored file", "All dependent files", "Tests"],
19473
- mustVerify: ["All tests pass", "No new violations", "No regressions"],
19474
- stopConditions: ["Never break existing public APIs without migration path", "Do not refactor and add features in the same change"]
19336
+ return map;
19337
+ }
19338
+ // ═══════════════════════════════════════════════════════════════════════════
19339
+ // PRIVATE Storage
19340
+ // ═══════════════════════════════════════════════════════════════════════════
19341
+ storeFiles(parsedFiles) {
19342
+ if (parsedFiles.length === 0) return;
19343
+ const insertHash = this.db.prepare(
19344
+ "INSERT OR REPLACE INTO file_hashes (relative_path, content_hash, size_bytes, modified_ms) VALUES (?, ?, ?, ?)"
19345
+ );
19346
+ const insertFile = this.db.prepare(
19347
+ "INSERT OR REPLACE INTO files (id, path, relative_path, language, line_count, exports, content) VALUES (?, ?, ?, ?, ?, ?, ?)"
19348
+ );
19349
+ const insertSymbol = this.db.prepare(
19350
+ "INSERT OR REPLACE INTO symbols (id, name, kind, file_path, start_line, end_line, exported, async, params, branches, signature) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
19351
+ );
19352
+ const insertImport = this.db.prepare(
19353
+ "INSERT INTO imports (file_id, file_path, source_path, resolved_path, imported_symbols, is_type_only, is_dynamic, line) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"
19354
+ );
19355
+ const insertCallEdge = this.db.prepare(
19356
+ "INSERT INTO call_edges (caller_id, callee_id, caller_name, callee_name, caller_file, callee_file) VALUES (?, ?, ?, ?, ?, ?)"
19357
+ );
19358
+ const txn = this.db.transaction(() => {
19359
+ for (const parsed of parsedFiles) {
19360
+ const { file, symbols, imports, callEdges, contentHash } = parsed;
19361
+ insertHash.run(file.relativePath, contentHash, 0, Date.now());
19362
+ insertFile.run(
19363
+ file.id,
19364
+ file.path,
19365
+ file.relativePath,
19366
+ file.language,
19367
+ file.lineCount,
19368
+ JSON.stringify(file.exports),
19369
+ this.config.storeContent ? file.content ?? null : null
19370
+ );
19371
+ for (const sym of symbols) {
19372
+ insertSymbol.run(
19373
+ sym.id,
19374
+ sym.name,
19375
+ sym.kind,
19376
+ sym.filePath,
19377
+ sym.startLine,
19378
+ sym.endLine,
19379
+ sym.exported ? 1 : 0,
19380
+ sym.async ? 1 : 0,
19381
+ sym.params ?? null,
19382
+ sym.branches ?? null,
19383
+ sym.signature ?? null
19384
+ );
19385
+ }
19386
+ for (const imp of imports) {
19387
+ insertImport.run(
19388
+ imp.fileId,
19389
+ imp.filePath,
19390
+ imp.sourcePath,
19391
+ imp.resolvedPath,
19392
+ JSON.stringify(imp.importedSymbols),
19393
+ imp.isTypeOnly ? 1 : 0,
19394
+ imp.isDynamic ? 1 : 0,
19395
+ imp.line
19396
+ );
19397
+ }
19398
+ for (const edge of callEdges) {
19399
+ insertCallEdge.run(
19400
+ edge.callerId,
19401
+ edge.calleeId,
19402
+ edge.callerName,
19403
+ edge.calleeName,
19404
+ edge.callerFile,
19405
+ edge.calleeFile
19406
+ );
19407
+ }
19408
+ }
19475
19409
  });
19476
- return playbooks;
19410
+ txn();
19411
+ }
19412
+ removeFiles(relativePaths) {
19413
+ if (relativePaths.length === 0) return;
19414
+ const txn = this.db.transaction(() => {
19415
+ for (const relPath of relativePaths) {
19416
+ const absPath = join(this.rootPath, relPath);
19417
+ this.db.prepare("DELETE FROM file_hashes WHERE relative_path = ?").run(relPath);
19418
+ this.db.prepare("DELETE FROM files WHERE relative_path = ?").run(relPath);
19419
+ this.db.prepare("DELETE FROM symbols WHERE file_path = ? OR file_path = ?").run(absPath, relPath);
19420
+ this.db.prepare("DELETE FROM imports WHERE file_path = ? OR file_path = ?").run(absPath, relPath);
19421
+ this.db.prepare("DELETE FROM call_edges WHERE caller_file = ? OR callee_file = ? OR caller_file = ? OR callee_file = ?").run(absPath, absPath, relPath, relPath);
19422
+ this.db.prepare("DELETE FROM embeddings WHERE path = ?").run(relPath);
19423
+ }
19424
+ });
19425
+ txn();
19426
+ }
19427
+ // ═══════════════════════════════════════════════════════════════════════════
19428
+ // PRIVATE — Loading
19429
+ // ═══════════════════════════════════════════════════════════════════════════
19430
+ loadFiles() {
19431
+ const rows = this.db.prepare("SELECT id, path, relative_path, language, line_count, exports, content FROM files").all();
19432
+ return rows.map((row) => ({
19433
+ id: row.id,
19434
+ path: row.path,
19435
+ relativePath: row.relative_path,
19436
+ language: row.language,
19437
+ lineCount: row.line_count,
19438
+ exports: JSON.parse(row.exports),
19439
+ content: row.content ?? void 0
19440
+ }));
19441
+ }
19442
+ loadSymbols() {
19443
+ const rows = this.db.prepare("SELECT id, name, kind, file_path, start_line, end_line, exported, async, params, branches FROM symbols").all();
19444
+ return rows.map((row) => ({
19445
+ id: row.id,
19446
+ name: row.name,
19447
+ kind: row.kind,
19448
+ filePath: row.file_path,
19449
+ startLine: row.start_line,
19450
+ endLine: row.end_line,
19451
+ exported: row.exported === 1,
19452
+ async: row.async === 1,
19453
+ params: row.params ?? void 0,
19454
+ branches: row.branches ?? void 0
19455
+ }));
19456
+ }
19457
+ loadSymbolsWhere(where, params) {
19458
+ const rows = this.db.prepare(`SELECT id, name, kind, file_path, start_line, end_line, exported, async, params, branches FROM symbols WHERE ${where}`).all(...params);
19459
+ return rows.map((row) => ({
19460
+ id: row.id,
19461
+ name: row.name,
19462
+ kind: row.kind,
19463
+ filePath: row.file_path,
19464
+ startLine: row.start_line,
19465
+ endLine: row.end_line,
19466
+ exported: row.exported === 1,
19467
+ async: row.async === 1,
19468
+ params: row.params ?? void 0,
19469
+ branches: row.branches ?? void 0
19470
+ }));
19471
+ }
19472
+ loadImports() {
19473
+ const rows = this.db.prepare("SELECT file_id, file_path, source_path, resolved_path, imported_symbols, is_type_only, is_dynamic, line FROM imports").all();
19474
+ return rows.map((row) => ({
19475
+ fileId: row.file_id,
19476
+ filePath: row.file_path,
19477
+ sourcePath: row.source_path,
19478
+ resolvedPath: row.resolved_path,
19479
+ importedSymbols: JSON.parse(row.imported_symbols),
19480
+ isTypeOnly: row.is_type_only === 1,
19481
+ isDynamic: row.is_dynamic === 1,
19482
+ line: row.line
19483
+ }));
19484
+ }
19485
+ loadCallEdges() {
19486
+ const rows = this.db.prepare("SELECT caller_id, callee_id, caller_name, callee_name, caller_file, callee_file FROM call_edges").all();
19487
+ return rows.map((row) => ({
19488
+ callerId: row.caller_id,
19489
+ calleeId: row.callee_id,
19490
+ callerName: row.caller_name,
19491
+ calleeName: row.callee_name,
19492
+ callerFile: row.caller_file,
19493
+ calleeFile: row.callee_file
19494
+ }));
19495
+ }
19496
+ loadRoutes() {
19497
+ const rows = this.db.prepare("SELECT path, method, handler, file, line, middleware, auth FROM routes").all();
19498
+ return rows.map((row) => ({
19499
+ path: row.path,
19500
+ method: row.method,
19501
+ handler: row.handler,
19502
+ file: row.file,
19503
+ line: row.line,
19504
+ middleware: JSON.parse(row.middleware),
19505
+ auth: row.auth === null ? void 0 : row.auth === 1
19506
+ }));
19507
+ }
19508
+ loadServices() {
19509
+ const rows = this.db.prepare("SELECT id, name, root_path FROM services").all();
19510
+ return rows.map((row) => ({
19511
+ id: row.id,
19512
+ name: row.name,
19513
+ rootPath: row.root_path
19514
+ }));
19515
+ }
19516
+ };
19517
+ function hashContent2(content) {
19518
+ return createHash("sha256").update(content).digest("hex").slice(0, 16);
19519
+ }
19520
+ var EXT_LANG = {
19521
+ ".ts": "typescript",
19522
+ ".tsx": "typescript",
19523
+ ".js": "javascript",
19524
+ ".jsx": "javascript",
19525
+ ".mjs": "javascript",
19526
+ ".cjs": "javascript",
19527
+ ".py": "python",
19528
+ ".rs": "rust",
19529
+ ".go": "go",
19530
+ ".java": "java",
19531
+ ".c": "c",
19532
+ ".h": "c",
19533
+ ".cpp": "cpp",
19534
+ ".hpp": "cpp",
19535
+ ".rb": "ruby",
19536
+ ".swift": "swift",
19537
+ ".kt": "kotlin",
19538
+ ".lua": "lua",
19539
+ ".zig": "zig",
19540
+ ".cs": "csharp"
19541
+ };
19542
+ var TS_IMPORT_RE = /^import\s+(?:type\s+)?(?:\{[^}]*\}|[\w*]+(?:\s*,\s*\{[^}]*\})?)\s+from\s+['"]([^'"]+)['"]/gm;
19543
+ var TS_IMPORT_TYPE_RE = /^import\s+type\s+/;
19544
+ var TS_DYNAMIC_IMPORT_RE = /(?:import|require)\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
19545
+ var PY_IMPORT_RE = /^(?:from\s+([\w.]+)\s+import|import\s+([\w.]+))/gm;
19546
+ var GO_IMPORT_RE = /import\s+(?:\(\s*([\s\S]*?)\s*\)|"([^"]+)")/g;
19547
+ var DefaultFileParser = class {
19548
+ rootPath;
19549
+ treeSitterParser = null;
19550
+ treeSitterLoaded = false;
19551
+ constructor(rootPath) {
19552
+ this.rootPath = rootPath;
19477
19553
  }
19478
- // ═══════════════════════════════════════════════════════════════════════════
19479
- // VERIFICATION STEPS
19480
- // ═══════════════════════════════════════════════════════════════════════════
19481
- buildVerificationSteps() {
19482
- const fp = this.dna.fingerprint;
19483
- const steps = [];
19484
- if (fp.language === "TypeScript") {
19485
- steps.push({
19486
- trigger: "Any TypeScript file change",
19487
- checks: ["TypeScript compilation succeeds", "No new type errors introduced"],
19488
- commands: [fp.packageManager === "pnpm" ? "pnpm run check-types" : "npm run check-types"],
19489
- artifacts: []
19490
- });
19491
- }
19492
- if (fp.testRunner) {
19493
- steps.push({
19494
- trigger: "Any source file change",
19495
- checks: ["Related tests pass", "No test regressions"],
19496
- commands: [`${fp.packageManager} run test`],
19497
- artifacts: ["test-results.json"]
19498
- });
19499
- }
19500
- if (fp.router) {
19501
- steps.push({
19502
- trigger: "Route handler added or modified",
19503
- checks: ["Route responds with correct status", "Auth middleware is applied", "Input validation works"],
19504
- commands: ["vibecheck scan"],
19505
- artifacts: ["truthpack/routes.json"]
19506
- });
19507
- }
19508
- steps.push({
19509
- trigger: "Any source file change",
19510
- checks: ["No new architecture rule violations", "No new circular dependencies"],
19511
- commands: ["vibecheck arch-rules"],
19512
- artifacts: []
19513
- });
19514
- return steps;
19554
+ async parseFile(absolutePath, relativePath2) {
19555
+ const content = await readFile(absolutePath, "utf-8");
19556
+ const lines = content.split("\n");
19557
+ const ext2 = extname(absolutePath).toLowerCase();
19558
+ const language = EXT_LANG[ext2] ?? "unknown";
19559
+ const contentHash = createHash("sha256").update(content).digest("hex").slice(0, 16);
19560
+ const exports$1 = this.extractExports(content, language);
19561
+ const symbols = await this.extractSymbols(content, lines, absolutePath, relativePath2, language);
19562
+ const imports = this.extractImports(content, absolutePath, relativePath2, language);
19563
+ const callEdges = this.extractCallEdges(content, symbols, absolutePath);
19564
+ const fileId = `file:${relativePath2}`;
19565
+ const file = {
19566
+ id: fileId,
19567
+ path: absolutePath,
19568
+ relativePath: relativePath2,
19569
+ language,
19570
+ lineCount: lines.length,
19571
+ exports: exports$1,
19572
+ content
19573
+ };
19574
+ return { file, symbols, imports, callEdges, contentHash };
19515
19575
  }
19516
19576
  // ═══════════════════════════════════════════════════════════════════════════
19517
- // RISK BRIEFING
19577
+ // EXPORTS
19518
19578
  // ═══════════════════════════════════════════════════════════════════════════
19519
- buildRiskBriefing() {
19520
- const criticalFiles = this.dna.hotspots.filter((h) => h.score > 30).slice(0, 10).map((h) => h.file);
19521
- const recentViolations = this.ruleResult?.violations.filter((v) => v.severity === "error").slice(0, 10) || [];
19522
- const securityConcerns = [];
19523
- for (const risk of this.dna.riskMap) {
19524
- if (risk.riskLevel === "critical") {
19525
- securityConcerns.push(`${risk.file}: ${risk.factors.join(", ")}`);
19526
- }
19579
+ extractExports(content, language) {
19580
+ if (language !== "typescript" && language !== "javascript") return [];
19581
+ const exports$1 = [];
19582
+ const exportRe = /export\s+(?:default\s+)?(?:async\s+)?(?:function|class|const|let|var|type|interface|enum)\s+(\w+)/g;
19583
+ for (const match2 of content.matchAll(exportRe)) {
19584
+ if (match2[1]) exports$1.push(match2[1]);
19527
19585
  }
19528
- const testGaps = [];
19529
- const sourceFiles = this.data.files.filter(
19530
- (f) => !f.relativePath.includes(".test.") && !f.relativePath.includes(".spec.") && (f.relativePath.endsWith(".ts") || f.relativePath.endsWith(".tsx")) && !f.relativePath.includes("config") && !f.relativePath.includes(".d.ts")
19531
- );
19532
- const testFiles = this.data.files.filter(
19533
- (f) => f.relativePath.includes(".test.") || f.relativePath.includes(".spec.")
19534
- );
19535
- const testedBases = new Set(testFiles.map(
19536
- (f) => path2.basename(f.relativePath).replace(/\.(test|spec)\.(ts|tsx|js|jsx)$/, "")
19537
- ));
19538
- for (const file of sourceFiles) {
19539
- const baseName = path2.basename(file.relativePath).replace(/\.(ts|tsx|js|jsx)$/, "");
19540
- if (!testedBases.has(baseName) && file.exports.length > 0) {
19541
- testGaps.push(`${file.relativePath} has exports but no test file`);
19542
- }
19586
+ if (/export\s+default\s/.test(content) && !exports$1.includes("default")) {
19587
+ exports$1.push("default");
19543
19588
  }
19544
- const driftWarnings = [];
19545
- for (const cycle of this.graph.cycles) {
19546
- driftWarnings.push(`Circular dependency: ${cycle.nodes.slice(0, 3).join(" \u2192 ")}${cycle.nodes.length > 3 ? "..." : ""}`);
19589
+ const reExportRe = /export\s+\{([^}]+)\}\s+from/g;
19590
+ for (const match2 of content.matchAll(reExportRe)) {
19591
+ for (const sym of match2[1].split(",")) {
19592
+ const name = sym.trim().split(/\s+as\s+/).pop()?.trim();
19593
+ if (name) exports$1.push(name);
19594
+ }
19547
19595
  }
19548
- return {
19549
- criticalFiles,
19550
- recentViolations,
19551
- securityConcerns: securityConcerns.slice(0, 5),
19552
- testGaps: testGaps.slice(0, 10),
19553
- driftWarnings: driftWarnings.slice(0, 5)
19554
- };
19596
+ return [...new Set(exports$1)];
19555
19597
  }
19556
19598
  // ═══════════════════════════════════════════════════════════════════════════
19557
- // HELPERS
19599
+ // SYMBOLS
19558
19600
  // ═══════════════════════════════════════════════════════════════════════════
19559
- toRelative(filePath) {
19560
- if (filePath.startsWith(this.rootPath)) {
19561
- return filePath.slice(this.rootPath.length + 1).replace(/\\/g, "/");
19601
+ async extractSymbols(content, lines, absolutePath, relativePath2, language) {
19602
+ const tsSymbols = await this.tryTreeSitter(content, absolutePath);
19603
+ if (tsSymbols) {
19604
+ return tsSymbols.map((sym, i) => ({
19605
+ id: `sym:${relativePath2}:${sym.name}:${sym.line}`,
19606
+ name: sym.name,
19607
+ kind: mapTreeSitterKind(sym.kind),
19608
+ filePath: absolutePath,
19609
+ startLine: sym.line,
19610
+ endLine: sym.endLine,
19611
+ exported: this.isExported(content, sym.name, language),
19612
+ async: sym.signature.includes("async "),
19613
+ params: this.countParams(sym.signature),
19614
+ branches: void 0,
19615
+ signature: sym.signature
19616
+ }));
19562
19617
  }
19563
- return filePath.replace(/\\/g, "/");
19618
+ return this.extractSymbolsRegex(content, lines, absolutePath, relativePath2, language);
19564
19619
  }
19565
- };
19566
- var ContextExplainer = class {
19567
- /**
19568
- * Generate a rich explanation for a file.
19569
- */
19570
- explain(filePath, ctx) {
19571
- const fc = ctx.fileContext;
19572
- const paragraphs = [];
19573
- const quickFacts = [];
19574
- const warnings = [];
19575
- const relatedFiles = [];
19576
- const overlays = [];
19577
- const role = fc?.role ?? "unknown";
19578
- const roleSummary = this.buildRoleSummary(filePath, role, ctx);
19579
- if (fc) {
19580
- const depCount = fc.dependedOnBy.length;
19581
- const depsOnCount = fc.dependsOn.length;
19582
- if (depCount > 0 || depsOnCount > 0) {
19583
- const parts = [];
19584
- if (depCount > 0) {
19585
- const critical = depCount > 10 ? " \u2014 changes here have wide blast radius" : "";
19586
- parts.push(`${depCount} file${depCount > 1 ? "s" : ""} depend on this${critical}`);
19587
- }
19588
- if (depsOnCount > 0) {
19589
- parts.push(`it imports from ${depsOnCount} file${depsOnCount > 1 ? "s" : ""}`);
19590
- }
19591
- paragraphs.push({
19592
- heading: "Dependencies",
19593
- text: parts.join(". ") + ".",
19594
- importance: depCount > 10 ? "critical" : depCount > 5 ? "high" : "medium"
19595
- });
19596
- }
19597
- quickFacts.push({ label: "Role", value: role, icon: "layer" });
19598
- if (fc.layer) quickFacts.push({ label: "Layer", value: fc.layer, icon: "layer" });
19599
- quickFacts.push({ label: "Dependents", value: String(depCount), icon: "dependency" });
19600
- quickFacts.push({ label: "Dependencies", value: String(depsOnCount), icon: "dependency" });
19601
- if (fc.riskLevel === "critical" || fc.riskLevel === "high") {
19602
- quickFacts.push({ label: "Risk", value: fc.riskLevel.toUpperCase(), icon: "warning" });
19603
- warnings.push({
19604
- message: `This file is classified as ${fc.riskLevel} risk`,
19605
- severity: fc.riskLevel === "critical" ? "error" : "warning",
19606
- action: "Add comprehensive tests and review carefully before merging changes"
19607
- });
19608
- }
19609
- for (const dep of fc.dependedOnBy.slice(0, 5)) {
19610
- relatedFiles.push({
19611
- filePath: dep,
19612
- reason: `Imports from this file`,
19613
- relationship: "depended-by",
19614
- confidence: 0.9
19615
- });
19620
+ async tryTreeSitter(content, filePath) {
19621
+ if (!this.treeSitterLoaded) {
19622
+ this.treeSitterLoaded = true;
19623
+ try {
19624
+ const mod = await import('./tree-sitter-H5E7LKR4-MKO3NNLJ.js');
19625
+ this.treeSitterParser = mod.parseWithTreeSitter;
19626
+ } catch {
19627
+ this.treeSitterParser = null;
19616
19628
  }
19617
- overlays.push({
19618
- type: "code-lens",
19619
- line: 1,
19620
- text: `${depCount} dependent${depCount !== 1 ? "s" : ""} \xB7 ${depsOnCount} import${depsOnCount !== 1 ? "s" : ""} \xB7 ${role}`,
19621
- tooltip: `This ${role} file has ${depCount} files that depend on it and imports from ${depsOnCount} files`
19622
- });
19623
19629
  }
19624
- if (fc && fc.conventions.length > 0) {
19625
- paragraphs.push({
19626
- heading: "Conventions",
19627
- text: `Follow these discovered conventions: ${fc.conventions.slice(0, 3).join("; ")}.`,
19628
- importance: "medium"
19629
- });
19630
+ if (!this.treeSitterParser) return null;
19631
+ try {
19632
+ const ext2 = extname(filePath).toLowerCase();
19633
+ const symbols = await this.treeSitterParser(content, ext2);
19634
+ if (!symbols || symbols.length === 0) return null;
19635
+ return flattenCodeSymbols(symbols);
19636
+ } catch {
19637
+ return null;
19630
19638
  }
19631
- if (ctx.rules) {
19632
- const fileViolations = ctx.rules.violations.filter(
19633
- (v) => v.sourceSymbol.filePath.includes(shortName(filePath)) || v.targetSymbol && v.targetSymbol.filePath.includes(shortName(filePath))
19634
- );
19635
- if (fileViolations.length > 0) {
19636
- const errors = fileViolations.filter((v) => v.severity === "error");
19637
- const warns = fileViolations.filter((v) => v.severity === "warning");
19638
- paragraphs.push({
19639
- heading: "Architecture Violations",
19640
- text: `${errors.length} error${errors.length !== 1 ? "s" : ""} and ${warns.length} warning${warns.length !== 1 ? "s" : ""} from architecture rules.`,
19641
- importance: errors.length > 0 ? "critical" : "high"
19642
- });
19643
- for (const v of fileViolations.slice(0, 5)) {
19644
- warnings.push({
19645
- message: v.message,
19646
- severity: v.severity === "error" ? "error" : "warning",
19647
- action: v.suggestedFix ?? "Review and fix the violation",
19648
- ruleId: v.ruleId
19649
- });
19650
- overlays.push({
19651
- type: "diagnostic",
19652
- line: v.sourceSymbol.line,
19653
- text: v.message,
19654
- severity: v.severity === "error" ? "error" : "warning",
19655
- tooltip: v.suggestedFix
19639
+ }
19640
+ extractSymbolsRegex(content, lines, absolutePath, relativePath2, language) {
19641
+ const symbols = [];
19642
+ if (language === "typescript" || language === "javascript") {
19643
+ const patterns = [
19644
+ { re: /^(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*(<[^>]*>)?\s*\(([^)]*)\)/gm, kind: "function" },
19645
+ { re: /^(?:export\s+)?(?:abstract\s+)?class\s+(\w+)/gm, kind: "class" },
19646
+ { re: /^(?:export\s+)?interface\s+(\w+)/gm, kind: "interface" },
19647
+ { re: /^(?:export\s+)?type\s+(\w+)\s*(?:<[^>]*>)?\s*=/gm, kind: "type" },
19648
+ { re: /^(?:export\s+)?(?:const\s+)?enum\s+(\w+)/gm, kind: "enum" },
19649
+ { re: /^(?:export\s+)?const\s+(\w+)\s*(?::\s*[^=]+)?\s*=\s*(?:async\s+)?\(/gm, kind: "function" }
19650
+ ];
19651
+ for (const { re, kind } of patterns) {
19652
+ for (const match2 of content.matchAll(re)) {
19653
+ const name = match2[1];
19654
+ if (!name) continue;
19655
+ const line = content.slice(0, match2.index).split("\n").length;
19656
+ const endLine = this.findBlockEnd(lines, line - 1);
19657
+ symbols.push({
19658
+ id: `sym:${relativePath2}:${name}:${line}`,
19659
+ name,
19660
+ kind,
19661
+ filePath: absolutePath,
19662
+ startLine: line,
19663
+ endLine,
19664
+ exported: this.isExported(content, name, language),
19665
+ async: match2[0].includes("async"),
19666
+ params: this.countParams(match2[0]),
19667
+ branches: this.countBranches(lines, line - 1, endLine - 1),
19668
+ signature: match2[0].trim().replace(/\s*\{?\s*$/, "")
19656
19669
  });
19657
19670
  }
19658
19671
  }
19659
19672
  }
19660
- if (ctx.callGraph) {
19661
- const fileNodes = ctx.callGraph.nodes.filter((n) => n.filePath.includes(shortName(filePath)));
19662
- const hotFunctions = fileNodes.filter((n) => n.callerCount > 5);
19663
- if (hotFunctions.length > 0) {
19664
- paragraphs.push({
19665
- heading: "Hot Functions",
19666
- text: hotFunctions.map(
19667
- (f) => `\`${f.name}\` is called by ${f.callerCount} function${f.callerCount !== 1 ? "s" : ""}${f.calleeCount > 0 ? ` and calls ${f.calleeCount}` : ""}`
19668
- ).join(". ") + ".",
19669
- importance: "high"
19673
+ if (language === "python") {
19674
+ const re = /^(?:async\s+)?(?:def|class)\s+(\w+)/gm;
19675
+ for (const match2 of content.matchAll(re)) {
19676
+ const name = match2[1];
19677
+ const kind = match2[0].includes("class") ? "class" : "function";
19678
+ const line = content.slice(0, match2.index).split("\n").length;
19679
+ symbols.push({
19680
+ id: `sym:${relativePath2}:${name}:${line}`,
19681
+ name,
19682
+ kind,
19683
+ filePath: absolutePath,
19684
+ startLine: line,
19685
+ endLine: line + 10,
19686
+ exported: true,
19687
+ async: match2[0].includes("async"),
19688
+ params: this.countParams(match2[0]),
19689
+ branches: void 0,
19690
+ signature: match2[0].trim()
19670
19691
  });
19671
- for (const fn of hotFunctions) {
19672
- overlays.push({
19673
- type: "code-lens",
19674
- line: 0,
19675
- // Would need symbol line mapping
19676
- text: `${fn.callerCount} callers \xB7 ${fn.calleeCount} callees`,
19677
- tooltip: `Function ${fn.name} has ${fn.callerCount} callers and ${fn.calleeCount} callees`
19678
- });
19679
- }
19680
- }
19681
- const deadInFile = ctx.callGraph.stats.deadFunctions.filter(
19682
- (d) => d.filePath.includes(shortName(filePath))
19683
- );
19684
- if (deadInFile.length > 0) {
19685
- for (const dead of deadInFile) {
19686
- warnings.push({
19687
- message: `\`${dead.name}\` appears to be dead code (exported but never called)`,
19688
- severity: "info",
19689
- action: "Verify this function is not called via dynamic dispatch or external consumers, then consider removing it"
19690
- });
19691
- }
19692
19692
  }
19693
19693
  }
19694
- if (ctx.temporal) {
19695
- const hotspot = ctx.temporal.changeHotspots.find((h) => filePath.includes(h.file) || h.file.includes(shortName(filePath)));
19696
- if (hotspot) {
19697
- const daysSince = Math.round((Date.now() - new Date(hotspot.lastChanged).getTime()) / 864e5);
19698
- paragraphs.push({
19699
- heading: "Recent Activity",
19700
- text: `Changed ${hotspot.commits} times in the last ${ctx.temporal.stats.analysisWindowDays} days by ${hotspot.authors} author${hotspot.authors !== 1 ? "s" : ""}. Last modified ${daysSince} day${daysSince !== 1 ? "s" : ""} ago.`,
19701
- importance: hotspot.commits > 10 ? "high" : "medium"
19694
+ return symbols;
19695
+ }
19696
+ // ═══════════════════════════════════════════════════════════════════════════
19697
+ // IMPORTS
19698
+ // ═══════════════════════════════════════════════════════════════════════════
19699
+ extractImports(content, absolutePath, relativePath2, language) {
19700
+ const imports = [];
19701
+ const fileId = `file:${relativePath2}`;
19702
+ if (language === "typescript" || language === "javascript") {
19703
+ for (const match2 of content.matchAll(TS_IMPORT_RE)) {
19704
+ const sourcePath = match2[1];
19705
+ const line = content.slice(0, match2.index).split("\n").length;
19706
+ const isTypeOnly = TS_IMPORT_TYPE_RE.test(match2[0]);
19707
+ const importedSymbols = this.extractImportedSymbols(match2[0]);
19708
+ const resolvedPath = this.resolveImportPath(sourcePath, absolutePath);
19709
+ imports.push({
19710
+ fileId,
19711
+ filePath: absolutePath,
19712
+ sourcePath,
19713
+ resolvedPath,
19714
+ importedSymbols,
19715
+ isTypeOnly,
19716
+ isDynamic: false,
19717
+ line
19702
19718
  });
19703
- quickFacts.push({ label: "Recent commits", value: String(hotspot.commits), icon: "git" });
19704
- quickFacts.push({ label: "Last changed", value: `${daysSince}d ago`, icon: "git" });
19705
- quickFacts.push({ label: "Authors", value: String(hotspot.authors), icon: "git" });
19706
19719
  }
19707
- const churn = ctx.temporal.churnFiles.find((c) => filePath.includes(c.file) || c.file.includes(shortName(filePath)));
19708
- if (churn && churn.severity !== "low") {
19709
- warnings.push({
19710
- message: churn.reason,
19711
- severity: churn.severity === "high" ? "warning" : "info",
19712
- action: "Consider whether this file needs refactoring to reduce change frequency"
19720
+ for (const match2 of content.matchAll(TS_DYNAMIC_IMPORT_RE)) {
19721
+ const sourcePath = match2[1];
19722
+ const line = content.slice(0, match2.index).split("\n").length;
19723
+ imports.push({
19724
+ fileId,
19725
+ filePath: absolutePath,
19726
+ sourcePath,
19727
+ resolvedPath: this.resolveImportPath(sourcePath, absolutePath),
19728
+ importedSymbols: [],
19729
+ isTypeOnly: false,
19730
+ isDynamic: true,
19731
+ line
19713
19732
  });
19714
19733
  }
19715
- const expertise = ctx.temporal.authorExpertise.find(
19716
- (e) => filePath.startsWith(e.area) || filePath.includes(e.area)
19717
- );
19718
- if (expertise && expertise.busFactor === 1) {
19719
- warnings.push({
19720
- message: `Bus factor of 1 \u2014 ${expertise.primaryAuthor} has made ${expertise.authors[0]?.percentage}% of changes to this area`,
19721
- severity: "info",
19722
- action: "Consider knowledge sharing or pair programming for this area"
19734
+ }
19735
+ if (language === "python") {
19736
+ for (const match2 of content.matchAll(PY_IMPORT_RE)) {
19737
+ const sourcePath = match2[1] || match2[2];
19738
+ if (!sourcePath) continue;
19739
+ const line = content.slice(0, match2.index).split("\n").length;
19740
+ imports.push({
19741
+ fileId,
19742
+ filePath: absolutePath,
19743
+ sourcePath,
19744
+ resolvedPath: "",
19745
+ importedSymbols: [],
19746
+ isTypeOnly: false,
19747
+ isDynamic: false,
19748
+ line
19723
19749
  });
19724
19750
  }
19725
19751
  }
19726
- if (ctx.learned) {
19727
- const coEdits = ctx.learned.coEdits.filter((p) => p.files[0].includes(shortName(filePath)) || p.files[1].includes(shortName(filePath))).slice(0, 3);
19728
- if (coEdits.length > 0) {
19729
- for (const pair of coEdits) {
19730
- const other = pair.files[0].includes(shortName(filePath)) ? pair.files[1] : pair.files[0];
19731
- relatedFiles.push({
19732
- filePath: other,
19733
- reason: `Often edited together (${pair.count} times)`,
19734
- relationship: "co-edited",
19735
- confidence: pair.weight
19752
+ if (language === "go") {
19753
+ for (const match2 of content.matchAll(GO_IMPORT_RE)) {
19754
+ const block = match2[1] || match2[2];
19755
+ if (!block) continue;
19756
+ const paths = block.match(/"([^"]+)"/g) ?? [];
19757
+ for (const p of paths) {
19758
+ const sourcePath = p.replace(/"/g, "");
19759
+ imports.push({
19760
+ fileId,
19761
+ filePath: absolutePath,
19762
+ sourcePath,
19763
+ resolvedPath: "",
19764
+ importedSymbols: [],
19765
+ isTypeOnly: false,
19766
+ isDynamic: false,
19767
+ line: 0
19736
19768
  });
19737
19769
  }
19738
19770
  }
19739
19771
  }
19740
- if (fc && fc.editGuidance.length > 0) {
19741
- paragraphs.push({
19742
- heading: "Edit Guidance",
19743
- text: fc.editGuidance.join(" "),
19744
- importance: "medium"
19745
- });
19746
- }
19747
- return {
19748
- filePath,
19749
- roleSummary,
19750
- paragraphs,
19751
- quickFacts,
19752
- warnings,
19753
- relatedFiles,
19754
- overlays
19755
- };
19756
- }
19757
- /**
19758
- * Generate a compact markdown explanation for AI agent consumption.
19759
- */
19760
- explainForAgent(filePath, ctx) {
19761
- const explanation = this.explain(filePath, ctx);
19762
- const lines = [];
19763
- lines.push(`## ${shortName(filePath)}: ${explanation.roleSummary}`);
19764
- lines.push("");
19765
- if (explanation.quickFacts.length > 0) {
19766
- lines.push(explanation.quickFacts.map((f) => `**${f.label}**: ${f.value}`).join(" \xB7 "));
19767
- lines.push("");
19768
- }
19769
- for (const p of explanation.paragraphs.filter((p2) => p2.importance === "critical" || p2.importance === "high")) {
19770
- lines.push(`### ${p.heading}`);
19771
- lines.push(p.text);
19772
- lines.push("");
19773
- }
19774
- if (explanation.warnings.length > 0) {
19775
- lines.push("### Warnings");
19776
- for (const w of explanation.warnings) {
19777
- const icon = w.severity === "error" ? "MUST FIX" : w.severity === "warning" ? "SHOULD FIX" : "NOTE";
19778
- lines.push(`- **[${icon}]** ${w.message} \u2014 ${w.action}`);
19779
- }
19780
- lines.push("");
19781
- }
19782
- if (explanation.relatedFiles.length > 0) {
19783
- lines.push("### Related Files");
19784
- for (const rf of explanation.relatedFiles.slice(0, 5)) {
19785
- lines.push(`- \`${rf.filePath}\` \u2014 ${rf.reason}`);
19786
- }
19787
- lines.push("");
19788
- }
19789
- return lines.join("\n");
19790
- }
19791
- /**
19792
- * Generate IDE overlay data for the VS Code extension to consume.
19793
- */
19794
- getIDEOverlays(filePath, ctx) {
19795
- const explanation = this.explain(filePath, ctx);
19796
- return explanation.overlays;
19772
+ return imports;
19797
19773
  }
19798
- /**
19799
- * Generate a health report explanation.
19800
- */
19801
- explainHealth(health, dna) {
19802
- const lines = [];
19803
- const dims = health.dimensions;
19804
- lines.push(`## Codebase Health: ${health.overall}/100`);
19805
- lines.push("");
19806
- const entries = [
19807
- { name: "Architecture", score: dims.architecture, explain: this.explainArchScore(dims.architecture, dna) },
19808
- { name: "Test Coverage", score: dims.testCoverage, explain: this.explainTestScore(dims.testCoverage) },
19809
- { name: "Conventions", score: dims.conventions, explain: this.explainConventionScore(dims.conventions, dna) },
19810
- { name: "Dependencies", score: dims.dependencies, explain: this.explainDependencyScore(dims.dependencies, dna) },
19811
- { name: "Security", score: dims.security, explain: dims.security >= 80 ? "No critical security concerns detected" : "Critical risk areas identified" },
19812
- { name: "Complexity", score: dims.complexity, explain: dims.complexity >= 80 ? "Complexity is well-managed" : "High-complexity hotspots detected" }
19813
- ];
19814
- for (const entry of entries) {
19815
- const bar = this.renderBar(entry.score);
19816
- lines.push(`${bar} **${entry.name}**: ${entry.score}/100 \u2014 ${entry.explain}`);
19774
+ extractImportedSymbols(importLine) {
19775
+ const braceMatch = importLine.match(/\{([^}]+)\}/);
19776
+ if (!braceMatch) {
19777
+ const defaultMatch = importLine.match(/import\s+(?:type\s+)?(\w+)\s+from/);
19778
+ return defaultMatch?.[1] ? [defaultMatch[1]] : [];
19817
19779
  }
19818
- return lines.join("\n");
19780
+ return braceMatch[1].split(",").map((s) => {
19781
+ const parts = s.trim().split(/\s+as\s+/);
19782
+ return parts[parts.length - 1].trim();
19783
+ }).filter(Boolean);
19784
+ }
19785
+ resolveImportPath(sourcePath, fromFile) {
19786
+ if (!sourcePath.startsWith(".")) return sourcePath;
19787
+ const dir = dirname(fromFile);
19788
+ const resolved = resolve(dir, sourcePath);
19789
+ const extensions = [".ts", ".tsx", ".js", ".jsx", "/index.ts", "/index.tsx", "/index.js"];
19790
+ for (const ext2 of extensions) {
19791
+ const candidate = resolved + ext2;
19792
+ try {
19793
+ accessSync(candidate);
19794
+ return candidate;
19795
+ } catch {
19796
+ }
19797
+ }
19798
+ return resolved;
19819
19799
  }
19820
19800
  // ═══════════════════════════════════════════════════════════════════════════
19821
- // PRIVATE
19801
+ // CALL EDGES (basic extraction)
19822
19802
  // ═══════════════════════════════════════════════════════════════════════════
19823
- buildRoleSummary(filePath, role, ctx) {
19824
- const parts = [];
19825
- switch (role) {
19826
- case "service":
19827
- parts.push("Business logic service");
19828
- break;
19829
- case "route-handler":
19830
- parts.push("API route handler");
19831
- break;
19832
- case "component":
19833
- parts.push("UI component");
19834
- break;
19835
- case "repository":
19836
- parts.push("Data access layer");
19837
- break;
19838
- case "middleware":
19839
- parts.push("Request middleware");
19840
- break;
19841
- case "test":
19842
- parts.push("Test file");
19843
- break;
19844
- case "config":
19845
- parts.push("Configuration");
19846
- break;
19847
- case "type":
19848
- parts.push("Type definitions");
19849
- break;
19850
- case "util":
19851
- parts.push("Utility module");
19852
- break;
19853
- case "entry":
19854
- parts.push("Entry point");
19855
- break;
19856
- default:
19857
- parts.push("Source file");
19858
- }
19859
- if (ctx.fileContext) {
19860
- if (ctx.fileContext.layer) parts.push(`in ${ctx.fileContext.layer} layer`);
19861
- if (ctx.fileContext.dependedOnBy.length > 10) parts.push("(high-impact)");
19803
+ extractCallEdges(content, symbols, filePath) {
19804
+ const edges = [];
19805
+ const functionNames = new Set(symbols.filter((s) => s.kind === "function" || s.kind === "method").map((s) => s.name));
19806
+ for (const caller of symbols) {
19807
+ if (caller.kind !== "function" && caller.kind !== "method") continue;
19808
+ const body = content.split("\n").slice(caller.startLine - 1, caller.endLine).join("\n");
19809
+ for (const calleeName of functionNames) {
19810
+ if (calleeName === caller.name) continue;
19811
+ const callRe = new RegExp(`\\b${calleeName}\\s*\\(`, "g");
19812
+ if (callRe.test(body)) {
19813
+ const callee = symbols.find((s) => s.name === calleeName);
19814
+ if (callee) {
19815
+ edges.push({
19816
+ callerId: caller.id,
19817
+ calleeId: callee.id,
19818
+ callerName: caller.name,
19819
+ calleeName: callee.name,
19820
+ callerFile: filePath,
19821
+ calleeFile: filePath
19822
+ });
19823
+ }
19824
+ }
19825
+ }
19862
19826
  }
19863
- return parts.join(" ");
19864
- }
19865
- explainArchScore(score, dna) {
19866
- if (score >= 80) return `Strong architecture with ${dna.patterns.length} recognized patterns`;
19867
- if (score >= 50) return `Moderate architecture \u2014 ${dna.patterns.length} patterns detected, room to strengthen boundaries`;
19868
- return "Architecture needs attention \u2014 few recognized patterns or boundaries";
19827
+ return edges;
19869
19828
  }
19870
- explainTestScore(score) {
19871
- if (score >= 80) return "Good test coverage across source files";
19872
- if (score >= 50) return "Moderate coverage \u2014 some source files lack tests";
19873
- return "Low test coverage \u2014 many exported modules have no test files";
19829
+ // ═══════════════════════════════════════════════════════════════════════════
19830
+ // HELPERS
19831
+ // ═══════════════════════════════════════════════════════════════════════════
19832
+ isExported(content, name, language) {
19833
+ if (language === "python") return true;
19834
+ const re = new RegExp(`export\\s+(?:default\\s+)?(?:async\\s+)?(?:function|class|const|let|var|type|interface|enum)\\s+${name}\\b`);
19835
+ if (re.test(content)) return true;
19836
+ const reExport = new RegExp(`export\\s+\\{[^}]*\\b${name}\\b[^}]*\\}`);
19837
+ return reExport.test(content);
19874
19838
  }
19875
- explainConventionScore(score, dna) {
19876
- const strong = dna.conventions.filter((c) => c.confidence > 0.6).length;
19877
- if (score >= 80) return `${strong} strong conventions enforced consistently`;
19878
- if (score >= 50) return `${strong} conventions detected but inconsistently applied`;
19879
- return "Few consistent conventions \u2014 codebase style varies across files";
19839
+ countParams(signature) {
19840
+ const parenMatch = signature.match(/\(([^)]*)\)/);
19841
+ if (!parenMatch || !parenMatch[1].trim()) return 0;
19842
+ return parenMatch[1].split(",").length;
19880
19843
  }
19881
- explainDependencyScore(score, dna) {
19882
- const circular = dna.boundaries.filter((b) => b.isCircular).length;
19883
- if (score >= 80) return "Clean dependency graph with no circular dependencies";
19884
- if (circular > 0) return `${circular} circular dependency${circular > 1 ? "ies" : "y"} detected \u2014 these increase coupling and make code harder to reason about`;
19885
- return "Dependency health needs improvement";
19844
+ countBranches(lines, startIdx, endIdx) {
19845
+ let branches = 0;
19846
+ const branchRe = /\b(if|else if|case|for|while|catch|&&|\|\||\?\?)\b/g;
19847
+ for (let i = startIdx; i < Math.min(endIdx, lines.length); i++) {
19848
+ const matches = lines[i].match(branchRe);
19849
+ if (matches) branches += matches.length;
19850
+ }
19851
+ return branches;
19886
19852
  }
19887
- renderBar(score) {
19888
- const filled = Math.round(score / 10);
19889
- return "\u2588".repeat(filled) + "\u2591".repeat(10 - filled);
19853
+ findBlockEnd(lines, startIdx) {
19854
+ let depth = 0;
19855
+ let seenOpen = false;
19856
+ for (let i = startIdx; i < lines.length; i++) {
19857
+ for (const ch of lines[i]) {
19858
+ if (ch === "{") {
19859
+ depth++;
19860
+ seenOpen = true;
19861
+ } else if (ch === "}" && seenOpen) {
19862
+ depth--;
19863
+ if (depth <= 0) return i + 1;
19864
+ }
19865
+ }
19866
+ }
19867
+ return startIdx + 1;
19890
19868
  }
19891
19869
  };
19892
- function shortName(filePath) {
19893
- const parts = filePath.split("/");
19894
- return parts[parts.length - 1] ?? filePath;
19870
+ function mapTreeSitterKind(kind) {
19871
+ const map = {
19872
+ function: "function",
19873
+ method: "method",
19874
+ class: "class",
19875
+ interface: "interface",
19876
+ type: "type",
19877
+ enum: "enum",
19878
+ const: "variable",
19879
+ variable: "variable",
19880
+ struct: "class",
19881
+ trait: "interface",
19882
+ export: "variable"
19883
+ };
19884
+ return map[kind] ?? "function";
19885
+ }
19886
+ function flattenCodeSymbols(symbols) {
19887
+ const flat = [];
19888
+ for (const sym of symbols) {
19889
+ flat.push({ name: sym.name, kind: sym.kind, line: sym.line, endLine: sym.endLine, signature: sym.signature });
19890
+ if (sym.children && Array.isArray(sym.children)) {
19891
+ flat.push(...flattenCodeSymbols(sym.children));
19892
+ }
19893
+ }
19894
+ return flat;
19895
19895
  }
19896
19896
  function normalizePath(p) {
19897
19897
  return p.replace(/\\/g, "/").replace(/^\.\//, "");
@@ -19910,26 +19910,41 @@ function getGraphNeighbors(graph, filePath) {
19910
19910
  async function getProactiveContext(engine, rootPath, options) {
19911
19911
  const lines = [];
19912
19912
  const { focusedFile } = options;
19913
+ const maxItems = Math.max(1, options.maxItems ?? 5);
19913
19914
  const fileCtx = engine.getFileContext(focusedFile);
19914
19915
  if (fileCtx) {
19915
- lines.push(`## Focused: ${focusedFile}`);
19916
- lines.push(`Role: ${fileCtx.role}`);
19916
+ lines.push(`## Focus: ${focusedFile}`);
19917
+ lines.push(`Role: ${fileCtx.role} | Risk: ${fileCtx.riskLevel}`);
19918
+ if (fileCtx.editGuidance.length > 0) {
19919
+ lines.push(`Top guidance: ${fileCtx.editGuidance[0]}`);
19920
+ }
19921
+ lines.push("");
19922
+ lines.push("## Stay On Target");
19923
+ lines.push("- Edit only this file and direct neighbors unless requirements force expansion.");
19924
+ lines.push("- Reuse existing patterns from related files before creating new abstractions.");
19925
+ lines.push("- If uncertain about API/type existence, verify before generating code.");
19926
+ lines.push("");
19927
+ lines.push("## Immediate Neighbors");
19917
19928
  if (fileCtx.dependsOn.length > 0) {
19918
- lines.push(`Depends on: ${fileCtx.dependsOn.slice(0, 5).join(", ")}`);
19929
+ lines.push(`- Depends on: ${fileCtx.dependsOn.slice(0, maxItems).join(", ")}`);
19919
19930
  }
19920
19931
  if (fileCtx.dependedOnBy.length > 0) {
19921
- lines.push(`Depended on by: ${fileCtx.dependedOnBy.slice(0, 5).join(", ")}`);
19932
+ lines.push(`- Depended on by: ${fileCtx.dependedOnBy.slice(0, maxItems).join(", ")}`);
19922
19933
  }
19923
19934
  if (fileCtx.relatedFiles.length > 0) {
19924
- lines.push(`Related: ${fileCtx.relatedFiles.slice(0, 5).join(", ")}`);
19935
+ lines.push(`- Related: ${fileCtx.relatedFiles.slice(0, maxItems).join(", ")}`);
19925
19936
  }
19926
19937
  lines.push("");
19938
+ } else {
19939
+ lines.push(`## Focus: ${focusedFile}`);
19940
+ lines.push("File context unavailable \u2014 keep scope minimal and verify dependencies manually.");
19941
+ lines.push("");
19927
19942
  }
19928
19943
  const graph = engine.getGraph();
19929
19944
  const neighbors = getGraphNeighbors(graph, focusedFile);
19930
19945
  if (neighbors.length > 0) {
19931
- lines.push("## Graph neighbors");
19932
- for (const n of neighbors.slice(0, 8)) {
19946
+ lines.push("## Graph Adjacency");
19947
+ for (const n of neighbors.slice(0, maxItems)) {
19933
19948
  lines.push(`- ${n}`);
19934
19949
  }
19935
19950
  lines.push("");
@@ -19938,16 +19953,16 @@ async function getProactiveContext(engine, rootPath, options) {
19938
19953
  const evolution = await import('./dist-JUOVMQEA.js');
19939
19954
  const boosted = await evolution.getBoostedFiles(rootPath, focusedFile);
19940
19955
  if (boosted.length > 0) {
19941
- lines.push("## Often edited together (learned)");
19942
- for (const { file, weight } of boosted.slice(0, 5)) {
19943
- lines.push(`- ${file} (weight: ${weight.toFixed(2)})`);
19956
+ lines.push("## Learned Co-Edit Priors");
19957
+ for (const { file, weight } of boosted.slice(0, maxItems)) {
19958
+ lines.push(`- ${file} (affinity: ${weight.toFixed(2)})`);
19944
19959
  }
19945
19960
  lines.push("");
19946
19961
  }
19947
19962
  const sequential = await evolution.getSequentialFiles(rootPath, focusedFile);
19948
19963
  if (sequential.length > 0) {
19949
- lines.push("## Often edited next (sequence)");
19950
- for (const { file, weight } of sequential.slice(0, 5)) {
19964
+ lines.push("## Likely Next Files");
19965
+ for (const { file, weight } of sequential.slice(0, maxItems)) {
19951
19966
  lines.push(`- ${file} (weight: ${weight.toFixed(2)})`);
19952
19967
  }
19953
19968
  lines.push("");
@@ -19963,26 +19978,33 @@ function runKeywordSearch(data, query, limit) {
19963
19978
  const symbols = [];
19964
19979
  const seenFiles = /* @__PURE__ */ new Set();
19965
19980
  const seenSymbols = /* @__PURE__ */ new Set();
19981
+ const overlapScores = [];
19966
19982
  for (const file of data.files) {
19967
19983
  const pathLower = file.relativePath.toLowerCase();
19968
- if (words.some((w) => pathLower.includes(w)) && !seenFiles.has(file.relativePath)) {
19984
+ const overlapCount = words.filter((w) => pathLower.includes(w)).length;
19985
+ if (overlapCount > 0 && !seenFiles.has(file.relativePath)) {
19969
19986
  seenFiles.add(file.relativePath);
19970
19987
  files.push(file.relativePath);
19988
+ overlapScores.push(overlapCount / Math.max(1, words.length));
19971
19989
  }
19972
19990
  }
19973
19991
  for (const sym of data.symbols) {
19974
19992
  const nameLower = sym.name.toLowerCase();
19975
- if (words.some((w) => nameLower.includes(w))) {
19993
+ const overlapCount = words.filter((w) => nameLower.includes(w)).length;
19994
+ if (overlapCount > 0) {
19976
19995
  const key = `${sym.filePath}:${sym.name}`;
19977
19996
  if (!seenSymbols.has(key)) {
19978
19997
  seenSymbols.add(key);
19979
19998
  symbols.push({ name: sym.name, file: sym.filePath, kind: sym.kind });
19999
+ overlapScores.push(overlapCount / Math.max(1, words.length));
19980
20000
  }
19981
20001
  }
19982
20002
  }
20003
+ const confidence = overlapScores.length > 0 ? Math.max(...overlapScores) : 0;
19983
20004
  return {
19984
20005
  files: files.slice(0, limit),
19985
- symbols: symbols.slice(0, limit)
20006
+ symbols: symbols.slice(0, limit),
20007
+ confidence
19986
20008
  };
19987
20009
  }
19988
20010
  function parseSymbolLocation(loc) {
@@ -19998,18 +20020,23 @@ function parseSymbolLocation(loc) {
19998
20020
  async function intentQuery(rootPath, data, query, options = {}) {
19999
20021
  const limit = options.limit ?? 10;
20000
20022
  const useSemantic = options.useSemantic ?? false;
20023
+ const strictGrounding = options.strictGrounding ?? true;
20024
+ const minConfidence = options.minConfidence ?? 0.35;
20001
20025
  const degradations = [];
20002
20026
  let files = [];
20003
20027
  let symbols = [];
20004
20028
  let usedSemantic = false;
20005
20029
  let semanticResults = [];
20030
+ let confidence = 0;
20006
20031
  if (useSemantic) {
20007
20032
  try {
20008
- const { semanticSearch } = await import('./semantic-PYRDZSHX.js');
20033
+ const { semanticSearch } = await import('./semantic-3IRDVTDG.js');
20009
20034
  const results = await semanticSearch(rootPath, query, { topK: limit * 2, embeddingProvider: options.embeddingProvider });
20010
20035
  if (results.length > 0) {
20011
20036
  usedSemantic = true;
20012
20037
  semanticResults = results;
20038
+ const topSemantic = results.slice(0, Math.max(1, Math.min(3, results.length)));
20039
+ confidence = topSemantic.reduce((sum, r) => sum + Math.max(0, r.score / 100), 0) / topSemantic.length;
20013
20040
  const seenFiles = /* @__PURE__ */ new Set();
20014
20041
  const seenSymbols = /* @__PURE__ */ new Set();
20015
20042
  for (const r of results) {
@@ -20042,6 +20069,7 @@ async function intentQuery(rootPath, data, query, options = {}) {
20042
20069
  const kw = runKeywordSearch(data, query, limit);
20043
20070
  files = kw.files;
20044
20071
  symbols = kw.symbols;
20072
+ confidence = kw.confidence;
20045
20073
  }
20046
20074
  const snippets = [];
20047
20075
  for (const sym of symbols.slice(0, limit)) {
@@ -20063,16 +20091,28 @@ async function intentQuery(rootPath, data, query, options = {}) {
20063
20091
  content: `${sym.kind} ${sym.name}`
20064
20092
  });
20065
20093
  }
20066
- const summary = files.length > 0 || symbols.length > 0 ? `Found ${files.length} files and ${symbols.length} symbols matching "${query}"${usedSemantic ? " (semantic)" : ""}` : `No files or symbols directly matching "${query}"`;
20094
+ const hasMatches = files.length > 0 || symbols.length > 0;
20095
+ const grounded = !strictGrounding || confidence >= minConfidence;
20096
+ const finalFiles = grounded ? files : [];
20097
+ const finalSymbols = grounded ? symbols : [];
20098
+ const finalSnippets = grounded ? snippets : [];
20099
+ const summary = !grounded ? `Withheld low-confidence matches for "${query}" (confidence ${confidence.toFixed(2)} < ${minConfidence.toFixed(2)}). Refine the query or disable strict grounding.` : hasMatches ? `Found ${files.length} files and ${symbols.length} symbols matching "${query}"${usedSemantic ? " (semantic)" : ""}` : `No files or symbols directly matching "${query}"`;
20067
20100
  return {
20068
20101
  summary,
20069
- files,
20070
- symbols,
20071
- snippets,
20102
+ files: finalFiles,
20103
+ symbols: finalSymbols,
20104
+ snippets: finalSnippets,
20072
20105
  execution: {
20073
20106
  semanticRequested: useSemantic,
20074
20107
  semanticUsed: usedSemantic,
20075
- degradations
20108
+ degradations,
20109
+ confidence,
20110
+ grounding: {
20111
+ strict: strictGrounding,
20112
+ minConfidence,
20113
+ evidenceCount: finalFiles.length + finalSymbols.length + finalSnippets.length,
20114
+ decision: grounded ? "pass" : "withheld"
20115
+ }
20076
20116
  }
20077
20117
  };
20078
20118
  }
@@ -21744,6 +21784,64 @@ var MCP_TOOLS = [
21744
21784
  }
21745
21785
  }
21746
21786
  }
21787
+ },
21788
+ {
21789
+ name: "isl_studio_apply",
21790
+ description: "Push code into the live ISL Studio editor running inside the user's VS Code / Cursor / Windsurf. Use when the user has ISL Studio open and you want changes to land in the live preview. Writes to .vibecheck/isl-studio-inbox.jsonl which the panel watches. The studio rebuilds the live preview automatically.",
21791
+ inputSchema: {
21792
+ type: "object",
21793
+ properties: {
21794
+ path: {
21795
+ type: "string",
21796
+ description: "Workspace root path. Defaults to current directory."
21797
+ },
21798
+ lang: {
21799
+ type: "string",
21800
+ enum: ["html", "css", "js"],
21801
+ description: "Which editor pane to update."
21802
+ },
21803
+ code: { type: "string", description: "Source code to write." },
21804
+ mode: {
21805
+ type: "string",
21806
+ enum: ["replace", "append"],
21807
+ description: "replace = overwrite the pane contents (default). append = add to end."
21808
+ }
21809
+ },
21810
+ required: ["lang", "code"]
21811
+ }
21812
+ },
21813
+ {
21814
+ name: "isl_studio_send_message",
21815
+ description: "Post a system or assistant message into the ISL Studio chat transcript. Use to surface progress updates, explanations, or follow-up questions while the user has ISL Studio open.",
21816
+ inputSchema: {
21817
+ type: "object",
21818
+ properties: {
21819
+ path: {
21820
+ type: "string",
21821
+ description: "Workspace root path. Defaults to current directory."
21822
+ },
21823
+ role: {
21824
+ type: "string",
21825
+ enum: ["assistant", "system"],
21826
+ description: "Display role for the message."
21827
+ },
21828
+ content: { type: "string", description: "Message body. Markdown is rendered as plain text but fenced code blocks become applyable." }
21829
+ },
21830
+ required: ["content"]
21831
+ }
21832
+ },
21833
+ {
21834
+ name: "isl_studio_get_state",
21835
+ description: "Read the current ISL Studio editor state (last saved html/css/js snapshot) from the workspace. Use before generating changes so you have full context of what the user is working on.",
21836
+ inputSchema: {
21837
+ type: "object",
21838
+ properties: {
21839
+ path: {
21840
+ type: "string",
21841
+ description: "Workspace root path. Defaults to current directory."
21842
+ }
21843
+ }
21844
+ }
21747
21845
  }
21748
21846
  ];
21749
21847
  function isKnownToolName(toolName) {
@@ -21888,7 +21986,7 @@ function createScanIdempotencyKey(prefix) {
21888
21986
  // src/mcp-scan-meter-client.ts
21889
21987
  var MCP_SCAN_METER_CLIENT = {
21890
21988
  type: "mcp",
21891
- version: "24.6.4"
21989
+ version: "24.6.6"
21892
21990
  };
21893
21991
 
21894
21992
  // ../shared/dist/chunk-YYSV5CG4.js
@@ -22187,6 +22285,36 @@ function resolveWorkspaceRoot(cwd) {
22187
22285
  const resolvedRoot = path2.resolve(cwd);
22188
22286
  return fs4.existsSync(resolvedRoot) ? fs4.realpathSync(resolvedRoot) : resolvedRoot;
22189
22287
  }
22288
+ async function appendIslStudioInbox(workspaceRoot, jsonLine) {
22289
+ try {
22290
+ const dir = path2.join(workspaceRoot, ".vibecheck");
22291
+ await fs4.promises.mkdir(dir, { recursive: true });
22292
+ const target = path2.join(dir, "isl-studio-inbox.jsonl");
22293
+ await fs4.promises.appendFile(target, jsonLine + "\n", "utf-8");
22294
+ return { ok: true };
22295
+ } catch (err) {
22296
+ return { ok: false, message: err instanceof Error ? err.message : String(err) };
22297
+ }
22298
+ }
22299
+ async function readIslStudioState(workspaceRoot) {
22300
+ try {
22301
+ const target = path2.join(workspaceRoot, ".vibecheck", "isl-studio", "editor-state.json");
22302
+ const raw = await fs4.promises.readFile(target, "utf-8");
22303
+ const parsed = JSON.parse(raw);
22304
+ return {
22305
+ available: true,
22306
+ html: typeof parsed.html === "string" ? parsed.html : "",
22307
+ css: typeof parsed.css === "string" ? parsed.css : "",
22308
+ js: typeof parsed.js === "string" ? parsed.js : "",
22309
+ source: "editor-state.json"
22310
+ };
22311
+ } catch {
22312
+ return {
22313
+ available: false,
22314
+ hint: "ISL Studio editor state not found. The user must open the ISL Studio panel and edit at least once for state to be persisted to .vibecheck/isl-studio/editor-state.json."
22315
+ };
22316
+ }
22317
+ }
22190
22318
  function resolveWorkspaceTargetPath(workspaceRoot, requestedPath) {
22191
22319
  const resolvedTarget = path2.resolve(workspaceRoot, requestedPath);
22192
22320
  const normalizedTarget = fs4.existsSync(resolvedTarget) ? fs4.realpathSync(resolvedTarget) : path2.normalize(resolvedTarget);
@@ -22536,7 +22664,7 @@ ${validation.errors.join("\n")}`);
22536
22664
  if (!fs4.existsSync(absFile) || !fs4.statSync(absFile).isFile()) {
22537
22665
  return buildErrorResponse(`Not a file: ${relFile}`);
22538
22666
  }
22539
- const { runGhostTrace } = await import('./dist-PVKUO7NU.js');
22667
+ const { runGhostTrace } = await import('./dist-2TGNIDV6.js');
22540
22668
  const trace = await withTimeout(
22541
22669
  runGhostTrace({ workspaceRoot: targetPath, filePath: absFile }),
22542
22670
  MCP_TOOL_TIMEOUT_MS,
@@ -22814,6 +22942,59 @@ This change is synced across all surfaces.`
22814
22942
  );
22815
22943
  }
22816
22944
  }
22945
+ case "isl_studio_apply": {
22946
+ const lang = args?.lang;
22947
+ const code = args?.code;
22948
+ const mode = args?.mode === "append" ? "append" : "replace";
22949
+ if (lang !== "html" && lang !== "css" && lang !== "js") {
22950
+ return buildErrorResponse('isl_studio_apply requires lang to be "html", "css", or "js"');
22951
+ }
22952
+ if (typeof code !== "string") {
22953
+ return buildErrorResponse('isl_studio_apply requires "code" (string)');
22954
+ }
22955
+ const ISL_APPLY_MAX_BYTES = 1024 * 1024;
22956
+ const codeByteLen = Buffer.byteLength(code, "utf-8");
22957
+ if (codeByteLen > ISL_APPLY_MAX_BYTES) {
22958
+ return buildErrorResponse(
22959
+ `isl_studio_apply rejected: code is ${(codeByteLen / 1024).toFixed(1)} KB, max is ${ISL_APPLY_MAX_BYTES / 1024} KB. Split the change into smaller updates.`
22960
+ );
22961
+ }
22962
+ const evt = JSON.stringify({ type: "apply", lang, code, mode });
22963
+ const r = await appendIslStudioInbox(workspaceRoot, evt);
22964
+ if (!r.ok) return buildErrorResponse(`isl_studio_apply failed: ${r.message}`);
22965
+ return {
22966
+ content: [
22967
+ {
22968
+ type: "text",
22969
+ text: `Wrote ${code.length} chars of ${lang} to ISL Studio (mode: ${mode}). The live preview will rebuild.`
22970
+ }
22971
+ ]
22972
+ };
22973
+ }
22974
+ case "isl_studio_send_message": {
22975
+ const role = args?.role === "system" ? "system" : "assistant";
22976
+ const content = args?.content;
22977
+ if (typeof content !== "string" || !content.trim()) {
22978
+ return buildErrorResponse('isl_studio_send_message requires non-empty "content"');
22979
+ }
22980
+ const ISL_MSG_MAX_BYTES = 64 * 1024;
22981
+ const msgByteLen = Buffer.byteLength(content, "utf-8");
22982
+ if (msgByteLen > ISL_MSG_MAX_BYTES) {
22983
+ return buildErrorResponse(
22984
+ `isl_studio_send_message rejected: content is ${(msgByteLen / 1024).toFixed(1)} KB, max is ${ISL_MSG_MAX_BYTES / 1024} KB.`
22985
+ );
22986
+ }
22987
+ const evt = JSON.stringify({ type: "message", role, content });
22988
+ const r = await appendIslStudioInbox(workspaceRoot, evt);
22989
+ if (!r.ok) return buildErrorResponse(`isl_studio_send_message failed: ${r.message}`);
22990
+ return { content: [{ type: "text", text: "Message posted to ISL Studio chat." }] };
22991
+ }
22992
+ case "isl_studio_get_state": {
22993
+ const r = await readIslStudioState(workspaceRoot);
22994
+ return {
22995
+ content: [{ type: "text", text: JSON.stringify(r, null, 2) }]
22996
+ };
22997
+ }
22817
22998
  default:
22818
22999
  return buildErrorResponse(`Unknown tool: ${name}`);
22819
23000
  }
@@ -22827,7 +23008,7 @@ function createMcpServer(runtimeOverrides = {}) {
22827
23008
  const server = new Server(
22828
23009
  {
22829
23010
  name: "vibecheck-mcp",
22830
- version: "24.6.4"
23011
+ version: "24.6.6"
22831
23012
  },
22832
23013
  {
22833
23014
  capabilities: {