engramx 2.0.2 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35) hide show
  1. package/CHANGELOG.md +271 -0
  2. package/README.md +161 -17
  3. package/dist/{aider-context-BC5R2ZTA.js → aider-context-6IDE3R7U.js} +1 -1
  4. package/dist/check-2Z3MPZEJ.js +12 -0
  5. package/dist/{chunk-PEH54LYC.js → chunk-645NBY6L.js} +42 -5
  6. package/dist/chunk-73IBCRFI.js +215 -0
  7. package/dist/{chunk-SJT7VS2G.js → chunk-B4UOE64J.js} +46 -11
  8. package/dist/chunk-FKY6HIT2.js +99 -0
  9. package/dist/{chunk-533LR4I7.js → chunk-G4U3QOOW.js} +13 -97
  10. package/dist/chunk-RJC6RNXJ.js +1405 -0
  11. package/dist/chunk-RM2TBOVW.js +121 -0
  12. package/dist/chunk-SMU4WR3D.js +187 -0
  13. package/dist/{chunk-C6GBUOAL.js → chunk-VLTWBTQ7.js} +14 -15
  14. package/dist/chunk-XVYE4OX2.js +232 -0
  15. package/dist/chunk-ZUC6OXSL.js +178 -0
  16. package/dist/cli.js +818 -1533
  17. package/dist/{core-6IY5L6II.js → core-77F2BVYV.js} +2 -2
  18. package/dist/{cursor-mdc-GJ7E5LDD.js → cursor-mdc-EEO7PYZ3.js} +1 -1
  19. package/dist/{exporter-GWU2GF23.js → exporter-ZYJ4WM2F.js} +1 -1
  20. package/dist/{importer-V62NGZRK.js → importer-4UWQDH4W.js} +1 -1
  21. package/dist/index.js +3 -3
  22. package/dist/install-YVMVCFQW.js +121 -0
  23. package/dist/mcp-client-ROOJF76V.js +9 -0
  24. package/dist/mcp-config-QD4NPVXB.js +12 -0
  25. package/dist/{migrate-UKCO6BUU.js → migrate-KJ5K5NWO.js} +1 -1
  26. package/dist/notify-5POGKMRX.js +36 -0
  27. package/dist/{plugin-loader-STTGYIL5.js → plugin-loader-SQQB6V74.js} +69 -23
  28. package/dist/report-C3GTM3HY.js +12 -0
  29. package/dist/resolver-H7GXVP73.js +21 -0
  30. package/dist/serve.js +5 -4
  31. package/dist/{server-KUG7U6SG.js → server-2ZQKXJ5M.js} +74 -4
  32. package/dist/{windsurf-rules-C7SVDHBL.js → windsurf-rules-XF7MYF6J.js} +1 -1
  33. package/dist/wizard-UH27IO4I.js +274 -0
  34. package/package.json +3 -2
  35. package/dist/{tuner-KFNNGKG3.js → tuner-Y2YENAZC.js} +3 -3
@@ -1,7 +1,12 @@
1
1
  // src/db/migrate.ts
2
2
  import { existsSync, copyFileSync } from "fs";
3
- var CURRENT_SCHEMA_VERSION = 7;
3
+ var CURRENT_SCHEMA_VERSION = 8;
4
4
  var DOWN_MIGRATIONS = {
5
+ // v3.0: bi-temporal mistake validity. SQLite only added DROP COLUMN in
6
+ // 3.35 (2021); older sql.js builds may not support it. We don't depend
7
+ // on the columns being absent — leaving them in place is safe. The index
8
+ // CAN be dropped cleanly.
9
+ 8: `DROP INDEX IF EXISTS idx_nodes_validity;`,
5
10
  7: `DROP TABLE IF EXISTS query_cache; DROP TABLE IF EXISTS pattern_cache;`,
6
11
  6: `DROP TABLE IF EXISTS engram_config;`,
7
12
  5: `DROP TABLE IF EXISTS provider_cache;`,
@@ -14,6 +19,13 @@ var DOWN_MIGRATIONS = {
14
19
  // 1 → 0 drops the entire schema. We require `engram init` for that.
15
20
  1: `DROP TABLE IF EXISTS stats; DROP TABLE IF EXISTS edges; DROP TABLE IF EXISTS nodes;`
16
21
  };
22
// SQLite's ALTER TABLE ADD COLUMN is not idempotent — re-running it on a
// database that already has the column throws 'duplicate column name'.
// So consult PRAGMA table_info first and only add the column when absent.
function addColumnIfMissing(db, table, column, ddl) {
  const pragmaRows = db.exec(`PRAGMA table_info(${table})`)[0]?.values ?? [];
  // Column name is the second field of each table_info row.
  const alreadyPresent = pragmaRows.some((row) => row[1] === column);
  if (alreadyPresent) return;
  db.exec(`ALTER TABLE ${table} ADD COLUMN ${ddl}`);
}
17
29
  var MIGRATIONS = {
18
30
  // v0.1.0: Initial schema
19
31
  1: `
@@ -85,7 +97,28 @@ CREATE TABLE IF NOT EXISTS pattern_cache (
85
97
  graph_version INTEGER NOT NULL,
86
98
  hit_count INTEGER NOT NULL DEFAULT 0
87
99
  );
88
- CREATE INDEX IF NOT EXISTS idx_query_cache_file ON query_cache(file_path);`
100
+ CREATE INDEX IF NOT EXISTS idx_query_cache_file ON query_cache(file_path);`,
101
+ // v3.0.0: Bi-temporal validity for mistake nodes (and any other node kind
102
+ // that wants it). `valid_until` is the unix-ms timestamp after which the
103
+ // mistake should NO LONGER surface in context (e.g. the referenced code
104
+ // was refactored away). NULL = still valid (back-compat default for all
105
+ // existing rows). `invalidated_by_commit` records the git SHA that caused
106
+ // the invalidation, for audit + future "explain why this mistake stopped
107
+ // firing" UX. Index is partial — only mistakes with an explicit validity
108
+ // window pay storage cost.
109
+ //
110
+ // Function-based because ALTER TABLE ADD COLUMN isn't idempotent in
111
+ // SQLite — re-running on a db that already has the columns throws
112
+ // 'duplicate column name'. We pre-check via PRAGMA table_info.
113
+ 8: (db) => {
114
+ addColumnIfMissing(db, "nodes", "valid_until", "valid_until INTEGER");
115
+ addColumnIfMissing(db, "nodes", "invalidated_by_commit", "invalidated_by_commit TEXT");
116
+ db.exec(`
117
+ CREATE INDEX IF NOT EXISTS idx_nodes_validity
118
+ ON nodes(kind, valid_until)
119
+ WHERE kind = 'mistake' AND valid_until IS NOT NULL;
120
+ `);
121
+ }
89
122
  };
90
123
  function getSchemaVersion(db) {
91
124
  try {
@@ -116,9 +149,13 @@ function runMigrations(db, dbPath) {
116
149
  );
117
150
  let migrationsRun = 0;
118
151
  for (let v = fromVersion + 1; v <= CURRENT_SCHEMA_VERSION; v++) {
119
- const sql = MIGRATIONS[v];
120
- if (sql) {
121
- db.exec(sql);
152
+ const step = MIGRATIONS[v];
153
+ if (step) {
154
+ if (typeof step === "string") {
155
+ db.exec(step);
156
+ } else {
157
+ step(db);
158
+ }
122
159
  migrationsRun++;
123
160
  }
124
161
  }
@@ -0,0 +1,215 @@
1
+ import {
2
+ applyArgTemplate
3
+ } from "./chunk-ZUC6OXSL.js";
4
+
5
+ // src/providers/mcp-client.ts
6
+ import { Client } from "@modelcontextprotocol/sdk/client/index.js";
7
+ import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";
8
// Rough token estimate using the common ~4-characters-per-token heuristic.
// Only used for budget trimming, so precision is not critical.
var APPROX_CHARS_PER_TOKEN = 4;
function estimateTokens(text) {
  return Math.ceil(text.length / APPROX_CHARS_PER_TOKEN);
}
11
/**
 * Lifecycle wrapper around one configured MCP server connection:
 * lazy connect, a single shared in-flight connect promise, an error
 * backoff window, per-call timeouts, and best-effort shutdown hooks.
 * Only stdio transport is implemented; http throws in doConnect().
 */
var McpClientWrapper = class {
  constructor(config) {
    this.config = config;
  }
  config;
  client = null;           // MCP SDK Client once connected, else null
  transport = null;        // StdioClientTransport once connected, else null
  connectingPromise = null; // shared by concurrent connect() callers
  shutdownRegistered = false;
  lastErrorAt = 0;         // Date.now() of the last failed connect
  errorBackoffMs = 3e4;    // 30s window during which connect() refuses to retry
  /**
   * Connect once (idempotent). Concurrent callers share one promise so
   * we never spawn the server twice. On failure we set a backoff window
   * so the next Read doesn't re-try spawn immediately.
   */
  async connect() {
    if (this.client) return;
    if (this.connectingPromise) return this.connectingPromise;
    if (Date.now() - this.lastErrorAt < this.errorBackoffMs) {
      throw new Error(
        `[mcp] ${this.config.name}: in error backoff (last failure ${Math.round(
          (Date.now() - this.lastErrorAt) / 1e3
        )}s ago)`
      );
    }
    this.connectingPromise = this.doConnect().catch((err) => {
      // Record the failure time (starts the backoff window) and reset
      // state so a later attempt starts clean; rethrow for this caller.
      this.lastErrorAt = Date.now();
      this.client = null;
      this.transport = null;
      throw err;
    }).finally(() => {
      this.connectingPromise = null;
    });
    return this.connectingPromise;
  }
  // Spawn the stdio server and perform the MCP handshake. Sets
  // this.client/this.transport only after connect succeeds.
  async doConnect() {
    if (this.config.transport !== "stdio") {
      throw new Error(
        `[mcp] ${this.config.name}: http transport not yet implemented`
      );
    }
    const transport = new StdioClientTransport({
      command: this.config.command,
      // Copy args/env so the SDK can't mutate caller-owned config.
      args: this.config.args ? [...this.config.args] : void 0,
      env: this.config.env ? { ...this.config.env } : void 0,
      cwd: this.config.cwd,
      // Pipe stderr so a chatty server doesn't spam the parent's stderr
      // during normal operation. Re-enable "inherit" for debugging.
      stderr: "pipe"
    });
    const client = new Client(
      { name: "engramx", version: "3.0.0" },
      { capabilities: {} }
    );
    await client.connect(transport);
    this.transport = transport;
    this.client = client;
    if (!this.shutdownRegistered) {
      this.registerShutdown();
      this.shutdownRegistered = true;
    }
  }
  /**
   * Call a single tool with a timeout. Returns null on error (never
   * throws). Caller is responsible for aggregating multiple tool results.
   */
  async callTool(toolName, args, timeoutMs) {
    try {
      await this.connect();
    } catch {
      // Connect failure (including backoff) degrades to "no result".
      return null;
    }
    if (!this.client) return null;
    // Both an AbortSignal and the SDK's own timeout option are set;
    // whichever fires first cancels the call.
    const abort = new AbortController();
    const timer = setTimeout(() => abort.abort(), timeoutMs);
    try {
      const result = await this.client.callTool(
        { name: toolName, arguments: args },
        void 0,
        { signal: abort.signal, timeout: timeoutMs }
      );
      clearTimeout(timer);
      // Flatten text content blocks; non-text blocks become placeholders.
      const blocks = Array.isArray(result?.content) ? result.content : [];
      const text = blocks.map((b) => {
        const block = b;
        if (block.type === "text" && typeof block.text === "string") {
          return block.text;
        }
        return `[${block.type ?? "unknown"} block]`;
      }).join("\n").trim();
      if (text.length === 0) return null;
      return { content: text };
    } catch {
      return null;
    } finally {
      // Redundant with the clearTimeout above on the success path, but
      // guarantees the timer is cleared on every exit.
      clearTimeout(timer);
    }
  }
  /** Close the connection. Safe to call on an unconnected client. */
  async disconnect() {
    // Null out state first so re-entrant calls are no-ops.
    const client = this.client;
    const transport = this.transport;
    this.client = null;
    this.transport = null;
    try {
      await client?.close();
    } catch {
    }
    try {
      await transport?.close();
    } catch {
    }
  }
  // Best-effort cleanup on process exit so the child server isn't orphaned.
  registerShutdown() {
    const shutdown = () => {
      // Fire-and-forget: exit handlers can't await.
      void this.disconnect();
    };
    process.once("SIGTERM", shutdown);
    process.once("SIGINT", shutdown);
    process.once("beforeExit", shutdown);
  }
};
134
/**
 * Build a context provider backed by a configured MCP server. The
 * provider fans out to every configured tool in parallel, concatenates
 * the successful results, and trims the combined text to the token
 * budget. resolve() never throws — failures yield null.
 */
function createMcpProvider(config) {
  const wrapper = new McpClientWrapper(config);
  const tokenBudget = config.tokenBudget ?? 200;
  const timeoutMs = config.timeoutMs ?? 2e3;
  const enabled = config.enabled ?? true;
  return {
    name: config.name,
    label: config.label,
    // Tier 2 — external process/HTTP with cache support. Matches
    // context7/obsidian tier semantics in the existing resolver.
    tier: 2,
    tokenBudget,
    timeoutMs,
    // Available only when enabled and at least one tool is configured.
    // Does NOT probe the server — that happens lazily on first resolve.
    async isAvailable() {
      if (!enabled) return false;
      if (config.tools.length === 0) return false;
      return true;
    },
    async resolve(filePath, context) {
      try {
        // allSettled: one slow/failing tool must not sink the others.
        const results = await Promise.allSettled(
          config.tools.map((tool) => callSingleTool(wrapper, tool, filePath, context, timeoutMs))
        );
        const sections = [];
        let highestConfidence = 0;
        for (const outcome of results) {
          if (outcome.status === "fulfilled" && outcome.value) {
            sections.push(outcome.value.content);
            // Aggregate confidence is the max across contributing tools.
            highestConfidence = Math.max(
              highestConfidence,
              outcome.value.confidence
            );
          }
        }
        if (sections.length === 0) return null;
        let combined = sections.join("\n\n");
        const budget = tokenBudget;
        if (estimateTokens(combined) > budget) {
          // Trim line-by-line so we never cut mid-line; +1 per line
          // accounts roughly for the newline. If even the first line is
          // over budget, only the truncation marker remains.
          const lines = combined.split("\n");
          const kept = [];
          let used = 0;
          for (const line of lines) {
            const lineTokens = estimateTokens(line) + 1;
            if (used + lineTokens > budget) break;
            kept.push(line);
            used += lineTokens;
          }
          combined = kept.join("\n") + "\n\u2026 [truncated to fit budget]";
        }
        return {
          provider: config.name,
          content: combined,
          confidence: highestConfidence,
          cached: false
        };
      } catch {
        // Providers are best-effort; any unexpected error yields null.
        return null;
      }
    }
  };
}
195
/**
 * Expand one tool's argument template against the current file/context
 * and invoke it through the shared wrapper. Returns a
 * { content, confidence } pair, or null when the call failed/timed out.
 */
async function callSingleTool(wrapper, tool, filePath, context, timeoutMs) {
  const templateVars = {
    filePath,
    projectRoot: context.projectRoot,
    imports: context.imports
  };
  const resolvedArgs = applyArgTemplate(tool.args, templateVars);
  const outcome = await wrapper.callTool(tool.name, resolvedArgs, timeoutMs);
  if (outcome === null || outcome === undefined || !outcome) {
    return null;
  }
  return {
    content: outcome.content,
    // Per-tool confidence override, defaulting to 0.75.
    confidence: tool.confidence ?? 0.75
  };
}
208
+ var __internalsForTesting = {
209
+ McpClientWrapper
210
+ };
211
+
212
+ export {
213
+ createMcpProvider,
214
+ __internalsForTesting
215
+ };
@@ -1,6 +1,6 @@
1
1
  import {
2
2
  runMigrations
3
- } from "./chunk-PEH54LYC.js";
3
+ } from "./chunk-645NBY6L.js";
4
4
 
5
5
  // src/core.ts
6
6
  import { join as join4, resolve as resolve2, relative as relative2 } from "path";
@@ -98,8 +98,8 @@ var GraphStore = class _GraphStore {
98
98
  }
99
99
  upsertNode(node) {
100
100
  this.db.run(
101
- `INSERT OR REPLACE INTO nodes (id, label, kind, source_file, source_location, confidence, confidence_score, last_verified, query_count, metadata)
102
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
101
+ `INSERT OR REPLACE INTO nodes (id, label, kind, source_file, source_location, confidence, confidence_score, last_verified, query_count, metadata, valid_until, invalidated_by_commit)
102
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
103
103
  [
104
104
  node.id,
105
105
  node.label,
@@ -110,7 +110,9 @@ var GraphStore = class _GraphStore {
110
110
  node.confidenceScore,
111
111
  node.lastVerified,
112
112
  node.queryCount,
113
- JSON.stringify(node.metadata)
113
+ JSON.stringify(node.metadata),
114
+ node.validUntil ?? null,
115
+ node.invalidatedByCommit ?? null
114
116
  ]
115
117
  );
116
118
  }
@@ -147,6 +149,19 @@ var GraphStore = class _GraphStore {
147
149
  throw e;
148
150
  }
149
151
  }
152
+ countBySourceFile(sourceFile) {
153
+ const stmt = this.db.prepare(
154
+ "SELECT COUNT(*) AS n FROM nodes WHERE source_file = ?"
155
+ );
156
+ stmt.bind([sourceFile]);
157
+ let count = 0;
158
+ if (stmt.step()) {
159
+ const row = stmt.getAsObject();
160
+ count = Number(row.n) || 0;
161
+ }
162
+ stmt.free();
163
+ return count;
164
+ }
150
165
  bulkUpsert(nodes, edges) {
151
166
  this.db.run("BEGIN TRANSACTION");
152
167
  for (const node of nodes) this.upsertNode(node);
@@ -489,6 +504,8 @@ var GraphStore = class _GraphStore {
489
504
  this.db.close();
490
505
  }
491
506
  rowToNode(row) {
507
+ const validUntilRaw = row.valid_until;
508
+ const invalidatedByRaw = row.invalidated_by_commit;
492
509
  return {
493
510
  id: row.id,
494
511
  label: row.label,
@@ -499,7 +516,9 @@ var GraphStore = class _GraphStore {
499
516
  confidenceScore: row.confidence_score ?? 1,
500
517
  lastVerified: row.last_verified ?? 0,
501
518
  queryCount: row.query_count ?? 0,
502
- metadata: JSON.parse(row.metadata || "{}")
519
+ metadata: JSON.parse(row.metadata || "{}"),
520
+ validUntil: validUntilRaw === null || validUntilRaw === void 0 ? void 0 : validUntilRaw,
521
+ invalidatedByCommit: invalidatedByRaw === null || invalidatedByRaw === void 0 ? void 0 : invalidatedByRaw
503
522
  };
504
523
  }
505
524
  rowToEdge(row) {
@@ -535,6 +554,9 @@ function truncateGraphemeSafe(s, max) {
535
554
  if (code >= 55296 && code <= 56319) cut--;
536
555
  return s.slice(0, cut) + "\u2026";
537
556
  }
557
// Insert comma thousands separators into a number's string form
// (e.g. 1234567 -> "1,234,567"). Deliberately locale-independent so CLI
// output is stable across machines (unlike toLocaleString()).
var THOUSANDS_SEPARATOR_RE = /\B(?=(\d{3})+(?!\d))/g;
function formatThousands(n) {
  return String(n).replace(THOUSANDS_SEPARATOR_RE, ",");
}
538
560
 
539
561
  // src/graph/query.ts
540
562
  var MISTAKE_SCORE_BOOST = 2.5;
@@ -1230,6 +1252,7 @@ function getPatterns(lang) {
1230
1252
  return { classes: [], functions: [], imports: [], exports: [] };
1231
1253
  }
1232
1254
  }
1255
+ var MAX_DEPTH = 100;
1233
1256
  var DEFAULT_SKIP_DIRS = /* @__PURE__ */ new Set([
1234
1257
  "node_modules",
1235
1258
  "dist",
@@ -1281,7 +1304,8 @@ function extractDirectory(dirPath, rootDir, options = {}) {
1281
1304
  if (ignorePatterns.has(name)) return true;
1282
1305
  return false;
1283
1306
  }
1284
- function walk(dir) {
1307
+ function walk(dir, depth) {
1308
+ if (depth > MAX_DEPTH) return;
1285
1309
  let realDir;
1286
1310
  try {
1287
1311
  realDir = realpathSync(dir);
@@ -1290,12 +1314,17 @@ function extractDirectory(dirPath, rootDir, options = {}) {
1290
1314
  }
1291
1315
  if (visitedDirs.has(realDir)) return;
1292
1316
  visitedDirs.add(realDir);
1293
- const entries = readdirSync(dir, { withFileTypes: true });
1317
+ let entries;
1318
+ try {
1319
+ entries = readdirSync(dir, { withFileTypes: true });
1320
+ } catch {
1321
+ return;
1322
+ }
1294
1323
  for (const entry of entries) {
1295
1324
  const fullPath = join(dir, entry.name);
1296
1325
  if (entry.isDirectory()) {
1297
1326
  if (shouldSkipDir(entry.name)) continue;
1298
- walk(fullPath);
1327
+ walk(fullPath, depth + 1);
1299
1328
  continue;
1300
1329
  }
1301
1330
  if (!entry.isFile()) continue;
@@ -1321,7 +1350,7 @@ function extractDirectory(dirPath, rootDir, options = {}) {
1321
1350
  options.onProgress?.(fileCount, skippedCount, relPath);
1322
1351
  }
1323
1352
  }
1324
- walk(dirPath);
1353
+ walk(dirPath, 0);
1325
1354
  return { nodes: allNodes, edges: allEdges, fileCount, totalLines, mtimes, skippedCount };
1326
1355
  }
1327
1356
 
@@ -1368,6 +1397,8 @@ function mineGitHistory(projectRoot, maxCommits = 200) {
1368
1397
  const fileChangeCount = /* @__PURE__ */ new Map();
1369
1398
  const authorMap = /* @__PURE__ */ new Map();
1370
1399
  const commitBlocks = log.split("\n\n").filter(Boolean);
1400
+ const SKIP_PREFIXES = ["dist/", "build/", "node_modules/", ".venv/", "target/", "coverage/"];
1401
+ const MAX_FILES_PER_COMMIT = 50;
1371
1402
  for (const block of commitBlocks) {
1372
1403
  const lines = block.split("\n").filter(Boolean);
1373
1404
  if (lines.length === 0) continue;
@@ -1375,9 +1406,12 @@ function mineGitHistory(projectRoot, maxCommits = 200) {
1375
1406
  const parts = header.split("|");
1376
1407
  if (parts.length < 3) continue;
1377
1408
  const author = parts[1];
1378
- const files = fileLines.filter(
1379
- (f) => f.length > 0 && !f.includes("|") && !f.startsWith(" ") && f.includes(".")
1409
+ let files = fileLines.filter(
1410
+ (f) => f.length > 0 && !f.includes("|") && !f.startsWith(" ") && f.includes(".") && !SKIP_PREFIXES.some((p) => f.startsWith(p))
1380
1411
  );
1412
+ if (files.length > MAX_FILES_PER_COMMIT) {
1413
+ files = files.slice(0, MAX_FILES_PER_COMMIT);
1414
+ }
1381
1415
  for (const file of files) {
1382
1416
  fileChangeCount.set(file, (fileChangeCount.get(file) ?? 0) + 1);
1383
1417
  if (!authorMap.has(file)) authorMap.set(file, /* @__PURE__ */ new Set());
@@ -2163,6 +2197,7 @@ export {
2163
2197
  GraphStore,
2164
2198
  sliceGraphemeSafe,
2165
2199
  truncateGraphemeSafe,
2200
+ formatThousands,
2166
2201
  MAX_MISTAKE_LABEL_CHARS,
2167
2202
  queryGraph,
2168
2203
  shortestPath,
@@ -0,0 +1,99 @@
1
+ import {
2
+ formatThousands
3
+ } from "./chunk-B4UOE64J.js";
4
+
5
+ // src/intercept/stats.ts
6
// Average context tokens a denied Read would otherwise have consumed;
// used to convert deny counts into an "estimated tokens saved" figure.
var ESTIMATED_TOKENS_PER_READ_DENY = 1200;
/**
 * Aggregate raw hook-log entries into counts by event, tool, and
 * decision, plus the observed timestamp range. ISO-8601 timestamps
 * compare correctly as strings, so min/max use plain < and >.
 */
function summarizeHookLog(entries) {
  const byEvent = {};
  const byTool = {};
  const byDecision = {};
  const bump = (bag, key) => {
    bag[key] = (bag[key] ?? 0) + 1;
  };
  let readDenyCount = 0;
  let firstEntryTs = null;
  let lastEntryTs = null;
  for (const entry of entries) {
    const event = entry.event ?? "unknown";
    const tool = entry.tool ?? "unknown";
    bump(byEvent, event);
    bump(byTool, tool);
    if (entry.decision) {
      bump(byDecision, entry.decision);
    }
    // A denied Read is the case we can translate into token savings.
    if (event === "PreToolUse" && tool === "Read" && entry.decision === "deny") {
      readDenyCount += 1;
    }
    const ts = entry.ts;
    if (typeof ts === "string") {
      if (firstEntryTs === null || ts < firstEntryTs) firstEntryTs = ts;
      if (lastEntryTs === null || ts > lastEntryTs) lastEntryTs = ts;
    }
  }
  return {
    totalInvocations: entries.length,
    // Frozen so downstream formatters can't mutate the summary.
    byEvent: Object.freeze(byEvent),
    byTool: Object.freeze(byTool),
    byDecision: Object.freeze(byDecision),
    readDenyCount,
    estimatedTokensSaved: readDenyCount * ESTIMATED_TOKENS_PER_READ_DENY,
    firstEntry: firstEntryTs,
    lastEntry: lastEntryTs
  };
}
42
/**
 * Render a summarizeHookLog() result as a human-readable multi-line
 * report. Pure string building — performs no I/O.
 */
function formatStatsSummary(summary) {
  if (summary.totalInvocations === 0) {
    return "engram hook stats: no log entries yet.\n\nRun engram install-hook in a project, then use Claude Code to see interceptions.";
  }
  // Shared comparator: highest count first.
  const byCountDesc = (a, b) => b[1] - a[1];
  const out = [];
  out.push(`engram hook stats (${summary.totalInvocations} invocations)`);
  out.push("\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500");
  if (summary.firstEntry && summary.lastEntry) {
    out.push(`Time range: ${summary.firstEntry} \u2192 ${summary.lastEntry}`);
    out.push("");
  }
  out.push("By event:");
  for (const [event, count] of Object.entries(summary.byEvent).sort(byCountDesc)) {
    const pct = (count / summary.totalInvocations * 100).toFixed(1);
    out.push(` ${event.padEnd(18)} ${String(count).padStart(5)} (${pct}%)`);
  }
  out.push("");
  out.push("By tool:");
  // "unknown" entries carry no tool name, so they are omitted here.
  const toolEntries = Object.entries(summary.byTool).filter(([name]) => name !== "unknown").sort(byCountDesc);
  for (const [tool, count] of toolEntries) {
    out.push(` ${tool.padEnd(18)} ${String(count).padStart(5)}`);
  }
  if (toolEntries.length === 0) {
    out.push(" (no tool-tagged entries)");
  }
  out.push("");
  const decisionEntries = Object.entries(summary.byDecision);
  if (decisionEntries.length > 0) {
    out.push("PreToolUse decisions:");
    for (const [decision, count] of decisionEntries.sort(byCountDesc)) {
      out.push(` ${decision.padEnd(18)} ${String(count).padStart(5)}`);
    }
    out.push("");
  }
  if (summary.readDenyCount > 0) {
    out.push(
      `Estimated tokens saved: ~${formatThousands(summary.estimatedTokensSaved)}`
    );
    out.push(
      ` (${summary.readDenyCount} Read denies \xD7 ${ESTIMATED_TOKENS_PER_READ_DENY} tok/deny avg)`
    );
  } else {
    out.push("Estimated tokens saved: 0");
    out.push(" (no PreToolUse:Read denies recorded yet)");
  }
  return out.join("\n");
}
94
+
95
+ export {
96
+ ESTIMATED_TOKENS_PER_READ_DENY,
97
+ summarizeHookLog,
98
+ formatStatsSummary
99
+ };
@@ -1,93 +1,3 @@
1
- // src/intercept/stats.ts
2
- var ESTIMATED_TOKENS_PER_READ_DENY = 1200;
3
- function summarizeHookLog(entries) {
4
- const byEvent = {};
5
- const byTool = {};
6
- const byDecision = {};
7
- let readDenyCount = 0;
8
- let firstEntryTs = null;
9
- let lastEntryTs = null;
10
- for (const entry of entries) {
11
- const event = entry.event ?? "unknown";
12
- byEvent[event] = (byEvent[event] ?? 0) + 1;
13
- const tool = entry.tool ?? "unknown";
14
- byTool[tool] = (byTool[tool] ?? 0) + 1;
15
- if (entry.decision) {
16
- byDecision[entry.decision] = (byDecision[entry.decision] ?? 0) + 1;
17
- }
18
- if (event === "PreToolUse" && tool === "Read" && entry.decision === "deny") {
19
- readDenyCount += 1;
20
- }
21
- const ts = entry.ts;
22
- if (typeof ts === "string") {
23
- if (firstEntryTs === null || ts < firstEntryTs) firstEntryTs = ts;
24
- if (lastEntryTs === null || ts > lastEntryTs) lastEntryTs = ts;
25
- }
26
- }
27
- return {
28
- totalInvocations: entries.length,
29
- byEvent: Object.freeze(byEvent),
30
- byTool: Object.freeze(byTool),
31
- byDecision: Object.freeze(byDecision),
32
- readDenyCount,
33
- estimatedTokensSaved: readDenyCount * ESTIMATED_TOKENS_PER_READ_DENY,
34
- firstEntry: firstEntryTs,
35
- lastEntry: lastEntryTs
36
- };
37
- }
38
- function formatStatsSummary(summary) {
39
- if (summary.totalInvocations === 0) {
40
- return "engram hook stats: no log entries yet.\n\nRun engram install-hook in a project, then use Claude Code to see interceptions.";
41
- }
42
- const lines = [];
43
- lines.push(`engram hook stats (${summary.totalInvocations} invocations)`);
44
- lines.push("\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500");
45
- if (summary.firstEntry && summary.lastEntry) {
46
- lines.push(`Time range: ${summary.firstEntry} \u2192 ${summary.lastEntry}`);
47
- lines.push("");
48
- }
49
- lines.push("By event:");
50
- const eventEntries = Object.entries(summary.byEvent).sort(
51
- (a, b) => b[1] - a[1]
52
- );
53
- for (const [event, count] of eventEntries) {
54
- const pct = (count / summary.totalInvocations * 100).toFixed(1);
55
- lines.push(` ${event.padEnd(18)} ${String(count).padStart(5)} (${pct}%)`);
56
- }
57
- lines.push("");
58
- lines.push("By tool:");
59
- const toolEntries = Object.entries(summary.byTool).filter(([k]) => k !== "unknown").sort((a, b) => b[1] - a[1]);
60
- for (const [tool, count] of toolEntries) {
61
- lines.push(` ${tool.padEnd(18)} ${String(count).padStart(5)}`);
62
- }
63
- if (toolEntries.length === 0) {
64
- lines.push(" (no tool-tagged entries)");
65
- }
66
- lines.push("");
67
- const decisionEntries = Object.entries(summary.byDecision);
68
- if (decisionEntries.length > 0) {
69
- lines.push("PreToolUse decisions:");
70
- for (const [decision, count] of decisionEntries.sort(
71
- (a, b) => b[1] - a[1]
72
- )) {
73
- lines.push(` ${decision.padEnd(18)} ${String(count).padStart(5)}`);
74
- }
75
- lines.push("");
76
- }
77
- if (summary.readDenyCount > 0) {
78
- lines.push(
79
- `Estimated tokens saved: ~${summary.estimatedTokensSaved.toLocaleString()}`
80
- );
81
- lines.push(
82
- ` (${summary.readDenyCount} Read denies \xD7 ${ESTIMATED_TOKENS_PER_READ_DENY} tok/deny avg)`
83
- );
84
- } else {
85
- lines.push("Estimated tokens saved: 0");
86
- lines.push(" (no PreToolUse:Read denies recorded yet)");
87
- }
88
- return lines.join("\n");
89
- }
90
-
91
1
  // src/intercept/component-status.ts
92
2
  import { existsSync, readFileSync, writeFileSync } from "fs";
93
3
  import { join, dirname } from "path";
@@ -112,10 +22,16 @@ function checkHttp(projectRoot) {
112
22
  }
113
23
// True when a language server appears to be usable: either the project
// carries an explicit .engram/lsp-available marker file, or one of the
// known LSP socket names exists in the system temp directory.
function checkLsp(projectRoot) {
  if (existsSync(join(projectRoot, ".engram", "lsp-available"))) return true;
  // getuid is absent on Windows; fall back to 0 for the uid-suffixed names.
  const uid = typeof process.getuid === "function" ? process.getuid() : 0;
  const tmp = tmpdir();
  const socketNames = [
    `tsserver-${uid}.sock`,
    "lsp-server.sock",
    "typescript-language-server.sock",
    `pyright-${uid}.sock`,
    "rust-analyzer.sock",
    // Legacy name kept for back-compat with older tsserver installs.
    "tsserver.sock"
  ];
  return socketNames.some((name) => existsSync(join(tmp, name)));
}
@@ -123,10 +39,12 @@ function checkAst(projectRoot) {
123
39
  try {
124
40
  const here = dirname(fileURLToPath(import.meta.url));
125
41
  const candidates = [
42
+ join(here, "grammars"),
43
+ // flattened bundle
126
44
  join(here, "..", "grammars"),
127
- // from dist/intercept/
45
+ // nested bundle
128
46
  join(here, "..", "..", "dist", "grammars")
129
- // from src/intercept/ dev
47
+ // dev-time
130
48
  ];
131
49
  for (const dir of candidates) {
132
50
  if (existsSync(dir)) return true;
@@ -212,9 +130,7 @@ function formatHudStatus(report) {
212
130
  }
213
131
 
214
132
  export {
215
- ESTIMATED_TOKENS_PER_READ_DENY,
216
- summarizeHookLog,
217
- formatStatsSummary,
133
+ refreshComponentStatus,
218
134
  getComponentStatus,
219
135
  formatHudStatus
220
136
  };