engramx 0.1.1 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,361 @@
1
// src/hooks.ts
import { existsSync, readFileSync, writeFileSync, chmodSync, unlinkSync } from "fs";
import { join } from "path";

// Sentinel comments bracketing the engram-managed section of a git hook
// file. install/uninstall use them to find, replace, or strip the section
// without disturbing any user-authored hook content around it.
var HOOK_START = "# engram-hook-start";
var HOOK_END = "# engram-hook-end";

// post-commit hook body: rebuild the graph in the background after each
// commit. Falls back to the global npm install location when `engram` is
// not on PATH.
var POST_COMMIT_SCRIPT = `
${HOOK_START}
# Auto-rebuild engram graph after commit (AST only, no LLM needed)
ENGRAM_BIN=$(command -v engram 2>/dev/null)
if [ -z "$ENGRAM_BIN" ]; then
ENGRAM_BIN=$(npm root -g 2>/dev/null)/engram/dist/cli.js
fi

if [ -d ".engram" ] && [ -f "$ENGRAM_BIN" ]; then
node "$ENGRAM_BIN" init . --quiet 2>/dev/null &
fi
${HOOK_END}
`;

// post-checkout hook body: git passes (prev HEAD, new HEAD, branch-flag);
// the rebuild only runs for true branch switches (flag == "1").
var POST_CHECKOUT_SCRIPT = `
${HOOK_START}
# Auto-rebuild engram graph on branch switch
PREV_HEAD=$1
NEW_HEAD=$2
BRANCH_SWITCH=$3

if [ "$BRANCH_SWITCH" != "1" ]; then
exit 0
fi

ENGRAM_BIN=$(command -v engram 2>/dev/null)
if [ -z "$ENGRAM_BIN" ]; then
ENGRAM_BIN=$(npm root -g 2>/dev/null)/engram/dist/cli.js
fi

if [ -d ".engram" ] && [ -f "$ENGRAM_BIN" ]; then
echo "[engram] Branch switched \u2014 rebuilding graph..."
node "$ENGRAM_BIN" init . --quiet 2>/dev/null &
fi
${HOOK_END}
`;
41
// Walk upward from `from` until a directory containing ".git" is found.
// Returns that directory, or null when no enclosing repository exists.
function findGitRoot(from) {
  let current = from;
  for (;;) {
    // Check the current directory itself (including the filesystem root).
    if (existsSync(join(current, ".git"))) return current;
    const parent = join(current, "..");
    // join() normalizes "..", so at the filesystem root the path stops
    // changing. The previous `while (current !== "/")` loop never
    // terminated on Windows drive roots (e.g. "C:\\", which is its own
    // parent but never equals "/") and also skipped checking "/" itself.
    if (parent === current) return null;
    current = parent;
  }
}
49
// Install (or append) the engram section into a single git hook file.
// Returns a short human-readable status line for the CLI.
function installHook(hooksDir, name, script) {
  const hookPath = join(hooksDir, name);
  if (existsSync(hookPath)) {
    const content = readFileSync(hookPath, "utf-8");
    // Idempotent: never write the section twice into the same hook.
    if (content.includes(HOOK_START)) {
      return `${name}: already installed`;
    }
    // Preserve the user's existing hook; append our section after it.
    writeFileSync(hookPath, content.trimEnd() + "\n\n" + script);
    return `${name}: appended to existing hook`;
  }
  writeFileSync(hookPath, "#!/bin/bash\n" + script);
  // 0o755 (rwxr-xr-x) — git only executes hooks with the execute bit set.
  // (Was the magic decimal 493, which is the same value but unreadable.)
  chmodSync(hookPath, 0o755);
  return `${name}: installed`;
}
63
// Remove the engram-managed section from one hook file, preserving any
// user-authored hook content around it. Deletes the file entirely when
// nothing meaningful remains.
function uninstallHook(hooksDir, name) {
  const hookPath = join(hooksDir, name);
  if (!existsSync(hookPath)) return `${name}: not installed`;
  const content = readFileSync(hookPath, "utf-8");
  if (!content.includes(HOOK_START)) return `${name}: engram hook not found`;
  // Strip every START..END section (non-greedy, including bounding newlines).
  const sectionRe = new RegExp(`\\n?${HOOK_START}[\\s\\S]*?${HOOK_END}\\n?`, "g");
  const remainder = content.replace(sectionRe, "").trim();
  // Only a shebang (or nothing) left => the file was ours; remove it.
  if (!remainder || remainder === "#!/bin/bash") {
    unlinkSync(hookPath);
    return `${name}: removed`;
  }
  writeFileSync(hookPath, remainder + "\n");
  return `${name}: engram section removed (other hooks preserved)`;
}
76
// Install both engram hooks (post-commit, post-checkout) into the git
// repository enclosing projectRoot. Returns one status line per hook.
function install(projectRoot) {
  const gitRoot = findGitRoot(projectRoot);
  if (!gitRoot) return "Error: not a git repository";
  const hooksDir = join(gitRoot, ".git", "hooks");
  const lines = [];
  lines.push(installHook(hooksDir, "post-commit", POST_COMMIT_SCRIPT));
  lines.push(installHook(hooksDir, "post-checkout", POST_CHECKOUT_SCRIPT));
  return lines.join("\n");
}
86
// Remove both engram hooks from the git repository enclosing projectRoot.
// Returns one status line per hook.
function uninstall(projectRoot) {
  const gitRoot = findGitRoot(projectRoot);
  if (!gitRoot) return "Error: not a git repository";
  const hooksDir = join(gitRoot, ".git", "hooks");
  const lines = [];
  lines.push(uninstallHook(hooksDir, "post-commit"));
  lines.push(uninstallHook(hooksDir, "post-checkout"));
  return lines.join("\n");
}
96
// Report, per hook, whether the engram section is present in the repo's
// .git/hooks directory.
function status(projectRoot) {
  const gitRoot = findGitRoot(projectRoot);
  if (!gitRoot) return "Not a git repository";
  const hooksDir = join(gitRoot, ".git", "hooks");
  const check = (name) => {
    const hookPath = join(hooksDir, name);
    if (!existsSync(hookPath)) return "not installed";
    const hasSection = readFileSync(hookPath, "utf-8").includes(HOOK_START);
    return hasSection ? "installed" : "not installed";
  };
  return `post-commit: ${check("post-commit")}\npost-checkout: ${check("post-checkout")}`;
}
108
+
109
// src/autogen.ts
import { existsSync as existsSync2, readFileSync as readFileSync2, writeFileSync as writeFileSync2 } from "fs";
import { join as join2 } from "path";

// HTML-comment markers delimiting the auto-generated section inside
// CLAUDE.md / .cursorrules / AGENTS.md. Everything between them is owned
// by engram and is replaced wholesale on regeneration.
var AUTOGEN_START = "<!-- engram:start -->";
var AUTOGEN_END = "<!-- engram:end -->";
114
// Section id -> builder. Each builder renders a markdown bullet list from
// the graph store, or "" when there is nothing worth showing (empty
// sections are omitted by generateSummary).
var SECTION_BUILDERS = {
  // Highest-degree nodes: the entities most of the codebase touches.
  gods: (store, limit) => {
    const top = store.getGodNodes(limit);
    if (top.length === 0) return "";
    const rows = top.map(
      (g) => `- \`${g.node.label}\` (${g.node.kind}, ${g.degree} connections) \u2014 ${g.node.sourceFile}`
    );
    return rows.join("\n");
  },
  // Hot files are produced by git-miner as `kind: "pattern"` nodes with
  // `metadata.type === "hot_file"`. They get their own section so the
  // Patterns section only contains real session-mined patterns.
  hotFiles: (store, limit) => {
    const byChurn = store
      .getAllNodes()
      .filter((n) => n.kind === "pattern" && n.metadata?.type === "hot_file")
      .sort((a, b) => (b.metadata.changeCount ?? 0) - (a.metadata.changeCount ?? 0))
      .slice(0, limit);
    if (byChurn.length === 0) return "";
    return byChurn.map((n) => `- ${n.label}`).join("\n");
  },
  mistakes: (store, limit) => {
    const rows = store.getAllNodes().filter((n) => n.kind === "mistake").slice(0, limit);
    if (rows.length === 0) return "";
    return rows.map((m) => `- ${m.label}`).join("\n");
  },
  decisions: (store, limit) => {
    const rows = store.getAllNodes().filter((n) => n.kind === "decision").slice(0, limit);
    if (rows.length === 0) return "";
    return rows.map((d) => `- ${d.label}`).join("\n");
  },
  // "Real" patterns = kind:pattern nodes that are NOT hot files; hot files
  // live in their own section so they don't clutter this one.
  patterns: (store, limit) => {
    const real = store
      .getAllNodes()
      .filter((n) => n.kind === "pattern" && n.metadata?.type !== "hot_file")
      .slice(0, limit);
    if (real.length === 0) return "";
    return real.map((p) => `- ${p.label}`).join("\n");
  },
  // Most-imported nodes, ranked by how many "imports" edges point at them.
  deps: (store, limit) => {
    const importCounts = new Map();
    for (const edge of store.getAllEdges()) {
      if (edge.relation !== "imports") continue;
      importCounts.set(edge.target, (importCounts.get(edge.target) ?? 0) + 1);
    }
    const ranked = [...importCounts.entries()].sort((a, b) => b[1] - a[1]).slice(0, limit);
    if (ranked.length === 0) return "";
    return ranked
      .map(([target, count]) => `- \`${store.getNode(target)?.label ?? target}\` (imported by ${count} files)`)
      .join("\n");
  },
  // `limit` is intentionally unused — the structure section always renders
  // the full file tree; the parameter exists only so the SectionSpec shape
  // matches every other builder.
  structure: (store, _limit) => {
    void _limit;
    const filesByDir = new Map();
    for (const node of store.getAllNodes()) {
      if (node.kind !== "file" || !node.sourceFile) continue;
      const segments = node.sourceFile.split("/");
      const dir = segments.slice(0, -1).join("/") || ".";
      const bucket = filesByDir.get(dir);
      if (bucket) bucket.push(node.label);
      else filesByDir.set(dir, [node.label]);
    }
    if (filesByDir.size === 0) return "";
    return [...filesByDir.entries()]
      .sort(([a], [b]) => a.localeCompare(b))
      .map(([dir, files]) => `- \`${dir}/\` \u2014 ${files.join(", ")}`)
      .join("\n");
  }
};
189
// Task-aware view presets. Each view chooses which sections appear, their
// order, and per-section limits for one kind of work.
var VIEWS = {
  // Balanced default overview.
  general: {
    name: "general",
    sections: [
      { section: "gods", limit: 8, heading: "Core entities" },
      { section: "structure", limit: 0, heading: "Structure" },
      { section: "mistakes", limit: 5, heading: "\u26A0\uFE0F Past mistakes" },
      { section: "decisions", limit: 5, heading: "Decisions" },
      { section: "patterns", limit: 5, heading: "Patterns" },
      { section: "deps", limit: 5, heading: "Key dependencies" }
    ]
  },
  // Debugging: lead with churn hotspots and previously-recorded failures.
  "bug-fix": {
    name: "bug-fix",
    sections: [
      { section: "hotFiles", limit: 10, heading: "\u{1F525} Hot files" },
      { section: "mistakes", limit: 10, heading: "\u26A0\uFE0F Past mistakes" },
      { section: "gods", limit: 5, heading: "Core entities" },
      { section: "structure", limit: 0, heading: "Structure" },
      { section: "patterns", limit: 5, heading: "Patterns" }
    ]
  },
  // New feature work: emphasize entities and past decisions.
  feature: {
    name: "feature",
    sections: [
      { section: "gods", limit: 12, heading: "Core entities" },
      { section: "structure", limit: 0, heading: "Structure" },
      { section: "decisions", limit: 10, heading: "Decisions" },
      { section: "deps", limit: 5, heading: "Key dependencies" },
      { section: "mistakes", limit: 3, heading: "\u26A0\uFE0F Past mistakes" }
    ]
  },
  // Refactoring: widest entity and dependency coverage.
  refactor: {
    name: "refactor",
    sections: [
      { section: "gods", limit: 15, heading: "Core entities" },
      { section: "structure", limit: 0, heading: "Structure" },
      { section: "deps", limit: 10, heading: "Key dependencies" },
      { section: "patterns", limit: 10, heading: "Patterns" },
      { section: "mistakes", limit: 5, heading: "\u26A0\uFE0F Past mistakes" }
    ]
  }
};
232
// Render the complete auto-generated markdown block for one view, wrapped
// in AUTOGEN_START/AUTOGEN_END markers so writeToFile can splice it.
function generateSummary(store, view = VIEWS.general) {
  const stats = store.getStats();
  const out = [
    AUTOGEN_START,
    "## Codebase Structure (auto-generated by engram)",
    "",
    `**Graph:** ${stats.nodes} nodes, ${stats.edges} edges | ${stats.extractedPct}% extracted, ${stats.inferredPct}% inferred`,
    `**View:** ${view.name}`,
    ""
  ];
  for (const spec of view.sections) {
    const build = SECTION_BUILDERS[spec.section];
    if (!build) continue; // unknown section id: skip silently
    const body = build(store, spec.limit);
    if (!body) continue; // empty sections are omitted entirely
    out.push(`## ${spec.heading}`, "", body, "");
  }
  out.push(
    '**Tip:** Run `engram query "your question"` for structural context instead of reading files.',
    AUTOGEN_END
  );
  return out.join("\n");
}
255
// Locate the AUTOGEN markers in `content`, ignoring any occurrences inside
// fenced code blocks (``` or ~~~, tracked as a simple open/close toggle).
// Returns:
//   { state: "none" }                              — no markers at all
//   { state: "balanced", startOffset, endOffset }  — exactly one START
//     before exactly one END; endOffset points past the END marker text
//   { state: "unbalanced", error }                 — anything else
function analyzeMarkers(content) {
  const startOffsets = [];
  const endOffsets = [];
  let insideFence = false;
  let offset = 0;
  for (const line of content.split("\n")) {
    const lead = line.trimStart();
    if (lead.startsWith("```") || lead.startsWith("~~~")) {
      insideFence = !insideFence;
      offset += line.length + 1;
      continue;
    }
    if (!insideFence) {
      const s = line.indexOf(AUTOGEN_START);
      if (s !== -1) startOffsets.push(offset + s);
      const e = line.indexOf(AUTOGEN_END);
      if (e !== -1) endOffsets.push(offset + e);
    }
    offset += line.length + 1; // +1 for the "\n" removed by split
  }
  if (startOffsets.length === 0 && endOffsets.length === 0) {
    return { state: "none" };
  }
  if (startOffsets.length === 1 && endOffsets.length === 1 && startOffsets[0] < endOffsets[0]) {
    return {
      state: "balanced",
      startOffset: startOffsets[0],
      endOffset: endOffsets[0] + AUTOGEN_END.length
    };
  }
  return {
    state: "unbalanced",
    error: `Found ${startOffsets.length} start marker(s) and ${endOffsets.length} end marker(s) outside code fences; expected exactly 1 of each. Fix the markers manually.`
  };
}
291
// Write `summary` into filePath: replace the existing marker-delimited
// section when one is present, otherwise append after the current content.
// Throws when the markers are unbalanced rather than risk corrupting the
// user's file.
function writeToFile(filePath, summary) {
  const current = existsSync2(filePath) ? readFileSync2(filePath, "utf-8") : "";
  const analysis = analyzeMarkers(current);
  if (analysis.state === "unbalanced") {
    throw new Error(
      `engram: cannot safely update ${filePath}: ${analysis.error} Re-run engram gen after fixing the markers.`
    );
  }
  let updated;
  if (analysis.state === "balanced") {
    // Splice over the old section in place.
    updated = current.slice(0, analysis.startOffset) + summary + current.slice(analysis.endOffset);
  } else {
    // No section yet: append after existing content (if any).
    const existing = current.trimEnd();
    updated = (existing ? existing + "\n\n" : "") + summary + "\n";
  }
  if (!updated.endsWith("\n")) updated += "\n";
  writeFileSync2(filePath, updated);
}
312
// Generate the summary for projectRoot and write it into the chosen target
// file (CLAUDE.md / .cursorrules / AGENTS.md). `target` selects the file
// explicitly; when absent/unrecognized, whichever file already exists wins
// (preferring CLAUDE.md), defaulting to CLAUDE.md. `task` picks a VIEWS
// preset; unknown names throw with the list of valid ones.
async function autogen(projectRoot, target, task) {
  const { getStore } = await import("./core-H72MM256.js");
  const store = await getStore(projectRoot);
  try {
    let view = VIEWS.general;
    if (task) {
      const requested = VIEWS[task];
      if (!requested) {
        const valid = Object.keys(VIEWS).join(", ");
        throw new Error(
          `engram gen: unknown task "${task}". Valid: ${valid}.`
        );
      }
      view = requested;
    }
    const summary = generateSummary(store, view);
    const stats = store.getStats();
    let targetFile;
    if (target === "claude") {
      targetFile = join2(projectRoot, "CLAUDE.md");
    } else if (target === "cursor") {
      targetFile = join2(projectRoot, ".cursorrules");
    } else if (target === "agents") {
      targetFile = join2(projectRoot, "AGENTS.md");
    } else {
      // Auto-detect: reuse whichever candidate already exists, in
      // preference order; fall back to creating CLAUDE.md.
      const existing = ["CLAUDE.md", ".cursorrules", "AGENTS.md"]
        .map((f) => join2(projectRoot, f))
        .find((p) => existsSync2(p));
      targetFile = existing ?? join2(projectRoot, "CLAUDE.md");
    }
    writeToFile(targetFile, summary);
    return { file: targetFile, nodesIncluded: stats.nodes, view: view.name };
  } finally {
    // Always release the store, even when view lookup or writing throws.
    store.close();
  }
}
353
+
354
+ export {
355
+ install,
356
+ uninstall,
357
+ status,
358
+ VIEWS,
359
+ generateSummary,
360
+ autogen
361
+ };
package/dist/cli.js CHANGED
@@ -4,31 +4,42 @@ import {
4
4
  install,
5
5
  status,
6
6
  uninstall
7
- } from "./chunk-WJUA4VZ7.js";
7
+ } from "./chunk-QKCPFSVU.js";
8
8
  import {
9
9
  benchmark,
10
10
  godNodes,
11
11
  init,
12
12
  learn,
13
+ mistakes,
13
14
  path,
14
15
  query,
15
16
  stats
16
- } from "./chunk-44GN6IRQ.js";
17
+ } from "./chunk-D53DRZZL.js";
17
18
 
18
19
  // src/cli.ts
19
20
  import { Command } from "commander";
20
21
  import chalk from "chalk";
21
22
  var program = new Command();
22
- program.name("engram").description("AI coding memory that learns from every session").version("0.1.0");
23
- program.command("init").description("Scan codebase and build knowledge graph (zero LLM cost)").argument("[path]", "Project directory", ".").action(async (projectPath) => {
23
+ program.name("engram").description("AI coding memory that learns from every session").version("0.2.0");
24
+ program.command("init").description("Scan codebase and build knowledge graph (zero LLM cost)").argument("[path]", "Project directory", ".").option(
25
+ "--with-skills [dir]",
26
+ "Also index Claude Code skills from ~/.claude/skills/ or a given path"
27
+ ).action(async (projectPath, opts) => {
24
28
  console.log(chalk.dim("\u{1F50D} Scanning codebase..."));
25
- const result = await init(projectPath);
29
+ const result = await init(projectPath, {
30
+ withSkills: opts.withSkills
31
+ });
26
32
  console.log(
27
33
  chalk.green("\u{1F333} AST extraction complete") + chalk.dim(` (${result.timeMs}ms, 0 tokens used)`)
28
34
  );
29
35
  console.log(
30
36
  ` ${chalk.bold(String(result.nodes))} nodes, ${chalk.bold(String(result.edges))} edges from ${chalk.bold(String(result.fileCount))} files (${result.totalLines.toLocaleString()} lines)`
31
37
  );
38
+ if (result.skillCount && result.skillCount > 0) {
39
+ console.log(
40
+ chalk.cyan(` ${chalk.bold(String(result.skillCount))} skills indexed`)
41
+ );
42
+ }
32
43
  const bench = await benchmark(projectPath);
33
44
  if (bench.naiveFullCorpus > 0 && bench.reductionVsRelevant > 1) {
34
45
  console.log(
@@ -105,6 +116,33 @@ program.command("learn").description("Teach engram a decision, pattern, or lesso
105
116
  console.log(chalk.yellow("No patterns extracted. Try a more specific statement."));
106
117
  }
107
118
  });
119
+ program.command("mistakes").description("List known mistakes extracted from past sessions").option("-p, --project <path>", "Project directory", ".").option("-l, --limit <n>", "Max entries to display", "20").option("--since <days>", "Only mistakes from the last N days").action(
120
+ async (opts) => {
121
+ const result = await mistakes(opts.project, {
122
+ limit: Number(opts.limit),
123
+ sinceDays: opts.since ? Number(opts.since) : void 0
124
+ });
125
+ if (result.length === 0) {
126
+ console.log(chalk.yellow("No mistakes recorded."));
127
+ return;
128
+ }
129
+ console.log(
130
+ chalk.bold(`
131
+ \u26A0\uFE0F ${result.length} mistake(s) recorded:
132
+ `)
133
+ );
134
+ for (const m of result) {
135
+ const ago = Math.max(
136
+ 1,
137
+ Math.round((Date.now() - m.lastVerified) / 864e5)
138
+ );
139
+ console.log(
140
+ ` ${chalk.dim(`[${m.sourceFile}, ${ago}d ago]`)} ${m.label}`
141
+ );
142
+ }
143
+ console.log();
144
+ }
145
+ );
108
146
  program.command("bench").description("Run token reduction benchmark").option("-p, --project <path>", "Project directory", ".").action(async (opts) => {
109
147
  const result = await benchmark(opts.project);
110
148
  console.log(chalk.bold("\n\u26A1 engram token reduction benchmark\n"));
@@ -122,8 +160,18 @@ var hooks = program.command("hooks").description("Manage git hooks");
122
160
  hooks.command("install").description("Install post-commit and post-checkout hooks").argument("[path]", "Project directory", ".").action((p) => console.log(install(p)));
123
161
  hooks.command("uninstall").description("Remove engram git hooks").argument("[path]", "Project directory", ".").action((p) => console.log(uninstall(p)));
124
162
  hooks.command("status").description("Check if hooks are installed").argument("[path]", "Project directory", ".").action((p) => console.log(status(p)));
125
- program.command("gen").description("Generate CLAUDE.md / .cursorrules section from graph").option("-p, --project <path>", "Project directory", ".").option("-t, --target <type>", "Target file: claude, cursor, agents").action(async (opts) => {
126
- const result = await autogen(opts.project, opts.target);
127
- console.log(chalk.green(`\u2705 Updated ${result.file} (${result.nodesIncluded} nodes)`));
128
- });
163
+ program.command("gen").description("Generate CLAUDE.md / .cursorrules section from graph").option("-p, --project <path>", "Project directory", ".").option("-t, --target <type>", "Target file: claude, cursor, agents").option(
164
+ "--task <name>",
165
+ "Task-aware view: general (default), bug-fix, feature, refactor"
166
+ ).action(
167
+ async (opts) => {
168
+ const target = opts.target;
169
+ const result = await autogen(opts.project, target, opts.task);
170
+ console.log(
171
+ chalk.green(
172
+ `\u2705 Updated ${result.file} (${result.nodesIncluded} nodes, view: ${result.view})`
173
+ )
174
+ );
175
+ }
176
+ );
129
177
  program.parse();
@@ -5,10 +5,11 @@ import {
5
5
  godNodes,
6
6
  init,
7
7
  learn,
8
+ mistakes,
8
9
  path,
9
10
  query,
10
11
  stats
11
- } from "./chunk-44GN6IRQ.js";
12
+ } from "./chunk-D53DRZZL.js";
12
13
  export {
13
14
  benchmark,
14
15
  getDbPath,
@@ -16,6 +17,7 @@ export {
16
17
  godNodes,
17
18
  init,
18
19
  learn,
20
+ mistakes,
19
21
  path,
20
22
  query,
21
23
  stats
package/dist/index.js CHANGED
@@ -1,8 +1,10 @@
1
1
  import {
2
+ VIEWS,
2
3
  autogen,
4
+ generateSummary,
3
5
  install,
4
6
  uninstall
5
- } from "./chunk-WJUA4VZ7.js";
7
+ } from "./chunk-QKCPFSVU.js";
6
8
  import {
7
9
  GraphStore,
8
10
  SUPPORTED_EXTENSIONS,
@@ -12,27 +14,37 @@ import {
12
14
  godNodes,
13
15
  init,
14
16
  learn,
17
+ mineSkills,
18
+ mistakes,
15
19
  path,
16
20
  query,
17
21
  queryGraph,
18
22
  shortestPath,
19
- stats
20
- } from "./chunk-44GN6IRQ.js";
23
+ sliceGraphemeSafe,
24
+ stats,
25
+ truncateGraphemeSafe
26
+ } from "./chunk-D53DRZZL.js";
21
27
  export {
22
28
  GraphStore,
23
29
  SUPPORTED_EXTENSIONS,
30
+ VIEWS,
24
31
  autogen,
25
32
  benchmark,
26
33
  extractDirectory,
27
34
  extractFile,
35
+ generateSummary,
28
36
  godNodes,
29
37
  init,
30
38
  install as installHooks,
31
39
  learn,
40
+ mineSkills,
41
+ mistakes,
32
42
  path,
33
43
  query,
34
44
  queryGraph,
35
45
  shortestPath,
46
+ sliceGraphemeSafe,
36
47
  stats,
48
+ truncateGraphemeSafe,
37
49
  uninstall as uninstallHooks
38
50
  };
package/dist/serve.js CHANGED
@@ -1,13 +1,21 @@
1
1
  #!/usr/bin/env node
2
2
  import {
3
+ MAX_MISTAKE_LABEL_CHARS,
3
4
  benchmark,
4
5
  godNodes,
6
+ mistakes,
5
7
  path,
6
8
  query,
7
- stats
8
- } from "./chunk-44GN6IRQ.js";
9
+ stats,
10
+ truncateGraphemeSafe
11
+ } from "./chunk-D53DRZZL.js";
9
12
 
10
13
  // src/serve.ts
14
+ function clampInt(value, defaultValue, min, max) {
15
+ const n = Number(value);
16
+ if (!Number.isFinite(n)) return defaultValue;
17
+ return Math.max(min, Math.min(max, Math.floor(n)));
18
+ }
11
19
  var PROJECT_ROOT = process.argv[2] || process.cwd();
12
20
  var TOOLS = [
13
21
  {
@@ -50,22 +58,41 @@ var TOOLS = [
50
58
  name: "benchmark",
51
59
  description: "Compare token cost of graph queries vs reading raw files.",
52
60
  inputSchema: { type: "object", properties: {} }
61
+ },
62
+ {
63
+ name: "list_mistakes",
64
+ description: "List known mistakes from the knowledge graph \u2014 prior bugs, failure modes, and wrong approaches extracted from past session documents. Use this before making changes to check if the codebase has hit similar problems before.",
65
+ inputSchema: {
66
+ type: "object",
67
+ properties: {
68
+ limit: {
69
+ type: "integer",
70
+ default: 20,
71
+ description: "Maximum number of mistakes to return"
72
+ },
73
+ since_days: {
74
+ type: "integer",
75
+ description: "Only return mistakes verified in the last N days"
76
+ }
77
+ }
78
+ }
53
79
  }
54
80
  ];
55
81
  async function handleToolCall(name, args) {
56
82
  switch (name) {
57
83
  case "query_graph": {
58
- const result = await query(PROJECT_ROOT, args.question, {
59
- mode: args.mode ?? "bfs",
60
- depth: args.depth ?? 3,
61
- tokenBudget: args.token_budget ?? 2e3
84
+ const question = typeof args.question === "string" ? args.question : "";
85
+ const result = await query(PROJECT_ROOT, question, {
86
+ mode: args.mode === "dfs" ? "dfs" : "bfs",
87
+ depth: clampInt(args.depth, 3, 1, 6),
88
+ tokenBudget: clampInt(args.token_budget, 2e3, 100, 1e4)
62
89
  });
63
90
  return `${result.nodesFound} nodes found (~${result.estimatedTokens} tokens)
64
91
 
65
92
  ${result.text}`;
66
93
  }
67
94
  case "god_nodes": {
68
- const gods = await godNodes(PROJECT_ROOT, args.top_n ?? 10);
95
+ const gods = await godNodes(PROJECT_ROOT, clampInt(args.top_n, 10, 1, 100));
69
96
  return gods.map((g, i) => `${i + 1}. ${g.label} [${g.kind}] \u2014 ${g.degree} edges (${g.sourceFile})`).join("\n");
70
97
  }
71
98
  case "graph_stats": {
@@ -91,6 +118,17 @@ AMBIGUOUS: ${s.ambiguousPct}%`;
91
118
  ...b.perQuestion.map((pq) => `[${pq.reductionFull}x full / ${pq.reductionRelevant}x relevant] ${pq.question}`)
92
119
  ].join("\n");
93
120
  }
121
+ case "list_mistakes": {
122
+ const result = await mistakes(PROJECT_ROOT, {
123
+ limit: clampInt(args.limit, 20, 1, 100),
124
+ sinceDays: args.since_days !== void 0 ? clampInt(args.since_days, 0, 0, 3650) : void 0
125
+ });
126
+ if (result.length === 0) return "No mistakes recorded.";
127
+ return result.map((m, i) => {
128
+ const label = truncateGraphemeSafe(m.label, MAX_MISTAKE_LABEL_CHARS);
129
+ return `${i + 1}. ${label} (confidence: ${m.confidence} ${m.confidenceScore}, from ${m.sourceFile})`;
130
+ }).join("\n");
131
+ }
94
132
  default:
95
133
  return `Unknown tool: ${name}`;
96
134
  }
@@ -103,13 +141,28 @@ process.stdin.on("data", (chunk) => {
103
141
  buffer = lines.pop() ?? "";
104
142
  for (const line of lines) {
105
143
  if (!line.trim()) continue;
144
+ let req;
106
145
  try {
107
- const req = JSON.parse(line);
108
- handleRequest(req).then((res) => {
109
- process.stdout.write(JSON.stringify(res) + "\n");
110
- });
146
+ req = JSON.parse(line);
111
147
  } catch {
148
+ const errResp = {
149
+ jsonrpc: "2.0",
150
+ id: null,
151
+ error: { code: -32700, message: "Parse error" }
152
+ };
153
+ process.stdout.write(JSON.stringify(errResp) + "\n");
154
+ continue;
112
155
  }
156
+ handleRequest(req).then((res) => {
157
+ process.stdout.write(JSON.stringify(res) + "\n");
158
+ }).catch(() => {
159
+ const errResp = {
160
+ jsonrpc: "2.0",
161
+ id: req.id,
162
+ error: { code: -32e3, message: "Internal server error" }
163
+ };
164
+ process.stdout.write(JSON.stringify(errResp) + "\n");
165
+ });
113
166
  }
114
167
  });
115
168
  async function handleRequest(req) {
@@ -121,7 +174,7 @@ async function handleRequest(req) {
121
174
  result: {
122
175
  protocolVersion: "2024-11-05",
123
176
  capabilities: { tools: {} },
124
- serverInfo: { name: "engram", version: "0.1.0" }
177
+ serverInfo: { name: "engram", version: "0.2.0" }
125
178
  }
126
179
  };
127
180
  case "tools/list":
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "engramx",
3
- "version": "0.1.1",
3
+ "version": "0.2.0",
4
4
  "description": "AI coding memory that learns from every session — persistent, structural, universal",
5
5
  "type": "module",
6
6
  "bin": {