@tekmidian/pai 0.2.2 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/ARCHITECTURE.md +148 -6
  2. package/FEATURE.md +1 -1
  3. package/README.md +79 -0
  4. package/dist/{auto-route-D7W6RE06.mjs → auto-route-JjW3f7pV.mjs} +4 -4
  5. package/dist/{auto-route-D7W6RE06.mjs.map → auto-route-JjW3f7pV.mjs.map} +1 -1
  6. package/dist/chunker-CbnBe0s0.mjs +191 -0
  7. package/dist/chunker-CbnBe0s0.mjs.map +1 -0
  8. package/dist/cli/index.mjs +835 -40
  9. package/dist/cli/index.mjs.map +1 -1
  10. package/dist/{config-DBh1bYM2.mjs → config-DELNqq3Z.mjs} +4 -2
  11. package/dist/{config-DBh1bYM2.mjs.map → config-DELNqq3Z.mjs.map} +1 -1
  12. package/dist/daemon/index.mjs +9 -9
  13. package/dist/{daemon-v5O897D4.mjs → daemon-CeTX4NpF.mjs} +94 -13
  14. package/dist/daemon-CeTX4NpF.mjs.map +1 -0
  15. package/dist/daemon-mcp/index.mjs +3 -3
  16. package/dist/db-Dp8VXIMR.mjs +212 -0
  17. package/dist/db-Dp8VXIMR.mjs.map +1 -0
  18. package/dist/{detect-BHqYcjJ1.mjs → detect-D7gPV3fQ.mjs} +1 -1
  19. package/dist/{detect-BHqYcjJ1.mjs.map → detect-D7gPV3fQ.mjs.map} +1 -1
  20. package/dist/{detector-DKA83aTZ.mjs → detector-cYYhK2Mi.mjs} +2 -2
  21. package/dist/{detector-DKA83aTZ.mjs.map → detector-cYYhK2Mi.mjs.map} +1 -1
  22. package/dist/{embeddings-mfqv-jFu.mjs → embeddings-DGRAPAYb.mjs} +2 -2
  23. package/dist/{embeddings-mfqv-jFu.mjs.map → embeddings-DGRAPAYb.mjs.map} +1 -1
  24. package/dist/{factory-BDAiKtYR.mjs → factory-DZLvRf4m.mjs} +4 -4
  25. package/dist/{factory-BDAiKtYR.mjs.map → factory-DZLvRf4m.mjs.map} +1 -1
  26. package/dist/index.d.mts +1 -1
  27. package/dist/index.d.mts.map +1 -1
  28. package/dist/index.mjs +9 -7
  29. package/dist/{indexer-B20bPHL-.mjs → indexer-CKQcgKsz.mjs} +4 -190
  30. package/dist/indexer-CKQcgKsz.mjs.map +1 -0
  31. package/dist/{indexer-backend-BXaocO5r.mjs → indexer-backend-BHztlJJg.mjs} +4 -3
  32. package/dist/{indexer-backend-BXaocO5r.mjs.map → indexer-backend-BHztlJJg.mjs.map} +1 -1
  33. package/dist/{ipc-client-DPy7s3iu.mjs → ipc-client-CLt2fNlC.mjs} +1 -1
  34. package/dist/ipc-client-CLt2fNlC.mjs.map +1 -0
  35. package/dist/mcp/index.mjs +118 -5
  36. package/dist/mcp/index.mjs.map +1 -1
  37. package/dist/{migrate-Bwj7qPaE.mjs → migrate-jokLenje.mjs} +8 -1
  38. package/dist/migrate-jokLenje.mjs.map +1 -0
  39. package/dist/{pai-marker-DX_mFLum.mjs → pai-marker-CXQPX2P6.mjs} +1 -1
  40. package/dist/{pai-marker-DX_mFLum.mjs.map → pai-marker-CXQPX2P6.mjs.map} +1 -1
  41. package/dist/{postgres-Ccvpc6fC.mjs → postgres-CRBe30Ag.mjs} +1 -1
  42. package/dist/{postgres-Ccvpc6fC.mjs.map → postgres-CRBe30Ag.mjs.map} +1 -1
  43. package/dist/{schemas-DjdwzIQ8.mjs → schemas-BY3Pjvje.mjs} +1 -1
  44. package/dist/{schemas-DjdwzIQ8.mjs.map → schemas-BY3Pjvje.mjs.map} +1 -1
  45. package/dist/{search-PjftDxxs.mjs → search-GK0ibTJy.mjs} +2 -2
  46. package/dist/{search-PjftDxxs.mjs.map → search-GK0ibTJy.mjs.map} +1 -1
  47. package/dist/{sqlite-CHUrNtbI.mjs → sqlite-RyR8Up1v.mjs} +3 -3
  48. package/dist/{sqlite-CHUrNtbI.mjs.map → sqlite-RyR8Up1v.mjs.map} +1 -1
  49. package/dist/{tools-CLK4080-.mjs → tools-CUg0Lyg-.mjs} +175 -11
  50. package/dist/{tools-CLK4080-.mjs.map → tools-CUg0Lyg-.mjs.map} +1 -1
  51. package/dist/{utils-DEWdIFQ0.mjs → utils-QSfKagcj.mjs} +62 -2
  52. package/dist/utils-QSfKagcj.mjs.map +1 -0
  53. package/dist/vault-indexer-Bo2aPSzP.mjs +499 -0
  54. package/dist/vault-indexer-Bo2aPSzP.mjs.map +1 -0
  55. package/dist/zettelkasten-Co-w0XSZ.mjs +901 -0
  56. package/dist/zettelkasten-Co-w0XSZ.mjs.map +1 -0
  57. package/package.json +2 -1
  58. package/src/hooks/README.md +99 -0
  59. package/src/hooks/hooks.md +13 -0
  60. package/src/hooks/pre-compact.sh +95 -0
  61. package/src/hooks/session-stop.sh +93 -0
  62. package/statusline-command.sh +9 -4
  63. package/templates/pai-skill.template.md +428 -0
  64. package/templates/templates.md +20 -0
  65. package/dist/daemon-v5O897D4.mjs.map +0 -1
  66. package/dist/db-BcDxXVBu.mjs +0 -110
  67. package/dist/db-BcDxXVBu.mjs.map +0 -1
  68. package/dist/indexer-B20bPHL-.mjs.map +0 -1
  69. package/dist/ipc-client-DPy7s3iu.mjs.map +0 -1
  70. package/dist/migrate-Bwj7qPaE.mjs.map +0 -1
  71. package/dist/utils-DEWdIFQ0.mjs.map +0 -1
package/dist/{indexer-B20bPHL-.mjs → indexer-CKQcgKsz.mjs}
@@ -1,195 +1,9 @@
+ import { t as chunkMarkdown } from "./chunker-CbnBe0s0.mjs";
  import { existsSync, readFileSync, readdirSync, statSync } from "node:fs";
  import { homedir } from "node:os";
  import { basename, join, normalize, relative } from "node:path";
  import { createHash } from "node:crypto";

- //#region src/memory/chunker.ts
- /**
- * Markdown text chunker for the PAI memory engine.
- *
- * Splits markdown files into overlapping text segments suitable for BM25
- * full-text indexing. Respects heading boundaries where possible, falling
- * back to paragraph and sentence splitting when sections are large.
- */
- const DEFAULT_MAX_TOKENS = 400;
- const DEFAULT_OVERLAP = 80;
- /**
- * Approximate token count using a words * 1.3 heuristic.
- * Matches the OpenClaw estimate approach.
- */
- function estimateTokens(text) {
- const wordCount = text.split(/\s+/).filter(Boolean).length;
- return Math.ceil(wordCount * 1.3);
- }
- /**
- * Compute SHA-256 hash of a string, returning a hex string.
- */
- function sha256(text) {
- return createHash("sha256").update(text).digest("hex");
- }
- /**
- * Split content into sections delimited by ## or ### headings.
- * Each section starts at its heading line (or at line 1 for a preamble).
- */
- function splitBySections(lines) {
- const sections = [];
- let current = [];
- for (const line of lines) {
- if (/^#{1,3}\s/.test(line.text) && current.length > 0) {
- const text = current.map((l) => l.text).join("\n");
- sections.push({
- lines: current,
- tokens: estimateTokens(text)
- });
- current = [];
- }
- current.push(line);
- }
- if (current.length > 0) {
- const text = current.map((l) => l.text).join("\n");
- sections.push({
- lines: current,
- tokens: estimateTokens(text)
- });
- }
- return sections;
- }
- /**
- * Split a LineBlock by double-newline paragraph boundaries.
- */
- function splitByParagraphs(block) {
- const paragraphs = [];
- let current = [];
- for (const line of block.lines) if (line.text.trim() === "" && current.length > 0) {
- const text = current.map((l) => l.text).join("\n");
- paragraphs.push({
- lines: [...current],
- tokens: estimateTokens(text)
- });
- current = [];
- } else current.push(line);
- if (current.length > 0) {
- const text = current.map((l) => l.text).join("\n");
- paragraphs.push({
- lines: current,
- tokens: estimateTokens(text)
- });
- }
- return paragraphs.length > 0 ? paragraphs : [block];
- }
- /**
- * Split a LineBlock by sentence boundaries (. ! ?) when even paragraphs are
- * too large. Works character-by-character within joined lines.
- */
- function splitBySentences(block, maxTokens) {
- const sentences = block.lines.map((l) => l.text).join(" ").split(/(?<=[.!?])\s+(?=[A-Z"'])/g);
- const result = [];
- let accText = "";
- const startLine = block.lines[0]?.lineNo ?? 1;
- const endLine = block.lines[block.lines.length - 1]?.lineNo ?? startLine;
- const totalLines = endLine - startLine + 1;
- const linesPerSentence = Math.max(1, Math.floor(totalLines / Math.max(1, sentences.length)));
- let sentenceIdx = 0;
- let approxLine = startLine;
- const flush = () => {
- if (!accText.trim()) return;
- const endApprox = Math.min(approxLine + linesPerSentence - 1, endLine);
- result.push({
- lines: [{
- text: accText.trim(),
- lineNo: approxLine
- }],
- tokens: estimateTokens(accText)
- });
- approxLine = endApprox + 1;
- accText = "";
- };
- for (const sentence of sentences) {
- sentenceIdx++;
- const candidateText = accText ? accText + " " + sentence : sentence;
- if (estimateTokens(candidateText) > maxTokens && accText) {
- flush();
- accText = sentence;
- } else accText = candidateText;
- }
- flush();
- return result.length > 0 ? result : [block];
- }
- /**
- * Extract the last `overlapTokens` worth of text from a list of previously
- * emitted chunks to prepend to the next chunk.
- */
- function buildOverlapPrefix(chunks, overlapTokens) {
- if (overlapTokens <= 0 || chunks.length === 0) return [];
- const lastChunk = chunks[chunks.length - 1];
- if (!lastChunk) return [];
- const lines = lastChunk.text.split("\n");
- const kept = [];
- let acc = 0;
- for (let i = lines.length - 1; i >= 0; i--) {
- const lineTokens = estimateTokens(lines[i] ?? "");
- acc += lineTokens;
- kept.unshift(lines[i] ?? "");
- if (acc >= overlapTokens) break;
- }
- const startLine = lastChunk.endLine - kept.length + 1;
- return kept.map((text, idx) => ({
- text,
- lineNo: Math.max(lastChunk.startLine, startLine + idx)
- }));
- }
- /**
- * Chunk a markdown file into overlapping segments for BM25 indexing.
- *
- * Strategy:
- * 1. Split by headings (##, ###) as natural boundaries.
- * 2. If a section exceeds maxTokens, split by paragraphs.
- * 3. If a paragraph still exceeds maxTokens, split by sentences.
- * 4. Apply overlap: each chunk includes the last `overlap` tokens from the
- * previous chunk.
- */
- function chunkMarkdown(content, opts) {
- const maxTokens = opts?.maxTokens ?? DEFAULT_MAX_TOKENS;
- const overlapTokens = opts?.overlap ?? DEFAULT_OVERLAP;
- if (!content.trim()) return [];
- const sections = splitBySections(content.split("\n").map((text, idx) => ({
- text,
- lineNo: idx + 1
- })));
- const finalBlocks = [];
- for (const section of sections) {
- if (section.tokens <= maxTokens) {
- finalBlocks.push(section);
- continue;
- }
- const paras = splitByParagraphs(section);
- for (const para of paras) {
- if (para.tokens <= maxTokens) {
- finalBlocks.push(para);
- continue;
- }
- const sentences = splitBySentences(para, maxTokens);
- finalBlocks.push(...sentences);
- }
- }
- const chunks = [];
- for (const block of finalBlocks) {
- if (block.lines.length === 0) continue;
- const text = [...buildOverlapPrefix(chunks, overlapTokens), ...block.lines].map((l) => l.text).join("\n").trim();
- if (!text) continue;
- const startLine = block.lines[0]?.lineNo ?? 1;
- const endLine = block.lines[block.lines.length - 1]?.lineNo ?? startLine;
- chunks.push({
- text,
- startLine,
- endLine,
- hash: sha256(text)
- });
- }
- return chunks;
- }
-
- //#endregion
  //#region src/memory/indexer.ts
  /**
  * File indexer for the PAI federation memory engine.
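The chunker removed above was not deleted; it was extracted into its own chunk (chunker-CbnBe0s0.mjs, file 6 in the list), which the new import at the top of this hunk pulls back in under the alias `t`. A minimal usage sketch under that assumption, relying only on shapes visible in the removed source (defaults maxTokens = 400, overlap = 80; chunks of { text, startLine, endLine, hash }); the demo file itself is hypothetical, not part of the package:

```js
// demo.mjs — hypothetical consumer of the relocated chunker.
import { t as chunkMarkdown } from "./chunker-CbnBe0s0.mjs";

const md = [
  "## Setup",
  "Install the daemon, then run the indexer.",
  "## Search",
  "BM25 queries go through memory_fts.",
].join("\n");

// estimateTokens uses ceil(words * 1.3): the 7-word Setup paragraph counts as
// ceil(7 * 1.3) = 10 tokens, far below the 400-token section limit, so the file
// yields one chunk per ## section, the second carrying an overlap prefix from
// the first.
for (const chunk of chunkMarkdown(md, { maxTokens: 400, overlap: 80 })) {
  console.log(`${chunk.startLine}-${chunk.endLine}`, chunk.hash.slice(0, 8));
}
```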
@@ -634,7 +448,7 @@ async function indexAll(db, registryDb) {
  * @param onProgress Optional callback called after each batch with running totals.
  */
  async function embedChunks(db, projectId, batchSize = 50, onProgress) {
- const { generateEmbedding, serializeEmbedding } = await import("./embeddings-mfqv-jFu.mjs").then((n) => n.i);
+ const { generateEmbedding, serializeEmbedding } = await import("./embeddings-DGRAPAYb.mjs").then((n) => n.i);
  const conditions = ["embedding IS NULL"];
  const params = [];
  if (projectId !== void 0) {
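The body of embedChunks (visible in full in the source map below) works around a sync/async mismatch: better-sqlite3 is synchronous while embedding generation is async, so unembedded rows are fetched first, embedded in batches, and written back inside a transaction. A condensed sketch of that shape, assuming an open better-sqlite3 `db` with the memory_chunks table; the helper name embedPending is illustrative:

```js
// Sketch of embedChunks' batch/write-back pattern (condensed from the shipped
// source; the hashed embeddings chunk name is the one in this hunk).
async function embedPending(db, batchSize = 50) {
  const { generateEmbedding, serializeEmbedding } =
    await import("./embeddings-DGRAPAYb.mjs").then((n) => n.i);
  const rows = db
    .prepare("SELECT id, text FROM memory_chunks WHERE embedding IS NULL ORDER BY id")
    .all();
  const update = db.prepare("UPDATE memory_chunks SET embedding = ? WHERE id = ?");
  for (let i = 0; i < rows.length; i += batchSize) {
    const batch = [];
    for (const row of rows.slice(i, i + batchSize)) {
      const vec = await generateEmbedding(row.text); // async — outside the transaction
      batch.push({ id: row.id, blob: serializeEmbedding(vec) });
    }
    db.transaction(() => {                           // synchronous batch write-back
      for (const { id, blob } of batch) update.run(blob, id);
    })();
  }
  return rows.length;
}
```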
@@ -673,5 +487,5 @@ async function embedChunks(db, projectId, batchSize = 50, onProgress) {
  }

  //#endregion
- export { indexProject as a, indexFile as i, embedChunks as n, chunkMarkdown as o, indexAll as r, estimateTokens as s, detectTier as t };
- //# sourceMappingURL=indexer-B20bPHL-.mjs.map
+ export { indexProject as a, indexFile as i, embedChunks as n, indexAll as r, detectTier as t };
+ //# sourceMappingURL=indexer-CKQcgKsz.mjs.map
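Note the shrunken export surface: `chunkMarkdown as o` and `estimateTokens as s` are gone from the indexer chunk, so sibling chunks that used to reach the chunker through the indexer now import it directly — exactly the change the indexer-backend hunk below makes. Illustrated as a consumer-side before/after (hypothetical module):

```js
// 0.2.2 — chunker helpers were re-exported from the indexer chunk:
// import { o as chunkMarkdown, t as detectTier } from "./indexer-B20bPHL-.mjs";

// 0.3.1 — the chunker is its own chunk; detectTier stays on the indexer:
import { t as chunkMarkdown } from "./chunker-CbnBe0s0.mjs";
import { t as detectTier } from "./indexer-CKQcgKsz.mjs";

console.log(typeof chunkMarkdown, typeof detectTier); // "function function"
```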
package/dist/indexer-CKQcgKsz.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"indexer-CKQcgKsz.mjs","names":[],"sources":["../src/memory/indexer.ts"],"sourcesContent":[…],"mappings":"…"}
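The TypeScript source embedded in this map documents detectTier's four-tier classification: anything under Notes/ wins first, then MEMORY.md, then daily date filenames, then topic as the default. Worked calls, with expected results per those rules; the alias follows the export line above:

```js
import { t as detectTier } from "./indexer-CKQcgKsz.mjs";

detectTier("Notes/0086 - 2026-02-23 - Daemon.md"); // "session"   — anything under Notes/
detectTier("MEMORY.md");                           // "evergreen" — MEMORY.md (case-sensitive)
detectTier("memory/2026-02-23.md");                // "daily"     — YYYY-MM-DD.md filename
detectTier("memory/search-design.md");             // "topic"     — any other memory/ file
```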
package/dist/{indexer-backend-BXaocO5r.mjs → indexer-backend-BHztlJJg.mjs}
@@ -1,4 +1,5 @@
- import { o as chunkMarkdown, t as detectTier } from "./indexer-B20bPHL-.mjs";
+ import { t as chunkMarkdown } from "./chunker-CbnBe0s0.mjs";
+ import { t as detectTier } from "./indexer-CKQcgKsz.mjs";
  import { existsSync, readFileSync, readdirSync, statSync } from "node:fs";
  import { homedir } from "node:os";
  import { basename, join, normalize, relative } from "node:path";
@@ -313,7 +314,7 @@ const EMBED_YIELD_EVERY = 10;
  * Returns the number of newly embedded chunks.
  */
  async function embedChunksWithBackend(backend, shouldStop) {
- const { generateEmbedding, serializeEmbedding } = await import("./embeddings-mfqv-jFu.mjs").then((n) => n.i);
+ const { generateEmbedding, serializeEmbedding } = await import("./embeddings-DGRAPAYb.mjs").then((n) => n.i);
  const rows = await backend.getUnembeddedChunkIds();
  if (rows.length === 0) return 0;
  const total = rows.length;
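Per the embedded source, embedChunksWithBackend also carries a cancellation contract: `shouldStop` is polled between batches so the daemon's shutdown handler can close the Postgres pool without racing active queries. The control flow, as a standalone sketch — names mirror the source, and `embedOne` is a stand-in for the per-chunk embed-and-update step:

```js
const EMBED_BATCH_SIZE = 50;

async function embedAllChunks(rows, embedOne, shouldStop) {
  let embedded = 0;
  for (let i = 0; i < rows.length; i += EMBED_BATCH_SIZE) {
    // Checked between every batch: once the shutdown handler flips the flag,
    // the loop exits before touching the pool again.
    if (shouldStop?.()) {
      process.stderr.write(
        `[pai-daemon] Embed pass cancelled after ${embedded}/${rows.length} chunks (shutdown requested)\n`,
      );
      break;
    }
    const batch = rows.slice(i, i + EMBED_BATCH_SIZE);
    for (const row of batch) await embedOne(row);
    embedded += batch.length;
  }
  return embedded;
}
```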
@@ -357,4 +358,4 @@ async function indexAllWithBackend(backend, registryDb) {

  //#endregion
  export { embedChunksWithBackend, indexAllWithBackend };
- //# sourceMappingURL=indexer-backend-BXaocO5r.mjs.map
+ //# sourceMappingURL=indexer-backend-BHztlJJg.mjs.map
package/dist/{indexer-backend-BXaocO5r.mjs.map → indexer-backend-BHztlJJg.mjs.map}
@@ -1 +1 @@
- {"version":3,"file":"indexer-backend-BXaocO5r.mjs","names":[],"sources":["../src/memory/indexer-backend.ts"],"sourcesContent":[…],"mappings":"…"}
E;IAG/C;IAAW,QAAQ;IAAS,MAAM;IACtC,MAAM;IAAe,WAAW;IAAG,SAAS;IAC5C,MAJW,WAAW,KAAK;IAIrB;IAAM;IAAW,WAAW;IACnC;AACD,SAAM,QAAQ,aAAa,CAAC,WAAW,CAAC;;;AAI5C,KAAI,CAAC,6BAA6B,SAAS,CACzC,MAAK,MAAM,WAAW,iBAAiB,SAAS,CAC9C,cAAa,KAAK;EAAE;EAAS,UAAU;EAAU,QAAQ;EAAW,MAAM;EAAS,CAAC;AAIxF,KAAI,kBAAkB,mBAAmB,UAAU;AACjD,OAAK,MAAM,WAAW,YAAY,eAAe,CAC/C,cAAa,KAAK;GAAE;GAAS,UAAU;GAAgB,QAAQ;GAAS,MAAM;GAAW,CAAC;EAI5F;GACE,MAAM,YAAY,KAAK,KAAK;AAC5B,QAAK,MAAM,WAAW,YAAY,eAAe,EAAE;IAEjD,MAAM,OAAO,uBADI,SAAS,QAAQ,CACW;AAC7C,QAAI,CAAC,KAAM;IAEX,MAAM,gBAAgB,GADN,SAAS,gBAAgB,QAAQ,CAChB;IAGjC,MAAM,aAAyD;KAC7D,IAHS,QAAQ,WAAW,eAAe,GAAG,GAAG,EAAE;KAG/C;KAAW,QAAQ;KAAS,MAAM;KACtC,MAAM;KAAe,WAAW;KAAG,SAAS;KAC5C,MAJW,WAAW,KAAK;KAIrB;KAAM;KAAW,WAAW;KACnC;AACD,UAAM,QAAQ,aAAa,CAAC,WAAW,CAAC;;;AAI5C,MAAI,eAAe,SAAS,SAAS,EAAE;GACrC,MAAM,mBAAmB,eAAe,MAAM,GAAG,GAAiB;GAClE,MAAM,iBAAiB,KAAK,kBAAkB,YAAY;AAC1D,OAAI,WAAW,eAAe,CAC5B,cAAa,KAAK;IAAE,SAAS;IAAgB,UAAU;IAAkB,QAAQ;IAAU,MAAM;IAAa,CAAC;GAEjH,MAAM,kBAAkB,KAAK,kBAAkB,SAAS;AACxD,QAAK,MAAM,WAAW,YAAY,gBAAgB,EAAE;IAElD,MAAM,OAAO,WADG,SAAS,kBAAkB,QAAQ,CACnB;AAChC,iBAAa,KAAK;KAAE;KAAS,UAAU;KAAkB,QAAQ;KAAU;KAAM,CAAC;;;;AAKxF,OAAM,kBAAkB;CAExB,IAAI,kBAAkB;AAEtB,MAAK,MAAM,EAAE,SAAS,UAAU,QAAQ,UAAU,cAAc;AAC9D,MAAI,mBAAmB,mBAAmB;AACxC,SAAM,kBAAkB;AACxB,qBAAkB;;AAEpB;EAEA,MAAM,UAAU,SAAS,UAAU,QAAQ;AAG3C,MAFgB,MAAM,qBAAqB,SAAS,WAAW,UAAU,SAAS,QAAQ,KAAK,EAElF;GAEX,MAAM,MAAM,MAAM,QAAQ,YAAY,WAAW,QAAQ;AACzD,UAAO;AACP,UAAO,iBAAiB,IAAI;QAE5B,QAAO;;AAIX,QAAO;;AAOT,MAAM,mBAAmB;AACzB,MAAM,oBAAoB;;;;;;;;;;;;;AAc1B,eAAsB,uBACpB,SACA,YACiB;CACjB,MAAM,EAAE,mBAAmB,uBAAuB,MAAM,OAAO;CAE/D,MAAM,OAAO,MAAM,QAAQ,uBAAuB;AAClD,KAAI,KAAK,WAAW,EAAG,QAAO;CAE9B,MAAM,QAAQ,KAAK;CACnB,IAAI,WAAW;AAEf,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK,kBAAkB;AAEtD,MAAI,cAAc,EAAE;AAClB,WAAQ,OAAO,MACb,2CAA2C,SAAS,GAAG,MAAM,gCAC9D;AACD;;EAGF,MAAM,QAAQ,KAAK,MAAM,GAAG,IAAI,iBAAiB;AAEjD,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;GACrC,MAAM,EAAE,IAAI,SAAS,MAAM;AAG3B,QAAK,WAAW,KAAK,sBAAsB,EACzC,OAAM,kBAAkB;GAI1B,MAAM,OAAO,mBADD,MAAM,kBAAkB,KAAK,CACL;AACpC,SAAM,QAAQ,gBAAgB,IAAI,KAAK;;AAGzC,cAAY,MAAM;AAClB,UAAQ,OAAO,MACb,yBAAyB,SAAS,GAAG,MAAM,WAC5C;;AAGH,QAAO;;AAOT,eAAsB,oBACpB,SACA,YACoD;CACpD,MAAM,WAAW,WACd,QAAQ,+EAA+E,CACvF,KAAK;CAER,MAAM,SAAsB;EAAE,gBAAgB;EAAG,eAAe;EAAG,cAAc;EAAG;AAEpF,MAAK,MAAM,WAAW,UAAU;AAC9B,QAAM,kBAAkB;EACxB,MAAM,IAAI,MAAM,wBAAwB,SAAS,QAAQ,IAAI,QAAQ,WAAW,QAAQ,iBAAiB;AACzG,SAAO,kBAAkB,EAAE;AAC3B,SAAO,iBAAiB,EAAE;AAC1B,SAAO,gBAAgB,EAAE;;AAG3B,QAAO;EAAE,UAAU,SAAS;EAAQ,QAAQ;EAAQ"}
+ {"version":3,"file":"indexer-backend-BHztlJJg.mjs","names":[],"sources":["../src/memory/indexer-backend.ts"],"sourcesContent":["/**\n * Backend-aware indexer for PAI federation memory.\n *\n * This module provides the same functionality as indexer.ts but writes\n * through the StorageBackend interface instead of directly to better-sqlite3.\n * Used when the daemon is configured with the Postgres backend.\n *\n * The SQLite path still uses indexer.ts directly (which is faster for SQLite\n * due to synchronous transactions).\n */\n\nimport { createHash } from \"node:crypto\";\nimport { readFileSync, statSync, readdirSync, existsSync } from \"node:fs\";\nimport { join, relative, basename, normalize } from \"node:path\";\n\n// ---------------------------------------------------------------------------\n// Session title parsing\n// ---------------------------------------------------------------------------\n\nconst SESSION_TITLE_RE = /^(\\d{4})\\s*-\\s*(\\d{4}-\\d{2}-\\d{2})\\s*-\\s*(.+)\\.md$/;\n\n/**\n * Parse a session title from a Notes filename.\n * Format: \"NNNN - YYYY-MM-DD - Descriptive Title.md\"\n * Returns a synthetic chunk text like \"Session #0086 2026-02-23: Pai Daemon Background Service\"\n * or null if the filename doesn't match the expected pattern.\n */\nexport function parseSessionTitleChunk(fileName: string): string | null {\n const m = SESSION_TITLE_RE.exec(fileName);\n if (!m) return null;\n const [, num, date, title] = m;\n return `Session #${num} ${date}: ${title}`;\n}\nimport { homedir } from \"node:os\";\nimport type { Database } from \"better-sqlite3\";\nimport type { StorageBackend, ChunkRow } from \"../storage/interface.js\";\nimport type { IndexResult } from \"./indexer.js\";\nimport { chunkMarkdown } from \"./chunker.js\";\nimport { detectTier } from \"./indexer.js\";\n\n// ---------------------------------------------------------------------------\n// Constants (mirrored from indexer.ts)\n// ---------------------------------------------------------------------------\n\nconst MAX_FILES_PER_PROJECT = 5_000;\nconst MAX_WALK_DEPTH = 6;\nconst INDEX_YIELD_EVERY = 10;\n\n/**\n * Directories to ALWAYS skip, at any depth, during any directory walk.\n * These are build artifacts, dependency trees, and VCS internals that\n * should never be indexed regardless of where they appear in the tree.\n */\nconst ALWAYS_SKIP_DIRS = new Set([\n // Version control\n \".git\",\n // Dependency directories (any language)\n \"node_modules\",\n \"vendor\",\n \"Pods\", // CocoaPods (iOS/macOS)\n // Build / compile output\n \"dist\",\n \"build\",\n \"out\",\n \"DerivedData\", // Xcode\n \".next\", // Next.js\n // Python virtual environments and caches\n \".venv\",\n \"venv\",\n \"__pycache__\",\n // General caches\n \".cache\",\n \".bun\",\n]);\n\nconst ROOT_SCAN_SKIP_DIRS = new Set([\n \"memory\", \"Notes\", \".claude\", \".DS_Store\",\n ...ALWAYS_SKIP_DIRS,\n]);\n\nconst CONTENT_SCAN_SKIP_DIRS = new Set([\n \"Library\", \"Applications\", \"Music\", \"Movies\", \"Pictures\", \"Desktop\",\n \"Downloads\", \"Public\", \"coverage\",\n ...ALWAYS_SKIP_DIRS,\n]);\n\n// ---------------------------------------------------------------------------\n// Helpers (same logic as indexer.ts)\n// ---------------------------------------------------------------------------\n\nfunction sha256File(content: string): string {\n return createHash(\"sha256\").update(content).digest(\"hex\");\n}\n\nfunction chunkId(\n projectId: number,\n path: string,\n chunkIndex: number,\n startLine: number,\n endLine: 
number,\n): string {\n return createHash(\"sha256\")\n .update(`${projectId}:${path}:${chunkIndex}:${startLine}:${endLine}`)\n .digest(\"hex\");\n}\n\nfunction walkMdFiles(\n dir: string,\n acc?: string[],\n cap = MAX_FILES_PER_PROJECT,\n depth = 0,\n): string[] {\n const results = acc ?? [];\n if (!existsSync(dir)) return results;\n if (results.length >= cap) return results;\n if (depth > MAX_WALK_DEPTH) return results;\n try {\n for (const entry of readdirSync(dir, { withFileTypes: true })) {\n if (results.length >= cap) break;\n if (entry.isSymbolicLink()) continue;\n // Skip known junk directories at every recursion depth\n if (ALWAYS_SKIP_DIRS.has(entry.name)) continue;\n const full = join(dir, entry.name);\n if (entry.isDirectory()) {\n walkMdFiles(full, results, cap, depth + 1);\n } else if (entry.isFile() && entry.name.endsWith(\".md\")) {\n results.push(full);\n }\n }\n } catch { /* skip unreadable */ }\n return results;\n}\n\nfunction walkContentFiles(rootPath: string): string[] {\n if (!existsSync(rootPath)) return [];\n const results: string[] = [];\n try {\n for (const entry of readdirSync(rootPath, { withFileTypes: true })) {\n if (results.length >= MAX_FILES_PER_PROJECT) break;\n if (entry.isSymbolicLink()) continue;\n if (ROOT_SCAN_SKIP_DIRS.has(entry.name)) continue;\n if (CONTENT_SCAN_SKIP_DIRS.has(entry.name)) continue;\n const full = join(rootPath, entry.name);\n if (entry.isDirectory()) {\n walkMdFiles(full, results, MAX_FILES_PER_PROJECT);\n } else if (entry.isFile() && entry.name.endsWith(\".md\")) {\n if (entry.name !== \"MEMORY.md\") results.push(full);\n }\n }\n } catch { /* skip */ }\n return results;\n}\n\nfunction isPathTooBroadForContentScan(rootPath: string): boolean {\n const normalized = normalize(rootPath);\n const home = homedir();\n if (home.startsWith(normalized) || normalized === \"/\") return true;\n if (normalized.startsWith(home)) {\n const rel = normalized.slice(home.length).replace(/^\\//, \"\");\n const depth = rel ? 
rel.split(\"/\").length : 0;\n if (depth === 0) return true;\n }\n if (existsSync(join(normalized, \".git\"))) return true;\n return false;\n}\n\nfunction yieldToEventLoop(): Promise<void> {\n return new Promise((resolve) => setImmediate(resolve));\n}\n\n// ---------------------------------------------------------------------------\n// File indexing via StorageBackend\n// ---------------------------------------------------------------------------\n\n/**\n * Index a single file through the StorageBackend interface.\n * Returns true if the file was re-indexed (changed or new), false if skipped.\n */\nexport async function indexFileWithBackend(\n backend: StorageBackend,\n projectId: number,\n rootPath: string,\n relativePath: string,\n source: string,\n tier: string,\n): Promise<boolean> {\n const absPath = join(rootPath, relativePath);\n\n let content: string;\n let stat: ReturnType<typeof statSync>;\n try {\n content = readFileSync(absPath, \"utf8\");\n stat = statSync(absPath);\n } catch {\n return false;\n }\n\n const hash = sha256File(content);\n const mtime = Math.floor(stat.mtimeMs);\n const size = stat.size;\n\n // Change detection\n const existingHash = await backend.getFileHash(projectId, relativePath);\n if (existingHash === hash) return false;\n\n // Delete old chunks\n await backend.deleteChunksForFile(projectId, relativePath);\n\n // Chunk the content\n const rawChunks = chunkMarkdown(content);\n const updatedAt = Date.now();\n\n const chunks: ChunkRow[] = rawChunks.map((c, i) => ({\n id: chunkId(projectId, relativePath, i, c.startLine, c.endLine),\n projectId,\n source,\n tier,\n path: relativePath,\n startLine: c.startLine,\n endLine: c.endLine,\n hash: c.hash,\n text: c.text,\n updatedAt,\n embedding: null,\n }));\n\n // Insert chunks + update file record\n await backend.insertChunks(chunks);\n await backend.upsertFile({ projectId, path: relativePath, source, tier, hash, mtime, size });\n\n return true;\n}\n\n// ---------------------------------------------------------------------------\n// Project-level indexing via StorageBackend\n// ---------------------------------------------------------------------------\n\nexport async function indexProjectWithBackend(\n backend: StorageBackend,\n projectId: number,\n rootPath: string,\n claudeNotesDir?: string | null,\n): Promise<IndexResult> {\n const result: IndexResult = { filesProcessed: 0, chunksCreated: 0, filesSkipped: 0 };\n\n const filesToIndex: Array<{ absPath: string; rootBase: string; source: string; tier: string }> = [];\n\n const rootMemoryMd = join(rootPath, \"MEMORY.md\");\n if (existsSync(rootMemoryMd)) {\n filesToIndex.push({ absPath: rootMemoryMd, rootBase: rootPath, source: \"memory\", tier: \"evergreen\" });\n }\n\n const memoryDir = join(rootPath, \"memory\");\n for (const absPath of walkMdFiles(memoryDir)) {\n const relPath = relative(rootPath, absPath);\n const tier = detectTier(relPath);\n filesToIndex.push({ absPath, rootBase: rootPath, source: \"memory\", tier });\n }\n\n const notesDir = join(rootPath, \"Notes\");\n for (const absPath of walkMdFiles(notesDir)) {\n filesToIndex.push({ absPath, rootBase: rootPath, source: \"notes\", tier: \"session\" });\n }\n\n // Synthetic session-title chunks: parse titles from Notes filenames and insert\n // as high-signal chunks so session names are searchable via BM25 and embeddings.\n {\n const updatedAt = Date.now();\n for (const absPath of walkMdFiles(notesDir)) {\n const fileName = basename(absPath);\n const text = parseSessionTitleChunk(fileName);\n if (!text) 
continue;\n const relPath = relative(rootPath, absPath);\n const syntheticPath = `${relPath}::title`;\n const id = chunkId(projectId, syntheticPath, 0, 0, 0);\n const hash = sha256File(text);\n const titleChunk: import(\"../storage/interface.js\").ChunkRow = {\n id, projectId, source: \"notes\", tier: \"session\",\n path: syntheticPath, startLine: 0, endLine: 0,\n hash, text, updatedAt, embedding: null,\n };\n await backend.insertChunks([titleChunk]);\n }\n }\n\n if (!isPathTooBroadForContentScan(rootPath)) {\n for (const absPath of walkContentFiles(rootPath)) {\n filesToIndex.push({ absPath, rootBase: rootPath, source: \"content\", tier: \"topic\" });\n }\n }\n\n if (claudeNotesDir && claudeNotesDir !== notesDir) {\n for (const absPath of walkMdFiles(claudeNotesDir)) {\n filesToIndex.push({ absPath, rootBase: claudeNotesDir, source: \"notes\", tier: \"session\" });\n }\n\n // Synthetic title chunks for claude notes dir\n {\n const updatedAt = Date.now();\n for (const absPath of walkMdFiles(claudeNotesDir)) {\n const fileName = basename(absPath);\n const text = parseSessionTitleChunk(fileName);\n if (!text) continue;\n const relPath = relative(claudeNotesDir, absPath);\n const syntheticPath = `${relPath}::title`;\n const id = chunkId(projectId, syntheticPath, 0, 0, 0);\n const hash = sha256File(text);\n const titleChunk: import(\"../storage/interface.js\").ChunkRow = {\n id, projectId, source: \"notes\", tier: \"session\",\n path: syntheticPath, startLine: 0, endLine: 0,\n hash, text, updatedAt, embedding: null,\n };\n await backend.insertChunks([titleChunk]);\n }\n }\n\n if (claudeNotesDir.endsWith(\"/Notes\")) {\n const claudeProjectDir = claudeNotesDir.slice(0, -\"/Notes\".length);\n const claudeMemoryMd = join(claudeProjectDir, \"MEMORY.md\");\n if (existsSync(claudeMemoryMd)) {\n filesToIndex.push({ absPath: claudeMemoryMd, rootBase: claudeProjectDir, source: \"memory\", tier: \"evergreen\" });\n }\n const claudeMemoryDir = join(claudeProjectDir, \"memory\");\n for (const absPath of walkMdFiles(claudeMemoryDir)) {\n const relPath = relative(claudeProjectDir, absPath);\n const tier = detectTier(relPath);\n filesToIndex.push({ absPath, rootBase: claudeProjectDir, source: \"memory\", tier });\n }\n }\n }\n\n await yieldToEventLoop();\n\n let filesSinceYield = 0;\n\n for (const { absPath, rootBase, source, tier } of filesToIndex) {\n if (filesSinceYield >= INDEX_YIELD_EVERY) {\n await yieldToEventLoop();\n filesSinceYield = 0;\n }\n filesSinceYield++;\n\n const relPath = relative(rootBase, absPath);\n const changed = await indexFileWithBackend(backend, projectId, rootBase, relPath, source, tier);\n\n if (changed) {\n // Count chunks — we know we just inserted them, count from the chunk IDs\n const ids = await backend.getChunkIds(projectId, relPath);\n result.filesProcessed++;\n result.chunksCreated += ids.length;\n } else {\n result.filesSkipped++;\n }\n }\n\n return result;\n}\n\n// ---------------------------------------------------------------------------\n// Embedding generation via StorageBackend\n// ---------------------------------------------------------------------------\n\nconst EMBED_BATCH_SIZE = 50;\nconst EMBED_YIELD_EVERY = 10;\n\n/**\n * Generate and store embeddings for all unembedded chunks via the StorageBackend.\n *\n * Processes chunks in batches of EMBED_BATCH_SIZE, yielding to the event loop\n * every EMBED_YIELD_EVERY chunks to avoid blocking IPC calls from MCP shims.\n *\n * The optional `shouldStop` callback is checked between every batch. 
When it\n * returns true the embed loop exits early so the caller (e.g. the daemon\n * shutdown handler) can close the pool without racing against active queries.\n *\n * Returns the number of newly embedded chunks.\n */\nexport async function embedChunksWithBackend(\n backend: StorageBackend,\n shouldStop?: () => boolean,\n): Promise<number> {\n const { generateEmbedding, serializeEmbedding } = await import(\"./embeddings.js\");\n\n const rows = await backend.getUnembeddedChunkIds();\n if (rows.length === 0) return 0;\n\n const total = rows.length;\n let embedded = 0;\n\n for (let i = 0; i < rows.length; i += EMBED_BATCH_SIZE) {\n // Check cancellation between every batch before touching the pool again\n if (shouldStop?.()) {\n process.stderr.write(\n `[pai-daemon] Embed pass cancelled after ${embedded}/${total} chunks (shutdown requested)\\n`\n );\n break;\n }\n\n const batch = rows.slice(i, i + EMBED_BATCH_SIZE);\n\n for (let j = 0; j < batch.length; j++) {\n const { id, text } = batch[j];\n\n // Yield to the event loop periodically to keep IPC responsive\n if ((embedded + j) % EMBED_YIELD_EVERY === 0) {\n await yieldToEventLoop();\n }\n\n const vec = await generateEmbedding(text);\n const blob = serializeEmbedding(vec);\n await backend.updateEmbedding(id, blob);\n }\n\n embedded += batch.length;\n process.stderr.write(\n `[pai-daemon] Embedded ${embedded}/${total} chunks\\n`\n );\n }\n\n return embedded;\n}\n\n// ---------------------------------------------------------------------------\n// Global indexing via StorageBackend\n// ---------------------------------------------------------------------------\n\nexport async function indexAllWithBackend(\n backend: StorageBackend,\n registryDb: Database,\n): Promise<{ projects: number; result: IndexResult }> {\n const projects = registryDb\n .prepare(\"SELECT id, root_path, claude_notes_dir FROM projects WHERE status = 'active'\")\n .all() as Array<{ id: number; root_path: string; claude_notes_dir: string | null }>;\n\n const totals: IndexResult = { filesProcessed: 0, chunksCreated: 0, filesSkipped: 0 };\n\n for (const project of projects) {\n await yieldToEventLoop();\n const r = await indexProjectWithBackend(backend, project.id, project.root_path, project.claude_notes_dir);\n totals.filesProcessed += r.filesProcessed;\n totals.chunksCreated += r.chunksCreated;\n totals.filesSkipped += r.filesSkipped;\n }\n\n return { projects: projects.length, result: totals 
};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAmBA,MAAM,mBAAmB;;;;;;;AAQzB,SAAgB,uBAAuB,UAAiC;CACtE,MAAM,IAAI,iBAAiB,KAAK,SAAS;AACzC,KAAI,CAAC,EAAG,QAAO;CACf,MAAM,GAAG,KAAK,MAAM,SAAS;AAC7B,QAAO,YAAY,IAAI,GAAG,KAAK,IAAI;;AAarC,MAAM,wBAAwB;AAC9B,MAAM,iBAAiB;AACvB,MAAM,oBAAoB;;;;;;AAO1B,MAAM,mBAAmB,IAAI,IAAI;CAE/B;CAEA;CACA;CACA;CAEA;CACA;CACA;CACA;CACA;CAEA;CACA;CACA;CAEA;CACA;CACD,CAAC;AAEF,MAAM,sBAAsB,IAAI,IAAI;CAClC;CAAU;CAAS;CAAW;CAC9B,GAAG;CACJ,CAAC;AAEF,MAAM,yBAAyB,IAAI,IAAI;CACrC;CAAW;CAAgB;CAAS;CAAU;CAAY;CAC1D;CAAa;CAAU;CACvB,GAAG;CACJ,CAAC;AAMF,SAAS,WAAW,SAAyB;AAC3C,QAAO,WAAW,SAAS,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;;AAG3D,SAAS,QACP,WACA,MACA,YACA,WACA,SACQ;AACR,QAAO,WAAW,SAAS,CACxB,OAAO,GAAG,UAAU,GAAG,KAAK,GAAG,WAAW,GAAG,UAAU,GAAG,UAAU,CACpE,OAAO,MAAM;;AAGlB,SAAS,YACP,KACA,KACA,MAAM,uBACN,QAAQ,GACE;CACV,MAAM,UAAU,OAAO,EAAE;AACzB,KAAI,CAAC,WAAW,IAAI,CAAE,QAAO;AAC7B,KAAI,QAAQ,UAAU,IAAK,QAAO;AAClC,KAAI,QAAQ,eAAgB,QAAO;AACnC,KAAI;AACF,OAAK,MAAM,SAAS,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC,EAAE;AAC7D,OAAI,QAAQ,UAAU,IAAK;AAC3B,OAAI,MAAM,gBAAgB,CAAE;AAE5B,OAAI,iBAAiB,IAAI,MAAM,KAAK,CAAE;GACtC,MAAM,OAAO,KAAK,KAAK,MAAM,KAAK;AAClC,OAAI,MAAM,aAAa,CACrB,aAAY,MAAM,SAAS,KAAK,QAAQ,EAAE;YACjC,MAAM,QAAQ,IAAI,MAAM,KAAK,SAAS,MAAM,CACrD,SAAQ,KAAK,KAAK;;SAGhB;AACR,QAAO;;AAGT,SAAS,iBAAiB,UAA4B;AACpD,KAAI,CAAC,WAAW,SAAS,CAAE,QAAO,EAAE;CACpC,MAAM,UAAoB,EAAE;AAC5B,KAAI;AACF,OAAK,MAAM,SAAS,YAAY,UAAU,EAAE,eAAe,MAAM,CAAC,EAAE;AAClE,OAAI,QAAQ,UAAU,sBAAuB;AAC7C,OAAI,MAAM,gBAAgB,CAAE;AAC5B,OAAI,oBAAoB,IAAI,MAAM,KAAK,CAAE;AACzC,OAAI,uBAAuB,IAAI,MAAM,KAAK,CAAE;GAC5C,MAAM,OAAO,KAAK,UAAU,MAAM,KAAK;AACvC,OAAI,MAAM,aAAa,CACrB,aAAY,MAAM,SAAS,sBAAsB;YACxC,MAAM,QAAQ,IAAI,MAAM,KAAK,SAAS,MAAM,EACrD;QAAI,MAAM,SAAS,YAAa,SAAQ,KAAK,KAAK;;;SAGhD;AACR,QAAO;;AAGT,SAAS,6BAA6B,UAA2B;CAC/D,MAAM,aAAa,UAAU,SAAS;CACtC,MAAM,OAAO,SAAS;AACtB,KAAI,KAAK,WAAW,WAAW,IAAI,eAAe,IAAK,QAAO;AAC9D,KAAI,WAAW,WAAW,KAAK,EAAE;EAC/B,MAAM,MAAM,WAAW,MAAM,KAAK,OAAO,CAAC,QAAQ,OAAO,GAAG;AAE5D,OADc,MAAM,IAAI,MAAM,IAAI,CAAC,SAAS,OAC9B,EAAG,QAAO;;AAE1B,KAAI,WAAW,KAAK,YAAY,OAAO,CAAC,CAAE,QAAO;AACjD,QAAO;;AAGT,SAAS,mBAAkC;AACzC,QAAO,IAAI,SAAS,YAAY,aAAa,QAAQ,CAAC;;;;;;AAWxD,eAAsB,qBACpB,SACA,WACA,UACA,cACA,QACA,MACkB;CAClB,MAAM,UAAU,KAAK,UAAU,aAAa;CAE5C,IAAI;CACJ,IAAI;AACJ,KAAI;AACF,YAAU,aAAa,SAAS,OAAO;AACvC,SAAO,SAAS,QAAQ;SAClB;AACN,SAAO;;CAGT,MAAM,OAAO,WAAW,QAAQ;CAChC,MAAM,QAAQ,KAAK,MAAM,KAAK,QAAQ;CACtC,MAAM,OAAO,KAAK;AAIlB,KADqB,MAAM,QAAQ,YAAY,WAAW,aAAa,KAClD,KAAM,QAAO;AAGlC,OAAM,QAAQ,oBAAoB,WAAW,aAAa;CAG1D,MAAM,YAAY,cAAc,QAAQ;CACxC,MAAM,YAAY,KAAK,KAAK;CAE5B,MAAM,SAAqB,UAAU,KAAK,GAAG,OAAO;EAClD,IAAI,QAAQ,WAAW,cAAc,GAAG,EAAE,WAAW,EAAE,QAAQ;EAC/D;EACA;EACA;EACA,MAAM;EACN,WAAW,EAAE;EACb,SAAS,EAAE;EACX,MAAM,EAAE;EACR,MAAM,EAAE;EACR;EACA,WAAW;EACZ,EAAE;AAGH,OAAM,QAAQ,aAAa,OAAO;AAClC,OAAM,QAAQ,WAAW;EAAE;EAAW,MAAM;EAAc;EAAQ;EAAM;EAAM;EAAO;EAAM,CAAC;AAE5F,QAAO;;AAOT,eAAsB,wBACpB,SACA,WACA,UACA,gBACsB;CACtB,MAAM,SAAsB;EAAE,gBAAgB;EAAG,eAAe;EAAG,cAAc;EAAG;CAEpF,MAAM,eAA2F,EAAE;CAEnG,MAAM,eAAe,KAAK,UAAU,YAAY;AAChD,KAAI,WAAW,aAAa,CAC1B,cAAa,KAAK;EAAE,SAAS;EAAc,UAAU;EAAU,QAAQ;EAAU,MAAM;EAAa,CAAC;CAGvG,MAAM,YAAY,KAAK,UAAU,SAAS;AAC1C,MAAK,MAAM,WAAW,YAAY,UAAU,EAAE;EAE5C,MAAM,OAAO,WADG,SAAS,UAAU,QAAQ,CACX;AAChC,eAAa,KAAK;GAAE;GAAS,UAAU;GAAU,QAAQ;GAAU;GAAM,CAAC;;CAG5E,MAAM,WAAW,KAAK,UAAU,QAAQ;AACxC,MAAK,MAAM,WAAW,YAAY,SAAS,CACzC,cAAa,KAAK;EAAE;EAAS,UAAU;EAAU,QAAQ;EAAS,MAAM;EAAW,CAAC;CAKtF;EACE,MAAM,YAAY,KAAK,KAAK;AAC5B,OAAK,MAAM,WAAW,YAAY,SAAS,EAAE;GAE3C,MAAM,OAAO,uBADI,SAAS,QAAQ,CACW;AAC7C,OAAI,CAAC,KAAM;GAEX,MAAM,gBAAgB,GADN,SAAS,UAAU,QAAQ,CACV;GAGjC,MAAM,aAAyD;IAC7D,IAHS,QAAQ,WAAW,eAAe,GAAG,GAAG,EA
AE;IAG/C;IAAW,QAAQ;IAAS,MAAM;IACtC,MAAM;IAAe,WAAW;IAAG,SAAS;IAC5C,MAJW,WAAW,KAAK;IAIrB;IAAM;IAAW,WAAW;IACnC;AACD,SAAM,QAAQ,aAAa,CAAC,WAAW,CAAC;;;AAI5C,KAAI,CAAC,6BAA6B,SAAS,CACzC,MAAK,MAAM,WAAW,iBAAiB,SAAS,CAC9C,cAAa,KAAK;EAAE;EAAS,UAAU;EAAU,QAAQ;EAAW,MAAM;EAAS,CAAC;AAIxF,KAAI,kBAAkB,mBAAmB,UAAU;AACjD,OAAK,MAAM,WAAW,YAAY,eAAe,CAC/C,cAAa,KAAK;GAAE;GAAS,UAAU;GAAgB,QAAQ;GAAS,MAAM;GAAW,CAAC;EAI5F;GACE,MAAM,YAAY,KAAK,KAAK;AAC5B,QAAK,MAAM,WAAW,YAAY,eAAe,EAAE;IAEjD,MAAM,OAAO,uBADI,SAAS,QAAQ,CACW;AAC7C,QAAI,CAAC,KAAM;IAEX,MAAM,gBAAgB,GADN,SAAS,gBAAgB,QAAQ,CAChB;IAGjC,MAAM,aAAyD;KAC7D,IAHS,QAAQ,WAAW,eAAe,GAAG,GAAG,EAAE;KAG/C;KAAW,QAAQ;KAAS,MAAM;KACtC,MAAM;KAAe,WAAW;KAAG,SAAS;KAC5C,MAJW,WAAW,KAAK;KAIrB;KAAM;KAAW,WAAW;KACnC;AACD,UAAM,QAAQ,aAAa,CAAC,WAAW,CAAC;;;AAI5C,MAAI,eAAe,SAAS,SAAS,EAAE;GACrC,MAAM,mBAAmB,eAAe,MAAM,GAAG,GAAiB;GAClE,MAAM,iBAAiB,KAAK,kBAAkB,YAAY;AAC1D,OAAI,WAAW,eAAe,CAC5B,cAAa,KAAK;IAAE,SAAS;IAAgB,UAAU;IAAkB,QAAQ;IAAU,MAAM;IAAa,CAAC;GAEjH,MAAM,kBAAkB,KAAK,kBAAkB,SAAS;AACxD,QAAK,MAAM,WAAW,YAAY,gBAAgB,EAAE;IAElD,MAAM,OAAO,WADG,SAAS,kBAAkB,QAAQ,CACnB;AAChC,iBAAa,KAAK;KAAE;KAAS,UAAU;KAAkB,QAAQ;KAAU;KAAM,CAAC;;;;AAKxF,OAAM,kBAAkB;CAExB,IAAI,kBAAkB;AAEtB,MAAK,MAAM,EAAE,SAAS,UAAU,QAAQ,UAAU,cAAc;AAC9D,MAAI,mBAAmB,mBAAmB;AACxC,SAAM,kBAAkB;AACxB,qBAAkB;;AAEpB;EAEA,MAAM,UAAU,SAAS,UAAU,QAAQ;AAG3C,MAFgB,MAAM,qBAAqB,SAAS,WAAW,UAAU,SAAS,QAAQ,KAAK,EAElF;GAEX,MAAM,MAAM,MAAM,QAAQ,YAAY,WAAW,QAAQ;AACzD,UAAO;AACP,UAAO,iBAAiB,IAAI;QAE5B,QAAO;;AAIX,QAAO;;AAOT,MAAM,mBAAmB;AACzB,MAAM,oBAAoB;;;;;;;;;;;;;AAc1B,eAAsB,uBACpB,SACA,YACiB;CACjB,MAAM,EAAE,mBAAmB,uBAAuB,MAAM,OAAO;CAE/D,MAAM,OAAO,MAAM,QAAQ,uBAAuB;AAClD,KAAI,KAAK,WAAW,EAAG,QAAO;CAE9B,MAAM,QAAQ,KAAK;CACnB,IAAI,WAAW;AAEf,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK,kBAAkB;AAEtD,MAAI,cAAc,EAAE;AAClB,WAAQ,OAAO,MACb,2CAA2C,SAAS,GAAG,MAAM,gCAC9D;AACD;;EAGF,MAAM,QAAQ,KAAK,MAAM,GAAG,IAAI,iBAAiB;AAEjD,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;GACrC,MAAM,EAAE,IAAI,SAAS,MAAM;AAG3B,QAAK,WAAW,KAAK,sBAAsB,EACzC,OAAM,kBAAkB;GAI1B,MAAM,OAAO,mBADD,MAAM,kBAAkB,KAAK,CACL;AACpC,SAAM,QAAQ,gBAAgB,IAAI,KAAK;;AAGzC,cAAY,MAAM;AAClB,UAAQ,OAAO,MACb,yBAAyB,SAAS,GAAG,MAAM,WAC5C;;AAGH,QAAO;;AAOT,eAAsB,oBACpB,SACA,YACoD;CACpD,MAAM,WAAW,WACd,QAAQ,+EAA+E,CACvF,KAAK;CAER,MAAM,SAAsB;EAAE,gBAAgB;EAAG,eAAe;EAAG,cAAc;EAAG;AAEpF,MAAK,MAAM,WAAW,UAAU;AAC9B,QAAM,kBAAkB;EACxB,MAAM,IAAI,MAAM,wBAAwB,SAAS,QAAQ,IAAI,QAAQ,WAAW,QAAQ,iBAAiB;AACzG,SAAO,kBAAkB,EAAE;AAC3B,SAAO,iBAAiB,EAAE;AAC1B,SAAO,gBAAgB,EAAE;;AAG3B,QAAO;EAAE,UAAU,SAAS;EAAQ,QAAQ;EAAQ"}
@@ -153,4 +153,4 @@ var PaiClient = class {
 
  //#endregion
  export { ipc_client_exports as n, PaiClient as t };
- //# sourceMappingURL=ipc-client-DPy7s3iu.mjs.map
+ //# sourceMappingURL=ipc-client-CLt2fNlC.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"ipc-client-CLt2fNlC.mjs","names":[],"sources":["../src/daemon/ipc-client.ts"],"sourcesContent":["/**\n * ipc-client.ts — IPC client for the PAI Daemon MCP shim\n *\n * PaiClient connects to the Unix Domain Socket served by daemon.ts\n * and forwards tool calls to the daemon. Uses a fresh socket connection per\n * call (connect → write JSON + newline → read response line → parse → destroy).\n * This keeps the client stateless and avoids connection management complexity.\n *\n * Adapted from the Coogle ipc-client pattern (which was adapted from Whazaa).\n */\n\nimport { connect, Socket } from \"node:net\";\nimport { randomUUID } from \"node:crypto\";\nimport type {\n NotificationConfig,\n NotificationMode,\n NotificationEvent,\n SendResult,\n} from \"../notifications/types.js\";\nimport type { TopicCheckParams, TopicCheckResult } from \"../topics/detector.js\";\nimport type { AutoRouteResult } from \"../session/auto-route.js\";\n\n// ---------------------------------------------------------------------------\n// Protocol types\n// ---------------------------------------------------------------------------\n\n/** Default socket path */\nexport const IPC_SOCKET_PATH = \"/tmp/pai.sock\";\n\n/** Timeout for IPC calls (60 seconds) */\nconst IPC_TIMEOUT_MS = 60_000;\n\ninterface IpcRequest {\n id: string;\n method: string;\n params: Record<string, unknown>;\n}\n\ninterface IpcResponse {\n id: string;\n ok: boolean;\n result?: unknown;\n error?: string;\n}\n\n// ---------------------------------------------------------------------------\n// Client\n// ---------------------------------------------------------------------------\n\n/**\n * Thin IPC proxy that forwards tool calls to pai-daemon over a Unix\n * Domain Socket. Each call opens a fresh connection, sends one NDJSON request,\n * reads the response, and closes. Stateless and simple.\n */\nexport class PaiClient {\n private readonly socketPath: string;\n\n constructor(socketPath?: string) {\n this.socketPath = socketPath ?? 
IPC_SOCKET_PATH;\n }\n\n /**\n * Call a PAI tool by name with the given params.\n * Returns the tool result or throws on error.\n */\n async call(method: string, params: Record<string, unknown>): Promise<unknown> {\n return this.send(method, params);\n }\n\n /**\n * Check daemon status.\n */\n async status(): Promise<Record<string, unknown>> {\n const result = await this.send(\"status\", {});\n return result as Record<string, unknown>;\n }\n\n /**\n * Trigger an immediate index run.\n */\n async triggerIndex(): Promise<void> {\n await this.send(\"index_now\", {});\n }\n\n // -------------------------------------------------------------------------\n // Notification methods\n // -------------------------------------------------------------------------\n\n /**\n * Get the current notification config from the daemon.\n */\n async getNotificationConfig(): Promise<{\n config: NotificationConfig;\n activeChannels: string[];\n }> {\n const result = await this.send(\"notification_get_config\", {});\n return result as { config: NotificationConfig; activeChannels: string[] };\n }\n\n /**\n * Patch the notification config on the daemon (and persist to disk).\n */\n async setNotificationConfig(patch: {\n mode?: NotificationMode;\n channels?: Partial<NotificationConfig[\"channels\"]>;\n routing?: Partial<NotificationConfig[\"routing\"]>;\n }): Promise<{ config: NotificationConfig }> {\n const result = await this.send(\"notification_set_config\", patch as Record<string, unknown>);\n return result as { config: NotificationConfig };\n }\n\n /**\n * Send a notification via the daemon (routes to configured channels).\n */\n async sendNotification(payload: {\n event: NotificationEvent;\n message: string;\n title?: string;\n }): Promise<SendResult> {\n const result = await this.send(\"notification_send\", payload as Record<string, unknown>);\n return result as SendResult;\n }\n\n // -------------------------------------------------------------------------\n // Topic detection methods\n // -------------------------------------------------------------------------\n\n /**\n * Check whether the provided context text has drifted to a different project\n * than the session's current routing.\n */\n async topicCheck(params: TopicCheckParams): Promise<TopicCheckResult> {\n const result = await this.send(\"topic_check\", params as unknown as Record<string, unknown>);\n return result as TopicCheckResult;\n }\n\n // -------------------------------------------------------------------------\n // Session routing methods\n // -------------------------------------------------------------------------\n\n /**\n * Automatically detect which project a session belongs to.\n * Tries path match, PAI.md marker walk, then topic detection (if context given).\n */\n async sessionAutoRoute(params: {\n cwd?: string;\n context?: string;\n }): Promise<AutoRouteResult | null> {\n // session_auto_route returns a ToolResult (content array). Extract the text\n // and parse JSON from it.\n const result = await this.send(\"session_auto_route\", params as Record<string, unknown>);\n const toolResult = result as { content?: Array<{ text: string }>; isError?: boolean };\n if (toolResult.isError) return null;\n const text = toolResult.content?.[0]?.text ?? 
\"\";\n // Text is either JSON (on match) or a human-readable \"no match\" message\n try {\n return JSON.parse(text) as AutoRouteResult;\n } catch {\n return null;\n }\n }\n\n // -------------------------------------------------------------------------\n // Internal transport\n // -------------------------------------------------------------------------\n\n /**\n * Send a single IPC request and wait for the response.\n * Opens a new socket connection per call — simple and reliable.\n */\n private send(\n method: string,\n params: Record<string, unknown>\n ): Promise<unknown> {\n const socketPath = this.socketPath;\n\n return new Promise((resolve, reject) => {\n let socket: Socket | null = null;\n let done = false;\n let buffer = \"\";\n let timer: ReturnType<typeof setTimeout> | null = null;\n\n function finish(error: Error | null, value?: unknown): void {\n if (done) return;\n done = true;\n if (timer !== null) {\n clearTimeout(timer);\n timer = null;\n }\n try {\n socket?.destroy();\n } catch {\n // ignore\n }\n if (error) {\n reject(error);\n } else {\n resolve(value);\n }\n }\n\n socket = connect(socketPath, () => {\n const request: IpcRequest = {\n id: randomUUID(),\n method,\n params,\n };\n socket!.write(JSON.stringify(request) + \"\\n\");\n });\n\n socket.on(\"data\", (chunk: Buffer) => {\n buffer += chunk.toString();\n const nl = buffer.indexOf(\"\\n\");\n if (nl === -1) return;\n\n const line = buffer.slice(0, nl);\n buffer = buffer.slice(nl + 1);\n\n let response: IpcResponse;\n try {\n response = JSON.parse(line) as IpcResponse;\n } catch {\n finish(new Error(`IPC parse error: ${line}`));\n return;\n }\n\n if (!response.ok) {\n finish(new Error(response.error ?? \"IPC call failed\"));\n } else {\n finish(null, response.result);\n }\n });\n\n socket.on(\"error\", (e: NodeJS.ErrnoException) => {\n if (e.code === \"ENOENT\" || e.code === \"ECONNREFUSED\") {\n finish(\n new Error(\n \"PAI daemon not running. 
Start it with: pai daemon serve\"\n )\n );\n } else {\n finish(e);\n }\n });\n\n socket.on(\"end\", () => {\n if (!done) {\n finish(new Error(\"IPC connection closed before response\"));\n }\n });\n\n timer = setTimeout(() => {\n finish(new Error(\"IPC call timed out after 60s\"));\n }, IPC_TIMEOUT_MS);\n });\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AA2BA,MAAa,kBAAkB;;AAG/B,MAAM,iBAAiB;;;;;;AAwBvB,IAAa,YAAb,MAAuB;CACrB,AAAiB;CAEjB,YAAY,YAAqB;AAC/B,OAAK,aAAa,cAAc;;;;;;CAOlC,MAAM,KAAK,QAAgB,QAAmD;AAC5E,SAAO,KAAK,KAAK,QAAQ,OAAO;;;;;CAMlC,MAAM,SAA2C;AAE/C,SADe,MAAM,KAAK,KAAK,UAAU,EAAE,CAAC;;;;;CAO9C,MAAM,eAA8B;AAClC,QAAM,KAAK,KAAK,aAAa,EAAE,CAAC;;;;;CAUlC,MAAM,wBAGH;AAED,SADe,MAAM,KAAK,KAAK,2BAA2B,EAAE,CAAC;;;;;CAO/D,MAAM,sBAAsB,OAIgB;AAE1C,SADe,MAAM,KAAK,KAAK,2BAA2B,MAAiC;;;;;CAO7F,MAAM,iBAAiB,SAIC;AAEtB,SADe,MAAM,KAAK,KAAK,qBAAqB,QAAmC;;;;;;CAYzF,MAAM,WAAW,QAAqD;AAEpE,SADe,MAAM,KAAK,KAAK,eAAe,OAA6C;;;;;;CAY7F,MAAM,iBAAiB,QAGa;EAIlC,MAAM,aADS,MAAM,KAAK,KAAK,sBAAsB,OAAkC;AAEvF,MAAI,WAAW,QAAS,QAAO;EAC/B,MAAM,OAAO,WAAW,UAAU,IAAI,QAAQ;AAE9C,MAAI;AACF,UAAO,KAAK,MAAM,KAAK;UACjB;AACN,UAAO;;;;;;;CAYX,AAAQ,KACN,QACA,QACkB;EAClB,MAAM,aAAa,KAAK;AAExB,SAAO,IAAI,SAAS,SAAS,WAAW;GACtC,IAAI,SAAwB;GAC5B,IAAI,OAAO;GACX,IAAI,SAAS;GACb,IAAI,QAA8C;GAElD,SAAS,OAAO,OAAqB,OAAuB;AAC1D,QAAI,KAAM;AACV,WAAO;AACP,QAAI,UAAU,MAAM;AAClB,kBAAa,MAAM;AACnB,aAAQ;;AAEV,QAAI;AACF,aAAQ,SAAS;YACX;AAGR,QAAI,MACF,QAAO,MAAM;QAEb,SAAQ,MAAM;;AAIlB,YAAS,QAAQ,kBAAkB;IACjC,MAAM,UAAsB;KAC1B,IAAI,YAAY;KAChB;KACA;KACD;AACD,WAAQ,MAAM,KAAK,UAAU,QAAQ,GAAG,KAAK;KAC7C;AAEF,UAAO,GAAG,SAAS,UAAkB;AACnC,cAAU,MAAM,UAAU;IAC1B,MAAM,KAAK,OAAO,QAAQ,KAAK;AAC/B,QAAI,OAAO,GAAI;IAEf,MAAM,OAAO,OAAO,MAAM,GAAG,GAAG;AAChC,aAAS,OAAO,MAAM,KAAK,EAAE;IAE7B,IAAI;AACJ,QAAI;AACF,gBAAW,KAAK,MAAM,KAAK;YACrB;AACN,4BAAO,IAAI,MAAM,oBAAoB,OAAO,CAAC;AAC7C;;AAGF,QAAI,CAAC,SAAS,GACZ,QAAO,IAAI,MAAM,SAAS,SAAS,kBAAkB,CAAC;QAEtD,QAAO,MAAM,SAAS,OAAO;KAE/B;AAEF,UAAO,GAAG,UAAU,MAA6B;AAC/C,QAAI,EAAE,SAAS,YAAY,EAAE,SAAS,eACpC,wBACE,IAAI,MACF,0DACD,CACF;QAED,QAAO,EAAE;KAEX;AAEF,UAAO,GAAG,aAAa;AACrB,QAAI,CAAC,KACH,wBAAO,IAAI,MAAM,wCAAwC,CAAC;KAE5D;AAEF,WAAQ,iBAAiB;AACvB,2BAAO,IAAI,MAAM,+BAA+B,CAAC;MAChD,eAAe;IAClB"}
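The `sourcesContent` embedded in this map is also the clearest spec of the daemon's wire protocol: one Unix-socket connection per call, a single NDJSON request, one response line, then destroy. A minimal sketch of a raw call against `/tmp/pai.sock` under those assumptions (it omits PaiClient's 60-second timeout and friendly ENOENT handling; the `status` method name is taken from `PaiClient.status` above):

```ts
import { connect } from "node:net";
import { randomUUID } from "node:crypto";

// One connection per call: write a single NDJSON request, read one response
// line ({ id, ok, result?, error? }), then destroy the socket.
function callDaemon(method: string, params: Record<string, unknown>): Promise<unknown> {
  return new Promise((resolve, reject) => {
    let buffer = "";
    const socket = connect("/tmp/pai.sock", () => {
      socket.write(JSON.stringify({ id: randomUUID(), method, params }) + "\n");
    });
    socket.on("data", (chunk) => {
      buffer += chunk.toString();
      const nl = buffer.indexOf("\n");
      if (nl === -1) return; // wait for a complete line
      socket.destroy();
      const res = JSON.parse(buffer.slice(0, nl)) as {
        ok: boolean; result?: unknown; error?: string;
      };
      if (res.ok) resolve(res.result);
      else reject(new Error(res.error ?? "IPC call failed"));
    });
    socket.on("error", reject);
  });
}

callDaemon("status", {}).then(console.log).catch(console.error);
```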
@@ -1,10 +1,10 @@
  #!/usr/bin/env node
  import { n as openRegistry } from "../db-4lSqLFb8.mjs";
- import { n as openFederation } from "../db-BcDxXVBu.mjs";
- import "../embeddings-mfqv-jFu.mjs";
- import "../search-PjftDxxs.mjs";
- import { a as record, i as number, n as array, o as string, r as boolean, s as unknown, t as _enum } from "../schemas-DjdwzIQ8.mjs";
- import { a as toolProjectHealth, d as toolSessionRoute, f as toolTopicDetect, i as toolProjectDetect, l as toolRegistrySearch, n as toolMemorySearch, o as toolProjectInfo, r as toolNotificationConfig, s as toolProjectList, t as toolMemoryGet, u as toolSessionList } from "../tools-CLK4080-.mjs";
+ import { n as openFederation } from "../db-Dp8VXIMR.mjs";
+ import "../embeddings-DGRAPAYb.mjs";
+ import "../search-GK0ibTJy.mjs";
+ import { a as record, i as number, n as array, o as string, r as boolean, s as unknown, t as _enum } from "../schemas-BY3Pjvje.mjs";
+ import { _ as toolZettelSurprise, a as toolProjectHealth, d as toolSessionRoute, f as toolTopicDetect, g as toolZettelSuggest, h as toolZettelHealth, i as toolProjectDetect, l as toolRegistrySearch, m as toolZettelExplore, n as toolMemorySearch, o as toolProjectInfo, p as toolZettelConverse, r as toolNotificationConfig, s as toolProjectList, t as toolMemoryGet, u as toolSessionList, v as toolZettelThemes } from "../tools-CUg0Lyg-.mjs";
  import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
  import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
 
@@ -349,6 +349,119 @@ async function startMcpServer() {
  isError: result.isError
  };
  });
+ server.tool("zettel_explore", [
+ "Explore the vault's knowledge graph using Luhmann's Folgezettel traversal.",
+ "Follow trains of thought forward, backward, or both from a starting note.",
+ "Classifies links as sequential (same-folder) or associative (cross-folder)."
+ ].join("\n"), {
+ start_note: string().describe("Path or title of the note to start traversal from."),
+ depth: number().int().min(1).max(10).optional().describe("How many link hops to traverse. Default: 3."),
+ direction: _enum([
+ "forward",
+ "backward",
+ "both"
+ ]).optional().describe("Traversal direction: 'forward' (outlinks), 'backward' (backlinks), or 'both'. Default: both."),
+ mode: _enum([
+ "sequential",
+ "associative",
+ "all"
+ ]).optional().describe("Link type filter: 'sequential' (same-folder), 'associative' (cross-folder), or 'all'. Default: all.")
+ }, async (args) => {
+ const result = await toolZettelExplore(getFederationDb(), args);
+ return {
+ content: result.content.map((c) => ({
+ type: c.type,
+ text: c.text
+ })),
+ isError: result.isError
+ };
+ });
+ server.tool("zettel_health", ["Audit the structural health of the Obsidian vault.", "Reports dead links, orphan notes, disconnected clusters, low-connectivity files, and an overall health score."].join("\n"), {
+ scope: _enum([
+ "full",
+ "recent",
+ "project"
+ ]).optional().describe("Audit scope: 'full' (entire vault), 'recent' (recently modified), or 'project' (specific path). Default: full."),
+ project_path: string().optional().describe("Absolute path to the project/folder to audit when scope='project'."),
+ recent_days: number().int().optional().describe("Number of days to look back when scope='recent'. Default: 30."),
+ include: array(_enum([
+ "dead_links",
+ "orphans",
+ "disconnected",
+ "low_connectivity"
+ ])).optional().describe("Specific checks to include. Omit to run all checks.")
+ }, async (args) => {
+ const result = await toolZettelHealth(getFederationDb(), args);
+ return {
+ content: result.content.map((c) => ({
+ type: c.type,
+ text: c.text
+ })),
+ isError: result.isError
+ };
+ });
+ server.tool("zettel_surprise", ["Find surprising connections — notes that are semantically similar to a reference note but far away in the link graph.", "High surprise = unexpected relevance."].join("\n"), {
+ reference_path: string().describe("Path to the reference note to find surprising connections for."),
+ vault_project_id: number().int().describe("Project ID of the vault to search within."),
+ limit: number().int().optional().describe("Maximum number of surprising notes to return. Default: 10."),
+ min_similarity: number().optional().describe("Minimum semantic similarity [0,1] for a note to be considered. Default: 0.5."),
+ min_graph_distance: number().int().optional().describe("Minimum link hops away from the reference note. Default: 3.")
+ }, async (args) => {
+ const result = await toolZettelSurprise(getFederationDb(), args);
+ return {
+ content: result.content.map((c) => ({
+ type: c.type,
+ text: c.text
+ })),
+ isError: result.isError
+ };
+ });
+ server.tool("zettel_suggest", ["Suggest new connections for a note using semantic similarity, shared tags, and graph neighborhood (friends-of-friends)."].join("\n"), {
+ note_path: string().describe("Path to the note to generate link suggestions for."),
+ vault_project_id: number().int().describe("Project ID of the vault to search within."),
+ limit: number().int().optional().describe("Maximum number of suggestions to return. Default: 10."),
+ exclude_linked: boolean().optional().describe("Exclude notes already linked from this note. Default: true.")
+ }, async (args) => {
+ const result = await toolZettelSuggest(getFederationDb(), args);
+ return {
+ content: result.content.map((c) => ({
+ type: c.type,
+ text: c.text
+ })),
+ isError: result.isError
+ };
+ });
+ server.tool("zettel_converse", ["Use the vault as a Zettelkasten communication partner.", "Ask a question, get relevant notes with cross-domain connections and a synthesis prompt for generating new insights."].join("\n"), {
+ question: string().describe("The question or topic to explore in the vault."),
+ vault_project_id: number().int().describe("Project ID of the vault to query."),
+ depth: number().int().optional().describe("How many link hops to follow from seed notes. Default: 2."),
+ limit: number().int().optional().describe("Maximum number of relevant notes to retrieve. Default: 10.")
+ }, async (args) => {
+ const result = await toolZettelConverse(getFederationDb(), args);
+ return {
+ content: result.content.map((c) => ({
+ type: c.type,
+ text: c.text
+ })),
+ isError: result.isError
+ };
+ });
+ server.tool("zettel_themes", ["Detect emerging themes by clustering recent notes with similar embeddings.", "Reveals forming idea clusters and suggests index notes for unlinked clusters."].join("\n"), {
+ vault_project_id: number().int().describe("Project ID of the vault to analyse."),
+ lookback_days: number().int().optional().describe("Number of days of recent notes to cluster. Default: 30."),
+ min_cluster_size: number().int().optional().describe("Minimum notes required to form a theme cluster. Default: 3."),
+ max_themes: number().int().optional().describe("Maximum number of theme clusters to return. Default: 10."),
+ similarity_threshold: number().optional().describe("Minimum cosine similarity to group notes into a cluster [0,1]. Default: 0.7.")
+ }, async (args) => {
+ const result = await toolZettelThemes(getFederationDb(), args);
+ return {
+ content: result.content.map((c) => ({
+ type: c.type,
+ text: c.text
+ })),
+ isError: result.isError
+ };
+ });
  const transport = new StdioServerTransport();
  await server.connect(transport);
  }
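Each `server.tool(...)` registration above exposes a zettel tool over stdio, so any MCP client can invoke it by name with arguments matching the registered schema. A hedged sketch using the MCP SDK's stdio client; the spawn command and the `start_note` value are placeholders, not the package's documented invocation:

```ts
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";

// Placeholder spawn command: point this at whatever launches the PAI MCP entry.
const transport = new StdioClientTransport({ command: "npx", args: ["-y", "@tekmidian/pai", "mcp"] });
const client = new Client({ name: "zettel-demo", version: "0.0.1" });
await client.connect(transport);

// Arguments follow the zettel_explore schema registered above.
const result = await client.callTool({
  name: "zettel_explore",
  arguments: { start_note: "Zettelkasten/Index.md", depth: 3, direction: "both", mode: "all" },
});
console.log(result.content);
```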