@tekmidian/pai 0.9.0 → 0.9.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{auto-route-C-DrW6BL.mjs → auto-route-CruBrTf-.mjs} +2 -2
- package/dist/{auto-route-C-DrW6BL.mjs.map → auto-route-CruBrTf-.mjs.map} +1 -1
- package/dist/cli/index.mjs +345 -23
- package/dist/cli/index.mjs.map +1 -1
- package/dist/{clusters-JIDQW65f.mjs → clusters-CRlPBpq8.mjs} +1 -1
- package/dist/{clusters-JIDQW65f.mjs.map → clusters-CRlPBpq8.mjs.map} +1 -1
- package/dist/daemon/index.mjs +6 -6
- package/dist/{daemon-VIFoKc_z.mjs → daemon-kp49BE7u.mjs} +74 -21
- package/dist/daemon-kp49BE7u.mjs.map +1 -0
- package/dist/{detector-jGBuYQJM.mjs → detector-CNU3zCwP.mjs} +1 -1
- package/dist/{detector-jGBuYQJM.mjs.map → detector-CNU3zCwP.mjs.map} +1 -1
- package/dist/{factory-e0k1HWuc.mjs → factory-DKDPRhAN.mjs} +3 -3
- package/dist/{factory-e0k1HWuc.mjs.map → factory-DKDPRhAN.mjs.map} +1 -1
- package/dist/{indexer-backend-jcJFsmB4.mjs → indexer-backend-CIIlrYh6.mjs} +1 -1
- package/dist/{indexer-backend-jcJFsmB4.mjs.map → indexer-backend-CIIlrYh6.mjs.map} +1 -1
- package/dist/kg-B5ysyRLC.mjs +94 -0
- package/dist/kg-B5ysyRLC.mjs.map +1 -0
- package/dist/kg-extraction-BlGM40q7.mjs +211 -0
- package/dist/kg-extraction-BlGM40q7.mjs.map +1 -0
- package/dist/{latent-ideas-bTJo6Omd.mjs → latent-ideas-DvWBRHsy.mjs} +2 -2
- package/dist/{latent-ideas-bTJo6Omd.mjs.map → latent-ideas-DvWBRHsy.mjs.map} +1 -1
- package/dist/{neighborhood-BYYbEkUJ.mjs → neighborhood-u8ytjmWq.mjs} +1 -1
- package/dist/{neighborhood-BYYbEkUJ.mjs.map → neighborhood-u8ytjmWq.mjs.map} +1 -1
- package/dist/{note-context-BK24bX8Y.mjs → note-context-CG2_e-0W.mjs} +1 -1
- package/dist/{note-context-BK24bX8Y.mjs.map → note-context-CG2_e-0W.mjs.map} +1 -1
- package/dist/{postgres-DvEPooLO.mjs → postgres-BGERehmX.mjs} +1 -1
- package/dist/{postgres-DvEPooLO.mjs.map → postgres-BGERehmX.mjs.map} +1 -1
- package/dist/{query-feedback-Dv43XKHM.mjs → query-feedback-CQSumXDy.mjs} +1 -1
- package/dist/{query-feedback-Dv43XKHM.mjs.map → query-feedback-CQSumXDy.mjs.map} +1 -1
- package/dist/skills/Reconstruct/SKILL.md +36 -0
- package/dist/{sqlite-l-s9xPjY.mjs → sqlite-BJrME_vg.mjs} +1 -1
- package/dist/{sqlite-l-s9xPjY.mjs.map → sqlite-BJrME_vg.mjs.map} +1 -1
- package/dist/{state-C6_vqz7w.mjs → state-BIlxNRUn.mjs} +1 -1
- package/dist/{state-C6_vqz7w.mjs.map → state-BIlxNRUn.mjs.map} +1 -1
- package/dist/{themes-BvYF0W8T.mjs → themes-9jxFn3Rf.mjs} +1 -1
- package/dist/{themes-BvYF0W8T.mjs.map → themes-9jxFn3Rf.mjs.map} +1 -1
- package/dist/{tools-C4SBZHga.mjs → tools-8t7BQrm9.mjs} +13 -104
- package/dist/tools-8t7BQrm9.mjs.map +1 -0
- package/dist/{trace-CRx9lPuc.mjs → trace-C2XrzssW.mjs} +1 -1
- package/dist/{trace-CRx9lPuc.mjs.map → trace-C2XrzssW.mjs.map} +1 -1
- package/dist/{vault-indexer-B-aJpRZC.mjs → vault-indexer-TTCl1QOL.mjs} +1 -1
- package/dist/{vault-indexer-B-aJpRZC.mjs.map → vault-indexer-TTCl1QOL.mjs.map} +1 -1
- package/dist/{zettelkasten-DhBKZQHF.mjs → zettelkasten-BdaMzTGQ.mjs} +3 -3
- package/dist/{zettelkasten-DhBKZQHF.mjs.map → zettelkasten-BdaMzTGQ.mjs.map} +1 -1
- package/package.json +1 -1
- package/dist/daemon-VIFoKc_z.mjs.map +0 -1
- package/dist/indexer-D53l5d1U.mjs +0 -1
- package/dist/tools-C4SBZHga.mjs.map +0 -1
package/dist/{latent-ideas-bTJo6Omd.mjs.map → latent-ideas-DvWBRHsy.mjs.map}
@@ -1 +1 @@
-
{"version":3,"file":"latent-ideas-bTJo6Omd.mjs","names":[],"sources":["../src/graph/latent-ideas.ts"],"sourcesContent":["/**\n * latent-ideas.ts — graph_latent_ideas and idea_materialize endpoint handlers\n *\n * \"Latent ideas\" are recurring themes in the vault that exist as embedding\n * clusters but have NO dedicated note written about them yet. PAI surfaces\n * these by running the same agglomerative clustering used by graph_clusters /\n * zettelThemes and then filtering OUT any cluster whose label is well-matched\n * by an existing note title.\n *\n * The materialize endpoint writes a new Markdown note to the vault filesystem\n * and returns its content so the plugin can open it immediately.\n */\n\nimport { mkdirSync, writeFileSync } from \"node:fs\";\nimport { dirname, join } from \"node:path\";\nimport { TITLE_STOP_WORDS } from \"../utils/stop-words.js\";\nimport type { StorageBackend } from \"../storage/interface.js\";\nimport { zettelThemes } from \"../zettelkasten/themes.js\";\n\n// ---------------------------------------------------------------------------\n// Public param / result types\n// ---------------------------------------------------------------------------\n\nexport interface GraphLatentIdeasParams {\n project_id: number;\n /** Minimum notes in a cluster (default: 3) */\n min_cluster_size?: number;\n /** Cap on returned ideas (default: 15) */\n max_ideas?: number;\n /** How far back to look in days (default: 180) */\n lookback_days?: number;\n /** Cosine similarity clustering threshold (default: 0.65) */\n similarity_threshold?: number;\n}\n\nexport interface LatentIdeaSourceNote {\n vault_path: string;\n title: string;\n /** How strongly this note relates to the theme (0-1) */\n relevance: number;\n}\n\nexport interface LatentIdea {\n id: number;\n /** Auto-generated cluster label from zettelThemes */\n label: string;\n /** Number of notes touching this theme */\n size: number;\n /** 0-1, how likely this is a real coherent idea */\n confidence: number;\n /** Notes that contribute to this theme */\n source_notes: LatentIdeaSourceNote[];\n /** Cleaned-up version of label for a potential note title */\n suggested_title: string;\n /** Most common folder among source notes */\n suggested_folder: string;\n /** Number of distinct session date-folders (e.g. 
\"2026/03\") touching this theme */\n sessions_count: number;\n}\n\nexport interface GraphLatentIdeasResult {\n ideas: LatentIdea[];\n total_clusters_analyzed: number;\n /** How many clusters already have a matching note (excluded from results) */\n materialized_count: number;\n}\n\n// ---------------------------------------------------------------------------\n// Materialize params / result\n// ---------------------------------------------------------------------------\n\nexport interface IdeaMaterializeParams {\n idea_label: string;\n /** User-chosen title for the new note */\n title: string;\n /** Vault-relative folder path where the note should be created */\n folder: string;\n /** Vault-relative paths of the source notes to link from the new note */\n source_paths: string[];\n project_id: number;\n}\n\nexport interface IdeaMaterializeResult {\n /** Vault-relative path of the created note */\n vault_path: string;\n /** Generated markdown content */\n content: string;\n /** Number of wikilinks inserted */\n links_created: number;\n}\n\n// ---------------------------------------------------------------------------\n// Helper: check if a cluster already has a matching note\n// ---------------------------------------------------------------------------\n\n/**\n * Returns true when any existing vault note title closely matches the cluster\n * label — meaning a dedicated note already exists for this topic.\n *\n * Matching strategy (simple, fast, no embeddings needed):\n * 1. Lowercase both sides and split into words.\n * 2. Remove stop words from the label words.\n * 3. If ≥ 60% of the significant label words appear in a note title → match.\n */\n// TITLE_STOP_WORDS imported from utils/stop-words.ts\n\nfunction labelMatchesTitle(label: string, title: string): boolean {\n const labelWords = label\n .toLowerCase()\n .split(/[\\s\\-_/]+/)\n .filter((w) => w.length > 2 && !TITLE_STOP_WORDS.has(w));\n\n if (labelWords.length === 0) return false;\n\n const titleLower = title.toLowerCase();\n const matchCount = labelWords.filter((w) => titleLower.includes(w)).length;\n return matchCount / labelWords.length >= 0.6;\n}\n\n/**\n * Check whether any note indexed in the vault has a title matching the label.\n * Fetches all vault file rows via StorageBackend for efficiency.\n */\nasync function clusterHasMatchingNote(\n backend: StorageBackend,\n label: string,\n notePaths: string[]\n): Promise<boolean> {\n // First check the notes already in the cluster themselves — if any cluster\n // member's title matches the label it IS the index note → materialized.\n const pathSet = new Set(notePaths);\n\n // Fetch all vault files (bounded — vault rarely > 50k notes)\n const rows = await backend.getAllVaultFiles();\n\n for (const row of rows) {\n if (!row.title) continue;\n // Skip notes already counted inside the cluster — they don't count as\n // \"dedicated notes\"; we only skip a cluster if a SEPARATE note exists.\n if (pathSet.has(row.vaultPath)) continue;\n if (labelMatchesTitle(label, row.title)) return true;\n }\n return false;\n}\n\n// ---------------------------------------------------------------------------\n// Helper: generate a clean suggested title\n// ---------------------------------------------------------------------------\n\nfunction toSuggestedTitle(label: string): string {\n // Remove leading/trailing whitespace, capitalize each word, remove stop words\n // that are all-lowercase at the start of the title.\n const words = label\n .trim()\n .split(/\\s+/)\n .map((w, i) => {\n const lower = 
w.toLowerCase();\n // Drop leading stop words (but keep if they're the only word)\n if (i === 0 && TITLE_STOP_WORDS.has(lower) && label.trim().split(/\\s+/).length > 1) {\n return \"\";\n }\n return w.charAt(0).toUpperCase() + w.slice(1);\n })\n .filter(Boolean);\n\n return words.join(\" \") || label;\n}\n\n// ---------------------------------------------------------------------------\n// Helper: find most common folder\n// ---------------------------------------------------------------------------\n\nfunction mostCommonFolder(vaultPaths: string[]): string {\n const counts = new Map<string, number>();\n for (const p of vaultPaths) {\n const parts = p.split(\"/\");\n const folder = parts.length > 1 ? parts.slice(0, -1).join(\"/\") : \"\";\n counts.set(folder, (counts.get(folder) ?? 0) + 1);\n }\n\n let best = \"\";\n let bestCount = 0;\n for (const [folder, count] of counts) {\n if (count > bestCount) {\n bestCount = count;\n best = folder;\n }\n }\n return best;\n}\n\n// ---------------------------------------------------------------------------\n// Helper: count distinct session date-folders\n// ---------------------------------------------------------------------------\n\n/**\n * Heuristic: vault notes are often stored in date-based folders like\n * \"2026/03/15\" or \"Daily/2026-03\". We extract the first numeric path\n * segment that looks like a year (2020-2030) and group by year+month.\n *\n * Falls back to counting distinct top-level folders.\n */\nfunction countDistinctSessions(vaultPaths: string[]): number {\n const sessions = new Set<string>();\n const yearMonthRe = /\\b(202\\d)\\D?(0[1-9]|1[0-2])\\b/;\n\n for (const p of vaultPaths) {\n const m = yearMonthRe.exec(p);\n if (m) {\n sessions.add(`${m[1]}-${m[2]}`);\n } else {\n // Fallback: use top-level folder as a proxy for \"session bucket\"\n const topFolder = p.split(\"/\")[0];\n sessions.add(topFolder);\n }\n }\n return sessions.size;\n}\n\n// ---------------------------------------------------------------------------\n// Helper: calculate confidence score\n// ---------------------------------------------------------------------------\n\n/**\n * Confidence combines:\n * - Cluster size (normalized, capped at 20 for max contribution)\n * - Folder diversity (0-1 already)\n * - Sessions count (normalized, capped at 5)\n *\n * Formula: 0.4 * sizeScore + 0.35 * folderDiversity + 0.25 * sessionScore\n */\nfunction calcConfidence(\n size: number,\n folderDiversity: number,\n sessionsCount: number\n): number {\n const sizeScore = Math.min(size / 20, 1.0);\n const sessionScore = Math.min(sessionsCount / 5, 1.0);\n const raw = 0.4 * sizeScore + 0.35 * folderDiversity + 0.25 * sessionScore;\n return Math.round(raw * 100) / 100;\n}\n\n// ---------------------------------------------------------------------------\n// Main handler: graph_latent_ideas\n// ---------------------------------------------------------------------------\n\nexport async function handleGraphLatentIdeas(\n backend: StorageBackend,\n params: GraphLatentIdeasParams\n): Promise<GraphLatentIdeasResult> {\n const minClusterSize = params.min_cluster_size ?? 3;\n const maxIdeas = params.max_ideas ?? 15;\n const lookbackDays = params.lookback_days ?? 180;\n const similarityThreshold = params.similarity_threshold ?? 
0.65;\n\n const { project_id: vaultProjectId } = params;\n if (!vaultProjectId) {\n throw new Error(\n \"graph_latent_ideas: project_id is required (pass the vault project's numeric ID)\"\n );\n }\n\n // Run the same clustering algorithm used by graph_clusters\n const themeResult = await zettelThemes(backend, {\n vaultProjectId,\n lookbackDays,\n minClusterSize,\n maxThemes: maxIdeas * 3, // Over-fetch — many will be filtered as materialized\n similarityThreshold,\n });\n\n const ideas: LatentIdea[] = [];\n let materializedCount = 0;\n\n for (const theme of themeResult.themes) {\n const notePaths = theme.notes.map((n) => n.path);\n\n // Check if a dedicated note already exists for this theme\n if (await clusterHasMatchingNote(backend, theme.label, notePaths)) {\n materializedCount++;\n continue;\n }\n\n // This is a latent idea — no dedicated note exists yet\n const suggestedFolder = mostCommonFolder(notePaths);\n const sessionsCount = countDistinctSessions(notePaths);\n const confidence = calcConfidence(theme.size, theme.folderDiversity, sessionsCount);\n\n // Build source notes with relevance scores\n // Relevance is approximated by position in cluster (centroid-closest first)\n // zettelThemes returns notes in no guaranteed order; assign uniform relevance\n // decreasing from 1.0 to 0.5 across the list.\n const sourceNotes: LatentIdeaSourceNote[] = theme.notes.map((n, idx) => ({\n vault_path: n.path,\n title: n.title ?? n.path.split(\"/\").pop()?.replace(/\\.md$/i, \"\") ?? n.path,\n relevance: Math.round((1.0 - (idx / Math.max(theme.notes.length - 1, 1)) * 0.5) * 100) / 100,\n }));\n\n ideas.push({\n id: theme.id,\n label: theme.label,\n size: theme.size,\n confidence,\n source_notes: sourceNotes,\n suggested_title: toSuggestedTitle(theme.label),\n suggested_folder: suggestedFolder,\n sessions_count: sessionsCount,\n });\n\n if (ideas.length >= maxIdeas) break;\n }\n\n // Sort by confidence descending\n ideas.sort((a, b) => b.confidence - a.confidence);\n\n return {\n ideas,\n total_clusters_analyzed: themeResult.themes.length + materializedCount,\n materialized_count: materializedCount,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Materialize handler: idea_materialize\n// ---------------------------------------------------------------------------\n\nexport function handleIdeaMaterialize(\n params: IdeaMaterializeParams,\n vaultPath: string\n): IdeaMaterializeResult {\n const { idea_label, title, folder, source_paths } = params;\n\n // Sanitize filename: replace characters illegal in filenames\n const safeTitle = title.replace(/[/\\\\:*?\"<>|]/g, \"-\");\n const fileName = `${safeTitle}.md`;\n\n // Vault-relative path (forward slashes, no leading slash)\n const relFolder = folder.replace(/^\\/+|\\/+$/g, \"\");\n const vault_path = relFolder ? `${relFolder}/${fileName}` : fileName;\n\n // Absolute filesystem path\n const absPath = join(vaultPath, vault_path);\n const absDir = dirname(absPath);\n\n // Build wikilinks from source_paths\n const wikilinks = source_paths\n .map((p) => {\n // Derive a display name: filename without extension\n const name = p.split(\"/\").pop()?.replace(/\\.md$/i, \"\") ?? 
p;\n // Relative wikilink — use just the filename (Obsidian resolves by title)\n return `- [[${name}]]`;\n })\n .join(\"\\n\");\n\n const links_created = source_paths.length;\n\n const content = [\n `# ${title}`,\n \"\",\n `*Materialized from latent idea: \"${idea_label}\"*`,\n `*Sources: ${links_created} notes*`,\n \"\",\n \"## Related Notes\",\n \"\",\n wikilinks || \"*(no source notes)*\",\n \"\",\n \"## Notes\",\n \"\",\n \"<!-- Add your thoughts about this idea here -->\",\n \"\",\n ].join(\"\\n\");\n\n // Write the file (create parent directories as needed)\n mkdirSync(absDir, { recursive: true });\n writeFileSync(absPath, content, \"utf-8\");\n\n return {\n vault_path,\n content,\n links_created,\n };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0GA,SAAS,kBAAkB,OAAe,OAAwB;CAChE,MAAM,aAAa,MAChB,aAAa,CACb,MAAM,YAAY,CAClB,QAAQ,MAAM,EAAE,SAAS,KAAK,CAAC,iBAAiB,IAAI,EAAE,CAAC;AAE1D,KAAI,WAAW,WAAW,EAAG,QAAO;CAEpC,MAAM,aAAa,MAAM,aAAa;AAEtC,QADmB,WAAW,QAAQ,MAAM,WAAW,SAAS,EAAE,CAAC,CAAC,SAChD,WAAW,UAAU;;;;;;AAO3C,eAAe,uBACb,SACA,OACA,WACkB;CAGlB,MAAM,UAAU,IAAI,IAAI,UAAU;CAGlC,MAAM,OAAO,MAAM,QAAQ,kBAAkB;AAE7C,MAAK,MAAM,OAAO,MAAM;AACtB,MAAI,CAAC,IAAI,MAAO;AAGhB,MAAI,QAAQ,IAAI,IAAI,UAAU,CAAE;AAChC,MAAI,kBAAkB,OAAO,IAAI,MAAM,CAAE,QAAO;;AAElD,QAAO;;AAOT,SAAS,iBAAiB,OAAuB;AAgB/C,QAbc,MACX,MAAM,CACN,MAAM,MAAM,CACZ,KAAK,GAAG,MAAM;EACb,MAAM,QAAQ,EAAE,aAAa;AAE7B,MAAI,MAAM,KAAK,iBAAiB,IAAI,MAAM,IAAI,MAAM,MAAM,CAAC,MAAM,MAAM,CAAC,SAAS,EAC/E,QAAO;AAET,SAAO,EAAE,OAAO,EAAE,CAAC,aAAa,GAAG,EAAE,MAAM,EAAE;GAC7C,CACD,OAAO,QAAQ,CAEL,KAAK,IAAI,IAAI;;AAO5B,SAAS,iBAAiB,YAA8B;CACtD,MAAM,yBAAS,IAAI,KAAqB;AACxC,MAAK,MAAM,KAAK,YAAY;EAC1B,MAAM,QAAQ,EAAE,MAAM,IAAI;EAC1B,MAAM,SAAS,MAAM,SAAS,IAAI,MAAM,MAAM,GAAG,GAAG,CAAC,KAAK,IAAI,GAAG;AACjE,SAAO,IAAI,SAAS,OAAO,IAAI,OAAO,IAAI,KAAK,EAAE;;CAGnD,IAAI,OAAO;CACX,IAAI,YAAY;AAChB,MAAK,MAAM,CAAC,QAAQ,UAAU,OAC5B,KAAI,QAAQ,WAAW;AACrB,cAAY;AACZ,SAAO;;AAGX,QAAO;;;;;;;;;AAcT,SAAS,sBAAsB,YAA8B;CAC3D,MAAM,2BAAW,IAAI,KAAa;CAClC,MAAM,cAAc;AAEpB,MAAK,MAAM,KAAK,YAAY;EAC1B,MAAM,IAAI,YAAY,KAAK,EAAE;AAC7B,MAAI,EACF,UAAS,IAAI,GAAG,EAAE,GAAG,GAAG,EAAE,KAAK;OAC1B;GAEL,MAAM,YAAY,EAAE,MAAM,IAAI,CAAC;AAC/B,YAAS,IAAI,UAAU;;;AAG3B,QAAO,SAAS;;;;;;;;;;AAelB,SAAS,eACP,MACA,iBACA,eACQ;CACR,MAAM,YAAY,KAAK,IAAI,OAAO,IAAI,EAAI;CAC1C,MAAM,eAAe,KAAK,IAAI,gBAAgB,GAAG,EAAI;CACrD,MAAM,MAAM,KAAM,YAAY,MAAO,kBAAkB,MAAO;AAC9D,QAAO,KAAK,MAAM,MAAM,IAAI,GAAG;;AAOjC,eAAsB,uBACpB,SACA,QACiC;CACjC,MAAM,iBAAiB,OAAO,oBAAoB;CAClD,MAAM,WAAW,OAAO,aAAa;CACrC,MAAM,eAAe,OAAO,iBAAiB;CAC7C,MAAM,sBAAsB,OAAO,wBAAwB;CAE3D,MAAM,EAAE,YAAY,mBAAmB;AACvC,KAAI,CAAC,eACH,OAAM,IAAI,MACR,mFACD;CAIH,MAAM,cAAc,MAAM,aAAa,SAAS;EAC9C;EACA;EACA;EACA,WAAW,WAAW;EACtB;EACD,CAAC;CAEF,MAAM,QAAsB,EAAE;CAC9B,IAAI,oBAAoB;AAExB,MAAK,MAAM,SAAS,YAAY,QAAQ;EACtC,MAAM,YAAY,MAAM,MAAM,KAAK,MAAM,EAAE,KAAK;AAGhD,MAAI,MAAM,uBAAuB,SAAS,MAAM,OAAO,UAAU,EAAE;AACjE;AACA;;EAIF,MAAM,kBAAkB,iBAAiB,UAAU;EACnD,MAAM,gBAAgB,sBAAsB,UAAU;EACtD,MAAM,aAAa,eAAe,MAAM,MAAM,MAAM,iBAAiB,cAAc;EAMnF,MAAM,cAAsC,MAAM,MAAM,KAAK,GAAG,SAAS;GACvE,YAAY,EAAE;GACd,OAAO,EAAE,SAAS,EAAE,KAAK,MAAM,IAAI,CAAC,KAAK,EAAE,QAAQ,UAAU,GAAG,IAAI,EAAE;GACtE,WAAW,KAAK,OAAO,IAAO,MAAM,KAAK,IAAI,MAAM,MAAM,SAAS,GAAG,EAAE,GAAI,MAAO,IAAI,GAAG;GAC1F,EAAE;AAEH,QAAM,KAAK;GACT,IAAI,MAAM;GACV,OAAO,MAAM;GACb,MAAM,MAAM;GACZ;GACA,cAAc;GACd,iBAAiB,iBAAiB,MAAM,MAAM;GAC9C,kBAAkB;GAClB,gBAAgB;GACjB,CAAC;AAEF,MAAI,MAAM,UAAU,SAAU;;AAIhC,OAAM,MAAM,GAAG,MAAM,EAAE,aAAa,EAAE,WAAW;AAEjD,QAAO;EACL;EACA,yBAAyB,YAAY,OAAO,SAAS;EACrD,oBAAoB;EACrB;;AAOH,SAAgB,sBACd,QACA,WACuB;CACvB,MAAM,EAAE,YAAY,OAAO,QAAQ,iBAAiB;CAIpD,MAAM,WAAW,GADC,MAAM,QAAQ
,iBAAiB,IAAI,CACvB;CAG9B,MAAM,YAAY,OAAO,QAAQ,cAAc,GAAG;CAClD,MAAM,aAAa,YAAY,GAAG,UAAU,GAAG,aAAa;CAG5D,MAAM,UAAU,KAAK,WAAW,WAAW;CAC3C,MAAM,SAAS,QAAQ,QAAQ;CAG/B,MAAM,YAAY,aACf,KAAK,MAAM;AAIV,SAAO,OAFM,EAAE,MAAM,IAAI,CAAC,KAAK,EAAE,QAAQ,UAAU,GAAG,IAAI,EAEvC;GACnB,CACD,KAAK,KAAK;CAEb,MAAM,gBAAgB,aAAa;CAEnC,MAAM,UAAU;EACd,KAAK;EACL;EACA,oCAAoC,WAAW;EAC/C,aAAa,cAAc;EAC3B;EACA;EACA;EACA,aAAa;EACb;EACA;EACA;EACA;EACA;EACD,CAAC,KAAK,KAAK;AAGZ,WAAU,QAAQ,EAAE,WAAW,MAAM,CAAC;AACtC,eAAc,SAAS,SAAS,QAAQ;AAExC,QAAO;EACL;EACA;EACA;EACD"}
+
{"version":3,"file":"latent-ideas-DvWBRHsy.mjs","names":[],"sources":["../src/graph/latent-ideas.ts"],"sourcesContent":["/**\n * latent-ideas.ts — graph_latent_ideas and idea_materialize endpoint handlers\n *\n * \"Latent ideas\" are recurring themes in the vault that exist as embedding\n * clusters but have NO dedicated note written about them yet. PAI surfaces\n * these by running the same agglomerative clustering used by graph_clusters /\n * zettelThemes and then filtering OUT any cluster whose label is well-matched\n * by an existing note title.\n *\n * The materialize endpoint writes a new Markdown note to the vault filesystem\n * and returns its content so the plugin can open it immediately.\n */\n\nimport { mkdirSync, writeFileSync } from \"node:fs\";\nimport { dirname, join } from \"node:path\";\nimport { TITLE_STOP_WORDS } from \"../utils/stop-words.js\";\nimport type { StorageBackend } from \"../storage/interface.js\";\nimport { zettelThemes } from \"../zettelkasten/themes.js\";\n\n// ---------------------------------------------------------------------------\n// Public param / result types\n// ---------------------------------------------------------------------------\n\nexport interface GraphLatentIdeasParams {\n project_id: number;\n /** Minimum notes in a cluster (default: 3) */\n min_cluster_size?: number;\n /** Cap on returned ideas (default: 15) */\n max_ideas?: number;\n /** How far back to look in days (default: 180) */\n lookback_days?: number;\n /** Cosine similarity clustering threshold (default: 0.65) */\n similarity_threshold?: number;\n}\n\nexport interface LatentIdeaSourceNote {\n vault_path: string;\n title: string;\n /** How strongly this note relates to the theme (0-1) */\n relevance: number;\n}\n\nexport interface LatentIdea {\n id: number;\n /** Auto-generated cluster label from zettelThemes */\n label: string;\n /** Number of notes touching this theme */\n size: number;\n /** 0-1, how likely this is a real coherent idea */\n confidence: number;\n /** Notes that contribute to this theme */\n source_notes: LatentIdeaSourceNote[];\n /** Cleaned-up version of label for a potential note title */\n suggested_title: string;\n /** Most common folder among source notes */\n suggested_folder: string;\n /** Number of distinct session date-folders (e.g. 
\"2026/03\") touching this theme */\n sessions_count: number;\n}\n\nexport interface GraphLatentIdeasResult {\n ideas: LatentIdea[];\n total_clusters_analyzed: number;\n /** How many clusters already have a matching note (excluded from results) */\n materialized_count: number;\n}\n\n// ---------------------------------------------------------------------------\n// Materialize params / result\n// ---------------------------------------------------------------------------\n\nexport interface IdeaMaterializeParams {\n idea_label: string;\n /** User-chosen title for the new note */\n title: string;\n /** Vault-relative folder path where the note should be created */\n folder: string;\n /** Vault-relative paths of the source notes to link from the new note */\n source_paths: string[];\n project_id: number;\n}\n\nexport interface IdeaMaterializeResult {\n /** Vault-relative path of the created note */\n vault_path: string;\n /** Generated markdown content */\n content: string;\n /** Number of wikilinks inserted */\n links_created: number;\n}\n\n// ---------------------------------------------------------------------------\n// Helper: check if a cluster already has a matching note\n// ---------------------------------------------------------------------------\n\n/**\n * Returns true when any existing vault note title closely matches the cluster\n * label — meaning a dedicated note already exists for this topic.\n *\n * Matching strategy (simple, fast, no embeddings needed):\n * 1. Lowercase both sides and split into words.\n * 2. Remove stop words from the label words.\n * 3. If ≥ 60% of the significant label words appear in a note title → match.\n */\n// TITLE_STOP_WORDS imported from utils/stop-words.ts\n\nfunction labelMatchesTitle(label: string, title: string): boolean {\n const labelWords = label\n .toLowerCase()\n .split(/[\\s\\-_/]+/)\n .filter((w) => w.length > 2 && !TITLE_STOP_WORDS.has(w));\n\n if (labelWords.length === 0) return false;\n\n const titleLower = title.toLowerCase();\n const matchCount = labelWords.filter((w) => titleLower.includes(w)).length;\n return matchCount / labelWords.length >= 0.6;\n}\n\n/**\n * Check whether any note indexed in the vault has a title matching the label.\n * Fetches all vault file rows via StorageBackend for efficiency.\n */\nasync function clusterHasMatchingNote(\n backend: StorageBackend,\n label: string,\n notePaths: string[]\n): Promise<boolean> {\n // First check the notes already in the cluster themselves — if any cluster\n // member's title matches the label it IS the index note → materialized.\n const pathSet = new Set(notePaths);\n\n // Fetch all vault files (bounded — vault rarely > 50k notes)\n const rows = await backend.getAllVaultFiles();\n\n for (const row of rows) {\n if (!row.title) continue;\n // Skip notes already counted inside the cluster — they don't count as\n // \"dedicated notes\"; we only skip a cluster if a SEPARATE note exists.\n if (pathSet.has(row.vaultPath)) continue;\n if (labelMatchesTitle(label, row.title)) return true;\n }\n return false;\n}\n\n// ---------------------------------------------------------------------------\n// Helper: generate a clean suggested title\n// ---------------------------------------------------------------------------\n\nfunction toSuggestedTitle(label: string): string {\n // Remove leading/trailing whitespace, capitalize each word, remove stop words\n // that are all-lowercase at the start of the title.\n const words = label\n .trim()\n .split(/\\s+/)\n .map((w, i) => {\n const lower = 
w.toLowerCase();\n // Drop leading stop words (but keep if they're the only word)\n if (i === 0 && TITLE_STOP_WORDS.has(lower) && label.trim().split(/\\s+/).length > 1) {\n return \"\";\n }\n return w.charAt(0).toUpperCase() + w.slice(1);\n })\n .filter(Boolean);\n\n return words.join(\" \") || label;\n}\n\n// ---------------------------------------------------------------------------\n// Helper: find most common folder\n// ---------------------------------------------------------------------------\n\nfunction mostCommonFolder(vaultPaths: string[]): string {\n const counts = new Map<string, number>();\n for (const p of vaultPaths) {\n const parts = p.split(\"/\");\n const folder = parts.length > 1 ? parts.slice(0, -1).join(\"/\") : \"\";\n counts.set(folder, (counts.get(folder) ?? 0) + 1);\n }\n\n let best = \"\";\n let bestCount = 0;\n for (const [folder, count] of counts) {\n if (count > bestCount) {\n bestCount = count;\n best = folder;\n }\n }\n return best;\n}\n\n// ---------------------------------------------------------------------------\n// Helper: count distinct session date-folders\n// ---------------------------------------------------------------------------\n\n/**\n * Heuristic: vault notes are often stored in date-based folders like\n * \"2026/03/15\" or \"Daily/2026-03\". We extract the first numeric path\n * segment that looks like a year (2020-2030) and group by year+month.\n *\n * Falls back to counting distinct top-level folders.\n */\nfunction countDistinctSessions(vaultPaths: string[]): number {\n const sessions = new Set<string>();\n const yearMonthRe = /\\b(202\\d)\\D?(0[1-9]|1[0-2])\\b/;\n\n for (const p of vaultPaths) {\n const m = yearMonthRe.exec(p);\n if (m) {\n sessions.add(`${m[1]}-${m[2]}`);\n } else {\n // Fallback: use top-level folder as a proxy for \"session bucket\"\n const topFolder = p.split(\"/\")[0];\n sessions.add(topFolder);\n }\n }\n return sessions.size;\n}\n\n// ---------------------------------------------------------------------------\n// Helper: calculate confidence score\n// ---------------------------------------------------------------------------\n\n/**\n * Confidence combines:\n * - Cluster size (normalized, capped at 20 for max contribution)\n * - Folder diversity (0-1 already)\n * - Sessions count (normalized, capped at 5)\n *\n * Formula: 0.4 * sizeScore + 0.35 * folderDiversity + 0.25 * sessionScore\n */\nfunction calcConfidence(\n size: number,\n folderDiversity: number,\n sessionsCount: number\n): number {\n const sizeScore = Math.min(size / 20, 1.0);\n const sessionScore = Math.min(sessionsCount / 5, 1.0);\n const raw = 0.4 * sizeScore + 0.35 * folderDiversity + 0.25 * sessionScore;\n return Math.round(raw * 100) / 100;\n}\n\n// ---------------------------------------------------------------------------\n// Main handler: graph_latent_ideas\n// ---------------------------------------------------------------------------\n\nexport async function handleGraphLatentIdeas(\n backend: StorageBackend,\n params: GraphLatentIdeasParams\n): Promise<GraphLatentIdeasResult> {\n const minClusterSize = params.min_cluster_size ?? 3;\n const maxIdeas = params.max_ideas ?? 15;\n const lookbackDays = params.lookback_days ?? 180;\n const similarityThreshold = params.similarity_threshold ?? 
0.65;\n\n const { project_id: vaultProjectId } = params;\n if (!vaultProjectId) {\n throw new Error(\n \"graph_latent_ideas: project_id is required (pass the vault project's numeric ID)\"\n );\n }\n\n // Run the same clustering algorithm used by graph_clusters\n const themeResult = await zettelThemes(backend, {\n vaultProjectId,\n lookbackDays,\n minClusterSize,\n maxThemes: maxIdeas * 3, // Over-fetch — many will be filtered as materialized\n similarityThreshold,\n });\n\n const ideas: LatentIdea[] = [];\n let materializedCount = 0;\n\n for (const theme of themeResult.themes) {\n const notePaths = theme.notes.map((n) => n.path);\n\n // Check if a dedicated note already exists for this theme\n if (await clusterHasMatchingNote(backend, theme.label, notePaths)) {\n materializedCount++;\n continue;\n }\n\n // This is a latent idea — no dedicated note exists yet\n const suggestedFolder = mostCommonFolder(notePaths);\n const sessionsCount = countDistinctSessions(notePaths);\n const confidence = calcConfidence(theme.size, theme.folderDiversity, sessionsCount);\n\n // Build source notes with relevance scores\n // Relevance is approximated by position in cluster (centroid-closest first)\n // zettelThemes returns notes in no guaranteed order; assign uniform relevance\n // decreasing from 1.0 to 0.5 across the list.\n const sourceNotes: LatentIdeaSourceNote[] = theme.notes.map((n, idx) => ({\n vault_path: n.path,\n title: n.title ?? n.path.split(\"/\").pop()?.replace(/\\.md$/i, \"\") ?? n.path,\n relevance: Math.round((1.0 - (idx / Math.max(theme.notes.length - 1, 1)) * 0.5) * 100) / 100,\n }));\n\n ideas.push({\n id: theme.id,\n label: theme.label,\n size: theme.size,\n confidence,\n source_notes: sourceNotes,\n suggested_title: toSuggestedTitle(theme.label),\n suggested_folder: suggestedFolder,\n sessions_count: sessionsCount,\n });\n\n if (ideas.length >= maxIdeas) break;\n }\n\n // Sort by confidence descending\n ideas.sort((a, b) => b.confidence - a.confidence);\n\n return {\n ideas,\n total_clusters_analyzed: themeResult.themes.length + materializedCount,\n materialized_count: materializedCount,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Materialize handler: idea_materialize\n// ---------------------------------------------------------------------------\n\nexport function handleIdeaMaterialize(\n params: IdeaMaterializeParams,\n vaultPath: string\n): IdeaMaterializeResult {\n const { idea_label, title, folder, source_paths } = params;\n\n // Sanitize filename: replace characters illegal in filenames\n const safeTitle = title.replace(/[/\\\\:*?\"<>|]/g, \"-\");\n const fileName = `${safeTitle}.md`;\n\n // Vault-relative path (forward slashes, no leading slash)\n const relFolder = folder.replace(/^\\/+|\\/+$/g, \"\");\n const vault_path = relFolder ? `${relFolder}/${fileName}` : fileName;\n\n // Absolute filesystem path\n const absPath = join(vaultPath, vault_path);\n const absDir = dirname(absPath);\n\n // Build wikilinks from source_paths\n const wikilinks = source_paths\n .map((p) => {\n // Derive a display name: filename without extension\n const name = p.split(\"/\").pop()?.replace(/\\.md$/i, \"\") ?? 
p;\n // Relative wikilink — use just the filename (Obsidian resolves by title)\n return `- [[${name}]]`;\n })\n .join(\"\\n\");\n\n const links_created = source_paths.length;\n\n const content = [\n `# ${title}`,\n \"\",\n `*Materialized from latent idea: \"${idea_label}\"*`,\n `*Sources: ${links_created} notes*`,\n \"\",\n \"## Related Notes\",\n \"\",\n wikilinks || \"*(no source notes)*\",\n \"\",\n \"## Notes\",\n \"\",\n \"<!-- Add your thoughts about this idea here -->\",\n \"\",\n ].join(\"\\n\");\n\n // Write the file (create parent directories as needed)\n mkdirSync(absDir, { recursive: true });\n writeFileSync(absPath, content, \"utf-8\");\n\n return {\n vault_path,\n content,\n links_created,\n };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0GA,SAAS,kBAAkB,OAAe,OAAwB;CAChE,MAAM,aAAa,MAChB,aAAa,CACb,MAAM,YAAY,CAClB,QAAQ,MAAM,EAAE,SAAS,KAAK,CAAC,iBAAiB,IAAI,EAAE,CAAC;AAE1D,KAAI,WAAW,WAAW,EAAG,QAAO;CAEpC,MAAM,aAAa,MAAM,aAAa;AAEtC,QADmB,WAAW,QAAQ,MAAM,WAAW,SAAS,EAAE,CAAC,CAAC,SAChD,WAAW,UAAU;;;;;;AAO3C,eAAe,uBACb,SACA,OACA,WACkB;CAGlB,MAAM,UAAU,IAAI,IAAI,UAAU;CAGlC,MAAM,OAAO,MAAM,QAAQ,kBAAkB;AAE7C,MAAK,MAAM,OAAO,MAAM;AACtB,MAAI,CAAC,IAAI,MAAO;AAGhB,MAAI,QAAQ,IAAI,IAAI,UAAU,CAAE;AAChC,MAAI,kBAAkB,OAAO,IAAI,MAAM,CAAE,QAAO;;AAElD,QAAO;;AAOT,SAAS,iBAAiB,OAAuB;AAgB/C,QAbc,MACX,MAAM,CACN,MAAM,MAAM,CACZ,KAAK,GAAG,MAAM;EACb,MAAM,QAAQ,EAAE,aAAa;AAE7B,MAAI,MAAM,KAAK,iBAAiB,IAAI,MAAM,IAAI,MAAM,MAAM,CAAC,MAAM,MAAM,CAAC,SAAS,EAC/E,QAAO;AAET,SAAO,EAAE,OAAO,EAAE,CAAC,aAAa,GAAG,EAAE,MAAM,EAAE;GAC7C,CACD,OAAO,QAAQ,CAEL,KAAK,IAAI,IAAI;;AAO5B,SAAS,iBAAiB,YAA8B;CACtD,MAAM,yBAAS,IAAI,KAAqB;AACxC,MAAK,MAAM,KAAK,YAAY;EAC1B,MAAM,QAAQ,EAAE,MAAM,IAAI;EAC1B,MAAM,SAAS,MAAM,SAAS,IAAI,MAAM,MAAM,GAAG,GAAG,CAAC,KAAK,IAAI,GAAG;AACjE,SAAO,IAAI,SAAS,OAAO,IAAI,OAAO,IAAI,KAAK,EAAE;;CAGnD,IAAI,OAAO;CACX,IAAI,YAAY;AAChB,MAAK,MAAM,CAAC,QAAQ,UAAU,OAC5B,KAAI,QAAQ,WAAW;AACrB,cAAY;AACZ,SAAO;;AAGX,QAAO;;;;;;;;;AAcT,SAAS,sBAAsB,YAA8B;CAC3D,MAAM,2BAAW,IAAI,KAAa;CAClC,MAAM,cAAc;AAEpB,MAAK,MAAM,KAAK,YAAY;EAC1B,MAAM,IAAI,YAAY,KAAK,EAAE;AAC7B,MAAI,EACF,UAAS,IAAI,GAAG,EAAE,GAAG,GAAG,EAAE,KAAK;OAC1B;GAEL,MAAM,YAAY,EAAE,MAAM,IAAI,CAAC;AAC/B,YAAS,IAAI,UAAU;;;AAG3B,QAAO,SAAS;;;;;;;;;;AAelB,SAAS,eACP,MACA,iBACA,eACQ;CACR,MAAM,YAAY,KAAK,IAAI,OAAO,IAAI,EAAI;CAC1C,MAAM,eAAe,KAAK,IAAI,gBAAgB,GAAG,EAAI;CACrD,MAAM,MAAM,KAAM,YAAY,MAAO,kBAAkB,MAAO;AAC9D,QAAO,KAAK,MAAM,MAAM,IAAI,GAAG;;AAOjC,eAAsB,uBACpB,SACA,QACiC;CACjC,MAAM,iBAAiB,OAAO,oBAAoB;CAClD,MAAM,WAAW,OAAO,aAAa;CACrC,MAAM,eAAe,OAAO,iBAAiB;CAC7C,MAAM,sBAAsB,OAAO,wBAAwB;CAE3D,MAAM,EAAE,YAAY,mBAAmB;AACvC,KAAI,CAAC,eACH,OAAM,IAAI,MACR,mFACD;CAIH,MAAM,cAAc,MAAM,aAAa,SAAS;EAC9C;EACA;EACA;EACA,WAAW,WAAW;EACtB;EACD,CAAC;CAEF,MAAM,QAAsB,EAAE;CAC9B,IAAI,oBAAoB;AAExB,MAAK,MAAM,SAAS,YAAY,QAAQ;EACtC,MAAM,YAAY,MAAM,MAAM,KAAK,MAAM,EAAE,KAAK;AAGhD,MAAI,MAAM,uBAAuB,SAAS,MAAM,OAAO,UAAU,EAAE;AACjE;AACA;;EAIF,MAAM,kBAAkB,iBAAiB,UAAU;EACnD,MAAM,gBAAgB,sBAAsB,UAAU;EACtD,MAAM,aAAa,eAAe,MAAM,MAAM,MAAM,iBAAiB,cAAc;EAMnF,MAAM,cAAsC,MAAM,MAAM,KAAK,GAAG,SAAS;GACvE,YAAY,EAAE;GACd,OAAO,EAAE,SAAS,EAAE,KAAK,MAAM,IAAI,CAAC,KAAK,EAAE,QAAQ,UAAU,GAAG,IAAI,EAAE;GACtE,WAAW,KAAK,OAAO,IAAO,MAAM,KAAK,IAAI,MAAM,MAAM,SAAS,GAAG,EAAE,GAAI,MAAO,IAAI,GAAG;GAC1F,EAAE;AAEH,QAAM,KAAK;GACT,IAAI,MAAM;GACV,OAAO,MAAM;GACb,MAAM,MAAM;GACZ;GACA,cAAc;GACd,iBAAiB,iBAAiB,MAAM,MAAM;GAC9C,kBAAkB;GAClB,gBAAgB;GACjB,CAAC;AAEF,MAAI,MAAM,UAAU,SAAU;;AAIhC,OAAM,MAAM,GAAG,MAAM,EAAE,aAAa,EAAE,WAAW;AAEjD,QAAO;EACL;EACA,yBAAyB,YAAY,OAAO,SAAS;EACrD,oBAAoB;EACrB;;AAOH,SAAgB,sBACd,QACA,WACuB;CACvB,MAAM,EAAE,YAAY,OAAO,QAAQ,iBAAiB;CAIpD,MAAM,WAAW,GADC,MAAM,QAAQ
,iBAAiB,IAAI,CACvB;CAG9B,MAAM,YAAY,OAAO,QAAQ,cAAc,GAAG;CAClD,MAAM,aAAa,YAAY,GAAG,UAAU,GAAG,aAAa;CAG5D,MAAM,UAAU,KAAK,WAAW,WAAW;CAC3C,MAAM,SAAS,QAAQ,QAAQ;CAG/B,MAAM,YAAY,aACf,KAAK,MAAM;AAIV,SAAO,OAFM,EAAE,MAAM,IAAI,CAAC,KAAK,EAAE,QAAQ,UAAU,GAAG,IAAI,EAEvC;GACnB,CACD,KAAK,KAAK;CAEb,MAAM,gBAAgB,aAAa;CAEnC,MAAM,UAAU;EACd,KAAK;EACL;EACA,oCAAoC,WAAW;EAC/C,aAAa,cAAc;EAC3B;EACA;EACA;EACA,aAAa;EACb;EACA;EACA;EACA;EACA;EACD,CAAC,KAAK,KAAK;AAGZ,WAAU,QAAQ,EAAE,WAAW,MAAM,CAAC;AACtC,eAAc,SAAS,SAAS,QAAQ;AAExC,QAAO;EACL;EACA;EACA;EACD"}
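The latent-ideas.ts source embedded above scores each latent idea with fixed weights: 0.4 for cluster size (capped at 20 notes), 0.35 for folder diversity, and 0.25 for session count (capped at 5). A minimal standalone sketch of that scoring follows; the weights and caps mirror the embedded sourcesContent, and the worked numbers are purely hypothetical:

// Sketch of the confidence formula from the embedded latent-ideas.ts source.
// Weights and caps mirror that source; the example values are illustrative only.
function confidenceSketch(size: number, folderDiversity: number, sessionsCount: number): number {
  const sizeScore = Math.min(size / 20, 1.0);            // cluster size, capped at 20 notes
  const sessionScore = Math.min(sessionsCount / 5, 1.0); // distinct sessions, capped at 5
  const raw = 0.4 * sizeScore + 0.35 * folderDiversity + 0.25 * sessionScore;
  return Math.round(raw * 100) / 100;                    // rounded to two decimals
}

// Hypothetical example: 10 notes, folder diversity 0.8, seen in 3 sessions
// → 0.4 * 0.5 + 0.35 * 0.8 + 0.25 * 0.6 = 0.63
confidenceSketch(10, 0.8, 3); // 0.63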
package/dist/{neighborhood-BYYbEkUJ.mjs.map → neighborhood-u8ytjmWq.mjs.map}
@@ -1 +1 @@
-
{"version":3,"file":"neighborhood-BYYbEkUJ.mjs","names":[],"sources":["../src/graph/neighborhood.ts"],"sourcesContent":["/**\n * neighborhood.ts — graph_neighborhood endpoint handler\n *\n * Given a set of vault note paths (typically the notes inside a cluster),\n * returns the individual note nodes and the wikilink edges between them.\n *\n * Optionally enriches with semantic edges computed from cosine similarity\n * between chunk embeddings stored in the federation database.\n */\n\nimport type { StorageBackend } from \"../storage/interface.js\";\nimport type { Pool } from \"pg\";\nimport { deserializeEmbedding } from \"../memory/embeddings.js\";\n\n// ---------------------------------------------------------------------------\n// Public param / result types\n// ---------------------------------------------------------------------------\n\nexport interface GraphNeighborhoodParams {\n /** Vault-relative paths of notes in the cluster */\n vault_paths: string[];\n /** Numeric PAI project ID */\n project_id: number;\n /** Whether to compute semantic similarity edges (default: false) */\n include_semantic_edges?: boolean;\n /** Cosine similarity threshold for semantic edges (default: 0.7) */\n semantic_threshold?: number;\n}\n\nexport interface NoteNode {\n vault_path: string;\n title: string;\n folder: string;\n observation_types: Record<string, number>;\n dominant_type: string;\n updated_at: number;\n word_count: number;\n}\n\nexport interface NoteEdge {\n source: string;\n target: string;\n type: \"wikilink\" | \"semantic\";\n weight: number;\n}\n\nexport interface GraphNeighborhoodResult {\n nodes: NoteNode[];\n edges: NoteEdge[];\n}\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\nfunction folderFromPath(vaultPath: string): string {\n const lastSlash = vaultPath.lastIndexOf(\"/\");\n return lastSlash === -1 ? 
\"\" : vaultPath.slice(0, lastSlash);\n}\n\nfunction cosineSimilarity(a: number[], b: number[]): number {\n if (a.length !== b.length || a.length === 0) return 0;\n let dot = 0;\n let normA = 0;\n let normB = 0;\n for (let i = 0; i < a.length; i++) {\n dot += a[i] * b[i];\n normA += a[i] * a[i];\n normB += b[i] * b[i];\n }\n if (normA === 0 || normB === 0) return 0;\n return dot / (Math.sqrt(normA) * Math.sqrt(normB));\n}\n\nfunction dominantType(counts: Record<string, number>): string {\n let dominant = \"unknown\";\n let maxCount = 0;\n for (const [type, n] of Object.entries(counts)) {\n if (n > maxCount) {\n maxCount = n;\n dominant = type;\n }\n }\n return dominant;\n}\n\n// ---------------------------------------------------------------------------\n// Observation type enrichment (same pattern as clusters.ts)\n// ---------------------------------------------------------------------------\n\nasync function fetchObservationTypes(\n pool: Pool,\n filePaths: string[],\n projectId: number\n): Promise<Map<string, Record<string, number>>> {\n if (filePaths.length === 0) return new Map();\n\n try {\n const params: (string[] | number)[] = [filePaths, projectId];\n\n const result = await pool.query<{ path: string; type: string; cnt: string }>(\n `SELECT unnested_path AS path, type, COUNT(*) AS cnt\n FROM pai_observations,\n LATERAL unnest(files_modified || files_read) AS unnested_path\n WHERE unnested_path = ANY($1::text[])\n AND project_id = $2\n GROUP BY unnested_path, type`,\n params\n );\n\n const byPath = new Map<string, Record<string, number>>();\n for (const row of result.rows) {\n const existing = byPath.get(row.path) ?? {};\n existing[row.type] = (existing[row.type] ?? 0) + parseInt(row.cnt, 10);\n byPath.set(row.path, existing);\n }\n return byPath;\n } catch {\n return new Map();\n }\n}\n\n// ---------------------------------------------------------------------------\n// Main handler\n// ---------------------------------------------------------------------------\n\nexport async function handleGraphNeighborhood(\n pool: Pool | null,\n backend: StorageBackend,\n params: GraphNeighborhoodParams\n): Promise<GraphNeighborhoodResult> {\n const vaultPaths = params.vault_paths ?? [];\n if (vaultPaths.length === 0) {\n return { nodes: [], edges: [] };\n }\n\n const includeSemanticEdges = params.include_semantic_edges ?? false;\n const semanticThreshold = params.semantic_threshold ?? 0.7;\n\n // -------------------------------------------------------------------------\n // 1. Fetch node metadata from vault_files\n // -------------------------------------------------------------------------\n\n const fileRows = await backend.getVaultFilesByPaths(vaultPaths);\n\n const fileIndex = new Map<string, { vaultPath: string; title: string | null; indexedAt: number }>();\n for (const row of fileRows) {\n fileIndex.set(row.vaultPath, row);\n }\n\n // -------------------------------------------------------------------------\n // 2. Fetch observation types (Postgres if available)\n // -------------------------------------------------------------------------\n\n const observationsByPath =\n pool !== null\n ? await fetchObservationTypes(pool, vaultPaths, params.project_id)\n : new Map<string, Record<string, number>>();\n\n // -------------------------------------------------------------------------\n // 3. 
Build NoteNode array\n // -------------------------------------------------------------------------\n\n const nodes: NoteNode[] = vaultPaths.map((vp) => {\n const fileRow = fileIndex.get(vp);\n const fileName = vp.split(\"/\").pop() ?? vp;\n const rawTitle = fileRow?.title ?? fileName.replace(/\\.md$/i, \"\");\n\n const obsCounts = observationsByPath.get(vp) ?? {};\n\n return {\n vault_path: vp,\n title: rawTitle,\n folder: folderFromPath(vp),\n observation_types: obsCounts,\n dominant_type: dominantType(obsCounts),\n updated_at: fileRow?.indexedAt ?? 0,\n word_count: 0,\n };\n });\n\n // -------------------------------------------------------------------------\n // 4. Fetch wikilink edges between the provided paths\n // -------------------------------------------------------------------------\n\n const pathSet = new Set(vaultPaths);\n const linkRows = await backend.getVaultLinksFromPaths(vaultPaths);\n\n const edges: NoteEdge[] = [];\n\n for (const row of linkRows) {\n if (!row.targetPath || !pathSet.has(row.targetPath)) continue;\n\n edges.push({\n source: row.sourcePath,\n target: row.targetPath,\n type: \"wikilink\",\n weight: 1.0,\n });\n }\n\n // -------------------------------------------------------------------------\n // 5. Optional: semantic edges\n // -------------------------------------------------------------------------\n\n if (includeSemanticEdges && vaultPaths.length > 1) {\n // Fetch mean embeddings for all paths\n const embeddings = new Map<string, number[]>();\n for (const vp of vaultPaths) {\n const chunkRows = await backend.getChunksForPath(params.project_id, vp);\n const embRows = chunkRows.filter(r => r.embedding !== null) as Array<{ text: string; embedding: Buffer }>;\n if (embRows.length === 0) continue;\n\n let vecLen = 0;\n const vectors: Float32Array[] = [];\n\n for (const row of embRows) {\n const arr = deserializeEmbedding(row.embedding);\n if (vecLen === 0) vecLen = arr.length;\n if (arr.length === vecLen) vectors.push(arr);\n }\n\n if (vectors.length === 0 || vecLen === 0) continue;\n\n const mean = new Array<number>(vecLen).fill(0);\n for (const vec of vectors) {\n for (let i = 0; i < vecLen; i++) {\n mean[i] += vec[i];\n }\n }\n for (let i = 0; i < vecLen; i++) {\n mean[i] /= vectors.length;\n }\n embeddings.set(vp, mean);\n }\n\n const existingEdgeKeys = new Set<string>(\n edges.map((e) => `${e.source}|||${e.target}`)\n );\n\n const pathsWithEmbeddings = Array.from(embeddings.keys());\n for (let i = 0; i < pathsWithEmbeddings.length; i++) {\n for (let j = i + 1; j < pathsWithEmbeddings.length; j++) {\n const pathA = pathsWithEmbeddings[i];\n const pathB = pathsWithEmbeddings[j];\n\n const vecA = embeddings.get(pathA)!;\n const vecB = embeddings.get(pathB)!;\n\n const sim = cosineSimilarity(vecA, vecB);\n if (sim < semanticThreshold) continue;\n\n const keyAB = `${pathA}|||${pathB}`;\n const keyBA = `${pathB}|||${pathA}`;\n if (existingEdgeKeys.has(keyAB) || existingEdgeKeys.has(keyBA)) continue;\n\n edges.push({\n source: pathA,\n target: pathB,\n type: \"semantic\",\n weight: sim,\n });\n existingEdgeKeys.add(keyAB);\n }\n }\n }\n\n return { nodes, edges 
};\n}\n"],"mappings":";;;AAuDA,SAAS,eAAe,WAA2B;CACjD,MAAM,YAAY,UAAU,YAAY,IAAI;AAC5C,QAAO,cAAc,KAAK,KAAK,UAAU,MAAM,GAAG,UAAU;;AAG9D,SAAS,iBAAiB,GAAa,GAAqB;AAC1D,KAAI,EAAE,WAAW,EAAE,UAAU,EAAE,WAAW,EAAG,QAAO;CACpD,IAAI,MAAM;CACV,IAAI,QAAQ;CACZ,IAAI,QAAQ;AACZ,MAAK,IAAI,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AACjC,SAAO,EAAE,KAAK,EAAE;AAChB,WAAS,EAAE,KAAK,EAAE;AAClB,WAAS,EAAE,KAAK,EAAE;;AAEpB,KAAI,UAAU,KAAK,UAAU,EAAG,QAAO;AACvC,QAAO,OAAO,KAAK,KAAK,MAAM,GAAG,KAAK,KAAK,MAAM;;AAGnD,SAAS,aAAa,QAAwC;CAC5D,IAAI,WAAW;CACf,IAAI,WAAW;AACf,MAAK,MAAM,CAAC,MAAM,MAAM,OAAO,QAAQ,OAAO,CAC5C,KAAI,IAAI,UAAU;AAChB,aAAW;AACX,aAAW;;AAGf,QAAO;;AAOT,eAAe,sBACb,MACA,WACA,WAC8C;AAC9C,KAAI,UAAU,WAAW,EAAG,wBAAO,IAAI,KAAK;AAE5C,KAAI;EACF,MAAM,SAAgC,CAAC,WAAW,UAAU;EAE5D,MAAM,SAAS,MAAM,KAAK,MACxB;;;;;sCAMA,OACD;EAED,MAAM,yBAAS,IAAI,KAAqC;AACxD,OAAK,MAAM,OAAO,OAAO,MAAM;GAC7B,MAAM,WAAW,OAAO,IAAI,IAAI,KAAK,IAAI,EAAE;AAC3C,YAAS,IAAI,SAAS,SAAS,IAAI,SAAS,KAAK,SAAS,IAAI,KAAK,GAAG;AACtE,UAAO,IAAI,IAAI,MAAM,SAAS;;AAEhC,SAAO;SACD;AACN,yBAAO,IAAI,KAAK;;;AAQpB,eAAsB,wBACpB,MACA,SACA,QACkC;CAClC,MAAM,aAAa,OAAO,eAAe,EAAE;AAC3C,KAAI,WAAW,WAAW,EACxB,QAAO;EAAE,OAAO,EAAE;EAAE,OAAO,EAAE;EAAE;CAGjC,MAAM,uBAAuB,OAAO,0BAA0B;CAC9D,MAAM,oBAAoB,OAAO,sBAAsB;CAMvD,MAAM,WAAW,MAAM,QAAQ,qBAAqB,WAAW;CAE/D,MAAM,4BAAY,IAAI,KAA6E;AACnG,MAAK,MAAM,OAAO,SAChB,WAAU,IAAI,IAAI,WAAW,IAAI;CAOnC,MAAM,qBACJ,SAAS,OACL,MAAM,sBAAsB,MAAM,YAAY,OAAO,WAAW,mBAChE,IAAI,KAAqC;CAM/C,MAAM,QAAoB,WAAW,KAAK,OAAO;EAC/C,MAAM,UAAU,UAAU,IAAI,GAAG;EACjC,MAAM,WAAW,GAAG,MAAM,IAAI,CAAC,KAAK,IAAI;EACxC,MAAM,WAAW,SAAS,SAAS,SAAS,QAAQ,UAAU,GAAG;EAEjE,MAAM,YAAY,mBAAmB,IAAI,GAAG,IAAI,EAAE;AAElD,SAAO;GACL,YAAY;GACZ,OAAO;GACP,QAAQ,eAAe,GAAG;GAC1B,mBAAmB;GACnB,eAAe,aAAa,UAAU;GACtC,YAAY,SAAS,aAAa;GAClC,YAAY;GACb;GACD;CAMF,MAAM,UAAU,IAAI,IAAI,WAAW;CACnC,MAAM,WAAW,MAAM,QAAQ,uBAAuB,WAAW;CAEjE,MAAM,QAAoB,EAAE;AAE5B,MAAK,MAAM,OAAO,UAAU;AAC1B,MAAI,CAAC,IAAI,cAAc,CAAC,QAAQ,IAAI,IAAI,WAAW,CAAE;AAErD,QAAM,KAAK;GACT,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACZ,MAAM;GACN,QAAQ;GACT,CAAC;;AAOJ,KAAI,wBAAwB,WAAW,SAAS,GAAG;EAEjD,MAAM,6BAAa,IAAI,KAAuB;AAC9C,OAAK,MAAM,MAAM,YAAY;GAE3B,MAAM,WADY,MAAM,QAAQ,iBAAiB,OAAO,YAAY,GAAG,EAC7C,QAAO,MAAK,EAAE,cAAc,KAAK;AAC3D,OAAI,QAAQ,WAAW,EAAG;GAE1B,IAAI,SAAS;GACb,MAAM,UAA0B,EAAE;AAElC,QAAK,MAAM,OAAO,SAAS;IACzB,MAAM,MAAM,qBAAqB,IAAI,UAAU;AAC/C,QAAI,WAAW,EAAG,UAAS,IAAI;AAC/B,QAAI,IAAI,WAAW,OAAQ,SAAQ,KAAK,IAAI;;AAG9C,OAAI,QAAQ,WAAW,KAAK,WAAW,EAAG;GAE1C,MAAM,OAAO,IAAI,MAAc,OAAO,CAAC,KAAK,EAAE;AAC9C,QAAK,MAAM,OAAO,QAChB,MAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,IAC1B,MAAK,MAAM,IAAI;AAGnB,QAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,IAC1B,MAAK,MAAM,QAAQ;AAErB,cAAW,IAAI,IAAI,KAAK;;EAG1B,MAAM,mBAAmB,IAAI,IAC3B,MAAM,KAAK,MAAM,GAAG,EAAE,OAAO,KAAK,EAAE,SAAS,CAC9C;EAED,MAAM,sBAAsB,MAAM,KAAK,WAAW,MAAM,CAAC;AACzD,OAAK,IAAI,IAAI,GAAG,IAAI,oBAAoB,QAAQ,IAC9C,MAAK,IAAI,IAAI,IAAI,GAAG,IAAI,oBAAoB,QAAQ,KAAK;GACvD,MAAM,QAAQ,oBAAoB;GAClC,MAAM,QAAQ,oBAAoB;GAKlC,MAAM,MAAM,iBAHC,WAAW,IAAI,MAAM,EACrB,WAAW,IAAI,MAAM,CAEM;AACxC,OAAI,MAAM,kBAAmB;GAE7B,MAAM,QAAQ,GAAG,MAAM,KAAK;GAC5B,MAAM,QAAQ,GAAG,MAAM,KAAK;AAC5B,OAAI,iBAAiB,IAAI,MAAM,IAAI,iBAAiB,IAAI,MAAM,CAAE;AAEhE,SAAM,KAAK;IACT,QAAQ;IACR,QAAQ;IACR,MAAM;IACN,QAAQ;IACT,CAAC;AACF,oBAAiB,IAAI,MAAM;;;AAKjC,QAAO;EAAE;EAAO;EAAO"}
+
{"version":3,"file":"neighborhood-u8ytjmWq.mjs","names":[],"sources":["../src/graph/neighborhood.ts"],"sourcesContent":["/**\n * neighborhood.ts — graph_neighborhood endpoint handler\n *\n * Given a set of vault note paths (typically the notes inside a cluster),\n * returns the individual note nodes and the wikilink edges between them.\n *\n * Optionally enriches with semantic edges computed from cosine similarity\n * between chunk embeddings stored in the federation database.\n */\n\nimport type { StorageBackend } from \"../storage/interface.js\";\nimport type { Pool } from \"pg\";\nimport { deserializeEmbedding } from \"../memory/embeddings.js\";\n\n// ---------------------------------------------------------------------------\n// Public param / result types\n// ---------------------------------------------------------------------------\n\nexport interface GraphNeighborhoodParams {\n /** Vault-relative paths of notes in the cluster */\n vault_paths: string[];\n /** Numeric PAI project ID */\n project_id: number;\n /** Whether to compute semantic similarity edges (default: false) */\n include_semantic_edges?: boolean;\n /** Cosine similarity threshold for semantic edges (default: 0.7) */\n semantic_threshold?: number;\n}\n\nexport interface NoteNode {\n vault_path: string;\n title: string;\n folder: string;\n observation_types: Record<string, number>;\n dominant_type: string;\n updated_at: number;\n word_count: number;\n}\n\nexport interface NoteEdge {\n source: string;\n target: string;\n type: \"wikilink\" | \"semantic\";\n weight: number;\n}\n\nexport interface GraphNeighborhoodResult {\n nodes: NoteNode[];\n edges: NoteEdge[];\n}\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\nfunction folderFromPath(vaultPath: string): string {\n const lastSlash = vaultPath.lastIndexOf(\"/\");\n return lastSlash === -1 ? 
\"\" : vaultPath.slice(0, lastSlash);\n}\n\nfunction cosineSimilarity(a: number[], b: number[]): number {\n if (a.length !== b.length || a.length === 0) return 0;\n let dot = 0;\n let normA = 0;\n let normB = 0;\n for (let i = 0; i < a.length; i++) {\n dot += a[i] * b[i];\n normA += a[i] * a[i];\n normB += b[i] * b[i];\n }\n if (normA === 0 || normB === 0) return 0;\n return dot / (Math.sqrt(normA) * Math.sqrt(normB));\n}\n\nfunction dominantType(counts: Record<string, number>): string {\n let dominant = \"unknown\";\n let maxCount = 0;\n for (const [type, n] of Object.entries(counts)) {\n if (n > maxCount) {\n maxCount = n;\n dominant = type;\n }\n }\n return dominant;\n}\n\n// ---------------------------------------------------------------------------\n// Observation type enrichment (same pattern as clusters.ts)\n// ---------------------------------------------------------------------------\n\nasync function fetchObservationTypes(\n pool: Pool,\n filePaths: string[],\n projectId: number\n): Promise<Map<string, Record<string, number>>> {\n if (filePaths.length === 0) return new Map();\n\n try {\n const params: (string[] | number)[] = [filePaths, projectId];\n\n const result = await pool.query<{ path: string; type: string; cnt: string }>(\n `SELECT unnested_path AS path, type, COUNT(*) AS cnt\n FROM pai_observations,\n LATERAL unnest(files_modified || files_read) AS unnested_path\n WHERE unnested_path = ANY($1::text[])\n AND project_id = $2\n GROUP BY unnested_path, type`,\n params\n );\n\n const byPath = new Map<string, Record<string, number>>();\n for (const row of result.rows) {\n const existing = byPath.get(row.path) ?? {};\n existing[row.type] = (existing[row.type] ?? 0) + parseInt(row.cnt, 10);\n byPath.set(row.path, existing);\n }\n return byPath;\n } catch {\n return new Map();\n }\n}\n\n// ---------------------------------------------------------------------------\n// Main handler\n// ---------------------------------------------------------------------------\n\nexport async function handleGraphNeighborhood(\n pool: Pool | null,\n backend: StorageBackend,\n params: GraphNeighborhoodParams\n): Promise<GraphNeighborhoodResult> {\n const vaultPaths = params.vault_paths ?? [];\n if (vaultPaths.length === 0) {\n return { nodes: [], edges: [] };\n }\n\n const includeSemanticEdges = params.include_semantic_edges ?? false;\n const semanticThreshold = params.semantic_threshold ?? 0.7;\n\n // -------------------------------------------------------------------------\n // 1. Fetch node metadata from vault_files\n // -------------------------------------------------------------------------\n\n const fileRows = await backend.getVaultFilesByPaths(vaultPaths);\n\n const fileIndex = new Map<string, { vaultPath: string; title: string | null; indexedAt: number }>();\n for (const row of fileRows) {\n fileIndex.set(row.vaultPath, row);\n }\n\n // -------------------------------------------------------------------------\n // 2. Fetch observation types (Postgres if available)\n // -------------------------------------------------------------------------\n\n const observationsByPath =\n pool !== null\n ? await fetchObservationTypes(pool, vaultPaths, params.project_id)\n : new Map<string, Record<string, number>>();\n\n // -------------------------------------------------------------------------\n // 3. 
Build NoteNode array\n // -------------------------------------------------------------------------\n\n const nodes: NoteNode[] = vaultPaths.map((vp) => {\n const fileRow = fileIndex.get(vp);\n const fileName = vp.split(\"/\").pop() ?? vp;\n const rawTitle = fileRow?.title ?? fileName.replace(/\\.md$/i, \"\");\n\n const obsCounts = observationsByPath.get(vp) ?? {};\n\n return {\n vault_path: vp,\n title: rawTitle,\n folder: folderFromPath(vp),\n observation_types: obsCounts,\n dominant_type: dominantType(obsCounts),\n updated_at: fileRow?.indexedAt ?? 0,\n word_count: 0,\n };\n });\n\n // -------------------------------------------------------------------------\n // 4. Fetch wikilink edges between the provided paths\n // -------------------------------------------------------------------------\n\n const pathSet = new Set(vaultPaths);\n const linkRows = await backend.getVaultLinksFromPaths(vaultPaths);\n\n const edges: NoteEdge[] = [];\n\n for (const row of linkRows) {\n if (!row.targetPath || !pathSet.has(row.targetPath)) continue;\n\n edges.push({\n source: row.sourcePath,\n target: row.targetPath,\n type: \"wikilink\",\n weight: 1.0,\n });\n }\n\n // -------------------------------------------------------------------------\n // 5. Optional: semantic edges\n // -------------------------------------------------------------------------\n\n if (includeSemanticEdges && vaultPaths.length > 1) {\n // Fetch mean embeddings for all paths\n const embeddings = new Map<string, number[]>();\n for (const vp of vaultPaths) {\n const chunkRows = await backend.getChunksForPath(params.project_id, vp);\n const embRows = chunkRows.filter(r => r.embedding !== null) as Array<{ text: string; embedding: Buffer }>;\n if (embRows.length === 0) continue;\n\n let vecLen = 0;\n const vectors: Float32Array[] = [];\n\n for (const row of embRows) {\n const arr = deserializeEmbedding(row.embedding);\n if (vecLen === 0) vecLen = arr.length;\n if (arr.length === vecLen) vectors.push(arr);\n }\n\n if (vectors.length === 0 || vecLen === 0) continue;\n\n const mean = new Array<number>(vecLen).fill(0);\n for (const vec of vectors) {\n for (let i = 0; i < vecLen; i++) {\n mean[i] += vec[i];\n }\n }\n for (let i = 0; i < vecLen; i++) {\n mean[i] /= vectors.length;\n }\n embeddings.set(vp, mean);\n }\n\n const existingEdgeKeys = new Set<string>(\n edges.map((e) => `${e.source}|||${e.target}`)\n );\n\n const pathsWithEmbeddings = Array.from(embeddings.keys());\n for (let i = 0; i < pathsWithEmbeddings.length; i++) {\n for (let j = i + 1; j < pathsWithEmbeddings.length; j++) {\n const pathA = pathsWithEmbeddings[i];\n const pathB = pathsWithEmbeddings[j];\n\n const vecA = embeddings.get(pathA)!;\n const vecB = embeddings.get(pathB)!;\n\n const sim = cosineSimilarity(vecA, vecB);\n if (sim < semanticThreshold) continue;\n\n const keyAB = `${pathA}|||${pathB}`;\n const keyBA = `${pathB}|||${pathA}`;\n if (existingEdgeKeys.has(keyAB) || existingEdgeKeys.has(keyBA)) continue;\n\n edges.push({\n source: pathA,\n target: pathB,\n type: \"semantic\",\n weight: sim,\n });\n existingEdgeKeys.add(keyAB);\n }\n }\n }\n\n return { nodes, edges 
};\n}\n"],"mappings":";;;AAuDA,SAAS,eAAe,WAA2B;CACjD,MAAM,YAAY,UAAU,YAAY,IAAI;AAC5C,QAAO,cAAc,KAAK,KAAK,UAAU,MAAM,GAAG,UAAU;;AAG9D,SAAS,iBAAiB,GAAa,GAAqB;AAC1D,KAAI,EAAE,WAAW,EAAE,UAAU,EAAE,WAAW,EAAG,QAAO;CACpD,IAAI,MAAM;CACV,IAAI,QAAQ;CACZ,IAAI,QAAQ;AACZ,MAAK,IAAI,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AACjC,SAAO,EAAE,KAAK,EAAE;AAChB,WAAS,EAAE,KAAK,EAAE;AAClB,WAAS,EAAE,KAAK,EAAE;;AAEpB,KAAI,UAAU,KAAK,UAAU,EAAG,QAAO;AACvC,QAAO,OAAO,KAAK,KAAK,MAAM,GAAG,KAAK,KAAK,MAAM;;AAGnD,SAAS,aAAa,QAAwC;CAC5D,IAAI,WAAW;CACf,IAAI,WAAW;AACf,MAAK,MAAM,CAAC,MAAM,MAAM,OAAO,QAAQ,OAAO,CAC5C,KAAI,IAAI,UAAU;AAChB,aAAW;AACX,aAAW;;AAGf,QAAO;;AAOT,eAAe,sBACb,MACA,WACA,WAC8C;AAC9C,KAAI,UAAU,WAAW,EAAG,wBAAO,IAAI,KAAK;AAE5C,KAAI;EACF,MAAM,SAAgC,CAAC,WAAW,UAAU;EAE5D,MAAM,SAAS,MAAM,KAAK,MACxB;;;;;sCAMA,OACD;EAED,MAAM,yBAAS,IAAI,KAAqC;AACxD,OAAK,MAAM,OAAO,OAAO,MAAM;GAC7B,MAAM,WAAW,OAAO,IAAI,IAAI,KAAK,IAAI,EAAE;AAC3C,YAAS,IAAI,SAAS,SAAS,IAAI,SAAS,KAAK,SAAS,IAAI,KAAK,GAAG;AACtE,UAAO,IAAI,IAAI,MAAM,SAAS;;AAEhC,SAAO;SACD;AACN,yBAAO,IAAI,KAAK;;;AAQpB,eAAsB,wBACpB,MACA,SACA,QACkC;CAClC,MAAM,aAAa,OAAO,eAAe,EAAE;AAC3C,KAAI,WAAW,WAAW,EACxB,QAAO;EAAE,OAAO,EAAE;EAAE,OAAO,EAAE;EAAE;CAGjC,MAAM,uBAAuB,OAAO,0BAA0B;CAC9D,MAAM,oBAAoB,OAAO,sBAAsB;CAMvD,MAAM,WAAW,MAAM,QAAQ,qBAAqB,WAAW;CAE/D,MAAM,4BAAY,IAAI,KAA6E;AACnG,MAAK,MAAM,OAAO,SAChB,WAAU,IAAI,IAAI,WAAW,IAAI;CAOnC,MAAM,qBACJ,SAAS,OACL,MAAM,sBAAsB,MAAM,YAAY,OAAO,WAAW,mBAChE,IAAI,KAAqC;CAM/C,MAAM,QAAoB,WAAW,KAAK,OAAO;EAC/C,MAAM,UAAU,UAAU,IAAI,GAAG;EACjC,MAAM,WAAW,GAAG,MAAM,IAAI,CAAC,KAAK,IAAI;EACxC,MAAM,WAAW,SAAS,SAAS,SAAS,QAAQ,UAAU,GAAG;EAEjE,MAAM,YAAY,mBAAmB,IAAI,GAAG,IAAI,EAAE;AAElD,SAAO;GACL,YAAY;GACZ,OAAO;GACP,QAAQ,eAAe,GAAG;GAC1B,mBAAmB;GACnB,eAAe,aAAa,UAAU;GACtC,YAAY,SAAS,aAAa;GAClC,YAAY;GACb;GACD;CAMF,MAAM,UAAU,IAAI,IAAI,WAAW;CACnC,MAAM,WAAW,MAAM,QAAQ,uBAAuB,WAAW;CAEjE,MAAM,QAAoB,EAAE;AAE5B,MAAK,MAAM,OAAO,UAAU;AAC1B,MAAI,CAAC,IAAI,cAAc,CAAC,QAAQ,IAAI,IAAI,WAAW,CAAE;AAErD,QAAM,KAAK;GACT,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACZ,MAAM;GACN,QAAQ;GACT,CAAC;;AAOJ,KAAI,wBAAwB,WAAW,SAAS,GAAG;EAEjD,MAAM,6BAAa,IAAI,KAAuB;AAC9C,OAAK,MAAM,MAAM,YAAY;GAE3B,MAAM,WADY,MAAM,QAAQ,iBAAiB,OAAO,YAAY,GAAG,EAC7C,QAAO,MAAK,EAAE,cAAc,KAAK;AAC3D,OAAI,QAAQ,WAAW,EAAG;GAE1B,IAAI,SAAS;GACb,MAAM,UAA0B,EAAE;AAElC,QAAK,MAAM,OAAO,SAAS;IACzB,MAAM,MAAM,qBAAqB,IAAI,UAAU;AAC/C,QAAI,WAAW,EAAG,UAAS,IAAI;AAC/B,QAAI,IAAI,WAAW,OAAQ,SAAQ,KAAK,IAAI;;AAG9C,OAAI,QAAQ,WAAW,KAAK,WAAW,EAAG;GAE1C,MAAM,OAAO,IAAI,MAAc,OAAO,CAAC,KAAK,EAAE;AAC9C,QAAK,MAAM,OAAO,QAChB,MAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,IAC1B,MAAK,MAAM,IAAI;AAGnB,QAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,IAC1B,MAAK,MAAM,QAAQ;AAErB,cAAW,IAAI,IAAI,KAAK;;EAG1B,MAAM,mBAAmB,IAAI,IAC3B,MAAM,KAAK,MAAM,GAAG,EAAE,OAAO,KAAK,EAAE,SAAS,CAC9C;EAED,MAAM,sBAAsB,MAAM,KAAK,WAAW,MAAM,CAAC;AACzD,OAAK,IAAI,IAAI,GAAG,IAAI,oBAAoB,QAAQ,IAC9C,MAAK,IAAI,IAAI,IAAI,GAAG,IAAI,oBAAoB,QAAQ,KAAK;GACvD,MAAM,QAAQ,oBAAoB;GAClC,MAAM,QAAQ,oBAAoB;GAKlC,MAAM,MAAM,iBAHC,WAAW,IAAI,MAAM,EACrB,WAAW,IAAI,MAAM,CAEM;AACxC,OAAI,MAAM,kBAAmB;GAE7B,MAAM,QAAQ,GAAG,MAAM,KAAK;GAC5B,MAAM,QAAQ,GAAG,MAAM,KAAK;AAC5B,OAAI,iBAAiB,IAAI,MAAM,IAAI,iBAAiB,IAAI,MAAM,CAAE;AAEhE,SAAM,KAAK;IACT,QAAQ;IACR,QAAQ;IACR,MAAM;IACN,QAAQ;IACT,CAAC;AACF,oBAAiB,IAAI,MAAM;;;AAKjC,QAAO;EAAE;EAAO;EAAO"}
@@ -1 +1 @@
-
{"version":3,"file":"note-context-BK24bX8Y.mjs","names":[],"sources":["../src/graph/note-context.ts"],"sourcesContent":["/**\n * note-context.ts — graph_note_context endpoint handler\n *\n * Given a single vault note path, returns ALL notes linked to or from it\n * across the entire vault (1-hop neighbourhood), plus the edges.\n */\n\nimport type { StorageBackend } from \"../storage/interface.js\";\nimport type { Pool } from \"pg\";\n\n// ---------------------------------------------------------------------------\n// Public param / result types\n// ---------------------------------------------------------------------------\n\nexport interface GraphNoteContextParams {\n vault_path: string;\n project_id: number;\n max_neighbors?: number;\n include_backlinks?: boolean;\n include_outlinks?: boolean;\n}\n\nexport interface NoteNode {\n vault_path: string;\n title: string;\n folder: string;\n observation_types: Record<string, number>;\n dominant_type: string;\n updated_at: number;\n word_count: number;\n}\n\nexport interface NoteEdge {\n source: string;\n target: string;\n type: \"wikilink\" | \"semantic\";\n weight: number;\n}\n\nexport interface GraphNoteContextResult {\n focal: NoteNode;\n neighbors: NoteNode[];\n edges: NoteEdge[];\n cluster_membership: Record<string, number>;\n}\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\nfunction folderFromPath(vaultPath: string): string {\n const lastSlash = vaultPath.lastIndexOf(\"/\");\n return lastSlash === -1 ? \"\" : vaultPath.slice(0, lastSlash);\n}\n\nfunction dominantType(counts: Record<string, number>): string {\n let best = \"unknown\";\n let maxCount = 0;\n for (const [type, n] of Object.entries(counts)) {\n if (n > maxCount) {\n maxCount = n;\n best = type;\n }\n }\n return best;\n}\n\n// ---------------------------------------------------------------------------\n// Observation type enrichment\n// ---------------------------------------------------------------------------\n\nasync function fetchObservationTypes(\n pool: Pool,\n filePaths: string[],\n projectId: number\n): Promise<Map<string, Record<string, number>>> {\n if (filePaths.length === 0) return new Map();\n\n try {\n const result = await pool.query<{ path: string; type: string; cnt: string }>(\n `SELECT unnested_path AS path, type, COUNT(*) AS cnt\n FROM pai_observations,\n LATERAL unnest(files_modified || files_read) AS unnested_path\n WHERE unnested_path = ANY($1::text[])\n AND project_id = $2\n GROUP BY unnested_path, type`,\n [filePaths, projectId]\n );\n\n const byPath = new Map<string, Record<string, number>>();\n for (const row of result.rows) {\n const existing = byPath.get(row.path) ?? {};\n existing[row.type] = (existing[row.type] ?? 0) + parseInt(row.cnt, 10);\n byPath.set(row.path, existing);\n }\n return byPath;\n } catch {\n return new Map();\n }\n}\n\nfunction buildNoteNode(\n vaultPath: string,\n fileIndex: Map<string, { title: string | null; indexedAt: number }>,\n obsByPath: Map<string, Record<string, number>>\n): NoteNode {\n const fileRow = fileIndex.get(vaultPath);\n const fileName = vaultPath.split(\"/\").pop() ?? vaultPath;\n const rawTitle = fileRow?.title ?? fileName.replace(/\\.md$/i, \"\");\n const obsCounts = obsByPath.get(vaultPath) ?? 
{};\n\n return {\n vault_path: vaultPath,\n title: rawTitle,\n folder: folderFromPath(vaultPath),\n observation_types: obsCounts,\n dominant_type: dominantType(obsCounts),\n updated_at: fileRow?.indexedAt ?? 0,\n word_count: 0,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Main handler\n// ---------------------------------------------------------------------------\n\nexport async function handleGraphNoteContext(\n pool: Pool | null,\n backend: StorageBackend,\n params: GraphNoteContextParams\n): Promise<GraphNoteContextResult> {\n const focalPath = params.vault_path;\n if (!focalPath) {\n throw new Error(\"graph_note_context: vault_path is required\");\n }\n\n const maxNeighbors = params.max_neighbors ?? 50;\n const includeBacklinks = params.include_backlinks !== false;\n const includeOutlinks = params.include_outlinks !== false;\n\n // -------------------------------------------------------------------------\n // 1. Collect 1-hop neighbor paths via vault_links\n // -------------------------------------------------------------------------\n\n const neighborPaths = new Set<string>();\n const rawEdges: Array<{ source: string; target: string }> = [];\n\n if (includeOutlinks) {\n const outLinks = await backend.getLinksFromSource(focalPath);\n for (const link of outLinks) {\n if (!link.targetPath) continue;\n neighborPaths.add(link.targetPath);\n rawEdges.push({ source: focalPath, target: link.targetPath });\n }\n }\n\n if (includeBacklinks) {\n const inLinks = await backend.getLinksToTarget(focalPath);\n for (const link of inLinks) {\n neighborPaths.add(link.sourcePath);\n rawEdges.push({ source: link.sourcePath, target: focalPath });\n }\n }\n\n // Cap neighbors at max_neighbors, keeping the most-linked ones\n let neighborPathList = Array.from(neighborPaths);\n if (neighborPathList.length > maxNeighbors) {\n const linkCount = new Map<string, number>();\n for (const e of rawEdges) {\n const neighbor = e.source === focalPath ? e.target : e.source;\n linkCount.set(neighbor, (linkCount.get(neighbor) ?? 0) + 1);\n }\n neighborPathList = neighborPathList\n .sort((a, b) => (linkCount.get(b) ?? 0) - (linkCount.get(a) ?? 0))\n .slice(0, maxNeighbors);\n }\n\n const retainedSet = new Set(neighborPathList);\n const retainedEdges = rawEdges.filter((e) => {\n const neighbor = e.source === focalPath ? e.target : e.source;\n return retainedSet.has(neighbor);\n });\n\n // -------------------------------------------------------------------------\n // 2. Fetch vault_files metadata for focal + all neighbors\n // -------------------------------------------------------------------------\n\n const allPaths = [focalPath, ...neighborPathList];\n const fileRows = await backend.getVaultFilesByPaths(allPaths);\n const fileIndex = new Map<string, { title: string | null; indexedAt: number }>(\n fileRows.map(f => [f.vaultPath, { title: f.title, indexedAt: f.indexedAt }])\n );\n\n // -------------------------------------------------------------------------\n // 3. Observation type enrichment (Postgres if available)\n // -------------------------------------------------------------------------\n\n const obsByPath =\n pool !== null\n ? await fetchObservationTypes(pool, allPaths, params.project_id)\n : new Map<string, Record<string, number>>();\n\n // -------------------------------------------------------------------------\n // 4. 
Build focal NoteNode\n // -------------------------------------------------------------------------\n\n const focal = buildNoteNode(focalPath, fileIndex, obsByPath);\n\n // -------------------------------------------------------------------------\n // 5. Build neighbor NoteNode array\n // -------------------------------------------------------------------------\n\n const neighbors: NoteNode[] = neighborPathList.map((vp) =>\n buildNoteNode(vp, fileIndex, obsByPath)\n );\n\n // -------------------------------------------------------------------------\n // 6. Deduplicate edges\n // -------------------------------------------------------------------------\n\n const edgeKeys = new Set<string>();\n const edges: NoteEdge[] = [];\n for (const e of retainedEdges) {\n const key = `${e.source}|||${e.target}`;\n if (!edgeKeys.has(key)) {\n edgeKeys.add(key);\n edges.push({\n source: e.source,\n target: e.target,\n type: \"wikilink\",\n weight: 1.0,\n });\n }\n }\n\n return {\n focal,\n neighbors,\n edges,\n cluster_membership: {},\n };\n}\n"],"mappings":";AAkDA,SAAS,eAAe,WAA2B;CACjD,MAAM,YAAY,UAAU,YAAY,IAAI;AAC5C,QAAO,cAAc,KAAK,KAAK,UAAU,MAAM,GAAG,UAAU;;AAG9D,SAAS,aAAa,QAAwC;CAC5D,IAAI,OAAO;CACX,IAAI,WAAW;AACf,MAAK,MAAM,CAAC,MAAM,MAAM,OAAO,QAAQ,OAAO,CAC5C,KAAI,IAAI,UAAU;AAChB,aAAW;AACX,SAAO;;AAGX,QAAO;;AAOT,eAAe,sBACb,MACA,WACA,WAC8C;AAC9C,KAAI,UAAU,WAAW,EAAG,wBAAO,IAAI,KAAK;AAE5C,KAAI;EACF,MAAM,SAAS,MAAM,KAAK,MACxB;;;;;sCAMA,CAAC,WAAW,UAAU,CACvB;EAED,MAAM,yBAAS,IAAI,KAAqC;AACxD,OAAK,MAAM,OAAO,OAAO,MAAM;GAC7B,MAAM,WAAW,OAAO,IAAI,IAAI,KAAK,IAAI,EAAE;AAC3C,YAAS,IAAI,SAAS,SAAS,IAAI,SAAS,KAAK,SAAS,IAAI,KAAK,GAAG;AACtE,UAAO,IAAI,IAAI,MAAM,SAAS;;AAEhC,SAAO;SACD;AACN,yBAAO,IAAI,KAAK;;;AAIpB,SAAS,cACP,WACA,WACA,WACU;CACV,MAAM,UAAU,UAAU,IAAI,UAAU;CACxC,MAAM,WAAW,UAAU,MAAM,IAAI,CAAC,KAAK,IAAI;CAC/C,MAAM,WAAW,SAAS,SAAS,SAAS,QAAQ,UAAU,GAAG;CACjE,MAAM,YAAY,UAAU,IAAI,UAAU,IAAI,EAAE;AAEhD,QAAO;EACL,YAAY;EACZ,OAAO;EACP,QAAQ,eAAe,UAAU;EACjC,mBAAmB;EACnB,eAAe,aAAa,UAAU;EACtC,YAAY,SAAS,aAAa;EAClC,YAAY;EACb;;AAOH,eAAsB,uBACpB,MACA,SACA,QACiC;CACjC,MAAM,YAAY,OAAO;AACzB,KAAI,CAAC,UACH,OAAM,IAAI,MAAM,6CAA6C;CAG/D,MAAM,eAAe,OAAO,iBAAiB;CAC7C,MAAM,mBAAmB,OAAO,sBAAsB;CACtD,MAAM,kBAAkB,OAAO,qBAAqB;CAMpD,MAAM,gCAAgB,IAAI,KAAa;CACvC,MAAM,WAAsD,EAAE;AAE9D,KAAI,iBAAiB;EACnB,MAAM,WAAW,MAAM,QAAQ,mBAAmB,UAAU;AAC5D,OAAK,MAAM,QAAQ,UAAU;AAC3B,OAAI,CAAC,KAAK,WAAY;AACtB,iBAAc,IAAI,KAAK,WAAW;AAClC,YAAS,KAAK;IAAE,QAAQ;IAAW,QAAQ,KAAK;IAAY,CAAC;;;AAIjE,KAAI,kBAAkB;EACpB,MAAM,UAAU,MAAM,QAAQ,iBAAiB,UAAU;AACzD,OAAK,MAAM,QAAQ,SAAS;AAC1B,iBAAc,IAAI,KAAK,WAAW;AAClC,YAAS,KAAK;IAAE,QAAQ,KAAK;IAAY,QAAQ;IAAW,CAAC;;;CAKjE,IAAI,mBAAmB,MAAM,KAAK,cAAc;AAChD,KAAI,iBAAiB,SAAS,cAAc;EAC1C,MAAM,4BAAY,IAAI,KAAqB;AAC3C,OAAK,MAAM,KAAK,UAAU;GACxB,MAAM,WAAW,EAAE,WAAW,YAAY,EAAE,SAAS,EAAE;AACvD,aAAU,IAAI,WAAW,UAAU,IAAI,SAAS,IAAI,KAAK,EAAE;;AAE7D,qBAAmB,iBAChB,MAAM,GAAG,OAAO,UAAU,IAAI,EAAE,IAAI,MAAM,UAAU,IAAI,EAAE,IAAI,GAAG,CACjE,MAAM,GAAG,aAAa;;CAG3B,MAAM,cAAc,IAAI,IAAI,iBAAiB;CAC7C,MAAM,gBAAgB,SAAS,QAAQ,MAAM;EAC3C,MAAM,WAAW,EAAE,WAAW,YAAY,EAAE,SAAS,EAAE;AACvD,SAAO,YAAY,IAAI,SAAS;GAChC;CAMF,MAAM,WAAW,CAAC,WAAW,GAAG,iBAAiB;CACjD,MAAM,WAAW,MAAM,QAAQ,qBAAqB,SAAS;CAC7D,MAAM,YAAY,IAAI,IACpB,SAAS,KAAI,MAAK,CAAC,EAAE,WAAW;EAAE,OAAO,EAAE;EAAO,WAAW,EAAE;EAAW,CAAC,CAAC,CAC7E;CAMD,MAAM,YACJ,SAAS,OACL,MAAM,sBAAsB,MAAM,UAAU,OAAO,WAAW,mBAC9D,IAAI,KAAqC;CAM/C,MAAM,QAAQ,cAAc,WAAW,WAAW,UAAU;CAM5D,MAAM,YAAwB,iBAAiB,KAAK,OAClD,cAAc,IAAI,WAAW,UAAU,CACxC;CAMD,MAAM,2BAAW,IAAI,KAAa;CAClC,MAAM,QAAoB,EAAE;AAC5B,MAAK,MAAM,KAAK,eAAe;EAC7B,MAAM,MAAM,GAAG,EAAE,OAAO,KAAK,EAAE;AAC/B,MAAI,CAAC,SAAS,IA
AI,IAAI,EAAE;AACtB,YAAS,IAAI,IAAI;AACjB,SAAM,KAAK;IACT,QAAQ,EAAE;IACV,QAAQ,EAAE;IACV,MAAM;IACN,QAAQ;IACT,CAAC;;;AAIN,QAAO;EACL;EACA;EACA;EACA,oBAAoB,EAAE;EACvB"}
+
{"version":3,"file":"note-context-CG2_e-0W.mjs","names":[],"sources":["../src/graph/note-context.ts"],"sourcesContent":["/**\n * note-context.ts — graph_note_context endpoint handler\n *\n * Given a single vault note path, returns ALL notes linked to or from it\n * across the entire vault (1-hop neighbourhood), plus the edges.\n */\n\nimport type { StorageBackend } from \"../storage/interface.js\";\nimport type { Pool } from \"pg\";\n\n// ---------------------------------------------------------------------------\n// Public param / result types\n// ---------------------------------------------------------------------------\n\nexport interface GraphNoteContextParams {\n vault_path: string;\n project_id: number;\n max_neighbors?: number;\n include_backlinks?: boolean;\n include_outlinks?: boolean;\n}\n\nexport interface NoteNode {\n vault_path: string;\n title: string;\n folder: string;\n observation_types: Record<string, number>;\n dominant_type: string;\n updated_at: number;\n word_count: number;\n}\n\nexport interface NoteEdge {\n source: string;\n target: string;\n type: \"wikilink\" | \"semantic\";\n weight: number;\n}\n\nexport interface GraphNoteContextResult {\n focal: NoteNode;\n neighbors: NoteNode[];\n edges: NoteEdge[];\n cluster_membership: Record<string, number>;\n}\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\nfunction folderFromPath(vaultPath: string): string {\n const lastSlash = vaultPath.lastIndexOf(\"/\");\n return lastSlash === -1 ? \"\" : vaultPath.slice(0, lastSlash);\n}\n\nfunction dominantType(counts: Record<string, number>): string {\n let best = \"unknown\";\n let maxCount = 0;\n for (const [type, n] of Object.entries(counts)) {\n if (n > maxCount) {\n maxCount = n;\n best = type;\n }\n }\n return best;\n}\n\n// ---------------------------------------------------------------------------\n// Observation type enrichment\n// ---------------------------------------------------------------------------\n\nasync function fetchObservationTypes(\n pool: Pool,\n filePaths: string[],\n projectId: number\n): Promise<Map<string, Record<string, number>>> {\n if (filePaths.length === 0) return new Map();\n\n try {\n const result = await pool.query<{ path: string; type: string; cnt: string }>(\n `SELECT unnested_path AS path, type, COUNT(*) AS cnt\n FROM pai_observations,\n LATERAL unnest(files_modified || files_read) AS unnested_path\n WHERE unnested_path = ANY($1::text[])\n AND project_id = $2\n GROUP BY unnested_path, type`,\n [filePaths, projectId]\n );\n\n const byPath = new Map<string, Record<string, number>>();\n for (const row of result.rows) {\n const existing = byPath.get(row.path) ?? {};\n existing[row.type] = (existing[row.type] ?? 0) + parseInt(row.cnt, 10);\n byPath.set(row.path, existing);\n }\n return byPath;\n } catch {\n return new Map();\n }\n}\n\nfunction buildNoteNode(\n vaultPath: string,\n fileIndex: Map<string, { title: string | null; indexedAt: number }>,\n obsByPath: Map<string, Record<string, number>>\n): NoteNode {\n const fileRow = fileIndex.get(vaultPath);\n const fileName = vaultPath.split(\"/\").pop() ?? vaultPath;\n const rawTitle = fileRow?.title ?? fileName.replace(/\\.md$/i, \"\");\n const obsCounts = obsByPath.get(vaultPath) ?? 
{};\n\n return {\n vault_path: vaultPath,\n title: rawTitle,\n folder: folderFromPath(vaultPath),\n observation_types: obsCounts,\n dominant_type: dominantType(obsCounts),\n updated_at: fileRow?.indexedAt ?? 0,\n word_count: 0,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Main handler\n// ---------------------------------------------------------------------------\n\nexport async function handleGraphNoteContext(\n pool: Pool | null,\n backend: StorageBackend,\n params: GraphNoteContextParams\n): Promise<GraphNoteContextResult> {\n const focalPath = params.vault_path;\n if (!focalPath) {\n throw new Error(\"graph_note_context: vault_path is required\");\n }\n\n const maxNeighbors = params.max_neighbors ?? 50;\n const includeBacklinks = params.include_backlinks !== false;\n const includeOutlinks = params.include_outlinks !== false;\n\n // -------------------------------------------------------------------------\n // 1. Collect 1-hop neighbor paths via vault_links\n // -------------------------------------------------------------------------\n\n const neighborPaths = new Set<string>();\n const rawEdges: Array<{ source: string; target: string }> = [];\n\n if (includeOutlinks) {\n const outLinks = await backend.getLinksFromSource(focalPath);\n for (const link of outLinks) {\n if (!link.targetPath) continue;\n neighborPaths.add(link.targetPath);\n rawEdges.push({ source: focalPath, target: link.targetPath });\n }\n }\n\n if (includeBacklinks) {\n const inLinks = await backend.getLinksToTarget(focalPath);\n for (const link of inLinks) {\n neighborPaths.add(link.sourcePath);\n rawEdges.push({ source: link.sourcePath, target: focalPath });\n }\n }\n\n // Cap neighbors at max_neighbors, keeping the most-linked ones\n let neighborPathList = Array.from(neighborPaths);\n if (neighborPathList.length > maxNeighbors) {\n const linkCount = new Map<string, number>();\n for (const e of rawEdges) {\n const neighbor = e.source === focalPath ? e.target : e.source;\n linkCount.set(neighbor, (linkCount.get(neighbor) ?? 0) + 1);\n }\n neighborPathList = neighborPathList\n .sort((a, b) => (linkCount.get(b) ?? 0) - (linkCount.get(a) ?? 0))\n .slice(0, maxNeighbors);\n }\n\n const retainedSet = new Set(neighborPathList);\n const retainedEdges = rawEdges.filter((e) => {\n const neighbor = e.source === focalPath ? e.target : e.source;\n return retainedSet.has(neighbor);\n });\n\n // -------------------------------------------------------------------------\n // 2. Fetch vault_files metadata for focal + all neighbors\n // -------------------------------------------------------------------------\n\n const allPaths = [focalPath, ...neighborPathList];\n const fileRows = await backend.getVaultFilesByPaths(allPaths);\n const fileIndex = new Map<string, { title: string | null; indexedAt: number }>(\n fileRows.map(f => [f.vaultPath, { title: f.title, indexedAt: f.indexedAt }])\n );\n\n // -------------------------------------------------------------------------\n // 3. Observation type enrichment (Postgres if available)\n // -------------------------------------------------------------------------\n\n const obsByPath =\n pool !== null\n ? await fetchObservationTypes(pool, allPaths, params.project_id)\n : new Map<string, Record<string, number>>();\n\n // -------------------------------------------------------------------------\n // 4. 
Build focal NoteNode\n // -------------------------------------------------------------------------\n\n const focal = buildNoteNode(focalPath, fileIndex, obsByPath);\n\n // -------------------------------------------------------------------------\n // 5. Build neighbor NoteNode array\n // -------------------------------------------------------------------------\n\n const neighbors: NoteNode[] = neighborPathList.map((vp) =>\n buildNoteNode(vp, fileIndex, obsByPath)\n );\n\n // -------------------------------------------------------------------------\n // 6. Deduplicate edges\n // -------------------------------------------------------------------------\n\n const edgeKeys = new Set<string>();\n const edges: NoteEdge[] = [];\n for (const e of retainedEdges) {\n const key = `${e.source}|||${e.target}`;\n if (!edgeKeys.has(key)) {\n edgeKeys.add(key);\n edges.push({\n source: e.source,\n target: e.target,\n type: \"wikilink\",\n weight: 1.0,\n });\n }\n }\n\n return {\n focal,\n neighbors,\n edges,\n cluster_membership: {},\n };\n}\n"],"mappings":";AAkDA,SAAS,eAAe,WAA2B;CACjD,MAAM,YAAY,UAAU,YAAY,IAAI;AAC5C,QAAO,cAAc,KAAK,KAAK,UAAU,MAAM,GAAG,UAAU;;AAG9D,SAAS,aAAa,QAAwC;CAC5D,IAAI,OAAO;CACX,IAAI,WAAW;AACf,MAAK,MAAM,CAAC,MAAM,MAAM,OAAO,QAAQ,OAAO,CAC5C,KAAI,IAAI,UAAU;AAChB,aAAW;AACX,SAAO;;AAGX,QAAO;;AAOT,eAAe,sBACb,MACA,WACA,WAC8C;AAC9C,KAAI,UAAU,WAAW,EAAG,wBAAO,IAAI,KAAK;AAE5C,KAAI;EACF,MAAM,SAAS,MAAM,KAAK,MACxB;;;;;sCAMA,CAAC,WAAW,UAAU,CACvB;EAED,MAAM,yBAAS,IAAI,KAAqC;AACxD,OAAK,MAAM,OAAO,OAAO,MAAM;GAC7B,MAAM,WAAW,OAAO,IAAI,IAAI,KAAK,IAAI,EAAE;AAC3C,YAAS,IAAI,SAAS,SAAS,IAAI,SAAS,KAAK,SAAS,IAAI,KAAK,GAAG;AACtE,UAAO,IAAI,IAAI,MAAM,SAAS;;AAEhC,SAAO;SACD;AACN,yBAAO,IAAI,KAAK;;;AAIpB,SAAS,cACP,WACA,WACA,WACU;CACV,MAAM,UAAU,UAAU,IAAI,UAAU;CACxC,MAAM,WAAW,UAAU,MAAM,IAAI,CAAC,KAAK,IAAI;CAC/C,MAAM,WAAW,SAAS,SAAS,SAAS,QAAQ,UAAU,GAAG;CACjE,MAAM,YAAY,UAAU,IAAI,UAAU,IAAI,EAAE;AAEhD,QAAO;EACL,YAAY;EACZ,OAAO;EACP,QAAQ,eAAe,UAAU;EACjC,mBAAmB;EACnB,eAAe,aAAa,UAAU;EACtC,YAAY,SAAS,aAAa;EAClC,YAAY;EACb;;AAOH,eAAsB,uBACpB,MACA,SACA,QACiC;CACjC,MAAM,YAAY,OAAO;AACzB,KAAI,CAAC,UACH,OAAM,IAAI,MAAM,6CAA6C;CAG/D,MAAM,eAAe,OAAO,iBAAiB;CAC7C,MAAM,mBAAmB,OAAO,sBAAsB;CACtD,MAAM,kBAAkB,OAAO,qBAAqB;CAMpD,MAAM,gCAAgB,IAAI,KAAa;CACvC,MAAM,WAAsD,EAAE;AAE9D,KAAI,iBAAiB;EACnB,MAAM,WAAW,MAAM,QAAQ,mBAAmB,UAAU;AAC5D,OAAK,MAAM,QAAQ,UAAU;AAC3B,OAAI,CAAC,KAAK,WAAY;AACtB,iBAAc,IAAI,KAAK,WAAW;AAClC,YAAS,KAAK;IAAE,QAAQ;IAAW,QAAQ,KAAK;IAAY,CAAC;;;AAIjE,KAAI,kBAAkB;EACpB,MAAM,UAAU,MAAM,QAAQ,iBAAiB,UAAU;AACzD,OAAK,MAAM,QAAQ,SAAS;AAC1B,iBAAc,IAAI,KAAK,WAAW;AAClC,YAAS,KAAK;IAAE,QAAQ,KAAK;IAAY,QAAQ;IAAW,CAAC;;;CAKjE,IAAI,mBAAmB,MAAM,KAAK,cAAc;AAChD,KAAI,iBAAiB,SAAS,cAAc;EAC1C,MAAM,4BAAY,IAAI,KAAqB;AAC3C,OAAK,MAAM,KAAK,UAAU;GACxB,MAAM,WAAW,EAAE,WAAW,YAAY,EAAE,SAAS,EAAE;AACvD,aAAU,IAAI,WAAW,UAAU,IAAI,SAAS,IAAI,KAAK,EAAE;;AAE7D,qBAAmB,iBAChB,MAAM,GAAG,OAAO,UAAU,IAAI,EAAE,IAAI,MAAM,UAAU,IAAI,EAAE,IAAI,GAAG,CACjE,MAAM,GAAG,aAAa;;CAG3B,MAAM,cAAc,IAAI,IAAI,iBAAiB;CAC7C,MAAM,gBAAgB,SAAS,QAAQ,MAAM;EAC3C,MAAM,WAAW,EAAE,WAAW,YAAY,EAAE,SAAS,EAAE;AACvD,SAAO,YAAY,IAAI,SAAS;GAChC;CAMF,MAAM,WAAW,CAAC,WAAW,GAAG,iBAAiB;CACjD,MAAM,WAAW,MAAM,QAAQ,qBAAqB,SAAS;CAC7D,MAAM,YAAY,IAAI,IACpB,SAAS,KAAI,MAAK,CAAC,EAAE,WAAW;EAAE,OAAO,EAAE;EAAO,WAAW,EAAE;EAAW,CAAC,CAAC,CAC7E;CAMD,MAAM,YACJ,SAAS,OACL,MAAM,sBAAsB,MAAM,UAAU,OAAO,WAAW,mBAC9D,IAAI,KAAqC;CAM/C,MAAM,QAAQ,cAAc,WAAW,WAAW,UAAU;CAM5D,MAAM,YAAwB,iBAAiB,KAAK,OAClD,cAAc,IAAI,WAAW,UAAU,CACxC;CAMD,MAAM,2BAAW,IAAI,KAAa;CAClC,MAAM,QAAoB,EAAE;AAC5B,MAAK,MAAM,KAAK,eAAe;EAC7B,MAAM,MAAM,GAAG,EAAE,OAAO,KAAK,EAAE;AAC/B,MAAI,CAAC,SAAS,IA
AI,IAAI,EAAE;AACtB,YAAS,IAAI,IAAI;AACjB,SAAM,KAAK;IACT,QAAQ,EAAE;IACV,QAAQ,EAAE;IACV,MAAM;IACN,QAAQ;IACT,CAAC;;;AAIN,QAAO;EACL;EACA;EACA;EACA,oBAAoB,EAAE;EACvB"}
@@ -1 +1 @@
-
{"version":3,"file":"postgres-DvEPooLO.mjs","names":["vault.upsertVaultFile","vault.deleteVaultFile","vault.getVaultFile","vault.getVaultFileByInode","vault.getAllVaultFiles","vault.getRecentVaultFiles","vault.countVaultFiles","vault.countVaultFilesWithPrefix","vault.countVaultFilesAfter","vault.getVaultFilesByPaths","vault.getVaultFilesByPathsAfter","vault.getAllVaultFilePaths","vault.getVaultFilePathsWithPrefix","vault.getVaultFilePathsAfter","vault.upsertVaultAliases","vault.deleteVaultAliases","vault.getVaultAlias","vault.replaceLinksForSources","vault.getLinksFromSource","vault.getLinksToTarget","vault.getVaultLinkGraph","vault.getDeadLinks","vault.getDeadLinksWithLineNumbers","vault.getDeadLinksWithPrefix","vault.getDeadLinksAfter","vault.countVaultLinksWithPrefix","vault.countVaultLinksAfter","vault.getVaultLinksFromPaths","vault.getVaultLinkEdges","vault.getVaultLinkEdgesWithPrefix","vault.getVaultLinkEdgesAfter","vault.upsertVaultHealth","vault.getVaultHealth","vault.getOrphans","vault.getOrphansWithPrefix","vault.getOrphansAfter","vault.getLowConnectivity","vault.getLowConnectivityWithPrefix","vault.getLowConnectivityAfter","vault.upsertNameIndex","vault.replaceNameIndex","vault.resolveVaultName","vault.searchVaultNameIndex"],"sources":["../src/storage/postgres/helpers.ts","../src/storage/postgres/search.ts","../src/storage/postgres/vault.ts","../src/storage/postgres/backend.ts"],"sourcesContent":["/**\n * Internal helper utilities for the Postgres storage backend.\n */\n\nimport { STOP_WORDS } from \"../../utils/stop-words.js\";\n\n/**\n * Convert a Buffer of Float32 LE bytes (as stored in SQLite) to number[].\n */\nexport function bufferToVector(buf: Buffer): number[] {\n const floats: number[] = [];\n for (let i = 0; i < buf.length; i += 4) {\n floats.push(buf.readFloatLE(i));\n }\n return floats;\n}\n\n/**\n * Convert a free-text query to a Postgres tsquery string.\n *\n * Uses OR (|) semantics so that a chunk matching ANY query term is returned,\n * ranked by ts_rank (which scores higher when more terms match). AND (&)\n * semantics are too strict for multi-word queries because all terms rarely\n * co-occur in a single chunk.\n *\n * Example: \"Synchrotech interview follow-up Gilles\"\n * → \"synchrotech | interview | follow | gilles\"\n */\nexport function buildPgTsQuery(query: string): string {\n const tokens = query\n .toLowerCase()\n .split(/[\\s\\p{P}]+/u)\n .filter(Boolean)\n .filter((t) => t.length >= 2)\n .filter((t) => !STOP_WORDS.has(t))\n // Sanitize: strip tsquery special characters to prevent syntax errors\n .map((t) => t.replace(/'/g, \"''\").replace(/[&|!():]/g, \"\"))\n .filter(Boolean);\n\n if (tokens.length === 0) {\n const raw = query.replace(/[^a-z0-9]/gi, \" \").trim().split(/\\s+/).filter(Boolean).join(\" | \");\n return raw || \"\";\n }\n\n return tokens.join(\" | \");\n}\n","/**\n * Keyword and semantic search implementations for the Postgres backend.\n * Functions take a `pool` parameter so they can be called from PostgresBackend methods.\n */\n\nimport type { Pool } from \"pg\";\nimport type { SearchResult, SearchOptions } from \"../../memory/search.js\";\nimport { buildPgTsQuery } from \"./helpers.js\";\n\n/**\n * Full-text keyword search using Postgres tsvector/tsquery with 'simple' dictionary.\n */\nexport async function searchKeyword(\n pool: Pool,\n query: string,\n opts?: SearchOptions\n): Promise<SearchResult[]> {\n const maxResults = opts?.maxResults ?? 
10;\n\n const tsQuery = buildPgTsQuery(query);\n if (!tsQuery) return [];\n\n const conditions: string[] = [\"fts_vector @@ to_tsquery('simple', $1)\"];\n const params: (string | number)[] = [tsQuery];\n let paramIdx = 2;\n\n if (opts?.projectIds && opts.projectIds.length > 0) {\n const placeholders = opts.projectIds.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`project_id IN (${placeholders})`);\n params.push(...opts.projectIds);\n }\n\n if (opts?.sources && opts.sources.length > 0) {\n const placeholders = opts.sources.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`source IN (${placeholders})`);\n params.push(...opts.sources);\n }\n\n if (opts?.tiers && opts.tiers.length > 0) {\n const placeholders = opts.tiers.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`tier IN (${placeholders})`);\n params.push(...opts.tiers);\n }\n\n params.push(maxResults);\n const limitParam = `$${paramIdx}`;\n\n const sql = `\n SELECT\n project_id,\n path,\n start_line,\n end_line,\n text AS snippet,\n tier,\n source,\n ts_rank(fts_vector, to_tsquery('simple', $1)) AS rank_score\n FROM pai_chunks\n WHERE ${conditions.join(\" AND \")}\n ORDER BY rank_score DESC\n LIMIT ${limitParam}\n `;\n\n try {\n const result = await pool.query<{\n project_id: number;\n path: string;\n start_line: number;\n end_line: number;\n snippet: string;\n tier: string;\n source: string;\n rank_score: number;\n }>(sql, params);\n\n return result.rows.map((row) => ({\n projectId: row.project_id,\n path: row.path,\n startLine: row.start_line,\n endLine: row.end_line,\n snippet: row.snippet,\n score: row.rank_score,\n tier: row.tier,\n source: row.source,\n }));\n } catch (e) {\n process.stderr.write(`[pai-postgres] searchKeyword error: ${e}\\n`);\n return [];\n }\n}\n\n/**\n * Semantic vector similarity search using pgvector cosine distance (<=>).\n */\nexport async function searchSemantic(\n pool: Pool,\n queryEmbedding: Float32Array,\n opts?: SearchOptions\n): Promise<SearchResult[]> {\n const maxResults = opts?.maxResults ?? 
10;\n\n const conditions: string[] = [\"embedding IS NOT NULL\"];\n const params: (string | number)[] = [];\n let paramIdx = 1;\n\n const vecStr = \"[\" + Array.from(queryEmbedding).join(\",\") + \"]\";\n params.push(vecStr);\n const vecParam = `$${paramIdx++}`;\n\n if (opts?.projectIds && opts.projectIds.length > 0) {\n const placeholders = opts.projectIds.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`project_id IN (${placeholders})`);\n params.push(...opts.projectIds);\n }\n\n if (opts?.sources && opts.sources.length > 0) {\n const placeholders = opts.sources.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`source IN (${placeholders})`);\n params.push(...opts.sources);\n }\n\n if (opts?.tiers && opts.tiers.length > 0) {\n const placeholders = opts.tiers.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`tier IN (${placeholders})`);\n params.push(...opts.tiers);\n }\n\n params.push(maxResults);\n const limitParam = `$${paramIdx}`;\n\n // <=> is cosine distance; 1 - distance = cosine similarity\n const sql = `\n SELECT\n project_id,\n path,\n start_line,\n end_line,\n text AS snippet,\n tier,\n source,\n 1 - (embedding <=> ${vecParam}::vector) AS cosine_similarity\n FROM pai_chunks\n WHERE ${conditions.join(\" AND \")}\n ORDER BY embedding <=> ${vecParam}::vector\n LIMIT ${limitParam}\n `;\n\n try {\n const result = await pool.query<{\n project_id: number;\n path: string;\n start_line: number;\n end_line: number;\n snippet: string;\n tier: string;\n source: string;\n cosine_similarity: number;\n }>(sql, params);\n\n const minScore = opts?.minScore ?? -Infinity;\n\n return result.rows\n .map((row) => ({\n projectId: row.project_id,\n path: row.path,\n startLine: row.start_line,\n endLine: row.end_line,\n snippet: row.snippet,\n score: row.cosine_similarity,\n tier: row.tier,\n source: row.source,\n }))\n .filter((r) => r.score >= minScore);\n } catch (e) {\n process.stderr.write(`[pai-postgres] searchSemantic error: ${e}\\n`);\n return [];\n }\n}\n","/**\n * Vault storage operations for the Postgres backend.\n * All functions take a `pool` parameter — called from PostgresBackend methods.\n */\n\nimport type { Pool } from \"pg\";\nimport type {\n VaultFileRow, VaultAliasRow, VaultLinkRow, VaultHealthRow, VaultNameEntry,\n} from \"../interface.js\";\n\n// ---------------------------------------------------------------------------\n// Vault files\n// ---------------------------------------------------------------------------\n\nexport async function upsertVaultFile(pool: Pool, file: VaultFileRow): Promise<void> {\n await pool.query(\n `INSERT INTO vault_files (vault_path, inode, device, hash, title, indexed_at)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (vault_path) DO UPDATE SET\n inode = EXCLUDED.inode, device = EXCLUDED.device,\n hash = EXCLUDED.hash, title = EXCLUDED.title,\n indexed_at = EXCLUDED.indexed_at`,\n [file.vaultPath, file.inode, file.device, file.hash, file.title, file.indexedAt]\n );\n}\n\nexport async function deleteVaultFile(pool: Pool, vaultPath: string): Promise<void> {\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n await client.query(\"DELETE FROM vault_links WHERE source_path = $1\", [vaultPath]);\n await client.query(\"DELETE FROM vault_health WHERE vault_path = $1\", [vaultPath]);\n await client.query(\"DELETE FROM vault_name_index WHERE vault_path = $1\", [vaultPath]);\n await client.query(\"DELETE FROM vault_aliases WHERE vault_path = $1 OR canonical_path = $1\", [vaultPath]);\n await 
client.query(\"DELETE FROM vault_files WHERE vault_path = $1\", [vaultPath]);\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\ntype VaultFileDbRow = { vault_path: string; inode: string; device: string; hash: string; title: string | null; indexed_at: string };\n\nfunction mapVaultFileRow(row: VaultFileDbRow): VaultFileRow {\n return {\n vaultPath: row.vault_path,\n inode: Number(row.inode),\n device: Number(row.device),\n hash: row.hash,\n title: row.title,\n indexedAt: Number(row.indexed_at),\n };\n}\n\nexport async function getVaultFile(pool: Pool, vaultPath: string): Promise<VaultFileRow | null> {\n const r = await pool.query<VaultFileDbRow>(\n \"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE vault_path = $1\",\n [vaultPath]\n );\n return r.rows.length === 0 ? null : mapVaultFileRow(r.rows[0]);\n}\n\nexport async function getVaultFileByInode(pool: Pool, inode: number, device: number): Promise<VaultFileRow | null> {\n const r = await pool.query<VaultFileDbRow>(\n \"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE inode = $1 AND device = $2 LIMIT 1\",\n [inode, device]\n );\n return r.rows.length === 0 ? null : mapVaultFileRow(r.rows[0]);\n}\n\nexport async function getAllVaultFiles(pool: Pool): Promise<VaultFileRow[]> {\n const r = await pool.query<VaultFileDbRow>(\"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files\");\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function getRecentVaultFiles(pool: Pool, sinceMs: number): Promise<VaultFileRow[]> {\n const r = await pool.query<VaultFileDbRow>(\n \"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function countVaultFiles(pool: Pool): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*)::text AS n FROM vault_files\");\n return parseInt(r.rows[0]?.n ?? \"0\", 10);\n}\n\nexport async function countVaultFilesWithPrefix(pool: Pool, prefix: string): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*) AS n FROM vault_files WHERE vault_path LIKE $1\", [`${prefix}%`]);\n return Number(r.rows[0]?.n ?? 0);\n}\n\nexport async function countVaultFilesAfter(pool: Pool, sinceMs: number): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*) AS n FROM vault_files WHERE indexed_at > $1\", [sinceMs]);\n return Number(r.rows[0]?.n ?? 
0);\n}\n\nexport async function getVaultFilesByPaths(pool: Pool, paths: string[]): Promise<VaultFileRow[]> {\n if (paths.length === 0) return [];\n const placeholders = paths.map((_, i) => `$${i + 1}`).join(\", \");\n const r = await pool.query<VaultFileDbRow>(\n `SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE vault_path IN (${placeholders})`,\n paths\n );\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function getVaultFilesByPathsAfter(pool: Pool, paths: string[], sinceMs: number): Promise<VaultFileRow[]> {\n if (paths.length === 0) return [];\n const placeholders = paths.map((_, i) => `$${i + 1}`).join(\", \");\n const r = await pool.query<VaultFileDbRow>(\n `SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE vault_path IN (${placeholders}) AND indexed_at >= $${paths.length + 1} ORDER BY indexed_at ASC`,\n [...paths, sinceMs]\n );\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function getAllVaultFilePaths(pool: Pool): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\"SELECT vault_path FROM vault_files\");\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getVaultFilePathsWithPrefix(pool: Pool, prefix: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_files WHERE vault_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getVaultFilePathsAfter(pool: Pool, sinceMs: number): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_files WHERE indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(row => row.vault_path);\n}\n\n// ---------------------------------------------------------------------------\n// Vault aliases\n// ---------------------------------------------------------------------------\n\nexport async function upsertVaultAliases(pool: Pool, aliases: VaultAliasRow[]): Promise<void> {\n if (aliases.length === 0) return;\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const a of aliases) {\n await client.query(\n `INSERT INTO vault_aliases (vault_path, canonical_path, inode, device)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (vault_path) DO UPDATE SET\n canonical_path = EXCLUDED.canonical_path,\n inode = EXCLUDED.inode, device = EXCLUDED.device`,\n [a.vaultPath, a.canonicalPath, a.inode, a.device]\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport async function deleteVaultAliases(pool: Pool, canonicalPath: string): Promise<void> {\n await pool.query(\"DELETE FROM vault_aliases WHERE canonical_path = $1\", [canonicalPath]);\n}\n\nexport async function getVaultAlias(pool: Pool, vaultPath: string): Promise<{ canonicalPath: string } | null> {\n const r = await pool.query<{ canonical_path: string }>(\n \"SELECT canonical_path FROM vault_aliases WHERE vault_path = $1\",\n [vaultPath]\n );\n return r.rows.length > 0 ? 
{ canonicalPath: r.rows[0].canonical_path } : null;\n}\n\n// ---------------------------------------------------------------------------\n// Vault links\n// ---------------------------------------------------------------------------\n\ntype VaultLinkDbRow = { source_path: string; target_raw: string; target_path: string | null; link_type: string; line_number: number; confidence?: string };\n\nfunction mapVaultLinkRow(row: VaultLinkDbRow): VaultLinkRow {\n return {\n sourcePath: row.source_path,\n targetRaw: row.target_raw,\n targetPath: row.target_path,\n linkType: row.link_type,\n lineNumber: row.line_number,\n confidence: (row.confidence as VaultLinkRow[\"confidence\"]) ?? \"EXTRACTED\",\n };\n}\n\nexport async function replaceLinksForSources(pool: Pool, sourcePaths: string[], links: VaultLinkRow[]): Promise<void> {\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n if (sourcePaths.length > 0) {\n await client.query(\n \"DELETE FROM vault_links WHERE source_path = ANY($1::text[])\",\n [sourcePaths]\n );\n }\n for (let i = 0; i < links.length; i += 500) {\n const batch = links.slice(i, i + 500);\n const values: string[] = [];\n const params: (string | number | null)[] = [];\n let idx = 1;\n for (const l of batch) {\n values.push(`($${idx++}, $${idx++}, $${idx++}, $${idx++}, $${idx++}, $${idx++})`);\n params.push(l.sourcePath, l.targetRaw, l.targetPath, l.linkType, l.lineNumber, l.confidence ?? \"EXTRACTED\");\n }\n await client.query(\n `INSERT INTO vault_links (source_path, target_raw, target_path, link_type, line_number, confidence)\n VALUES ${values.join(\", \")}\n ON CONFLICT (source_path, target_raw, line_number) DO UPDATE SET\n target_path = EXCLUDED.target_path, link_type = EXCLUDED.link_type,\n confidence = EXCLUDED.confidence`,\n params\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport async function getLinksFromSource(pool: Pool, sourcePath: string): Promise<VaultLinkRow[]> {\n const r = await pool.query<VaultLinkDbRow>(\n \"SELECT source_path, target_raw, target_path, link_type, line_number, confidence FROM vault_links WHERE source_path = $1\",\n [sourcePath]\n );\n return r.rows.map(mapVaultLinkRow);\n}\n\nexport async function getLinksToTarget(pool: Pool, targetPath: string): Promise<VaultLinkRow[]> {\n const r = await pool.query<VaultLinkDbRow>(\n \"SELECT source_path, target_raw, target_path, link_type, line_number, confidence FROM vault_links WHERE target_path = $1\",\n [targetPath]\n );\n return r.rows.map(mapVaultLinkRow);\n}\n\nexport async function getVaultLinkGraph(pool: Pool): Promise<Array<{ source_path: string; target_path: string }>> {\n const r = await pool.query<{ source_path: string; target_path: string }>(\n \"SELECT source_path, target_path FROM vault_links WHERE target_path IS NOT NULL\"\n );\n return r.rows;\n}\n\nexport async function getDeadLinks(pool: Pool): Promise<Array<{ sourcePath: string; targetRaw: string }>> {\n const r = await pool.query<{ source_path: string; target_raw: string }>(\n \"SELECT source_path, target_raw FROM vault_links WHERE target_path IS NULL\"\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw }));\n}\n\nexport async function getDeadLinksWithLineNumbers(pool: Pool): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> {\n const r = await pool.query<{ source_path: string; target_raw: string; line_number: number }>(\n \"SELECT 
source_path, target_raw, line_number FROM vault_links WHERE target_path IS NULL\"\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw, lineNumber: row.line_number }));\n}\n\nexport async function getDeadLinksWithPrefix(pool: Pool, prefix: string): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> {\n const r = await pool.query<{ source_path: string; target_raw: string; line_number: number }>(\n \"SELECT source_path, target_raw, line_number FROM vault_links WHERE target_path IS NULL AND source_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw, lineNumber: row.line_number }));\n}\n\nexport async function getDeadLinksAfter(pool: Pool, sinceMs: number): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> {\n const r = await pool.query<{ source_path: string; target_raw: string; line_number: number }>(\n \"SELECT source_path, target_raw, line_number FROM vault_links WHERE target_path IS NULL AND source_path IN (SELECT vault_path FROM vault_files WHERE indexed_at > $1)\",\n [sinceMs]\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw, lineNumber: row.line_number }));\n}\n\nexport async function countVaultLinksWithPrefix(pool: Pool, prefix: string): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*) AS n FROM vault_links WHERE source_path LIKE $1\", [`${prefix}%`]);\n return Number(r.rows[0]?.n ?? 0);\n}\n\nexport async function countVaultLinksAfter(pool: Pool, sinceMs: number): Promise<number> {\n const r = await pool.query<{ n: string }>(\n \"SELECT COUNT(*) AS n FROM vault_links WHERE source_path IN (SELECT vault_path FROM vault_files WHERE indexed_at > $1)\",\n [sinceMs]\n );\n return Number(r.rows[0]?.n ?? 
0);\n}\n\nexport async function getVaultLinksFromPaths(pool: Pool, sourcePaths: string[]): Promise<VaultLinkRow[]> {\n if (sourcePaths.length === 0) return [];\n const placeholders = sourcePaths.map((_, i) => `$${i + 1}`).join(\", \");\n const r = await pool.query<VaultLinkDbRow>(\n `SELECT source_path, target_raw, target_path, link_type, line_number, confidence FROM vault_links WHERE source_path IN (${placeholders}) AND target_path IS NOT NULL`,\n sourcePaths\n );\n return r.rows.map(mapVaultLinkRow);\n}\n\nexport async function getVaultLinkEdges(pool: Pool): Promise<Array<{ source: string; target: string }>> {\n const r = await pool.query<{ source: string; target: string }>(\n \"SELECT DISTINCT source_path AS source, target_path AS target FROM vault_links WHERE target_path IS NOT NULL\"\n );\n return r.rows;\n}\n\nexport async function getVaultLinkEdgesWithPrefix(pool: Pool, prefix: string): Promise<Array<{ source: string; target: string }>> {\n const r = await pool.query<{ source: string; target: string }>(\n \"SELECT DISTINCT source_path AS source, target_path AS target FROM vault_links WHERE target_path IS NOT NULL AND source_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows;\n}\n\nexport async function getVaultLinkEdgesAfter(pool: Pool, sinceMs: number): Promise<Array<{ source: string; target: string }>> {\n const r = await pool.query<{ source: string; target: string }>(\n \"SELECT DISTINCT source_path AS source, target_path AS target FROM vault_links WHERE target_path IS NOT NULL AND source_path IN (SELECT vault_path FROM vault_files WHERE indexed_at > $1)\",\n [sinceMs]\n );\n return r.rows;\n}\n\n// ---------------------------------------------------------------------------\n// Vault health\n// ---------------------------------------------------------------------------\n\ntype VaultHealthDbRow = { vault_path: string; inbound_count: number; outbound_count: number; dead_link_count: number; is_orphan: number; computed_at: string };\n\nfunction mapVaultHealthRow(row: VaultHealthDbRow): VaultHealthRow {\n return {\n vaultPath: row.vault_path,\n inboundCount: row.inbound_count,\n outboundCount: row.outbound_count,\n deadLinkCount: row.dead_link_count,\n isOrphan: row.is_orphan === 1,\n computedAt: Number(row.computed_at),\n };\n}\n\nexport async function upsertVaultHealth(pool: Pool, rows: VaultHealthRow[]): Promise<void> {\n if (rows.length === 0) return;\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const h of rows) {\n await client.query(\n `INSERT INTO vault_health (vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (vault_path) DO UPDATE SET\n inbound_count = EXCLUDED.inbound_count,\n outbound_count = EXCLUDED.outbound_count,\n dead_link_count = EXCLUDED.dead_link_count,\n is_orphan = EXCLUDED.is_orphan,\n computed_at = EXCLUDED.computed_at`,\n [h.vaultPath, h.inboundCount, h.outboundCount, h.deadLinkCount, h.isOrphan ? 1 : 0, h.computedAt]\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport async function getVaultHealth(pool: Pool, vaultPath: string): Promise<VaultHealthRow | null> {\n const r = await pool.query<VaultHealthDbRow>(\n \"SELECT vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at FROM vault_health WHERE vault_path = $1\",\n [vaultPath]\n );\n return r.rows.length === 0 ? 
null : mapVaultHealthRow(r.rows[0]);\n}\n\nexport async function getOrphans(pool: Pool): Promise<VaultHealthRow[]> {\n const r = await pool.query<VaultHealthDbRow>(\n \"SELECT vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at FROM vault_health WHERE is_orphan = 1\"\n );\n return r.rows.map(row => ({ ...mapVaultHealthRow(row), isOrphan: true }));\n}\n\nexport async function getOrphansWithPrefix(pool: Pool, prefix: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_health WHERE is_orphan = 1 AND vault_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getOrphansAfter(pool: Pool, sinceMs: number): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vh.vault_path FROM vault_health vh JOIN vault_files vf ON vh.vault_path = vf.vault_path WHERE vh.is_orphan = 1 AND vf.indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getLowConnectivity(pool: Pool): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_health WHERE inbound_count + outbound_count <= 1\"\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getLowConnectivityWithPrefix(pool: Pool, prefix: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_health WHERE inbound_count + outbound_count <= 1 AND vault_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getLowConnectivityAfter(pool: Pool, sinceMs: number): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vh.vault_path FROM vault_health vh JOIN vault_files vf ON vh.vault_path = vf.vault_path WHERE vh.inbound_count + vh.outbound_count <= 1 AND vf.indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(row => row.vault_path);\n}\n\n// ---------------------------------------------------------------------------\n// Vault name index\n// ---------------------------------------------------------------------------\n\nexport async function upsertNameIndex(pool: Pool, entries: VaultNameEntry[]): Promise<void> {\n if (entries.length === 0) return;\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const e of entries) {\n await client.query(\n `INSERT INTO vault_name_index (name, vault_path)\n VALUES ($1, $2) ON CONFLICT (name, vault_path) DO NOTHING`,\n [e.name, e.vaultPath]\n );\n }\n await client.query(\"COMMIT\");\n } catch (e_) {\n await client.query(\"ROLLBACK\");\n throw e_;\n } finally {\n client.release();\n }\n}\n\nexport async function replaceNameIndex(pool: Pool, entries: VaultNameEntry[]): Promise<void> {\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n await client.query(\"DELETE FROM vault_name_index\");\n for (let i = 0; i < entries.length; i += 500) {\n const batch = entries.slice(i, i + 500);\n const values: string[] = [];\n const params: string[] = [];\n let idx = 1;\n for (const e of batch) {\n values.push(`($${idx++}, $${idx++})`);\n params.push(e.name, e.vaultPath);\n }\n await client.query(\n `INSERT INTO vault_name_index (name, vault_path) VALUES ${values.join(\", \")}`,\n params\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport 
async function resolveVaultName(pool: Pool, name: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_name_index WHERE name = $1\",\n [name]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function searchVaultNameIndex(pool: Pool, query: string, limit = 100): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT DISTINCT vault_path FROM vault_name_index WHERE lower(name) LIKE lower($1) LIMIT $2\",\n [`%${query}%`, limit]\n );\n return r.rows.map(row => row.vault_path);\n}\n","/**\n * PostgresBackend — implements StorageBackend using PostgreSQL + pgvector.\n *\n * Vector similarity: pgvector's <=> cosine distance operator\n * Full-text search: PostgreSQL tsvector/tsquery (replaces SQLite FTS5)\n * Connection pooling: node-postgres Pool\n *\n * Schema is auto-initialized on first connection if tables don't exist.\n * Per-user database isolation: each macOS user gets their own database (pai_<username>).\n */\n\nimport pg from \"pg\";\nimport type { Pool, PoolClient } from \"pg\";\nimport { readFileSync } from \"node:fs\";\nimport { join, dirname } from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\nimport type {\n StorageBackend, ChunkRow, FileRow, FederationStats,\n VaultFileRow, VaultAliasRow, VaultLinkRow, VaultHealthRow, VaultNameEntry,\n} from \"../interface.js\";\nimport type { SearchResult, SearchOptions } from \"../../memory/search.js\";\nimport type { PostgresConfig } from \"./config.js\";\nimport { bufferToVector } from \"./helpers.js\";\nimport { searchKeyword, searchSemantic } from \"./search.js\";\nimport * as vault from \"./vault.js\";\n\nconst { Pool: PgPool } = pg;\n\nexport class PostgresBackend implements StorageBackend {\n readonly backendType = \"postgres\" as const;\n\n private pool: Pool;\n\n /**\n * Ensure the per-user database exists and has the required schema.\n * Connects to the default 'postgres' database to CREATE DATABASE if needed,\n * then connects to the target database to apply init.sql schema.\n * Safe to call multiple times (fully idempotent).\n */\n static async ensureDatabase(config: PostgresConfig): Promise<void> {\n const connStr =\n config.connectionString ??\n `postgresql://${config.user ?? \"pai\"}:${config.password ?? \"pai\"}@${config.host ?? \"localhost\"}:${config.port ?? 5432}/${config.database ?? 
\"pai\"}`;\n const url = new URL(connStr);\n const targetDb = url.pathname.slice(1);\n\n const adminUrl = new URL(connStr);\n adminUrl.pathname = \"/postgres\";\n const adminPool = new PgPool({\n connectionString: adminUrl.toString(),\n max: 1,\n connectionTimeoutMillis: 5000,\n });\n\n try {\n const check = await adminPool.query(\n \"SELECT 1 FROM pg_database WHERE datname = $1\",\n [targetDb]\n );\n if (check.rowCount === 0) {\n await adminPool.query(`CREATE DATABASE \"${targetDb}\"`);\n process.stderr.write(`[pai-postgres] Created database: ${targetDb}\\n`);\n }\n } finally {\n await adminPool.end();\n }\n\n const targetPool = new PgPool({\n connectionString: connStr,\n max: 1,\n connectionTimeoutMillis: 5000,\n });\n\n try {\n const tableCheck = await targetPool.query(\n \"SELECT 1 FROM information_schema.tables WHERE table_name = 'pai_chunks'\"\n );\n if (tableCheck.rowCount === 0) {\n const __dirname = dirname(fileURLToPath(import.meta.url));\n const initSqlPath = join(__dirname, \"../../docker/init.sql\");\n let initSql: string;\n try {\n initSql = readFileSync(initSqlPath, \"utf-8\");\n } catch {\n const altPath = join(__dirname, \"../docker/init.sql\");\n initSql = readFileSync(altPath, \"utf-8\");\n }\n await targetPool.query(initSql);\n process.stderr.write(`[pai-postgres] Applied schema to database: ${targetDb}\\n`);\n }\n\n // Run incremental migrations for existing databases\n await PostgresBackend.runMigrations(targetPool);\n } finally {\n await targetPool.end();\n }\n }\n\n /**\n * Run incremental migrations for existing databases.\n * Each migration is idempotent — safe to run on databases that already have the change.\n */\n private static async runMigrations(pool: Pool): Promise<void> {\n // Migration: add confidence column to vault_links if it does not exist\n const colCheck = await pool.query(\n `SELECT 1 FROM information_schema.columns\n WHERE table_name = 'vault_links' AND column_name = 'confidence'`\n );\n if (colCheck.rowCount === 0) {\n await pool.query(\n \"ALTER TABLE vault_links ADD COLUMN confidence TEXT NOT NULL DEFAULT 'EXTRACTED'\"\n );\n process.stderr.write(\"[pai-postgres] Migration: added confidence column to vault_links\\n\");\n }\n\n // Migration: create kg_triples table if it does not exist\n const kgCheck = await pool.query(\n `SELECT 1 FROM information_schema.tables WHERE table_name = 'kg_triples'`\n );\n if (kgCheck.rowCount === 0) {\n await pool.query(`\n CREATE TABLE kg_triples (\n id SERIAL PRIMARY KEY,\n subject TEXT NOT NULL,\n predicate TEXT NOT NULL,\n object TEXT NOT NULL,\n project_id INTEGER,\n source_session TEXT,\n valid_from TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\n valid_to TIMESTAMP,\n confidence TEXT DEFAULT 'EXTRACTED',\n created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP\n )\n `);\n await pool.query(`CREATE INDEX idx_kg_subject ON kg_triples(subject)`);\n await pool.query(`CREATE INDEX idx_kg_predicate ON kg_triples(predicate)`);\n await pool.query(`CREATE INDEX idx_kg_object ON kg_triples(object)`);\n await pool.query(`CREATE INDEX idx_kg_valid ON kg_triples(valid_from, valid_to)`);\n process.stderr.write(\"[pai-postgres] Migration: created kg_triples table\\n\");\n }\n }\n\n constructor(config: PostgresConfig) {\n const connStr =\n config.connectionString ??\n `postgresql://${config.user ?? \"pai\"}:${config.password ?? \"pai\"}@${config.host ?? \"localhost\"}:${config.port ?? 5432}/${config.database ?? \"pai\"}`;\n\n this.pool = new PgPool({\n connectionString: connStr,\n max: config.maxConnections ?? 
5,\n connectionTimeoutMillis: config.connectionTimeoutMs ?? 5000,\n idleTimeoutMillis: 30_000,\n });\n\n this.pool.on(\"error\", (err) => {\n process.stderr.write(`[pai-postgres] Pool error: ${err.message}\\n`);\n });\n }\n\n // -------------------------------------------------------------------------\n // Lifecycle\n // -------------------------------------------------------------------------\n\n async close(): Promise<void> {\n await this.pool.end();\n }\n\n /**\n * Expose the underlying pg.Pool for callers that need direct query access\n * (e.g. the daemon's observation IPC methods).\n */\n getPool(): Pool {\n return this.pool;\n }\n\n async getStats(): Promise<FederationStats> {\n const client = await this.pool.connect();\n try {\n const filesResult = await client.query<{ n: string }>(\n \"SELECT COUNT(*)::text AS n FROM pai_files\"\n );\n const chunksResult = await client.query<{ n: string }>(\n \"SELECT COUNT(*)::text AS n FROM pai_chunks\"\n );\n return {\n files: parseInt(filesResult.rows[0]?.n ?? \"0\", 10),\n chunks: parseInt(chunksResult.rows[0]?.n ?? \"0\", 10),\n };\n } finally {\n client.release();\n }\n }\n\n /**\n * Test the connection by running a trivial query.\n * Returns null on success, error message on failure.\n */\n async testConnection(): Promise<string | null> {\n let client: PoolClient | null = null;\n try {\n client = await this.pool.connect();\n await client.query(\"SELECT 1\");\n return null;\n } catch (e) {\n return e instanceof Error ? e.message : String(e);\n } finally {\n client?.release();\n }\n }\n\n // -------------------------------------------------------------------------\n // File tracking\n // -------------------------------------------------------------------------\n\n async getFileHash(projectId: number, path: string): Promise<string | undefined> {\n const result = await this.pool.query<{ hash: string }>(\n \"SELECT hash FROM pai_files WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n return result.rows[0]?.hash;\n }\n\n async upsertFile(file: FileRow): Promise<void> {\n await this.pool.query(\n `INSERT INTO pai_files (project_id, path, source, tier, hash, mtime, size)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n ON CONFLICT (project_id, path) DO UPDATE SET\n source = EXCLUDED.source,\n tier = EXCLUDED.tier,\n hash = EXCLUDED.hash,\n mtime = EXCLUDED.mtime,\n size = EXCLUDED.size`,\n [file.projectId, file.path, file.source, file.tier, file.hash, file.mtime, file.size]\n );\n }\n\n // -------------------------------------------------------------------------\n // Chunk management\n // -------------------------------------------------------------------------\n\n async getChunkIds(projectId: number, path: string): Promise<string[]> {\n const result = await this.pool.query<{ id: string }>(\n \"SELECT id FROM pai_chunks WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n return result.rows.map((r) => r.id);\n }\n\n async deleteChunksForFile(projectId: number, path: string): Promise<void> {\n await this.pool.query(\n \"DELETE FROM pai_chunks WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n }\n\n async insertChunks(chunks: ChunkRow[]): Promise<void> {\n if (chunks.length === 0) return;\n\n const client = await this.pool.connect();\n try {\n await client.query(\"BEGIN\");\n\n for (const c of chunks) {\n const safeText = c.text.replace(/\\0/g, \"\");\n\n await client.query(\n `INSERT INTO pai_chunks\n (id, project_id, source, tier, path, start_line, end_line, hash, text, updated_at, fts_vector)\n VALUES\n ($1, $2, 
$3, $4, $5, $6, $7, $8, $9, $10,\n to_tsvector('simple', $9))\n ON CONFLICT (id) DO UPDATE SET\n project_id = EXCLUDED.project_id,\n source = EXCLUDED.source,\n tier = EXCLUDED.tier,\n path = EXCLUDED.path,\n start_line = EXCLUDED.start_line,\n end_line = EXCLUDED.end_line,\n hash = EXCLUDED.hash,\n text = EXCLUDED.text,\n updated_at = EXCLUDED.updated_at,\n fts_vector = EXCLUDED.fts_vector`,\n [\n c.id, c.projectId, c.source, c.tier, c.path,\n c.startLine, c.endLine, c.hash, safeText, c.updatedAt,\n ]\n );\n }\n\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n }\n\n async getDistinctChunkPaths(projectId: number): Promise<string[]> {\n const result = await this.pool.query<{ path: string }>(\n \"SELECT DISTINCT path FROM pai_chunks WHERE project_id = $1\",\n [projectId]\n );\n return result.rows.map((r) => r.path);\n }\n\n async deletePaths(projectId: number, paths: string[]): Promise<void> {\n if (paths.length === 0) return;\n const client = await this.pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const path of paths) {\n await client.query(\"DELETE FROM pai_chunks WHERE project_id = $1 AND path = $2\", [projectId, path]);\n await client.query(\"DELETE FROM pai_files WHERE project_id = $1 AND path = $2\", [projectId, path]);\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n }\n\n async getUnembeddedChunkIds(projectId?: number): Promise<Array<{ id: string; text: string; project_id: number; path: string }>> {\n if (projectId !== undefined) {\n const result = await this.pool.query<{ id: string; text: string; project_id: number; path: string }>(\n \"SELECT id, text, project_id, path FROM pai_chunks WHERE embedding IS NULL AND project_id = $1 ORDER BY id\",\n [projectId]\n );\n return result.rows;\n }\n const result = await this.pool.query<{ id: string; text: string; project_id: number; path: string }>(\n \"SELECT id, text, project_id, path FROM pai_chunks WHERE embedding IS NULL ORDER BY id\"\n );\n return result.rows;\n }\n\n async updateEmbedding(chunkId: string, embedding: Buffer): Promise<void> {\n const vec = bufferToVector(embedding);\n const vecStr = \"[\" + vec.join(\",\") + \"]\";\n await this.pool.query(\n \"UPDATE pai_chunks SET embedding = $1::vector WHERE id = $2\",\n [vecStr, chunkId]\n );\n }\n\n // -------------------------------------------------------------------------\n // Search\n // -------------------------------------------------------------------------\n\n async searchKeyword(query: string, opts?: SearchOptions): Promise<SearchResult[]> {\n return searchKeyword(this.pool, query, opts);\n }\n\n async searchSemantic(queryEmbedding: Float32Array, opts?: SearchOptions): Promise<SearchResult[]> {\n return searchSemantic(this.pool, queryEmbedding, opts);\n }\n\n // -------------------------------------------------------------------------\n // Vault operations — delegated to vault.ts\n // -------------------------------------------------------------------------\n\n async upsertVaultFile(file: VaultFileRow): Promise<void> { return vault.upsertVaultFile(this.pool, file); }\n async deleteVaultFile(vaultPath: string): Promise<void> { return vault.deleteVaultFile(this.pool, vaultPath); }\n async getVaultFile(vaultPath: string): Promise<VaultFileRow | null> { return vault.getVaultFile(this.pool, vaultPath); }\n async getVaultFileByInode(inode: number, device: number): Promise<VaultFileRow 
| null> { return vault.getVaultFileByInode(this.pool, inode, device); }\n async getAllVaultFiles(): Promise<VaultFileRow[]> { return vault.getAllVaultFiles(this.pool); }\n async getRecentVaultFiles(sinceMs: number): Promise<VaultFileRow[]> { return vault.getRecentVaultFiles(this.pool, sinceMs); }\n async countVaultFiles(): Promise<number> { return vault.countVaultFiles(this.pool); }\n async countVaultFilesWithPrefix(prefix: string): Promise<number> { return vault.countVaultFilesWithPrefix(this.pool, prefix); }\n async countVaultFilesAfter(sinceMs: number): Promise<number> { return vault.countVaultFilesAfter(this.pool, sinceMs); }\n async getVaultFilesByPaths(paths: string[]): Promise<VaultFileRow[]> { return vault.getVaultFilesByPaths(this.pool, paths); }\n async getVaultFilesByPathsAfter(paths: string[], sinceMs: number): Promise<VaultFileRow[]> { return vault.getVaultFilesByPathsAfter(this.pool, paths, sinceMs); }\n async getAllVaultFilePaths(): Promise<string[]> { return vault.getAllVaultFilePaths(this.pool); }\n async getVaultFilePathsWithPrefix(prefix: string): Promise<string[]> { return vault.getVaultFilePathsWithPrefix(this.pool, prefix); }\n async getVaultFilePathsAfter(sinceMs: number): Promise<string[]> { return vault.getVaultFilePathsAfter(this.pool, sinceMs); }\n\n async upsertVaultAliases(aliases: VaultAliasRow[]): Promise<void> { return vault.upsertVaultAliases(this.pool, aliases); }\n async deleteVaultAliases(canonicalPath: string): Promise<void> { return vault.deleteVaultAliases(this.pool, canonicalPath); }\n async getVaultAlias(vaultPath: string): Promise<{ canonicalPath: string } | null> { return vault.getVaultAlias(this.pool, vaultPath); }\n\n async replaceLinksForSources(sourcePaths: string[], links: VaultLinkRow[]): Promise<void> { return vault.replaceLinksForSources(this.pool, sourcePaths, links); }\n async getLinksFromSource(sourcePath: string): Promise<VaultLinkRow[]> { return vault.getLinksFromSource(this.pool, sourcePath); }\n async getLinksToTarget(targetPath: string): Promise<VaultLinkRow[]> { return vault.getLinksToTarget(this.pool, targetPath); }\n async getVaultLinkGraph(): Promise<Array<{ source_path: string; target_path: string }>> { return vault.getVaultLinkGraph(this.pool); }\n async getDeadLinks(): Promise<Array<{ sourcePath: string; targetRaw: string }>> { return vault.getDeadLinks(this.pool); }\n async getDeadLinksWithLineNumbers(): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> { return vault.getDeadLinksWithLineNumbers(this.pool); }\n async getDeadLinksWithPrefix(prefix: string): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> { return vault.getDeadLinksWithPrefix(this.pool, prefix); }\n async getDeadLinksAfter(sinceMs: number): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> { return vault.getDeadLinksAfter(this.pool, sinceMs); }\n async countVaultLinksWithPrefix(prefix: string): Promise<number> { return vault.countVaultLinksWithPrefix(this.pool, prefix); }\n async countVaultLinksAfter(sinceMs: number): Promise<number> { return vault.countVaultLinksAfter(this.pool, sinceMs); }\n async getVaultLinksFromPaths(sourcePaths: string[]): Promise<VaultLinkRow[]> { return vault.getVaultLinksFromPaths(this.pool, sourcePaths); }\n async getVaultLinkEdges(): Promise<Array<{ source: string; target: string }>> { return vault.getVaultLinkEdges(this.pool); }\n async getVaultLinkEdgesWithPrefix(prefix: string): Promise<Array<{ source: string; target: string }>> { return 
vault.getVaultLinkEdgesWithPrefix(this.pool, prefix); }\n async getVaultLinkEdgesAfter(sinceMs: number): Promise<Array<{ source: string; target: string }>> { return vault.getVaultLinkEdgesAfter(this.pool, sinceMs); }\n\n async upsertVaultHealth(rows: VaultHealthRow[]): Promise<void> { return vault.upsertVaultHealth(this.pool, rows); }\n async getVaultHealth(vaultPath: string): Promise<VaultHealthRow | null> { return vault.getVaultHealth(this.pool, vaultPath); }\n async getOrphans(): Promise<VaultHealthRow[]> { return vault.getOrphans(this.pool); }\n async getOrphansWithPrefix(prefix: string): Promise<string[]> { return vault.getOrphansWithPrefix(this.pool, prefix); }\n async getOrphansAfter(sinceMs: number): Promise<string[]> { return vault.getOrphansAfter(this.pool, sinceMs); }\n async getLowConnectivity(): Promise<string[]> { return vault.getLowConnectivity(this.pool); }\n async getLowConnectivityWithPrefix(prefix: string): Promise<string[]> { return vault.getLowConnectivityWithPrefix(this.pool, prefix); }\n async getLowConnectivityAfter(sinceMs: number): Promise<string[]> { return vault.getLowConnectivityAfter(this.pool, sinceMs); }\n\n async upsertNameIndex(entries: VaultNameEntry[]): Promise<void> { return vault.upsertNameIndex(this.pool, entries); }\n async replaceNameIndex(entries: VaultNameEntry[]): Promise<void> { return vault.replaceNameIndex(this.pool, entries); }\n async resolveVaultName(name: string): Promise<string[]> { return vault.resolveVaultName(this.pool, name); }\n async searchVaultNameIndex(query: string, limit?: number): Promise<string[]> { return vault.searchVaultNameIndex(this.pool, query, limit); }\n\n // Legacy memory_chunks methods (used by graph and zettelkasten modules)\n async getChunksWithEmbeddings(projectId: number, limit: number): Promise<Array<{ path: string; text: string; embedding: Buffer }>> {\n const r = await this.pool.query<{ path: string; text: string; embedding: Buffer }>(\n `SELECT path, text, embedding FROM memory_chunks WHERE project_id = $1 AND embedding IS NOT NULL ORDER BY path, start_line LIMIT $2`,\n [projectId, limit]\n );\n return r.rows;\n }\n\n async getChunksForPath(projectId: number, path: string, limit = 20): Promise<Array<{ text: string; embedding: Buffer | null }>> {\n const r = await this.pool.query<{ text: string; embedding: Buffer | null }>(\n `SELECT text, embedding FROM memory_chunks WHERE project_id = $1 AND path = $2 AND embedding IS NOT NULL ORDER BY start_line LIMIT $3`,\n [projectId, path, limit]\n );\n return r.rows;\n }\n\n async searchChunksByText(projectId: number, query: string, limit: number): Promise<Array<{ path: string; text: string }>> {\n const r = await this.pool.query<{ path: string; text: string }>(\n `SELECT DISTINCT path, text FROM memory_chunks WHERE project_id = $1 AND lower(text) LIKE lower($2) LIMIT $3`,\n [projectId, `%${query}%`, limit]\n );\n return r.rows;\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;AASA,SAAgB,eAAe,KAAuB;CACpD,MAAM,SAAmB,EAAE;AAC3B,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK,EACnC,QAAO,KAAK,IAAI,YAAY,EAAE,CAAC;AAEjC,QAAO;;;;;;;;;;;;;AAcT,SAAgB,eAAe,OAAuB;CACpD,MAAM,SAAS,MACZ,aAAa,CACb,MAAM,cAAc,CACpB,OAAO,QAAQ,CACf,QAAQ,MAAM,EAAE,UAAU,EAAE,CAC5B,QAAQ,MAAM,CAAC,WAAW,IAAI,EAAE,CAAC,CAEjC,KAAK,MAAM,EAAE,QAAQ,MAAM,KAAK,CAAC,QAAQ,aAAa,GAAG,CAAC,CAC1D,OAAO,QAAQ;AAElB,KAAI,OAAO,WAAW,EAEpB,QADY,MAAM,QAAQ,eAAe,IAAI,CAAC,MAAM,CAAC,MAAM,MAAM,CAAC,OAAO,QAAQ,CAAC,KAAK,MAAM,IAC/E;AAGhB,QAAO,OAAO,KAAK,MAAM;;;;;;;;AChC3B,eAAsB,cACpB,MACA,OACA,MACyB;CACzB,MAAM,aAAa,MAAM,cAAc;CAEvC,MAAM,UAAU,eAAe,MAAM;AACrC,KAAI,CAAC,QAAS,QAAO,EAAE;CAEvB,MAAM,aAAuB,CAAC,yCAAyC;CACvE,MAAM,SAA8B,CAAC,QAAQ;CAC7C,IAAI,WAAW;AAEf,KAAI,MAAM,cAAc,KAAK,WAAW,SAAS,GAAG;EAClD,MAAM,eAAe,KAAK,WAAW,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AAC3E,aAAW,KAAK,kBAAkB,aAAa,GAAG;AAClD,SAAO,KAAK,GAAG,KAAK,WAAW;;AAGjC,KAAI,MAAM,WAAW,KAAK,QAAQ,SAAS,GAAG;EAC5C,MAAM,eAAe,KAAK,QAAQ,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACxE,aAAW,KAAK,cAAc,aAAa,GAAG;AAC9C,SAAO,KAAK,GAAG,KAAK,QAAQ;;AAG9B,KAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;EACxC,MAAM,eAAe,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACtE,aAAW,KAAK,YAAY,aAAa,GAAG;AAC5C,SAAO,KAAK,GAAG,KAAK,MAAM;;AAG5B,QAAO,KAAK,WAAW;CACvB,MAAM,aAAa,IAAI;CAEvB,MAAM,MAAM;;;;;;;;;;;YAWF,WAAW,KAAK,QAAQ,CAAC;;YAEzB,WAAW;;AAGrB,KAAI;AAYF,UAXe,MAAM,KAAK,MASvB,KAAK,OAAO,EAED,KAAK,KAAK,SAAS;GAC/B,WAAW,IAAI;GACf,MAAM,IAAI;GACV,WAAW,IAAI;GACf,SAAS,IAAI;GACb,SAAS,IAAI;GACb,OAAO,IAAI;GACX,MAAM,IAAI;GACV,QAAQ,IAAI;GACb,EAAE;UACI,GAAG;AACV,UAAQ,OAAO,MAAM,uCAAuC,EAAE,IAAI;AAClE,SAAO,EAAE;;;;;;AAOb,eAAsB,eACpB,MACA,gBACA,MACyB;CACzB,MAAM,aAAa,MAAM,cAAc;CAEvC,MAAM,aAAuB,CAAC,wBAAwB;CACtD,MAAM,SAA8B,EAAE;CACtC,IAAI,WAAW;CAEf,MAAM,SAAS,MAAM,MAAM,KAAK,eAAe,CAAC,KAAK,IAAI,GAAG;AAC5D,QAAO,KAAK,OAAO;CACnB,MAAM,WAAW,IAAI;AAErB,KAAI,MAAM,cAAc,KAAK,WAAW,SAAS,GAAG;EAClD,MAAM,eAAe,KAAK,WAAW,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AAC3E,aAAW,KAAK,kBAAkB,aAAa,GAAG;AAClD,SAAO,KAAK,GAAG,KAAK,WAAW;;AAGjC,KAAI,MAAM,WAAW,KAAK,QAAQ,SAAS,GAAG;EAC5C,MAAM,eAAe,KAAK,QAAQ,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACxE,aAAW,KAAK,cAAc,aAAa,GAAG;AAC9C,SAAO,KAAK,GAAG,KAAK,QAAQ;;AAG9B,KAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;EACxC,MAAM,eAAe,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACtE,aAAW,KAAK,YAAY,aAAa,GAAG;AAC5C,SAAO,KAAK,GAAG,KAAK,MAAM;;AAG5B,QAAO,KAAK,WAAW;CACvB,MAAM,aAAa,IAAI;CAGvB,MAAM,MAAM;;;;;;;;;2BASa,SAAS;;YAExB,WAAW,KAAK,QAAQ,CAAC;6BACR,SAAS;YAC1B,WAAW;;AAGrB,KAAI;EACF,MAAM,SAAS,MAAM,KAAK,MASvB,KAAK,OAAO;EAEf,MAAM,WAAW,MAAM,YAAY;AAEnC,SAAO,OAAO,KACX,KAAK,SAAS;GACb,WAAW,IAAI;GACf,MAAM,IAAI;GACV,WAAW,IAAI;GACf,SAAS,IAAI;GACb,SAAS,IAAI;GACb,OAAO,IAAI;GACX,MAAM,IAAI;GACV,QAAQ,IAAI;GACb,EAAE,CACF,QAAQ,MAAM,EAAE,SAAS,SAAS;UAC9B,GAAG;AACV,UAAQ,OAAO,MAAM,wCAAwC,EAAE,IAAI;AACnE,SAAO,EAAE;;;;;;ACjKb,eAAsB,gBAAgB,MAAY,MAAmC;AACnF,OAAM,KAAK,MACT;;;;;0CAMA;EAAC,KAAK;EAAW,KAAK;EAAO,KAAK;EAAQ,KAAK;EAAM,KAAK;EAAO,KAAK;EAAU,CACjF;;AAGH,eAAsB,gBAAgB,MAAY,WAAkC;CAClF,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,QAAM,OAAO,MAAM,kDAAkD,CAAC,UAAU,CAAC;AACjF,QAAM,OAAO,MAAM,kDAAkD,CAAC,UAAU,CAAC;AACjF,QAAM,OAAO,MAAM,sDAAsD,CAAC,UAAU,CAAC;AACrF,QAAM,OAAO,MAAM,0EAA0E,CAAC,UAAU,CAAC;AACzG,QAAM,OAAO,MAAM,iDAAiD,CAAC,UAAU,CAAC;AAChF,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAMpB,SAAS,gBAAgB,KAAmC;AAC1D,QAAO;EACL,WAAW,IAAI;EACf,OAAO,OAAO,IAAI,MAAM;EACxB,QAAQ,OAAO,IAAI,OAAO;EAC1B,MAAM,IAAI;EACV,OAAO,IAAI;EACX,WAAW,OAAO,IAAI,WAAW;EAClC;;AAGH,eAAsB,aAAa,MAAY,WAAiD;CAC9F,MAAM,IAAI,MAAM,KAAK,MACnB,oGACA,CAAC,UAAU,CACZ;AAC
D,QAAO,EAAE,KAAK,WAAW,IAAI,OAAO,gBAAgB,EAAE,KAAK,GAAG;;AAGhE,eAAsB,oBAAoB,MAAY,OAAe,QAA8C;CACjH,MAAM,IAAI,MAAM,KAAK,MACnB,uHACA,CAAC,OAAO,OAAO,CAChB;AACD,QAAO,EAAE,KAAK,WAAW,IAAI,OAAO,gBAAgB,EAAE,KAAK,GAAG;;AAGhE,eAAsB,iBAAiB,MAAqC;AAE1E,SADU,MAAM,KAAK,MAAsB,6EAA6E,EAC/G,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,oBAAoB,MAAY,SAA0C;AAK9F,SAJU,MAAM,KAAK,MACnB,oGACA,CAAC,QAAQ,CACV,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,gBAAgB,MAA6B;CACjE,MAAM,IAAI,MAAM,KAAK,MAAqB,8CAA8C;AACxF,QAAO,SAAS,EAAE,KAAK,IAAI,KAAK,KAAK,GAAG;;AAG1C,eAAsB,0BAA0B,MAAY,QAAiC;CAC3F,MAAM,IAAI,MAAM,KAAK,MAAqB,kEAAkE,CAAC,GAAG,OAAO,GAAG,CAAC;AAC3H,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,qBAAqB,MAAY,SAAkC;CACvF,MAAM,IAAI,MAAM,KAAK,MAAqB,+DAA+D,CAAC,QAAQ,CAAC;AACnH,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,qBAAqB,MAAY,OAA0C;AAC/F,KAAI,MAAM,WAAW,EAAG,QAAO,EAAE;CACjC,MAAM,eAAe,MAAM,KAAK,GAAG,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK;AAKhE,SAJU,MAAM,KAAK,MACnB,mGAAmG,aAAa,IAChH,MACD,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,0BAA0B,MAAY,OAAiB,SAA0C;AACrH,KAAI,MAAM,WAAW,EAAG,QAAO,EAAE;CACjC,MAAM,eAAe,MAAM,KAAK,GAAG,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK;AAKhE,SAJU,MAAM,KAAK,MACnB,mGAAmG,aAAa,uBAAuB,MAAM,SAAS,EAAE,2BACxJ,CAAC,GAAG,OAAO,QAAQ,CACpB,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,qBAAqB,MAA+B;AAExE,SADU,MAAM,KAAK,MAA8B,qCAAqC,EAC/E,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,4BAA4B,MAAY,QAAmC;AAK/F,SAJU,MAAM,KAAK,MACnB,+DACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,uBAAuB,MAAY,SAAoC;AAK3F,SAJU,MAAM,KAAK,MACnB,4DACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAO1C,eAAsB,mBAAmB,MAAY,SAAyC;AAC5F,KAAI,QAAQ,WAAW,EAAG;CAC1B,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,OAAK,MAAM,KAAK,QACd,OAAM,OAAO,MACX;;;;8DAKA;GAAC,EAAE;GAAW,EAAE;GAAe,EAAE;GAAO,EAAE;GAAO,CAClD;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,mBAAmB,MAAY,eAAsC;AACzF,OAAM,KAAK,MAAM,uDAAuD,CAAC,cAAc,CAAC;;AAG1F,eAAsB,cAAc,MAAY,WAA8D;CAC5G,MAAM,IAAI,MAAM,KAAK,MACnB,kEACA,CAAC,UAAU,CACZ;AACD,QAAO,EAAE,KAAK,SAAS,IAAI,EAAE,eAAe,EAAE,KAAK,GAAG,gBAAgB,GAAG;;AAS3E,SAAS,gBAAgB,KAAmC;AAC1D,QAAO;EACL,YAAY,IAAI;EAChB,WAAW,IAAI;EACf,YAAY,IAAI;EAChB,UAAU,IAAI;EACd,YAAY,IAAI;EAChB,YAAa,IAAI,cAA6C;EAC/D;;AAGH,eAAsB,uBAAuB,MAAY,aAAuB,OAAsC;CACpH,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,MAAI,YAAY,SAAS,EACvB,OAAM,OAAO,MACX,+DACA,CAAC,YAAY,CACd;AAEH,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,KAAK;GAC1C,MAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,IAAI;GACrC,MAAM,SAAmB,EAAE;GAC3B,MAAM,SAAqC,EAAE;GAC7C,IAAI,MAAM;AACV,QAAK,MAAM,KAAK,OAAO;AACrB,WAAO,KAAK,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,GAAG;AACjF,WAAO,KAAK,EAAE,YAAY,EAAE,WAAW,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,EAAE,cAAc,YAAY;;AAE7G,SAAM,OAAO,MACX;kBACU,OAAO,KAAK,KAAK,CAAC;;;8CAI5B,OACD;;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,mBAAmB,MAAY,YAA6C;AAKhG,SAJU,MAAM,KAAK,MACnB,2HACA,CAAC,WAAW,CACb,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,iBAAiB,MAAY,YAA6C;AAK9F,SAJU,MAAM,KAAK,MACnB,2HACA,CAAC,WAAW,CACb,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,kBAAkB,MAA0E;AAIhH,SAHU,MAAM,KAAK,MACnB,iFACD,EACQ;;AAGX,eAAsB,aAAa,MAAuE;AAIxG,SAHU,MAAM,KAAK,MACnB,4EACD,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,EAAE;;AAGxF,eAAsB,4BAA4B,MAA2F;AAI3I,SAHU,MAAM,KAAK,MACnB,yFACD,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,YAAY,IAAI;EAAa,EAAE;;AAGrH,eAAsB,uBAAuB,MAAY,QAA+F;AAKtJ,SAJU,MAAM,KAAK,MACnB,kHACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,YAAY,IAAI;E
AAa,EAAE;;AAGrH,eAAsB,kBAAkB,MAAY,SAAgG;AAKlJ,SAJU,MAAM,KAAK,MACnB,wKACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,YAAY,IAAI;EAAa,EAAE;;AAGrH,eAAsB,0BAA0B,MAAY,QAAiC;CAC3F,MAAM,IAAI,MAAM,KAAK,MAAqB,mEAAmE,CAAC,GAAG,OAAO,GAAG,CAAC;AAC5H,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,qBAAqB,MAAY,SAAkC;CACvF,MAAM,IAAI,MAAM,KAAK,MACnB,yHACA,CAAC,QAAQ,CACV;AACD,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,uBAAuB,MAAY,aAAgD;AACvG,KAAI,YAAY,WAAW,EAAG,QAAO,EAAE;CACvC,MAAM,eAAe,YAAY,KAAK,GAAG,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK;AAKtE,SAJU,MAAM,KAAK,MACnB,0HAA0H,aAAa,gCACvI,YACD,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,kBAAkB,MAAgE;AAItG,SAHU,MAAM,KAAK,MACnB,8GACD,EACQ;;AAGX,eAAsB,4BAA4B,MAAY,QAAoE;AAKhI,SAJU,MAAM,KAAK,MACnB,uIACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ;;AAGX,eAAsB,uBAAuB,MAAY,SAAqE;AAK5H,SAJU,MAAM,KAAK,MACnB,6LACA,CAAC,QAAQ,CACV,EACQ;;AASX,SAAS,kBAAkB,KAAuC;AAChE,QAAO;EACL,WAAW,IAAI;EACf,cAAc,IAAI;EAClB,eAAe,IAAI;EACnB,eAAe,IAAI;EACnB,UAAU,IAAI,cAAc;EAC5B,YAAY,OAAO,IAAI,YAAY;EACpC;;AAGH,eAAsB,kBAAkB,MAAY,MAAuC;AACzF,KAAI,KAAK,WAAW,EAAG;CACvB,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,OAAK,MAAM,KAAK,KACd,OAAM,OAAO,MACX;;;;;;;gDAQA;GAAC,EAAE;GAAW,EAAE;GAAc,EAAE;GAAe,EAAE;GAAe,EAAE,WAAW,IAAI;GAAG,EAAE;GAAW,CAClG;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,eAAe,MAAY,WAAmD;CAClG,MAAM,IAAI,MAAM,KAAK,MACnB,qIACA,CAAC,UAAU,CACZ;AACD,QAAO,EAAE,KAAK,WAAW,IAAI,OAAO,kBAAkB,EAAE,KAAK,GAAG;;AAGlE,eAAsB,WAAW,MAAuC;AAItE,SAHU,MAAM,KAAK,MACnB,kIACD,EACQ,KAAK,KAAI,SAAQ;EAAE,GAAG,kBAAkB,IAAI;EAAE,UAAU;EAAM,EAAE;;AAG3E,eAAsB,qBAAqB,MAAY,QAAmC;AAKxF,SAJU,MAAM,KAAK,MACnB,kFACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,gBAAgB,MAAY,SAAoC;AAKpF,SAJU,MAAM,KAAK,MACnB,gJACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,mBAAmB,MAA+B;AAItE,SAHU,MAAM,KAAK,MACnB,gFACD,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,6BAA6B,MAAY,QAAmC;AAKhG,SAJU,MAAM,KAAK,MACnB,wGACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,wBAAwB,MAAY,SAAoC;AAK5F,SAJU,MAAM,KAAK,MACnB,yKACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAO1C,eAAsB,gBAAgB,MAAY,SAA0C;AAC1F,KAAI,QAAQ,WAAW,EAAG;CAC1B,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,OAAK,MAAM,KAAK,QACd,OAAM,OAAO,MACX;qEAEA,CAAC,EAAE,MAAM,EAAE,UAAU,CACtB;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,IAAI;AACX,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,iBAAiB,MAAY,SAA0C;CAC3F,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,QAAM,OAAO,MAAM,+BAA+B;AAClD,OAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK,KAAK;GAC5C,MAAM,QAAQ,QAAQ,MAAM,GAAG,IAAI,IAAI;GACvC,MAAM,SAAmB,EAAE;GAC3B,MAAM,SAAmB,EAAE;GAC3B,IAAI,MAAM;AACV,QAAK,MAAM,KAAK,OAAO;AACrB,WAAO,KAAK,KAAK,MAAM,KAAK,MAAM,GAAG;AACrC,WAAO,KAAK,EAAE,MAAM,EAAE,UAAU;;AAElC,SAAM,OAAO,MACX,0DAA0D,OAAO,KAAK,KAAK,IAC3E,OACD;;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,iBAAiB,MAAY,MAAiC;AAKlF,SAJU,MAAM,KAAK,MACnB,2DACA,CAAC,KAAK,CACP,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,qBAAqB,MAAY,OAAe,QAAQ,KAAwB;AAKpG,SAJU,MAAM,KAAK,MACnB,8FACA,CAAC,IAAI,MAAM,IAAI,MAAM,CACtB,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;;;;;;;;;;;;;;ACzd1C,MAAM,EAAE,MAAM,WAAW;AAEzB,IAAa,kBAAb,MAAa,gBAA0C;CACrD,AAAS,cAAc;CAEvB,AAAQ;;;;;;;CAQR,aAAa,eAAe,QAAuC;EACjE,MAAM,UACJ,OAAO,oBACP,gBAAgB,OAAO,QAAQ,MAAM,GAAG,OAAO,YAAY,MAAM,GAAG,OAAO,QAAQ,YAAY,GAAG,OAAO,QAAQ,KAAK,GAAG,OAAO,YAAY;EAE9I,MAAM,WADM,IAAI,IAAI,QAAQ,CACP,SAAS,MAAM,EAAE;EAEtC,MAAM,WAAW,
IAAI,IAAI,QAAQ;AACjC,WAAS,WAAW;EACpB,MAAM,YAAY,IAAI,OAAO;GAC3B,kBAAkB,SAAS,UAAU;GACrC,KAAK;GACL,yBAAyB;GAC1B,CAAC;AAEF,MAAI;AAKF,QAJc,MAAM,UAAU,MAC5B,gDACA,CAAC,SAAS,CACX,EACS,aAAa,GAAG;AACxB,UAAM,UAAU,MAAM,oBAAoB,SAAS,GAAG;AACtD,YAAQ,OAAO,MAAM,oCAAoC,SAAS,IAAI;;YAEhE;AACR,SAAM,UAAU,KAAK;;EAGvB,MAAM,aAAa,IAAI,OAAO;GAC5B,kBAAkB;GAClB,KAAK;GACL,yBAAyB;GAC1B,CAAC;AAEF,MAAI;AAIF,QAHmB,MAAM,WAAW,MAClC,0EACD,EACc,aAAa,GAAG;IAC7B,MAAM,YAAY,QAAQ,cAAc,OAAO,KAAK,IAAI,CAAC;IACzD,MAAM,cAAc,KAAK,WAAW,wBAAwB;IAC5D,IAAI;AACJ,QAAI;AACF,eAAU,aAAa,aAAa,QAAQ;YACtC;AAEN,eAAU,aADM,KAAK,WAAW,qBAAqB,EACrB,QAAQ;;AAE1C,UAAM,WAAW,MAAM,QAAQ;AAC/B,YAAQ,OAAO,MAAM,8CAA8C,SAAS,IAAI;;AAIlF,SAAM,gBAAgB,cAAc,WAAW;YACvC;AACR,SAAM,WAAW,KAAK;;;;;;;CAQ1B,aAAqB,cAAc,MAA2B;AAM5D,OAJiB,MAAM,KAAK,MAC1B;wEAED,EACY,aAAa,GAAG;AAC3B,SAAM,KAAK,MACT,kFACD;AACD,WAAQ,OAAO,MAAM,qEAAqE;;AAO5F,OAHgB,MAAM,KAAK,MACzB,0EACD,EACW,aAAa,GAAG;AAC1B,SAAM,KAAK,MAAM;;;;;;;;;;;;;QAaf;AACF,SAAM,KAAK,MAAM,uDAAuD;AACxE,SAAM,KAAK,MAAM,yDAAyD;AAC1E,SAAM,KAAK,MAAM,sDAAsD;AACvE,SAAM,KAAK,MAAM,oEAAoE;AACrF,WAAQ,OAAO,MAAM,uDAAuD;;;CAIhF,YAAY,QAAwB;AAKlC,OAAK,OAAO,IAAI,OAAO;GACrB,kBAJA,OAAO,oBACP,gBAAgB,OAAO,QAAQ,MAAM,GAAG,OAAO,YAAY,MAAM,GAAG,OAAO,QAAQ,YAAY,GAAG,OAAO,QAAQ,KAAK,GAAG,OAAO,YAAY;GAI5I,KAAK,OAAO,kBAAkB;GAC9B,yBAAyB,OAAO,uBAAuB;GACvD,mBAAmB;GACpB,CAAC;AAEF,OAAK,KAAK,GAAG,UAAU,QAAQ;AAC7B,WAAQ,OAAO,MAAM,8BAA8B,IAAI,QAAQ,IAAI;IACnE;;CAOJ,MAAM,QAAuB;AAC3B,QAAM,KAAK,KAAK,KAAK;;;;;;CAOvB,UAAgB;AACd,SAAO,KAAK;;CAGd,MAAM,WAAqC;EACzC,MAAM,SAAS,MAAM,KAAK,KAAK,SAAS;AACxC,MAAI;GACF,MAAM,cAAc,MAAM,OAAO,MAC/B,4CACD;GACD,MAAM,eAAe,MAAM,OAAO,MAChC,6CACD;AACD,UAAO;IACL,OAAO,SAAS,YAAY,KAAK,IAAI,KAAK,KAAK,GAAG;IAClD,QAAQ,SAAS,aAAa,KAAK,IAAI,KAAK,KAAK,GAAG;IACrD;YACO;AACR,UAAO,SAAS;;;;;;;CAQpB,MAAM,iBAAyC;EAC7C,IAAI,SAA4B;AAChC,MAAI;AACF,YAAS,MAAM,KAAK,KAAK,SAAS;AAClC,SAAM,OAAO,MAAM,WAAW;AAC9B,UAAO;WACA,GAAG;AACV,UAAO,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;YACzC;AACR,WAAQ,SAAS;;;CAQrB,MAAM,YAAY,WAAmB,MAA2C;AAK9E,UAJe,MAAM,KAAK,KAAK,MAC7B,kEACA,CAAC,WAAW,KAAK,CAClB,EACa,KAAK,IAAI;;CAGzB,MAAM,WAAW,MAA8B;AAC7C,QAAM,KAAK,KAAK,MACd;;;;;;;kCAQA;GAAC,KAAK;GAAW,KAAK;GAAM,KAAK;GAAQ,KAAK;GAAM,KAAK;GAAM,KAAK;GAAO,KAAK;GAAK,CACtF;;CAOH,MAAM,YAAY,WAAmB,MAAiC;AAKpE,UAJe,MAAM,KAAK,KAAK,MAC7B,iEACA,CAAC,WAAW,KAAK,CAClB,EACa,KAAK,KAAK,MAAM,EAAE,GAAG;;CAGrC,MAAM,oBAAoB,WAAmB,MAA6B;AACxE,QAAM,KAAK,KAAK,MACd,8DACA,CAAC,WAAW,KAAK,CAClB;;CAGH,MAAM,aAAa,QAAmC;AACpD,MAAI,OAAO,WAAW,EAAG;EAEzB,MAAM,SAAS,MAAM,KAAK,KAAK,SAAS;AACxC,MAAI;AACF,SAAM,OAAO,MAAM,QAAQ;AAE3B,QAAK,MAAM,KAAK,QAAQ;IACtB,MAAM,WAAW,EAAE,KAAK,QAAQ,OAAO,GAAG;AAE1C,UAAM,OAAO,MACX;;;;;;;;;;;;;;;gDAgBA;KACE,EAAE;KAAI,EAAE;KAAW,EAAE;KAAQ,EAAE;KAAM,EAAE;KACvC,EAAE;KAAW,EAAE;KAAS,EAAE;KAAM;KAAU,EAAE;KAC7C,CACF;;AAGH,SAAM,OAAO,MAAM,SAAS;WACrB,GAAG;AACV,SAAM,OAAO,MAAM,WAAW;AAC9B,SAAM;YACE;AACR,UAAO,SAAS;;;CAIpB,MAAM,sBAAsB,WAAsC;AAKhE,UAJe,MAAM,KAAK,KAAK,MAC7B,8DACA,CAAC,UAAU,CACZ,EACa,KAAK,KAAK,MAAM,EAAE,KAAK;;CAGvC,MAAM,YAAY,WAAmB,OAAgC;AACnE,MAAI,MAAM,WAAW,EAAG;EACxB,MAAM,SAAS,MAAM,KAAK,KAAK,SAAS;AACxC,MAAI;AACF,SAAM,OAAO,MAAM,QAAQ;AAC3B,QAAK,MAAM,QAAQ,OAAO;AACxB,UAAM,OAAO,MAAM,8DAA8D,CAAC,WAAW,KAAK,CAAC;AACnG,UAAM,OAAO,MAAM,6DAA6D,CAAC,WAAW,KAAK,CAAC;;AAEpG,SAAM,OAAO,MAAM,SAAS;WACrB,GAAG;AACV,SAAM,OAAO,MAAM,WAAW;AAC9B,SAAM;YACE;AACR,UAAO,SAAS;;;CAIpB,MAAM,sBAAsB,WAAoG;AAC9H,MAAI,cAAc,OAKhB,SAJe,MAAM,KAAK,KAAK,MAC7B,6GACA,CAAC,UAAU,CACZ,EACa;AAKhB,UAHe,MAAM,KAAK,KAAK,MAC7B,wFACD,EACa;;CAGhB,MAAM,gBAAgB,SAAiB,WAAkC;EAEvE,MAAM,SAAS,MADH,eAAe,UAAU,CACZ,KAAK,IAAI,GAAG;AACrC,QAAM,KAAK,KAAK,MACd,8DACA,CAAC,QAAQ,QAAQ,CAClB;;CAOH,MAAM,cAAc,OAAe,MAA+C;AA
ChF,SAAO,cAAc,KAAK,MAAM,OAAO,KAAK;;CAG9C,MAAM,eAAe,gBAA8B,MAA+C;AAChG,SAAO,eAAe,KAAK,MAAM,gBAAgB,KAAK;;CAOxD,MAAM,gBAAgB,MAAmC;AAAE,SAAOA,gBAAsB,KAAK,MAAM,KAAK;;CACxG,MAAM,gBAAgB,WAAkC;AAAE,SAAOC,gBAAsB,KAAK,MAAM,UAAU;;CAC5G,MAAM,aAAa,WAAiD;AAAE,SAAOC,aAAmB,KAAK,MAAM,UAAU;;CACrH,MAAM,oBAAoB,OAAe,QAA8C;AAAE,SAAOC,oBAA0B,KAAK,MAAM,OAAO,OAAO;;CACnJ,MAAM,mBAA4C;AAAE,SAAOC,iBAAuB,KAAK,KAAK;;CAC5F,MAAM,oBAAoB,SAA0C;AAAE,SAAOC,oBAA0B,KAAK,MAAM,QAAQ;;CAC1H,MAAM,kBAAmC;AAAE,SAAOC,gBAAsB,KAAK,KAAK;;CAClF,MAAM,0BAA0B,QAAiC;AAAE,SAAOC,0BAAgC,KAAK,MAAM,OAAO;;CAC5H,MAAM,qBAAqB,SAAkC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,QAAQ;;CACpH,MAAM,qBAAqB,OAA0C;AAAE,SAAOC,qBAA2B,KAAK,MAAM,MAAM;;CAC1H,MAAM,0BAA0B,OAAiB,SAA0C;AAAE,SAAOC,0BAAgC,KAAK,MAAM,OAAO,QAAQ;;CAC9J,MAAM,uBAA0C;AAAE,SAAOC,qBAA2B,KAAK,KAAK;;CAC9F,MAAM,4BAA4B,QAAmC;AAAE,SAAOC,4BAAkC,KAAK,MAAM,OAAO;;CAClI,MAAM,uBAAuB,SAAoC;AAAE,SAAOC,uBAA6B,KAAK,MAAM,QAAQ;;CAE1H,MAAM,mBAAmB,SAAyC;AAAE,SAAOC,mBAAyB,KAAK,MAAM,QAAQ;;CACvH,MAAM,mBAAmB,eAAsC;AAAE,SAAOC,mBAAyB,KAAK,MAAM,cAAc;;CAC1H,MAAM,cAAc,WAA8D;AAAE,SAAOC,cAAoB,KAAK,MAAM,UAAU;;CAEpI,MAAM,uBAAuB,aAAuB,OAAsC;AAAE,SAAOC,uBAA6B,KAAK,MAAM,aAAa,MAAM;;CAC9J,MAAM,mBAAmB,YAA6C;AAAE,SAAOC,mBAAyB,KAAK,MAAM,WAAW;;CAC9H,MAAM,iBAAiB,YAA6C;AAAE,SAAOC,iBAAuB,KAAK,MAAM,WAAW;;CAC1H,MAAM,oBAAkF;AAAE,SAAOC,kBAAwB,KAAK,KAAK;;CACnI,MAAM,eAA0E;AAAE,SAAOC,aAAmB,KAAK,KAAK;;CACtH,MAAM,8BAA6G;AAAE,SAAOC,4BAAkC,KAAK,KAAK;;CACxK,MAAM,uBAAuB,QAA+F;AAAE,SAAOC,uBAA6B,KAAK,MAAM,OAAO;;CACpL,MAAM,kBAAkB,SAAgG;AAAE,SAAOC,kBAAwB,KAAK,MAAM,QAAQ;;CAC5K,MAAM,0BAA0B,QAAiC;AAAE,SAAOC,0BAAgC,KAAK,MAAM,OAAO;;CAC5H,MAAM,qBAAqB,SAAkC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,QAAQ;;CACpH,MAAM,uBAAuB,aAAgD;AAAE,SAAOC,uBAA6B,KAAK,MAAM,YAAY;;CAC1I,MAAM,oBAAwE;AAAE,SAAOC,kBAAwB,KAAK,KAAK;;CACzH,MAAM,4BAA4B,QAAoE;AAAE,SAAOC,4BAAkC,KAAK,MAAM,OAAO;;CACnK,MAAM,uBAAuB,SAAqE;AAAE,SAAOC,uBAA6B,KAAK,MAAM,QAAQ;;CAE3J,MAAM,kBAAkB,MAAuC;AAAE,SAAOC,kBAAwB,KAAK,MAAM,KAAK;;CAChH,MAAM,eAAe,WAAmD;AAAE,SAAOC,eAAqB,KAAK,MAAM,UAAU;;CAC3H,MAAM,aAAwC;AAAE,SAAOC,WAAiB,KAAK,KAAK;;CAClF,MAAM,qBAAqB,QAAmC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,OAAO;;CACpH,MAAM,gBAAgB,SAAoC;AAAE,SAAOC,gBAAsB,KAAK,MAAM,QAAQ;;CAC5G,MAAM,qBAAwC;AAAE,SAAOC,mBAAyB,KAAK,KAAK;;CAC1F,MAAM,6BAA6B,QAAmC;AAAE,SAAOC,6BAAmC,KAAK,MAAM,OAAO;;CACpI,MAAM,wBAAwB,SAAoC;AAAE,SAAOC,wBAA8B,KAAK,MAAM,QAAQ;;CAE5H,MAAM,gBAAgB,SAA0C;AAAE,SAAOC,gBAAsB,KAAK,MAAM,QAAQ;;CAClH,MAAM,iBAAiB,SAA0C;AAAE,SAAOC,iBAAuB,KAAK,MAAM,QAAQ;;CACpH,MAAM,iBAAiB,MAAiC;AAAE,SAAOC,iBAAuB,KAAK,MAAM,KAAK;;CACxG,MAAM,qBAAqB,OAAe,OAAmC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,OAAO,MAAM;;CAGzI,MAAM,wBAAwB,WAAmB,OAAkF;AAKjI,UAJU,MAAM,KAAK,KAAK,MACxB,sIACA,CAAC,WAAW,MAAM,CACnB,EACQ;;CAGX,MAAM,iBAAiB,WAAmB,MAAc,QAAQ,IAAgE;AAK9H,UAJU,MAAM,KAAK,KAAK,MACxB,wIACA;GAAC;GAAW;GAAM;GAAM,CACzB,EACQ;;CAGX,MAAM,mBAAmB,WAAmB,OAAe,OAA+D;AAKxH,UAJU,MAAM,KAAK,KAAK,MACxB,+GACA;GAAC;GAAW,IAAI,MAAM;GAAI;GAAM,CACjC,EACQ"}
|
|
1
|
+
{"version":3,"file":"postgres-BGERehmX.mjs","names":["vault.upsertVaultFile","vault.deleteVaultFile","vault.getVaultFile","vault.getVaultFileByInode","vault.getAllVaultFiles","vault.getRecentVaultFiles","vault.countVaultFiles","vault.countVaultFilesWithPrefix","vault.countVaultFilesAfter","vault.getVaultFilesByPaths","vault.getVaultFilesByPathsAfter","vault.getAllVaultFilePaths","vault.getVaultFilePathsWithPrefix","vault.getVaultFilePathsAfter","vault.upsertVaultAliases","vault.deleteVaultAliases","vault.getVaultAlias","vault.replaceLinksForSources","vault.getLinksFromSource","vault.getLinksToTarget","vault.getVaultLinkGraph","vault.getDeadLinks","vault.getDeadLinksWithLineNumbers","vault.getDeadLinksWithPrefix","vault.getDeadLinksAfter","vault.countVaultLinksWithPrefix","vault.countVaultLinksAfter","vault.getVaultLinksFromPaths","vault.getVaultLinkEdges","vault.getVaultLinkEdgesWithPrefix","vault.getVaultLinkEdgesAfter","vault.upsertVaultHealth","vault.getVaultHealth","vault.getOrphans","vault.getOrphansWithPrefix","vault.getOrphansAfter","vault.getLowConnectivity","vault.getLowConnectivityWithPrefix","vault.getLowConnectivityAfter","vault.upsertNameIndex","vault.replaceNameIndex","vault.resolveVaultName","vault.searchVaultNameIndex"],"sources":["../src/storage/postgres/helpers.ts","../src/storage/postgres/search.ts","../src/storage/postgres/vault.ts","../src/storage/postgres/backend.ts"],"sourcesContent":["/**\n * Internal helper utilities for the Postgres storage backend.\n */\n\nimport { STOP_WORDS } from \"../../utils/stop-words.js\";\n\n/**\n * Convert a Buffer of Float32 LE bytes (as stored in SQLite) to number[].\n */\nexport function bufferToVector(buf: Buffer): number[] {\n const floats: number[] = [];\n for (let i = 0; i < buf.length; i += 4) {\n floats.push(buf.readFloatLE(i));\n }\n return floats;\n}\n\n/**\n * Convert a free-text query to a Postgres tsquery string.\n *\n * Uses OR (|) semantics so that a chunk matching ANY query term is returned,\n * ranked by ts_rank (which scores higher when more terms match). AND (&)\n * semantics are too strict for multi-word queries because all terms rarely\n * co-occur in a single chunk.\n *\n * Example: \"Synchrotech interview follow-up Gilles\"\n * → \"synchrotech | interview | follow | gilles\"\n */\nexport function buildPgTsQuery(query: string): string {\n const tokens = query\n .toLowerCase()\n .split(/[\\s\\p{P}]+/u)\n .filter(Boolean)\n .filter((t) => t.length >= 2)\n .filter((t) => !STOP_WORDS.has(t))\n // Sanitize: strip tsquery special characters to prevent syntax errors\n .map((t) => t.replace(/'/g, \"''\").replace(/[&|!():]/g, \"\"))\n .filter(Boolean);\n\n if (tokens.length === 0) {\n const raw = query.replace(/[^a-z0-9]/gi, \" \").trim().split(/\\s+/).filter(Boolean).join(\" | \");\n return raw || \"\";\n }\n\n return tokens.join(\" | \");\n}\n","/**\n * Keyword and semantic search implementations for the Postgres backend.\n * Functions take a `pool` parameter so they can be called from PostgresBackend methods.\n */\n\nimport type { Pool } from \"pg\";\nimport type { SearchResult, SearchOptions } from \"../../memory/search.js\";\nimport { buildPgTsQuery } from \"./helpers.js\";\n\n/**\n * Full-text keyword search using Postgres tsvector/tsquery with 'simple' dictionary.\n */\nexport async function searchKeyword(\n pool: Pool,\n query: string,\n opts?: SearchOptions\n): Promise<SearchResult[]> {\n const maxResults = opts?.maxResults ?? 
10;\n\n const tsQuery = buildPgTsQuery(query);\n if (!tsQuery) return [];\n\n const conditions: string[] = [\"fts_vector @@ to_tsquery('simple', $1)\"];\n const params: (string | number)[] = [tsQuery];\n let paramIdx = 2;\n\n if (opts?.projectIds && opts.projectIds.length > 0) {\n const placeholders = opts.projectIds.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`project_id IN (${placeholders})`);\n params.push(...opts.projectIds);\n }\n\n if (opts?.sources && opts.sources.length > 0) {\n const placeholders = opts.sources.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`source IN (${placeholders})`);\n params.push(...opts.sources);\n }\n\n if (opts?.tiers && opts.tiers.length > 0) {\n const placeholders = opts.tiers.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`tier IN (${placeholders})`);\n params.push(...opts.tiers);\n }\n\n params.push(maxResults);\n const limitParam = `$${paramIdx}`;\n\n const sql = `\n SELECT\n project_id,\n path,\n start_line,\n end_line,\n text AS snippet,\n tier,\n source,\n ts_rank(fts_vector, to_tsquery('simple', $1)) AS rank_score\n FROM pai_chunks\n WHERE ${conditions.join(\" AND \")}\n ORDER BY rank_score DESC\n LIMIT ${limitParam}\n `;\n\n try {\n const result = await pool.query<{\n project_id: number;\n path: string;\n start_line: number;\n end_line: number;\n snippet: string;\n tier: string;\n source: string;\n rank_score: number;\n }>(sql, params);\n\n return result.rows.map((row) => ({\n projectId: row.project_id,\n path: row.path,\n startLine: row.start_line,\n endLine: row.end_line,\n snippet: row.snippet,\n score: row.rank_score,\n tier: row.tier,\n source: row.source,\n }));\n } catch (e) {\n process.stderr.write(`[pai-postgres] searchKeyword error: ${e}\\n`);\n return [];\n }\n}\n\n/**\n * Semantic vector similarity search using pgvector cosine distance (<=>).\n */\nexport async function searchSemantic(\n pool: Pool,\n queryEmbedding: Float32Array,\n opts?: SearchOptions\n): Promise<SearchResult[]> {\n const maxResults = opts?.maxResults ?? 
10;\n\n const conditions: string[] = [\"embedding IS NOT NULL\"];\n const params: (string | number)[] = [];\n let paramIdx = 1;\n\n const vecStr = \"[\" + Array.from(queryEmbedding).join(\",\") + \"]\";\n params.push(vecStr);\n const vecParam = `$${paramIdx++}`;\n\n if (opts?.projectIds && opts.projectIds.length > 0) {\n const placeholders = opts.projectIds.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`project_id IN (${placeholders})`);\n params.push(...opts.projectIds);\n }\n\n if (opts?.sources && opts.sources.length > 0) {\n const placeholders = opts.sources.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`source IN (${placeholders})`);\n params.push(...opts.sources);\n }\n\n if (opts?.tiers && opts.tiers.length > 0) {\n const placeholders = opts.tiers.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`tier IN (${placeholders})`);\n params.push(...opts.tiers);\n }\n\n params.push(maxResults);\n const limitParam = `$${paramIdx}`;\n\n // <=> is cosine distance; 1 - distance = cosine similarity\n const sql = `\n SELECT\n project_id,\n path,\n start_line,\n end_line,\n text AS snippet,\n tier,\n source,\n 1 - (embedding <=> ${vecParam}::vector) AS cosine_similarity\n FROM pai_chunks\n WHERE ${conditions.join(\" AND \")}\n ORDER BY embedding <=> ${vecParam}::vector\n LIMIT ${limitParam}\n `;\n\n try {\n const result = await pool.query<{\n project_id: number;\n path: string;\n start_line: number;\n end_line: number;\n snippet: string;\n tier: string;\n source: string;\n cosine_similarity: number;\n }>(sql, params);\n\n const minScore = opts?.minScore ?? -Infinity;\n\n return result.rows\n .map((row) => ({\n projectId: row.project_id,\n path: row.path,\n startLine: row.start_line,\n endLine: row.end_line,\n snippet: row.snippet,\n score: row.cosine_similarity,\n tier: row.tier,\n source: row.source,\n }))\n .filter((r) => r.score >= minScore);\n } catch (e) {\n process.stderr.write(`[pai-postgres] searchSemantic error: ${e}\\n`);\n return [];\n }\n}\n","/**\n * Vault storage operations for the Postgres backend.\n * All functions take a `pool` parameter — called from PostgresBackend methods.\n */\n\nimport type { Pool } from \"pg\";\nimport type {\n VaultFileRow, VaultAliasRow, VaultLinkRow, VaultHealthRow, VaultNameEntry,\n} from \"../interface.js\";\n\n// ---------------------------------------------------------------------------\n// Vault files\n// ---------------------------------------------------------------------------\n\nexport async function upsertVaultFile(pool: Pool, file: VaultFileRow): Promise<void> {\n await pool.query(\n `INSERT INTO vault_files (vault_path, inode, device, hash, title, indexed_at)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (vault_path) DO UPDATE SET\n inode = EXCLUDED.inode, device = EXCLUDED.device,\n hash = EXCLUDED.hash, title = EXCLUDED.title,\n indexed_at = EXCLUDED.indexed_at`,\n [file.vaultPath, file.inode, file.device, file.hash, file.title, file.indexedAt]\n );\n}\n\nexport async function deleteVaultFile(pool: Pool, vaultPath: string): Promise<void> {\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n await client.query(\"DELETE FROM vault_links WHERE source_path = $1\", [vaultPath]);\n await client.query(\"DELETE FROM vault_health WHERE vault_path = $1\", [vaultPath]);\n await client.query(\"DELETE FROM vault_name_index WHERE vault_path = $1\", [vaultPath]);\n await client.query(\"DELETE FROM vault_aliases WHERE vault_path = $1 OR canonical_path = $1\", [vaultPath]);\n await 
client.query(\"DELETE FROM vault_files WHERE vault_path = $1\", [vaultPath]);\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\ntype VaultFileDbRow = { vault_path: string; inode: string; device: string; hash: string; title: string | null; indexed_at: string };\n\nfunction mapVaultFileRow(row: VaultFileDbRow): VaultFileRow {\n return {\n vaultPath: row.vault_path,\n inode: Number(row.inode),\n device: Number(row.device),\n hash: row.hash,\n title: row.title,\n indexedAt: Number(row.indexed_at),\n };\n}\n\nexport async function getVaultFile(pool: Pool, vaultPath: string): Promise<VaultFileRow | null> {\n const r = await pool.query<VaultFileDbRow>(\n \"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE vault_path = $1\",\n [vaultPath]\n );\n return r.rows.length === 0 ? null : mapVaultFileRow(r.rows[0]);\n}\n\nexport async function getVaultFileByInode(pool: Pool, inode: number, device: number): Promise<VaultFileRow | null> {\n const r = await pool.query<VaultFileDbRow>(\n \"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE inode = $1 AND device = $2 LIMIT 1\",\n [inode, device]\n );\n return r.rows.length === 0 ? null : mapVaultFileRow(r.rows[0]);\n}\n\nexport async function getAllVaultFiles(pool: Pool): Promise<VaultFileRow[]> {\n const r = await pool.query<VaultFileDbRow>(\"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files\");\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function getRecentVaultFiles(pool: Pool, sinceMs: number): Promise<VaultFileRow[]> {\n const r = await pool.query<VaultFileDbRow>(\n \"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function countVaultFiles(pool: Pool): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*)::text AS n FROM vault_files\");\n return parseInt(r.rows[0]?.n ?? \"0\", 10);\n}\n\nexport async function countVaultFilesWithPrefix(pool: Pool, prefix: string): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*) AS n FROM vault_files WHERE vault_path LIKE $1\", [`${prefix}%`]);\n return Number(r.rows[0]?.n ?? 0);\n}\n\nexport async function countVaultFilesAfter(pool: Pool, sinceMs: number): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*) AS n FROM vault_files WHERE indexed_at > $1\", [sinceMs]);\n return Number(r.rows[0]?.n ?? 
0);\n}\n\nexport async function getVaultFilesByPaths(pool: Pool, paths: string[]): Promise<VaultFileRow[]> {\n if (paths.length === 0) return [];\n const placeholders = paths.map((_, i) => `$${i + 1}`).join(\", \");\n const r = await pool.query<VaultFileDbRow>(\n `SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE vault_path IN (${placeholders})`,\n paths\n );\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function getVaultFilesByPathsAfter(pool: Pool, paths: string[], sinceMs: number): Promise<VaultFileRow[]> {\n if (paths.length === 0) return [];\n const placeholders = paths.map((_, i) => `$${i + 1}`).join(\", \");\n const r = await pool.query<VaultFileDbRow>(\n `SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE vault_path IN (${placeholders}) AND indexed_at >= $${paths.length + 1} ORDER BY indexed_at ASC`,\n [...paths, sinceMs]\n );\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function getAllVaultFilePaths(pool: Pool): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\"SELECT vault_path FROM vault_files\");\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getVaultFilePathsWithPrefix(pool: Pool, prefix: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_files WHERE vault_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getVaultFilePathsAfter(pool: Pool, sinceMs: number): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_files WHERE indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(row => row.vault_path);\n}\n\n// ---------------------------------------------------------------------------\n// Vault aliases\n// ---------------------------------------------------------------------------\n\nexport async function upsertVaultAliases(pool: Pool, aliases: VaultAliasRow[]): Promise<void> {\n if (aliases.length === 0) return;\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const a of aliases) {\n await client.query(\n `INSERT INTO vault_aliases (vault_path, canonical_path, inode, device)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (vault_path) DO UPDATE SET\n canonical_path = EXCLUDED.canonical_path,\n inode = EXCLUDED.inode, device = EXCLUDED.device`,\n [a.vaultPath, a.canonicalPath, a.inode, a.device]\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport async function deleteVaultAliases(pool: Pool, canonicalPath: string): Promise<void> {\n await pool.query(\"DELETE FROM vault_aliases WHERE canonical_path = $1\", [canonicalPath]);\n}\n\nexport async function getVaultAlias(pool: Pool, vaultPath: string): Promise<{ canonicalPath: string } | null> {\n const r = await pool.query<{ canonical_path: string }>(\n \"SELECT canonical_path FROM vault_aliases WHERE vault_path = $1\",\n [vaultPath]\n );\n return r.rows.length > 0 ? 
{ canonicalPath: r.rows[0].canonical_path } : null;\n}\n\n// ---------------------------------------------------------------------------\n// Vault links\n// ---------------------------------------------------------------------------\n\ntype VaultLinkDbRow = { source_path: string; target_raw: string; target_path: string | null; link_type: string; line_number: number; confidence?: string };\n\nfunction mapVaultLinkRow(row: VaultLinkDbRow): VaultLinkRow {\n return {\n sourcePath: row.source_path,\n targetRaw: row.target_raw,\n targetPath: row.target_path,\n linkType: row.link_type,\n lineNumber: row.line_number,\n confidence: (row.confidence as VaultLinkRow[\"confidence\"]) ?? \"EXTRACTED\",\n };\n}\n\nexport async function replaceLinksForSources(pool: Pool, sourcePaths: string[], links: VaultLinkRow[]): Promise<void> {\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n if (sourcePaths.length > 0) {\n await client.query(\n \"DELETE FROM vault_links WHERE source_path = ANY($1::text[])\",\n [sourcePaths]\n );\n }\n for (let i = 0; i < links.length; i += 500) {\n const batch = links.slice(i, i + 500);\n const values: string[] = [];\n const params: (string | number | null)[] = [];\n let idx = 1;\n for (const l of batch) {\n values.push(`($${idx++}, $${idx++}, $${idx++}, $${idx++}, $${idx++}, $${idx++})`);\n params.push(l.sourcePath, l.targetRaw, l.targetPath, l.linkType, l.lineNumber, l.confidence ?? \"EXTRACTED\");\n }\n await client.query(\n `INSERT INTO vault_links (source_path, target_raw, target_path, link_type, line_number, confidence)\n VALUES ${values.join(\", \")}\n ON CONFLICT (source_path, target_raw, line_number) DO UPDATE SET\n target_path = EXCLUDED.target_path, link_type = EXCLUDED.link_type,\n confidence = EXCLUDED.confidence`,\n params\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport async function getLinksFromSource(pool: Pool, sourcePath: string): Promise<VaultLinkRow[]> {\n const r = await pool.query<VaultLinkDbRow>(\n \"SELECT source_path, target_raw, target_path, link_type, line_number, confidence FROM vault_links WHERE source_path = $1\",\n [sourcePath]\n );\n return r.rows.map(mapVaultLinkRow);\n}\n\nexport async function getLinksToTarget(pool: Pool, targetPath: string): Promise<VaultLinkRow[]> {\n const r = await pool.query<VaultLinkDbRow>(\n \"SELECT source_path, target_raw, target_path, link_type, line_number, confidence FROM vault_links WHERE target_path = $1\",\n [targetPath]\n );\n return r.rows.map(mapVaultLinkRow);\n}\n\nexport async function getVaultLinkGraph(pool: Pool): Promise<Array<{ source_path: string; target_path: string }>> {\n const r = await pool.query<{ source_path: string; target_path: string }>(\n \"SELECT source_path, target_path FROM vault_links WHERE target_path IS NOT NULL\"\n );\n return r.rows;\n}\n\nexport async function getDeadLinks(pool: Pool): Promise<Array<{ sourcePath: string; targetRaw: string }>> {\n const r = await pool.query<{ source_path: string; target_raw: string }>(\n \"SELECT source_path, target_raw FROM vault_links WHERE target_path IS NULL\"\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw }));\n}\n\nexport async function getDeadLinksWithLineNumbers(pool: Pool): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> {\n const r = await pool.query<{ source_path: string; target_raw: string; line_number: number }>(\n \"SELECT 
source_path, target_raw, line_number FROM vault_links WHERE target_path IS NULL\"\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw, lineNumber: row.line_number }));\n}\n\nexport async function getDeadLinksWithPrefix(pool: Pool, prefix: string): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> {\n const r = await pool.query<{ source_path: string; target_raw: string; line_number: number }>(\n \"SELECT source_path, target_raw, line_number FROM vault_links WHERE target_path IS NULL AND source_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw, lineNumber: row.line_number }));\n}\n\nexport async function getDeadLinksAfter(pool: Pool, sinceMs: number): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> {\n const r = await pool.query<{ source_path: string; target_raw: string; line_number: number }>(\n \"SELECT source_path, target_raw, line_number FROM vault_links WHERE target_path IS NULL AND source_path IN (SELECT vault_path FROM vault_files WHERE indexed_at > $1)\",\n [sinceMs]\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw, lineNumber: row.line_number }));\n}\n\nexport async function countVaultLinksWithPrefix(pool: Pool, prefix: string): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*) AS n FROM vault_links WHERE source_path LIKE $1\", [`${prefix}%`]);\n return Number(r.rows[0]?.n ?? 0);\n}\n\nexport async function countVaultLinksAfter(pool: Pool, sinceMs: number): Promise<number> {\n const r = await pool.query<{ n: string }>(\n \"SELECT COUNT(*) AS n FROM vault_links WHERE source_path IN (SELECT vault_path FROM vault_files WHERE indexed_at > $1)\",\n [sinceMs]\n );\n return Number(r.rows[0]?.n ?? 
0);\n}\n\nexport async function getVaultLinksFromPaths(pool: Pool, sourcePaths: string[]): Promise<VaultLinkRow[]> {\n if (sourcePaths.length === 0) return [];\n const placeholders = sourcePaths.map((_, i) => `$${i + 1}`).join(\", \");\n const r = await pool.query<VaultLinkDbRow>(\n `SELECT source_path, target_raw, target_path, link_type, line_number, confidence FROM vault_links WHERE source_path IN (${placeholders}) AND target_path IS NOT NULL`,\n sourcePaths\n );\n return r.rows.map(mapVaultLinkRow);\n}\n\nexport async function getVaultLinkEdges(pool: Pool): Promise<Array<{ source: string; target: string }>> {\n const r = await pool.query<{ source: string; target: string }>(\n \"SELECT DISTINCT source_path AS source, target_path AS target FROM vault_links WHERE target_path IS NOT NULL\"\n );\n return r.rows;\n}\n\nexport async function getVaultLinkEdgesWithPrefix(pool: Pool, prefix: string): Promise<Array<{ source: string; target: string }>> {\n const r = await pool.query<{ source: string; target: string }>(\n \"SELECT DISTINCT source_path AS source, target_path AS target FROM vault_links WHERE target_path IS NOT NULL AND source_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows;\n}\n\nexport async function getVaultLinkEdgesAfter(pool: Pool, sinceMs: number): Promise<Array<{ source: string; target: string }>> {\n const r = await pool.query<{ source: string; target: string }>(\n \"SELECT DISTINCT source_path AS source, target_path AS target FROM vault_links WHERE target_path IS NOT NULL AND source_path IN (SELECT vault_path FROM vault_files WHERE indexed_at > $1)\",\n [sinceMs]\n );\n return r.rows;\n}\n\n// ---------------------------------------------------------------------------\n// Vault health\n// ---------------------------------------------------------------------------\n\ntype VaultHealthDbRow = { vault_path: string; inbound_count: number; outbound_count: number; dead_link_count: number; is_orphan: number; computed_at: string };\n\nfunction mapVaultHealthRow(row: VaultHealthDbRow): VaultHealthRow {\n return {\n vaultPath: row.vault_path,\n inboundCount: row.inbound_count,\n outboundCount: row.outbound_count,\n deadLinkCount: row.dead_link_count,\n isOrphan: row.is_orphan === 1,\n computedAt: Number(row.computed_at),\n };\n}\n\nexport async function upsertVaultHealth(pool: Pool, rows: VaultHealthRow[]): Promise<void> {\n if (rows.length === 0) return;\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const h of rows) {\n await client.query(\n `INSERT INTO vault_health (vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (vault_path) DO UPDATE SET\n inbound_count = EXCLUDED.inbound_count,\n outbound_count = EXCLUDED.outbound_count,\n dead_link_count = EXCLUDED.dead_link_count,\n is_orphan = EXCLUDED.is_orphan,\n computed_at = EXCLUDED.computed_at`,\n [h.vaultPath, h.inboundCount, h.outboundCount, h.deadLinkCount, h.isOrphan ? 1 : 0, h.computedAt]\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport async function getVaultHealth(pool: Pool, vaultPath: string): Promise<VaultHealthRow | null> {\n const r = await pool.query<VaultHealthDbRow>(\n \"SELECT vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at FROM vault_health WHERE vault_path = $1\",\n [vaultPath]\n );\n return r.rows.length === 0 ? 
null : mapVaultHealthRow(r.rows[0]);\n}\n\nexport async function getOrphans(pool: Pool): Promise<VaultHealthRow[]> {\n const r = await pool.query<VaultHealthDbRow>(\n \"SELECT vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at FROM vault_health WHERE is_orphan = 1\"\n );\n return r.rows.map(row => ({ ...mapVaultHealthRow(row), isOrphan: true }));\n}\n\nexport async function getOrphansWithPrefix(pool: Pool, prefix: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_health WHERE is_orphan = 1 AND vault_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getOrphansAfter(pool: Pool, sinceMs: number): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vh.vault_path FROM vault_health vh JOIN vault_files vf ON vh.vault_path = vf.vault_path WHERE vh.is_orphan = 1 AND vf.indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getLowConnectivity(pool: Pool): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_health WHERE inbound_count + outbound_count <= 1\"\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getLowConnectivityWithPrefix(pool: Pool, prefix: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_health WHERE inbound_count + outbound_count <= 1 AND vault_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getLowConnectivityAfter(pool: Pool, sinceMs: number): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vh.vault_path FROM vault_health vh JOIN vault_files vf ON vh.vault_path = vf.vault_path WHERE vh.inbound_count + vh.outbound_count <= 1 AND vf.indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(row => row.vault_path);\n}\n\n// ---------------------------------------------------------------------------\n// Vault name index\n// ---------------------------------------------------------------------------\n\nexport async function upsertNameIndex(pool: Pool, entries: VaultNameEntry[]): Promise<void> {\n if (entries.length === 0) return;\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const e of entries) {\n await client.query(\n `INSERT INTO vault_name_index (name, vault_path)\n VALUES ($1, $2) ON CONFLICT (name, vault_path) DO NOTHING`,\n [e.name, e.vaultPath]\n );\n }\n await client.query(\"COMMIT\");\n } catch (e_) {\n await client.query(\"ROLLBACK\");\n throw e_;\n } finally {\n client.release();\n }\n}\n\nexport async function replaceNameIndex(pool: Pool, entries: VaultNameEntry[]): Promise<void> {\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n await client.query(\"DELETE FROM vault_name_index\");\n for (let i = 0; i < entries.length; i += 500) {\n const batch = entries.slice(i, i + 500);\n const values: string[] = [];\n const params: string[] = [];\n let idx = 1;\n for (const e of batch) {\n values.push(`($${idx++}, $${idx++})`);\n params.push(e.name, e.vaultPath);\n }\n await client.query(\n `INSERT INTO vault_name_index (name, vault_path) VALUES ${values.join(\", \")}`,\n params\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport 
async function resolveVaultName(pool: Pool, name: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_name_index WHERE name = $1\",\n [name]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function searchVaultNameIndex(pool: Pool, query: string, limit = 100): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT DISTINCT vault_path FROM vault_name_index WHERE lower(name) LIKE lower($1) LIMIT $2\",\n [`%${query}%`, limit]\n );\n return r.rows.map(row => row.vault_path);\n}\n","/**\n * PostgresBackend — implements StorageBackend using PostgreSQL + pgvector.\n *\n * Vector similarity: pgvector's <=> cosine distance operator\n * Full-text search: PostgreSQL tsvector/tsquery (replaces SQLite FTS5)\n * Connection pooling: node-postgres Pool\n *\n * Schema is auto-initialized on first connection if tables don't exist.\n * Per-user database isolation: each macOS user gets their own database (pai_<username>).\n */\n\nimport pg from \"pg\";\nimport type { Pool, PoolClient } from \"pg\";\nimport { readFileSync } from \"node:fs\";\nimport { join, dirname } from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\nimport type {\n StorageBackend, ChunkRow, FileRow, FederationStats,\n VaultFileRow, VaultAliasRow, VaultLinkRow, VaultHealthRow, VaultNameEntry,\n} from \"../interface.js\";\nimport type { SearchResult, SearchOptions } from \"../../memory/search.js\";\nimport type { PostgresConfig } from \"./config.js\";\nimport { bufferToVector } from \"./helpers.js\";\nimport { searchKeyword, searchSemantic } from \"./search.js\";\nimport * as vault from \"./vault.js\";\n\nconst { Pool: PgPool } = pg;\n\nexport class PostgresBackend implements StorageBackend {\n readonly backendType = \"postgres\" as const;\n\n private pool: Pool;\n\n /**\n * Ensure the per-user database exists and has the required schema.\n * Connects to the default 'postgres' database to CREATE DATABASE if needed,\n * then connects to the target database to apply init.sql schema.\n * Safe to call multiple times (fully idempotent).\n */\n static async ensureDatabase(config: PostgresConfig): Promise<void> {\n const connStr =\n config.connectionString ??\n `postgresql://${config.user ?? \"pai\"}:${config.password ?? \"pai\"}@${config.host ?? \"localhost\"}:${config.port ?? 5432}/${config.database ?? 
\"pai\"}`;\n const url = new URL(connStr);\n const targetDb = url.pathname.slice(1);\n\n const adminUrl = new URL(connStr);\n adminUrl.pathname = \"/postgres\";\n const adminPool = new PgPool({\n connectionString: adminUrl.toString(),\n max: 1,\n connectionTimeoutMillis: 5000,\n });\n\n try {\n const check = await adminPool.query(\n \"SELECT 1 FROM pg_database WHERE datname = $1\",\n [targetDb]\n );\n if (check.rowCount === 0) {\n await adminPool.query(`CREATE DATABASE \"${targetDb}\"`);\n process.stderr.write(`[pai-postgres] Created database: ${targetDb}\\n`);\n }\n } finally {\n await adminPool.end();\n }\n\n const targetPool = new PgPool({\n connectionString: connStr,\n max: 1,\n connectionTimeoutMillis: 5000,\n });\n\n try {\n const tableCheck = await targetPool.query(\n \"SELECT 1 FROM information_schema.tables WHERE table_name = 'pai_chunks'\"\n );\n if (tableCheck.rowCount === 0) {\n const __dirname = dirname(fileURLToPath(import.meta.url));\n const initSqlPath = join(__dirname, \"../../docker/init.sql\");\n let initSql: string;\n try {\n initSql = readFileSync(initSqlPath, \"utf-8\");\n } catch {\n const altPath = join(__dirname, \"../docker/init.sql\");\n initSql = readFileSync(altPath, \"utf-8\");\n }\n await targetPool.query(initSql);\n process.stderr.write(`[pai-postgres] Applied schema to database: ${targetDb}\\n`);\n }\n\n // Run incremental migrations for existing databases\n await PostgresBackend.runMigrations(targetPool);\n } finally {\n await targetPool.end();\n }\n }\n\n /**\n * Run incremental migrations for existing databases.\n * Each migration is idempotent — safe to run on databases that already have the change.\n */\n private static async runMigrations(pool: Pool): Promise<void> {\n // Migration: add confidence column to vault_links if it does not exist\n const colCheck = await pool.query(\n `SELECT 1 FROM information_schema.columns\n WHERE table_name = 'vault_links' AND column_name = 'confidence'`\n );\n if (colCheck.rowCount === 0) {\n await pool.query(\n \"ALTER TABLE vault_links ADD COLUMN confidence TEXT NOT NULL DEFAULT 'EXTRACTED'\"\n );\n process.stderr.write(\"[pai-postgres] Migration: added confidence column to vault_links\\n\");\n }\n\n // Migration: create kg_triples table if it does not exist\n const kgCheck = await pool.query(\n `SELECT 1 FROM information_schema.tables WHERE table_name = 'kg_triples'`\n );\n if (kgCheck.rowCount === 0) {\n await pool.query(`\n CREATE TABLE kg_triples (\n id SERIAL PRIMARY KEY,\n subject TEXT NOT NULL,\n predicate TEXT NOT NULL,\n object TEXT NOT NULL,\n project_id INTEGER,\n source_session TEXT,\n valid_from TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\n valid_to TIMESTAMP,\n confidence TEXT DEFAULT 'EXTRACTED',\n created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP\n )\n `);\n await pool.query(`CREATE INDEX idx_kg_subject ON kg_triples(subject)`);\n await pool.query(`CREATE INDEX idx_kg_predicate ON kg_triples(predicate)`);\n await pool.query(`CREATE INDEX idx_kg_object ON kg_triples(object)`);\n await pool.query(`CREATE INDEX idx_kg_valid ON kg_triples(valid_from, valid_to)`);\n process.stderr.write(\"[pai-postgres] Migration: created kg_triples table\\n\");\n }\n }\n\n constructor(config: PostgresConfig) {\n const connStr =\n config.connectionString ??\n `postgresql://${config.user ?? \"pai\"}:${config.password ?? \"pai\"}@${config.host ?? \"localhost\"}:${config.port ?? 5432}/${config.database ?? \"pai\"}`;\n\n this.pool = new PgPool({\n connectionString: connStr,\n max: config.maxConnections ?? 
5,\n connectionTimeoutMillis: config.connectionTimeoutMs ?? 5000,\n idleTimeoutMillis: 30_000,\n });\n\n this.pool.on(\"error\", (err) => {\n process.stderr.write(`[pai-postgres] Pool error: ${err.message}\\n`);\n });\n }\n\n // -------------------------------------------------------------------------\n // Lifecycle\n // -------------------------------------------------------------------------\n\n async close(): Promise<void> {\n await this.pool.end();\n }\n\n /**\n * Expose the underlying pg.Pool for callers that need direct query access\n * (e.g. the daemon's observation IPC methods).\n */\n getPool(): Pool {\n return this.pool;\n }\n\n async getStats(): Promise<FederationStats> {\n const client = await this.pool.connect();\n try {\n const filesResult = await client.query<{ n: string }>(\n \"SELECT COUNT(*)::text AS n FROM pai_files\"\n );\n const chunksResult = await client.query<{ n: string }>(\n \"SELECT COUNT(*)::text AS n FROM pai_chunks\"\n );\n return {\n files: parseInt(filesResult.rows[0]?.n ?? \"0\", 10),\n chunks: parseInt(chunksResult.rows[0]?.n ?? \"0\", 10),\n };\n } finally {\n client.release();\n }\n }\n\n /**\n * Test the connection by running a trivial query.\n * Returns null on success, error message on failure.\n */\n async testConnection(): Promise<string | null> {\n let client: PoolClient | null = null;\n try {\n client = await this.pool.connect();\n await client.query(\"SELECT 1\");\n return null;\n } catch (e) {\n return e instanceof Error ? e.message : String(e);\n } finally {\n client?.release();\n }\n }\n\n // -------------------------------------------------------------------------\n // File tracking\n // -------------------------------------------------------------------------\n\n async getFileHash(projectId: number, path: string): Promise<string | undefined> {\n const result = await this.pool.query<{ hash: string }>(\n \"SELECT hash FROM pai_files WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n return result.rows[0]?.hash;\n }\n\n async upsertFile(file: FileRow): Promise<void> {\n await this.pool.query(\n `INSERT INTO pai_files (project_id, path, source, tier, hash, mtime, size)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n ON CONFLICT (project_id, path) DO UPDATE SET\n source = EXCLUDED.source,\n tier = EXCLUDED.tier,\n hash = EXCLUDED.hash,\n mtime = EXCLUDED.mtime,\n size = EXCLUDED.size`,\n [file.projectId, file.path, file.source, file.tier, file.hash, file.mtime, file.size]\n );\n }\n\n // -------------------------------------------------------------------------\n // Chunk management\n // -------------------------------------------------------------------------\n\n async getChunkIds(projectId: number, path: string): Promise<string[]> {\n const result = await this.pool.query<{ id: string }>(\n \"SELECT id FROM pai_chunks WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n return result.rows.map((r) => r.id);\n }\n\n async deleteChunksForFile(projectId: number, path: string): Promise<void> {\n await this.pool.query(\n \"DELETE FROM pai_chunks WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n }\n\n async insertChunks(chunks: ChunkRow[]): Promise<void> {\n if (chunks.length === 0) return;\n\n const client = await this.pool.connect();\n try {\n await client.query(\"BEGIN\");\n\n for (const c of chunks) {\n const safeText = c.text.replace(/\\0/g, \"\");\n\n await client.query(\n `INSERT INTO pai_chunks\n (id, project_id, source, tier, path, start_line, end_line, hash, text, updated_at, fts_vector)\n VALUES\n ($1, $2, 
$3, $4, $5, $6, $7, $8, $9, $10,\n to_tsvector('simple', $9))\n ON CONFLICT (id) DO UPDATE SET\n project_id = EXCLUDED.project_id,\n source = EXCLUDED.source,\n tier = EXCLUDED.tier,\n path = EXCLUDED.path,\n start_line = EXCLUDED.start_line,\n end_line = EXCLUDED.end_line,\n hash = EXCLUDED.hash,\n text = EXCLUDED.text,\n updated_at = EXCLUDED.updated_at,\n fts_vector = EXCLUDED.fts_vector`,\n [\n c.id, c.projectId, c.source, c.tier, c.path,\n c.startLine, c.endLine, c.hash, safeText, c.updatedAt,\n ]\n );\n }\n\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n }\n\n async getDistinctChunkPaths(projectId: number): Promise<string[]> {\n const result = await this.pool.query<{ path: string }>(\n \"SELECT DISTINCT path FROM pai_chunks WHERE project_id = $1\",\n [projectId]\n );\n return result.rows.map((r) => r.path);\n }\n\n async deletePaths(projectId: number, paths: string[]): Promise<void> {\n if (paths.length === 0) return;\n const client = await this.pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const path of paths) {\n await client.query(\"DELETE FROM pai_chunks WHERE project_id = $1 AND path = $2\", [projectId, path]);\n await client.query(\"DELETE FROM pai_files WHERE project_id = $1 AND path = $2\", [projectId, path]);\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n }\n\n async getUnembeddedChunkIds(projectId?: number): Promise<Array<{ id: string; text: string; project_id: number; path: string }>> {\n if (projectId !== undefined) {\n const result = await this.pool.query<{ id: string; text: string; project_id: number; path: string }>(\n \"SELECT id, text, project_id, path FROM pai_chunks WHERE embedding IS NULL AND project_id = $1 ORDER BY id\",\n [projectId]\n );\n return result.rows;\n }\n const result = await this.pool.query<{ id: string; text: string; project_id: number; path: string }>(\n \"SELECT id, text, project_id, path FROM pai_chunks WHERE embedding IS NULL ORDER BY id\"\n );\n return result.rows;\n }\n\n async updateEmbedding(chunkId: string, embedding: Buffer): Promise<void> {\n const vec = bufferToVector(embedding);\n const vecStr = \"[\" + vec.join(\",\") + \"]\";\n await this.pool.query(\n \"UPDATE pai_chunks SET embedding = $1::vector WHERE id = $2\",\n [vecStr, chunkId]\n );\n }\n\n // -------------------------------------------------------------------------\n // Search\n // -------------------------------------------------------------------------\n\n async searchKeyword(query: string, opts?: SearchOptions): Promise<SearchResult[]> {\n return searchKeyword(this.pool, query, opts);\n }\n\n async searchSemantic(queryEmbedding: Float32Array, opts?: SearchOptions): Promise<SearchResult[]> {\n return searchSemantic(this.pool, queryEmbedding, opts);\n }\n\n // -------------------------------------------------------------------------\n // Vault operations — delegated to vault.ts\n // -------------------------------------------------------------------------\n\n async upsertVaultFile(file: VaultFileRow): Promise<void> { return vault.upsertVaultFile(this.pool, file); }\n async deleteVaultFile(vaultPath: string): Promise<void> { return vault.deleteVaultFile(this.pool, vaultPath); }\n async getVaultFile(vaultPath: string): Promise<VaultFileRow | null> { return vault.getVaultFile(this.pool, vaultPath); }\n async getVaultFileByInode(inode: number, device: number): Promise<VaultFileRow 
| null> { return vault.getVaultFileByInode(this.pool, inode, device); }\n async getAllVaultFiles(): Promise<VaultFileRow[]> { return vault.getAllVaultFiles(this.pool); }\n async getRecentVaultFiles(sinceMs: number): Promise<VaultFileRow[]> { return vault.getRecentVaultFiles(this.pool, sinceMs); }\n async countVaultFiles(): Promise<number> { return vault.countVaultFiles(this.pool); }\n async countVaultFilesWithPrefix(prefix: string): Promise<number> { return vault.countVaultFilesWithPrefix(this.pool, prefix); }\n async countVaultFilesAfter(sinceMs: number): Promise<number> { return vault.countVaultFilesAfter(this.pool, sinceMs); }\n async getVaultFilesByPaths(paths: string[]): Promise<VaultFileRow[]> { return vault.getVaultFilesByPaths(this.pool, paths); }\n async getVaultFilesByPathsAfter(paths: string[], sinceMs: number): Promise<VaultFileRow[]> { return vault.getVaultFilesByPathsAfter(this.pool, paths, sinceMs); }\n async getAllVaultFilePaths(): Promise<string[]> { return vault.getAllVaultFilePaths(this.pool); }\n async getVaultFilePathsWithPrefix(prefix: string): Promise<string[]> { return vault.getVaultFilePathsWithPrefix(this.pool, prefix); }\n async getVaultFilePathsAfter(sinceMs: number): Promise<string[]> { return vault.getVaultFilePathsAfter(this.pool, sinceMs); }\n\n async upsertVaultAliases(aliases: VaultAliasRow[]): Promise<void> { return vault.upsertVaultAliases(this.pool, aliases); }\n async deleteVaultAliases(canonicalPath: string): Promise<void> { return vault.deleteVaultAliases(this.pool, canonicalPath); }\n async getVaultAlias(vaultPath: string): Promise<{ canonicalPath: string } | null> { return vault.getVaultAlias(this.pool, vaultPath); }\n\n async replaceLinksForSources(sourcePaths: string[], links: VaultLinkRow[]): Promise<void> { return vault.replaceLinksForSources(this.pool, sourcePaths, links); }\n async getLinksFromSource(sourcePath: string): Promise<VaultLinkRow[]> { return vault.getLinksFromSource(this.pool, sourcePath); }\n async getLinksToTarget(targetPath: string): Promise<VaultLinkRow[]> { return vault.getLinksToTarget(this.pool, targetPath); }\n async getVaultLinkGraph(): Promise<Array<{ source_path: string; target_path: string }>> { return vault.getVaultLinkGraph(this.pool); }\n async getDeadLinks(): Promise<Array<{ sourcePath: string; targetRaw: string }>> { return vault.getDeadLinks(this.pool); }\n async getDeadLinksWithLineNumbers(): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> { return vault.getDeadLinksWithLineNumbers(this.pool); }\n async getDeadLinksWithPrefix(prefix: string): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> { return vault.getDeadLinksWithPrefix(this.pool, prefix); }\n async getDeadLinksAfter(sinceMs: number): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> { return vault.getDeadLinksAfter(this.pool, sinceMs); }\n async countVaultLinksWithPrefix(prefix: string): Promise<number> { return vault.countVaultLinksWithPrefix(this.pool, prefix); }\n async countVaultLinksAfter(sinceMs: number): Promise<number> { return vault.countVaultLinksAfter(this.pool, sinceMs); }\n async getVaultLinksFromPaths(sourcePaths: string[]): Promise<VaultLinkRow[]> { return vault.getVaultLinksFromPaths(this.pool, sourcePaths); }\n async getVaultLinkEdges(): Promise<Array<{ source: string; target: string }>> { return vault.getVaultLinkEdges(this.pool); }\n async getVaultLinkEdgesWithPrefix(prefix: string): Promise<Array<{ source: string; target: string }>> { return 
vault.getVaultLinkEdgesWithPrefix(this.pool, prefix); }\n async getVaultLinkEdgesAfter(sinceMs: number): Promise<Array<{ source: string; target: string }>> { return vault.getVaultLinkEdgesAfter(this.pool, sinceMs); }\n\n async upsertVaultHealth(rows: VaultHealthRow[]): Promise<void> { return vault.upsertVaultHealth(this.pool, rows); }\n async getVaultHealth(vaultPath: string): Promise<VaultHealthRow | null> { return vault.getVaultHealth(this.pool, vaultPath); }\n async getOrphans(): Promise<VaultHealthRow[]> { return vault.getOrphans(this.pool); }\n async getOrphansWithPrefix(prefix: string): Promise<string[]> { return vault.getOrphansWithPrefix(this.pool, prefix); }\n async getOrphansAfter(sinceMs: number): Promise<string[]> { return vault.getOrphansAfter(this.pool, sinceMs); }\n async getLowConnectivity(): Promise<string[]> { return vault.getLowConnectivity(this.pool); }\n async getLowConnectivityWithPrefix(prefix: string): Promise<string[]> { return vault.getLowConnectivityWithPrefix(this.pool, prefix); }\n async getLowConnectivityAfter(sinceMs: number): Promise<string[]> { return vault.getLowConnectivityAfter(this.pool, sinceMs); }\n\n async upsertNameIndex(entries: VaultNameEntry[]): Promise<void> { return vault.upsertNameIndex(this.pool, entries); }\n async replaceNameIndex(entries: VaultNameEntry[]): Promise<void> { return vault.replaceNameIndex(this.pool, entries); }\n async resolveVaultName(name: string): Promise<string[]> { return vault.resolveVaultName(this.pool, name); }\n async searchVaultNameIndex(query: string, limit?: number): Promise<string[]> { return vault.searchVaultNameIndex(this.pool, query, limit); }\n\n // Legacy memory_chunks methods (used by graph and zettelkasten modules)\n async getChunksWithEmbeddings(projectId: number, limit: number): Promise<Array<{ path: string; text: string; embedding: Buffer }>> {\n const r = await this.pool.query<{ path: string; text: string; embedding: Buffer }>(\n `SELECT path, text, embedding FROM memory_chunks WHERE project_id = $1 AND embedding IS NOT NULL ORDER BY path, start_line LIMIT $2`,\n [projectId, limit]\n );\n return r.rows;\n }\n\n async getChunksForPath(projectId: number, path: string, limit = 20): Promise<Array<{ text: string; embedding: Buffer | null }>> {\n const r = await this.pool.query<{ text: string; embedding: Buffer | null }>(\n `SELECT text, embedding FROM memory_chunks WHERE project_id = $1 AND path = $2 AND embedding IS NOT NULL ORDER BY start_line LIMIT $3`,\n [projectId, path, limit]\n );\n return r.rows;\n }\n\n async searchChunksByText(projectId: number, query: string, limit: number): Promise<Array<{ path: string; text: string }>> {\n const r = await this.pool.query<{ path: string; text: string }>(\n `SELECT DISTINCT path, text FROM memory_chunks WHERE project_id = $1 AND lower(text) LIKE lower($2) LIMIT $3`,\n [projectId, `%${query}%`, limit]\n );\n return r.rows;\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;AASA,SAAgB,eAAe,KAAuB;CACpD,MAAM,SAAmB,EAAE;AAC3B,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK,EACnC,QAAO,KAAK,IAAI,YAAY,EAAE,CAAC;AAEjC,QAAO;;;;;;;;;;;;;AAcT,SAAgB,eAAe,OAAuB;CACpD,MAAM,SAAS,MACZ,aAAa,CACb,MAAM,cAAc,CACpB,OAAO,QAAQ,CACf,QAAQ,MAAM,EAAE,UAAU,EAAE,CAC5B,QAAQ,MAAM,CAAC,WAAW,IAAI,EAAE,CAAC,CAEjC,KAAK,MAAM,EAAE,QAAQ,MAAM,KAAK,CAAC,QAAQ,aAAa,GAAG,CAAC,CAC1D,OAAO,QAAQ;AAElB,KAAI,OAAO,WAAW,EAEpB,QADY,MAAM,QAAQ,eAAe,IAAI,CAAC,MAAM,CAAC,MAAM,MAAM,CAAC,OAAO,QAAQ,CAAC,KAAK,MAAM,IAC/E;AAGhB,QAAO,OAAO,KAAK,MAAM;;;;;;;;AChC3B,eAAsB,cACpB,MACA,OACA,MACyB;CACzB,MAAM,aAAa,MAAM,cAAc;CAEvC,MAAM,UAAU,eAAe,MAAM;AACrC,KAAI,CAAC,QAAS,QAAO,EAAE;CAEvB,MAAM,aAAuB,CAAC,yCAAyC;CACvE,MAAM,SAA8B,CAAC,QAAQ;CAC7C,IAAI,WAAW;AAEf,KAAI,MAAM,cAAc,KAAK,WAAW,SAAS,GAAG;EAClD,MAAM,eAAe,KAAK,WAAW,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AAC3E,aAAW,KAAK,kBAAkB,aAAa,GAAG;AAClD,SAAO,KAAK,GAAG,KAAK,WAAW;;AAGjC,KAAI,MAAM,WAAW,KAAK,QAAQ,SAAS,GAAG;EAC5C,MAAM,eAAe,KAAK,QAAQ,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACxE,aAAW,KAAK,cAAc,aAAa,GAAG;AAC9C,SAAO,KAAK,GAAG,KAAK,QAAQ;;AAG9B,KAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;EACxC,MAAM,eAAe,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACtE,aAAW,KAAK,YAAY,aAAa,GAAG;AAC5C,SAAO,KAAK,GAAG,KAAK,MAAM;;AAG5B,QAAO,KAAK,WAAW;CACvB,MAAM,aAAa,IAAI;CAEvB,MAAM,MAAM;;;;;;;;;;;YAWF,WAAW,KAAK,QAAQ,CAAC;;YAEzB,WAAW;;AAGrB,KAAI;AAYF,UAXe,MAAM,KAAK,MASvB,KAAK,OAAO,EAED,KAAK,KAAK,SAAS;GAC/B,WAAW,IAAI;GACf,MAAM,IAAI;GACV,WAAW,IAAI;GACf,SAAS,IAAI;GACb,SAAS,IAAI;GACb,OAAO,IAAI;GACX,MAAM,IAAI;GACV,QAAQ,IAAI;GACb,EAAE;UACI,GAAG;AACV,UAAQ,OAAO,MAAM,uCAAuC,EAAE,IAAI;AAClE,SAAO,EAAE;;;;;;AAOb,eAAsB,eACpB,MACA,gBACA,MACyB;CACzB,MAAM,aAAa,MAAM,cAAc;CAEvC,MAAM,aAAuB,CAAC,wBAAwB;CACtD,MAAM,SAA8B,EAAE;CACtC,IAAI,WAAW;CAEf,MAAM,SAAS,MAAM,MAAM,KAAK,eAAe,CAAC,KAAK,IAAI,GAAG;AAC5D,QAAO,KAAK,OAAO;CACnB,MAAM,WAAW,IAAI;AAErB,KAAI,MAAM,cAAc,KAAK,WAAW,SAAS,GAAG;EAClD,MAAM,eAAe,KAAK,WAAW,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AAC3E,aAAW,KAAK,kBAAkB,aAAa,GAAG;AAClD,SAAO,KAAK,GAAG,KAAK,WAAW;;AAGjC,KAAI,MAAM,WAAW,KAAK,QAAQ,SAAS,GAAG;EAC5C,MAAM,eAAe,KAAK,QAAQ,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACxE,aAAW,KAAK,cAAc,aAAa,GAAG;AAC9C,SAAO,KAAK,GAAG,KAAK,QAAQ;;AAG9B,KAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;EACxC,MAAM,eAAe,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACtE,aAAW,KAAK,YAAY,aAAa,GAAG;AAC5C,SAAO,KAAK,GAAG,KAAK,MAAM;;AAG5B,QAAO,KAAK,WAAW;CACvB,MAAM,aAAa,IAAI;CAGvB,MAAM,MAAM;;;;;;;;;2BASa,SAAS;;YAExB,WAAW,KAAK,QAAQ,CAAC;6BACR,SAAS;YAC1B,WAAW;;AAGrB,KAAI;EACF,MAAM,SAAS,MAAM,KAAK,MASvB,KAAK,OAAO;EAEf,MAAM,WAAW,MAAM,YAAY;AAEnC,SAAO,OAAO,KACX,KAAK,SAAS;GACb,WAAW,IAAI;GACf,MAAM,IAAI;GACV,WAAW,IAAI;GACf,SAAS,IAAI;GACb,SAAS,IAAI;GACb,OAAO,IAAI;GACX,MAAM,IAAI;GACV,QAAQ,IAAI;GACb,EAAE,CACF,QAAQ,MAAM,EAAE,SAAS,SAAS;UAC9B,GAAG;AACV,UAAQ,OAAO,MAAM,wCAAwC,EAAE,IAAI;AACnE,SAAO,EAAE;;;;;;ACjKb,eAAsB,gBAAgB,MAAY,MAAmC;AACnF,OAAM,KAAK,MACT;;;;;0CAMA;EAAC,KAAK;EAAW,KAAK;EAAO,KAAK;EAAQ,KAAK;EAAM,KAAK;EAAO,KAAK;EAAU,CACjF;;AAGH,eAAsB,gBAAgB,MAAY,WAAkC;CAClF,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,QAAM,OAAO,MAAM,kDAAkD,CAAC,UAAU,CAAC;AACjF,QAAM,OAAO,MAAM,kDAAkD,CAAC,UAAU,CAAC;AACjF,QAAM,OAAO,MAAM,sDAAsD,CAAC,UAAU,CAAC;AACrF,QAAM,OAAO,MAAM,0EAA0E,CAAC,UAAU,CAAC;AACzG,QAAM,OAAO,MAAM,iDAAiD,CAAC,UAAU,CAAC;AAChF,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAMpB,SAAS,gBAAgB,KAAmC;AAC1D,QAAO;EACL,WAAW,IAAI;EACf,OAAO,OAAO,IAAI,MAAM;EACxB,QAAQ,OAAO,IAAI,OAAO;EAC1B,MAAM,IAAI;EACV,OAAO,IAAI;EACX,WAAW,OAAO,IAAI,WAAW;EAClC;;AAGH,eAAsB,aAAa,MAAY,WAAiD;CAC9F,MAAM,IAAI,MAAM,KAAK,MACnB,oGACA,CAAC,UAAU,CACZ;AAC
D,QAAO,EAAE,KAAK,WAAW,IAAI,OAAO,gBAAgB,EAAE,KAAK,GAAG;;AAGhE,eAAsB,oBAAoB,MAAY,OAAe,QAA8C;CACjH,MAAM,IAAI,MAAM,KAAK,MACnB,uHACA,CAAC,OAAO,OAAO,CAChB;AACD,QAAO,EAAE,KAAK,WAAW,IAAI,OAAO,gBAAgB,EAAE,KAAK,GAAG;;AAGhE,eAAsB,iBAAiB,MAAqC;AAE1E,SADU,MAAM,KAAK,MAAsB,6EAA6E,EAC/G,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,oBAAoB,MAAY,SAA0C;AAK9F,SAJU,MAAM,KAAK,MACnB,oGACA,CAAC,QAAQ,CACV,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,gBAAgB,MAA6B;CACjE,MAAM,IAAI,MAAM,KAAK,MAAqB,8CAA8C;AACxF,QAAO,SAAS,EAAE,KAAK,IAAI,KAAK,KAAK,GAAG;;AAG1C,eAAsB,0BAA0B,MAAY,QAAiC;CAC3F,MAAM,IAAI,MAAM,KAAK,MAAqB,kEAAkE,CAAC,GAAG,OAAO,GAAG,CAAC;AAC3H,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,qBAAqB,MAAY,SAAkC;CACvF,MAAM,IAAI,MAAM,KAAK,MAAqB,+DAA+D,CAAC,QAAQ,CAAC;AACnH,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,qBAAqB,MAAY,OAA0C;AAC/F,KAAI,MAAM,WAAW,EAAG,QAAO,EAAE;CACjC,MAAM,eAAe,MAAM,KAAK,GAAG,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK;AAKhE,SAJU,MAAM,KAAK,MACnB,mGAAmG,aAAa,IAChH,MACD,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,0BAA0B,MAAY,OAAiB,SAA0C;AACrH,KAAI,MAAM,WAAW,EAAG,QAAO,EAAE;CACjC,MAAM,eAAe,MAAM,KAAK,GAAG,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK;AAKhE,SAJU,MAAM,KAAK,MACnB,mGAAmG,aAAa,uBAAuB,MAAM,SAAS,EAAE,2BACxJ,CAAC,GAAG,OAAO,QAAQ,CACpB,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,qBAAqB,MAA+B;AAExE,SADU,MAAM,KAAK,MAA8B,qCAAqC,EAC/E,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,4BAA4B,MAAY,QAAmC;AAK/F,SAJU,MAAM,KAAK,MACnB,+DACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,uBAAuB,MAAY,SAAoC;AAK3F,SAJU,MAAM,KAAK,MACnB,4DACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAO1C,eAAsB,mBAAmB,MAAY,SAAyC;AAC5F,KAAI,QAAQ,WAAW,EAAG;CAC1B,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,OAAK,MAAM,KAAK,QACd,OAAM,OAAO,MACX;;;;8DAKA;GAAC,EAAE;GAAW,EAAE;GAAe,EAAE;GAAO,EAAE;GAAO,CAClD;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,mBAAmB,MAAY,eAAsC;AACzF,OAAM,KAAK,MAAM,uDAAuD,CAAC,cAAc,CAAC;;AAG1F,eAAsB,cAAc,MAAY,WAA8D;CAC5G,MAAM,IAAI,MAAM,KAAK,MACnB,kEACA,CAAC,UAAU,CACZ;AACD,QAAO,EAAE,KAAK,SAAS,IAAI,EAAE,eAAe,EAAE,KAAK,GAAG,gBAAgB,GAAG;;AAS3E,SAAS,gBAAgB,KAAmC;AAC1D,QAAO;EACL,YAAY,IAAI;EAChB,WAAW,IAAI;EACf,YAAY,IAAI;EAChB,UAAU,IAAI;EACd,YAAY,IAAI;EAChB,YAAa,IAAI,cAA6C;EAC/D;;AAGH,eAAsB,uBAAuB,MAAY,aAAuB,OAAsC;CACpH,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,MAAI,YAAY,SAAS,EACvB,OAAM,OAAO,MACX,+DACA,CAAC,YAAY,CACd;AAEH,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,KAAK;GAC1C,MAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,IAAI;GACrC,MAAM,SAAmB,EAAE;GAC3B,MAAM,SAAqC,EAAE;GAC7C,IAAI,MAAM;AACV,QAAK,MAAM,KAAK,OAAO;AACrB,WAAO,KAAK,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,GAAG;AACjF,WAAO,KAAK,EAAE,YAAY,EAAE,WAAW,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,EAAE,cAAc,YAAY;;AAE7G,SAAM,OAAO,MACX;kBACU,OAAO,KAAK,KAAK,CAAC;;;8CAI5B,OACD;;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,mBAAmB,MAAY,YAA6C;AAKhG,SAJU,MAAM,KAAK,MACnB,2HACA,CAAC,WAAW,CACb,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,iBAAiB,MAAY,YAA6C;AAK9F,SAJU,MAAM,KAAK,MACnB,2HACA,CAAC,WAAW,CACb,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,kBAAkB,MAA0E;AAIhH,SAHU,MAAM,KAAK,MACnB,iFACD,EACQ;;AAGX,eAAsB,aAAa,MAAuE;AAIxG,SAHU,MAAM,KAAK,MACnB,4EACD,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,EAAE;;AAGxF,eAAsB,4BAA4B,MAA2F;AAI3I,SAHU,MAAM,KAAK,MACnB,yFACD,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,YAAY,IAAI;EAAa,EAAE;;AAGrH,eAAsB,uBAAuB,MAAY,QAA+F;AAKtJ,SAJU,MAAM,KAAK,MACnB,kHACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,YAAY,IAAI;E
AAa,EAAE;;AAGrH,eAAsB,kBAAkB,MAAY,SAAgG;AAKlJ,SAJU,MAAM,KAAK,MACnB,wKACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,YAAY,IAAI;EAAa,EAAE;;AAGrH,eAAsB,0BAA0B,MAAY,QAAiC;CAC3F,MAAM,IAAI,MAAM,KAAK,MAAqB,mEAAmE,CAAC,GAAG,OAAO,GAAG,CAAC;AAC5H,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,qBAAqB,MAAY,SAAkC;CACvF,MAAM,IAAI,MAAM,KAAK,MACnB,yHACA,CAAC,QAAQ,CACV;AACD,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,uBAAuB,MAAY,aAAgD;AACvG,KAAI,YAAY,WAAW,EAAG,QAAO,EAAE;CACvC,MAAM,eAAe,YAAY,KAAK,GAAG,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK;AAKtE,SAJU,MAAM,KAAK,MACnB,0HAA0H,aAAa,gCACvI,YACD,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,kBAAkB,MAAgE;AAItG,SAHU,MAAM,KAAK,MACnB,8GACD,EACQ;;AAGX,eAAsB,4BAA4B,MAAY,QAAoE;AAKhI,SAJU,MAAM,KAAK,MACnB,uIACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ;;AAGX,eAAsB,uBAAuB,MAAY,SAAqE;AAK5H,SAJU,MAAM,KAAK,MACnB,6LACA,CAAC,QAAQ,CACV,EACQ;;AASX,SAAS,kBAAkB,KAAuC;AAChE,QAAO;EACL,WAAW,IAAI;EACf,cAAc,IAAI;EAClB,eAAe,IAAI;EACnB,eAAe,IAAI;EACnB,UAAU,IAAI,cAAc;EAC5B,YAAY,OAAO,IAAI,YAAY;EACpC;;AAGH,eAAsB,kBAAkB,MAAY,MAAuC;AACzF,KAAI,KAAK,WAAW,EAAG;CACvB,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,OAAK,MAAM,KAAK,KACd,OAAM,OAAO,MACX;;;;;;;gDAQA;GAAC,EAAE;GAAW,EAAE;GAAc,EAAE;GAAe,EAAE;GAAe,EAAE,WAAW,IAAI;GAAG,EAAE;GAAW,CAClG;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,eAAe,MAAY,WAAmD;CAClG,MAAM,IAAI,MAAM,KAAK,MACnB,qIACA,CAAC,UAAU,CACZ;AACD,QAAO,EAAE,KAAK,WAAW,IAAI,OAAO,kBAAkB,EAAE,KAAK,GAAG;;AAGlE,eAAsB,WAAW,MAAuC;AAItE,SAHU,MAAM,KAAK,MACnB,kIACD,EACQ,KAAK,KAAI,SAAQ;EAAE,GAAG,kBAAkB,IAAI;EAAE,UAAU;EAAM,EAAE;;AAG3E,eAAsB,qBAAqB,MAAY,QAAmC;AAKxF,SAJU,MAAM,KAAK,MACnB,kFACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,gBAAgB,MAAY,SAAoC;AAKpF,SAJU,MAAM,KAAK,MACnB,gJACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,mBAAmB,MAA+B;AAItE,SAHU,MAAM,KAAK,MACnB,gFACD,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,6BAA6B,MAAY,QAAmC;AAKhG,SAJU,MAAM,KAAK,MACnB,wGACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,wBAAwB,MAAY,SAAoC;AAK5F,SAJU,MAAM,KAAK,MACnB,yKACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAO1C,eAAsB,gBAAgB,MAAY,SAA0C;AAC1F,KAAI,QAAQ,WAAW,EAAG;CAC1B,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,OAAK,MAAM,KAAK,QACd,OAAM,OAAO,MACX;qEAEA,CAAC,EAAE,MAAM,EAAE,UAAU,CACtB;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,IAAI;AACX,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,iBAAiB,MAAY,SAA0C;CAC3F,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,QAAM,OAAO,MAAM,+BAA+B;AAClD,OAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK,KAAK;GAC5C,MAAM,QAAQ,QAAQ,MAAM,GAAG,IAAI,IAAI;GACvC,MAAM,SAAmB,EAAE;GAC3B,MAAM,SAAmB,EAAE;GAC3B,IAAI,MAAM;AACV,QAAK,MAAM,KAAK,OAAO;AACrB,WAAO,KAAK,KAAK,MAAM,KAAK,MAAM,GAAG;AACrC,WAAO,KAAK,EAAE,MAAM,EAAE,UAAU;;AAElC,SAAM,OAAO,MACX,0DAA0D,OAAO,KAAK,KAAK,IAC3E,OACD;;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,iBAAiB,MAAY,MAAiC;AAKlF,SAJU,MAAM,KAAK,MACnB,2DACA,CAAC,KAAK,CACP,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,qBAAqB,MAAY,OAAe,QAAQ,KAAwB;AAKpG,SAJU,MAAM,KAAK,MACnB,8FACA,CAAC,IAAI,MAAM,IAAI,MAAM,CACtB,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;;;;;;;;;;;;;;ACzd1C,MAAM,EAAE,MAAM,WAAW;AAEzB,IAAa,kBAAb,MAAa,gBAA0C;CACrD,AAAS,cAAc;CAEvB,AAAQ;;;;;;;CAQR,aAAa,eAAe,QAAuC;EACjE,MAAM,UACJ,OAAO,oBACP,gBAAgB,OAAO,QAAQ,MAAM,GAAG,OAAO,YAAY,MAAM,GAAG,OAAO,QAAQ,YAAY,GAAG,OAAO,QAAQ,KAAK,GAAG,OAAO,YAAY;EAE9I,MAAM,WADM,IAAI,IAAI,QAAQ,CACP,SAAS,MAAM,EAAE;EAEtC,MAAM,WAAW,
IAAI,IAAI,QAAQ;AACjC,WAAS,WAAW;EACpB,MAAM,YAAY,IAAI,OAAO;GAC3B,kBAAkB,SAAS,UAAU;GACrC,KAAK;GACL,yBAAyB;GAC1B,CAAC;AAEF,MAAI;AAKF,QAJc,MAAM,UAAU,MAC5B,gDACA,CAAC,SAAS,CACX,EACS,aAAa,GAAG;AACxB,UAAM,UAAU,MAAM,oBAAoB,SAAS,GAAG;AACtD,YAAQ,OAAO,MAAM,oCAAoC,SAAS,IAAI;;YAEhE;AACR,SAAM,UAAU,KAAK;;EAGvB,MAAM,aAAa,IAAI,OAAO;GAC5B,kBAAkB;GAClB,KAAK;GACL,yBAAyB;GAC1B,CAAC;AAEF,MAAI;AAIF,QAHmB,MAAM,WAAW,MAClC,0EACD,EACc,aAAa,GAAG;IAC7B,MAAM,YAAY,QAAQ,cAAc,OAAO,KAAK,IAAI,CAAC;IACzD,MAAM,cAAc,KAAK,WAAW,wBAAwB;IAC5D,IAAI;AACJ,QAAI;AACF,eAAU,aAAa,aAAa,QAAQ;YACtC;AAEN,eAAU,aADM,KAAK,WAAW,qBAAqB,EACrB,QAAQ;;AAE1C,UAAM,WAAW,MAAM,QAAQ;AAC/B,YAAQ,OAAO,MAAM,8CAA8C,SAAS,IAAI;;AAIlF,SAAM,gBAAgB,cAAc,WAAW;YACvC;AACR,SAAM,WAAW,KAAK;;;;;;;CAQ1B,aAAqB,cAAc,MAA2B;AAM5D,OAJiB,MAAM,KAAK,MAC1B;wEAED,EACY,aAAa,GAAG;AAC3B,SAAM,KAAK,MACT,kFACD;AACD,WAAQ,OAAO,MAAM,qEAAqE;;AAO5F,OAHgB,MAAM,KAAK,MACzB,0EACD,EACW,aAAa,GAAG;AAC1B,SAAM,KAAK,MAAM;;;;;;;;;;;;;QAaf;AACF,SAAM,KAAK,MAAM,uDAAuD;AACxE,SAAM,KAAK,MAAM,yDAAyD;AAC1E,SAAM,KAAK,MAAM,sDAAsD;AACvE,SAAM,KAAK,MAAM,oEAAoE;AACrF,WAAQ,OAAO,MAAM,uDAAuD;;;CAIhF,YAAY,QAAwB;AAKlC,OAAK,OAAO,IAAI,OAAO;GACrB,kBAJA,OAAO,oBACP,gBAAgB,OAAO,QAAQ,MAAM,GAAG,OAAO,YAAY,MAAM,GAAG,OAAO,QAAQ,YAAY,GAAG,OAAO,QAAQ,KAAK,GAAG,OAAO,YAAY;GAI5I,KAAK,OAAO,kBAAkB;GAC9B,yBAAyB,OAAO,uBAAuB;GACvD,mBAAmB;GACpB,CAAC;AAEF,OAAK,KAAK,GAAG,UAAU,QAAQ;AAC7B,WAAQ,OAAO,MAAM,8BAA8B,IAAI,QAAQ,IAAI;IACnE;;CAOJ,MAAM,QAAuB;AAC3B,QAAM,KAAK,KAAK,KAAK;;;;;;CAOvB,UAAgB;AACd,SAAO,KAAK;;CAGd,MAAM,WAAqC;EACzC,MAAM,SAAS,MAAM,KAAK,KAAK,SAAS;AACxC,MAAI;GACF,MAAM,cAAc,MAAM,OAAO,MAC/B,4CACD;GACD,MAAM,eAAe,MAAM,OAAO,MAChC,6CACD;AACD,UAAO;IACL,OAAO,SAAS,YAAY,KAAK,IAAI,KAAK,KAAK,GAAG;IAClD,QAAQ,SAAS,aAAa,KAAK,IAAI,KAAK,KAAK,GAAG;IACrD;YACO;AACR,UAAO,SAAS;;;;;;;CAQpB,MAAM,iBAAyC;EAC7C,IAAI,SAA4B;AAChC,MAAI;AACF,YAAS,MAAM,KAAK,KAAK,SAAS;AAClC,SAAM,OAAO,MAAM,WAAW;AAC9B,UAAO;WACA,GAAG;AACV,UAAO,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;YACzC;AACR,WAAQ,SAAS;;;CAQrB,MAAM,YAAY,WAAmB,MAA2C;AAK9E,UAJe,MAAM,KAAK,KAAK,MAC7B,kEACA,CAAC,WAAW,KAAK,CAClB,EACa,KAAK,IAAI;;CAGzB,MAAM,WAAW,MAA8B;AAC7C,QAAM,KAAK,KAAK,MACd;;;;;;;kCAQA;GAAC,KAAK;GAAW,KAAK;GAAM,KAAK;GAAQ,KAAK;GAAM,KAAK;GAAM,KAAK;GAAO,KAAK;GAAK,CACtF;;CAOH,MAAM,YAAY,WAAmB,MAAiC;AAKpE,UAJe,MAAM,KAAK,KAAK,MAC7B,iEACA,CAAC,WAAW,KAAK,CAClB,EACa,KAAK,KAAK,MAAM,EAAE,GAAG;;CAGrC,MAAM,oBAAoB,WAAmB,MAA6B;AACxE,QAAM,KAAK,KAAK,MACd,8DACA,CAAC,WAAW,KAAK,CAClB;;CAGH,MAAM,aAAa,QAAmC;AACpD,MAAI,OAAO,WAAW,EAAG;EAEzB,MAAM,SAAS,MAAM,KAAK,KAAK,SAAS;AACxC,MAAI;AACF,SAAM,OAAO,MAAM,QAAQ;AAE3B,QAAK,MAAM,KAAK,QAAQ;IACtB,MAAM,WAAW,EAAE,KAAK,QAAQ,OAAO,GAAG;AAE1C,UAAM,OAAO,MACX;;;;;;;;;;;;;;;gDAgBA;KACE,EAAE;KAAI,EAAE;KAAW,EAAE;KAAQ,EAAE;KAAM,EAAE;KACvC,EAAE;KAAW,EAAE;KAAS,EAAE;KAAM;KAAU,EAAE;KAC7C,CACF;;AAGH,SAAM,OAAO,MAAM,SAAS;WACrB,GAAG;AACV,SAAM,OAAO,MAAM,WAAW;AAC9B,SAAM;YACE;AACR,UAAO,SAAS;;;CAIpB,MAAM,sBAAsB,WAAsC;AAKhE,UAJe,MAAM,KAAK,KAAK,MAC7B,8DACA,CAAC,UAAU,CACZ,EACa,KAAK,KAAK,MAAM,EAAE,KAAK;;CAGvC,MAAM,YAAY,WAAmB,OAAgC;AACnE,MAAI,MAAM,WAAW,EAAG;EACxB,MAAM,SAAS,MAAM,KAAK,KAAK,SAAS;AACxC,MAAI;AACF,SAAM,OAAO,MAAM,QAAQ;AAC3B,QAAK,MAAM,QAAQ,OAAO;AACxB,UAAM,OAAO,MAAM,8DAA8D,CAAC,WAAW,KAAK,CAAC;AACnG,UAAM,OAAO,MAAM,6DAA6D,CAAC,WAAW,KAAK,CAAC;;AAEpG,SAAM,OAAO,MAAM,SAAS;WACrB,GAAG;AACV,SAAM,OAAO,MAAM,WAAW;AAC9B,SAAM;YACE;AACR,UAAO,SAAS;;;CAIpB,MAAM,sBAAsB,WAAoG;AAC9H,MAAI,cAAc,OAKhB,SAJe,MAAM,KAAK,KAAK,MAC7B,6GACA,CAAC,UAAU,CACZ,EACa;AAKhB,UAHe,MAAM,KAAK,KAAK,MAC7B,wFACD,EACa;;CAGhB,MAAM,gBAAgB,SAAiB,WAAkC;EAEvE,MAAM,SAAS,MADH,eAAe,UAAU,CACZ,KAAK,IAAI,GAAG;AACrC,QAAM,KAAK,KAAK,MACd,8DACA,CAAC,QAAQ,QAAQ,CAClB;;CAOH,MAAM,cAAc,OAAe,MAA+C;AA
ChF,SAAO,cAAc,KAAK,MAAM,OAAO,KAAK;;CAG9C,MAAM,eAAe,gBAA8B,MAA+C;AAChG,SAAO,eAAe,KAAK,MAAM,gBAAgB,KAAK;;CAOxD,MAAM,gBAAgB,MAAmC;AAAE,SAAOA,gBAAsB,KAAK,MAAM,KAAK;;CACxG,MAAM,gBAAgB,WAAkC;AAAE,SAAOC,gBAAsB,KAAK,MAAM,UAAU;;CAC5G,MAAM,aAAa,WAAiD;AAAE,SAAOC,aAAmB,KAAK,MAAM,UAAU;;CACrH,MAAM,oBAAoB,OAAe,QAA8C;AAAE,SAAOC,oBAA0B,KAAK,MAAM,OAAO,OAAO;;CACnJ,MAAM,mBAA4C;AAAE,SAAOC,iBAAuB,KAAK,KAAK;;CAC5F,MAAM,oBAAoB,SAA0C;AAAE,SAAOC,oBAA0B,KAAK,MAAM,QAAQ;;CAC1H,MAAM,kBAAmC;AAAE,SAAOC,gBAAsB,KAAK,KAAK;;CAClF,MAAM,0BAA0B,QAAiC;AAAE,SAAOC,0BAAgC,KAAK,MAAM,OAAO;;CAC5H,MAAM,qBAAqB,SAAkC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,QAAQ;;CACpH,MAAM,qBAAqB,OAA0C;AAAE,SAAOC,qBAA2B,KAAK,MAAM,MAAM;;CAC1H,MAAM,0BAA0B,OAAiB,SAA0C;AAAE,SAAOC,0BAAgC,KAAK,MAAM,OAAO,QAAQ;;CAC9J,MAAM,uBAA0C;AAAE,SAAOC,qBAA2B,KAAK,KAAK;;CAC9F,MAAM,4BAA4B,QAAmC;AAAE,SAAOC,4BAAkC,KAAK,MAAM,OAAO;;CAClI,MAAM,uBAAuB,SAAoC;AAAE,SAAOC,uBAA6B,KAAK,MAAM,QAAQ;;CAE1H,MAAM,mBAAmB,SAAyC;AAAE,SAAOC,mBAAyB,KAAK,MAAM,QAAQ;;CACvH,MAAM,mBAAmB,eAAsC;AAAE,SAAOC,mBAAyB,KAAK,MAAM,cAAc;;CAC1H,MAAM,cAAc,WAA8D;AAAE,SAAOC,cAAoB,KAAK,MAAM,UAAU;;CAEpI,MAAM,uBAAuB,aAAuB,OAAsC;AAAE,SAAOC,uBAA6B,KAAK,MAAM,aAAa,MAAM;;CAC9J,MAAM,mBAAmB,YAA6C;AAAE,SAAOC,mBAAyB,KAAK,MAAM,WAAW;;CAC9H,MAAM,iBAAiB,YAA6C;AAAE,SAAOC,iBAAuB,KAAK,MAAM,WAAW;;CAC1H,MAAM,oBAAkF;AAAE,SAAOC,kBAAwB,KAAK,KAAK;;CACnI,MAAM,eAA0E;AAAE,SAAOC,aAAmB,KAAK,KAAK;;CACtH,MAAM,8BAA6G;AAAE,SAAOC,4BAAkC,KAAK,KAAK;;CACxK,MAAM,uBAAuB,QAA+F;AAAE,SAAOC,uBAA6B,KAAK,MAAM,OAAO;;CACpL,MAAM,kBAAkB,SAAgG;AAAE,SAAOC,kBAAwB,KAAK,MAAM,QAAQ;;CAC5K,MAAM,0BAA0B,QAAiC;AAAE,SAAOC,0BAAgC,KAAK,MAAM,OAAO;;CAC5H,MAAM,qBAAqB,SAAkC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,QAAQ;;CACpH,MAAM,uBAAuB,aAAgD;AAAE,SAAOC,uBAA6B,KAAK,MAAM,YAAY;;CAC1I,MAAM,oBAAwE;AAAE,SAAOC,kBAAwB,KAAK,KAAK;;CACzH,MAAM,4BAA4B,QAAoE;AAAE,SAAOC,4BAAkC,KAAK,MAAM,OAAO;;CACnK,MAAM,uBAAuB,SAAqE;AAAE,SAAOC,uBAA6B,KAAK,MAAM,QAAQ;;CAE3J,MAAM,kBAAkB,MAAuC;AAAE,SAAOC,kBAAwB,KAAK,MAAM,KAAK;;CAChH,MAAM,eAAe,WAAmD;AAAE,SAAOC,eAAqB,KAAK,MAAM,UAAU;;CAC3H,MAAM,aAAwC;AAAE,SAAOC,WAAiB,KAAK,KAAK;;CAClF,MAAM,qBAAqB,QAAmC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,OAAO;;CACpH,MAAM,gBAAgB,SAAoC;AAAE,SAAOC,gBAAsB,KAAK,MAAM,QAAQ;;CAC5G,MAAM,qBAAwC;AAAE,SAAOC,mBAAyB,KAAK,KAAK;;CAC1F,MAAM,6BAA6B,QAAmC;AAAE,SAAOC,6BAAmC,KAAK,MAAM,OAAO;;CACpI,MAAM,wBAAwB,SAAoC;AAAE,SAAOC,wBAA8B,KAAK,MAAM,QAAQ;;CAE5H,MAAM,gBAAgB,SAA0C;AAAE,SAAOC,gBAAsB,KAAK,MAAM,QAAQ;;CAClH,MAAM,iBAAiB,SAA0C;AAAE,SAAOC,iBAAuB,KAAK,MAAM,QAAQ;;CACpH,MAAM,iBAAiB,MAAiC;AAAE,SAAOC,iBAAuB,KAAK,MAAM,KAAK;;CACxG,MAAM,qBAAqB,OAAe,OAAmC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,OAAO,MAAM;;CAGzI,MAAM,wBAAwB,WAAmB,OAAkF;AAKjI,UAJU,MAAM,KAAK,KAAK,MACxB,sIACA,CAAC,WAAW,MAAM,CACnB,EACQ;;CAGX,MAAM,iBAAiB,WAAmB,MAAc,QAAQ,IAAgE;AAK9H,UAJU,MAAM,KAAK,KAAK,MACxB,wIACA;GAAC;GAAW;GAAM;GAAM,CACzB,EACQ;;CAGX,MAAM,mBAAmB,WAAmB,OAAe,OAA+D;AAKxH,UAJU,MAAM,KAAK,KAAK,MACxB,+GACA;GAAC;GAAW,IAAI,MAAM;GAAI;GAAM,CACjC,EACQ"}
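For orientation, the `postgres/index.ts` source embedded in the sourcemap above covers the whole backend lifecycle: `ensureDatabase()` creates the per-user database, applies `init.sql`, and runs idempotent migrations, after which the constructor opens a connection pool and searches delegate to `search.ts`. A minimal sketch of how a caller might exercise it, assuming a hypothetical import path and connection string (the diff does not show a public entry point for this class):

```ts
// Illustrative sketch only — the import path and connection string are
// assumptions; neither is documented anywhere in this diff.
import { PostgresBackend } from "@tekmidian/pai/dist/postgres-BGERehmX.mjs"; // assumed path

async function bootstrap(): Promise<void> {
  // connectionString is one of the PostgresConfig fields read by the constructor.
  const config = { connectionString: "postgresql://pai:pai@localhost:5432/pai_demo" };

  // Idempotent: creates the database if missing, applies init.sql, runs migrations.
  await PostgresBackend.ensureDatabase(config);

  const backend = new PostgresBackend(config);
  try {
    const err = await backend.testConnection(); // null on success, message on failure
    if (err) throw new Error(`postgres unreachable: ${err}`);

    const stats = await backend.getStats();     // row counts from pai_files / pai_chunks
    console.log(`files=${stats.files} chunks=${stats.chunks}`);
  } finally {
    await backend.close();
  }
}

bootstrap().catch((e) => { console.error(e); process.exit(1); });
```

Only methods visible in the embedded source are used here (`ensureDatabase`, `testConnection`, `getStats`, `close`); everything else is a placeholder.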
@@ -1 +1 @@
-
{"version":3,"file":"query-feedback-
+
{"version":3,"file":"query-feedback-CQSumXDy.mjs","names":[],"sources":["../src/zettelkasten/query-feedback.ts"],"sourcesContent":["/**\n * Query feedback loop — persist search queries and results as markdown files.\n *\n * When memory_search or zettel_converse returns results, the query + result\n * metadata is saved to ~/.config/pai/queries/ as a markdown file with YAML\n * frontmatter. The daemon indexer picks these up on the next cycle and indexes\n * them into federation.db, creating a self-reinforcing feedback loop: past\n * queries become searchable context for future queries.\n */\n\nimport { existsSync, mkdirSync, writeFileSync } from \"node:fs\";\nimport { join } from \"node:path\";\nimport { homedir } from \"node:os\";\n\nexport interface QueryRecord {\n /** The original query string. */\n query: string;\n /** Timestamp of the query. */\n timestamp: number;\n /** Tool that produced the result: 'memory_search' | 'zettel_converse'. */\n source: string;\n /** Slugs of the top result paths (for linking back). */\n sourceSlugs: string[];\n /** Preview of the answer/result (first 500 chars). */\n answerPreview: string;\n /** Number of results returned. */\n resultCount: number;\n}\n\nconst QUERIES_DIR = join(homedir(), \".config\", \"pai\", \"queries\");\n\n/**\n * Ensure the queries directory exists.\n */\nfunction ensureQueriesDir(): void {\n if (!existsSync(QUERIES_DIR)) {\n mkdirSync(QUERIES_DIR, { recursive: true });\n }\n}\n\n/**\n * Generate a filename-safe slug from a query string.\n */\nfunction querySlug(query: string, timestamp: number): string {\n const slug = query\n .toLowerCase()\n .replace(/[^a-z0-9\\s]/g, \"\")\n .replace(/\\s+/g, \"-\")\n .slice(0, 60);\n const ts = new Date(timestamp).toISOString().slice(0, 10);\n const shortHash = timestamp.toString(36).slice(-4);\n return `${ts}-${slug}-${shortHash}`;\n}\n\n/**\n * Save a query + result record as a markdown file with YAML frontmatter.\n *\n * The file is written to ~/.config/pai/queries/ and will be picked up by\n * the daemon indexer on the next cycle.\n */\nexport function saveQueryResult(record: QueryRecord): string | null {\n try {\n ensureQueriesDir();\n\n const filename = querySlug(record.query, record.timestamp) + \".md\";\n const filepath = join(QUERIES_DIR, filename);\n\n // Don't overwrite if the exact file already exists\n if (existsSync(filepath)) return filepath;\n\n const frontmatter = [\n \"---\",\n `query: \"${record.query.replace(/\"/g, '\\\\\"')}\"`,\n `timestamp: ${new Date(record.timestamp).toISOString()}`,\n `source: ${record.source}`,\n `result_count: ${record.resultCount}`,\n `source_slugs:`,\n ...record.sourceSlugs.map((s) => ` - \"${s}\"`),\n \"---\",\n ].join(\"\\n\");\n\n const body = [\n \"\",\n `# Query: ${record.query}`,\n \"\",\n `**Source:** ${record.source} `,\n `**Date:** ${new Date(record.timestamp).toISOString().slice(0, 19).replace(\"T\", \" \")} `,\n `**Results:** ${record.resultCount}`,\n \"\",\n \"## Answer Preview\",\n \"\",\n record.answerPreview,\n \"\",\n \"## Source Paths\",\n \"\",\n ...record.sourceSlugs.map((s) => `- \\`${s}\\``),\n \"\",\n ].join(\"\\n\");\n\n writeFileSync(filepath, frontmatter + body, \"utf8\");\n return filepath;\n } catch {\n // Non-critical — don't crash the parent tool if query logging fails\n return null;\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;AA6BA,MAAM,cAAc,KAAK,SAAS,EAAE,WAAW,OAAO,UAAU;;;;AAKhE,SAAS,mBAAyB;AAChC,KAAI,CAAC,WAAW,YAAY,CAC1B,WAAU,aAAa,EAAE,WAAW,MAAM,CAAC;;;;;AAO/C,SAAS,UAAU,OAAe,WAA2B;CAC3D,MAAM,OAAO,MACV,aAAa,CACb,QAAQ,gBAAgB,GAAG,CAC3B,QAAQ,QAAQ,IAAI,CACpB,MAAM,GAAG,GAAG;AAGf,QAAO,GAFI,IAAI,KAAK,UAAU,CAAC,aAAa,CAAC,MAAM,GAAG,GAAG,CAE5C,GAAG,KAAK,GADH,UAAU,SAAS,GAAG,CAAC,MAAM,GAAG;;;;;;;;AAUpD,SAAgB,gBAAgB,QAAoC;AAClE,KAAI;AACF,oBAAkB;EAGlB,MAAM,WAAW,KAAK,aADL,UAAU,OAAO,OAAO,OAAO,UAAU,GAAG,MACjB;AAG5C,MAAI,WAAW,SAAS,CAAE,QAAO;AA+BjC,gBAAc,UA7BM;GAClB;GACA,WAAW,OAAO,MAAM,QAAQ,MAAM,OAAM,CAAC;GAC7C,cAAc,IAAI,KAAK,OAAO,UAAU,CAAC,aAAa;GACtD,WAAW,OAAO;GAClB,iBAAiB,OAAO;GACxB;GACA,GAAG,OAAO,YAAY,KAAK,MAAM,QAAQ,EAAE,GAAG;GAC9C;GACD,CAAC,KAAK,KAAK,GAEC;GACX;GACA,YAAY,OAAO;GACnB;GACA,eAAe,OAAO,OAAO;GAC7B,aAAa,IAAI,KAAK,OAAO,UAAU,CAAC,aAAa,CAAC,MAAM,GAAG,GAAG,CAAC,QAAQ,KAAK,IAAI,CAAC;GACrF,gBAAgB,OAAO;GACvB;GACA;GACA;GACA,OAAO;GACP;GACA;GACA;GACA,GAAG,OAAO,YAAY,KAAK,MAAM,OAAO,EAAE,IAAI;GAC9C;GACD,CAAC,KAAK,KAAK,EAEgC,OAAO;AACnD,SAAO;SACD;AAEN,SAAO"}