@tekmidian/pai 0.8.4 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ARCHITECTURE.md +121 -0
- package/FEATURE.md +5 -0
- package/README.md +54 -0
- package/dist/cli/index.mjs +11 -11
- package/dist/daemon/index.mjs +3 -3
- package/dist/{daemon-nXyhvdzz.mjs → daemon-VIFoKc_z.mjs} +31 -6
- package/dist/daemon-VIFoKc_z.mjs.map +1 -0
- package/dist/daemon-mcp/index.mjs +51 -0
- package/dist/daemon-mcp/index.mjs.map +1 -1
- package/dist/{factory-Ygqe_bVZ.mjs → factory-e0k1HWuc.mjs} +2 -2
- package/dist/{factory-Ygqe_bVZ.mjs.map → factory-e0k1HWuc.mjs.map} +1 -1
- package/dist/hooks/load-project-context.mjs +276 -89
- package/dist/hooks/load-project-context.mjs.map +4 -4
- package/dist/hooks/stop-hook.mjs +152 -2
- package/dist/hooks/stop-hook.mjs.map +3 -3
- package/dist/{postgres-CKf-EDtS.mjs → postgres-DvEPooLO.mjs} +45 -10
- package/dist/postgres-DvEPooLO.mjs.map +1 -0
- package/dist/query-feedback-Dv43XKHM.mjs +76 -0
- package/dist/query-feedback-Dv43XKHM.mjs.map +1 -0
- package/dist/tools-C4SBZHga.mjs +1731 -0
- package/dist/tools-C4SBZHga.mjs.map +1 -0
- package/dist/{vault-indexer-Bi2cRmn7.mjs → vault-indexer-B-aJpRZC.mjs} +3 -2
- package/dist/{vault-indexer-Bi2cRmn7.mjs.map → vault-indexer-B-aJpRZC.mjs.map} +1 -1
- package/dist/{zettelkasten-cdajbnPr.mjs → zettelkasten-DhBKZQHF.mjs} +358 -3
- package/dist/zettelkasten-DhBKZQHF.mjs.map +1 -0
- package/package.json +1 -1
- package/src/hooks/ts/session-start/load-project-context.ts +36 -0
- package/src/hooks/ts/stop/stop-hook.ts +203 -1
- package/dist/daemon-nXyhvdzz.mjs.map +0 -1
- package/dist/postgres-CKf-EDtS.mjs.map +0 -1
- package/dist/tools-DcaJlYDN.mjs +0 -869
- package/dist/tools-DcaJlYDN.mjs.map +0 -1
- package/dist/zettelkasten-cdajbnPr.mjs.map +0 -1
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"postgres-DvEPooLO.mjs","names":["vault.upsertVaultFile","vault.deleteVaultFile","vault.getVaultFile","vault.getVaultFileByInode","vault.getAllVaultFiles","vault.getRecentVaultFiles","vault.countVaultFiles","vault.countVaultFilesWithPrefix","vault.countVaultFilesAfter","vault.getVaultFilesByPaths","vault.getVaultFilesByPathsAfter","vault.getAllVaultFilePaths","vault.getVaultFilePathsWithPrefix","vault.getVaultFilePathsAfter","vault.upsertVaultAliases","vault.deleteVaultAliases","vault.getVaultAlias","vault.replaceLinksForSources","vault.getLinksFromSource","vault.getLinksToTarget","vault.getVaultLinkGraph","vault.getDeadLinks","vault.getDeadLinksWithLineNumbers","vault.getDeadLinksWithPrefix","vault.getDeadLinksAfter","vault.countVaultLinksWithPrefix","vault.countVaultLinksAfter","vault.getVaultLinksFromPaths","vault.getVaultLinkEdges","vault.getVaultLinkEdgesWithPrefix","vault.getVaultLinkEdgesAfter","vault.upsertVaultHealth","vault.getVaultHealth","vault.getOrphans","vault.getOrphansWithPrefix","vault.getOrphansAfter","vault.getLowConnectivity","vault.getLowConnectivityWithPrefix","vault.getLowConnectivityAfter","vault.upsertNameIndex","vault.replaceNameIndex","vault.resolveVaultName","vault.searchVaultNameIndex"],"sources":["../src/storage/postgres/helpers.ts","../src/storage/postgres/search.ts","../src/storage/postgres/vault.ts","../src/storage/postgres/backend.ts"],"sourcesContent":["/**\n * Internal helper utilities for the Postgres storage backend.\n */\n\nimport { STOP_WORDS } from \"../../utils/stop-words.js\";\n\n/**\n * Convert a Buffer of Float32 LE bytes (as stored in SQLite) to number[].\n */\nexport function bufferToVector(buf: Buffer): number[] {\n const floats: number[] = [];\n for (let i = 0; i < buf.length; i += 4) {\n floats.push(buf.readFloatLE(i));\n }\n return floats;\n}\n\n/**\n * Convert a free-text query to a Postgres tsquery string.\n *\n * Uses OR (|) semantics so that a chunk matching ANY query term is returned,\n * 
ranked by ts_rank (which scores higher when more terms match). AND (&)\n * semantics are too strict for multi-word queries because all terms rarely\n * co-occur in a single chunk.\n *\n * Example: \"Synchrotech interview follow-up Gilles\"\n * → \"synchrotech | interview | follow | gilles\"\n */\nexport function buildPgTsQuery(query: string): string {\n const tokens = query\n .toLowerCase()\n .split(/[\\s\\p{P}]+/u)\n .filter(Boolean)\n .filter((t) => t.length >= 2)\n .filter((t) => !STOP_WORDS.has(t))\n // Sanitize: strip tsquery special characters to prevent syntax errors\n .map((t) => t.replace(/'/g, \"''\").replace(/[&|!():]/g, \"\"))\n .filter(Boolean);\n\n if (tokens.length === 0) {\n const raw = query.replace(/[^a-z0-9]/gi, \" \").trim().split(/\\s+/).filter(Boolean).join(\" | \");\n return raw || \"\";\n }\n\n return tokens.join(\" | \");\n}\n","/**\n * Keyword and semantic search implementations for the Postgres backend.\n * Functions take a `pool` parameter so they can be called from PostgresBackend methods.\n */\n\nimport type { Pool } from \"pg\";\nimport type { SearchResult, SearchOptions } from \"../../memory/search.js\";\nimport { buildPgTsQuery } from \"./helpers.js\";\n\n/**\n * Full-text keyword search using Postgres tsvector/tsquery with 'simple' dictionary.\n */\nexport async function searchKeyword(\n pool: Pool,\n query: string,\n opts?: SearchOptions\n): Promise<SearchResult[]> {\n const maxResults = opts?.maxResults ?? 
10;\n\n const tsQuery = buildPgTsQuery(query);\n if (!tsQuery) return [];\n\n const conditions: string[] = [\"fts_vector @@ to_tsquery('simple', $1)\"];\n const params: (string | number)[] = [tsQuery];\n let paramIdx = 2;\n\n if (opts?.projectIds && opts.projectIds.length > 0) {\n const placeholders = opts.projectIds.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`project_id IN (${placeholders})`);\n params.push(...opts.projectIds);\n }\n\n if (opts?.sources && opts.sources.length > 0) {\n const placeholders = opts.sources.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`source IN (${placeholders})`);\n params.push(...opts.sources);\n }\n\n if (opts?.tiers && opts.tiers.length > 0) {\n const placeholders = opts.tiers.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`tier IN (${placeholders})`);\n params.push(...opts.tiers);\n }\n\n params.push(maxResults);\n const limitParam = `$${paramIdx}`;\n\n const sql = `\n SELECT\n project_id,\n path,\n start_line,\n end_line,\n text AS snippet,\n tier,\n source,\n ts_rank(fts_vector, to_tsquery('simple', $1)) AS rank_score\n FROM pai_chunks\n WHERE ${conditions.join(\" AND \")}\n ORDER BY rank_score DESC\n LIMIT ${limitParam}\n `;\n\n try {\n const result = await pool.query<{\n project_id: number;\n path: string;\n start_line: number;\n end_line: number;\n snippet: string;\n tier: string;\n source: string;\n rank_score: number;\n }>(sql, params);\n\n return result.rows.map((row) => ({\n projectId: row.project_id,\n path: row.path,\n startLine: row.start_line,\n endLine: row.end_line,\n snippet: row.snippet,\n score: row.rank_score,\n tier: row.tier,\n source: row.source,\n }));\n } catch (e) {\n process.stderr.write(`[pai-postgres] searchKeyword error: ${e}\\n`);\n return [];\n }\n}\n\n/**\n * Semantic vector similarity search using pgvector cosine distance (<=>).\n */\nexport async function searchSemantic(\n pool: Pool,\n queryEmbedding: Float32Array,\n opts?: SearchOptions\n): 
Promise<SearchResult[]> {\n const maxResults = opts?.maxResults ?? 10;\n\n const conditions: string[] = [\"embedding IS NOT NULL\"];\n const params: (string | number)[] = [];\n let paramIdx = 1;\n\n const vecStr = \"[\" + Array.from(queryEmbedding).join(\",\") + \"]\";\n params.push(vecStr);\n const vecParam = `$${paramIdx++}`;\n\n if (opts?.projectIds && opts.projectIds.length > 0) {\n const placeholders = opts.projectIds.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`project_id IN (${placeholders})`);\n params.push(...opts.projectIds);\n }\n\n if (opts?.sources && opts.sources.length > 0) {\n const placeholders = opts.sources.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`source IN (${placeholders})`);\n params.push(...opts.sources);\n }\n\n if (opts?.tiers && opts.tiers.length > 0) {\n const placeholders = opts.tiers.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`tier IN (${placeholders})`);\n params.push(...opts.tiers);\n }\n\n params.push(maxResults);\n const limitParam = `$${paramIdx}`;\n\n // <=> is cosine distance; 1 - distance = cosine similarity\n const sql = `\n SELECT\n project_id,\n path,\n start_line,\n end_line,\n text AS snippet,\n tier,\n source,\n 1 - (embedding <=> ${vecParam}::vector) AS cosine_similarity\n FROM pai_chunks\n WHERE ${conditions.join(\" AND \")}\n ORDER BY embedding <=> ${vecParam}::vector\n LIMIT ${limitParam}\n `;\n\n try {\n const result = await pool.query<{\n project_id: number;\n path: string;\n start_line: number;\n end_line: number;\n snippet: string;\n tier: string;\n source: string;\n cosine_similarity: number;\n }>(sql, params);\n\n const minScore = opts?.minScore ?? 
-Infinity;\n\n return result.rows\n .map((row) => ({\n projectId: row.project_id,\n path: row.path,\n startLine: row.start_line,\n endLine: row.end_line,\n snippet: row.snippet,\n score: row.cosine_similarity,\n tier: row.tier,\n source: row.source,\n }))\n .filter((r) => r.score >= minScore);\n } catch (e) {\n process.stderr.write(`[pai-postgres] searchSemantic error: ${e}\\n`);\n return [];\n }\n}\n","/**\n * Vault storage operations for the Postgres backend.\n * All functions take a `pool` parameter — called from PostgresBackend methods.\n */\n\nimport type { Pool } from \"pg\";\nimport type {\n VaultFileRow, VaultAliasRow, VaultLinkRow, VaultHealthRow, VaultNameEntry,\n} from \"../interface.js\";\n\n// ---------------------------------------------------------------------------\n// Vault files\n// ---------------------------------------------------------------------------\n\nexport async function upsertVaultFile(pool: Pool, file: VaultFileRow): Promise<void> {\n await pool.query(\n `INSERT INTO vault_files (vault_path, inode, device, hash, title, indexed_at)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (vault_path) DO UPDATE SET\n inode = EXCLUDED.inode, device = EXCLUDED.device,\n hash = EXCLUDED.hash, title = EXCLUDED.title,\n indexed_at = EXCLUDED.indexed_at`,\n [file.vaultPath, file.inode, file.device, file.hash, file.title, file.indexedAt]\n );\n}\n\nexport async function deleteVaultFile(pool: Pool, vaultPath: string): Promise<void> {\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n await client.query(\"DELETE FROM vault_links WHERE source_path = $1\", [vaultPath]);\n await client.query(\"DELETE FROM vault_health WHERE vault_path = $1\", [vaultPath]);\n await client.query(\"DELETE FROM vault_name_index WHERE vault_path = $1\", [vaultPath]);\n await client.query(\"DELETE FROM vault_aliases WHERE vault_path = $1 OR canonical_path = $1\", [vaultPath]);\n await client.query(\"DELETE FROM vault_files WHERE vault_path = $1\", 
[vaultPath]);\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\ntype VaultFileDbRow = { vault_path: string; inode: string; device: string; hash: string; title: string | null; indexed_at: string };\n\nfunction mapVaultFileRow(row: VaultFileDbRow): VaultFileRow {\n return {\n vaultPath: row.vault_path,\n inode: Number(row.inode),\n device: Number(row.device),\n hash: row.hash,\n title: row.title,\n indexedAt: Number(row.indexed_at),\n };\n}\n\nexport async function getVaultFile(pool: Pool, vaultPath: string): Promise<VaultFileRow | null> {\n const r = await pool.query<VaultFileDbRow>(\n \"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE vault_path = $1\",\n [vaultPath]\n );\n return r.rows.length === 0 ? null : mapVaultFileRow(r.rows[0]);\n}\n\nexport async function getVaultFileByInode(pool: Pool, inode: number, device: number): Promise<VaultFileRow | null> {\n const r = await pool.query<VaultFileDbRow>(\n \"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE inode = $1 AND device = $2 LIMIT 1\",\n [inode, device]\n );\n return r.rows.length === 0 ? 
null : mapVaultFileRow(r.rows[0]);\n}\n\nexport async function getAllVaultFiles(pool: Pool): Promise<VaultFileRow[]> {\n const r = await pool.query<VaultFileDbRow>(\"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files\");\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function getRecentVaultFiles(pool: Pool, sinceMs: number): Promise<VaultFileRow[]> {\n const r = await pool.query<VaultFileDbRow>(\n \"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function countVaultFiles(pool: Pool): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*)::text AS n FROM vault_files\");\n return parseInt(r.rows[0]?.n ?? \"0\", 10);\n}\n\nexport async function countVaultFilesWithPrefix(pool: Pool, prefix: string): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*) AS n FROM vault_files WHERE vault_path LIKE $1\", [`${prefix}%`]);\n return Number(r.rows[0]?.n ?? 0);\n}\n\nexport async function countVaultFilesAfter(pool: Pool, sinceMs: number): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*) AS n FROM vault_files WHERE indexed_at > $1\", [sinceMs]);\n return Number(r.rows[0]?.n ?? 
0);\n}\n\nexport async function getVaultFilesByPaths(pool: Pool, paths: string[]): Promise<VaultFileRow[]> {\n if (paths.length === 0) return [];\n const placeholders = paths.map((_, i) => `$${i + 1}`).join(\", \");\n const r = await pool.query<VaultFileDbRow>(\n `SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE vault_path IN (${placeholders})`,\n paths\n );\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function getVaultFilesByPathsAfter(pool: Pool, paths: string[], sinceMs: number): Promise<VaultFileRow[]> {\n if (paths.length === 0) return [];\n const placeholders = paths.map((_, i) => `$${i + 1}`).join(\", \");\n const r = await pool.query<VaultFileDbRow>(\n `SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE vault_path IN (${placeholders}) AND indexed_at >= $${paths.length + 1} ORDER BY indexed_at ASC`,\n [...paths, sinceMs]\n );\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function getAllVaultFilePaths(pool: Pool): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\"SELECT vault_path FROM vault_files\");\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getVaultFilePathsWithPrefix(pool: Pool, prefix: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_files WHERE vault_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getVaultFilePathsAfter(pool: Pool, sinceMs: number): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_files WHERE indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(row => row.vault_path);\n}\n\n// ---------------------------------------------------------------------------\n// Vault aliases\n// ---------------------------------------------------------------------------\n\nexport async function upsertVaultAliases(pool: Pool, aliases: 
VaultAliasRow[]): Promise<void> {\n if (aliases.length === 0) return;\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const a of aliases) {\n await client.query(\n `INSERT INTO vault_aliases (vault_path, canonical_path, inode, device)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (vault_path) DO UPDATE SET\n canonical_path = EXCLUDED.canonical_path,\n inode = EXCLUDED.inode, device = EXCLUDED.device`,\n [a.vaultPath, a.canonicalPath, a.inode, a.device]\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport async function deleteVaultAliases(pool: Pool, canonicalPath: string): Promise<void> {\n await pool.query(\"DELETE FROM vault_aliases WHERE canonical_path = $1\", [canonicalPath]);\n}\n\nexport async function getVaultAlias(pool: Pool, vaultPath: string): Promise<{ canonicalPath: string } | null> {\n const r = await pool.query<{ canonical_path: string }>(\n \"SELECT canonical_path FROM vault_aliases WHERE vault_path = $1\",\n [vaultPath]\n );\n return r.rows.length > 0 ? { canonicalPath: r.rows[0].canonical_path } : null;\n}\n\n// ---------------------------------------------------------------------------\n// Vault links\n// ---------------------------------------------------------------------------\n\ntype VaultLinkDbRow = { source_path: string; target_raw: string; target_path: string | null; link_type: string; line_number: number; confidence?: string };\n\nfunction mapVaultLinkRow(row: VaultLinkDbRow): VaultLinkRow {\n return {\n sourcePath: row.source_path,\n targetRaw: row.target_raw,\n targetPath: row.target_path,\n linkType: row.link_type,\n lineNumber: row.line_number,\n confidence: (row.confidence as VaultLinkRow[\"confidence\"]) ?? 
\"EXTRACTED\",\n };\n}\n\nexport async function replaceLinksForSources(pool: Pool, sourcePaths: string[], links: VaultLinkRow[]): Promise<void> {\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n if (sourcePaths.length > 0) {\n await client.query(\n \"DELETE FROM vault_links WHERE source_path = ANY($1::text[])\",\n [sourcePaths]\n );\n }\n for (let i = 0; i < links.length; i += 500) {\n const batch = links.slice(i, i + 500);\n const values: string[] = [];\n const params: (string | number | null)[] = [];\n let idx = 1;\n for (const l of batch) {\n values.push(`($${idx++}, $${idx++}, $${idx++}, $${idx++}, $${idx++}, $${idx++})`);\n params.push(l.sourcePath, l.targetRaw, l.targetPath, l.linkType, l.lineNumber, l.confidence ?? \"EXTRACTED\");\n }\n await client.query(\n `INSERT INTO vault_links (source_path, target_raw, target_path, link_type, line_number, confidence)\n VALUES ${values.join(\", \")}\n ON CONFLICT (source_path, target_raw, line_number) DO UPDATE SET\n target_path = EXCLUDED.target_path, link_type = EXCLUDED.link_type,\n confidence = EXCLUDED.confidence`,\n params\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport async function getLinksFromSource(pool: Pool, sourcePath: string): Promise<VaultLinkRow[]> {\n const r = await pool.query<VaultLinkDbRow>(\n \"SELECT source_path, target_raw, target_path, link_type, line_number, confidence FROM vault_links WHERE source_path = $1\",\n [sourcePath]\n );\n return r.rows.map(mapVaultLinkRow);\n}\n\nexport async function getLinksToTarget(pool: Pool, targetPath: string): Promise<VaultLinkRow[]> {\n const r = await pool.query<VaultLinkDbRow>(\n \"SELECT source_path, target_raw, target_path, link_type, line_number, confidence FROM vault_links WHERE target_path = $1\",\n [targetPath]\n );\n return r.rows.map(mapVaultLinkRow);\n}\n\nexport async function getVaultLinkGraph(pool: Pool): 
Promise<Array<{ source_path: string; target_path: string }>> {\n const r = await pool.query<{ source_path: string; target_path: string }>(\n \"SELECT source_path, target_path FROM vault_links WHERE target_path IS NOT NULL\"\n );\n return r.rows;\n}\n\nexport async function getDeadLinks(pool: Pool): Promise<Array<{ sourcePath: string; targetRaw: string }>> {\n const r = await pool.query<{ source_path: string; target_raw: string }>(\n \"SELECT source_path, target_raw FROM vault_links WHERE target_path IS NULL\"\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw }));\n}\n\nexport async function getDeadLinksWithLineNumbers(pool: Pool): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> {\n const r = await pool.query<{ source_path: string; target_raw: string; line_number: number }>(\n \"SELECT source_path, target_raw, line_number FROM vault_links WHERE target_path IS NULL\"\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw, lineNumber: row.line_number }));\n}\n\nexport async function getDeadLinksWithPrefix(pool: Pool, prefix: string): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> {\n const r = await pool.query<{ source_path: string; target_raw: string; line_number: number }>(\n \"SELECT source_path, target_raw, line_number FROM vault_links WHERE target_path IS NULL AND source_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw, lineNumber: row.line_number }));\n}\n\nexport async function getDeadLinksAfter(pool: Pool, sinceMs: number): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> {\n const r = await pool.query<{ source_path: string; target_raw: string; line_number: number }>(\n \"SELECT source_path, target_raw, line_number FROM vault_links WHERE target_path IS NULL AND source_path IN (SELECT vault_path FROM vault_files WHERE 
indexed_at > $1)\",\n [sinceMs]\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw, lineNumber: row.line_number }));\n}\n\nexport async function countVaultLinksWithPrefix(pool: Pool, prefix: string): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*) AS n FROM vault_links WHERE source_path LIKE $1\", [`${prefix}%`]);\n return Number(r.rows[0]?.n ?? 0);\n}\n\nexport async function countVaultLinksAfter(pool: Pool, sinceMs: number): Promise<number> {\n const r = await pool.query<{ n: string }>(\n \"SELECT COUNT(*) AS n FROM vault_links WHERE source_path IN (SELECT vault_path FROM vault_files WHERE indexed_at > $1)\",\n [sinceMs]\n );\n return Number(r.rows[0]?.n ?? 0);\n}\n\nexport async function getVaultLinksFromPaths(pool: Pool, sourcePaths: string[]): Promise<VaultLinkRow[]> {\n if (sourcePaths.length === 0) return [];\n const placeholders = sourcePaths.map((_, i) => `$${i + 1}`).join(\", \");\n const r = await pool.query<VaultLinkDbRow>(\n `SELECT source_path, target_raw, target_path, link_type, line_number, confidence FROM vault_links WHERE source_path IN (${placeholders}) AND target_path IS NOT NULL`,\n sourcePaths\n );\n return r.rows.map(mapVaultLinkRow);\n}\n\nexport async function getVaultLinkEdges(pool: Pool): Promise<Array<{ source: string; target: string }>> {\n const r = await pool.query<{ source: string; target: string }>(\n \"SELECT DISTINCT source_path AS source, target_path AS target FROM vault_links WHERE target_path IS NOT NULL\"\n );\n return r.rows;\n}\n\nexport async function getVaultLinkEdgesWithPrefix(pool: Pool, prefix: string): Promise<Array<{ source: string; target: string }>> {\n const r = await pool.query<{ source: string; target: string }>(\n \"SELECT DISTINCT source_path AS source, target_path AS target FROM vault_links WHERE target_path IS NOT NULL AND source_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows;\n}\n\nexport async function getVaultLinkEdgesAfter(pool: 
Pool, sinceMs: number): Promise<Array<{ source: string; target: string }>> {\n const r = await pool.query<{ source: string; target: string }>(\n \"SELECT DISTINCT source_path AS source, target_path AS target FROM vault_links WHERE target_path IS NOT NULL AND source_path IN (SELECT vault_path FROM vault_files WHERE indexed_at > $1)\",\n [sinceMs]\n );\n return r.rows;\n}\n\n// ---------------------------------------------------------------------------\n// Vault health\n// ---------------------------------------------------------------------------\n\ntype VaultHealthDbRow = { vault_path: string; inbound_count: number; outbound_count: number; dead_link_count: number; is_orphan: number; computed_at: string };\n\nfunction mapVaultHealthRow(row: VaultHealthDbRow): VaultHealthRow {\n return {\n vaultPath: row.vault_path,\n inboundCount: row.inbound_count,\n outboundCount: row.outbound_count,\n deadLinkCount: row.dead_link_count,\n isOrphan: row.is_orphan === 1,\n computedAt: Number(row.computed_at),\n };\n}\n\nexport async function upsertVaultHealth(pool: Pool, rows: VaultHealthRow[]): Promise<void> {\n if (rows.length === 0) return;\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const h of rows) {\n await client.query(\n `INSERT INTO vault_health (vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (vault_path) DO UPDATE SET\n inbound_count = EXCLUDED.inbound_count,\n outbound_count = EXCLUDED.outbound_count,\n dead_link_count = EXCLUDED.dead_link_count,\n is_orphan = EXCLUDED.is_orphan,\n computed_at = EXCLUDED.computed_at`,\n [h.vaultPath, h.inboundCount, h.outboundCount, h.deadLinkCount, h.isOrphan ? 
1 : 0, h.computedAt]\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport async function getVaultHealth(pool: Pool, vaultPath: string): Promise<VaultHealthRow | null> {\n const r = await pool.query<VaultHealthDbRow>(\n \"SELECT vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at FROM vault_health WHERE vault_path = $1\",\n [vaultPath]\n );\n return r.rows.length === 0 ? null : mapVaultHealthRow(r.rows[0]);\n}\n\nexport async function getOrphans(pool: Pool): Promise<VaultHealthRow[]> {\n const r = await pool.query<VaultHealthDbRow>(\n \"SELECT vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at FROM vault_health WHERE is_orphan = 1\"\n );\n return r.rows.map(row => ({ ...mapVaultHealthRow(row), isOrphan: true }));\n}\n\nexport async function getOrphansWithPrefix(pool: Pool, prefix: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_health WHERE is_orphan = 1 AND vault_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getOrphansAfter(pool: Pool, sinceMs: number): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vh.vault_path FROM vault_health vh JOIN vault_files vf ON vh.vault_path = vf.vault_path WHERE vh.is_orphan = 1 AND vf.indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getLowConnectivity(pool: Pool): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_health WHERE inbound_count + outbound_count <= 1\"\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getLowConnectivityWithPrefix(pool: Pool, prefix: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path 
FROM vault_health WHERE inbound_count + outbound_count <= 1 AND vault_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getLowConnectivityAfter(pool: Pool, sinceMs: number): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vh.vault_path FROM vault_health vh JOIN vault_files vf ON vh.vault_path = vf.vault_path WHERE vh.inbound_count + vh.outbound_count <= 1 AND vf.indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(row => row.vault_path);\n}\n\n// ---------------------------------------------------------------------------\n// Vault name index\n// ---------------------------------------------------------------------------\n\nexport async function upsertNameIndex(pool: Pool, entries: VaultNameEntry[]): Promise<void> {\n if (entries.length === 0) return;\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const e of entries) {\n await client.query(\n `INSERT INTO vault_name_index (name, vault_path)\n VALUES ($1, $2) ON CONFLICT (name, vault_path) DO NOTHING`,\n [e.name, e.vaultPath]\n );\n }\n await client.query(\"COMMIT\");\n } catch (e_) {\n await client.query(\"ROLLBACK\");\n throw e_;\n } finally {\n client.release();\n }\n}\n\nexport async function replaceNameIndex(pool: Pool, entries: VaultNameEntry[]): Promise<void> {\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n await client.query(\"DELETE FROM vault_name_index\");\n for (let i = 0; i < entries.length; i += 500) {\n const batch = entries.slice(i, i + 500);\n const values: string[] = [];\n const params: string[] = [];\n let idx = 1;\n for (const e of batch) {\n values.push(`($${idx++}, $${idx++})`);\n params.push(e.name, e.vaultPath);\n }\n await client.query(\n `INSERT INTO vault_name_index (name, vault_path) VALUES ${values.join(\", \")}`,\n params\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await 
client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport async function resolveVaultName(pool: Pool, name: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_name_index WHERE name = $1\",\n [name]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function searchVaultNameIndex(pool: Pool, query: string, limit = 100): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT DISTINCT vault_path FROM vault_name_index WHERE lower(name) LIKE lower($1) LIMIT $2\",\n [`%${query}%`, limit]\n );\n return r.rows.map(row => row.vault_path);\n}\n","/**\n * PostgresBackend — implements StorageBackend using PostgreSQL + pgvector.\n *\n * Vector similarity: pgvector's <=> cosine distance operator\n * Full-text search: PostgreSQL tsvector/tsquery (replaces SQLite FTS5)\n * Connection pooling: node-postgres Pool\n *\n * Schema is auto-initialized on first connection if tables don't exist.\n * Per-user database isolation: each macOS user gets their own database (pai_<username>).\n */\n\nimport pg from \"pg\";\nimport type { Pool, PoolClient } from \"pg\";\nimport { readFileSync } from \"node:fs\";\nimport { join, dirname } from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\nimport type {\n StorageBackend, ChunkRow, FileRow, FederationStats,\n VaultFileRow, VaultAliasRow, VaultLinkRow, VaultHealthRow, VaultNameEntry,\n} from \"../interface.js\";\nimport type { SearchResult, SearchOptions } from \"../../memory/search.js\";\nimport type { PostgresConfig } from \"./config.js\";\nimport { bufferToVector } from \"./helpers.js\";\nimport { searchKeyword, searchSemantic } from \"./search.js\";\nimport * as vault from \"./vault.js\";\n\nconst { Pool: PgPool } = pg;\n\nexport class PostgresBackend implements StorageBackend {\n readonly backendType = \"postgres\" as const;\n\n private pool: Pool;\n\n /**\n * Ensure the per-user database 
exists and has the required schema.\n * Connects to the default 'postgres' database to CREATE DATABASE if needed,\n * then connects to the target database to apply init.sql schema.\n * Safe to call multiple times (fully idempotent).\n */\n static async ensureDatabase(config: PostgresConfig): Promise<void> {\n const connStr =\n config.connectionString ??\n `postgresql://${config.user ?? \"pai\"}:${config.password ?? \"pai\"}@${config.host ?? \"localhost\"}:${config.port ?? 5432}/${config.database ?? \"pai\"}`;\n const url = new URL(connStr);\n const targetDb = url.pathname.slice(1);\n\n const adminUrl = new URL(connStr);\n adminUrl.pathname = \"/postgres\";\n const adminPool = new PgPool({\n connectionString: adminUrl.toString(),\n max: 1,\n connectionTimeoutMillis: 5000,\n });\n\n try {\n const check = await adminPool.query(\n \"SELECT 1 FROM pg_database WHERE datname = $1\",\n [targetDb]\n );\n if (check.rowCount === 0) {\n await adminPool.query(`CREATE DATABASE \"${targetDb}\"`);\n process.stderr.write(`[pai-postgres] Created database: ${targetDb}\\n`);\n }\n } finally {\n await adminPool.end();\n }\n\n const targetPool = new PgPool({\n connectionString: connStr,\n max: 1,\n connectionTimeoutMillis: 5000,\n });\n\n try {\n const tableCheck = await targetPool.query(\n \"SELECT 1 FROM information_schema.tables WHERE table_name = 'pai_chunks'\"\n );\n if (tableCheck.rowCount === 0) {\n const __dirname = dirname(fileURLToPath(import.meta.url));\n const initSqlPath = join(__dirname, \"../../docker/init.sql\");\n let initSql: string;\n try {\n initSql = readFileSync(initSqlPath, \"utf-8\");\n } catch {\n const altPath = join(__dirname, \"../docker/init.sql\");\n initSql = readFileSync(altPath, \"utf-8\");\n }\n await targetPool.query(initSql);\n process.stderr.write(`[pai-postgres] Applied schema to database: ${targetDb}\\n`);\n }\n\n // Run incremental migrations for existing databases\n await PostgresBackend.runMigrations(targetPool);\n } finally {\n await 
targetPool.end();\n }\n }\n\n /**\n * Run incremental migrations for existing databases.\n * Each migration is idempotent — safe to run on databases that already have the change.\n */\n private static async runMigrations(pool: Pool): Promise<void> {\n // Migration: add confidence column to vault_links if it does not exist\n const colCheck = await pool.query(\n `SELECT 1 FROM information_schema.columns\n WHERE table_name = 'vault_links' AND column_name = 'confidence'`\n );\n if (colCheck.rowCount === 0) {\n await pool.query(\n \"ALTER TABLE vault_links ADD COLUMN confidence TEXT NOT NULL DEFAULT 'EXTRACTED'\"\n );\n process.stderr.write(\"[pai-postgres] Migration: added confidence column to vault_links\\n\");\n }\n\n // Migration: create kg_triples table if it does not exist\n const kgCheck = await pool.query(\n `SELECT 1 FROM information_schema.tables WHERE table_name = 'kg_triples'`\n );\n if (kgCheck.rowCount === 0) {\n await pool.query(`\n CREATE TABLE kg_triples (\n id SERIAL PRIMARY KEY,\n subject TEXT NOT NULL,\n predicate TEXT NOT NULL,\n object TEXT NOT NULL,\n project_id INTEGER,\n source_session TEXT,\n valid_from TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\n valid_to TIMESTAMP,\n confidence TEXT DEFAULT 'EXTRACTED',\n created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP\n )\n `);\n await pool.query(`CREATE INDEX idx_kg_subject ON kg_triples(subject)`);\n await pool.query(`CREATE INDEX idx_kg_predicate ON kg_triples(predicate)`);\n await pool.query(`CREATE INDEX idx_kg_object ON kg_triples(object)`);\n await pool.query(`CREATE INDEX idx_kg_valid ON kg_triples(valid_from, valid_to)`);\n process.stderr.write(\"[pai-postgres] Migration: created kg_triples table\\n\");\n }\n }\n\n constructor(config: PostgresConfig) {\n const connStr =\n config.connectionString ??\n `postgresql://${config.user ?? \"pai\"}:${config.password ?? \"pai\"}@${config.host ?? \"localhost\"}:${config.port ?? 5432}/${config.database ?? 
\"pai\"}`;\n\n this.pool = new PgPool({\n connectionString: connStr,\n max: config.maxConnections ?? 5,\n connectionTimeoutMillis: config.connectionTimeoutMs ?? 5000,\n idleTimeoutMillis: 30_000,\n });\n\n this.pool.on(\"error\", (err) => {\n process.stderr.write(`[pai-postgres] Pool error: ${err.message}\\n`);\n });\n }\n\n // -------------------------------------------------------------------------\n // Lifecycle\n // -------------------------------------------------------------------------\n\n async close(): Promise<void> {\n await this.pool.end();\n }\n\n /**\n * Expose the underlying pg.Pool for callers that need direct query access\n * (e.g. the daemon's observation IPC methods).\n */\n getPool(): Pool {\n return this.pool;\n }\n\n async getStats(): Promise<FederationStats> {\n const client = await this.pool.connect();\n try {\n const filesResult = await client.query<{ n: string }>(\n \"SELECT COUNT(*)::text AS n FROM pai_files\"\n );\n const chunksResult = await client.query<{ n: string }>(\n \"SELECT COUNT(*)::text AS n FROM pai_chunks\"\n );\n return {\n files: parseInt(filesResult.rows[0]?.n ?? \"0\", 10),\n chunks: parseInt(chunksResult.rows[0]?.n ?? \"0\", 10),\n };\n } finally {\n client.release();\n }\n }\n\n /**\n * Test the connection by running a trivial query.\n * Returns null on success, error message on failure.\n */\n async testConnection(): Promise<string | null> {\n let client: PoolClient | null = null;\n try {\n client = await this.pool.connect();\n await client.query(\"SELECT 1\");\n return null;\n } catch (e) {\n return e instanceof Error ? 
e.message : String(e);\n } finally {\n client?.release();\n }\n }\n\n // -------------------------------------------------------------------------\n // File tracking\n // -------------------------------------------------------------------------\n\n async getFileHash(projectId: number, path: string): Promise<string | undefined> {\n const result = await this.pool.query<{ hash: string }>(\n \"SELECT hash FROM pai_files WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n return result.rows[0]?.hash;\n }\n\n async upsertFile(file: FileRow): Promise<void> {\n await this.pool.query(\n `INSERT INTO pai_files (project_id, path, source, tier, hash, mtime, size)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n ON CONFLICT (project_id, path) DO UPDATE SET\n source = EXCLUDED.source,\n tier = EXCLUDED.tier,\n hash = EXCLUDED.hash,\n mtime = EXCLUDED.mtime,\n size = EXCLUDED.size`,\n [file.projectId, file.path, file.source, file.tier, file.hash, file.mtime, file.size]\n );\n }\n\n // -------------------------------------------------------------------------\n // Chunk management\n // -------------------------------------------------------------------------\n\n async getChunkIds(projectId: number, path: string): Promise<string[]> {\n const result = await this.pool.query<{ id: string }>(\n \"SELECT id FROM pai_chunks WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n return result.rows.map((r) => r.id);\n }\n\n async deleteChunksForFile(projectId: number, path: string): Promise<void> {\n await this.pool.query(\n \"DELETE FROM pai_chunks WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n }\n\n async insertChunks(chunks: ChunkRow[]): Promise<void> {\n if (chunks.length === 0) return;\n\n const client = await this.pool.connect();\n try {\n await client.query(\"BEGIN\");\n\n for (const c of chunks) {\n const safeText = c.text.replace(/\\0/g, \"\");\n\n await client.query(\n `INSERT INTO pai_chunks\n (id, project_id, source, tier, path, start_line, 
end_line, hash, text, updated_at, fts_vector)\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10,\n to_tsvector('simple', $9))\n ON CONFLICT (id) DO UPDATE SET\n project_id = EXCLUDED.project_id,\n source = EXCLUDED.source,\n tier = EXCLUDED.tier,\n path = EXCLUDED.path,\n start_line = EXCLUDED.start_line,\n end_line = EXCLUDED.end_line,\n hash = EXCLUDED.hash,\n text = EXCLUDED.text,\n updated_at = EXCLUDED.updated_at,\n fts_vector = EXCLUDED.fts_vector`,\n [\n c.id, c.projectId, c.source, c.tier, c.path,\n c.startLine, c.endLine, c.hash, safeText, c.updatedAt,\n ]\n );\n }\n\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n }\n\n async getDistinctChunkPaths(projectId: number): Promise<string[]> {\n const result = await this.pool.query<{ path: string }>(\n \"SELECT DISTINCT path FROM pai_chunks WHERE project_id = $1\",\n [projectId]\n );\n return result.rows.map((r) => r.path);\n }\n\n async deletePaths(projectId: number, paths: string[]): Promise<void> {\n if (paths.length === 0) return;\n const client = await this.pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const path of paths) {\n await client.query(\"DELETE FROM pai_chunks WHERE project_id = $1 AND path = $2\", [projectId, path]);\n await client.query(\"DELETE FROM pai_files WHERE project_id = $1 AND path = $2\", [projectId, path]);\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n }\n\n async getUnembeddedChunkIds(projectId?: number): Promise<Array<{ id: string; text: string; project_id: number; path: string }>> {\n if (projectId !== undefined) {\n const result = await this.pool.query<{ id: string; text: string; project_id: number; path: string }>(\n \"SELECT id, text, project_id, path FROM pai_chunks WHERE embedding IS NULL AND project_id = $1 ORDER BY id\",\n [projectId]\n );\n return result.rows;\n }\n const 
result = await this.pool.query<{ id: string; text: string; project_id: number; path: string }>(\n \"SELECT id, text, project_id, path FROM pai_chunks WHERE embedding IS NULL ORDER BY id\"\n );\n return result.rows;\n }\n\n async updateEmbedding(chunkId: string, embedding: Buffer): Promise<void> {\n const vec = bufferToVector(embedding);\n const vecStr = \"[\" + vec.join(\",\") + \"]\";\n await this.pool.query(\n \"UPDATE pai_chunks SET embedding = $1::vector WHERE id = $2\",\n [vecStr, chunkId]\n );\n }\n\n // -------------------------------------------------------------------------\n // Search\n // -------------------------------------------------------------------------\n\n async searchKeyword(query: string, opts?: SearchOptions): Promise<SearchResult[]> {\n return searchKeyword(this.pool, query, opts);\n }\n\n async searchSemantic(queryEmbedding: Float32Array, opts?: SearchOptions): Promise<SearchResult[]> {\n return searchSemantic(this.pool, queryEmbedding, opts);\n }\n\n // -------------------------------------------------------------------------\n // Vault operations — delegated to vault.ts\n // -------------------------------------------------------------------------\n\n async upsertVaultFile(file: VaultFileRow): Promise<void> { return vault.upsertVaultFile(this.pool, file); }\n async deleteVaultFile(vaultPath: string): Promise<void> { return vault.deleteVaultFile(this.pool, vaultPath); }\n async getVaultFile(vaultPath: string): Promise<VaultFileRow | null> { return vault.getVaultFile(this.pool, vaultPath); }\n async getVaultFileByInode(inode: number, device: number): Promise<VaultFileRow | null> { return vault.getVaultFileByInode(this.pool, inode, device); }\n async getAllVaultFiles(): Promise<VaultFileRow[]> { return vault.getAllVaultFiles(this.pool); }\n async getRecentVaultFiles(sinceMs: number): Promise<VaultFileRow[]> { return vault.getRecentVaultFiles(this.pool, sinceMs); }\n async countVaultFiles(): Promise<number> { return 
vault.countVaultFiles(this.pool); }\n async countVaultFilesWithPrefix(prefix: string): Promise<number> { return vault.countVaultFilesWithPrefix(this.pool, prefix); }\n async countVaultFilesAfter(sinceMs: number): Promise<number> { return vault.countVaultFilesAfter(this.pool, sinceMs); }\n async getVaultFilesByPaths(paths: string[]): Promise<VaultFileRow[]> { return vault.getVaultFilesByPaths(this.pool, paths); }\n async getVaultFilesByPathsAfter(paths: string[], sinceMs: number): Promise<VaultFileRow[]> { return vault.getVaultFilesByPathsAfter(this.pool, paths, sinceMs); }\n async getAllVaultFilePaths(): Promise<string[]> { return vault.getAllVaultFilePaths(this.pool); }\n async getVaultFilePathsWithPrefix(prefix: string): Promise<string[]> { return vault.getVaultFilePathsWithPrefix(this.pool, prefix); }\n async getVaultFilePathsAfter(sinceMs: number): Promise<string[]> { return vault.getVaultFilePathsAfter(this.pool, sinceMs); }\n\n async upsertVaultAliases(aliases: VaultAliasRow[]): Promise<void> { return vault.upsertVaultAliases(this.pool, aliases); }\n async deleteVaultAliases(canonicalPath: string): Promise<void> { return vault.deleteVaultAliases(this.pool, canonicalPath); }\n async getVaultAlias(vaultPath: string): Promise<{ canonicalPath: string } | null> { return vault.getVaultAlias(this.pool, vaultPath); }\n\n async replaceLinksForSources(sourcePaths: string[], links: VaultLinkRow[]): Promise<void> { return vault.replaceLinksForSources(this.pool, sourcePaths, links); }\n async getLinksFromSource(sourcePath: string): Promise<VaultLinkRow[]> { return vault.getLinksFromSource(this.pool, sourcePath); }\n async getLinksToTarget(targetPath: string): Promise<VaultLinkRow[]> { return vault.getLinksToTarget(this.pool, targetPath); }\n async getVaultLinkGraph(): Promise<Array<{ source_path: string; target_path: string }>> { return vault.getVaultLinkGraph(this.pool); }\n async getDeadLinks(): Promise<Array<{ sourcePath: string; targetRaw: string }>> { return 
vault.getDeadLinks(this.pool); }\n async getDeadLinksWithLineNumbers(): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> { return vault.getDeadLinksWithLineNumbers(this.pool); }\n async getDeadLinksWithPrefix(prefix: string): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> { return vault.getDeadLinksWithPrefix(this.pool, prefix); }\n async getDeadLinksAfter(sinceMs: number): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> { return vault.getDeadLinksAfter(this.pool, sinceMs); }\n async countVaultLinksWithPrefix(prefix: string): Promise<number> { return vault.countVaultLinksWithPrefix(this.pool, prefix); }\n async countVaultLinksAfter(sinceMs: number): Promise<number> { return vault.countVaultLinksAfter(this.pool, sinceMs); }\n async getVaultLinksFromPaths(sourcePaths: string[]): Promise<VaultLinkRow[]> { return vault.getVaultLinksFromPaths(this.pool, sourcePaths); }\n async getVaultLinkEdges(): Promise<Array<{ source: string; target: string }>> { return vault.getVaultLinkEdges(this.pool); }\n async getVaultLinkEdgesWithPrefix(prefix: string): Promise<Array<{ source: string; target: string }>> { return vault.getVaultLinkEdgesWithPrefix(this.pool, prefix); }\n async getVaultLinkEdgesAfter(sinceMs: number): Promise<Array<{ source: string; target: string }>> { return vault.getVaultLinkEdgesAfter(this.pool, sinceMs); }\n\n async upsertVaultHealth(rows: VaultHealthRow[]): Promise<void> { return vault.upsertVaultHealth(this.pool, rows); }\n async getVaultHealth(vaultPath: string): Promise<VaultHealthRow | null> { return vault.getVaultHealth(this.pool, vaultPath); }\n async getOrphans(): Promise<VaultHealthRow[]> { return vault.getOrphans(this.pool); }\n async getOrphansWithPrefix(prefix: string): Promise<string[]> { return vault.getOrphansWithPrefix(this.pool, prefix); }\n async getOrphansAfter(sinceMs: number): Promise<string[]> { return vault.getOrphansAfter(this.pool, sinceMs); }\n 
async getLowConnectivity(): Promise<string[]> { return vault.getLowConnectivity(this.pool); }\n async getLowConnectivityWithPrefix(prefix: string): Promise<string[]> { return vault.getLowConnectivityWithPrefix(this.pool, prefix); }\n async getLowConnectivityAfter(sinceMs: number): Promise<string[]> { return vault.getLowConnectivityAfter(this.pool, sinceMs); }\n\n async upsertNameIndex(entries: VaultNameEntry[]): Promise<void> { return vault.upsertNameIndex(this.pool, entries); }\n async replaceNameIndex(entries: VaultNameEntry[]): Promise<void> { return vault.replaceNameIndex(this.pool, entries); }\n async resolveVaultName(name: string): Promise<string[]> { return vault.resolveVaultName(this.pool, name); }\n async searchVaultNameIndex(query: string, limit?: number): Promise<string[]> { return vault.searchVaultNameIndex(this.pool, query, limit); }\n\n // Legacy memory_chunks methods (used by graph and zettelkasten modules)\n async getChunksWithEmbeddings(projectId: number, limit: number): Promise<Array<{ path: string; text: string; embedding: Buffer }>> {\n const r = await this.pool.query<{ path: string; text: string; embedding: Buffer }>(\n `SELECT path, text, embedding FROM memory_chunks WHERE project_id = $1 AND embedding IS NOT NULL ORDER BY path, start_line LIMIT $2`,\n [projectId, limit]\n );\n return r.rows;\n }\n\n async getChunksForPath(projectId: number, path: string, limit = 20): Promise<Array<{ text: string; embedding: Buffer | null }>> {\n const r = await this.pool.query<{ text: string; embedding: Buffer | null }>(\n `SELECT text, embedding FROM memory_chunks WHERE project_id = $1 AND path = $2 AND embedding IS NOT NULL ORDER BY start_line LIMIT $3`,\n [projectId, path, limit]\n );\n return r.rows;\n }\n\n async searchChunksByText(projectId: number, query: string, limit: number): Promise<Array<{ path: string; text: string }>> {\n const r = await this.pool.query<{ path: string; text: string }>(\n `SELECT DISTINCT path, text FROM memory_chunks WHERE 
project_id = $1 AND lower(text) LIKE lower($2) LIMIT $3`,\n [projectId, `%${query}%`, limit]\n );\n return r.rows;\n }\n}\n"],"mappings":";;;;;;;;;;;;;AASA,SAAgB,eAAe,KAAuB;CACpD,MAAM,SAAmB,EAAE;AAC3B,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK,EACnC,QAAO,KAAK,IAAI,YAAY,EAAE,CAAC;AAEjC,QAAO;;;;;;;;;;;;;AAcT,SAAgB,eAAe,OAAuB;CACpD,MAAM,SAAS,MACZ,aAAa,CACb,MAAM,cAAc,CACpB,OAAO,QAAQ,CACf,QAAQ,MAAM,EAAE,UAAU,EAAE,CAC5B,QAAQ,MAAM,CAAC,WAAW,IAAI,EAAE,CAAC,CAEjC,KAAK,MAAM,EAAE,QAAQ,MAAM,KAAK,CAAC,QAAQ,aAAa,GAAG,CAAC,CAC1D,OAAO,QAAQ;AAElB,KAAI,OAAO,WAAW,EAEpB,QADY,MAAM,QAAQ,eAAe,IAAI,CAAC,MAAM,CAAC,MAAM,MAAM,CAAC,OAAO,QAAQ,CAAC,KAAK,MAAM,IAC/E;AAGhB,QAAO,OAAO,KAAK,MAAM;;;;;;;;AChC3B,eAAsB,cACpB,MACA,OACA,MACyB;CACzB,MAAM,aAAa,MAAM,cAAc;CAEvC,MAAM,UAAU,eAAe,MAAM;AACrC,KAAI,CAAC,QAAS,QAAO,EAAE;CAEvB,MAAM,aAAuB,CAAC,yCAAyC;CACvE,MAAM,SAA8B,CAAC,QAAQ;CAC7C,IAAI,WAAW;AAEf,KAAI,MAAM,cAAc,KAAK,WAAW,SAAS,GAAG;EAClD,MAAM,eAAe,KAAK,WAAW,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AAC3E,aAAW,KAAK,kBAAkB,aAAa,GAAG;AAClD,SAAO,KAAK,GAAG,KAAK,WAAW;;AAGjC,KAAI,MAAM,WAAW,KAAK,QAAQ,SAAS,GAAG;EAC5C,MAAM,eAAe,KAAK,QAAQ,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACxE,aAAW,KAAK,cAAc,aAAa,GAAG;AAC9C,SAAO,KAAK,GAAG,KAAK,QAAQ;;AAG9B,KAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;EACxC,MAAM,eAAe,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACtE,aAAW,KAAK,YAAY,aAAa,GAAG;AAC5C,SAAO,KAAK,GAAG,KAAK,MAAM;;AAG5B,QAAO,KAAK,WAAW;CACvB,MAAM,aAAa,IAAI;CAEvB,MAAM,MAAM;;;;;;;;;;;YAWF,WAAW,KAAK,QAAQ,CAAC;;YAEzB,WAAW;;AAGrB,KAAI;AAYF,UAXe,MAAM,KAAK,MASvB,KAAK,OAAO,EAED,KAAK,KAAK,SAAS;GAC/B,WAAW,IAAI;GACf,MAAM,IAAI;GACV,WAAW,IAAI;GACf,SAAS,IAAI;GACb,SAAS,IAAI;GACb,OAAO,IAAI;GACX,MAAM,IAAI;GACV,QAAQ,IAAI;GACb,EAAE;UACI,GAAG;AACV,UAAQ,OAAO,MAAM,uCAAuC,EAAE,IAAI;AAClE,SAAO,EAAE;;;;;;AAOb,eAAsB,eACpB,MACA,gBACA,MACyB;CACzB,MAAM,aAAa,MAAM,cAAc;CAEvC,MAAM,aAAuB,CAAC,wBAAwB;CACtD,MAAM,SAA8B,EAAE;CACtC,IAAI,WAAW;CAEf,MAAM,SAAS,MAAM,MAAM,KAAK,eAAe,CAAC,KAAK,IAAI,GAAG;AAC5D,QAAO,KAAK,OAAO;CACnB,MAAM,WAAW,IAAI;AAErB,KAAI,MAAM,cAAc,KAAK,WAAW,SAAS,GAAG;EAClD,MAAM,eAAe,KAA
K,WAAW,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AAC3E,aAAW,KAAK,kBAAkB,aAAa,GAAG;AAClD,SAAO,KAAK,GAAG,KAAK,WAAW;;AAGjC,KAAI,MAAM,WAAW,KAAK,QAAQ,SAAS,GAAG;EAC5C,MAAM,eAAe,KAAK,QAAQ,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACxE,aAAW,KAAK,cAAc,aAAa,GAAG;AAC9C,SAAO,KAAK,GAAG,KAAK,QAAQ;;AAG9B,KAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;EACxC,MAAM,eAAe,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACtE,aAAW,KAAK,YAAY,aAAa,GAAG;AAC5C,SAAO,KAAK,GAAG,KAAK,MAAM;;AAG5B,QAAO,KAAK,WAAW;CACvB,MAAM,aAAa,IAAI;CAGvB,MAAM,MAAM;;;;;;;;;2BASa,SAAS;;YAExB,WAAW,KAAK,QAAQ,CAAC;6BACR,SAAS;YAC1B,WAAW;;AAGrB,KAAI;EACF,MAAM,SAAS,MAAM,KAAK,MASvB,KAAK,OAAO;EAEf,MAAM,WAAW,MAAM,YAAY;AAEnC,SAAO,OAAO,KACX,KAAK,SAAS;GACb,WAAW,IAAI;GACf,MAAM,IAAI;GACV,WAAW,IAAI;GACf,SAAS,IAAI;GACb,SAAS,IAAI;GACb,OAAO,IAAI;GACX,MAAM,IAAI;GACV,QAAQ,IAAI;GACb,EAAE,CACF,QAAQ,MAAM,EAAE,SAAS,SAAS;UAC9B,GAAG;AACV,UAAQ,OAAO,MAAM,wCAAwC,EAAE,IAAI;AACnE,SAAO,EAAE;;;;;;ACjKb,eAAsB,gBAAgB,MAAY,MAAmC;AACnF,OAAM,KAAK,MACT;;;;;0CAMA;EAAC,KAAK;EAAW,KAAK;EAAO,KAAK;EAAQ,KAAK;EAAM,KAAK;EAAO,KAAK;EAAU,CACjF;;AAGH,eAAsB,gBAAgB,MAAY,WAAkC;CAClF,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,QAAM,OAAO,MAAM,kDAAkD,CAAC,UAAU,CAAC;AACjF,QAAM,OAAO,MAAM,kDAAkD,CAAC,UAAU,CAAC;AACjF,QAAM,OAAO,MAAM,sDAAsD,CAAC,UAAU,CAAC;AACrF,QAAM,OAAO,MAAM,0EAA0E,CAAC,UAAU,CAAC;AACzG,QAAM,OAAO,MAAM,iDAAiD,CAAC,UAAU,CAAC;AAChF,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAMpB,SAAS,gBAAgB,KAAmC;AAC1D,QAAO;EACL,WAAW,IAAI;EACf,OAAO,OAAO,IAAI,MAAM;EACxB,QAAQ,OAAO,IAAI,OAAO;EAC1B,MAAM,IAAI;EACV,OAAO,IAAI;EACX,WAAW,OAAO,IAAI,WAAW;EAClC;;AAGH,eAAsB,aAAa,MAAY,WAAiD;CAC9F,MAAM,IAAI,MAAM,KAAK,MACnB,oGACA,CAAC,UAAU,CACZ;AACD,QAAO,EAAE,KAAK,WAAW,IAAI,OAAO,gBAAgB,EAAE,KAAK,GAAG;;AAGhE,eAAsB,oBAAoB,MAAY,OAAe,QAA8C;CACjH,MAAM,IAAI,MAAM,KAAK,MACnB,uHACA,CAAC,OAAO,OAAO,CAChB;AACD,QAAO,EAAE,KAAK,WAAW,IAAI,OAAO,gBAAgB,EAAE,KAAK,GAAG;;AAGhE,eAAsB,iBAAiB,MAAqC;AAE1E,SADU,MAAM,KAAK,MAAsB,6EAA6E,EAC/G,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,oBAAoB,MAAY,SAA0C;AAK9F,SA
JU,MAAM,KAAK,MACnB,oGACA,CAAC,QAAQ,CACV,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,gBAAgB,MAA6B;CACjE,MAAM,IAAI,MAAM,KAAK,MAAqB,8CAA8C;AACxF,QAAO,SAAS,EAAE,KAAK,IAAI,KAAK,KAAK,GAAG;;AAG1C,eAAsB,0BAA0B,MAAY,QAAiC;CAC3F,MAAM,IAAI,MAAM,KAAK,MAAqB,kEAAkE,CAAC,GAAG,OAAO,GAAG,CAAC;AAC3H,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,qBAAqB,MAAY,SAAkC;CACvF,MAAM,IAAI,MAAM,KAAK,MAAqB,+DAA+D,CAAC,QAAQ,CAAC;AACnH,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,qBAAqB,MAAY,OAA0C;AAC/F,KAAI,MAAM,WAAW,EAAG,QAAO,EAAE;CACjC,MAAM,eAAe,MAAM,KAAK,GAAG,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK;AAKhE,SAJU,MAAM,KAAK,MACnB,mGAAmG,aAAa,IAChH,MACD,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,0BAA0B,MAAY,OAAiB,SAA0C;AACrH,KAAI,MAAM,WAAW,EAAG,QAAO,EAAE;CACjC,MAAM,eAAe,MAAM,KAAK,GAAG,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK;AAKhE,SAJU,MAAM,KAAK,MACnB,mGAAmG,aAAa,uBAAuB,MAAM,SAAS,EAAE,2BACxJ,CAAC,GAAG,OAAO,QAAQ,CACpB,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,qBAAqB,MAA+B;AAExE,SADU,MAAM,KAAK,MAA8B,qCAAqC,EAC/E,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,4BAA4B,MAAY,QAAmC;AAK/F,SAJU,MAAM,KAAK,MACnB,+DACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,uBAAuB,MAAY,SAAoC;AAK3F,SAJU,MAAM,KAAK,MACnB,4DACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAO1C,eAAsB,mBAAmB,MAAY,SAAyC;AAC5F,KAAI,QAAQ,WAAW,EAAG;CAC1B,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,OAAK,MAAM,KAAK,QACd,OAAM,OAAO,MACX;;;;8DAKA;GAAC,EAAE;GAAW,EAAE;GAAe,EAAE;GAAO,EAAE;GAAO,CAClD;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,mBAAmB,MAAY,eAAsC;AACzF,OAAM,KAAK,MAAM,uDAAuD,CAAC,cAAc,CAAC;;AAG1F,eAAsB,cAAc,MAAY,WAA8D;CAC5G,MAAM,IAAI,MAAM,KAAK,MACnB,kEACA,CAAC,UAAU,CACZ;AACD,QAAO,EAAE,KAAK,SAAS,IAAI,EAAE,eAAe,EAAE,KAAK,GAAG,gBAAgB,GAAG;;AAS3E,SAAS,gBAAgB,KAAmC;AAC1D,QAAO;EACL,YAAY,IAAI;EAChB,WAAW,IAAI;EACf,YAAY,IAAI;EAChB,UAAU,IAAI;EACd,YAAY,IAAI;EAChB,YAAa,IAAI,cAA6C;EAC/D;;AAGH,eAAsB,uBAAuB,MAAY,aAAuB,OAAsC;CACpH,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;A
AC3B,MAAI,YAAY,SAAS,EACvB,OAAM,OAAO,MACX,+DACA,CAAC,YAAY,CACd;AAEH,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,KAAK;GAC1C,MAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,IAAI;GACrC,MAAM,SAAmB,EAAE;GAC3B,MAAM,SAAqC,EAAE;GAC7C,IAAI,MAAM;AACV,QAAK,MAAM,KAAK,OAAO;AACrB,WAAO,KAAK,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,GAAG;AACjF,WAAO,KAAK,EAAE,YAAY,EAAE,WAAW,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,EAAE,cAAc,YAAY;;AAE7G,SAAM,OAAO,MACX;kBACU,OAAO,KAAK,KAAK,CAAC;;;8CAI5B,OACD;;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,mBAAmB,MAAY,YAA6C;AAKhG,SAJU,MAAM,KAAK,MACnB,2HACA,CAAC,WAAW,CACb,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,iBAAiB,MAAY,YAA6C;AAK9F,SAJU,MAAM,KAAK,MACnB,2HACA,CAAC,WAAW,CACb,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,kBAAkB,MAA0E;AAIhH,SAHU,MAAM,KAAK,MACnB,iFACD,EACQ;;AAGX,eAAsB,aAAa,MAAuE;AAIxG,SAHU,MAAM,KAAK,MACnB,4EACD,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,EAAE;;AAGxF,eAAsB,4BAA4B,MAA2F;AAI3I,SAHU,MAAM,KAAK,MACnB,yFACD,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,YAAY,IAAI;EAAa,EAAE;;AAGrH,eAAsB,uBAAuB,MAAY,QAA+F;AAKtJ,SAJU,MAAM,KAAK,MACnB,kHACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,YAAY,IAAI;EAAa,EAAE;;AAGrH,eAAsB,kBAAkB,MAAY,SAAgG;AAKlJ,SAJU,MAAM,KAAK,MACnB,wKACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,YAAY,IAAI;EAAa,EAAE;;AAGrH,eAAsB,0BAA0B,MAAY,QAAiC;CAC3F,MAAM,IAAI,MAAM,KAAK,MAAqB,mEAAmE,CAAC,GAAG,OAAO,GAAG,CAAC;AAC5H,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,qBAAqB,MAAY,SAAkC;CACvF,MAAM,IAAI,MAAM,KAAK,MACnB,yHACA,CAAC,QAAQ,CACV;AACD,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,uBAAuB,MAAY,aAAgD;AACvG,KAAI,YAAY,WAAW,EAAG,QAAO,EAAE;CACvC,MAAM,eAAe,YAAY,KAAK,GAAG,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK;AAKtE,SAJU,MAAM,KAAK,MACnB,0HAA0H,aAAa,gCACvI,YACD,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,kBAAkB,MAAgE;AAItG,SAHU,MAAM,KAAK,MACnB,8GACD,EACQ;;AAGX,eAAsB,4BAA4B,MAAY,QAAoE;AAKhI,SAJU,MAAM,KAAK,MACnB,uIACA,CAAC,GAAG,OAAO,GA
AG,CACf,EACQ;;AAGX,eAAsB,uBAAuB,MAAY,SAAqE;AAK5H,SAJU,MAAM,KAAK,MACnB,6LACA,CAAC,QAAQ,CACV,EACQ;;AASX,SAAS,kBAAkB,KAAuC;AAChE,QAAO;EACL,WAAW,IAAI;EACf,cAAc,IAAI;EAClB,eAAe,IAAI;EACnB,eAAe,IAAI;EACnB,UAAU,IAAI,cAAc;EAC5B,YAAY,OAAO,IAAI,YAAY;EACpC;;AAGH,eAAsB,kBAAkB,MAAY,MAAuC;AACzF,KAAI,KAAK,WAAW,EAAG;CACvB,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,OAAK,MAAM,KAAK,KACd,OAAM,OAAO,MACX;;;;;;;gDAQA;GAAC,EAAE;GAAW,EAAE;GAAc,EAAE;GAAe,EAAE;GAAe,EAAE,WAAW,IAAI;GAAG,EAAE;GAAW,CAClG;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,eAAe,MAAY,WAAmD;CAClG,MAAM,IAAI,MAAM,KAAK,MACnB,qIACA,CAAC,UAAU,CACZ;AACD,QAAO,EAAE,KAAK,WAAW,IAAI,OAAO,kBAAkB,EAAE,KAAK,GAAG;;AAGlE,eAAsB,WAAW,MAAuC;AAItE,SAHU,MAAM,KAAK,MACnB,kIACD,EACQ,KAAK,KAAI,SAAQ;EAAE,GAAG,kBAAkB,IAAI;EAAE,UAAU;EAAM,EAAE;;AAG3E,eAAsB,qBAAqB,MAAY,QAAmC;AAKxF,SAJU,MAAM,KAAK,MACnB,kFACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,gBAAgB,MAAY,SAAoC;AAKpF,SAJU,MAAM,KAAK,MACnB,gJACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,mBAAmB,MAA+B;AAItE,SAHU,MAAM,KAAK,MACnB,gFACD,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,6BAA6B,MAAY,QAAmC;AAKhG,SAJU,MAAM,KAAK,MACnB,wGACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,wBAAwB,MAAY,SAAoC;AAK5F,SAJU,MAAM,KAAK,MACnB,yKACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAO1C,eAAsB,gBAAgB,MAAY,SAA0C;AAC1F,KAAI,QAAQ,WAAW,EAAG;CAC1B,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,OAAK,MAAM,KAAK,QACd,OAAM,OAAO,MACX;qEAEA,CAAC,EAAE,MAAM,EAAE,UAAU,CACtB;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,IAAI;AACX,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,iBAAiB,MAAY,SAA0C;CAC3F,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,QAAM,OAAO,MAAM,+BAA+B;AAClD,OAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK,KAAK;GAC5C,MAAM,QAAQ,QAAQ,MAAM,GAAG,IAAI,IAAI;GACvC,MAAM,SAAmB,EAAE;GAC3B,MAAM,SAAmB,EAAE;GAC3B,IAAI,MAAM;AACV,QAAK,MAAM,KAAK,OAAO;AAC
rB,WAAO,KAAK,KAAK,MAAM,KAAK,MAAM,GAAG;AACrC,WAAO,KAAK,EAAE,MAAM,EAAE,UAAU;;AAElC,SAAM,OAAO,MACX,0DAA0D,OAAO,KAAK,KAAK,IAC3E,OACD;;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,iBAAiB,MAAY,MAAiC;AAKlF,SAJU,MAAM,KAAK,MACnB,2DACA,CAAC,KAAK,CACP,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,qBAAqB,MAAY,OAAe,QAAQ,KAAwB;AAKpG,SAJU,MAAM,KAAK,MACnB,8FACA,CAAC,IAAI,MAAM,IAAI,MAAM,CACtB,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;;;;;;;;;;;;;;ACzd1C,MAAM,EAAE,MAAM,WAAW;AAEzB,IAAa,kBAAb,MAAa,gBAA0C;CACrD,AAAS,cAAc;CAEvB,AAAQ;;;;;;;CAQR,aAAa,eAAe,QAAuC;EACjE,MAAM,UACJ,OAAO,oBACP,gBAAgB,OAAO,QAAQ,MAAM,GAAG,OAAO,YAAY,MAAM,GAAG,OAAO,QAAQ,YAAY,GAAG,OAAO,QAAQ,KAAK,GAAG,OAAO,YAAY;EAE9I,MAAM,WADM,IAAI,IAAI,QAAQ,CACP,SAAS,MAAM,EAAE;EAEtC,MAAM,WAAW,IAAI,IAAI,QAAQ;AACjC,WAAS,WAAW;EACpB,MAAM,YAAY,IAAI,OAAO;GAC3B,kBAAkB,SAAS,UAAU;GACrC,KAAK;GACL,yBAAyB;GAC1B,CAAC;AAEF,MAAI;AAKF,QAJc,MAAM,UAAU,MAC5B,gDACA,CAAC,SAAS,CACX,EACS,aAAa,GAAG;AACxB,UAAM,UAAU,MAAM,oBAAoB,SAAS,GAAG;AACtD,YAAQ,OAAO,MAAM,oCAAoC,SAAS,IAAI;;YAEhE;AACR,SAAM,UAAU,KAAK;;EAGvB,MAAM,aAAa,IAAI,OAAO;GAC5B,kBAAkB;GAClB,KAAK;GACL,yBAAyB;GAC1B,CAAC;AAEF,MAAI;AAIF,QAHmB,MAAM,WAAW,MAClC,0EACD,EACc,aAAa,GAAG;IAC7B,MAAM,YAAY,QAAQ,cAAc,OAAO,KAAK,IAAI,CAAC;IACzD,MAAM,cAAc,KAAK,WAAW,wBAAwB;IAC5D,IAAI;AACJ,QAAI;AACF,eAAU,aAAa,aAAa,QAAQ;YACtC;AAEN,eAAU,aADM,KAAK,WAAW,qBAAqB,EACrB,QAAQ;;AAE1C,UAAM,WAAW,MAAM,QAAQ;AAC/B,YAAQ,OAAO,MAAM,8CAA8C,SAAS,IAAI;;AAIlF,SAAM,gBAAgB,cAAc,WAAW;YACvC;AACR,SAAM,WAAW,KAAK;;;;;;;CAQ1B,aAAqB,cAAc,MAA2B;AAM5D,OAJiB,MAAM,KAAK,MAC1B;wEAED,EACY,aAAa,GAAG;AAC3B,SAAM,KAAK,MACT,kFACD;AACD,WAAQ,OAAO,MAAM,qEAAqE;;AAO5F,OAHgB,MAAM,KAAK,MACzB,0EACD,EACW,aAAa,GAAG;AAC1B,SAAM,KAAK,MAAM;;;;;;;;;;;;;QAaf;AACF,SAAM,KAAK,MAAM,uDAAuD;AACxE,SAAM,KAAK,MAAM,yDAAyD;AAC1E,SAAM,KAAK,MAAM,sDAAsD;AACvE,SAAM,KAAK,MAAM,oEAAoE;AACrF,WAAQ,OAAO,MAAM,uDAAuD;;;CAIhF,YAAY,QAAwB;AAKlC,OAAK,OAAO,IAAI,OAAO;GACrB,kBAJA,OAAO,oBACP,gBAAgB,OAAO,QAAQ,MAAM,GAAG,OAAO,YAAY,MAAM,GAAG,OAAO,QAAQ,YAAY,GAAG,OAAO,QAAQ
,KAAK,GAAG,OAAO,YAAY;GAI5I,KAAK,OAAO,kBAAkB;GAC9B,yBAAyB,OAAO,uBAAuB;GACvD,mBAAmB;GACpB,CAAC;AAEF,OAAK,KAAK,GAAG,UAAU,QAAQ;AAC7B,WAAQ,OAAO,MAAM,8BAA8B,IAAI,QAAQ,IAAI;IACnE;;CAOJ,MAAM,QAAuB;AAC3B,QAAM,KAAK,KAAK,KAAK;;;;;;CAOvB,UAAgB;AACd,SAAO,KAAK;;CAGd,MAAM,WAAqC;EACzC,MAAM,SAAS,MAAM,KAAK,KAAK,SAAS;AACxC,MAAI;GACF,MAAM,cAAc,MAAM,OAAO,MAC/B,4CACD;GACD,MAAM,eAAe,MAAM,OAAO,MAChC,6CACD;AACD,UAAO;IACL,OAAO,SAAS,YAAY,KAAK,IAAI,KAAK,KAAK,GAAG;IAClD,QAAQ,SAAS,aAAa,KAAK,IAAI,KAAK,KAAK,GAAG;IACrD;YACO;AACR,UAAO,SAAS;;;;;;;CAQpB,MAAM,iBAAyC;EAC7C,IAAI,SAA4B;AAChC,MAAI;AACF,YAAS,MAAM,KAAK,KAAK,SAAS;AAClC,SAAM,OAAO,MAAM,WAAW;AAC9B,UAAO;WACA,GAAG;AACV,UAAO,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;YACzC;AACR,WAAQ,SAAS;;;CAQrB,MAAM,YAAY,WAAmB,MAA2C;AAK9E,UAJe,MAAM,KAAK,KAAK,MAC7B,kEACA,CAAC,WAAW,KAAK,CAClB,EACa,KAAK,IAAI;;CAGzB,MAAM,WAAW,MAA8B;AAC7C,QAAM,KAAK,KAAK,MACd;;;;;;;kCAQA;GAAC,KAAK;GAAW,KAAK;GAAM,KAAK;GAAQ,KAAK;GAAM,KAAK;GAAM,KAAK;GAAO,KAAK;GAAK,CACtF;;CAOH,MAAM,YAAY,WAAmB,MAAiC;AAKpE,UAJe,MAAM,KAAK,KAAK,MAC7B,iEACA,CAAC,WAAW,KAAK,CAClB,EACa,KAAK,KAAK,MAAM,EAAE,GAAG;;CAGrC,MAAM,oBAAoB,WAAmB,MAA6B;AACxE,QAAM,KAAK,KAAK,MACd,8DACA,CAAC,WAAW,KAAK,CAClB;;CAGH,MAAM,aAAa,QAAmC;AACpD,MAAI,OAAO,WAAW,EAAG;EAEzB,MAAM,SAAS,MAAM,KAAK,KAAK,SAAS;AACxC,MAAI;AACF,SAAM,OAAO,MAAM,QAAQ;AAE3B,QAAK,MAAM,KAAK,QAAQ;IACtB,MAAM,WAAW,EAAE,KAAK,QAAQ,OAAO,GAAG;AAE1C,UAAM,OAAO,MACX;;;;;;;;;;;;;;;gDAgBA;KACE,EAAE;KAAI,EAAE;KAAW,EAAE;KAAQ,EAAE;KAAM,EAAE;KACvC,EAAE;KAAW,EAAE;KAAS,EAAE;KAAM;KAAU,EAAE;KAC7C,CACF;;AAGH,SAAM,OAAO,MAAM,SAAS;WACrB,GAAG;AACV,SAAM,OAAO,MAAM,WAAW;AAC9B,SAAM;YACE;AACR,UAAO,SAAS;;;CAIpB,MAAM,sBAAsB,WAAsC;AAKhE,UAJe,MAAM,KAAK,KAAK,MAC7B,8DACA,CAAC,UAAU,CACZ,EACa,KAAK,KAAK,MAAM,EAAE,KAAK;;CAGvC,MAAM,YAAY,WAAmB,OAAgC;AACnE,MAAI,MAAM,WAAW,EAAG;EACxB,MAAM,SAAS,MAAM,KAAK,KAAK,SAAS;AACxC,MAAI;AACF,SAAM,OAAO,MAAM,QAAQ;AAC3B,QAAK,MAAM,QAAQ,OAAO;AACxB,UAAM,OAAO,MAAM,8DAA8D,CAAC,WAAW,KAAK,CAAC;AACnG,UAAM,OAAO,MAAM,6DAA6D,CAAC,WAAW,KAAK,CAAC;;AAEpG,SAAM,OAAO,MAAM,SAAS;WACrB,GAAG;AACV,SAAM,OAAO,MAAM,WAAW;A
AC9B,SAAM;YACE;AACR,UAAO,SAAS;;;CAIpB,MAAM,sBAAsB,WAAoG;AAC9H,MAAI,cAAc,OAKhB,SAJe,MAAM,KAAK,KAAK,MAC7B,6GACA,CAAC,UAAU,CACZ,EACa;AAKhB,UAHe,MAAM,KAAK,KAAK,MAC7B,wFACD,EACa;;CAGhB,MAAM,gBAAgB,SAAiB,WAAkC;EAEvE,MAAM,SAAS,MADH,eAAe,UAAU,CACZ,KAAK,IAAI,GAAG;AACrC,QAAM,KAAK,KAAK,MACd,8DACA,CAAC,QAAQ,QAAQ,CAClB;;CAOH,MAAM,cAAc,OAAe,MAA+C;AAChF,SAAO,cAAc,KAAK,MAAM,OAAO,KAAK;;CAG9C,MAAM,eAAe,gBAA8B,MAA+C;AAChG,SAAO,eAAe,KAAK,MAAM,gBAAgB,KAAK;;CAOxD,MAAM,gBAAgB,MAAmC;AAAE,SAAOA,gBAAsB,KAAK,MAAM,KAAK;;CACxG,MAAM,gBAAgB,WAAkC;AAAE,SAAOC,gBAAsB,KAAK,MAAM,UAAU;;CAC5G,MAAM,aAAa,WAAiD;AAAE,SAAOC,aAAmB,KAAK,MAAM,UAAU;;CACrH,MAAM,oBAAoB,OAAe,QAA8C;AAAE,SAAOC,oBAA0B,KAAK,MAAM,OAAO,OAAO;;CACnJ,MAAM,mBAA4C;AAAE,SAAOC,iBAAuB,KAAK,KAAK;;CAC5F,MAAM,oBAAoB,SAA0C;AAAE,SAAOC,oBAA0B,KAAK,MAAM,QAAQ;;CAC1H,MAAM,kBAAmC;AAAE,SAAOC,gBAAsB,KAAK,KAAK;;CAClF,MAAM,0BAA0B,QAAiC;AAAE,SAAOC,0BAAgC,KAAK,MAAM,OAAO;;CAC5H,MAAM,qBAAqB,SAAkC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,QAAQ;;CACpH,MAAM,qBAAqB,OAA0C;AAAE,SAAOC,qBAA2B,KAAK,MAAM,MAAM;;CAC1H,MAAM,0BAA0B,OAAiB,SAA0C;AAAE,SAAOC,0BAAgC,KAAK,MAAM,OAAO,QAAQ;;CAC9J,MAAM,uBAA0C;AAAE,SAAOC,qBAA2B,KAAK,KAAK;;CAC9F,MAAM,4BAA4B,QAAmC;AAAE,SAAOC,4BAAkC,KAAK,MAAM,OAAO;;CAClI,MAAM,uBAAuB,SAAoC;AAAE,SAAOC,uBAA6B,KAAK,MAAM,QAAQ;;CAE1H,MAAM,mBAAmB,SAAyC;AAAE,SAAOC,mBAAyB,KAAK,MAAM,QAAQ;;CACvH,MAAM,mBAAmB,eAAsC;AAAE,SAAOC,mBAAyB,KAAK,MAAM,cAAc;;CAC1H,MAAM,cAAc,WAA8D;AAAE,SAAOC,cAAoB,KAAK,MAAM,UAAU;;CAEpI,MAAM,uBAAuB,aAAuB,OAAsC;AAAE,SAAOC,uBAA6B,KAAK,MAAM,aAAa,MAAM;;CAC9J,MAAM,mBAAmB,YAA6C;AAAE,SAAOC,mBAAyB,KAAK,MAAM,WAAW;;CAC9H,MAAM,iBAAiB,YAA6C;AAAE,SAAOC,iBAAuB,KAAK,MAAM,WAAW;;CAC1H,MAAM,oBAAkF;AAAE,SAAOC,kBAAwB,KAAK,KAAK;;CACnI,MAAM,eAA0E;AAAE,SAAOC,aAAmB,KAAK,KAAK;;CACtH,MAAM,8BAA6G;AAAE,SAAOC,4BAAkC,KAAK,KAAK;;CACxK,MAAM,uBAAuB,QAA+F;AAAE,SAAOC,uBAA6B,KAAK,MAAM,OAAO;;CACpL,MAAM,kBAAkB,SAAgG;AAAE,SAAOC,kBAAwB,KAAK,MAAM,QAAQ;;CAC5K,MAAM,0BAA0B,QAAiC;AAAE,SAAOC,0BAAgC,KAAK,MAAM,OAAO;;CAC5H,MAAM,qBAAqB,SAAkC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,QAAQ;;CACpH,MAAM,uBAAuB,aAAgD;AAAE,SAAOC,u
BAA6B,KAAK,MAAM,YAAY;;CAC1I,MAAM,oBAAwE;AAAE,SAAOC,kBAAwB,KAAK,KAAK;;CACzH,MAAM,4BAA4B,QAAoE;AAAE,SAAOC,4BAAkC,KAAK,MAAM,OAAO;;CACnK,MAAM,uBAAuB,SAAqE;AAAE,SAAOC,uBAA6B,KAAK,MAAM,QAAQ;;CAE3J,MAAM,kBAAkB,MAAuC;AAAE,SAAOC,kBAAwB,KAAK,MAAM,KAAK;;CAChH,MAAM,eAAe,WAAmD;AAAE,SAAOC,eAAqB,KAAK,MAAM,UAAU;;CAC3H,MAAM,aAAwC;AAAE,SAAOC,WAAiB,KAAK,KAAK;;CAClF,MAAM,qBAAqB,QAAmC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,OAAO;;CACpH,MAAM,gBAAgB,SAAoC;AAAE,SAAOC,gBAAsB,KAAK,MAAM,QAAQ;;CAC5G,MAAM,qBAAwC;AAAE,SAAOC,mBAAyB,KAAK,KAAK;;CAC1F,MAAM,6BAA6B,QAAmC;AAAE,SAAOC,6BAAmC,KAAK,MAAM,OAAO;;CACpI,MAAM,wBAAwB,SAAoC;AAAE,SAAOC,wBAA8B,KAAK,MAAM,QAAQ;;CAE5H,MAAM,gBAAgB,SAA0C;AAAE,SAAOC,gBAAsB,KAAK,MAAM,QAAQ;;CAClH,MAAM,iBAAiB,SAA0C;AAAE,SAAOC,iBAAuB,KAAK,MAAM,QAAQ;;CACpH,MAAM,iBAAiB,MAAiC;AAAE,SAAOC,iBAAuB,KAAK,MAAM,KAAK;;CACxG,MAAM,qBAAqB,OAAe,OAAmC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,OAAO,MAAM;;CAGzI,MAAM,wBAAwB,WAAmB,OAAkF;AAKjI,UAJU,MAAM,KAAK,KAAK,MACxB,sIACA,CAAC,WAAW,MAAM,CACnB,EACQ;;CAGX,MAAM,iBAAiB,WAAmB,MAAc,QAAQ,IAAgE;AAK9H,UAJU,MAAM,KAAK,KAAK,MACxB,wIACA;GAAC;GAAW;GAAM;GAAM,CACzB,EACQ;;CAGX,MAAM,mBAAmB,WAAmB,OAAe,OAA+D;AAKxH,UAJU,MAAM,KAAK,KAAK,MACxB,+GACA;GAAC;GAAW,IAAI,MAAM;GAAI;GAAM,CACjC,EACQ"}
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
import { t as __exportAll } from "./rolldown-runtime-95iHPtFO.mjs";
|
|
2
|
+
import { existsSync, mkdirSync, writeFileSync } from "node:fs";
|
|
3
|
+
import { homedir } from "node:os";
|
|
4
|
+
import { join } from "node:path";
|
|
5
|
+
|
|
6
|
+
//#region src/zettelkasten/query-feedback.ts
|
|
7
|
+
/**
|
|
8
|
+
* Query feedback loop — persist search queries and results as markdown files.
|
|
9
|
+
*
|
|
10
|
+
* When memory_search or zettel_converse returns results, the query + result
|
|
11
|
+
* metadata is saved to ~/.config/pai/queries/ as a markdown file with YAML
|
|
12
|
+
* frontmatter. The daemon indexer picks these up on the next cycle and indexes
|
|
13
|
+
* them into federation.db, creating a self-reinforcing feedback loop: past
|
|
14
|
+
* queries become searchable context for future queries.
|
|
15
|
+
*/
|
|
16
|
+
// Module namespace object assembled via the bundler runtime's `__exportAll`
// helper (re-exported as `t` at the bottom of this file); the arrow wrapper
// defers resolution of `saveQueryResult` until first access.
var query_feedback_exports = /* @__PURE__ */ __exportAll({ saveQueryResult: () => saveQueryResult });
|
|
17
|
+
// Directory where query/result records are persisted for later indexing.
const QUERIES_DIR = join(homedir(), ".config", "pai", "queries");
/**
 * Ensure the queries directory exists.
 *
 * A single `mkdirSync` with `recursive: true` is a no-op when the directory
 * already exists, so the previous `existsSync` pre-check (a TOCTOU race
 * between the check and the create) is unnecessary. Failures (e.g. the path
 * exists as a regular file) propagate to the caller, which treats query
 * persistence as best-effort.
 */
function ensureQueriesDir() {
	mkdirSync(QUERIES_DIR, { recursive: true });
}
/**
 * Generate a filename-safe slug from a query string.
 *
 * Shape: `YYYY-MM-DD-<slug>-<suffix>`, where the slug is the lowercased query
 * reduced to [a-z0-9-] and capped at 60 characters, and the suffix is the
 * last four base-36 digits of the timestamp (disambiguates same-day queries).
 */
function querySlug(query, timestamp) {
	const datePart = new Date(timestamp).toISOString().slice(0, 10);
	const normalized = query
		.toLowerCase()
		.replace(/[^a-z0-9\s]/g, "")
		.replace(/\s+/g, "-")
		.slice(0, 60);
	const suffix = timestamp.toString(36).slice(-4);
	return [datePart, normalized, suffix].join("-");
}
|
|
31
|
+
/**
 * Save a query + result record as a markdown file with YAML frontmatter.
 *
 * The file lands in ~/.config/pai/queries/ and is picked up by the daemon
 * indexer on its next cycle. Returns the path of the written (or already
 * existing) file, or null when persistence fails — logging is best-effort
 * and must never crash the parent tool.
 */
function saveQueryResult(record) {
	try {
		ensureQueriesDir();
		const target = join(QUERIES_DIR, `${querySlug(record.query, record.timestamp)}.md`);
		// Same query at the same timestamp was already persisted — keep it.
		if (existsSync(target)) return target;
		const isoStamp = new Date(record.timestamp).toISOString();
		const frontmatter = [
			"---",
			`query: "${record.query.replace(/"/g, '\\"')}"`,
			`timestamp: ${isoStamp}`,
			`source: ${record.source}`,
			`result_count: ${record.resultCount}`,
			`source_slugs:`,
			...record.sourceSlugs.map((slug) => ` - "${slug}"`),
			"---"
		].join("\n");
		const body = [
			"",
			`# Query: ${record.query}`,
			"",
			`**Source:** ${record.source} `,
			`**Date:** ${isoStamp.slice(0, 19).replace("T", " ")} `,
			`**Results:** ${record.resultCount}`,
			"",
			"## Answer Preview",
			"",
			record.answerPreview,
			"",
			"## Source Paths",
			"",
			...record.sourceSlugs.map((slug) => `- \`${slug}\``),
			""
		].join("\n");
		writeFileSync(target, frontmatter + body, "utf8");
		return target;
	} catch {
		// Non-critical — swallow errors so the calling tool keeps working.
		return null;
	}
}
|
|
73
|
+
|
|
74
|
+
//#endregion
|
|
75
|
+
// Minified public surface: `n` → saveQueryResult, `t` → the module namespace object.
export { saveQueryResult as n, query_feedback_exports as t };
|
|
76
|
+
//# sourceMappingURL=query-feedback-Dv43XKHM.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"query-feedback-Dv43XKHM.mjs","names":[],"sources":["../src/zettelkasten/query-feedback.ts"],"sourcesContent":["/**\n * Query feedback loop — persist search queries and results as markdown files.\n *\n * When memory_search or zettel_converse returns results, the query + result\n * metadata is saved to ~/.config/pai/queries/ as a markdown file with YAML\n * frontmatter. The daemon indexer picks these up on the next cycle and indexes\n * them into federation.db, creating a self-reinforcing feedback loop: past\n * queries become searchable context for future queries.\n */\n\nimport { existsSync, mkdirSync, writeFileSync } from \"node:fs\";\nimport { join } from \"node:path\";\nimport { homedir } from \"node:os\";\n\nexport interface QueryRecord {\n /** The original query string. */\n query: string;\n /** Timestamp of the query. */\n timestamp: number;\n /** Tool that produced the result: 'memory_search' | 'zettel_converse'. */\n source: string;\n /** Slugs of the top result paths (for linking back). */\n sourceSlugs: string[];\n /** Preview of the answer/result (first 500 chars). */\n answerPreview: string;\n /** Number of results returned. 
*/\n resultCount: number;\n}\n\nconst QUERIES_DIR = join(homedir(), \".config\", \"pai\", \"queries\");\n\n/**\n * Ensure the queries directory exists.\n */\nfunction ensureQueriesDir(): void {\n if (!existsSync(QUERIES_DIR)) {\n mkdirSync(QUERIES_DIR, { recursive: true });\n }\n}\n\n/**\n * Generate a filename-safe slug from a query string.\n */\nfunction querySlug(query: string, timestamp: number): string {\n const slug = query\n .toLowerCase()\n .replace(/[^a-z0-9\\s]/g, \"\")\n .replace(/\\s+/g, \"-\")\n .slice(0, 60);\n const ts = new Date(timestamp).toISOString().slice(0, 10);\n const shortHash = timestamp.toString(36).slice(-4);\n return `${ts}-${slug}-${shortHash}`;\n}\n\n/**\n * Save a query + result record as a markdown file with YAML frontmatter.\n *\n * The file is written to ~/.config/pai/queries/ and will be picked up by\n * the daemon indexer on the next cycle.\n */\nexport function saveQueryResult(record: QueryRecord): string | null {\n try {\n ensureQueriesDir();\n\n const filename = querySlug(record.query, record.timestamp) + \".md\";\n const filepath = join(QUERIES_DIR, filename);\n\n // Don't overwrite if the exact file already exists\n if (existsSync(filepath)) return filepath;\n\n const frontmatter = [\n \"---\",\n `query: \"${record.query.replace(/\"/g, '\\\\\"')}\"`,\n `timestamp: ${new Date(record.timestamp).toISOString()}`,\n `source: ${record.source}`,\n `result_count: ${record.resultCount}`,\n `source_slugs:`,\n ...record.sourceSlugs.map((s) => ` - \"${s}\"`),\n \"---\",\n ].join(\"\\n\");\n\n const body = [\n \"\",\n `# Query: ${record.query}`,\n \"\",\n `**Source:** ${record.source} `,\n `**Date:** ${new Date(record.timestamp).toISOString().slice(0, 19).replace(\"T\", \" \")} `,\n `**Results:** ${record.resultCount}`,\n \"\",\n \"## Answer Preview\",\n \"\",\n record.answerPreview,\n \"\",\n \"## Source Paths\",\n \"\",\n ...record.sourceSlugs.map((s) => `- \\`${s}\\``),\n \"\",\n ].join(\"\\n\");\n\n writeFileSync(filepath, 
frontmatter + body, \"utf8\");\n return filepath;\n } catch {\n // Non-critical — don't crash the parent tool if query logging fails\n return null;\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;AA6BA,MAAM,cAAc,KAAK,SAAS,EAAE,WAAW,OAAO,UAAU;;;;AAKhE,SAAS,mBAAyB;AAChC,KAAI,CAAC,WAAW,YAAY,CAC1B,WAAU,aAAa,EAAE,WAAW,MAAM,CAAC;;;;;AAO/C,SAAS,UAAU,OAAe,WAA2B;CAC3D,MAAM,OAAO,MACV,aAAa,CACb,QAAQ,gBAAgB,GAAG,CAC3B,QAAQ,QAAQ,IAAI,CACpB,MAAM,GAAG,GAAG;AAGf,QAAO,GAFI,IAAI,KAAK,UAAU,CAAC,aAAa,CAAC,MAAM,GAAG,GAAG,CAE5C,GAAG,KAAK,GADH,UAAU,SAAS,GAAG,CAAC,MAAM,GAAG;;;;;;;;AAUpD,SAAgB,gBAAgB,QAAoC;AAClE,KAAI;AACF,oBAAkB;EAGlB,MAAM,WAAW,KAAK,aADL,UAAU,OAAO,OAAO,OAAO,UAAU,GAAG,MACjB;AAG5C,MAAI,WAAW,SAAS,CAAE,QAAO;AA+BjC,gBAAc,UA7BM;GAClB;GACA,WAAW,OAAO,MAAM,QAAQ,MAAM,OAAM,CAAC;GAC7C,cAAc,IAAI,KAAK,OAAO,UAAU,CAAC,aAAa;GACtD,WAAW,OAAO;GAClB,iBAAiB,OAAO;GACxB;GACA,GAAG,OAAO,YAAY,KAAK,MAAM,QAAQ,EAAE,GAAG;GAC9C;GACD,CAAC,KAAK,KAAK,GAEC;GACX;GACA,YAAY,OAAO;GACnB;GACA,eAAe,OAAO,OAAO;GAC7B,aAAa,IAAI,KAAK,OAAO,UAAU,CAAC,aAAa,CAAC,MAAM,GAAG,GAAG,CAAC,QAAQ,KAAK,IAAI,CAAC;GACrF,gBAAgB,OAAO;GACvB;GACA;GACA;GACA,OAAO;GACP;GACA;GACA;GACA,GAAG,OAAO,YAAY,KAAK,MAAM,OAAO,EAAE,IAAI;GAC9C;GACD,CAAC,KAAK,KAAK,EAEgC,OAAO;AACnD,SAAO;SACD;AAEN,SAAO"}
|