@tekmidian/pai 0.5.6 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ARCHITECTURE.md +72 -1
- package/README.md +107 -3
- package/dist/{auto-route-BG6I_4B1.mjs → auto-route-C-DrW6BL.mjs} +3 -3
- package/dist/{auto-route-BG6I_4B1.mjs.map → auto-route-C-DrW6BL.mjs.map} +1 -1
- package/dist/cli/index.mjs +1897 -1569
- package/dist/cli/index.mjs.map +1 -1
- package/dist/clusters-JIDQW65f.mjs +201 -0
- package/dist/clusters-JIDQW65f.mjs.map +1 -0
- package/dist/{config-Cf92lGX_.mjs → config-BuhHWyOK.mjs} +21 -6
- package/dist/config-BuhHWyOK.mjs.map +1 -0
- package/dist/daemon/index.mjs +12 -9
- package/dist/daemon/index.mjs.map +1 -1
- package/dist/{daemon-D9evGlgR.mjs → daemon-D3hYb5_C.mjs} +670 -219
- package/dist/daemon-D3hYb5_C.mjs.map +1 -0
- package/dist/daemon-mcp/index.mjs +4597 -4
- package/dist/daemon-mcp/index.mjs.map +1 -1
- package/dist/{db-4lSqLFb8.mjs → db-BtuN768f.mjs} +9 -2
- package/dist/db-BtuN768f.mjs.map +1 -0
- package/dist/db-DdUperSl.mjs +110 -0
- package/dist/db-DdUperSl.mjs.map +1 -0
- package/dist/{detect-BU3Nx_2L.mjs → detect-CdaA48EI.mjs} +1 -1
- package/dist/{detect-BU3Nx_2L.mjs.map → detect-CdaA48EI.mjs.map} +1 -1
- package/dist/{detector-Bp-2SM3x.mjs → detector-jGBuYQJM.mjs} +2 -2
- package/dist/{detector-Bp-2SM3x.mjs.map → detector-jGBuYQJM.mjs.map} +1 -1
- package/dist/{factory-Bzcy70G9.mjs → factory-Ygqe_bVZ.mjs} +7 -5
- package/dist/{factory-Bzcy70G9.mjs.map → factory-Ygqe_bVZ.mjs.map} +1 -1
- package/dist/helpers-BEST-4Gx.mjs +420 -0
- package/dist/helpers-BEST-4Gx.mjs.map +1 -0
- package/dist/hooks/capture-all-events.mjs +19 -4
- package/dist/hooks/capture-all-events.mjs.map +4 -4
- package/dist/hooks/capture-session-summary.mjs +38 -0
- package/dist/hooks/capture-session-summary.mjs.map +3 -3
- package/dist/hooks/cleanup-session-files.mjs +6 -12
- package/dist/hooks/cleanup-session-files.mjs.map +4 -4
- package/dist/hooks/context-compression-hook.mjs +105 -111
- package/dist/hooks/context-compression-hook.mjs.map +4 -4
- package/dist/hooks/initialize-session.mjs +26 -17
- package/dist/hooks/initialize-session.mjs.map +4 -4
- package/dist/hooks/inject-observations.mjs +220 -0
- package/dist/hooks/inject-observations.mjs.map +7 -0
- package/dist/hooks/load-core-context.mjs +18 -2
- package/dist/hooks/load-core-context.mjs.map +4 -4
- package/dist/hooks/load-project-context.mjs +102 -97
- package/dist/hooks/load-project-context.mjs.map +4 -4
- package/dist/hooks/observe.mjs +354 -0
- package/dist/hooks/observe.mjs.map +7 -0
- package/dist/hooks/stop-hook.mjs +174 -90
- package/dist/hooks/stop-hook.mjs.map +4 -4
- package/dist/hooks/sync-todo-to-md.mjs +31 -33
- package/dist/hooks/sync-todo-to-md.mjs.map +4 -4
- package/dist/index.d.mts +32 -9
- package/dist/index.d.mts.map +1 -1
- package/dist/index.mjs +6 -9
- package/dist/indexer-D53l5d1U.mjs +1 -0
- package/dist/{indexer-backend-CIMXedqk.mjs → indexer-backend-jcJFsmB4.mjs} +37 -127
- package/dist/indexer-backend-jcJFsmB4.mjs.map +1 -0
- package/dist/{ipc-client-Bjg_a1dc.mjs → ipc-client-CoyUHPod.mjs} +2 -7
- package/dist/{ipc-client-Bjg_a1dc.mjs.map → ipc-client-CoyUHPod.mjs.map} +1 -1
- package/dist/latent-ideas-bTJo6Omd.mjs +191 -0
- package/dist/latent-ideas-bTJo6Omd.mjs.map +1 -0
- package/dist/neighborhood-BYYbEkUJ.mjs +135 -0
- package/dist/neighborhood-BYYbEkUJ.mjs.map +1 -0
- package/dist/note-context-BK24bX8Y.mjs +126 -0
- package/dist/note-context-BK24bX8Y.mjs.map +1 -0
- package/dist/postgres-CKf-EDtS.mjs +846 -0
- package/dist/postgres-CKf-EDtS.mjs.map +1 -0
- package/dist/{reranker-D7bRAHi6.mjs → reranker-CMNZcfVx.mjs} +1 -1
- package/dist/{reranker-D7bRAHi6.mjs.map → reranker-CMNZcfVx.mjs.map} +1 -1
- package/dist/{search-_oHfguA5.mjs → search-DC1qhkKn.mjs} +2 -58
- package/dist/search-DC1qhkKn.mjs.map +1 -0
- package/dist/{sqlite-WWBq7_2C.mjs → sqlite-l-s9xPjY.mjs} +160 -3
- package/dist/sqlite-l-s9xPjY.mjs.map +1 -0
- package/dist/state-C6_vqz7w.mjs +102 -0
- package/dist/state-C6_vqz7w.mjs.map +1 -0
- package/dist/stop-words-BaMEGVeY.mjs +326 -0
- package/dist/stop-words-BaMEGVeY.mjs.map +1 -0
- package/dist/{indexer-CMPOiY1r.mjs → sync-BOsnEj2-.mjs} +14 -216
- package/dist/sync-BOsnEj2-.mjs.map +1 -0
- package/dist/themes-BvYF0W8T.mjs +148 -0
- package/dist/themes-BvYF0W8T.mjs.map +1 -0
- package/dist/{tools-DV_lsiCc.mjs → tools-DcaJlYDN.mjs} +162 -273
- package/dist/tools-DcaJlYDN.mjs.map +1 -0
- package/dist/trace-CRx9lPuc.mjs +137 -0
- package/dist/trace-CRx9lPuc.mjs.map +1 -0
- package/dist/{vault-indexer-DXWs9pDn.mjs → vault-indexer-Bi2cRmn7.mjs} +174 -138
- package/dist/vault-indexer-Bi2cRmn7.mjs.map +1 -0
- package/dist/zettelkasten-cdajbnPr.mjs +708 -0
- package/dist/zettelkasten-cdajbnPr.mjs.map +1 -0
- package/package.json +1 -2
- package/src/hooks/ts/capture-all-events.ts +6 -0
- package/src/hooks/ts/lib/project-utils/index.ts +50 -0
- package/src/hooks/ts/lib/project-utils/notify.ts +75 -0
- package/src/hooks/ts/lib/project-utils/paths.ts +218 -0
- package/src/hooks/ts/lib/project-utils/session-notes.ts +363 -0
- package/src/hooks/ts/lib/project-utils/todo.ts +178 -0
- package/src/hooks/ts/lib/project-utils/tokens.ts +39 -0
- package/src/hooks/ts/lib/project-utils.ts +40 -999
- package/src/hooks/ts/post-tool-use/observe.ts +327 -0
- package/src/hooks/ts/pre-compact/context-compression-hook.ts +6 -0
- package/src/hooks/ts/session-end/capture-session-summary.ts +41 -0
- package/src/hooks/ts/session-start/initialize-session.ts +7 -1
- package/src/hooks/ts/session-start/inject-observations.ts +254 -0
- package/src/hooks/ts/session-start/load-core-context.ts +7 -0
- package/src/hooks/ts/session-start/load-project-context.ts +8 -1
- package/src/hooks/ts/stop/stop-hook.ts +28 -0
- package/templates/claude-md.template.md +7 -74
- package/templates/skills/user/.gitkeep +0 -0
- package/dist/chunker-CbnBe0s0.mjs +0 -191
- package/dist/chunker-CbnBe0s0.mjs.map +0 -1
- package/dist/config-Cf92lGX_.mjs.map +0 -1
- package/dist/daemon-D9evGlgR.mjs.map +0 -1
- package/dist/db-4lSqLFb8.mjs.map +0 -1
- package/dist/db-Dp8VXIMR.mjs +0 -212
- package/dist/db-Dp8VXIMR.mjs.map +0 -1
- package/dist/indexer-CMPOiY1r.mjs.map +0 -1
- package/dist/indexer-backend-CIMXedqk.mjs.map +0 -1
- package/dist/mcp/index.d.mts +0 -1
- package/dist/mcp/index.mjs +0 -500
- package/dist/mcp/index.mjs.map +0 -1
- package/dist/postgres-FXrHDPcE.mjs +0 -358
- package/dist/postgres-FXrHDPcE.mjs.map +0 -1
- package/dist/schemas-BFIgGntb.mjs +0 -3405
- package/dist/schemas-BFIgGntb.mjs.map +0 -1
- package/dist/search-_oHfguA5.mjs.map +0 -1
- package/dist/sqlite-WWBq7_2C.mjs.map +0 -1
- package/dist/tools-DV_lsiCc.mjs.map +0 -1
- package/dist/vault-indexer-DXWs9pDn.mjs.map +0 -1
- package/dist/zettelkasten-e-a4rW_6.mjs +0 -901
- package/dist/zettelkasten-e-a4rW_6.mjs.map +0 -1
- package/templates/README.md +0 -181
- package/templates/skills/createskill-skill.template.md +0 -78
- package/templates/skills/history-system.template.md +0 -371
- package/templates/skills/hook-system.template.md +0 -913
- package/templates/skills/sessions-skill.template.md +0 -102
- package/templates/skills/skill-system.template.md +0 -214
- package/templates/skills/terminal-tabs.template.md +0 -120
- package/templates/templates.md +0 -20
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"postgres-CKf-EDtS.mjs","names":["vault.upsertVaultFile","vault.deleteVaultFile","vault.getVaultFile","vault.getVaultFileByInode","vault.getAllVaultFiles","vault.getRecentVaultFiles","vault.countVaultFiles","vault.countVaultFilesWithPrefix","vault.countVaultFilesAfter","vault.getVaultFilesByPaths","vault.getVaultFilesByPathsAfter","vault.getAllVaultFilePaths","vault.getVaultFilePathsWithPrefix","vault.getVaultFilePathsAfter","vault.upsertVaultAliases","vault.deleteVaultAliases","vault.getVaultAlias","vault.replaceLinksForSources","vault.getLinksFromSource","vault.getLinksToTarget","vault.getVaultLinkGraph","vault.getDeadLinks","vault.getDeadLinksWithLineNumbers","vault.getDeadLinksWithPrefix","vault.getDeadLinksAfter","vault.countVaultLinksWithPrefix","vault.countVaultLinksAfter","vault.getVaultLinksFromPaths","vault.getVaultLinkEdges","vault.getVaultLinkEdgesWithPrefix","vault.getVaultLinkEdgesAfter","vault.upsertVaultHealth","vault.getVaultHealth","vault.getOrphans","vault.getOrphansWithPrefix","vault.getOrphansAfter","vault.getLowConnectivity","vault.getLowConnectivityWithPrefix","vault.getLowConnectivityAfter","vault.upsertNameIndex","vault.replaceNameIndex","vault.resolveVaultName","vault.searchVaultNameIndex"],"sources":["../src/storage/postgres/helpers.ts","../src/storage/postgres/search.ts","../src/storage/postgres/vault.ts","../src/storage/postgres/backend.ts"],"sourcesContent":["/**\n * Internal helper utilities for the Postgres storage backend.\n */\n\nimport { STOP_WORDS } from \"../../utils/stop-words.js\";\n\n/**\n * Convert a Buffer of Float32 LE bytes (as stored in SQLite) to number[].\n */\nexport function bufferToVector(buf: Buffer): number[] {\n const floats: number[] = [];\n for (let i = 0; i < buf.length; i += 4) {\n floats.push(buf.readFloatLE(i));\n }\n return floats;\n}\n\n/**\n * Convert a free-text query to a Postgres tsquery string.\n *\n * Uses OR (|) semantics so that a chunk matching ANY query term is returned,\n * 
ranked by ts_rank (which scores higher when more terms match). AND (&)\n * semantics are too strict for multi-word queries because all terms rarely\n * co-occur in a single chunk.\n *\n * Example: \"Synchrotech interview follow-up Gilles\"\n * → \"synchrotech | interview | follow | gilles\"\n */\nexport function buildPgTsQuery(query: string): string {\n const tokens = query\n .toLowerCase()\n .split(/[\\s\\p{P}]+/u)\n .filter(Boolean)\n .filter((t) => t.length >= 2)\n .filter((t) => !STOP_WORDS.has(t))\n // Sanitize: strip tsquery special characters to prevent syntax errors\n .map((t) => t.replace(/'/g, \"''\").replace(/[&|!():]/g, \"\"))\n .filter(Boolean);\n\n if (tokens.length === 0) {\n const raw = query.replace(/[^a-z0-9]/gi, \" \").trim().split(/\\s+/).filter(Boolean).join(\" | \");\n return raw || \"\";\n }\n\n return tokens.join(\" | \");\n}\n","/**\n * Keyword and semantic search implementations for the Postgres backend.\n * Functions take a `pool` parameter so they can be called from PostgresBackend methods.\n */\n\nimport type { Pool } from \"pg\";\nimport type { SearchResult, SearchOptions } from \"../../memory/search.js\";\nimport { buildPgTsQuery } from \"./helpers.js\";\n\n/**\n * Full-text keyword search using Postgres tsvector/tsquery with 'simple' dictionary.\n */\nexport async function searchKeyword(\n pool: Pool,\n query: string,\n opts?: SearchOptions\n): Promise<SearchResult[]> {\n const maxResults = opts?.maxResults ?? 
10;\n\n const tsQuery = buildPgTsQuery(query);\n if (!tsQuery) return [];\n\n const conditions: string[] = [\"fts_vector @@ to_tsquery('simple', $1)\"];\n const params: (string | number)[] = [tsQuery];\n let paramIdx = 2;\n\n if (opts?.projectIds && opts.projectIds.length > 0) {\n const placeholders = opts.projectIds.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`project_id IN (${placeholders})`);\n params.push(...opts.projectIds);\n }\n\n if (opts?.sources && opts.sources.length > 0) {\n const placeholders = opts.sources.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`source IN (${placeholders})`);\n params.push(...opts.sources);\n }\n\n if (opts?.tiers && opts.tiers.length > 0) {\n const placeholders = opts.tiers.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`tier IN (${placeholders})`);\n params.push(...opts.tiers);\n }\n\n params.push(maxResults);\n const limitParam = `$${paramIdx}`;\n\n const sql = `\n SELECT\n project_id,\n path,\n start_line,\n end_line,\n text AS snippet,\n tier,\n source,\n ts_rank(fts_vector, to_tsquery('simple', $1)) AS rank_score\n FROM pai_chunks\n WHERE ${conditions.join(\" AND \")}\n ORDER BY rank_score DESC\n LIMIT ${limitParam}\n `;\n\n try {\n const result = await pool.query<{\n project_id: number;\n path: string;\n start_line: number;\n end_line: number;\n snippet: string;\n tier: string;\n source: string;\n rank_score: number;\n }>(sql, params);\n\n return result.rows.map((row) => ({\n projectId: row.project_id,\n path: row.path,\n startLine: row.start_line,\n endLine: row.end_line,\n snippet: row.snippet,\n score: row.rank_score,\n tier: row.tier,\n source: row.source,\n }));\n } catch (e) {\n process.stderr.write(`[pai-postgres] searchKeyword error: ${e}\\n`);\n return [];\n }\n}\n\n/**\n * Semantic vector similarity search using pgvector cosine distance (<=>).\n */\nexport async function searchSemantic(\n pool: Pool,\n queryEmbedding: Float32Array,\n opts?: SearchOptions\n): 
Promise<SearchResult[]> {\n const maxResults = opts?.maxResults ?? 10;\n\n const conditions: string[] = [\"embedding IS NOT NULL\"];\n const params: (string | number)[] = [];\n let paramIdx = 1;\n\n const vecStr = \"[\" + Array.from(queryEmbedding).join(\",\") + \"]\";\n params.push(vecStr);\n const vecParam = `$${paramIdx++}`;\n\n if (opts?.projectIds && opts.projectIds.length > 0) {\n const placeholders = opts.projectIds.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`project_id IN (${placeholders})`);\n params.push(...opts.projectIds);\n }\n\n if (opts?.sources && opts.sources.length > 0) {\n const placeholders = opts.sources.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`source IN (${placeholders})`);\n params.push(...opts.sources);\n }\n\n if (opts?.tiers && opts.tiers.length > 0) {\n const placeholders = opts.tiers.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`tier IN (${placeholders})`);\n params.push(...opts.tiers);\n }\n\n params.push(maxResults);\n const limitParam = `$${paramIdx}`;\n\n // <=> is cosine distance; 1 - distance = cosine similarity\n const sql = `\n SELECT\n project_id,\n path,\n start_line,\n end_line,\n text AS snippet,\n tier,\n source,\n 1 - (embedding <=> ${vecParam}::vector) AS cosine_similarity\n FROM pai_chunks\n WHERE ${conditions.join(\" AND \")}\n ORDER BY embedding <=> ${vecParam}::vector\n LIMIT ${limitParam}\n `;\n\n try {\n const result = await pool.query<{\n project_id: number;\n path: string;\n start_line: number;\n end_line: number;\n snippet: string;\n tier: string;\n source: string;\n cosine_similarity: number;\n }>(sql, params);\n\n const minScore = opts?.minScore ?? 
-Infinity;\n\n return result.rows\n .map((row) => ({\n projectId: row.project_id,\n path: row.path,\n startLine: row.start_line,\n endLine: row.end_line,\n snippet: row.snippet,\n score: row.cosine_similarity,\n tier: row.tier,\n source: row.source,\n }))\n .filter((r) => r.score >= minScore);\n } catch (e) {\n process.stderr.write(`[pai-postgres] searchSemantic error: ${e}\\n`);\n return [];\n }\n}\n","/**\n * Vault storage operations for the Postgres backend.\n * All functions take a `pool` parameter — called from PostgresBackend methods.\n */\n\nimport type { Pool } from \"pg\";\nimport type {\n VaultFileRow, VaultAliasRow, VaultLinkRow, VaultHealthRow, VaultNameEntry,\n} from \"../interface.js\";\n\n// ---------------------------------------------------------------------------\n// Vault files\n// ---------------------------------------------------------------------------\n\nexport async function upsertVaultFile(pool: Pool, file: VaultFileRow): Promise<void> {\n await pool.query(\n `INSERT INTO vault_files (vault_path, inode, device, hash, title, indexed_at)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (vault_path) DO UPDATE SET\n inode = EXCLUDED.inode, device = EXCLUDED.device,\n hash = EXCLUDED.hash, title = EXCLUDED.title,\n indexed_at = EXCLUDED.indexed_at`,\n [file.vaultPath, file.inode, file.device, file.hash, file.title, file.indexedAt]\n );\n}\n\nexport async function deleteVaultFile(pool: Pool, vaultPath: string): Promise<void> {\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n await client.query(\"DELETE FROM vault_links WHERE source_path = $1\", [vaultPath]);\n await client.query(\"DELETE FROM vault_health WHERE vault_path = $1\", [vaultPath]);\n await client.query(\"DELETE FROM vault_name_index WHERE vault_path = $1\", [vaultPath]);\n await client.query(\"DELETE FROM vault_aliases WHERE vault_path = $1 OR canonical_path = $1\", [vaultPath]);\n await client.query(\"DELETE FROM vault_files WHERE vault_path = $1\", 
[vaultPath]);\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\ntype VaultFileDbRow = { vault_path: string; inode: string; device: string; hash: string; title: string | null; indexed_at: string };\n\nfunction mapVaultFileRow(row: VaultFileDbRow): VaultFileRow {\n return {\n vaultPath: row.vault_path,\n inode: Number(row.inode),\n device: Number(row.device),\n hash: row.hash,\n title: row.title,\n indexedAt: Number(row.indexed_at),\n };\n}\n\nexport async function getVaultFile(pool: Pool, vaultPath: string): Promise<VaultFileRow | null> {\n const r = await pool.query<VaultFileDbRow>(\n \"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE vault_path = $1\",\n [vaultPath]\n );\n return r.rows.length === 0 ? null : mapVaultFileRow(r.rows[0]);\n}\n\nexport async function getVaultFileByInode(pool: Pool, inode: number, device: number): Promise<VaultFileRow | null> {\n const r = await pool.query<VaultFileDbRow>(\n \"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE inode = $1 AND device = $2 LIMIT 1\",\n [inode, device]\n );\n return r.rows.length === 0 ? 
null : mapVaultFileRow(r.rows[0]);\n}\n\nexport async function getAllVaultFiles(pool: Pool): Promise<VaultFileRow[]> {\n const r = await pool.query<VaultFileDbRow>(\"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files\");\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function getRecentVaultFiles(pool: Pool, sinceMs: number): Promise<VaultFileRow[]> {\n const r = await pool.query<VaultFileDbRow>(\n \"SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function countVaultFiles(pool: Pool): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*)::text AS n FROM vault_files\");\n return parseInt(r.rows[0]?.n ?? \"0\", 10);\n}\n\nexport async function countVaultFilesWithPrefix(pool: Pool, prefix: string): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*) AS n FROM vault_files WHERE vault_path LIKE $1\", [`${prefix}%`]);\n return Number(r.rows[0]?.n ?? 0);\n}\n\nexport async function countVaultFilesAfter(pool: Pool, sinceMs: number): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*) AS n FROM vault_files WHERE indexed_at > $1\", [sinceMs]);\n return Number(r.rows[0]?.n ?? 
0);\n}\n\nexport async function getVaultFilesByPaths(pool: Pool, paths: string[]): Promise<VaultFileRow[]> {\n if (paths.length === 0) return [];\n const placeholders = paths.map((_, i) => `$${i + 1}`).join(\", \");\n const r = await pool.query<VaultFileDbRow>(\n `SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE vault_path IN (${placeholders})`,\n paths\n );\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function getVaultFilesByPathsAfter(pool: Pool, paths: string[], sinceMs: number): Promise<VaultFileRow[]> {\n if (paths.length === 0) return [];\n const placeholders = paths.map((_, i) => `$${i + 1}`).join(\", \");\n const r = await pool.query<VaultFileDbRow>(\n `SELECT vault_path, inode, device, hash, title, indexed_at FROM vault_files WHERE vault_path IN (${placeholders}) AND indexed_at >= $${paths.length + 1} ORDER BY indexed_at ASC`,\n [...paths, sinceMs]\n );\n return r.rows.map(mapVaultFileRow);\n}\n\nexport async function getAllVaultFilePaths(pool: Pool): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\"SELECT vault_path FROM vault_files\");\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getVaultFilePathsWithPrefix(pool: Pool, prefix: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_files WHERE vault_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getVaultFilePathsAfter(pool: Pool, sinceMs: number): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_files WHERE indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(row => row.vault_path);\n}\n\n// ---------------------------------------------------------------------------\n// Vault aliases\n// ---------------------------------------------------------------------------\n\nexport async function upsertVaultAliases(pool: Pool, aliases: 
VaultAliasRow[]): Promise<void> {\n if (aliases.length === 0) return;\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const a of aliases) {\n await client.query(\n `INSERT INTO vault_aliases (vault_path, canonical_path, inode, device)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (vault_path) DO UPDATE SET\n canonical_path = EXCLUDED.canonical_path,\n inode = EXCLUDED.inode, device = EXCLUDED.device`,\n [a.vaultPath, a.canonicalPath, a.inode, a.device]\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport async function deleteVaultAliases(pool: Pool, canonicalPath: string): Promise<void> {\n await pool.query(\"DELETE FROM vault_aliases WHERE canonical_path = $1\", [canonicalPath]);\n}\n\nexport async function getVaultAlias(pool: Pool, vaultPath: string): Promise<{ canonicalPath: string } | null> {\n const r = await pool.query<{ canonical_path: string }>(\n \"SELECT canonical_path FROM vault_aliases WHERE vault_path = $1\",\n [vaultPath]\n );\n return r.rows.length > 0 ? 
{ canonicalPath: r.rows[0].canonical_path } : null;\n}\n\n// ---------------------------------------------------------------------------\n// Vault links\n// ---------------------------------------------------------------------------\n\ntype VaultLinkDbRow = { source_path: string; target_raw: string; target_path: string | null; link_type: string; line_number: number };\n\nfunction mapVaultLinkRow(row: VaultLinkDbRow): VaultLinkRow {\n return {\n sourcePath: row.source_path,\n targetRaw: row.target_raw,\n targetPath: row.target_path,\n linkType: row.link_type,\n lineNumber: row.line_number,\n };\n}\n\nexport async function replaceLinksForSources(pool: Pool, sourcePaths: string[], links: VaultLinkRow[]): Promise<void> {\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n if (sourcePaths.length > 0) {\n await client.query(\n \"DELETE FROM vault_links WHERE source_path = ANY($1::text[])\",\n [sourcePaths]\n );\n }\n for (let i = 0; i < links.length; i += 500) {\n const batch = links.slice(i, i + 500);\n const values: string[] = [];\n const params: (string | number | null)[] = [];\n let idx = 1;\n for (const l of batch) {\n values.push(`($${idx++}, $${idx++}, $${idx++}, $${idx++}, $${idx++})`);\n params.push(l.sourcePath, l.targetRaw, l.targetPath, l.linkType, l.lineNumber);\n }\n await client.query(\n `INSERT INTO vault_links (source_path, target_raw, target_path, link_type, line_number)\n VALUES ${values.join(\", \")}\n ON CONFLICT (source_path, target_raw, line_number) DO UPDATE SET\n target_path = EXCLUDED.target_path, link_type = EXCLUDED.link_type`,\n params\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport async function getLinksFromSource(pool: Pool, sourcePath: string): Promise<VaultLinkRow[]> {\n const r = await pool.query<VaultLinkDbRow>(\n \"SELECT source_path, target_raw, target_path, link_type, line_number FROM vault_links 
WHERE source_path = $1\",\n [sourcePath]\n );\n return r.rows.map(mapVaultLinkRow);\n}\n\nexport async function getLinksToTarget(pool: Pool, targetPath: string): Promise<VaultLinkRow[]> {\n const r = await pool.query<VaultLinkDbRow>(\n \"SELECT source_path, target_raw, target_path, link_type, line_number FROM vault_links WHERE target_path = $1\",\n [targetPath]\n );\n return r.rows.map(mapVaultLinkRow);\n}\n\nexport async function getVaultLinkGraph(pool: Pool): Promise<Array<{ source_path: string; target_path: string }>> {\n const r = await pool.query<{ source_path: string; target_path: string }>(\n \"SELECT source_path, target_path FROM vault_links WHERE target_path IS NOT NULL\"\n );\n return r.rows;\n}\n\nexport async function getDeadLinks(pool: Pool): Promise<Array<{ sourcePath: string; targetRaw: string }>> {\n const r = await pool.query<{ source_path: string; target_raw: string }>(\n \"SELECT source_path, target_raw FROM vault_links WHERE target_path IS NULL\"\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw }));\n}\n\nexport async function getDeadLinksWithLineNumbers(pool: Pool): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> {\n const r = await pool.query<{ source_path: string; target_raw: string; line_number: number }>(\n \"SELECT source_path, target_raw, line_number FROM vault_links WHERE target_path IS NULL\"\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw, lineNumber: row.line_number }));\n}\n\nexport async function getDeadLinksWithPrefix(pool: Pool, prefix: string): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> {\n const r = await pool.query<{ source_path: string; target_raw: string; line_number: number }>(\n \"SELECT source_path, target_raw, line_number FROM vault_links WHERE target_path IS NULL AND source_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, 
targetRaw: row.target_raw, lineNumber: row.line_number }));\n}\n\nexport async function getDeadLinksAfter(pool: Pool, sinceMs: number): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> {\n const r = await pool.query<{ source_path: string; target_raw: string; line_number: number }>(\n \"SELECT source_path, target_raw, line_number FROM vault_links WHERE target_path IS NULL AND source_path IN (SELECT vault_path FROM vault_files WHERE indexed_at > $1)\",\n [sinceMs]\n );\n return r.rows.map(row => ({ sourcePath: row.source_path, targetRaw: row.target_raw, lineNumber: row.line_number }));\n}\n\nexport async function countVaultLinksWithPrefix(pool: Pool, prefix: string): Promise<number> {\n const r = await pool.query<{ n: string }>(\"SELECT COUNT(*) AS n FROM vault_links WHERE source_path LIKE $1\", [`${prefix}%`]);\n return Number(r.rows[0]?.n ?? 0);\n}\n\nexport async function countVaultLinksAfter(pool: Pool, sinceMs: number): Promise<number> {\n const r = await pool.query<{ n: string }>(\n \"SELECT COUNT(*) AS n FROM vault_links WHERE source_path IN (SELECT vault_path FROM vault_files WHERE indexed_at > $1)\",\n [sinceMs]\n );\n return Number(r.rows[0]?.n ?? 
0);\n}\n\nexport async function getVaultLinksFromPaths(pool: Pool, sourcePaths: string[]): Promise<VaultLinkRow[]> {\n if (sourcePaths.length === 0) return [];\n const placeholders = sourcePaths.map((_, i) => `$${i + 1}`).join(\", \");\n const r = await pool.query<VaultLinkDbRow>(\n `SELECT source_path, target_raw, target_path, link_type, line_number FROM vault_links WHERE source_path IN (${placeholders}) AND target_path IS NOT NULL`,\n sourcePaths\n );\n return r.rows.map(mapVaultLinkRow);\n}\n\nexport async function getVaultLinkEdges(pool: Pool): Promise<Array<{ source: string; target: string }>> {\n const r = await pool.query<{ source: string; target: string }>(\n \"SELECT DISTINCT source_path AS source, target_path AS target FROM vault_links WHERE target_path IS NOT NULL\"\n );\n return r.rows;\n}\n\nexport async function getVaultLinkEdgesWithPrefix(pool: Pool, prefix: string): Promise<Array<{ source: string; target: string }>> {\n const r = await pool.query<{ source: string; target: string }>(\n \"SELECT DISTINCT source_path AS source, target_path AS target FROM vault_links WHERE target_path IS NOT NULL AND source_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows;\n}\n\nexport async function getVaultLinkEdgesAfter(pool: Pool, sinceMs: number): Promise<Array<{ source: string; target: string }>> {\n const r = await pool.query<{ source: string; target: string }>(\n \"SELECT DISTINCT source_path AS source, target_path AS target FROM vault_links WHERE target_path IS NOT NULL AND source_path IN (SELECT vault_path FROM vault_files WHERE indexed_at > $1)\",\n [sinceMs]\n );\n return r.rows;\n}\n\n// ---------------------------------------------------------------------------\n// Vault health\n// ---------------------------------------------------------------------------\n\ntype VaultHealthDbRow = { vault_path: string; inbound_count: number; outbound_count: number; dead_link_count: number; is_orphan: number; computed_at: string };\n\nfunction mapVaultHealthRow(row: 
VaultHealthDbRow): VaultHealthRow {\n return {\n vaultPath: row.vault_path,\n inboundCount: row.inbound_count,\n outboundCount: row.outbound_count,\n deadLinkCount: row.dead_link_count,\n isOrphan: row.is_orphan === 1,\n computedAt: Number(row.computed_at),\n };\n}\n\nexport async function upsertVaultHealth(pool: Pool, rows: VaultHealthRow[]): Promise<void> {\n if (rows.length === 0) return;\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const h of rows) {\n await client.query(\n `INSERT INTO vault_health (vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (vault_path) DO UPDATE SET\n inbound_count = EXCLUDED.inbound_count,\n outbound_count = EXCLUDED.outbound_count,\n dead_link_count = EXCLUDED.dead_link_count,\n is_orphan = EXCLUDED.is_orphan,\n computed_at = EXCLUDED.computed_at`,\n [h.vaultPath, h.inboundCount, h.outboundCount, h.deadLinkCount, h.isOrphan ? 1 : 0, h.computedAt]\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport async function getVaultHealth(pool: Pool, vaultPath: string): Promise<VaultHealthRow | null> {\n const r = await pool.query<VaultHealthDbRow>(\n \"SELECT vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at FROM vault_health WHERE vault_path = $1\",\n [vaultPath]\n );\n return r.rows.length === 0 ? 
null : mapVaultHealthRow(r.rows[0]);\n}\n\nexport async function getOrphans(pool: Pool): Promise<VaultHealthRow[]> {\n const r = await pool.query<VaultHealthDbRow>(\n \"SELECT vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at FROM vault_health WHERE is_orphan = 1\"\n );\n return r.rows.map(row => ({ ...mapVaultHealthRow(row), isOrphan: true }));\n}\n\nexport async function getOrphansWithPrefix(pool: Pool, prefix: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_health WHERE is_orphan = 1 AND vault_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getOrphansAfter(pool: Pool, sinceMs: number): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vh.vault_path FROM vault_health vh JOIN vault_files vf ON vh.vault_path = vf.vault_path WHERE vh.is_orphan = 1 AND vf.indexed_at > $1\",\n [sinceMs]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getLowConnectivity(pool: Pool): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_health WHERE inbound_count + outbound_count <= 1\"\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getLowConnectivityWithPrefix(pool: Pool, prefix: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_health WHERE inbound_count + outbound_count <= 1 AND vault_path LIKE $1\",\n [`${prefix}%`]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function getLowConnectivityAfter(pool: Pool, sinceMs: number): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vh.vault_path FROM vault_health vh JOIN vault_files vf ON vh.vault_path = vf.vault_path WHERE vh.inbound_count + vh.outbound_count <= 1 AND vf.indexed_at > $1\",\n [sinceMs]\n );\n return 
r.rows.map(row => row.vault_path);\n}\n\n// ---------------------------------------------------------------------------\n// Vault name index\n// ---------------------------------------------------------------------------\n\nexport async function upsertNameIndex(pool: Pool, entries: VaultNameEntry[]): Promise<void> {\n if (entries.length === 0) return;\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const e of entries) {\n await client.query(\n `INSERT INTO vault_name_index (name, vault_path)\n VALUES ($1, $2) ON CONFLICT (name, vault_path) DO NOTHING`,\n [e.name, e.vaultPath]\n );\n }\n await client.query(\"COMMIT\");\n } catch (e_) {\n await client.query(\"ROLLBACK\");\n throw e_;\n } finally {\n client.release();\n }\n}\n\nexport async function replaceNameIndex(pool: Pool, entries: VaultNameEntry[]): Promise<void> {\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n await client.query(\"DELETE FROM vault_name_index\");\n for (let i = 0; i < entries.length; i += 500) {\n const batch = entries.slice(i, i + 500);\n const values: string[] = [];\n const params: string[] = [];\n let idx = 1;\n for (const e of batch) {\n values.push(`($${idx++}, $${idx++})`);\n params.push(e.name, e.vaultPath);\n }\n await client.query(\n `INSERT INTO vault_name_index (name, vault_path) VALUES ${values.join(\", \")}`,\n params\n );\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n}\n\nexport async function resolveVaultName(pool: Pool, name: string): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT vault_path FROM vault_name_index WHERE name = $1\",\n [name]\n );\n return r.rows.map(row => row.vault_path);\n}\n\nexport async function searchVaultNameIndex(pool: Pool, query: string, limit = 100): Promise<string[]> {\n const r = await pool.query<{ vault_path: string }>(\n \"SELECT DISTINCT 
vault_path FROM vault_name_index WHERE lower(name) LIKE lower($1) LIMIT $2\",\n [`%${query}%`, limit]\n );\n return r.rows.map(row => row.vault_path);\n}\n","/**\n * PostgresBackend — implements StorageBackend using PostgreSQL + pgvector.\n *\n * Vector similarity: pgvector's <=> cosine distance operator\n * Full-text search: PostgreSQL tsvector/tsquery (replaces SQLite FTS5)\n * Connection pooling: node-postgres Pool\n *\n * Schema is auto-initialized on first connection if tables don't exist.\n * Per-user database isolation: each macOS user gets their own database (pai_<username>).\n */\n\nimport pg from \"pg\";\nimport type { Pool, PoolClient } from \"pg\";\nimport { readFileSync } from \"node:fs\";\nimport { join, dirname } from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\nimport type {\n StorageBackend, ChunkRow, FileRow, FederationStats,\n VaultFileRow, VaultAliasRow, VaultLinkRow, VaultHealthRow, VaultNameEntry,\n} from \"../interface.js\";\nimport type { SearchResult, SearchOptions } from \"../../memory/search.js\";\nimport type { PostgresConfig } from \"./config.js\";\nimport { bufferToVector } from \"./helpers.js\";\nimport { searchKeyword, searchSemantic } from \"./search.js\";\nimport * as vault from \"./vault.js\";\n\nconst { Pool: PgPool } = pg;\n\nexport class PostgresBackend implements StorageBackend {\n readonly backendType = \"postgres\" as const;\n\n private pool: Pool;\n\n /**\n * Ensure the per-user database exists and has the required schema.\n * Connects to the default 'postgres' database to CREATE DATABASE if needed,\n * then connects to the target database to apply init.sql schema.\n * Safe to call multiple times (fully idempotent).\n */\n static async ensureDatabase(config: PostgresConfig): Promise<void> {\n const connStr =\n config.connectionString ??\n `postgresql://${config.user ?? \"pai\"}:${config.password ?? \"pai\"}@${config.host ?? \"localhost\"}:${config.port ?? 5432}/${config.database ?? 
\"pai\"}`;\n const url = new URL(connStr);\n const targetDb = url.pathname.slice(1);\n\n const adminUrl = new URL(connStr);\n adminUrl.pathname = \"/postgres\";\n const adminPool = new PgPool({\n connectionString: adminUrl.toString(),\n max: 1,\n connectionTimeoutMillis: 5000,\n });\n\n try {\n const check = await adminPool.query(\n \"SELECT 1 FROM pg_database WHERE datname = $1\",\n [targetDb]\n );\n if (check.rowCount === 0) {\n await adminPool.query(`CREATE DATABASE \"${targetDb}\"`);\n process.stderr.write(`[pai-postgres] Created database: ${targetDb}\\n`);\n }\n } finally {\n await adminPool.end();\n }\n\n const targetPool = new PgPool({\n connectionString: connStr,\n max: 1,\n connectionTimeoutMillis: 5000,\n });\n\n try {\n const tableCheck = await targetPool.query(\n \"SELECT 1 FROM information_schema.tables WHERE table_name = 'pai_chunks'\"\n );\n if (tableCheck.rowCount === 0) {\n const __dirname = dirname(fileURLToPath(import.meta.url));\n const initSqlPath = join(__dirname, \"../../docker/init.sql\");\n let initSql: string;\n try {\n initSql = readFileSync(initSqlPath, \"utf-8\");\n } catch {\n const altPath = join(__dirname, \"../docker/init.sql\");\n initSql = readFileSync(altPath, \"utf-8\");\n }\n await targetPool.query(initSql);\n process.stderr.write(`[pai-postgres] Applied schema to database: ${targetDb}\\n`);\n }\n } finally {\n await targetPool.end();\n }\n }\n\n constructor(config: PostgresConfig) {\n const connStr =\n config.connectionString ??\n `postgresql://${config.user ?? \"pai\"}:${config.password ?? \"pai\"}@${config.host ?? \"localhost\"}:${config.port ?? 5432}/${config.database ?? \"pai\"}`;\n\n this.pool = new PgPool({\n connectionString: connStr,\n max: config.maxConnections ?? 5,\n connectionTimeoutMillis: config.connectionTimeoutMs ?? 
5000,\n idleTimeoutMillis: 30_000,\n });\n\n this.pool.on(\"error\", (err) => {\n process.stderr.write(`[pai-postgres] Pool error: ${err.message}\\n`);\n });\n }\n\n // -------------------------------------------------------------------------\n // Lifecycle\n // -------------------------------------------------------------------------\n\n async close(): Promise<void> {\n await this.pool.end();\n }\n\n /**\n * Expose the underlying pg.Pool for callers that need direct query access\n * (e.g. the daemon's observation IPC methods).\n */\n getPool(): Pool {\n return this.pool;\n }\n\n async getStats(): Promise<FederationStats> {\n const client = await this.pool.connect();\n try {\n const filesResult = await client.query<{ n: string }>(\n \"SELECT COUNT(*)::text AS n FROM pai_files\"\n );\n const chunksResult = await client.query<{ n: string }>(\n \"SELECT COUNT(*)::text AS n FROM pai_chunks\"\n );\n return {\n files: parseInt(filesResult.rows[0]?.n ?? \"0\", 10),\n chunks: parseInt(chunksResult.rows[0]?.n ?? \"0\", 10),\n };\n } finally {\n client.release();\n }\n }\n\n /**\n * Test the connection by running a trivial query.\n * Returns null on success, error message on failure.\n */\n async testConnection(): Promise<string | null> {\n let client: PoolClient | null = null;\n try {\n client = await this.pool.connect();\n await client.query(\"SELECT 1\");\n return null;\n } catch (e) {\n return e instanceof Error ? 
e.message : String(e);\n } finally {\n client?.release();\n }\n }\n\n // -------------------------------------------------------------------------\n // File tracking\n // -------------------------------------------------------------------------\n\n async getFileHash(projectId: number, path: string): Promise<string | undefined> {\n const result = await this.pool.query<{ hash: string }>(\n \"SELECT hash FROM pai_files WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n return result.rows[0]?.hash;\n }\n\n async upsertFile(file: FileRow): Promise<void> {\n await this.pool.query(\n `INSERT INTO pai_files (project_id, path, source, tier, hash, mtime, size)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n ON CONFLICT (project_id, path) DO UPDATE SET\n source = EXCLUDED.source,\n tier = EXCLUDED.tier,\n hash = EXCLUDED.hash,\n mtime = EXCLUDED.mtime,\n size = EXCLUDED.size`,\n [file.projectId, file.path, file.source, file.tier, file.hash, file.mtime, file.size]\n );\n }\n\n // -------------------------------------------------------------------------\n // Chunk management\n // -------------------------------------------------------------------------\n\n async getChunkIds(projectId: number, path: string): Promise<string[]> {\n const result = await this.pool.query<{ id: string }>(\n \"SELECT id FROM pai_chunks WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n return result.rows.map((r) => r.id);\n }\n\n async deleteChunksForFile(projectId: number, path: string): Promise<void> {\n await this.pool.query(\n \"DELETE FROM pai_chunks WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n }\n\n async insertChunks(chunks: ChunkRow[]): Promise<void> {\n if (chunks.length === 0) return;\n\n const client = await this.pool.connect();\n try {\n await client.query(\"BEGIN\");\n\n for (const c of chunks) {\n const safeText = c.text.replace(/\\0/g, \"\");\n\n await client.query(\n `INSERT INTO pai_chunks\n (id, project_id, source, tier, path, start_line, 
end_line, hash, text, updated_at, fts_vector)\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10,\n to_tsvector('simple', $9))\n ON CONFLICT (id) DO UPDATE SET\n project_id = EXCLUDED.project_id,\n source = EXCLUDED.source,\n tier = EXCLUDED.tier,\n path = EXCLUDED.path,\n start_line = EXCLUDED.start_line,\n end_line = EXCLUDED.end_line,\n hash = EXCLUDED.hash,\n text = EXCLUDED.text,\n updated_at = EXCLUDED.updated_at,\n fts_vector = EXCLUDED.fts_vector`,\n [\n c.id, c.projectId, c.source, c.tier, c.path,\n c.startLine, c.endLine, c.hash, safeText, c.updatedAt,\n ]\n );\n }\n\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n }\n\n async getDistinctChunkPaths(projectId: number): Promise<string[]> {\n const result = await this.pool.query<{ path: string }>(\n \"SELECT DISTINCT path FROM pai_chunks WHERE project_id = $1\",\n [projectId]\n );\n return result.rows.map((r) => r.path);\n }\n\n async deletePaths(projectId: number, paths: string[]): Promise<void> {\n if (paths.length === 0) return;\n const client = await this.pool.connect();\n try {\n await client.query(\"BEGIN\");\n for (const path of paths) {\n await client.query(\"DELETE FROM pai_chunks WHERE project_id = $1 AND path = $2\", [projectId, path]);\n await client.query(\"DELETE FROM pai_files WHERE project_id = $1 AND path = $2\", [projectId, path]);\n }\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n }\n\n async getUnembeddedChunkIds(projectId?: number): Promise<Array<{ id: string; text: string; project_id: number; path: string }>> {\n if (projectId !== undefined) {\n const result = await this.pool.query<{ id: string; text: string; project_id: number; path: string }>(\n \"SELECT id, text, project_id, path FROM pai_chunks WHERE embedding IS NULL AND project_id = $1 ORDER BY id\",\n [projectId]\n );\n return result.rows;\n }\n const 
result = await this.pool.query<{ id: string; text: string; project_id: number; path: string }>(\n \"SELECT id, text, project_id, path FROM pai_chunks WHERE embedding IS NULL ORDER BY id\"\n );\n return result.rows;\n }\n\n async updateEmbedding(chunkId: string, embedding: Buffer): Promise<void> {\n const vec = bufferToVector(embedding);\n const vecStr = \"[\" + vec.join(\",\") + \"]\";\n await this.pool.query(\n \"UPDATE pai_chunks SET embedding = $1::vector WHERE id = $2\",\n [vecStr, chunkId]\n );\n }\n\n // -------------------------------------------------------------------------\n // Search\n // -------------------------------------------------------------------------\n\n async searchKeyword(query: string, opts?: SearchOptions): Promise<SearchResult[]> {\n return searchKeyword(this.pool, query, opts);\n }\n\n async searchSemantic(queryEmbedding: Float32Array, opts?: SearchOptions): Promise<SearchResult[]> {\n return searchSemantic(this.pool, queryEmbedding, opts);\n }\n\n // -------------------------------------------------------------------------\n // Vault operations — delegated to vault.ts\n // -------------------------------------------------------------------------\n\n async upsertVaultFile(file: VaultFileRow): Promise<void> { return vault.upsertVaultFile(this.pool, file); }\n async deleteVaultFile(vaultPath: string): Promise<void> { return vault.deleteVaultFile(this.pool, vaultPath); }\n async getVaultFile(vaultPath: string): Promise<VaultFileRow | null> { return vault.getVaultFile(this.pool, vaultPath); }\n async getVaultFileByInode(inode: number, device: number): Promise<VaultFileRow | null> { return vault.getVaultFileByInode(this.pool, inode, device); }\n async getAllVaultFiles(): Promise<VaultFileRow[]> { return vault.getAllVaultFiles(this.pool); }\n async getRecentVaultFiles(sinceMs: number): Promise<VaultFileRow[]> { return vault.getRecentVaultFiles(this.pool, sinceMs); }\n async countVaultFiles(): Promise<number> { return 
vault.countVaultFiles(this.pool); }\n async countVaultFilesWithPrefix(prefix: string): Promise<number> { return vault.countVaultFilesWithPrefix(this.pool, prefix); }\n async countVaultFilesAfter(sinceMs: number): Promise<number> { return vault.countVaultFilesAfter(this.pool, sinceMs); }\n async getVaultFilesByPaths(paths: string[]): Promise<VaultFileRow[]> { return vault.getVaultFilesByPaths(this.pool, paths); }\n async getVaultFilesByPathsAfter(paths: string[], sinceMs: number): Promise<VaultFileRow[]> { return vault.getVaultFilesByPathsAfter(this.pool, paths, sinceMs); }\n async getAllVaultFilePaths(): Promise<string[]> { return vault.getAllVaultFilePaths(this.pool); }\n async getVaultFilePathsWithPrefix(prefix: string): Promise<string[]> { return vault.getVaultFilePathsWithPrefix(this.pool, prefix); }\n async getVaultFilePathsAfter(sinceMs: number): Promise<string[]> { return vault.getVaultFilePathsAfter(this.pool, sinceMs); }\n\n async upsertVaultAliases(aliases: VaultAliasRow[]): Promise<void> { return vault.upsertVaultAliases(this.pool, aliases); }\n async deleteVaultAliases(canonicalPath: string): Promise<void> { return vault.deleteVaultAliases(this.pool, canonicalPath); }\n async getVaultAlias(vaultPath: string): Promise<{ canonicalPath: string } | null> { return vault.getVaultAlias(this.pool, vaultPath); }\n\n async replaceLinksForSources(sourcePaths: string[], links: VaultLinkRow[]): Promise<void> { return vault.replaceLinksForSources(this.pool, sourcePaths, links); }\n async getLinksFromSource(sourcePath: string): Promise<VaultLinkRow[]> { return vault.getLinksFromSource(this.pool, sourcePath); }\n async getLinksToTarget(targetPath: string): Promise<VaultLinkRow[]> { return vault.getLinksToTarget(this.pool, targetPath); }\n async getVaultLinkGraph(): Promise<Array<{ source_path: string; target_path: string }>> { return vault.getVaultLinkGraph(this.pool); }\n async getDeadLinks(): Promise<Array<{ sourcePath: string; targetRaw: string }>> { return 
vault.getDeadLinks(this.pool); }\n async getDeadLinksWithLineNumbers(): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> { return vault.getDeadLinksWithLineNumbers(this.pool); }\n async getDeadLinksWithPrefix(prefix: string): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> { return vault.getDeadLinksWithPrefix(this.pool, prefix); }\n async getDeadLinksAfter(sinceMs: number): Promise<Array<{ sourcePath: string; targetRaw: string; lineNumber: number }>> { return vault.getDeadLinksAfter(this.pool, sinceMs); }\n async countVaultLinksWithPrefix(prefix: string): Promise<number> { return vault.countVaultLinksWithPrefix(this.pool, prefix); }\n async countVaultLinksAfter(sinceMs: number): Promise<number> { return vault.countVaultLinksAfter(this.pool, sinceMs); }\n async getVaultLinksFromPaths(sourcePaths: string[]): Promise<VaultLinkRow[]> { return vault.getVaultLinksFromPaths(this.pool, sourcePaths); }\n async getVaultLinkEdges(): Promise<Array<{ source: string; target: string }>> { return vault.getVaultLinkEdges(this.pool); }\n async getVaultLinkEdgesWithPrefix(prefix: string): Promise<Array<{ source: string; target: string }>> { return vault.getVaultLinkEdgesWithPrefix(this.pool, prefix); }\n async getVaultLinkEdgesAfter(sinceMs: number): Promise<Array<{ source: string; target: string }>> { return vault.getVaultLinkEdgesAfter(this.pool, sinceMs); }\n\n async upsertVaultHealth(rows: VaultHealthRow[]): Promise<void> { return vault.upsertVaultHealth(this.pool, rows); }\n async getVaultHealth(vaultPath: string): Promise<VaultHealthRow | null> { return vault.getVaultHealth(this.pool, vaultPath); }\n async getOrphans(): Promise<VaultHealthRow[]> { return vault.getOrphans(this.pool); }\n async getOrphansWithPrefix(prefix: string): Promise<string[]> { return vault.getOrphansWithPrefix(this.pool, prefix); }\n async getOrphansAfter(sinceMs: number): Promise<string[]> { return vault.getOrphansAfter(this.pool, sinceMs); }\n 
async getLowConnectivity(): Promise<string[]> { return vault.getLowConnectivity(this.pool); }\n async getLowConnectivityWithPrefix(prefix: string): Promise<string[]> { return vault.getLowConnectivityWithPrefix(this.pool, prefix); }\n async getLowConnectivityAfter(sinceMs: number): Promise<string[]> { return vault.getLowConnectivityAfter(this.pool, sinceMs); }\n\n async upsertNameIndex(entries: VaultNameEntry[]): Promise<void> { return vault.upsertNameIndex(this.pool, entries); }\n async replaceNameIndex(entries: VaultNameEntry[]): Promise<void> { return vault.replaceNameIndex(this.pool, entries); }\n async resolveVaultName(name: string): Promise<string[]> { return vault.resolveVaultName(this.pool, name); }\n async searchVaultNameIndex(query: string, limit?: number): Promise<string[]> { return vault.searchVaultNameIndex(this.pool, query, limit); }\n\n // Legacy memory_chunks methods (used by graph and zettelkasten modules)\n async getChunksWithEmbeddings(projectId: number, limit: number): Promise<Array<{ path: string; text: string; embedding: Buffer }>> {\n const r = await this.pool.query<{ path: string; text: string; embedding: Buffer }>(\n `SELECT path, text, embedding FROM memory_chunks WHERE project_id = $1 AND embedding IS NOT NULL ORDER BY path, start_line LIMIT $2`,\n [projectId, limit]\n );\n return r.rows;\n }\n\n async getChunksForPath(projectId: number, path: string, limit = 20): Promise<Array<{ text: string; embedding: Buffer | null }>> {\n const r = await this.pool.query<{ text: string; embedding: Buffer | null }>(\n `SELECT text, embedding FROM memory_chunks WHERE project_id = $1 AND path = $2 AND embedding IS NOT NULL ORDER BY start_line LIMIT $3`,\n [projectId, path, limit]\n );\n return r.rows;\n }\n\n async searchChunksByText(projectId: number, query: string, limit: number): Promise<Array<{ path: string; text: string }>> {\n const r = await this.pool.query<{ path: string; text: string }>(\n `SELECT DISTINCT path, text FROM memory_chunks WHERE 
project_id = $1 AND lower(text) LIKE lower($2) LIMIT $3`,\n [projectId, `%${query}%`, limit]\n );\n return r.rows;\n }\n}\n"],"mappings":";;;;;;;;;;;;;AASA,SAAgB,eAAe,KAAuB;CACpD,MAAM,SAAmB,EAAE;AAC3B,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK,EACnC,QAAO,KAAK,IAAI,YAAY,EAAE,CAAC;AAEjC,QAAO;;;;;;;;;;;;;AAcT,SAAgB,eAAe,OAAuB;CACpD,MAAM,SAAS,MACZ,aAAa,CACb,MAAM,cAAc,CACpB,OAAO,QAAQ,CACf,QAAQ,MAAM,EAAE,UAAU,EAAE,CAC5B,QAAQ,MAAM,CAAC,WAAW,IAAI,EAAE,CAAC,CAEjC,KAAK,MAAM,EAAE,QAAQ,MAAM,KAAK,CAAC,QAAQ,aAAa,GAAG,CAAC,CAC1D,OAAO,QAAQ;AAElB,KAAI,OAAO,WAAW,EAEpB,QADY,MAAM,QAAQ,eAAe,IAAI,CAAC,MAAM,CAAC,MAAM,MAAM,CAAC,OAAO,QAAQ,CAAC,KAAK,MAAM,IAC/E;AAGhB,QAAO,OAAO,KAAK,MAAM;;;;;;;;AChC3B,eAAsB,cACpB,MACA,OACA,MACyB;CACzB,MAAM,aAAa,MAAM,cAAc;CAEvC,MAAM,UAAU,eAAe,MAAM;AACrC,KAAI,CAAC,QAAS,QAAO,EAAE;CAEvB,MAAM,aAAuB,CAAC,yCAAyC;CACvE,MAAM,SAA8B,CAAC,QAAQ;CAC7C,IAAI,WAAW;AAEf,KAAI,MAAM,cAAc,KAAK,WAAW,SAAS,GAAG;EAClD,MAAM,eAAe,KAAK,WAAW,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AAC3E,aAAW,KAAK,kBAAkB,aAAa,GAAG;AAClD,SAAO,KAAK,GAAG,KAAK,WAAW;;AAGjC,KAAI,MAAM,WAAW,KAAK,QAAQ,SAAS,GAAG;EAC5C,MAAM,eAAe,KAAK,QAAQ,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACxE,aAAW,KAAK,cAAc,aAAa,GAAG;AAC9C,SAAO,KAAK,GAAG,KAAK,QAAQ;;AAG9B,KAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;EACxC,MAAM,eAAe,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACtE,aAAW,KAAK,YAAY,aAAa,GAAG;AAC5C,SAAO,KAAK,GAAG,KAAK,MAAM;;AAG5B,QAAO,KAAK,WAAW;CACvB,MAAM,aAAa,IAAI;CAEvB,MAAM,MAAM;;;;;;;;;;;YAWF,WAAW,KAAK,QAAQ,CAAC;;YAEzB,WAAW;;AAGrB,KAAI;AAYF,UAXe,MAAM,KAAK,MASvB,KAAK,OAAO,EAED,KAAK,KAAK,SAAS;GAC/B,WAAW,IAAI;GACf,MAAM,IAAI;GACV,WAAW,IAAI;GACf,SAAS,IAAI;GACb,SAAS,IAAI;GACb,OAAO,IAAI;GACX,MAAM,IAAI;GACV,QAAQ,IAAI;GACb,EAAE;UACI,GAAG;AACV,UAAQ,OAAO,MAAM,uCAAuC,EAAE,IAAI;AAClE,SAAO,EAAE;;;;;;AAOb,eAAsB,eACpB,MACA,gBACA,MACyB;CACzB,MAAM,aAAa,MAAM,cAAc;CAEvC,MAAM,aAAuB,CAAC,wBAAwB;CACtD,MAAM,SAA8B,EAAE;CACtC,IAAI,WAAW;CAEf,MAAM,SAAS,MAAM,MAAM,KAAK,eAAe,CAAC,KAAK,IAAI,GAAG;AAC5D,QAAO,KAAK,OAAO;CACnB,MAAM,WAAW,IAAI;AAErB,KAAI,MAAM,cAAc,KAAK,WAAW,SAAS,GAAG;EAClD,MAAM,eAAe,KAA
K,WAAW,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AAC3E,aAAW,KAAK,kBAAkB,aAAa,GAAG;AAClD,SAAO,KAAK,GAAG,KAAK,WAAW;;AAGjC,KAAI,MAAM,WAAW,KAAK,QAAQ,SAAS,GAAG;EAC5C,MAAM,eAAe,KAAK,QAAQ,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACxE,aAAW,KAAK,cAAc,aAAa,GAAG;AAC9C,SAAO,KAAK,GAAG,KAAK,QAAQ;;AAG9B,KAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;EACxC,MAAM,eAAe,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACtE,aAAW,KAAK,YAAY,aAAa,GAAG;AAC5C,SAAO,KAAK,GAAG,KAAK,MAAM;;AAG5B,QAAO,KAAK,WAAW;CACvB,MAAM,aAAa,IAAI;CAGvB,MAAM,MAAM;;;;;;;;;2BASa,SAAS;;YAExB,WAAW,KAAK,QAAQ,CAAC;6BACR,SAAS;YAC1B,WAAW;;AAGrB,KAAI;EACF,MAAM,SAAS,MAAM,KAAK,MASvB,KAAK,OAAO;EAEf,MAAM,WAAW,MAAM,YAAY;AAEnC,SAAO,OAAO,KACX,KAAK,SAAS;GACb,WAAW,IAAI;GACf,MAAM,IAAI;GACV,WAAW,IAAI;GACf,SAAS,IAAI;GACb,SAAS,IAAI;GACb,OAAO,IAAI;GACX,MAAM,IAAI;GACV,QAAQ,IAAI;GACb,EAAE,CACF,QAAQ,MAAM,EAAE,SAAS,SAAS;UAC9B,GAAG;AACV,UAAQ,OAAO,MAAM,wCAAwC,EAAE,IAAI;AACnE,SAAO,EAAE;;;;;;ACjKb,eAAsB,gBAAgB,MAAY,MAAmC;AACnF,OAAM,KAAK,MACT;;;;;0CAMA;EAAC,KAAK;EAAW,KAAK;EAAO,KAAK;EAAQ,KAAK;EAAM,KAAK;EAAO,KAAK;EAAU,CACjF;;AAGH,eAAsB,gBAAgB,MAAY,WAAkC;CAClF,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,QAAM,OAAO,MAAM,kDAAkD,CAAC,UAAU,CAAC;AACjF,QAAM,OAAO,MAAM,kDAAkD,CAAC,UAAU,CAAC;AACjF,QAAM,OAAO,MAAM,sDAAsD,CAAC,UAAU,CAAC;AACrF,QAAM,OAAO,MAAM,0EAA0E,CAAC,UAAU,CAAC;AACzG,QAAM,OAAO,MAAM,iDAAiD,CAAC,UAAU,CAAC;AAChF,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAMpB,SAAS,gBAAgB,KAAmC;AAC1D,QAAO;EACL,WAAW,IAAI;EACf,OAAO,OAAO,IAAI,MAAM;EACxB,QAAQ,OAAO,IAAI,OAAO;EAC1B,MAAM,IAAI;EACV,OAAO,IAAI;EACX,WAAW,OAAO,IAAI,WAAW;EAClC;;AAGH,eAAsB,aAAa,MAAY,WAAiD;CAC9F,MAAM,IAAI,MAAM,KAAK,MACnB,oGACA,CAAC,UAAU,CACZ;AACD,QAAO,EAAE,KAAK,WAAW,IAAI,OAAO,gBAAgB,EAAE,KAAK,GAAG;;AAGhE,eAAsB,oBAAoB,MAAY,OAAe,QAA8C;CACjH,MAAM,IAAI,MAAM,KAAK,MACnB,uHACA,CAAC,OAAO,OAAO,CAChB;AACD,QAAO,EAAE,KAAK,WAAW,IAAI,OAAO,gBAAgB,EAAE,KAAK,GAAG;;AAGhE,eAAsB,iBAAiB,MAAqC;AAE1E,SADU,MAAM,KAAK,MAAsB,6EAA6E,EAC/G,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,oBAAoB,MAAY,SAA0C;AAK9F,SA
JU,MAAM,KAAK,MACnB,oGACA,CAAC,QAAQ,CACV,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,gBAAgB,MAA6B;CACjE,MAAM,IAAI,MAAM,KAAK,MAAqB,8CAA8C;AACxF,QAAO,SAAS,EAAE,KAAK,IAAI,KAAK,KAAK,GAAG;;AAG1C,eAAsB,0BAA0B,MAAY,QAAiC;CAC3F,MAAM,IAAI,MAAM,KAAK,MAAqB,kEAAkE,CAAC,GAAG,OAAO,GAAG,CAAC;AAC3H,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,qBAAqB,MAAY,SAAkC;CACvF,MAAM,IAAI,MAAM,KAAK,MAAqB,+DAA+D,CAAC,QAAQ,CAAC;AACnH,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,qBAAqB,MAAY,OAA0C;AAC/F,KAAI,MAAM,WAAW,EAAG,QAAO,EAAE;CACjC,MAAM,eAAe,MAAM,KAAK,GAAG,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK;AAKhE,SAJU,MAAM,KAAK,MACnB,mGAAmG,aAAa,IAChH,MACD,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,0BAA0B,MAAY,OAAiB,SAA0C;AACrH,KAAI,MAAM,WAAW,EAAG,QAAO,EAAE;CACjC,MAAM,eAAe,MAAM,KAAK,GAAG,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK;AAKhE,SAJU,MAAM,KAAK,MACnB,mGAAmG,aAAa,uBAAuB,MAAM,SAAS,EAAE,2BACxJ,CAAC,GAAG,OAAO,QAAQ,CACpB,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,qBAAqB,MAA+B;AAExE,SADU,MAAM,KAAK,MAA8B,qCAAqC,EAC/E,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,4BAA4B,MAAY,QAAmC;AAK/F,SAJU,MAAM,KAAK,MACnB,+DACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,uBAAuB,MAAY,SAAoC;AAK3F,SAJU,MAAM,KAAK,MACnB,4DACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAO1C,eAAsB,mBAAmB,MAAY,SAAyC;AAC5F,KAAI,QAAQ,WAAW,EAAG;CAC1B,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,OAAK,MAAM,KAAK,QACd,OAAM,OAAO,MACX;;;;8DAKA;GAAC,EAAE;GAAW,EAAE;GAAe,EAAE;GAAO,EAAE;GAAO,CAClD;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,mBAAmB,MAAY,eAAsC;AACzF,OAAM,KAAK,MAAM,uDAAuD,CAAC,cAAc,CAAC;;AAG1F,eAAsB,cAAc,MAAY,WAA8D;CAC5G,MAAM,IAAI,MAAM,KAAK,MACnB,kEACA,CAAC,UAAU,CACZ;AACD,QAAO,EAAE,KAAK,SAAS,IAAI,EAAE,eAAe,EAAE,KAAK,GAAG,gBAAgB,GAAG;;AAS3E,SAAS,gBAAgB,KAAmC;AAC1D,QAAO;EACL,YAAY,IAAI;EAChB,WAAW,IAAI;EACf,YAAY,IAAI;EAChB,UAAU,IAAI;EACd,YAAY,IAAI;EACjB;;AAGH,eAAsB,uBAAuB,MAAY,aAAuB,OAAsC;CACpH,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,MAAI,YAAY,SAAS,EA
CvB,OAAM,OAAO,MACX,+DACA,CAAC,YAAY,CACd;AAEH,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,KAAK;GAC1C,MAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,IAAI;GACrC,MAAM,SAAmB,EAAE;GAC3B,MAAM,SAAqC,EAAE;GAC7C,IAAI,MAAM;AACV,QAAK,MAAM,KAAK,OAAO;AACrB,WAAO,KAAK,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,GAAG;AACtE,WAAO,KAAK,EAAE,YAAY,EAAE,WAAW,EAAE,YAAY,EAAE,UAAU,EAAE,WAAW;;AAEhF,SAAM,OAAO,MACX;kBACU,OAAO,KAAK,KAAK,CAAC;;gFAG5B,OACD;;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,mBAAmB,MAAY,YAA6C;AAKhG,SAJU,MAAM,KAAK,MACnB,+GACA,CAAC,WAAW,CACb,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,iBAAiB,MAAY,YAA6C;AAK9F,SAJU,MAAM,KAAK,MACnB,+GACA,CAAC,WAAW,CACb,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,kBAAkB,MAA0E;AAIhH,SAHU,MAAM,KAAK,MACnB,iFACD,EACQ;;AAGX,eAAsB,aAAa,MAAuE;AAIxG,SAHU,MAAM,KAAK,MACnB,4EACD,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,EAAE;;AAGxF,eAAsB,4BAA4B,MAA2F;AAI3I,SAHU,MAAM,KAAK,MACnB,yFACD,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,YAAY,IAAI;EAAa,EAAE;;AAGrH,eAAsB,uBAAuB,MAAY,QAA+F;AAKtJ,SAJU,MAAM,KAAK,MACnB,kHACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,YAAY,IAAI;EAAa,EAAE;;AAGrH,eAAsB,kBAAkB,MAAY,SAAgG;AAKlJ,SAJU,MAAM,KAAK,MACnB,wKACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,SAAQ;EAAE,YAAY,IAAI;EAAa,WAAW,IAAI;EAAY,YAAY,IAAI;EAAa,EAAE;;AAGrH,eAAsB,0BAA0B,MAAY,QAAiC;CAC3F,MAAM,IAAI,MAAM,KAAK,MAAqB,mEAAmE,CAAC,GAAG,OAAO,GAAG,CAAC;AAC5H,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,qBAAqB,MAAY,SAAkC;CACvF,MAAM,IAAI,MAAM,KAAK,MACnB,yHACA,CAAC,QAAQ,CACV;AACD,QAAO,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE;;AAGlC,eAAsB,uBAAuB,MAAY,aAAgD;AACvG,KAAI,YAAY,WAAW,EAAG,QAAO,EAAE;CACvC,MAAM,eAAe,YAAY,KAAK,GAAG,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK;AAKtE,SAJU,MAAM,KAAK,MACnB,8GAA8G,aAAa,gCAC3H,YACD,EACQ,KAAK,IAAI,gBAAgB;;AAGpC,eAAsB,kBAAkB,MAAgE;AAItG,SAHU,MAAM,KAAK,MACnB,8GACD,EACQ;;AAGX,eAAsB,4BAA4B,MAAY,QAAoE;AAKhI,SAJU,MAAM,KAAK,MACnB,uIACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ;;AAGX,eAAsB,uBAAuB,MAAY,SAAqE;AAK5H
,SAJU,MAAM,KAAK,MACnB,6LACA,CAAC,QAAQ,CACV,EACQ;;AASX,SAAS,kBAAkB,KAAuC;AAChE,QAAO;EACL,WAAW,IAAI;EACf,cAAc,IAAI;EAClB,eAAe,IAAI;EACnB,eAAe,IAAI;EACnB,UAAU,IAAI,cAAc;EAC5B,YAAY,OAAO,IAAI,YAAY;EACpC;;AAGH,eAAsB,kBAAkB,MAAY,MAAuC;AACzF,KAAI,KAAK,WAAW,EAAG;CACvB,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,OAAK,MAAM,KAAK,KACd,OAAM,OAAO,MACX;;;;;;;gDAQA;GAAC,EAAE;GAAW,EAAE;GAAc,EAAE;GAAe,EAAE;GAAe,EAAE,WAAW,IAAI;GAAG,EAAE;GAAW,CAClG;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,eAAe,MAAY,WAAmD;CAClG,MAAM,IAAI,MAAM,KAAK,MACnB,qIACA,CAAC,UAAU,CACZ;AACD,QAAO,EAAE,KAAK,WAAW,IAAI,OAAO,kBAAkB,EAAE,KAAK,GAAG;;AAGlE,eAAsB,WAAW,MAAuC;AAItE,SAHU,MAAM,KAAK,MACnB,kIACD,EACQ,KAAK,KAAI,SAAQ;EAAE,GAAG,kBAAkB,IAAI;EAAE,UAAU;EAAM,EAAE;;AAG3E,eAAsB,qBAAqB,MAAY,QAAmC;AAKxF,SAJU,MAAM,KAAK,MACnB,kFACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,gBAAgB,MAAY,SAAoC;AAKpF,SAJU,MAAM,KAAK,MACnB,gJACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,mBAAmB,MAA+B;AAItE,SAHU,MAAM,KAAK,MACnB,gFACD,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,6BAA6B,MAAY,QAAmC;AAKhG,SAJU,MAAM,KAAK,MACnB,wGACA,CAAC,GAAG,OAAO,GAAG,CACf,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,wBAAwB,MAAY,SAAoC;AAK5F,SAJU,MAAM,KAAK,MACnB,yKACA,CAAC,QAAQ,CACV,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAO1C,eAAsB,gBAAgB,MAAY,SAA0C;AAC1F,KAAI,QAAQ,WAAW,EAAG;CAC1B,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,OAAK,MAAM,KAAK,QACd,OAAM,OAAO,MACX;qEAEA,CAAC,EAAE,MAAM,EAAE,UAAU,CACtB;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,IAAI;AACX,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,iBAAiB,MAAY,SAA0C;CAC3F,MAAM,SAAS,MAAM,KAAK,SAAS;AACnC,KAAI;AACF,QAAM,OAAO,MAAM,QAAQ;AAC3B,QAAM,OAAO,MAAM,+BAA+B;AAClD,OAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK,KAAK;GAC5C,MAAM,QAAQ,QAAQ,MAAM,GAAG,IAAI,IAAI;GACvC,MAAM,SAAmB,EAAE;GAC3B,MAAM,SAAmB,EAAE;GAC3B,IAAI,MAAM;AACV,QAAK,MAAM,KAAK,OAAO;AACrB,WAAO,KAAK,KAAK,MAAM,KAAK,MAAM,GAAG;AACrC,WAAO
,KAAK,EAAE,MAAM,EAAE,UAAU;;AAElC,SAAM,OAAO,MACX,0DAA0D,OAAO,KAAK,KAAK,IAC3E,OACD;;AAEH,QAAM,OAAO,MAAM,SAAS;UACrB,GAAG;AACV,QAAM,OAAO,MAAM,WAAW;AAC9B,QAAM;WACE;AACR,SAAO,SAAS;;;AAIpB,eAAsB,iBAAiB,MAAY,MAAiC;AAKlF,SAJU,MAAM,KAAK,MACnB,2DACA,CAAC,KAAK,CACP,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;AAG1C,eAAsB,qBAAqB,MAAY,OAAe,QAAQ,KAAwB;AAKpG,SAJU,MAAM,KAAK,MACnB,8FACA,CAAC,IAAI,MAAM,IAAI,MAAM,CACtB,EACQ,KAAK,KAAI,QAAO,IAAI,WAAW;;;;;;;;;;;;;;;ACvd1C,MAAM,EAAE,MAAM,WAAW;AAEzB,IAAa,kBAAb,MAAuD;CACrD,AAAS,cAAc;CAEvB,AAAQ;;;;;;;CAQR,aAAa,eAAe,QAAuC;EACjE,MAAM,UACJ,OAAO,oBACP,gBAAgB,OAAO,QAAQ,MAAM,GAAG,OAAO,YAAY,MAAM,GAAG,OAAO,QAAQ,YAAY,GAAG,OAAO,QAAQ,KAAK,GAAG,OAAO,YAAY;EAE9I,MAAM,WADM,IAAI,IAAI,QAAQ,CACP,SAAS,MAAM,EAAE;EAEtC,MAAM,WAAW,IAAI,IAAI,QAAQ;AACjC,WAAS,WAAW;EACpB,MAAM,YAAY,IAAI,OAAO;GAC3B,kBAAkB,SAAS,UAAU;GACrC,KAAK;GACL,yBAAyB;GAC1B,CAAC;AAEF,MAAI;AAKF,QAJc,MAAM,UAAU,MAC5B,gDACA,CAAC,SAAS,CACX,EACS,aAAa,GAAG;AACxB,UAAM,UAAU,MAAM,oBAAoB,SAAS,GAAG;AACtD,YAAQ,OAAO,MAAM,oCAAoC,SAAS,IAAI;;YAEhE;AACR,SAAM,UAAU,KAAK;;EAGvB,MAAM,aAAa,IAAI,OAAO;GAC5B,kBAAkB;GAClB,KAAK;GACL,yBAAyB;GAC1B,CAAC;AAEF,MAAI;AAIF,QAHmB,MAAM,WAAW,MAClC,0EACD,EACc,aAAa,GAAG;IAC7B,MAAM,YAAY,QAAQ,cAAc,OAAO,KAAK,IAAI,CAAC;IACzD,MAAM,cAAc,KAAK,WAAW,wBAAwB;IAC5D,IAAI;AACJ,QAAI;AACF,eAAU,aAAa,aAAa,QAAQ;YACtC;AAEN,eAAU,aADM,KAAK,WAAW,qBAAqB,EACrB,QAAQ;;AAE1C,UAAM,WAAW,MAAM,QAAQ;AAC/B,YAAQ,OAAO,MAAM,8CAA8C,SAAS,IAAI;;YAE1E;AACR,SAAM,WAAW,KAAK;;;CAI1B,YAAY,QAAwB;AAKlC,OAAK,OAAO,IAAI,OAAO;GACrB,kBAJA,OAAO,oBACP,gBAAgB,OAAO,QAAQ,MAAM,GAAG,OAAO,YAAY,MAAM,GAAG,OAAO,QAAQ,YAAY,GAAG,OAAO,QAAQ,KAAK,GAAG,OAAO,YAAY;GAI5I,KAAK,OAAO,kBAAkB;GAC9B,yBAAyB,OAAO,uBAAuB;GACvD,mBAAmB;GACpB,CAAC;AAEF,OAAK,KAAK,GAAG,UAAU,QAAQ;AAC7B,WAAQ,OAAO,MAAM,8BAA8B,IAAI,QAAQ,IAAI;IACnE;;CAOJ,MAAM,QAAuB;AAC3B,QAAM,KAAK,KAAK,KAAK;;;;;;CAOvB,UAAgB;AACd,SAAO,KAAK;;CAGd,MAAM,WAAqC;EACzC,MAAM,SAAS,MAAM,KAAK,KAAK,SAAS;AACxC,MAAI;GACF,MAAM,cAAc,MAAM,OAAO,MAC/B,4CACD;GACD,MAAM,eAAe,MAAM,OAAO,MAChC,6CACD;AACD,UAAO;IACL,OAAO,SAAS,YAAY,KAAK,IAAI,KAAK,KAAK,GAAG;I
AClD,QAAQ,SAAS,aAAa,KAAK,IAAI,KAAK,KAAK,GAAG;IACrD;YACO;AACR,UAAO,SAAS;;;;;;;CAQpB,MAAM,iBAAyC;EAC7C,IAAI,SAA4B;AAChC,MAAI;AACF,YAAS,MAAM,KAAK,KAAK,SAAS;AAClC,SAAM,OAAO,MAAM,WAAW;AAC9B,UAAO;WACA,GAAG;AACV,UAAO,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;YACzC;AACR,WAAQ,SAAS;;;CAQrB,MAAM,YAAY,WAAmB,MAA2C;AAK9E,UAJe,MAAM,KAAK,KAAK,MAC7B,kEACA,CAAC,WAAW,KAAK,CAClB,EACa,KAAK,IAAI;;CAGzB,MAAM,WAAW,MAA8B;AAC7C,QAAM,KAAK,KAAK,MACd;;;;;;;kCAQA;GAAC,KAAK;GAAW,KAAK;GAAM,KAAK;GAAQ,KAAK;GAAM,KAAK;GAAM,KAAK;GAAO,KAAK;GAAK,CACtF;;CAOH,MAAM,YAAY,WAAmB,MAAiC;AAKpE,UAJe,MAAM,KAAK,KAAK,MAC7B,iEACA,CAAC,WAAW,KAAK,CAClB,EACa,KAAK,KAAK,MAAM,EAAE,GAAG;;CAGrC,MAAM,oBAAoB,WAAmB,MAA6B;AACxE,QAAM,KAAK,KAAK,MACd,8DACA,CAAC,WAAW,KAAK,CAClB;;CAGH,MAAM,aAAa,QAAmC;AACpD,MAAI,OAAO,WAAW,EAAG;EAEzB,MAAM,SAAS,MAAM,KAAK,KAAK,SAAS;AACxC,MAAI;AACF,SAAM,OAAO,MAAM,QAAQ;AAE3B,QAAK,MAAM,KAAK,QAAQ;IACtB,MAAM,WAAW,EAAE,KAAK,QAAQ,OAAO,GAAG;AAE1C,UAAM,OAAO,MACX;;;;;;;;;;;;;;;gDAgBA;KACE,EAAE;KAAI,EAAE;KAAW,EAAE;KAAQ,EAAE;KAAM,EAAE;KACvC,EAAE;KAAW,EAAE;KAAS,EAAE;KAAM;KAAU,EAAE;KAC7C,CACF;;AAGH,SAAM,OAAO,MAAM,SAAS;WACrB,GAAG;AACV,SAAM,OAAO,MAAM,WAAW;AAC9B,SAAM;YACE;AACR,UAAO,SAAS;;;CAIpB,MAAM,sBAAsB,WAAsC;AAKhE,UAJe,MAAM,KAAK,KAAK,MAC7B,8DACA,CAAC,UAAU,CACZ,EACa,KAAK,KAAK,MAAM,EAAE,KAAK;;CAGvC,MAAM,YAAY,WAAmB,OAAgC;AACnE,MAAI,MAAM,WAAW,EAAG;EACxB,MAAM,SAAS,MAAM,KAAK,KAAK,SAAS;AACxC,MAAI;AACF,SAAM,OAAO,MAAM,QAAQ;AAC3B,QAAK,MAAM,QAAQ,OAAO;AACxB,UAAM,OAAO,MAAM,8DAA8D,CAAC,WAAW,KAAK,CAAC;AACnG,UAAM,OAAO,MAAM,6DAA6D,CAAC,WAAW,KAAK,CAAC;;AAEpG,SAAM,OAAO,MAAM,SAAS;WACrB,GAAG;AACV,SAAM,OAAO,MAAM,WAAW;AAC9B,SAAM;YACE;AACR,UAAO,SAAS;;;CAIpB,MAAM,sBAAsB,WAAoG;AAC9H,MAAI,cAAc,OAKhB,SAJe,MAAM,KAAK,KAAK,MAC7B,6GACA,CAAC,UAAU,CACZ,EACa;AAKhB,UAHe,MAAM,KAAK,KAAK,MAC7B,wFACD,EACa;;CAGhB,MAAM,gBAAgB,SAAiB,WAAkC;EAEvE,MAAM,SAAS,MADH,eAAe,UAAU,CACZ,KAAK,IAAI,GAAG;AACrC,QAAM,KAAK,KAAK,MACd,8DACA,CAAC,QAAQ,QAAQ,CAClB;;CAOH,MAAM,cAAc,OAAe,MAA+C;AAChF,SAAO,cAAc,KAAK,MAAM,OAAO,KAAK;;CAG9C,MAAM,eAAe,gBAA8B,MAA+C;AAChG,SAAO,eAAe,KAAK,MAAM,gBAAgB,KAAK;;C
AOxD,MAAM,gBAAgB,MAAmC;AAAE,SAAOA,gBAAsB,KAAK,MAAM,KAAK;;CACxG,MAAM,gBAAgB,WAAkC;AAAE,SAAOC,gBAAsB,KAAK,MAAM,UAAU;;CAC5G,MAAM,aAAa,WAAiD;AAAE,SAAOC,aAAmB,KAAK,MAAM,UAAU;;CACrH,MAAM,oBAAoB,OAAe,QAA8C;AAAE,SAAOC,oBAA0B,KAAK,MAAM,OAAO,OAAO;;CACnJ,MAAM,mBAA4C;AAAE,SAAOC,iBAAuB,KAAK,KAAK;;CAC5F,MAAM,oBAAoB,SAA0C;AAAE,SAAOC,oBAA0B,KAAK,MAAM,QAAQ;;CAC1H,MAAM,kBAAmC;AAAE,SAAOC,gBAAsB,KAAK,KAAK;;CAClF,MAAM,0BAA0B,QAAiC;AAAE,SAAOC,0BAAgC,KAAK,MAAM,OAAO;;CAC5H,MAAM,qBAAqB,SAAkC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,QAAQ;;CACpH,MAAM,qBAAqB,OAA0C;AAAE,SAAOC,qBAA2B,KAAK,MAAM,MAAM;;CAC1H,MAAM,0BAA0B,OAAiB,SAA0C;AAAE,SAAOC,0BAAgC,KAAK,MAAM,OAAO,QAAQ;;CAC9J,MAAM,uBAA0C;AAAE,SAAOC,qBAA2B,KAAK,KAAK;;CAC9F,MAAM,4BAA4B,QAAmC;AAAE,SAAOC,4BAAkC,KAAK,MAAM,OAAO;;CAClI,MAAM,uBAAuB,SAAoC;AAAE,SAAOC,uBAA6B,KAAK,MAAM,QAAQ;;CAE1H,MAAM,mBAAmB,SAAyC;AAAE,SAAOC,mBAAyB,KAAK,MAAM,QAAQ;;CACvH,MAAM,mBAAmB,eAAsC;AAAE,SAAOC,mBAAyB,KAAK,MAAM,cAAc;;CAC1H,MAAM,cAAc,WAA8D;AAAE,SAAOC,cAAoB,KAAK,MAAM,UAAU;;CAEpI,MAAM,uBAAuB,aAAuB,OAAsC;AAAE,SAAOC,uBAA6B,KAAK,MAAM,aAAa,MAAM;;CAC9J,MAAM,mBAAmB,YAA6C;AAAE,SAAOC,mBAAyB,KAAK,MAAM,WAAW;;CAC9H,MAAM,iBAAiB,YAA6C;AAAE,SAAOC,iBAAuB,KAAK,MAAM,WAAW;;CAC1H,MAAM,oBAAkF;AAAE,SAAOC,kBAAwB,KAAK,KAAK;;CACnI,MAAM,eAA0E;AAAE,SAAOC,aAAmB,KAAK,KAAK;;CACtH,MAAM,8BAA6G;AAAE,SAAOC,4BAAkC,KAAK,KAAK;;CACxK,MAAM,uBAAuB,QAA+F;AAAE,SAAOC,uBAA6B,KAAK,MAAM,OAAO;;CACpL,MAAM,kBAAkB,SAAgG;AAAE,SAAOC,kBAAwB,KAAK,MAAM,QAAQ;;CAC5K,MAAM,0BAA0B,QAAiC;AAAE,SAAOC,0BAAgC,KAAK,MAAM,OAAO;;CAC5H,MAAM,qBAAqB,SAAkC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,QAAQ;;CACpH,MAAM,uBAAuB,aAAgD;AAAE,SAAOC,uBAA6B,KAAK,MAAM,YAAY;;CAC1I,MAAM,oBAAwE;AAAE,SAAOC,kBAAwB,KAAK,KAAK;;CACzH,MAAM,4BAA4B,QAAoE;AAAE,SAAOC,4BAAkC,KAAK,MAAM,OAAO;;CACnK,MAAM,uBAAuB,SAAqE;AAAE,SAAOC,uBAA6B,KAAK,MAAM,QAAQ;;CAE3J,MAAM,kBAAkB,MAAuC;AAAE,SAAOC,kBAAwB,KAAK,MAAM,KAAK;;CAChH,MAAM,eAAe,WAAmD;AAAE,SAAOC,eAAqB,KAAK,MAAM,UAAU;;CAC3H,MAAM,aAAwC;AAAE,SAAOC,WAAiB,KAAK,KAAK;;CAClF,MAAM,qBAAqB,QAAmC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,OAAO;;CACpH,MAAM,gBAAgB,SAAoC;AAAE,SAAOC,gBAAs
B,KAAK,MAAM,QAAQ;;CAC5G,MAAM,qBAAwC;AAAE,SAAOC,mBAAyB,KAAK,KAAK;;CAC1F,MAAM,6BAA6B,QAAmC;AAAE,SAAOC,6BAAmC,KAAK,MAAM,OAAO;;CACpI,MAAM,wBAAwB,SAAoC;AAAE,SAAOC,wBAA8B,KAAK,MAAM,QAAQ;;CAE5H,MAAM,gBAAgB,SAA0C;AAAE,SAAOC,gBAAsB,KAAK,MAAM,QAAQ;;CAClH,MAAM,iBAAiB,SAA0C;AAAE,SAAOC,iBAAuB,KAAK,MAAM,QAAQ;;CACpH,MAAM,iBAAiB,MAAiC;AAAE,SAAOC,iBAAuB,KAAK,MAAM,KAAK;;CACxG,MAAM,qBAAqB,OAAe,OAAmC;AAAE,SAAOC,qBAA2B,KAAK,MAAM,OAAO,MAAM;;CAGzI,MAAM,wBAAwB,WAAmB,OAAkF;AAKjI,UAJU,MAAM,KAAK,KAAK,MACxB,sIACA,CAAC,WAAW,MAAM,CACnB,EACQ;;CAGX,MAAM,iBAAiB,WAAmB,MAAc,QAAQ,IAAgE;AAK9H,UAJU,MAAM,KAAK,KAAK,MACxB,wIACA;GAAC;GAAW;GAAM;GAAM,CACzB,EACQ;;CAGX,MAAM,mBAAmB,WAAmB,OAAe,OAA+D;AAKxH,UAJU,MAAM,KAAK,KAAK,MACxB,+GACA;GAAC;GAAW,IAAI,MAAM;GAAI;GAAM,CACjC,EACQ"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"reranker-
|
|
1
|
+
{"version":3,"file":"reranker-CMNZcfVx.mjs","names":[],"sources":["../src/memory/reranker.ts"],"sourcesContent":["/**\n * Cross-encoder reranker for PAI memory search results.\n *\n * Uses Xenova/ms-marco-MiniLM-L-6-v2 — a 22.7M param cross-encoder trained on\n * MS MARCO passage ranking. The q8 quantized ONNX model is ~23 MB.\n *\n * Cross-encoders score (query, document) pairs jointly, producing more accurate\n * relevance scores than bi-encoder cosine similarity alone. The trade-off is\n * latency: cross-encoders must score each pair independently, so they are used\n * as a reranking step on top of a fast first-stage retriever (BM25 / cosine).\n *\n * The model is loaded as a lazy singleton — no startup cost until the first\n * rerank call. Subsequent calls reuse the loaded model.\n *\n * Inspired by QMD's Qwen3-reranker step (tobi/qmd).\n */\n\nimport type { SearchResult } from \"./search.js\";\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\nconst DEFAULT_RERANKER_MODEL = \"Xenova/ms-marco-MiniLM-L-6-v2\";\n\n// ---------------------------------------------------------------------------\n// Lazy singleton\n// ---------------------------------------------------------------------------\n\nlet _tokenizer: any = null;\nlet _model: any = null;\nlet _currentModel: string | null = null;\nlet _loading: Promise<void> | null = null;\n\n/**\n * Configure the reranker model.\n * Must be called before the first rerank() call if you want a non-default model.\n */\nexport function configureRerankerModel(model?: string): void {\n const resolved = model?.trim() || DEFAULT_RERANKER_MODEL;\n if (_currentModel !== null && _currentModel !== resolved) {\n _tokenizer = null;\n _model = null;\n _loading = null;\n }\n _currentModel = resolved;\n}\n\nasync function ensureLoaded(): Promise<void> {\n if (_tokenizer && _model) return;\n if (_loading) return 
_loading;\n\n _loading = (async () => {\n const model = _currentModel ?? DEFAULT_RERANKER_MODEL;\n const {\n AutoTokenizer,\n AutoModelForSequenceClassification,\n } = await import(\"@huggingface/transformers\");\n\n _tokenizer = await AutoTokenizer.from_pretrained(model);\n _model = await AutoModelForSequenceClassification.from_pretrained(\n model,\n { dtype: \"q8\" },\n );\n _currentModel = model;\n })();\n\n return _loading;\n}\n\n// ---------------------------------------------------------------------------\n// Reranking\n// ---------------------------------------------------------------------------\n\nexport interface RerankOptions {\n /** Maximum number of results to return after reranking. */\n topK?: number;\n /**\n * Maximum number of candidates to rerank.\n * Cross-encoders are O(n) per candidate, so we cap to keep latency\n * reasonable. Default: 50.\n */\n maxCandidates?: number;\n}\n\n/**\n * Rerank search results using a cross-encoder model.\n *\n * Takes the top `maxCandidates` results from a first-stage retriever,\n * scores each (query, snippet) pair through the cross-encoder, and\n * returns them sorted by cross-encoder relevance score.\n *\n * The original retrieval score is replaced with the cross-encoder score.\n */\nexport async function rerankResults(\n query: string,\n results: SearchResult[],\n opts?: RerankOptions,\n): Promise<SearchResult[]> {\n if (results.length === 0) return [];\n\n const maxCandidates = opts?.maxCandidates ?? 50;\n const topK = opts?.topK ?? 
results.length;\n\n // Cap candidates to rerank\n const candidates = results.slice(0, maxCandidates);\n\n await ensureLoaded();\n\n // Tokenize all (query, document) pairs in a single batch\n const queries = new Array(candidates.length).fill(query);\n const documents = candidates.map((r) => r.snippet);\n\n const inputs = _tokenizer!(queries, {\n text_pair: documents,\n padding: true,\n truncation: true,\n });\n\n // Run the cross-encoder\n const output = await _model!(inputs);\n const logits = output.logits;\n\n // ms-marco-MiniLM returns raw logits (not sigmoid-normalized).\n // Higher = more relevant.\n const scores: number[][] = logits.tolist();\n\n // Build reranked results\n const scored = candidates.map((result, i) => ({\n ...result,\n score: scores[i][0],\n }));\n\n // Sort by cross-encoder score descending\n scored.sort((a, b) => b.score - a.score);\n\n return scored.slice(0, topK);\n}\n"],"mappings":";;;;;;;AAuBA,MAAM,yBAAyB;AAM/B,IAAI,aAAkB;AACtB,IAAI,SAAc;AAClB,IAAI,gBAA+B;AACnC,IAAI,WAAiC;;;;;AAMrC,SAAgB,uBAAuB,OAAsB;CAC3D,MAAM,WAAW,OAAO,MAAM,IAAI;AAClC,KAAI,kBAAkB,QAAQ,kBAAkB,UAAU;AACxD,eAAa;AACb,WAAS;AACT,aAAW;;AAEb,iBAAgB;;AAGlB,eAAe,eAA8B;AAC3C,KAAI,cAAc,OAAQ;AAC1B,KAAI,SAAU,QAAO;AAErB,aAAY,YAAY;EACtB,MAAM,QAAQ,iBAAiB;EAC/B,MAAM,EACJ,eACA,uCACE,MAAM,OAAO;AAEjB,eAAa,MAAM,cAAc,gBAAgB,MAAM;AACvD,WAAS,MAAM,mCAAmC,gBAChD,OACA,EAAE,OAAO,MAAM,CAChB;AACD,kBAAgB;KACd;AAEJ,QAAO;;;;;;;;;;;AA2BT,eAAsB,cACpB,OACA,SACA,MACyB;AACzB,KAAI,QAAQ,WAAW,EAAG,QAAO,EAAE;CAEnC,MAAM,gBAAgB,MAAM,iBAAiB;CAC7C,MAAM,OAAO,MAAM,QAAQ,QAAQ;CAGnC,MAAM,aAAa,QAAQ,MAAM,GAAG,cAAc;AAElD,OAAM,cAAc;CAGpB,MAAM,UAAU,IAAI,MAAM,WAAW,OAAO,CAAC,KAAK,MAAM;CACxD,MAAM,YAAY,WAAW,KAAK,MAAM,EAAE,QAAQ;CAElD,MAAM,SAAS,WAAY,SAAS;EAClC,WAAW;EACX,SAAS;EACT,YAAY;EACb,CAAC;CAQF,MAAM,UALS,MAAM,OAAQ,OAAO,EACd,OAIY,QAAQ;CAG1C,MAAM,SAAS,WAAW,KAAK,QAAQ,OAAO;EAC5C,GAAG;EACH,OAAO,OAAO,GAAG;EAClB,EAAE;AAGH,QAAO,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AAExC,QAAO,OAAO,MAAM,GAAG,KAAK"}
|
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import { t as __exportAll } from "./rolldown-runtime-95iHPtFO.mjs";
|
|
2
2
|
import { n as cosineSimilarity, r as deserializeEmbedding } from "./embeddings-DGRAPAYb.mjs";
|
|
3
|
+
import { t as STOP_WORDS } from "./stop-words-BaMEGVeY.mjs";
|
|
3
4
|
|
|
4
5
|
//#region src/memory/search.ts
|
|
5
6
|
var search_exports = /* @__PURE__ */ __exportAll({
|
|
@@ -10,63 +11,6 @@ var search_exports = /* @__PURE__ */ __exportAll({
|
|
|
10
11
|
searchMemoryHybrid: () => searchMemoryHybrid,
|
|
11
12
|
searchMemorySemantic: () => searchMemorySemantic
|
|
12
13
|
});
|
|
13
|
-
const STOP_WORDS = new Set([
|
|
14
|
-
"a",
|
|
15
|
-
"an",
|
|
16
|
-
"and",
|
|
17
|
-
"are",
|
|
18
|
-
"as",
|
|
19
|
-
"at",
|
|
20
|
-
"be",
|
|
21
|
-
"been",
|
|
22
|
-
"but",
|
|
23
|
-
"by",
|
|
24
|
-
"do",
|
|
25
|
-
"for",
|
|
26
|
-
"from",
|
|
27
|
-
"has",
|
|
28
|
-
"have",
|
|
29
|
-
"he",
|
|
30
|
-
"her",
|
|
31
|
-
"him",
|
|
32
|
-
"his",
|
|
33
|
-
"how",
|
|
34
|
-
"i",
|
|
35
|
-
"if",
|
|
36
|
-
"in",
|
|
37
|
-
"is",
|
|
38
|
-
"it",
|
|
39
|
-
"its",
|
|
40
|
-
"me",
|
|
41
|
-
"my",
|
|
42
|
-
"not",
|
|
43
|
-
"of",
|
|
44
|
-
"on",
|
|
45
|
-
"or",
|
|
46
|
-
"our",
|
|
47
|
-
"out",
|
|
48
|
-
"she",
|
|
49
|
-
"so",
|
|
50
|
-
"that",
|
|
51
|
-
"the",
|
|
52
|
-
"their",
|
|
53
|
-
"them",
|
|
54
|
-
"they",
|
|
55
|
-
"this",
|
|
56
|
-
"to",
|
|
57
|
-
"up",
|
|
58
|
-
"us",
|
|
59
|
-
"was",
|
|
60
|
-
"we",
|
|
61
|
-
"were",
|
|
62
|
-
"what",
|
|
63
|
-
"when",
|
|
64
|
-
"who",
|
|
65
|
-
"will",
|
|
66
|
-
"with",
|
|
67
|
-
"you",
|
|
68
|
-
"your"
|
|
69
|
-
]);
|
|
70
14
|
/**
|
|
71
15
|
* Convert a free-text query into an FTS5 query string.
|
|
72
16
|
*
|
|
@@ -322,4 +266,4 @@ function applyRecencyBoost(results, halfLifeDays = 90) {
|
|
|
322
266
|
|
|
323
267
|
//#endregion
|
|
324
268
|
export { searchMemorySemantic as a, searchMemoryHybrid as i, populateSlugs as n, search_exports as o, searchMemory as r, buildFtsQuery as t };
|
|
325
|
-
//# sourceMappingURL=search-
|
|
269
|
+
//# sourceMappingURL=search-DC1qhkKn.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"search-DC1qhkKn.mjs","names":[],"sources":["../src/memory/search.ts"],"sourcesContent":["/**\n * Search over the PAI federation memory index.\n *\n * Provides three search modes:\n * - keyword — BM25 full-text search (default, fast, no ML required)\n * - semantic — Brute-force cosine similarity over pre-computed embeddings\n * - hybrid — Normalized combination of BM25 + cosine scores\n *\n * BM25 uses SQLite's FTS5 extension. Semantic search requires embeddings to\n * have been generated first via `embedChunks()` in the indexer.\n */\n\nimport type { Database } from \"better-sqlite3\";\nimport { deserializeEmbedding, cosineSimilarity } from \"./embeddings.js\";\nimport { STOP_WORDS } from \"../utils/stop-words.js\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface SearchResult {\n projectId: number;\n projectSlug?: string; // populated from registry after search when available\n path: string;\n startLine: number;\n endLine: number;\n snippet: string;\n score: number; // raw BM25 score (lower = more relevant in FTS5)\n tier: string;\n source: string;\n updatedAt?: number; // Unix ms from memory_chunks.updated_at\n}\n\nexport interface SearchOptions {\n /** Restrict search to these project IDs. */\n projectIds?: number[];\n /** Restrict to 'memory' or 'notes' sources. */\n sources?: string[];\n /** Restrict to specific tier(s): 'evergreen' | 'daily' | 'topic' | 'session' */\n tiers?: string[];\n /** Maximum number of results to return. Default 10. */\n maxResults?: number;\n /** Minimum BM25 score threshold (FTS5 scores are negative; 0.0 means no filter). 
*/\n minScore?: number;\n}\n\n// STOP_WORDS imported from utils/stop-words.ts\n\n// ---------------------------------------------------------------------------\n// Query builder\n// ---------------------------------------------------------------------------\n\n/**\n * Convert a free-text query into an FTS5 query string.\n *\n * Strategy:\n * 1. Tokenise by whitespace and punctuation\n * 2. Remove stop words and tokens shorter than 2 characters\n * 3. Double-quote each remaining token (exact word form)\n * 4. Join with OR so that any matching token returns a result\n *\n * Using OR instead of AND is critical for multi-word queries: the words rarely\n * all appear in the same chunk, so AND would return zero results. FTS5 BM25\n * scoring naturally ranks chunks where more terms match higher, so the most\n * relevant chunks still surface at the top.\n *\n * Example: \"Synchrotech interview follow-up Gilles\"\n * → `\"synchrotech\" OR \"interview\" OR \"follow\" OR \"gilles\"`\n * → chunks matching any term, ranked by how many terms match\n */\nexport function buildFtsQuery(query: string): string {\n const tokens = query\n .toLowerCase()\n .split(/[\\s\\p{P}]+/u)\n .filter(Boolean)\n .filter((t) => t.length >= 2)\n .filter((t) => !STOP_WORDS.has(t))\n // Escape any double-quotes inside the token (FTS5 uses them as delimiters)\n .map((t) => `\"${t.replace(/\"/g, '\"\"')}\"`)\n\n if (tokens.length === 0) {\n // Fallback: use original query as a raw string (may produce no results)\n return `\"${query.replace(/\"/g, '\"\"')}\"`;\n }\n\n return tokens.join(\" OR \");\n}\n\n// ---------------------------------------------------------------------------\n// Search\n// ---------------------------------------------------------------------------\n\n/**\n * Search across all indexed memory using FTS5 BM25 ranking.\n *\n * Results are ordered by BM25 score (most relevant first).\n * FTS5 bm25() returns negative values; closer to 0 = more relevant.\n * We negate the score so callers 
get positive values where higher = better.\n *\n * Multilingual note: SQLite FTS5 uses the `unicode61` tokenizer by default,\n * which handles Unicode correctly (German umlauts, French accents, etc.) without\n * language-specific stemming. No changes needed here — it is already\n * multilingual-safe.\n */\nexport function searchMemory(\n db: Database,\n query: string,\n opts?: SearchOptions,\n): SearchResult[] {\n const maxResults = opts?.maxResults ?? 10;\n const ftsQuery = buildFtsQuery(query);\n\n // Build the SQL with optional filters\n const conditions: string[] = [];\n const params: (string | number)[] = [ftsQuery];\n\n if (opts?.projectIds && opts.projectIds.length > 0) {\n const placeholders = opts.projectIds.map(() => \"?\").join(\", \");\n conditions.push(`c.project_id IN (${placeholders})`);\n params.push(...opts.projectIds);\n }\n\n if (opts?.sources && opts.sources.length > 0) {\n const placeholders = opts.sources.map(() => \"?\").join(\", \");\n conditions.push(`c.source IN (${placeholders})`);\n params.push(...opts.sources);\n }\n\n if (opts?.tiers && opts.tiers.length > 0) {\n const placeholders = opts.tiers.map(() => \"?\").join(\", \");\n conditions.push(`c.tier IN (${placeholders})`);\n params.push(...opts.tiers);\n }\n\n const whereClause = conditions.length > 0\n ? 
\"AND \" + conditions.join(\" AND \")\n : \"\";\n\n params.push(maxResults);\n\n // FTS5: join memory_fts with memory_chunks to get metadata\n // bm25(memory_fts) returns negative values (lower = better match)\n const sql = `\n SELECT\n c.project_id,\n c.path,\n c.start_line,\n c.end_line,\n c.text AS snippet,\n c.tier,\n c.source,\n c.updated_at,\n bm25(memory_fts) AS bm25_score\n FROM memory_fts\n JOIN memory_chunks c ON memory_fts.id = c.id\n WHERE memory_fts MATCH ?\n ${whereClause}\n ORDER BY bm25_score\n LIMIT ?\n `;\n\n let rows: Array<{\n project_id: number;\n path: string;\n start_line: number;\n end_line: number;\n snippet: string;\n tier: string;\n source: string;\n updated_at: number;\n bm25_score: number;\n }>;\n\n try {\n rows = db.prepare(sql).all(...params) as typeof rows;\n } catch {\n // FTS5 MATCH throws when the query is invalid — return empty results\n return [];\n }\n\n const minScore = opts?.minScore ?? 0.0;\n\n return rows\n .map((row) => ({\n projectId: row.project_id,\n path: row.path,\n startLine: row.start_line,\n endLine: row.end_line,\n snippet: row.snippet,\n // Negate so higher = better match for callers\n score: -row.bm25_score,\n tier: row.tier,\n source: row.source,\n updatedAt: row.updated_at,\n }))\n .filter((r) => r.score >= minScore);\n}\n\n// ---------------------------------------------------------------------------\n// Semantic search\n// ---------------------------------------------------------------------------\n\n/**\n * Search chunks using brute-force cosine similarity over stored embeddings.\n *\n * Only chunks that have a non-null embedding BLOB are considered. Chunks\n * without embeddings are silently skipped (they can be embedded later via\n * `embedChunks()`).\n *\n * @param queryEmbedding Pre-computed Float32Array for the search query.\n */\nexport function searchMemorySemantic(\n db: Database,\n queryEmbedding: Float32Array,\n opts?: SearchOptions,\n): SearchResult[] {\n const maxResults = opts?.maxResults ?? 
10;\n\n // Build the SQL filter conditions\n const conditions: string[] = [\"embedding IS NOT NULL\"];\n const params: (string | number)[] = [];\n\n if (opts?.projectIds && opts.projectIds.length > 0) {\n const placeholders = opts.projectIds.map(() => \"?\").join(\", \");\n conditions.push(`project_id IN (${placeholders})`);\n params.push(...opts.projectIds);\n }\n\n if (opts?.sources && opts.sources.length > 0) {\n const placeholders = opts.sources.map(() => \"?\").join(\", \");\n conditions.push(`source IN (${placeholders})`);\n params.push(...opts.sources);\n }\n\n if (opts?.tiers && opts.tiers.length > 0) {\n const placeholders = opts.tiers.map(() => \"?\").join(\", \");\n conditions.push(`tier IN (${placeholders})`);\n params.push(...opts.tiers);\n }\n\n const where = \"WHERE \" + conditions.join(\" AND \");\n\n // Hard cap for SQLite semantic path — prevents OOM on large corpora.\n // Use Postgres for production semantic search.\n const sql = `\n SELECT id, project_id, path, start_line, end_line, text, tier, source, embedding, updated_at\n FROM memory_chunks\n ${where}\n LIMIT 5000\n `;\n\n const rows = db.prepare(sql).all(...params) as Array<{\n id: string;\n project_id: number;\n path: string;\n start_line: number;\n end_line: number;\n text: string;\n tier: string;\n source: string;\n embedding: Buffer;\n updated_at: number;\n }>;\n\n if (rows.length === 0) return [];\n\n // Compute cosine similarity for every chunk\n const scored = rows.map((row) => {\n const vec = deserializeEmbedding(row.embedding);\n const score = cosineSimilarity(queryEmbedding, vec);\n return {\n projectId: row.project_id,\n path: row.path,\n startLine: row.start_line,\n endLine: row.end_line,\n snippet: row.text,\n score,\n tier: row.tier,\n source: row.source,\n updatedAt: row.updated_at,\n };\n });\n\n // Sort by descending similarity, apply optional min score filter, limit\n const minScore = opts?.minScore ?? 
-Infinity;\n\n return scored\n .filter((r) => r.score >= minScore)\n .sort((a, b) => b.score - a.score)\n .slice(0, maxResults);\n}\n\n// ---------------------------------------------------------------------------\n// Hybrid search\n// ---------------------------------------------------------------------------\n\n/**\n * Combine BM25 keyword search and semantic search using normalized scores.\n *\n * Both score sets are min-max normalized to [0,1] before combining, so neither\n * dominates the other regardless of their raw scales.\n *\n * @param queryEmbedding Pre-computed embedding for the query.\n * @param keywordWeight Weight for BM25 score (default 0.5).\n * @param semanticWeight Weight for cosine similarity score (default 0.5).\n */\nexport function searchMemoryHybrid(\n db: Database,\n query: string,\n queryEmbedding: Float32Array,\n opts?: SearchOptions & { keywordWeight?: number; semanticWeight?: number },\n): SearchResult[] {\n const maxResults = opts?.maxResults ?? 10;\n const kw = opts?.keywordWeight ?? 0.5;\n const sw = opts?.semanticWeight ?? 
0.5;\n\n // Fetch keyword results — 50 candidates is sufficient for min-max normalization\n const keywordResults = searchMemory(db, query, {\n ...opts,\n maxResults: 50,\n });\n\n // Fetch semantic results — 50 candidates is sufficient for min-max normalization\n const semanticResults = searchMemorySemantic(db, queryEmbedding, {\n ...opts,\n maxResults: 50,\n });\n\n if (keywordResults.length === 0 && semanticResults.length === 0) return [];\n\n // Build a map of chunk ID → combined result\n // Use \"projectId:path:startLine:endLine\" as a stable key (same as chunk IDs)\n const keyFor = (r: SearchResult) =>\n `${r.projectId}:${r.path}:${r.startLine}:${r.endLine}`;\n\n // Min-max normalize helper\n function minMaxNormalize(items: SearchResult[]): Map<string, number> {\n if (items.length === 0) return new Map();\n const min = Math.min(...items.map((r) => r.score));\n const max = Math.max(...items.map((r) => r.score));\n const range = max - min;\n const m = new Map<string, number>();\n for (const r of items) {\n m.set(keyFor(r), range === 0 ? 1 : (r.score - min) / range);\n }\n return m;\n }\n\n const kwNorm = minMaxNormalize(keywordResults);\n const semNorm = minMaxNormalize(semanticResults);\n\n // Union of all chunk keys\n const allKeys = new Set<string>([\n ...keywordResults.map(keyFor),\n ...semanticResults.map(keyFor),\n ]);\n\n // Build a lookup from key → result metadata\n const metaMap = new Map<string, SearchResult>();\n for (const r of [...keywordResults, ...semanticResults]) {\n metaMap.set(keyFor(r), r);\n }\n\n // Combine scores\n const combined: Array<SearchResult & { combinedScore: number }> = [];\n for (const key of allKeys) {\n const meta = metaMap.get(key)!;\n const kwScore = kwNorm.get(key) ?? 0;\n const semScore = semNorm.get(key) ?? 
0;\n const combinedScore = kw * kwScore + sw * semScore;\n combined.push({ ...meta, score: combinedScore, combinedScore });\n }\n\n // Sort by combined score descending\n return combined\n .sort((a, b) => b.score - a.score)\n .slice(0, maxResults)\n .map(({ combinedScore: _unused, ...r }) => r);\n}\n\n// ---------------------------------------------------------------------------\n// Slug lookup helper\n// ---------------------------------------------------------------------------\n\n/**\n * Populate the projectSlug field on search results by looking up project IDs\n * in the registry database.\n */\nexport function populateSlugs(\n results: SearchResult[],\n registryDb: Database,\n): SearchResult[] {\n if (results.length === 0) return results;\n\n const ids = [...new Set(results.map((r) => r.projectId))];\n const placeholders = ids.map(() => \"?\").join(\", \");\n const rows = registryDb\n .prepare(`SELECT id, slug FROM projects WHERE id IN (${placeholders})`)\n .all(...ids) as Array<{ id: number; slug: string }>;\n\n const slugMap = new Map(rows.map((r) => [r.id, r.slug]));\n\n return results.map((r) => ({\n ...r,\n projectSlug: slugMap.get(r.projectId),\n }));\n}\n\n// ---------------------------------------------------------------------------\n// Recency boost\n// ---------------------------------------------------------------------------\n\n/**\n * Apply exponential recency boost to search scores.\n *\n * Scores are first min-max normalized to [0,1], then multiplied by an\n * exponential decay factor based on chunk age. 
Normalization is required\n * because the cross-encoder reranker produces negative logit scores — naive\n * multiplication of a negative score by a decay factor (0 < d ≤ 1) would\n * make the score *less* negative, effectively boosting old results instead\n * of penalizing them.\n *\n * Formula: score_final = normalized * exp(-lambda * age_days)\n * where lambda = ln(2) / halfLifeDays, normalized ∈ [0,1]\n *\n * With default halfLifeDays=90, a 3-month-old chunk retains 50% of its\n * normalized score, a 6-month-old retains 25%, and a 1-year-old ~6%.\n *\n * Results without an updatedAt timestamp receive no decay penalty.\n * Results are re-sorted by the boosted score after application.\n *\n * @param results Search results with optional updatedAt timestamps.\n * @param halfLifeDays Score halves every N days. Default 90 (~3 months).\n * @returns New array sorted by decayed normalized score (descending).\n */\nexport function applyRecencyBoost(\n results: SearchResult[],\n halfLifeDays = 90,\n): SearchResult[] {\n if (halfLifeDays <= 0 || results.length === 0) return results;\n\n const lambda = Math.LN2 / halfLifeDays;\n const now = Date.now();\n\n // Min-max normalize scores to [0,1] so multiplicative decay works\n // correctly regardless of the raw score sign/scale.\n const scores = results.map((r) => r.score);\n const minScore = Math.min(...scores);\n const maxScore = Math.max(...scores);\n const range = maxScore - minScore;\n\n return results\n .map((r) => {\n const normalized = range === 0 ? 1 : (r.score - minScore) / range;\n const decay = r.updatedAt\n ? 
Math.exp(-lambda * Math.max(0, (now - r.updatedAt) / 86_400_000))\n : 1; // no timestamp → no penalty\n return { ...r, score: normalized * decay };\n })\n .sort((a, b) => b.score - a.score);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAsEA,SAAgB,cAAc,OAAuB;CACnD,MAAM,SAAS,MACZ,aAAa,CACb,MAAM,cAAc,CACpB,OAAO,QAAQ,CACf,QAAQ,MAAM,EAAE,UAAU,EAAE,CAC5B,QAAQ,MAAM,CAAC,WAAW,IAAI,EAAE,CAAC,CAEjC,KAAK,MAAM,IAAI,EAAE,QAAQ,MAAM,OAAK,CAAC,GAAG;AAE3C,KAAI,OAAO,WAAW,EAEpB,QAAO,IAAI,MAAM,QAAQ,MAAM,OAAK,CAAC;AAGvC,QAAO,OAAO,KAAK,OAAO;;;;;;;;;;;;;;AAmB5B,SAAgB,aACd,IACA,OACA,MACgB;CAChB,MAAM,aAAa,MAAM,cAAc;CACvC,MAAM,WAAW,cAAc,MAAM;CAGrC,MAAM,aAAuB,EAAE;CAC/B,MAAM,SAA8B,CAAC,SAAS;AAE9C,KAAI,MAAM,cAAc,KAAK,WAAW,SAAS,GAAG;EAClD,MAAM,eAAe,KAAK,WAAW,UAAU,IAAI,CAAC,KAAK,KAAK;AAC9D,aAAW,KAAK,oBAAoB,aAAa,GAAG;AACpD,SAAO,KAAK,GAAG,KAAK,WAAW;;AAGjC,KAAI,MAAM,WAAW,KAAK,QAAQ,SAAS,GAAG;EAC5C,MAAM,eAAe,KAAK,QAAQ,UAAU,IAAI,CAAC,KAAK,KAAK;AAC3D,aAAW,KAAK,gBAAgB,aAAa,GAAG;AAChD,SAAO,KAAK,GAAG,KAAK,QAAQ;;AAG9B,KAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;EACxC,MAAM,eAAe,KAAK,MAAM,UAAU,IAAI,CAAC,KAAK,KAAK;AACzD,aAAW,KAAK,cAAc,aAAa,GAAG;AAC9C,SAAO,KAAK,GAAG,KAAK,MAAM;;CAG5B,MAAM,cAAc,WAAW,SAAS,IACpC,SAAS,WAAW,KAAK,QAAQ,GACjC;AAEJ,QAAO,KAAK,WAAW;CAIvB,MAAM,MAAM;;;;;;;;;;;;;;QAcN,YAAY;;;;CAKlB,IAAI;AAYJ,KAAI;AACF,SAAO,GAAG,QAAQ,IAAI,CAAC,IAAI,GAAG,OAAO;SAC/B;AAEN,SAAO,EAAE;;CAGX,MAAM,WAAW,MAAM,YAAY;AAEnC,QAAO,KACJ,KAAK,SAAS;EACb,WAAW,IAAI;EACf,MAAM,IAAI;EACV,WAAW,IAAI;EACf,SAAS,IAAI;EACb,SAAS,IAAI;EAEb,OAAO,CAAC,IAAI;EACZ,MAAM,IAAI;EACV,QAAQ,IAAI;EACZ,WAAW,IAAI;EAChB,EAAE,CACF,QAAQ,MAAM,EAAE,SAAS,SAAS;;;;;;;;;;;AAgBvC,SAAgB,qBACd,IACA,gBACA,MACgB;CAChB,MAAM,aAAa,MAAM,cAAc;CAGvC,MAAM,aAAuB,CAAC,wBAAwB;CACtD,MAAM,SAA8B,EAAE;AAEtC,KAAI,MAAM,cAAc,KAAK,WAAW,SAAS,GAAG;EAClD,MAAM,eAAe,KAAK,WAAW,UAAU,IAAI,CAAC,KAAK,KAAK;AAC9D,aAAW,KAAK,kBAAkB,aAAa,GAAG;AAClD,SAAO,KAAK,GAAG,KAAK,WAAW;;AAGjC,KAAI,MAAM,WAAW,KAAK,QAAQ,SAAS,GAAG;EAC5C,MAAM,eAAe,KAAK,QAAQ,UAAU,IAAI,CAAC,KAAK,KAAK;AAC3D,aAAW,KAAK,cAAc,aAAa,GAAG;AAC9C,SAA
O,KAAK,GAAG,KAAK,QAAQ;;AAG9B,KAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;EACxC,MAAM,eAAe,KAAK,MAAM,UAAU,IAAI,CAAC,KAAK,KAAK;AACzD,aAAW,KAAK,YAAY,aAAa,GAAG;AAC5C,SAAO,KAAK,GAAG,KAAK,MAAM;;CAO5B,MAAM,MAAM;;;MAJE,WAAW,WAAW,KAAK,QAAQ,CAOvC;;;CAIV,MAAM,OAAO,GAAG,QAAQ,IAAI,CAAC,IAAI,GAAG,OAAO;AAa3C,KAAI,KAAK,WAAW,EAAG,QAAO,EAAE;CAGhC,MAAM,SAAS,KAAK,KAAK,QAAQ;EAE/B,MAAM,QAAQ,iBAAiB,gBADnB,qBAAqB,IAAI,UAAU,CACI;AACnD,SAAO;GACL,WAAW,IAAI;GACf,MAAM,IAAI;GACV,WAAW,IAAI;GACf,SAAS,IAAI;GACb,SAAS,IAAI;GACb;GACA,MAAM,IAAI;GACV,QAAQ,IAAI;GACZ,WAAW,IAAI;GAChB;GACD;CAGF,MAAM,WAAW,MAAM,YAAY;AAEnC,QAAO,OACJ,QAAQ,MAAM,EAAE,SAAS,SAAS,CAClC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM,CACjC,MAAM,GAAG,WAAW;;;;;;;;;;;;AAiBzB,SAAgB,mBACd,IACA,OACA,gBACA,MACgB;CAChB,MAAM,aAAa,MAAM,cAAc;CACvC,MAAM,KAAK,MAAM,iBAAiB;CAClC,MAAM,KAAK,MAAM,kBAAkB;CAGnC,MAAM,iBAAiB,aAAa,IAAI,OAAO;EAC7C,GAAG;EACH,YAAY;EACb,CAAC;CAGF,MAAM,kBAAkB,qBAAqB,IAAI,gBAAgB;EAC/D,GAAG;EACH,YAAY;EACb,CAAC;AAEF,KAAI,eAAe,WAAW,KAAK,gBAAgB,WAAW,EAAG,QAAO,EAAE;CAI1E,MAAM,UAAU,MACd,GAAG,EAAE,UAAU,GAAG,EAAE,KAAK,GAAG,EAAE,UAAU,GAAG,EAAE;CAG/C,SAAS,gBAAgB,OAA4C;AACnE,MAAI,MAAM,WAAW,EAAG,wBAAO,IAAI,KAAK;EACxC,MAAM,MAAM,KAAK,IAAI,GAAG,MAAM,KAAK,MAAM,EAAE,MAAM,CAAC;EAElD,MAAM,QADM,KAAK,IAAI,GAAG,MAAM,KAAK,MAAM,EAAE,MAAM,CAAC,GAC9B;EACpB,MAAM,oBAAI,IAAI,KAAqB;AACnC,OAAK,MAAM,KAAK,MACd,GAAE,IAAI,OAAO,EAAE,EAAE,UAAU,IAAI,KAAK,EAAE,QAAQ,OAAO,MAAM;AAE7D,SAAO;;CAGT,MAAM,SAAS,gBAAgB,eAAe;CAC9C,MAAM,UAAU,gBAAgB,gBAAgB;CAGhD,MAAM,UAAU,IAAI,IAAY,CAC9B,GAAG,eAAe,IAAI,OAAO,EAC7B,GAAG,gBAAgB,IAAI,OAAO,CAC/B,CAAC;CAGF,MAAM,0BAAU,IAAI,KAA2B;AAC/C,MAAK,MAAM,KAAK,CAAC,GAAG,gBAAgB,GAAG,gBAAgB,CACrD,SAAQ,IAAI,OAAO,EAAE,EAAE,EAAE;CAI3B,MAAM,WAA4D,EAAE;AACpE,MAAK,MAAM,OAAO,SAAS;EACzB,MAAM,OAAO,QAAQ,IAAI,IAAI;EAC7B,MAAM,UAAU,OAAO,IAAI,IAAI,IAAI;EACnC,MAAM,WAAW,QAAQ,IAAI,IAAI,IAAI;EACrC,MAAM,gBAAgB,KAAK,UAAU,KAAK;AAC1C,WAAS,KAAK;GAAE,GAAG;GAAM,OAAO;GAAe;GAAe,CAAC;;AAIjE,QAAO,SACJ,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM,CACjC,MAAM,GAAG,WAAW,CACpB,KAAK,EAAE,eAAe,SAAS,GAAG,QAAQ,EAAE;;;;;
;AAWjD,SAAgB,cACd,SACA,YACgB;AAChB,KAAI,QAAQ,WAAW,EAAG,QAAO;CAEjC,MAAM,MAAM,CAAC,GAAG,IAAI,IAAI,QAAQ,KAAK,MAAM,EAAE,UAAU,CAAC,CAAC;CACzD,MAAM,eAAe,IAAI,UAAU,IAAI,CAAC,KAAK,KAAK;CAClD,MAAM,OAAO,WACV,QAAQ,8CAA8C,aAAa,GAAG,CACtE,IAAI,GAAG,IAAI;CAEd,MAAM,UAAU,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,EAAE,IAAI,EAAE,KAAK,CAAC,CAAC;AAExD,QAAO,QAAQ,KAAK,OAAO;EACzB,GAAG;EACH,aAAa,QAAQ,IAAI,EAAE,UAAU;EACtC,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;AA8BL,SAAgB,kBACd,SACA,eAAe,IACC;AAChB,KAAI,gBAAgB,KAAK,QAAQ,WAAW,EAAG,QAAO;CAEtD,MAAM,SAAS,KAAK,MAAM;CAC1B,MAAM,MAAM,KAAK,KAAK;CAItB,MAAM,SAAS,QAAQ,KAAK,MAAM,EAAE,MAAM;CAC1C,MAAM,WAAW,KAAK,IAAI,GAAG,OAAO;CAEpC,MAAM,QADW,KAAK,IAAI,GAAG,OAAO,GACX;AAEzB,QAAO,QACJ,KAAK,MAAM;EACV,MAAM,aAAa,UAAU,IAAI,KAAK,EAAE,QAAQ,YAAY;EAC5D,MAAM,QAAQ,EAAE,YACZ,KAAK,IAAI,CAAC,SAAS,KAAK,IAAI,IAAI,MAAM,EAAE,aAAa,MAAW,CAAC,GACjE;AACJ,SAAO;GAAE,GAAG;GAAG,OAAO,aAAa;GAAO;GAC1C,CACD,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM"}
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import "./embeddings-DGRAPAYb.mjs";
|
|
2
|
-
import { a as searchMemorySemantic, r as searchMemory } from "./search-
|
|
2
|
+
import { a as searchMemorySemantic, r as searchMemory } from "./search-DC1qhkKn.mjs";
|
|
3
3
|
|
|
4
4
|
//#region src/storage/sqlite.ts
|
|
5
5
|
var SQLiteBackend = class {
|
|
@@ -89,7 +89,23 @@ var SQLiteBackend = class {
|
|
|
89
89
|
params.push(projectId);
|
|
90
90
|
}
|
|
91
91
|
const where = "WHERE " + conditions.join(" AND ");
|
|
92
|
-
return this.db.prepare(`SELECT id, text FROM memory_chunks ${where}
|
|
92
|
+
return this.db.prepare(`SELECT id, text, project_id, path FROM memory_chunks ${where}
|
|
93
|
+
ORDER BY CASE
|
|
94
|
+
WHEN path LIKE '🧠 Ideaverse/%' THEN 0
|
|
95
|
+
WHEN path LIKE 'Z - Zettelkasten/%' THEN 0
|
|
96
|
+
WHEN path LIKE '💼 Business/%' THEN 0
|
|
97
|
+
WHEN path LIKE '📆 Meetings/%' THEN 1
|
|
98
|
+
WHEN path LIKE '💡 Insights/%' THEN 1
|
|
99
|
+
WHEN path LIKE '👨💻 People/%' THEN 1
|
|
100
|
+
WHEN path LIKE 'University/%' THEN 1
|
|
101
|
+
WHEN path LIKE 'Copilot/%' THEN 1
|
|
102
|
+
WHEN path LIKE '🗓️ Daily Notes/%' THEN 2
|
|
103
|
+
WHEN path LIKE 'PAI/%' THEN 3
|
|
104
|
+
WHEN path LIKE '09-job-search/%' THEN 4
|
|
105
|
+
WHEN path LIKE 'seriousletter/%' THEN 4
|
|
106
|
+
WHEN path LIKE 'Attachments/%' THEN 5
|
|
107
|
+
ELSE 2
|
|
108
|
+
END, id`).all(...params);
|
|
93
109
|
}
|
|
94
110
|
async updateEmbedding(chunkId, embedding) {
|
|
95
111
|
this.db.prepare("UPDATE memory_chunks SET embedding = ? WHERE id = ?").run(embedding, chunkId);
|
|
@@ -100,8 +116,149 @@ var SQLiteBackend = class {
|
|
|
100
116
|
async searchSemantic(queryEmbedding, opts) {
|
|
101
117
|
return searchMemorySemantic(this.db, queryEmbedding, opts);
|
|
102
118
|
}
|
|
119
|
+
vaultNotSupported() {
|
|
120
|
+
throw new Error("Vault operations require the Postgres backend");
|
|
121
|
+
}
|
|
122
|
+
async upsertVaultFile() {
|
|
123
|
+
this.vaultNotSupported();
|
|
124
|
+
}
|
|
125
|
+
async deleteVaultFile() {
|
|
126
|
+
this.vaultNotSupported();
|
|
127
|
+
}
|
|
128
|
+
async getVaultFile() {
|
|
129
|
+
this.vaultNotSupported();
|
|
130
|
+
}
|
|
131
|
+
async getVaultFileByInode() {
|
|
132
|
+
this.vaultNotSupported();
|
|
133
|
+
}
|
|
134
|
+
async getAllVaultFiles() {
|
|
135
|
+
this.vaultNotSupported();
|
|
136
|
+
}
|
|
137
|
+
async getRecentVaultFiles() {
|
|
138
|
+
this.vaultNotSupported();
|
|
139
|
+
}
|
|
140
|
+
async countVaultFiles() {
|
|
141
|
+
this.vaultNotSupported();
|
|
142
|
+
}
|
|
143
|
+
async upsertVaultAliases() {
|
|
144
|
+
this.vaultNotSupported();
|
|
145
|
+
}
|
|
146
|
+
async deleteVaultAliases() {
|
|
147
|
+
this.vaultNotSupported();
|
|
148
|
+
}
|
|
149
|
+
async replaceLinksForSources() {
|
|
150
|
+
this.vaultNotSupported();
|
|
151
|
+
}
|
|
152
|
+
async getLinksFromSource() {
|
|
153
|
+
this.vaultNotSupported();
|
|
154
|
+
}
|
|
155
|
+
async getLinksToTarget() {
|
|
156
|
+
this.vaultNotSupported();
|
|
157
|
+
}
|
|
158
|
+
async getVaultLinkGraph() {
|
|
159
|
+
this.vaultNotSupported();
|
|
160
|
+
}
|
|
161
|
+
async upsertVaultHealth() {
|
|
162
|
+
this.vaultNotSupported();
|
|
163
|
+
}
|
|
164
|
+
async getVaultHealth() {
|
|
165
|
+
this.vaultNotSupported();
|
|
166
|
+
}
|
|
167
|
+
async getOrphans() {
|
|
168
|
+
this.vaultNotSupported();
|
|
169
|
+
}
|
|
170
|
+
async getDeadLinks() {
|
|
171
|
+
this.vaultNotSupported();
|
|
172
|
+
}
|
|
173
|
+
async upsertNameIndex() {
|
|
174
|
+
this.vaultNotSupported();
|
|
175
|
+
}
|
|
176
|
+
async replaceNameIndex() {
|
|
177
|
+
this.vaultNotSupported();
|
|
178
|
+
}
|
|
179
|
+
async resolveVaultName() {
|
|
180
|
+
this.vaultNotSupported();
|
|
181
|
+
}
|
|
182
|
+
async searchVaultNameIndex() {
|
|
183
|
+
this.vaultNotSupported();
|
|
184
|
+
}
|
|
185
|
+
async getVaultFilesByPaths() {
|
|
186
|
+
this.vaultNotSupported();
|
|
187
|
+
}
|
|
188
|
+
async getVaultFilesByPathsAfter() {
|
|
189
|
+
this.vaultNotSupported();
|
|
190
|
+
}
|
|
191
|
+
async getVaultLinksFromPaths() {
|
|
192
|
+
this.vaultNotSupported();
|
|
193
|
+
}
|
|
194
|
+
async getChunksWithEmbeddings() {
|
|
195
|
+
this.vaultNotSupported();
|
|
196
|
+
}
|
|
197
|
+
async getChunksForPath() {
|
|
198
|
+
this.vaultNotSupported();
|
|
199
|
+
}
|
|
200
|
+
async searchChunksByText() {
|
|
201
|
+
this.vaultNotSupported();
|
|
202
|
+
}
|
|
203
|
+
async countVaultFilesWithPrefix() {
|
|
204
|
+
this.vaultNotSupported();
|
|
205
|
+
}
|
|
206
|
+
async countVaultFilesAfter() {
|
|
207
|
+
this.vaultNotSupported();
|
|
208
|
+
}
|
|
209
|
+
async countVaultLinksWithPrefix() {
|
|
210
|
+
this.vaultNotSupported();
|
|
211
|
+
}
|
|
212
|
+
async countVaultLinksAfter() {
|
|
213
|
+
this.vaultNotSupported();
|
|
214
|
+
}
|
|
215
|
+
async getDeadLinksWithLineNumbers() {
|
|
216
|
+
this.vaultNotSupported();
|
|
217
|
+
}
|
|
218
|
+
async getDeadLinksWithPrefix() {
|
|
219
|
+
this.vaultNotSupported();
|
|
220
|
+
}
|
|
221
|
+
async getDeadLinksAfter() {
|
|
222
|
+
this.vaultNotSupported();
|
|
223
|
+
}
|
|
224
|
+
async getOrphansWithPrefix() {
|
|
225
|
+
this.vaultNotSupported();
|
|
226
|
+
}
|
|
227
|
+
async getOrphansAfter() {
|
|
228
|
+
this.vaultNotSupported();
|
|
229
|
+
}
|
|
230
|
+
async getLowConnectivity() {
|
|
231
|
+
this.vaultNotSupported();
|
|
232
|
+
}
|
|
233
|
+
async getLowConnectivityWithPrefix() {
|
|
234
|
+
this.vaultNotSupported();
|
|
235
|
+
}
|
|
236
|
+
async getLowConnectivityAfter() {
|
|
237
|
+
this.vaultNotSupported();
|
|
238
|
+
}
|
|
239
|
+
async getAllVaultFilePaths() {
|
|
240
|
+
this.vaultNotSupported();
|
|
241
|
+
}
|
|
242
|
+
async getVaultFilePathsWithPrefix() {
|
|
243
|
+
this.vaultNotSupported();
|
|
244
|
+
}
|
|
245
|
+
async getVaultFilePathsAfter() {
|
|
246
|
+
this.vaultNotSupported();
|
|
247
|
+
}
|
|
248
|
+
async getVaultLinkEdges() {
|
|
249
|
+
this.vaultNotSupported();
|
|
250
|
+
}
|
|
251
|
+
async getVaultLinkEdgesWithPrefix() {
|
|
252
|
+
this.vaultNotSupported();
|
|
253
|
+
}
|
|
254
|
+
async getVaultLinkEdgesAfter() {
|
|
255
|
+
this.vaultNotSupported();
|
|
256
|
+
}
|
|
257
|
+
async getVaultAlias() {
|
|
258
|
+
this.vaultNotSupported();
|
|
259
|
+
}
|
|
103
260
|
};
|
|
104
261
|
|
|
105
262
|
//#endregion
|
|
106
263
|
export { SQLiteBackend };
|
|
107
|
-
//# sourceMappingURL=sqlite-
|
|
264
|
+
//# sourceMappingURL=sqlite-l-s9xPjY.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sqlite-l-s9xPjY.mjs","names":[],"sources":["../src/storage/sqlite.ts"],"sourcesContent":["/**\n * SQLiteBackend — wraps the existing better-sqlite3 federation.db\n * behind the StorageBackend interface.\n *\n * This is a thin adapter. The heavy lifting is all in the existing\n * memory/indexer.ts and memory/search.ts code; we just provide a\n * backend-agnostic surface so the daemon and tools can call either\n * SQLite or Postgres transparently.\n */\n\nimport type { Database } from \"better-sqlite3\";\nimport type { StorageBackend, ChunkRow, FileRow, FederationStats } from \"./interface.js\";\nimport type { SearchResult, SearchOptions } from \"../memory/search.js\";\nimport { searchMemory, searchMemorySemantic } from \"../memory/search.js\";\n\nexport class SQLiteBackend implements StorageBackend {\n readonly backendType = \"sqlite\" as const;\n\n private db: Database;\n\n constructor(db: Database) {\n this.db = db;\n }\n\n /**\n * Expose the raw better-sqlite3 Database handle.\n * Used by the daemon to pass to indexAll() which still uses the synchronous API directly.\n */\n getRawDb(): Database {\n return this.db;\n }\n\n // -------------------------------------------------------------------------\n // Lifecycle\n // -------------------------------------------------------------------------\n\n async close(): Promise<void> {\n try {\n this.db.close();\n } catch {\n // ignore\n }\n }\n\n async getStats(): Promise<FederationStats> {\n const files = (\n this.db.prepare(\"SELECT COUNT(*) AS n FROM memory_files\").get() as { n: number }\n ).n;\n const chunks = (\n this.db.prepare(\"SELECT COUNT(*) AS n FROM memory_chunks\").get() as { n: number }\n ).n;\n return { files, chunks };\n }\n\n // -------------------------------------------------------------------------\n // File tracking\n // -------------------------------------------------------------------------\n\n async getFileHash(projectId: number, path: string): Promise<string | undefined> {\n 
const row = this.db\n .prepare(\"SELECT hash FROM memory_files WHERE project_id = ? AND path = ?\")\n .get(projectId, path) as { hash: string } | undefined;\n return row?.hash;\n }\n\n async upsertFile(file: FileRow): Promise<void> {\n this.db\n .prepare(\n `INSERT INTO memory_files (project_id, path, source, tier, hash, mtime, size)\n VALUES (?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT(project_id, path) DO UPDATE SET\n source = excluded.source,\n tier = excluded.tier,\n hash = excluded.hash,\n mtime = excluded.mtime,\n size = excluded.size`\n )\n .run(file.projectId, file.path, file.source, file.tier, file.hash, file.mtime, file.size);\n }\n\n // -------------------------------------------------------------------------\n // Chunk management\n // -------------------------------------------------------------------------\n\n async getChunkIds(projectId: number, path: string): Promise<string[]> {\n const rows = this.db\n .prepare(\"SELECT id FROM memory_chunks WHERE project_id = ? AND path = ?\")\n .all(projectId, path) as Array<{ id: string }>;\n return rows.map((r) => r.id);\n }\n\n async deleteChunksForFile(projectId: number, path: string): Promise<void> {\n const ids = await this.getChunkIds(projectId, path);\n const deleteFts = this.db.prepare(\"DELETE FROM memory_fts WHERE id = ?\");\n const deleteChunks = this.db.prepare(\n \"DELETE FROM memory_chunks WHERE project_id = ? 
AND path = ?\"\n );\n this.db.transaction(() => {\n for (const id of ids) {\n deleteFts.run(id);\n }\n deleteChunks.run(projectId, path);\n })();\n }\n\n async insertChunks(chunks: ChunkRow[]): Promise<void> {\n if (chunks.length === 0) return;\n\n const insertChunk = this.db.prepare(\n `INSERT INTO memory_chunks (id, project_id, source, tier, path, start_line, end_line, hash, text, updated_at)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`\n );\n const insertFts = this.db.prepare(\n `INSERT INTO memory_fts (text, id, project_id, path, source, tier, start_line, end_line)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?)`\n );\n\n this.db.transaction(() => {\n for (const c of chunks) {\n insertChunk.run(\n c.id,\n c.projectId,\n c.source,\n c.tier,\n c.path,\n c.startLine,\n c.endLine,\n c.hash,\n c.text,\n c.updatedAt\n );\n insertFts.run(\n c.text,\n c.id,\n c.projectId,\n c.path,\n c.source,\n c.tier,\n c.startLine,\n c.endLine\n );\n }\n })();\n }\n\n async getDistinctChunkPaths(projectId: number): Promise<string[]> {\n const rows = this.db\n .prepare(\"SELECT DISTINCT path FROM memory_chunks WHERE project_id = ?\")\n .all(projectId) as Array<{ path: string }>;\n return rows.map((r) => r.path);\n }\n\n async deletePaths(projectId: number, paths: string[]): Promise<void> {\n if (paths.length === 0) return;\n const deleteFts = this.db.prepare(\"DELETE FROM memory_fts WHERE id = ?\");\n const deleteChunks = this.db.prepare(\n \"DELETE FROM memory_chunks WHERE project_id = ? AND path = ?\"\n );\n const deleteFile = this.db.prepare(\n \"DELETE FROM memory_files WHERE project_id = ? AND path = ?\"\n );\n this.db.transaction(() => {\n for (const path of paths) {\n const ids = this.db\n .prepare(\"SELECT id FROM memory_chunks WHERE project_id = ? 
AND path = ?\")\n .all(projectId, path) as Array<{ id: string }>;\n for (const { id } of ids) {\n deleteFts.run(id);\n }\n deleteChunks.run(projectId, path);\n deleteFile.run(projectId, path);\n }\n })();\n }\n\n async getUnembeddedChunkIds(projectId?: number): Promise<Array<{ id: string; text: string; project_id: number; path: string }>> {\n const conditions = [\"embedding IS NULL\"];\n const params: (string | number)[] = [];\n\n if (projectId !== undefined) {\n conditions.push(\"project_id = ?\");\n params.push(projectId);\n }\n\n const where = \"WHERE \" + conditions.join(\" AND \");\n // Prioritize real knowledge notes over PAI session/job-search noise.\n // CASE expression assigns lower priority numbers to knowledge paths.\n const rows = this.db\n .prepare(`SELECT id, text, project_id, path FROM memory_chunks ${where}\n ORDER BY CASE\n WHEN path LIKE '🧠 Ideaverse/%' THEN 0\n WHEN path LIKE 'Z - Zettelkasten/%' THEN 0\n WHEN path LIKE '💼 Business/%' THEN 0\n WHEN path LIKE '📆 Meetings/%' THEN 1\n WHEN path LIKE '💡 Insights/%' THEN 1\n WHEN path LIKE '👨💻 People/%' THEN 1\n WHEN path LIKE 'University/%' THEN 1\n WHEN path LIKE 'Copilot/%' THEN 1\n WHEN path LIKE '🗓️ Daily Notes/%' THEN 2\n WHEN path LIKE 'PAI/%' THEN 3\n WHEN path LIKE '09-job-search/%' THEN 4\n WHEN path LIKE 'seriousletter/%' THEN 4\n WHEN path LIKE 'Attachments/%' THEN 5\n ELSE 2\n END, id`)\n .all(...params) as Array<{ id: string; text: string; project_id: number; path: string }>;\n return rows;\n }\n\n async updateEmbedding(chunkId: string, embedding: Buffer): Promise<void> {\n this.db\n .prepare(\"UPDATE memory_chunks SET embedding = ? 
WHERE id = ?\")\n .run(embedding, chunkId);\n }\n\n // -------------------------------------------------------------------------\n // Search\n // -------------------------------------------------------------------------\n\n async searchKeyword(query: string, opts?: SearchOptions): Promise<SearchResult[]> {\n return searchMemory(this.db, query, opts);\n }\n\n async searchSemantic(queryEmbedding: Float32Array, opts?: SearchOptions): Promise<SearchResult[]> {\n return searchMemorySemantic(this.db, queryEmbedding, opts);\n }\n\n // -------------------------------------------------------------------------\n // Vault operations — not supported on SQLite backend (use Postgres)\n // -------------------------------------------------------------------------\n\n private vaultNotSupported(): never {\n throw new Error(\"Vault operations require the Postgres backend\");\n }\n\n async upsertVaultFile(): Promise<void> { this.vaultNotSupported(); }\n async deleteVaultFile(): Promise<void> { this.vaultNotSupported(); }\n async getVaultFile(): Promise<null> { this.vaultNotSupported(); }\n async getVaultFileByInode(): Promise<null> { this.vaultNotSupported(); }\n async getAllVaultFiles(): Promise<never[]> { this.vaultNotSupported(); }\n async getRecentVaultFiles(): Promise<never[]> { this.vaultNotSupported(); }\n async countVaultFiles(): Promise<number> { this.vaultNotSupported(); }\n async upsertVaultAliases(): Promise<void> { this.vaultNotSupported(); }\n async deleteVaultAliases(): Promise<void> { this.vaultNotSupported(); }\n async replaceLinksForSources(): Promise<void> { this.vaultNotSupported(); }\n async getLinksFromSource(): Promise<never[]> { this.vaultNotSupported(); }\n async getLinksToTarget(): Promise<never[]> { this.vaultNotSupported(); }\n async getVaultLinkGraph(): Promise<never[]> { this.vaultNotSupported(); }\n async upsertVaultHealth(): Promise<void> { this.vaultNotSupported(); }\n async getVaultHealth(): Promise<null> { this.vaultNotSupported(); }\n async 
getOrphans(): Promise<never[]> { this.vaultNotSupported(); }\n async getDeadLinks(): Promise<never[]> { this.vaultNotSupported(); }\n async upsertNameIndex(): Promise<void> { this.vaultNotSupported(); }\n async replaceNameIndex(): Promise<void> { this.vaultNotSupported(); }\n async resolveVaultName(): Promise<never[]> { this.vaultNotSupported(); }\n async searchVaultNameIndex(): Promise<never[]> { this.vaultNotSupported(); }\n async getVaultFilesByPaths(): Promise<never[]> { this.vaultNotSupported(); }\n async getVaultFilesByPathsAfter(): Promise<never[]> { this.vaultNotSupported(); }\n async getVaultLinksFromPaths(): Promise<never[]> { this.vaultNotSupported(); }\n async getChunksWithEmbeddings(): Promise<never[]> { this.vaultNotSupported(); }\n async getChunksForPath(): Promise<never[]> { this.vaultNotSupported(); }\n async searchChunksByText(): Promise<never[]> { this.vaultNotSupported(); }\n async countVaultFilesWithPrefix(): Promise<number> { this.vaultNotSupported(); }\n async countVaultFilesAfter(): Promise<number> { this.vaultNotSupported(); }\n async countVaultLinksWithPrefix(): Promise<number> { this.vaultNotSupported(); }\n async countVaultLinksAfter(): Promise<number> { this.vaultNotSupported(); }\n async getDeadLinksWithLineNumbers(): Promise<never[]> { this.vaultNotSupported(); }\n async getDeadLinksWithPrefix(): Promise<never[]> { this.vaultNotSupported(); }\n async getDeadLinksAfter(): Promise<never[]> { this.vaultNotSupported(); }\n async getOrphansWithPrefix(): Promise<never[]> { this.vaultNotSupported(); }\n async getOrphansAfter(): Promise<never[]> { this.vaultNotSupported(); }\n async getLowConnectivity(): Promise<never[]> { this.vaultNotSupported(); }\n async getLowConnectivityWithPrefix(): Promise<never[]> { this.vaultNotSupported(); }\n async getLowConnectivityAfter(): Promise<never[]> { this.vaultNotSupported(); }\n async getAllVaultFilePaths(): Promise<never[]> { this.vaultNotSupported(); }\n async getVaultFilePathsWithPrefix(): 
Promise<never[]> { this.vaultNotSupported(); }\n async getVaultFilePathsAfter(): Promise<never[]> { this.vaultNotSupported(); }\n async getVaultLinkEdges(): Promise<never[]> { this.vaultNotSupported(); }\n async getVaultLinkEdgesWithPrefix(): Promise<never[]> { this.vaultNotSupported(); }\n async getVaultLinkEdgesAfter(): Promise<never[]> { this.vaultNotSupported(); }\n async getVaultAlias(): Promise<null> { this.vaultNotSupported(); }\n}\n"],"mappings":";;;;AAeA,IAAa,gBAAb,MAAqD;CACnD,AAAS,cAAc;CAEvB,AAAQ;CAER,YAAY,IAAc;AACxB,OAAK,KAAK;;;;;;CAOZ,WAAqB;AACnB,SAAO,KAAK;;CAOd,MAAM,QAAuB;AAC3B,MAAI;AACF,QAAK,GAAG,OAAO;UACT;;CAKV,MAAM,WAAqC;AAOzC,SAAO;GAAE,OALP,KAAK,GAAG,QAAQ,yCAAyC,CAAC,KAAK,CAC/D;GAIc,QAFd,KAAK,GAAG,QAAQ,0CAA0C,CAAC,KAAK,CAChE;GACsB;;CAO1B,MAAM,YAAY,WAAmB,MAA2C;AAI9E,SAHY,KAAK,GACd,QAAQ,kEAAkE,CAC1E,IAAI,WAAW,KAAK,EACX;;CAGd,MAAM,WAAW,MAA8B;AAC7C,OAAK,GACF,QACC;;;;;;;mCAQD,CACA,IAAI,KAAK,WAAW,KAAK,MAAM,KAAK,QAAQ,KAAK,MAAM,KAAK,MAAM,KAAK,OAAO,KAAK,KAAK;;CAO7F,MAAM,YAAY,WAAmB,MAAiC;AAIpE,SAHa,KAAK,GACf,QAAQ,iEAAiE,CACzE,IAAI,WAAW,KAAK,CACX,KAAK,MAAM,EAAE,GAAG;;CAG9B,MAAM,oBAAoB,WAAmB,MAA6B;EACxE,MAAM,MAAM,MAAM,KAAK,YAAY,WAAW,KAAK;EACnD,MAAM,YAAY,KAAK,GAAG,QAAQ,sCAAsC;EACxE,MAAM,eAAe,KAAK,GAAG,QAC3B,8DACD;AACD,OAAK,GAAG,kBAAkB;AACxB,QAAK,MAAM,MAAM,IACf,WAAU,IAAI,GAAG;AAEnB,gBAAa,IAAI,WAAW,KAAK;IACjC,EAAE;;CAGN,MAAM,aAAa,QAAmC;AACpD,MAAI,OAAO,WAAW,EAAG;EAEzB,MAAM,cAAc,KAAK,GAAG,QAC1B;8CAED;EACD,MAAM,YAAY,KAAK,GAAG,QACxB;wCAED;AAED,OAAK,GAAG,kBAAkB;AACxB,QAAK,MAAM,KAAK,QAAQ;AACtB,gBAAY,IACV,EAAE,IACF,EAAE,WACF,EAAE,QACF,EAAE,MACF,EAAE,MACF,EAAE,WACF,EAAE,SACF,EAAE,MACF,EAAE,MACF,EAAE,UACH;AACD,cAAU,IACR,EAAE,MACF,EAAE,IACF,EAAE,WACF,EAAE,MACF,EAAE,QACF,EAAE,MACF,EAAE,WACF,EAAE,QACH;;IAEH,EAAE;;CAGN,MAAM,sBAAsB,WAAsC;AAIhE,SAHa,KAAK,GACf,QAAQ,+DAA+D,CACvE,IAAI,UAAU,CACL,KAAK,MAAM,EAAE,KAAK;;CAGhC,MAAM,YAAY,WAAmB,OAAgC;AACnE,MAAI,MAAM,WAAW,EAAG;EACxB,MAAM,YAAY,KAAK,GAAG,QAAQ,sCAAsC;EACxE,MAAM,eAAe,KAAK,GAAG,QAC3B,8DACD;EACD,MAAM,aAAa,KAAK,GAAG,QACzB,6DACD;AA
CD,OAAK,GAAG,kBAAkB;AACxB,QAAK,MAAM,QAAQ,OAAO;IACxB,MAAM,MAAM,KAAK,GACd,QAAQ,iEAAiE,CACzE,IAAI,WAAW,KAAK;AACvB,SAAK,MAAM,EAAE,QAAQ,IACnB,WAAU,IAAI,GAAG;AAEnB,iBAAa,IAAI,WAAW,KAAK;AACjC,eAAW,IAAI,WAAW,KAAK;;IAEjC,EAAE;;CAGN,MAAM,sBAAsB,WAAoG;EAC9H,MAAM,aAAa,CAAC,oBAAoB;EACxC,MAAM,SAA8B,EAAE;AAEtC,MAAI,cAAc,QAAW;AAC3B,cAAW,KAAK,iBAAiB;AACjC,UAAO,KAAK,UAAU;;EAGxB,MAAM,QAAQ,WAAW,WAAW,KAAK,QAAQ;AAsBjD,SAnBa,KAAK,GACf,QAAQ,wDAAwD,MAAM;;;;;;;;;;;;;;;;iBAgB5D,CACV,IAAI,GAAG,OAAO;;CAInB,MAAM,gBAAgB,SAAiB,WAAkC;AACvE,OAAK,GACF,QAAQ,sDAAsD,CAC9D,IAAI,WAAW,QAAQ;;CAO5B,MAAM,cAAc,OAAe,MAA+C;AAChF,SAAO,aAAa,KAAK,IAAI,OAAO,KAAK;;CAG3C,MAAM,eAAe,gBAA8B,MAA+C;AAChG,SAAO,qBAAqB,KAAK,IAAI,gBAAgB,KAAK;;CAO5D,AAAQ,oBAA2B;AACjC,QAAM,IAAI,MAAM,gDAAgD;;CAGlE,MAAM,kBAAiC;AAAE,OAAK,mBAAmB;;CACjE,MAAM,kBAAiC;AAAE,OAAK,mBAAmB;;CACjE,MAAM,eAA8B;AAAE,OAAK,mBAAmB;;CAC9D,MAAM,sBAAqC;AAAE,OAAK,mBAAmB;;CACrE,MAAM,mBAAqC;AAAE,OAAK,mBAAmB;;CACrE,MAAM,sBAAwC;AAAE,OAAK,mBAAmB;;CACxE,MAAM,kBAAmC;AAAE,OAAK,mBAAmB;;CACnE,MAAM,qBAAoC;AAAE,OAAK,mBAAmB;;CACpE,MAAM,qBAAoC;AAAE,OAAK,mBAAmB;;CACpE,MAAM,yBAAwC;AAAE,OAAK,mBAAmB;;CACxE,MAAM,qBAAuC;AAAE,OAAK,mBAAmB;;CACvE,MAAM,mBAAqC;AAAE,OAAK,mBAAmB;;CACrE,MAAM,oBAAsC;AAAE,OAAK,mBAAmB;;CACtE,MAAM,oBAAmC;AAAE,OAAK,mBAAmB;;CACnE,MAAM,iBAAgC;AAAE,OAAK,mBAAmB;;CAChE,MAAM,aAA+B;AAAE,OAAK,mBAAmB;;CAC/D,MAAM,eAAiC;AAAE,OAAK,mBAAmB;;CACjE,MAAM,kBAAiC;AAAE,OAAK,mBAAmB;;CACjE,MAAM,mBAAkC;AAAE,OAAK,mBAAmB;;CAClE,MAAM,mBAAqC;AAAE,OAAK,mBAAmB;;CACrE,MAAM,uBAAyC;AAAE,OAAK,mBAAmB;;CACzE,MAAM,uBAAyC;AAAE,OAAK,mBAAmB;;CACzE,MAAM,4BAA8C;AAAE,OAAK,mBAAmB;;CAC9E,MAAM,yBAA2C;AAAE,OAAK,mBAAmB;;CAC3E,MAAM,0BAA4C;AAAE,OAAK,mBAAmB;;CAC5E,MAAM,mBAAqC;AAAE,OAAK,mBAAmB;;CACrE,MAAM,qBAAuC;AAAE,OAAK,mBAAmB;;CACvE,MAAM,4BAA6C;AAAE,OAAK,mBAAmB;;CAC7E,MAAM,uBAAwC;AAAE,OAAK,mBAAmB;;CACxE,MAAM,4BAA6C;AAAE,OAAK,mBAAmB;;CAC7E,MAAM,uBAAwC;AAAE,OAAK,mBAAmB;;CACxE,MAAM,8BAAgD;AAAE,OAAK,mBAAmB;;CAChF,MAAM,yBAA2C;AAAE,OAAK,mBAAmB;;CAC3E,MAAM,oBAAsC;AAAE,OAAK,mBAAmB;;CACtE,MAAM,uBAAyC;AAAE,OAAK,mBAAmB;;CACzE
,MAAM,kBAAoC;AAAE,OAAK,mBAAmB;;CACpE,MAAM,qBAAuC;AAAE,OAAK,mBAAmB;;CACvE,MAAM,+BAAiD;AAAE,OAAK,mBAAmB;;CACjF,MAAM,0BAA4C;AAAE,OAAK,mBAAmB;;CAC5E,MAAM,uBAAyC;AAAE,OAAK,mBAAmB;;CACzE,MAAM,8BAAgD;AAAE,OAAK,mBAAmB;;CAChF,MAAM,yBAA2C;AAAE,OAAK,mBAAmB;;CAC3E,MAAM,oBAAsC;AAAE,OAAK,mBAAmB;;CACtE,MAAM,8BAAgD;AAAE,OAAK,mBAAmB;;CAChF,MAAM,yBAA2C;AAAE,OAAK,mBAAmB;;CAC3E,MAAM,gBAA+B;AAAE,OAAK,mBAAmB"}
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
import { t as __exportAll } from "./rolldown-runtime-95iHPtFO.mjs";
|
|
2
|
+
|
|
3
|
+
//#region src/daemon/daemon/state.ts
|
|
4
|
+
// Namespace object for the bundled daemon state module: maps each export name
// to a lazy getter so importers always observe the *current* value of the
// mutable `let` bindings below (preserves ES-module live-binding semantics
// through the rolldown bundle). Generated by the bundler — keys must stay in
// sync with the declarations and setters that follow.
var state_exports = /* @__PURE__ */ __exportAll({
	daemonConfig: () => daemonConfig,
	embedInProgress: () => embedInProgress,
	embedSchedulerTimer: () => embedSchedulerTimer,
	indexInProgress: () => indexInProgress,
	indexSchedulerTimer: () => indexSchedulerTimer,
	lastEmbedTime: () => lastEmbedTime,
	lastIndexTime: () => lastIndexTime,
	lastVaultIndexTime: () => lastVaultIndexTime,
	notificationConfig: () => notificationConfig,
	registryDb: () => registryDb,
	setDaemonConfig: () => setDaemonConfig,
	setEmbedInProgress: () => setEmbedInProgress,
	setEmbedSchedulerTimer: () => setEmbedSchedulerTimer,
	setIndexInProgress: () => setIndexInProgress,
	setIndexSchedulerTimer: () => setIndexSchedulerTimer,
	setLastEmbedTime: () => setLastEmbedTime,
	setLastIndexTime: () => setLastIndexTime,
	setLastVaultIndexTime: () => setLastVaultIndexTime,
	setNotificationConfig: () => setNotificationConfig,
	setRegistryDb: () => setRegistryDb,
	setShutdownRequested: () => setShutdownRequested,
	setStartTime: () => setStartTime,
	setStorageBackend: () => setStorageBackend,
	setVaultIndexInProgress: () => setVaultIndexInProgress,
	shutdownRequested: () => shutdownRequested,
	startTime: () => startTime,
	storageBackend: () => storageBackend,
	vaultIndexInProgress: () => vaultIndexInProgress
});
|
|
34
|
+
// ---------------------------------------------------------------------------
// Mutable daemon module state. Other daemon modules read these bindings
// directly (live ES-module bindings), so values are mutated only through the
// exported set* functions below.
// ---------------------------------------------------------------------------
let registryDb;
let storageBackend;
let daemonConfig;
let startTime = Date.now();

// Project indexing pass: in-progress flag, last completion time, timer handle.
let indexInProgress = false;
let lastIndexTime = 0;
let indexSchedulerTimer = null;

// Embedding pass: in-progress flag, last completion time, timer handle.
let embedInProgress = false;
let lastEmbedTime = 0;
let embedSchedulerTimer = null;

// Vault indexing pass: in-progress flag and last completion time.
let vaultIndexInProgress = false;
let lastVaultIndexTime = 0;

// Notification config — loaded from disk at startup, patchable at runtime.
let notificationConfig;

// Flipped when a SIGTERM/SIGINT arrives so long-running loops (embed, index)
// can notice and exit their inner loops before the pool/backend is closed.
let shutdownRequested = false;

// Setters — the only sanctioned way to update the live bindings above.
function setRegistryDb(value) { registryDb = value; }
function setStorageBackend(value) { storageBackend = value; }
function setDaemonConfig(value) { daemonConfig = value; }
function setStartTime(value) { startTime = value; }
function setNotificationConfig(value) { notificationConfig = value; }
function setShutdownRequested(value) { shutdownRequested = value; }
function setIndexInProgress(value) { indexInProgress = value; }
function setLastIndexTime(value) { lastIndexTime = value; }
function setIndexSchedulerTimer(value) { indexSchedulerTimer = value; }
function setEmbedInProgress(value) { embedInProgress = value; }
function setLastEmbedTime(value) { lastEmbedTime = value; }
function setEmbedSchedulerTimer(value) { embedSchedulerTimer = value; }
function setVaultIndexInProgress(value) { vaultIndexInProgress = value; }
function setLastVaultIndexTime(value) { lastVaultIndexTime = value; }
|
|
99
|
+
|
|
100
|
+
//#endregion
|
|
101
|
+
export { setStorageBackend as C, state_exports as D, startTime as E, storageBackend as O, setStartTime as S, shutdownRequested as T, setLastIndexTime as _, indexSchedulerTimer as a, setRegistryDb as b, lastVaultIndexTime as c, setDaemonConfig as d, setEmbedInProgress as f, setLastEmbedTime as g, setIndexSchedulerTimer as h, indexInProgress as i, vaultIndexInProgress as k, notificationConfig as l, setIndexInProgress as m, embedInProgress as n, lastEmbedTime as o, setEmbedSchedulerTimer as p, embedSchedulerTimer as r, lastIndexTime as s, daemonConfig as t, registryDb as u, setLastVaultIndexTime as v, setVaultIndexInProgress as w, setShutdownRequested as x, setNotificationConfig as y };
|
|
102
|
+
//# sourceMappingURL=state-C6_vqz7w.mjs.map
|