@tekmidian/pai 0.5.7 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (137)
  1. package/ARCHITECTURE.md +72 -1
  2. package/README.md +87 -1
  3. package/dist/{auto-route-BG6I_4B1.mjs → auto-route-C-DrW6BL.mjs} +3 -3
  4. package/dist/{auto-route-BG6I_4B1.mjs.map → auto-route-C-DrW6BL.mjs.map} +1 -1
  5. package/dist/cli/index.mjs +1482 -1628
  6. package/dist/cli/index.mjs.map +1 -1
  7. package/dist/clusters-JIDQW65f.mjs +201 -0
  8. package/dist/clusters-JIDQW65f.mjs.map +1 -0
  9. package/dist/{config-Cf92lGX_.mjs → config-BuhHWyOK.mjs} +21 -6
  10. package/dist/config-BuhHWyOK.mjs.map +1 -0
  11. package/dist/daemon/index.mjs +11 -8
  12. package/dist/daemon/index.mjs.map +1 -1
  13. package/dist/{daemon-2ND5WO2j.mjs → daemon-D3hYb5_C.mjs} +669 -218
  14. package/dist/daemon-D3hYb5_C.mjs.map +1 -0
  15. package/dist/daemon-mcp/index.mjs +4597 -4
  16. package/dist/daemon-mcp/index.mjs.map +1 -1
  17. package/dist/db-DdUperSl.mjs +110 -0
  18. package/dist/db-DdUperSl.mjs.map +1 -0
  19. package/dist/{detect-BU3Nx_2L.mjs → detect-CdaA48EI.mjs} +1 -1
  20. package/dist/{detect-BU3Nx_2L.mjs.map → detect-CdaA48EI.mjs.map} +1 -1
  21. package/dist/{detector-Bp-2SM3x.mjs → detector-jGBuYQJM.mjs} +2 -2
  22. package/dist/{detector-Bp-2SM3x.mjs.map → detector-jGBuYQJM.mjs.map} +1 -1
  23. package/dist/{factory-Bzcy70G9.mjs → factory-Ygqe_bVZ.mjs} +7 -5
  24. package/dist/{factory-Bzcy70G9.mjs.map → factory-Ygqe_bVZ.mjs.map} +1 -1
  25. package/dist/helpers-BEST-4Gx.mjs +420 -0
  26. package/dist/helpers-BEST-4Gx.mjs.map +1 -0
  27. package/dist/hooks/capture-all-events.mjs +2 -2
  28. package/dist/hooks/capture-all-events.mjs.map +3 -3
  29. package/dist/hooks/capture-session-summary.mjs +38 -0
  30. package/dist/hooks/capture-session-summary.mjs.map +3 -3
  31. package/dist/hooks/cleanup-session-files.mjs +6 -12
  32. package/dist/hooks/cleanup-session-files.mjs.map +4 -4
  33. package/dist/hooks/context-compression-hook.mjs +93 -104
  34. package/dist/hooks/context-compression-hook.mjs.map +4 -4
  35. package/dist/hooks/initialize-session.mjs +14 -11
  36. package/dist/hooks/initialize-session.mjs.map +4 -4
  37. package/dist/hooks/inject-observations.mjs +220 -0
  38. package/dist/hooks/inject-observations.mjs.map +7 -0
  39. package/dist/hooks/load-core-context.mjs +2 -2
  40. package/dist/hooks/load-core-context.mjs.map +3 -3
  41. package/dist/hooks/load-project-context.mjs +90 -91
  42. package/dist/hooks/load-project-context.mjs.map +4 -4
  43. package/dist/hooks/observe.mjs +354 -0
  44. package/dist/hooks/observe.mjs.map +7 -0
  45. package/dist/hooks/stop-hook.mjs +94 -107
  46. package/dist/hooks/stop-hook.mjs.map +4 -4
  47. package/dist/hooks/sync-todo-to-md.mjs +31 -33
  48. package/dist/hooks/sync-todo-to-md.mjs.map +4 -4
  49. package/dist/index.d.mts +30 -7
  50. package/dist/index.d.mts.map +1 -1
  51. package/dist/index.mjs +5 -8
  52. package/dist/indexer-D53l5d1U.mjs +1 -0
  53. package/dist/{indexer-backend-CIMXedqk.mjs → indexer-backend-jcJFsmB4.mjs} +37 -127
  54. package/dist/indexer-backend-jcJFsmB4.mjs.map +1 -0
  55. package/dist/{ipc-client-Bjg_a1dc.mjs → ipc-client-CoyUHPod.mjs} +2 -7
  56. package/dist/{ipc-client-Bjg_a1dc.mjs.map → ipc-client-CoyUHPod.mjs.map} +1 -1
  57. package/dist/latent-ideas-bTJo6Omd.mjs +191 -0
  58. package/dist/latent-ideas-bTJo6Omd.mjs.map +1 -0
  59. package/dist/neighborhood-BYYbEkUJ.mjs +135 -0
  60. package/dist/neighborhood-BYYbEkUJ.mjs.map +1 -0
  61. package/dist/note-context-BK24bX8Y.mjs +126 -0
  62. package/dist/note-context-BK24bX8Y.mjs.map +1 -0
  63. package/dist/postgres-CKf-EDtS.mjs +846 -0
  64. package/dist/postgres-CKf-EDtS.mjs.map +1 -0
  65. package/dist/{reranker-D7bRAHi6.mjs → reranker-CMNZcfVx.mjs} +1 -1
  66. package/dist/{reranker-D7bRAHi6.mjs.map → reranker-CMNZcfVx.mjs.map} +1 -1
  67. package/dist/{search-_oHfguA5.mjs → search-DC1qhkKn.mjs} +2 -58
  68. package/dist/search-DC1qhkKn.mjs.map +1 -0
  69. package/dist/{sqlite-WWBq7_2C.mjs → sqlite-l-s9xPjY.mjs} +160 -3
  70. package/dist/sqlite-l-s9xPjY.mjs.map +1 -0
  71. package/dist/state-C6_vqz7w.mjs +102 -0
  72. package/dist/state-C6_vqz7w.mjs.map +1 -0
  73. package/dist/stop-words-BaMEGVeY.mjs +326 -0
  74. package/dist/stop-words-BaMEGVeY.mjs.map +1 -0
  75. package/dist/{indexer-CMPOiY1r.mjs → sync-BOsnEj2-.mjs} +14 -216
  76. package/dist/sync-BOsnEj2-.mjs.map +1 -0
  77. package/dist/themes-BvYF0W8T.mjs +148 -0
  78. package/dist/themes-BvYF0W8T.mjs.map +1 -0
  79. package/dist/{tools-DV_lsiCc.mjs → tools-DcaJlYDN.mjs} +162 -273
  80. package/dist/tools-DcaJlYDN.mjs.map +1 -0
  81. package/dist/trace-CRx9lPuc.mjs +137 -0
  82. package/dist/trace-CRx9lPuc.mjs.map +1 -0
  83. package/dist/{vault-indexer-k-kUlaZ-.mjs → vault-indexer-Bi2cRmn7.mjs} +134 -132
  84. package/dist/vault-indexer-Bi2cRmn7.mjs.map +1 -0
  85. package/dist/zettelkasten-cdajbnPr.mjs +708 -0
  86. package/dist/zettelkasten-cdajbnPr.mjs.map +1 -0
  87. package/package.json +1 -2
  88. package/src/hooks/ts/lib/project-utils/index.ts +50 -0
  89. package/src/hooks/ts/lib/project-utils/notify.ts +75 -0
  90. package/src/hooks/ts/lib/project-utils/paths.ts +218 -0
  91. package/src/hooks/ts/lib/project-utils/session-notes.ts +363 -0
  92. package/src/hooks/ts/lib/project-utils/todo.ts +178 -0
  93. package/src/hooks/ts/lib/project-utils/tokens.ts +39 -0
  94. package/src/hooks/ts/lib/project-utils.ts +40 -1018
  95. package/src/hooks/ts/post-tool-use/observe.ts +327 -0
  96. package/src/hooks/ts/session-end/capture-session-summary.ts +41 -0
  97. package/src/hooks/ts/session-start/inject-observations.ts +254 -0
  98. package/dist/chunker-CbnBe0s0.mjs +0 -191
  99. package/dist/chunker-CbnBe0s0.mjs.map +0 -1
  100. package/dist/config-Cf92lGX_.mjs.map +0 -1
  101. package/dist/daemon-2ND5WO2j.mjs.map +0 -1
  102. package/dist/db-Dp8VXIMR.mjs +0 -212
  103. package/dist/db-Dp8VXIMR.mjs.map +0 -1
  104. package/dist/indexer-CMPOiY1r.mjs.map +0 -1
  105. package/dist/indexer-backend-CIMXedqk.mjs.map +0 -1
  106. package/dist/mcp/index.d.mts +0 -1
  107. package/dist/mcp/index.mjs +0 -500
  108. package/dist/mcp/index.mjs.map +0 -1
  109. package/dist/postgres-FXrHDPcE.mjs +0 -358
  110. package/dist/postgres-FXrHDPcE.mjs.map +0 -1
  111. package/dist/schemas-BFIgGntb.mjs +0 -3405
  112. package/dist/schemas-BFIgGntb.mjs.map +0 -1
  113. package/dist/search-_oHfguA5.mjs.map +0 -1
  114. package/dist/sqlite-WWBq7_2C.mjs.map +0 -1
  115. package/dist/tools-DV_lsiCc.mjs.map +0 -1
  116. package/dist/vault-indexer-k-kUlaZ-.mjs.map +0 -1
  117. package/dist/zettelkasten-e-a4rW_6.mjs +0 -901
  118. package/dist/zettelkasten-e-a4rW_6.mjs.map +0 -1
  119. package/templates/README.md +0 -181
  120. package/templates/skills/CORE/Aesthetic.md +0 -333
  121. package/templates/skills/CORE/CONSTITUTION.md +0 -1502
  122. package/templates/skills/CORE/HistorySystem.md +0 -427
  123. package/templates/skills/CORE/HookSystem.md +0 -1082
  124. package/templates/skills/CORE/Prompting.md +0 -509
  125. package/templates/skills/CORE/ProsodyAgentTemplate.md +0 -53
  126. package/templates/skills/CORE/ProsodyGuide.md +0 -416
  127. package/templates/skills/CORE/SKILL.md +0 -741
  128. package/templates/skills/CORE/SkillSystem.md +0 -213
  129. package/templates/skills/CORE/TerminalTabs.md +0 -119
  130. package/templates/skills/CORE/VOICE.md +0 -106
  131. package/templates/skills/createskill-skill.template.md +0 -78
  132. package/templates/skills/history-system.template.md +0 -371
  133. package/templates/skills/hook-system.template.md +0 -913
  134. package/templates/skills/sessions-skill.template.md +0 -102
  135. package/templates/skills/skill-system.template.md +0 -214
  136. package/templates/skills/terminal-tabs.template.md +0 -120
  137. package/templates/templates.md +0 -20
package/dist/search-_oHfguA5.mjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"search-_oHfguA5.mjs","names":[],"sources":["../src/memory/search.ts"],"sourcesContent":["/**\n * Search over the PAI federation memory index.\n *\n * Provides three search modes:\n * - keyword — BM25 full-text search (default, fast, no ML required)\n * - semantic — Brute-force cosine similarity over pre-computed embeddings\n * - hybrid — Normalized combination of BM25 + cosine scores\n *\n * BM25 uses SQLite's FTS5 extension. Semantic search requires embeddings to\n * have been generated first via `embedChunks()` in the indexer.\n */\n\nimport type { Database } from \"better-sqlite3\";\nimport { deserializeEmbedding, cosineSimilarity } from \"./embeddings.js\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface SearchResult {\n projectId: number;\n projectSlug?: string; // populated from registry after search when available\n path: string;\n startLine: number;\n endLine: number;\n snippet: string;\n score: number; // raw BM25 score (lower = more relevant in FTS5)\n tier: string;\n source: string;\n updatedAt?: number; // Unix ms from memory_chunks.updated_at\n}\n\nexport interface SearchOptions {\n /** Restrict search to these project IDs. */\n projectIds?: number[];\n /** Restrict to 'memory' or 'notes' sources. */\n sources?: string[];\n /** Restrict to specific tier(s): 'evergreen' | 'daily' | 'topic' | 'session' */\n tiers?: string[];\n /** Maximum number of results to return. Default 10. */\n maxResults?: number;\n /** Minimum BM25 score threshold (FTS5 scores are negative; 0.0 means no filter). */\n minScore?: number;\n}\n\n// ---------------------------------------------------------------------------\n// Stop words\n// ---------------------------------------------------------------------------\n\nconst STOP_WORDS = new Set([\n \"a\", \"an\", \"and\", \"are\", \"as\", \"at\", \"be\", \"been\", \"but\", \"by\",\n \"do\", \"for\", \"from\", \"has\", \"have\", \"he\", \"her\", \"him\", \"his\",\n \"how\", \"i\", \"if\", \"in\", \"is\", \"it\", \"its\", \"me\", \"my\", \"not\",\n \"of\", \"on\", \"or\", \"our\", \"out\", \"she\", \"so\", \"that\", \"the\",\n \"their\", \"them\", \"they\", \"this\", \"to\", \"up\", \"us\", \"was\", \"we\",\n \"were\", \"what\", \"when\", \"who\", \"will\", \"with\", \"you\", \"your\",\n]);\n\n// ---------------------------------------------------------------------------\n// Query builder\n// ---------------------------------------------------------------------------\n\n/**\n * Convert a free-text query into an FTS5 query string.\n *\n * Strategy:\n * 1. Tokenise by whitespace and punctuation\n * 2. Remove stop words and tokens shorter than 2 characters\n * 3. Double-quote each remaining token (exact word form)\n * 4. Join with OR so that any matching token returns a result\n *\n * Using OR instead of AND is critical for multi-word queries: the words rarely\n * all appear in the same chunk, so AND would return zero results. 
FTS5 BM25\n * scoring naturally ranks chunks where more terms match higher, so the most\n * relevant chunks still surface at the top.\n *\n * Example: \"Synchrotech interview follow-up Gilles\"\n * → `\"synchrotech\" OR \"interview\" OR \"follow\" OR \"gilles\"`\n * → chunks matching any term, ranked by how many terms match\n */\nexport function buildFtsQuery(query: string): string {\n const tokens = query\n .toLowerCase()\n .split(/[\\s\\p{P}]+/u)\n .filter(Boolean)\n .filter((t) => t.length >= 2)\n .filter((t) => !STOP_WORDS.has(t))\n // Escape any double-quotes inside the token (FTS5 uses them as delimiters)\n .map((t) => `\"${t.replace(/\"/g, '\"\"')}\"`)\n\n if (tokens.length === 0) {\n // Fallback: use original query as a raw string (may produce no results)\n return `\"${query.replace(/\"/g, '\"\"')}\"`;\n }\n\n return tokens.join(\" OR \");\n}\n\n// ---------------------------------------------------------------------------\n// Search\n// ---------------------------------------------------------------------------\n\n/**\n * Search across all indexed memory using FTS5 BM25 ranking.\n *\n * Results are ordered by BM25 score (most relevant first).\n * FTS5 bm25() returns negative values; closer to 0 = more relevant.\n * We negate the score so callers get positive values where higher = better.\n *\n * Multilingual note: SQLite FTS5 uses the `unicode61` tokenizer by default,\n * which handles Unicode correctly (German umlauts, French accents, etc.) without\n * language-specific stemming. No changes needed here — it is already\n * multilingual-safe.\n */\nexport function searchMemory(\n db: Database,\n query: string,\n opts?: SearchOptions,\n): SearchResult[] {\n const maxResults = opts?.maxResults ?? 10;\n const ftsQuery = buildFtsQuery(query);\n\n // Build the SQL with optional filters\n const conditions: string[] = [];\n const params: (string | number)[] = [ftsQuery];\n\n if (opts?.projectIds && opts.projectIds.length > 0) {\n const placeholders = opts.projectIds.map(() => \"?\").join(\", \");\n conditions.push(`c.project_id IN (${placeholders})`);\n params.push(...opts.projectIds);\n }\n\n if (opts?.sources && opts.sources.length > 0) {\n const placeholders = opts.sources.map(() => \"?\").join(\", \");\n conditions.push(`c.source IN (${placeholders})`);\n params.push(...opts.sources);\n }\n\n if (opts?.tiers && opts.tiers.length > 0) {\n const placeholders = opts.tiers.map(() => \"?\").join(\", \");\n conditions.push(`c.tier IN (${placeholders})`);\n params.push(...opts.tiers);\n }\n\n const whereClause = conditions.length > 0\n ? \"AND \" + conditions.join(\" AND \")\n : \"\";\n\n params.push(maxResults);\n\n // FTS5: join memory_fts with memory_chunks to get metadata\n // bm25(memory_fts) returns negative values (lower = better match)\n const sql = `\n SELECT\n c.project_id,\n c.path,\n c.start_line,\n c.end_line,\n c.text AS snippet,\n c.tier,\n c.source,\n c.updated_at,\n bm25(memory_fts) AS bm25_score\n FROM memory_fts\n JOIN memory_chunks c ON memory_fts.id = c.id\n WHERE memory_fts MATCH ?\n ${whereClause}\n ORDER BY bm25_score\n LIMIT ?\n `;\n\n let rows: Array<{\n project_id: number;\n path: string;\n start_line: number;\n end_line: number;\n snippet: string;\n tier: string;\n source: string;\n updated_at: number;\n bm25_score: number;\n }>;\n\n try {\n rows = db.prepare(sql).all(...params) as typeof rows;\n } catch {\n // FTS5 MATCH throws when the query is invalid — return empty results\n return [];\n }\n\n const minScore = opts?.minScore ?? 
0.0;\n\n return rows\n .map((row) => ({\n projectId: row.project_id,\n path: row.path,\n startLine: row.start_line,\n endLine: row.end_line,\n snippet: row.snippet,\n // Negate so higher = better match for callers\n score: -row.bm25_score,\n tier: row.tier,\n source: row.source,\n updatedAt: row.updated_at,\n }))\n .filter((r) => r.score >= minScore);\n}\n\n// ---------------------------------------------------------------------------\n// Semantic search\n// ---------------------------------------------------------------------------\n\n/**\n * Search chunks using brute-force cosine similarity over stored embeddings.\n *\n * Only chunks that have a non-null embedding BLOB are considered. Chunks\n * without embeddings are silently skipped (they can be embedded later via\n * `embedChunks()`).\n *\n * @param queryEmbedding Pre-computed Float32Array for the search query.\n */\nexport function searchMemorySemantic(\n db: Database,\n queryEmbedding: Float32Array,\n opts?: SearchOptions,\n): SearchResult[] {\n const maxResults = opts?.maxResults ?? 10;\n\n // Build the SQL filter conditions\n const conditions: string[] = [\"embedding IS NOT NULL\"];\n const params: (string | number)[] = [];\n\n if (opts?.projectIds && opts.projectIds.length > 0) {\n const placeholders = opts.projectIds.map(() => \"?\").join(\", \");\n conditions.push(`project_id IN (${placeholders})`);\n params.push(...opts.projectIds);\n }\n\n if (opts?.sources && opts.sources.length > 0) {\n const placeholders = opts.sources.map(() => \"?\").join(\", \");\n conditions.push(`source IN (${placeholders})`);\n params.push(...opts.sources);\n }\n\n if (opts?.tiers && opts.tiers.length > 0) {\n const placeholders = opts.tiers.map(() => \"?\").join(\", \");\n conditions.push(`tier IN (${placeholders})`);\n params.push(...opts.tiers);\n }\n\n const where = \"WHERE \" + conditions.join(\" AND \");\n\n // Hard cap for SQLite semantic path — prevents OOM on large corpora.\n // Use Postgres for production semantic search.\n const sql = `\n SELECT id, project_id, path, start_line, end_line, text, tier, source, embedding, updated_at\n FROM memory_chunks\n ${where}\n LIMIT 5000\n `;\n\n const rows = db.prepare(sql).all(...params) as Array<{\n id: string;\n project_id: number;\n path: string;\n start_line: number;\n end_line: number;\n text: string;\n tier: string;\n source: string;\n embedding: Buffer;\n updated_at: number;\n }>;\n\n if (rows.length === 0) return [];\n\n // Compute cosine similarity for every chunk\n const scored = rows.map((row) => {\n const vec = deserializeEmbedding(row.embedding);\n const score = cosineSimilarity(queryEmbedding, vec);\n return {\n projectId: row.project_id,\n path: row.path,\n startLine: row.start_line,\n endLine: row.end_line,\n snippet: row.text,\n score,\n tier: row.tier,\n source: row.source,\n updatedAt: row.updated_at,\n };\n });\n\n // Sort by descending similarity, apply optional min score filter, limit\n const minScore = opts?.minScore ?? 
-Infinity;\n\n return scored\n .filter((r) => r.score >= minScore)\n .sort((a, b) => b.score - a.score)\n .slice(0, maxResults);\n}\n\n// ---------------------------------------------------------------------------\n// Hybrid search\n// ---------------------------------------------------------------------------\n\n/**\n * Combine BM25 keyword search and semantic search using normalized scores.\n *\n * Both score sets are min-max normalized to [0,1] before combining, so neither\n * dominates the other regardless of their raw scales.\n *\n * @param queryEmbedding Pre-computed embedding for the query.\n * @param keywordWeight Weight for BM25 score (default 0.5).\n * @param semanticWeight Weight for cosine similarity score (default 0.5).\n */\nexport function searchMemoryHybrid(\n db: Database,\n query: string,\n queryEmbedding: Float32Array,\n opts?: SearchOptions & { keywordWeight?: number; semanticWeight?: number },\n): SearchResult[] {\n const maxResults = opts?.maxResults ?? 10;\n const kw = opts?.keywordWeight ?? 0.5;\n const sw = opts?.semanticWeight ?? 0.5;\n\n // Fetch keyword results — 50 candidates is sufficient for min-max normalization\n const keywordResults = searchMemory(db, query, {\n ...opts,\n maxResults: 50,\n });\n\n // Fetch semantic results — 50 candidates is sufficient for min-max normalization\n const semanticResults = searchMemorySemantic(db, queryEmbedding, {\n ...opts,\n maxResults: 50,\n });\n\n if (keywordResults.length === 0 && semanticResults.length === 0) return [];\n\n // Build a map of chunk ID → combined result\n // Use \"projectId:path:startLine:endLine\" as a stable key (same as chunk IDs)\n const keyFor = (r: SearchResult) =>\n `${r.projectId}:${r.path}:${r.startLine}:${r.endLine}`;\n\n // Min-max normalize helper\n function minMaxNormalize(items: SearchResult[]): Map<string, number> {\n if (items.length === 0) return new Map();\n const min = Math.min(...items.map((r) => r.score));\n const max = Math.max(...items.map((r) => r.score));\n const range = max - min;\n const m = new Map<string, number>();\n for (const r of items) {\n m.set(keyFor(r), range === 0 ? 1 : (r.score - min) / range);\n }\n return m;\n }\n\n const kwNorm = minMaxNormalize(keywordResults);\n const semNorm = minMaxNormalize(semanticResults);\n\n // Union of all chunk keys\n const allKeys = new Set<string>([\n ...keywordResults.map(keyFor),\n ...semanticResults.map(keyFor),\n ]);\n\n // Build a lookup from key → result metadata\n const metaMap = new Map<string, SearchResult>();\n for (const r of [...keywordResults, ...semanticResults]) {\n metaMap.set(keyFor(r), r);\n }\n\n // Combine scores\n const combined: Array<SearchResult & { combinedScore: number }> = [];\n for (const key of allKeys) {\n const meta = metaMap.get(key)!;\n const kwScore = kwNorm.get(key) ?? 0;\n const semScore = semNorm.get(key) ?? 
0;\n const combinedScore = kw * kwScore + sw * semScore;\n combined.push({ ...meta, score: combinedScore, combinedScore });\n }\n\n // Sort by combined score descending\n return combined\n .sort((a, b) => b.score - a.score)\n .slice(0, maxResults)\n .map(({ combinedScore: _unused, ...r }) => r);\n}\n\n// ---------------------------------------------------------------------------\n// Slug lookup helper\n// ---------------------------------------------------------------------------\n\n/**\n * Populate the projectSlug field on search results by looking up project IDs\n * in the registry database.\n */\nexport function populateSlugs(\n results: SearchResult[],\n registryDb: Database,\n): SearchResult[] {\n if (results.length === 0) return results;\n\n const ids = [...new Set(results.map((r) => r.projectId))];\n const placeholders = ids.map(() => \"?\").join(\", \");\n const rows = registryDb\n .prepare(`SELECT id, slug FROM projects WHERE id IN (${placeholders})`)\n .all(...ids) as Array<{ id: number; slug: string }>;\n\n const slugMap = new Map(rows.map((r) => [r.id, r.slug]));\n\n return results.map((r) => ({\n ...r,\n projectSlug: slugMap.get(r.projectId),\n }));\n}\n\n// ---------------------------------------------------------------------------\n// Recency boost\n// ---------------------------------------------------------------------------\n\n/**\n * Apply exponential recency boost to search scores.\n *\n * Scores are first min-max normalized to [0,1], then multiplied by an\n * exponential decay factor based on chunk age. Normalization is required\n * because the cross-encoder reranker produces negative logit scores — naive\n * multiplication of a negative score by a decay factor (0 < d ≤ 1) would\n * make the score *less* negative, effectively boosting old results instead\n * of penalizing them.\n *\n * Formula: score_final = normalized * exp(-lambda * age_days)\n * where lambda = ln(2) / halfLifeDays, normalized ∈ [0,1]\n *\n * With default halfLifeDays=90, a 3-month-old chunk retains 50% of its\n * normalized score, a 6-month-old retains 25%, and a 1-year-old ~6%.\n *\n * Results without an updatedAt timestamp receive no decay penalty.\n * Results are re-sorted by the boosted score after application.\n *\n * @param results Search results with optional updatedAt timestamps.\n * @param halfLifeDays Score halves every N days. Default 90 (~3 months).\n * @returns New array sorted by decayed normalized score (descending).\n */\nexport function applyRecencyBoost(\n results: SearchResult[],\n halfLifeDays = 90,\n): SearchResult[] {\n if (halfLifeDays <= 0 || results.length === 0) return results;\n\n const lambda = Math.LN2 / halfLifeDays;\n const now = Date.now();\n\n // Min-max normalize scores to [0,1] so multiplicative decay works\n // correctly regardless of the raw score sign/scale.\n const scores = results.map((r) => r.score);\n const minScore = Math.min(...scores);\n const maxScore = Math.max(...scores);\n const range = maxScore - minScore;\n\n return results\n .map((r) => {\n const normalized = range === 0 ? 1 : (r.score - minScore) / range;\n const decay = r.updatedAt\n ? 
Math.exp(-lambda * Math.max(0, (now - r.updatedAt) / 86_400_000))\n : 1; // no timestamp → no penalty\n return { ...r, score: normalized * decay };\n })\n .sort((a, b) => b.score - a.score);\n}\n"],"mappings":";;;;;;;;;;;;AAiDA,MAAM,aAAa,IAAI,IAAI;CACzB;CAAK;CAAM;CAAO;CAAO;CAAM;CAAM;CAAM;CAAQ;CAAO;CAC1D;CAAM;CAAO;CAAQ;CAAO;CAAQ;CAAM;CAAO;CAAO;CACxD;CAAO;CAAK;CAAM;CAAM;CAAM;CAAM;CAAO;CAAM;CAAM;CACvD;CAAM;CAAM;CAAM;CAAO;CAAO;CAAO;CAAM;CAAQ;CACrD;CAAS;CAAQ;CAAQ;CAAQ;CAAM;CAAM;CAAM;CAAO;CAC1D;CAAQ;CAAQ;CAAQ;CAAO;CAAQ;CAAQ;CAAO;CACvD,CAAC;;;;;;;;;;;;;;;;;;;AAwBF,SAAgB,cAAc,OAAuB;CACnD,MAAM,SAAS,MACZ,aAAa,CACb,MAAM,cAAc,CACpB,OAAO,QAAQ,CACf,QAAQ,MAAM,EAAE,UAAU,EAAE,CAC5B,QAAQ,MAAM,CAAC,WAAW,IAAI,EAAE,CAAC,CAEjC,KAAK,MAAM,IAAI,EAAE,QAAQ,MAAM,OAAK,CAAC,GAAG;AAE3C,KAAI,OAAO,WAAW,EAEpB,QAAO,IAAI,MAAM,QAAQ,MAAM,OAAK,CAAC;AAGvC,QAAO,OAAO,KAAK,OAAO;;;;;;;;;;;;;;AAmB5B,SAAgB,aACd,IACA,OACA,MACgB;CAChB,MAAM,aAAa,MAAM,cAAc;CACvC,MAAM,WAAW,cAAc,MAAM;CAGrC,MAAM,aAAuB,EAAE;CAC/B,MAAM,SAA8B,CAAC,SAAS;AAE9C,KAAI,MAAM,cAAc,KAAK,WAAW,SAAS,GAAG;EAClD,MAAM,eAAe,KAAK,WAAW,UAAU,IAAI,CAAC,KAAK,KAAK;AAC9D,aAAW,KAAK,oBAAoB,aAAa,GAAG;AACpD,SAAO,KAAK,GAAG,KAAK,WAAW;;AAGjC,KAAI,MAAM,WAAW,KAAK,QAAQ,SAAS,GAAG;EAC5C,MAAM,eAAe,KAAK,QAAQ,UAAU,IAAI,CAAC,KAAK,KAAK;AAC3D,aAAW,KAAK,gBAAgB,aAAa,GAAG;AAChD,SAAO,KAAK,GAAG,KAAK,QAAQ;;AAG9B,KAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;EACxC,MAAM,eAAe,KAAK,MAAM,UAAU,IAAI,CAAC,KAAK,KAAK;AACzD,aAAW,KAAK,cAAc,aAAa,GAAG;AAC9C,SAAO,KAAK,GAAG,KAAK,MAAM;;CAG5B,MAAM,cAAc,WAAW,SAAS,IACpC,SAAS,WAAW,KAAK,QAAQ,GACjC;AAEJ,QAAO,KAAK,WAAW;CAIvB,MAAM,MAAM;;;;;;;;;;;;;;QAcN,YAAY;;;;CAKlB,IAAI;AAYJ,KAAI;AACF,SAAO,GAAG,QAAQ,IAAI,CAAC,IAAI,GAAG,OAAO;SAC/B;AAEN,SAAO,EAAE;;CAGX,MAAM,WAAW,MAAM,YAAY;AAEnC,QAAO,KACJ,KAAK,SAAS;EACb,WAAW,IAAI;EACf,MAAM,IAAI;EACV,WAAW,IAAI;EACf,SAAS,IAAI;EACb,SAAS,IAAI;EAEb,OAAO,CAAC,IAAI;EACZ,MAAM,IAAI;EACV,QAAQ,IAAI;EACZ,WAAW,IAAI;EAChB,EAAE,CACF,QAAQ,MAAM,EAAE,SAAS,SAAS;;;;;;;;;;;AAgBvC,SAAgB,qBACd,IACA,gBACA,MACgB;CAChB,MAAM,aAAa,MAAM,cAAc;CAGvC,MAAM,aAAuB,CAAC,wBAAwB;CACtD,MAAM,SAA8B,EAAE;AAEtC,KAAI,MAAM,cAAc,KAAK,WAAW,SAAS,GAAG;EAClD,MAAM,eAAe,KAAK,WAAW,UAAU,IAAI,CAAC,KAAK,KAAK;AAC9D,aAAW,KAAK,kBAAkB,aAAa,GAAG;AAClD,SAAO,KAAK,GAAG,KAAK,WAAW;;AAGjC,KAAI,MAAM,WAAW,KAAK,QAAQ,SAAS,GAAG;EAC5C,MAAM,eAAe,KAAK,QAAQ,UAAU,IAAI,CAAC,KAAK,KAAK;AAC3D,aAAW,KAAK,cAAc,aAAa,GAAG;AAC9C,SAAO,KAAK,GAAG,KAAK,QAAQ;;AAG9B,KAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;EACxC,MAAM,eAAe,KAAK,MAAM,UAAU,IAAI,CAAC,KAAK,KAAK;AACzD,aAAW,KAAK,YAAY,aAAa,GAAG;AAC5C,SAAO,KAAK,GAAG,KAAK,MAAM;;CAO5B,MAAM,MAAM;;;MAJE,WAAW,WAAW,KAAK,QAAQ,CAOvC;;;CAIV,MAAM,OAAO,GAAG,QAAQ,IAAI,CAAC,IAAI,GAAG,OAAO;AAa3C,KAAI,KAAK,WAAW,EAAG,QAAO,EAAE;CAGhC,MAAM,SAAS,KAAK,KAAK,QAAQ;EAE/B,MAAM,QAAQ,iBAAiB,gBADnB,qBAAqB,IAAI,UAAU,CACI;AACnD,SAAO;GACL,WAAW,IAAI;GACf,MAAM,IAAI;GACV,WAAW,IAAI;GACf,SAAS,IAAI;GACb,SAAS,IAAI;GACb;GACA,MAAM,IAAI;GACV,QAAQ,IAAI;GACZ,WAAW,IAAI;GAChB;GACD;CAGF,MAAM,WAAW,MAAM,YAAY;AAEnC,QAAO,OACJ,QAAQ,MAAM,EAAE,SAAS,SAAS,CAClC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM,CACjC,MAAM,GAAG,WAAW;;;;;;;;;;;;AAiBzB,SAAgB,mBACd,IACA,OACA,gBACA,MACgB;CAChB,MAAM,aAAa,MAAM,cAAc;CACvC,MAAM,KAAK,MAAM,iBAAiB;CAClC,MAAM,KAAK,MAAM,kBAAkB;CAGnC,MAAM,iBAAiB,aAAa,IAAI,OAAO;EAC7C,GAAG;EACH,YAAY;EACb,CAAC;CAGF,MAAM,kBAAkB,qBAAqB,IAAI,gBAAgB;EAC/D,GAAG;EACH,YAAY;EACb,CAAC;AAEF,KAAI,eAAe,WAAW,KAAK,gBAAgB,WAAW,EAAG,QAAO,EAAE;CAI1E,MAAM,UAAU,MACd,GAAG,EAAE,UAAU,GAAG,EAAE,KAAK,GAAG,EAAE,UAAU,GAAG,EAAE;CAG/C,SAAS,gBAAgB,OAA4C;AACnE,MAAI,MAAM,WAAW,EAAG,wBAAO,IAAI,KAAK;EACxC,MAAM,MAAM,KAAK,IAAI,GAAG,MAAM,KAAK,MAAM,EAAE,MAAM,CAAC;EAElD,MAAM,QADM,KAAK,IAAI,GAAG,MAAM,KAAK,MAAM
,EAAE,MAAM,CAAC,GAC9B;EACpB,MAAM,oBAAI,IAAI,KAAqB;AACnC,OAAK,MAAM,KAAK,MACd,GAAE,IAAI,OAAO,EAAE,EAAE,UAAU,IAAI,KAAK,EAAE,QAAQ,OAAO,MAAM;AAE7D,SAAO;;CAGT,MAAM,SAAS,gBAAgB,eAAe;CAC9C,MAAM,UAAU,gBAAgB,gBAAgB;CAGhD,MAAM,UAAU,IAAI,IAAY,CAC9B,GAAG,eAAe,IAAI,OAAO,EAC7B,GAAG,gBAAgB,IAAI,OAAO,CAC/B,CAAC;CAGF,MAAM,0BAAU,IAAI,KAA2B;AAC/C,MAAK,MAAM,KAAK,CAAC,GAAG,gBAAgB,GAAG,gBAAgB,CACrD,SAAQ,IAAI,OAAO,EAAE,EAAE,EAAE;CAI3B,MAAM,WAA4D,EAAE;AACpE,MAAK,MAAM,OAAO,SAAS;EACzB,MAAM,OAAO,QAAQ,IAAI,IAAI;EAC7B,MAAM,UAAU,OAAO,IAAI,IAAI,IAAI;EACnC,MAAM,WAAW,QAAQ,IAAI,IAAI,IAAI;EACrC,MAAM,gBAAgB,KAAK,UAAU,KAAK;AAC1C,WAAS,KAAK;GAAE,GAAG;GAAM,OAAO;GAAe;GAAe,CAAC;;AAIjE,QAAO,SACJ,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM,CACjC,MAAM,GAAG,WAAW,CACpB,KAAK,EAAE,eAAe,SAAS,GAAG,QAAQ,EAAE;;;;;;AAWjD,SAAgB,cACd,SACA,YACgB;AAChB,KAAI,QAAQ,WAAW,EAAG,QAAO;CAEjC,MAAM,MAAM,CAAC,GAAG,IAAI,IAAI,QAAQ,KAAK,MAAM,EAAE,UAAU,CAAC,CAAC;CACzD,MAAM,eAAe,IAAI,UAAU,IAAI,CAAC,KAAK,KAAK;CAClD,MAAM,OAAO,WACV,QAAQ,8CAA8C,aAAa,GAAG,CACtE,IAAI,GAAG,IAAI;CAEd,MAAM,UAAU,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,EAAE,IAAI,EAAE,KAAK,CAAC,CAAC;AAExD,QAAO,QAAQ,KAAK,OAAO;EACzB,GAAG;EACH,aAAa,QAAQ,IAAI,EAAE,UAAU;EACtC,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;AA8BL,SAAgB,kBACd,SACA,eAAe,IACC;AAChB,KAAI,gBAAgB,KAAK,QAAQ,WAAW,EAAG,QAAO;CAEtD,MAAM,SAAS,KAAK,MAAM;CAC1B,MAAM,MAAM,KAAK,KAAK;CAItB,MAAM,SAAS,QAAQ,KAAK,MAAM,EAAE,MAAM;CAC1C,MAAM,WAAW,KAAK,IAAI,GAAG,OAAO;CAEpC,MAAM,QADW,KAAK,IAAI,GAAG,OAAO,GACX;AAEzB,QAAO,QACJ,KAAK,MAAM;EACV,MAAM,aAAa,UAAU,IAAI,KAAK,EAAE,QAAQ,YAAY;EAC5D,MAAM,QAAQ,EAAE,YACZ,KAAK,IAAI,CAAC,SAAS,KAAK,IAAI,IAAI,MAAM,EAAE,aAAa,MAAW,CAAC,GACjE;AACJ,SAAO;GAAE,GAAG;GAAG,OAAO,aAAa;GAAO;GAC1C,CACD,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM"}
package/dist/sqlite-WWBq7_2C.mjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"sqlite-WWBq7_2C.mjs","names":[],"sources":["../src/storage/sqlite.ts"],"sourcesContent":["/**\n * SQLiteBackend — wraps the existing better-sqlite3 federation.db\n * behind the StorageBackend interface.\n *\n * This is a thin adapter. The heavy lifting is all in the existing\n * memory/indexer.ts and memory/search.ts code; we just provide a\n * backend-agnostic surface so the daemon and tools can call either\n * SQLite or Postgres transparently.\n */\n\nimport type { Database } from \"better-sqlite3\";\nimport type { StorageBackend, ChunkRow, FileRow, FederationStats } from \"./interface.js\";\nimport type { SearchResult, SearchOptions } from \"../memory/search.js\";\nimport { searchMemory, searchMemorySemantic } from \"../memory/search.js\";\n\nexport class SQLiteBackend implements StorageBackend {\n readonly backendType = \"sqlite\" as const;\n\n private db: Database;\n\n constructor(db: Database) {\n this.db = db;\n }\n\n /**\n * Expose the raw better-sqlite3 Database handle.\n * Used by the daemon to pass to indexAll() which still uses the synchronous API directly.\n */\n getRawDb(): Database {\n return this.db;\n }\n\n // -------------------------------------------------------------------------\n // Lifecycle\n // -------------------------------------------------------------------------\n\n async close(): Promise<void> {\n try {\n this.db.close();\n } catch {\n // ignore\n }\n }\n\n async getStats(): Promise<FederationStats> {\n const files = (\n this.db.prepare(\"SELECT COUNT(*) AS n FROM memory_files\").get() as { n: number }\n ).n;\n const chunks = (\n this.db.prepare(\"SELECT COUNT(*) AS n FROM memory_chunks\").get() as { n: number }\n ).n;\n return { files, chunks };\n }\n\n // -------------------------------------------------------------------------\n // File tracking\n // -------------------------------------------------------------------------\n\n async getFileHash(projectId: number, path: string): Promise<string | undefined> {\n const row = this.db\n .prepare(\"SELECT hash FROM memory_files WHERE project_id = ? AND path = ?\")\n .get(projectId, path) as { hash: string } | undefined;\n return row?.hash;\n }\n\n async upsertFile(file: FileRow): Promise<void> {\n this.db\n .prepare(\n `INSERT INTO memory_files (project_id, path, source, tier, hash, mtime, size)\n VALUES (?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT(project_id, path) DO UPDATE SET\n source = excluded.source,\n tier = excluded.tier,\n hash = excluded.hash,\n mtime = excluded.mtime,\n size = excluded.size`\n )\n .run(file.projectId, file.path, file.source, file.tier, file.hash, file.mtime, file.size);\n }\n\n // -------------------------------------------------------------------------\n // Chunk management\n // -------------------------------------------------------------------------\n\n async getChunkIds(projectId: number, path: string): Promise<string[]> {\n const rows = this.db\n .prepare(\"SELECT id FROM memory_chunks WHERE project_id = ? AND path = ?\")\n .all(projectId, path) as Array<{ id: string }>;\n return rows.map((r) => r.id);\n }\n\n async deleteChunksForFile(projectId: number, path: string): Promise<void> {\n const ids = await this.getChunkIds(projectId, path);\n const deleteFts = this.db.prepare(\"DELETE FROM memory_fts WHERE id = ?\");\n const deleteChunks = this.db.prepare(\n \"DELETE FROM memory_chunks WHERE project_id = ? 
AND path = ?\"\n );\n this.db.transaction(() => {\n for (const id of ids) {\n deleteFts.run(id);\n }\n deleteChunks.run(projectId, path);\n })();\n }\n\n async insertChunks(chunks: ChunkRow[]): Promise<void> {\n if (chunks.length === 0) return;\n\n const insertChunk = this.db.prepare(\n `INSERT INTO memory_chunks (id, project_id, source, tier, path, start_line, end_line, hash, text, updated_at)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`\n );\n const insertFts = this.db.prepare(\n `INSERT INTO memory_fts (text, id, project_id, path, source, tier, start_line, end_line)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?)`\n );\n\n this.db.transaction(() => {\n for (const c of chunks) {\n insertChunk.run(\n c.id,\n c.projectId,\n c.source,\n c.tier,\n c.path,\n c.startLine,\n c.endLine,\n c.hash,\n c.text,\n c.updatedAt\n );\n insertFts.run(\n c.text,\n c.id,\n c.projectId,\n c.path,\n c.source,\n c.tier,\n c.startLine,\n c.endLine\n );\n }\n })();\n }\n\n async getDistinctChunkPaths(projectId: number): Promise<string[]> {\n const rows = this.db\n .prepare(\"SELECT DISTINCT path FROM memory_chunks WHERE project_id = ?\")\n .all(projectId) as Array<{ path: string }>;\n return rows.map((r) => r.path);\n }\n\n async deletePaths(projectId: number, paths: string[]): Promise<void> {\n if (paths.length === 0) return;\n const deleteFts = this.db.prepare(\"DELETE FROM memory_fts WHERE id = ?\");\n const deleteChunks = this.db.prepare(\n \"DELETE FROM memory_chunks WHERE project_id = ? AND path = ?\"\n );\n const deleteFile = this.db.prepare(\n \"DELETE FROM memory_files WHERE project_id = ? AND path = ?\"\n );\n this.db.transaction(() => {\n for (const path of paths) {\n const ids = this.db\n .prepare(\"SELECT id FROM memory_chunks WHERE project_id = ? AND path = ?\")\n .all(projectId, path) as Array<{ id: string }>;\n for (const { id } of ids) {\n deleteFts.run(id);\n }\n deleteChunks.run(projectId, path);\n deleteFile.run(projectId, path);\n }\n })();\n }\n\n async getUnembeddedChunkIds(projectId?: number): Promise<Array<{ id: string; text: string }>> {\n const conditions = [\"embedding IS NULL\"];\n const params: (string | number)[] = [];\n\n if (projectId !== undefined) {\n conditions.push(\"project_id = ?\");\n params.push(projectId);\n }\n\n const where = \"WHERE \" + conditions.join(\" AND \");\n const rows = this.db\n .prepare(`SELECT id, text FROM memory_chunks ${where} ORDER BY id`)\n .all(...params) as Array<{ id: string; text: string }>;\n return rows;\n }\n\n async updateEmbedding(chunkId: string, embedding: Buffer): Promise<void> {\n this.db\n .prepare(\"UPDATE memory_chunks SET embedding = ? 
WHERE id = ?\")\n .run(embedding, chunkId);\n }\n\n // -------------------------------------------------------------------------\n // Search\n // -------------------------------------------------------------------------\n\n async searchKeyword(query: string, opts?: SearchOptions): Promise<SearchResult[]> {\n return searchMemory(this.db, query, opts);\n }\n\n async searchSemantic(queryEmbedding: Float32Array, opts?: SearchOptions): Promise<SearchResult[]> {\n return searchMemorySemantic(this.db, queryEmbedding, opts);\n }\n}\n"],"mappings":";;;;AAeA,IAAa,gBAAb,MAAqD;CACnD,AAAS,cAAc;CAEvB,AAAQ;CAER,YAAY,IAAc;AACxB,OAAK,KAAK;;;;;;CAOZ,WAAqB;AACnB,SAAO,KAAK;;CAOd,MAAM,QAAuB;AAC3B,MAAI;AACF,QAAK,GAAG,OAAO;UACT;;CAKV,MAAM,WAAqC;AAOzC,SAAO;GAAE,OALP,KAAK,GAAG,QAAQ,yCAAyC,CAAC,KAAK,CAC/D;GAIc,QAFd,KAAK,GAAG,QAAQ,0CAA0C,CAAC,KAAK,CAChE;GACsB;;CAO1B,MAAM,YAAY,WAAmB,MAA2C;AAI9E,SAHY,KAAK,GACd,QAAQ,kEAAkE,CAC1E,IAAI,WAAW,KAAK,EACX;;CAGd,MAAM,WAAW,MAA8B;AAC7C,OAAK,GACF,QACC;;;;;;;mCAQD,CACA,IAAI,KAAK,WAAW,KAAK,MAAM,KAAK,QAAQ,KAAK,MAAM,KAAK,MAAM,KAAK,OAAO,KAAK,KAAK;;CAO7F,MAAM,YAAY,WAAmB,MAAiC;AAIpE,SAHa,KAAK,GACf,QAAQ,iEAAiE,CACzE,IAAI,WAAW,KAAK,CACX,KAAK,MAAM,EAAE,GAAG;;CAG9B,MAAM,oBAAoB,WAAmB,MAA6B;EACxE,MAAM,MAAM,MAAM,KAAK,YAAY,WAAW,KAAK;EACnD,MAAM,YAAY,KAAK,GAAG,QAAQ,sCAAsC;EACxE,MAAM,eAAe,KAAK,GAAG,QAC3B,8DACD;AACD,OAAK,GAAG,kBAAkB;AACxB,QAAK,MAAM,MAAM,IACf,WAAU,IAAI,GAAG;AAEnB,gBAAa,IAAI,WAAW,KAAK;IACjC,EAAE;;CAGN,MAAM,aAAa,QAAmC;AACpD,MAAI,OAAO,WAAW,EAAG;EAEzB,MAAM,cAAc,KAAK,GAAG,QAC1B;8CAED;EACD,MAAM,YAAY,KAAK,GAAG,QACxB;wCAED;AAED,OAAK,GAAG,kBAAkB;AACxB,QAAK,MAAM,KAAK,QAAQ;AACtB,gBAAY,IACV,EAAE,IACF,EAAE,WACF,EAAE,QACF,EAAE,MACF,EAAE,MACF,EAAE,WACF,EAAE,SACF,EAAE,MACF,EAAE,MACF,EAAE,UACH;AACD,cAAU,IACR,EAAE,MACF,EAAE,IACF,EAAE,WACF,EAAE,MACF,EAAE,QACF,EAAE,MACF,EAAE,WACF,EAAE,QACH;;IAEH,EAAE;;CAGN,MAAM,sBAAsB,WAAsC;AAIhE,SAHa,KAAK,GACf,QAAQ,+DAA+D,CACvE,IAAI,UAAU,CACL,KAAK,MAAM,EAAE,KAAK;;CAGhC,MAAM,YAAY,WAAmB,OAAgC;AACnE,MAAI,MAAM,WAAW,EAAG;EACxB,MAAM,YAAY,KAAK,GAAG,QAAQ,sCAAsC;EACxE,MAAM,eAAe,KAAK,GAAG,QAC3B,8DACD;EACD,MAAM,aAAa,KAAK,GAAG,QACzB,6DACD;AACD,OAAK,GAAG,kBAAkB;AACxB,QAAK,MAAM,QAAQ,OAAO;IACxB,MAAM,MAAM,KAAK,GACd,QAAQ,iEAAiE,CACzE,IAAI,WAAW,KAAK;AACvB,SAAK,MAAM,EAAE,QAAQ,IACnB,WAAU,IAAI,GAAG;AAEnB,iBAAa,IAAI,WAAW,KAAK;AACjC,eAAW,IAAI,WAAW,KAAK;;IAEjC,EAAE;;CAGN,MAAM,sBAAsB,WAAkE;EAC5F,MAAM,aAAa,CAAC,oBAAoB;EACxC,MAAM,SAA8B,EAAE;AAEtC,MAAI,cAAc,QAAW;AAC3B,cAAW,KAAK,iBAAiB;AACjC,UAAO,KAAK,UAAU;;EAGxB,MAAM,QAAQ,WAAW,WAAW,KAAK,QAAQ;AAIjD,SAHa,KAAK,GACf,QAAQ,sCAAsC,MAAM,cAAc,CAClE,IAAI,GAAG,OAAO;;CAInB,MAAM,gBAAgB,SAAiB,WAAkC;AACvE,OAAK,GACF,QAAQ,sDAAsD,CAC9D,IAAI,WAAW,QAAQ;;CAO5B,MAAM,cAAc,OAAe,MAA+C;AAChF,SAAO,aAAa,KAAK,IAAI,OAAO,KAAK;;CAG3C,MAAM,eAAe,gBAA8B,MAA+C;AAChG,SAAO,qBAAqB,KAAK,IAAI,gBAAgB,KAAK"}
package/dist/tools-DV_lsiCc.mjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"tools-DV_lsiCc.mjs","names":[],"sources":["../src/mcp/tools.ts"],"sourcesContent":["/**\n * PAI Knowledge OS — Pure tool handler functions (shared by daemon + legacy MCP server)\n *\n * Each function accepts pre-opened database handles and raw params, executes\n * the tool logic, and returns an MCP-style content array.\n *\n * This module does NOT import indexAll() — indexing is handled by the daemon\n * on its own schedule. The search hot path is pure DB read.\n */\n\nimport { readFileSync, existsSync, statSync } from \"node:fs\";\nimport { join, resolve, isAbsolute } from \"node:path\";\nimport type { Database } from \"better-sqlite3\";\nimport { populateSlugs, searchMemoryHybrid } from \"../memory/search.js\";\nimport { detectProject, formatDetectionJson } from \"../cli/commands/detect.js\";\nimport type { StorageBackend } from \"../storage/interface.js\";\nimport type { NotificationMode, NotificationEvent } from \"../notifications/types.js\";\nimport type { SearchConfig } from \"../daemon/config.js\";\n\n// ---------------------------------------------------------------------------\n// Shared types\n// ---------------------------------------------------------------------------\n\nexport interface ToolContent {\n type: \"text\";\n text: string;\n}\n\nexport interface ToolResult {\n content: ToolContent[];\n isError?: boolean;\n}\n\n// ---------------------------------------------------------------------------\n// Helper: lookup project_id by slug (also checks aliases)\n// ---------------------------------------------------------------------------\n\nexport function lookupProjectId(\n registryDb: Database,\n slug: string\n): number | null {\n const bySlug = registryDb\n .prepare(\"SELECT id FROM projects WHERE slug = ?\")\n .get(slug) as { id: number } | undefined;\n if (bySlug) return bySlug.id;\n\n const byAlias = registryDb\n .prepare(\"SELECT project_id FROM aliases WHERE alias = ?\")\n .get(slug) as { project_id: number } | undefined;\n if (byAlias) return byAlias.project_id;\n\n return null;\n}\n\n// ---------------------------------------------------------------------------\n// Helper: detect project from a filesystem path\n// ---------------------------------------------------------------------------\n\ninterface ProjectRow {\n id: number;\n slug: string;\n display_name: string;\n root_path: string;\n type: string;\n status: string;\n created_at: number;\n updated_at: number;\n archived_at?: number | null;\n parent_id?: number | null;\n obsidian_link?: string | null;\n}\n\nexport function detectProjectFromPath(\n registryDb: Database,\n fsPath: string\n): ProjectRow | null {\n const resolved = resolve(fsPath);\n\n const exact = registryDb\n .prepare(\n \"SELECT id, slug, display_name, root_path, type, status, created_at, updated_at FROM projects WHERE root_path = ?\"\n )\n .get(resolved) as ProjectRow | undefined;\n\n if (exact) return exact;\n\n const all = registryDb\n .prepare(\n \"SELECT id, slug, display_name, root_path, type, status, created_at, updated_at FROM projects ORDER BY LENGTH(root_path) DESC\"\n )\n .all() as ProjectRow[];\n\n for (const project of all) {\n if (\n resolved.startsWith(project.root_path + \"/\") ||\n resolved === project.root_path\n ) {\n return project;\n }\n }\n\n return null;\n}\n\n// ---------------------------------------------------------------------------\n// Helper: format project row for tool output\n// ---------------------------------------------------------------------------\n\nexport function 
formatProject(registryDb: Database, project: ProjectRow): string {\n const sessionCount = (\n registryDb\n .prepare(\"SELECT COUNT(*) AS n FROM sessions WHERE project_id = ?\")\n .get(project.id) as { n: number }\n ).n;\n\n const lastSession = registryDb\n .prepare(\n \"SELECT date FROM sessions WHERE project_id = ? ORDER BY date DESC LIMIT 1\"\n )\n .get(project.id) as { date: string } | undefined;\n\n const tags = (\n registryDb\n .prepare(\n `SELECT t.name FROM tags t\n JOIN project_tags pt ON pt.tag_id = t.id\n WHERE pt.project_id = ?\n ORDER BY t.name`\n )\n .all(project.id) as Array<{ name: string }>\n ).map((r) => r.name);\n\n const aliases = (\n registryDb\n .prepare(\"SELECT alias FROM aliases WHERE project_id = ? ORDER BY alias\")\n .all(project.id) as Array<{ alias: string }>\n ).map((r) => r.alias);\n\n const lines: string[] = [\n `slug: ${project.slug}`,\n `display_name: ${project.display_name}`,\n `root_path: ${project.root_path}`,\n `type: ${project.type}`,\n `status: ${project.status}`,\n `sessions: ${sessionCount}`,\n ];\n\n if (lastSession) lines.push(`last_session: ${lastSession.date}`);\n if (tags.length) lines.push(`tags: ${tags.join(\", \")}`);\n if (aliases.length) lines.push(`aliases: ${aliases.join(\", \")}`);\n if (project.obsidian_link) lines.push(`obsidian_link: ${project.obsidian_link}`);\n if (project.archived_at) {\n lines.push(\n `archived_at: ${new Date(project.archived_at).toISOString().slice(0, 10)}`\n );\n }\n\n return lines.join(\"\\n\");\n}\n\n// ---------------------------------------------------------------------------\n// Tool: memory_search\n// ---------------------------------------------------------------------------\n\nexport interface MemorySearchParams {\n query: string;\n project?: string;\n all_projects?: boolean;\n sources?: Array<\"memory\" | \"notes\">;\n limit?: number;\n mode?: \"keyword\" | \"semantic\" | \"hybrid\";\n /** Rerank results using cross-encoder model for better relevance ordering. */\n rerank?: boolean;\n /** Apply recency boost — score decays by half every N days. 0 = off (default). */\n recencyBoost?: number;\n /** Maximum characters per result snippet. Default 200.\n * Limit context consumption — MCP results go into Claude's context window. */\n snippetLength?: number;\n}\n\nexport async function toolMemorySearch(\n registryDb: Database,\n federation: Database | StorageBackend,\n params: MemorySearchParams,\n searchDefaults?: SearchConfig,\n): Promise<ToolResult> {\n try {\n const projectIds: number[] | undefined = params.project\n ? (() => {\n const id = lookupProjectId(registryDb, params.project!);\n return id != null ? [id] : [];\n })()\n : undefined;\n\n if (params.project && (!projectIds || projectIds.length === 0)) {\n return {\n content: [\n { type: \"text\", text: `Project not found: ${params.project}` },\n ],\n isError: true,\n };\n }\n\n // NOTE: No indexAll() here — indexing is handled by the daemon scheduler.\n // The daemon ensures the index stays fresh; the search hot path is read-only.\n\n const mode = params.mode ?? (searchDefaults?.mode ?? \"keyword\");\n // Limit context consumption — MCP results go into Claude's context window.\n // Default to 5 results and 200-char snippets to keep a single search call\n // within ~1-2K tokens rather than 5K+.\n const snippetLength = params.snippetLength ?? (searchDefaults?.snippetLength ?? 200);\n const searchOpts = {\n projectIds,\n sources: params.sources,\n maxResults: params.limit ?? (searchDefaults?.defaultLimit ?? 
5),\n };\n\n let results;\n\n // Determine if federation is a StorageBackend or a raw Database\n const isBackend = (x: Database | StorageBackend): x is StorageBackend =>\n \"backendType\" in x;\n\n if (isBackend(federation)) {\n // Use the storage backend interface (works for both SQLite and Postgres)\n if (mode === \"keyword\") {\n results = await federation.searchKeyword(params.query, searchOpts);\n } else if (mode === \"semantic\" || mode === \"hybrid\") {\n const { generateEmbedding } = await import(\"../memory/embeddings.js\");\n const queryEmbedding = await generateEmbedding(params.query, true);\n\n if (mode === \"semantic\") {\n results = await federation.searchSemantic(queryEmbedding, searchOpts);\n } else {\n // Hybrid: combine keyword + semantic\n const [kwResults, semResults] = await Promise.all([\n federation.searchKeyword(params.query, { ...searchOpts, maxResults: 50 }),\n federation.searchSemantic(queryEmbedding, { ...searchOpts, maxResults: 50 }),\n ]); // 50 candidates is sufficient for min-max normalization\n // Reuse the existing hybrid scoring logic\n results = combineHybridResults(kwResults, semResults, searchOpts.maxResults ?? 10);\n }\n } else {\n results = await federation.searchKeyword(params.query, searchOpts);\n }\n } else {\n // Legacy path: raw better-sqlite3 Database (for direct MCP server usage)\n const { searchMemory, searchMemorySemantic } = await import(\"../memory/search.js\");\n\n if (mode === \"keyword\") {\n results = searchMemory(federation, params.query, searchOpts);\n } else if (mode === \"semantic\" || mode === \"hybrid\") {\n const { generateEmbedding } = await import(\"../memory/embeddings.js\");\n const queryEmbedding = await generateEmbedding(params.query, true);\n\n if (mode === \"semantic\") {\n results = searchMemorySemantic(federation, queryEmbedding, searchOpts);\n } else {\n results = searchMemoryHybrid(\n federation,\n params.query,\n queryEmbedding,\n searchOpts\n );\n }\n } else {\n results = searchMemory(federation, params.query, searchOpts);\n }\n }\n\n // Cross-encoder reranking (on by default)\n const shouldRerank = params.rerank ?? (searchDefaults?.rerank ?? true);\n if (shouldRerank && results.length > 0) {\n const { rerankResults } = await import(\"../memory/reranker.js\");\n results = await rerankResults(params.query, results, {\n topK: searchOpts.maxResults ?? 5,\n });\n }\n\n // Recency boost (off by default, applied after reranking)\n const recencyDays = params.recencyBoost ?? (searchDefaults?.recencyBoostDays ?? 0);\n if (recencyDays > 0 && results.length > 0) {\n const { applyRecencyBoost } = await import(\"../memory/search.js\");\n results = applyRecencyBoost(results, recencyDays);\n }\n\n const withSlugs = populateSlugs(results, registryDb);\n\n if (withSlugs.length === 0) {\n return {\n content: [\n {\n type: \"text\",\n text: `No results found for query: \"${params.query}\" (mode: ${mode})`,\n },\n ],\n };\n }\n\n const rerankLabel = shouldRerank ? \" +rerank\" : \"\";\n const formatted = withSlugs\n .map((r, i) => {\n const header = `[${i + 1}] ${r.projectSlug ?? `project:${r.projectId}`} — ${r.path} (lines ${r.startLine}-${r.endLine}) score=${r.score.toFixed(4)} tier=${r.tier} source=${r.source}`;\n // Truncate snippet to snippetLength — limit context consumption.\n // MCP results go into Claude's context window; keep each result tight.\n const raw = r.snippet.trim();\n const snippet = raw.length > snippetLength\n ? 
raw.slice(0, snippetLength) + \"...\"\n : raw;\n return `${header}\\n${snippet}`;\n })\n .join(\"\\n\\n---\\n\\n\");\n\n return {\n content: [\n {\n type: \"text\",\n text: `Found ${withSlugs.length} result(s) for \"${params.query}\" (mode: ${mode}${rerankLabel}):\\n\\n${formatted}`,\n },\n ],\n };\n } catch (e) {\n return {\n content: [{ type: \"text\", text: `Search error: ${String(e)}` }],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: memory_get\n// ---------------------------------------------------------------------------\n\nexport interface MemoryGetParams {\n project: string;\n path: string;\n from?: number;\n lines?: number;\n}\n\nexport function toolMemoryGet(\n registryDb: Database,\n params: MemoryGetParams\n): ToolResult {\n try {\n const projectId = lookupProjectId(registryDb, params.project);\n if (projectId == null) {\n return {\n content: [\n { type: \"text\", text: `Project not found: ${params.project}` },\n ],\n isError: true,\n };\n }\n\n const project = registryDb\n .prepare(\"SELECT root_path FROM projects WHERE id = ?\")\n .get(projectId) as { root_path: string } | undefined;\n\n if (!project) {\n return {\n content: [\n { type: \"text\", text: `Project not found: ${params.project}` },\n ],\n isError: true,\n };\n }\n\n const requestedPath = params.path;\n if (requestedPath.includes(\"..\") || isAbsolute(requestedPath)) {\n return {\n content: [\n {\n type: \"text\",\n text: `Invalid path: ${params.path} (must be a relative path within the project root, no ../ allowed)`,\n },\n ],\n isError: true,\n };\n }\n\n const fullPath = join(project.root_path, requestedPath);\n const resolvedFull = resolve(fullPath);\n const resolvedRoot = resolve(project.root_path);\n\n if (\n !resolvedFull.startsWith(resolvedRoot + \"/\") &&\n resolvedFull !== resolvedRoot\n ) {\n return {\n content: [\n { type: \"text\", text: `Path traversal blocked: ${params.path}` },\n ],\n isError: true,\n };\n }\n\n if (!existsSync(fullPath)) {\n return {\n content: [\n {\n type: \"text\",\n text: `File not found: ${requestedPath} (project: ${params.project})`,\n },\n ],\n isError: true,\n };\n }\n\n const stat = statSync(fullPath);\n if (stat.size > 5 * 1024 * 1024) {\n return {\n content: [\n {\n type: \"text\",\n text: `Error: file too large (${(stat.size / 1024 / 1024).toFixed(1)} MB). Maximum 5 MB.`,\n },\n ],\n };\n }\n\n const content = readFileSync(fullPath, \"utf8\");\n const allLines = content.split(\"\\n\");\n\n const fromLine = (params.from ?? 1) - 1;\n const toLine =\n params.lines != null\n ? Math.min(fromLine + params.lines, allLines.length)\n : allLines.length;\n\n const selectedLines = allLines.slice(fromLine, toLine);\n const text = selectedLines.join(\"\\n\");\n\n const header =\n params.from != null\n ? 
`${params.project}/${requestedPath} (lines ${fromLine + 1}-${toLine}):`\n : `${params.project}/${requestedPath}:`;\n\n return {\n content: [{ type: \"text\", text: `${header}\\n\\n${text}` }],\n };\n } catch (e) {\n return {\n content: [{ type: \"text\", text: `Read error: ${String(e)}` }],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: project_info\n// ---------------------------------------------------------------------------\n\nexport interface ProjectInfoParams {\n slug?: string;\n}\n\nexport function toolProjectInfo(\n registryDb: Database,\n params: ProjectInfoParams\n): ToolResult {\n try {\n let project: ProjectRow | null = null;\n\n if (params.slug) {\n const projectId = lookupProjectId(registryDb, params.slug);\n if (projectId != null) {\n project = registryDb\n .prepare(\n \"SELECT id, slug, display_name, root_path, type, status, created_at, updated_at, archived_at, parent_id, obsidian_link FROM projects WHERE id = ?\"\n )\n .get(projectId) as ProjectRow | null;\n }\n } else {\n const cwd = process.cwd();\n project = detectProjectFromPath(registryDb, cwd);\n }\n\n if (!project) {\n const message = params.slug\n ? `Project not found: ${params.slug}`\n : `No PAI project found matching the current directory: ${process.cwd()}`;\n return {\n content: [{ type: \"text\", text: message }],\n isError: !params.slug,\n };\n }\n\n return {\n content: [{ type: \"text\", text: formatProject(registryDb, project) }],\n };\n } catch (e) {\n return {\n content: [{ type: \"text\", text: `project_info error: ${String(e)}` }],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: project_list\n// ---------------------------------------------------------------------------\n\nexport interface ProjectListParams {\n status?: \"active\" | \"archived\" | \"migrating\";\n tag?: string;\n limit?: number;\n}\n\nexport function toolProjectList(\n registryDb: Database,\n params: ProjectListParams\n): ToolResult {\n try {\n const conditions: string[] = [];\n const queryParams: (string | number)[] = [];\n\n if (params.status) {\n conditions.push(\"p.status = ?\");\n queryParams.push(params.status);\n }\n\n if (params.tag) {\n conditions.push(\n \"p.id IN (SELECT pt.project_id FROM project_tags pt JOIN tags t ON pt.tag_id = t.id WHERE t.name = ?)\"\n );\n queryParams.push(params.tag);\n }\n\n const where =\n conditions.length > 0 ? `WHERE ${conditions.join(\" AND \")}` : \"\";\n const limit = params.limit ?? 
50;\n queryParams.push(limit);\n\n const projects = registryDb\n .prepare(\n `SELECT p.id, p.slug, p.display_name, p.root_path, p.type, p.status, p.updated_at\n FROM projects p\n ${where}\n ORDER BY p.updated_at DESC\n LIMIT ?`\n )\n .all(...queryParams) as Array<{\n id: number;\n slug: string;\n display_name: string;\n root_path: string;\n type: string;\n status: string;\n updated_at: number;\n }>;\n\n if (projects.length === 0) {\n return {\n content: [\n {\n type: \"text\",\n text: \"No projects found matching the given filters.\",\n },\n ],\n };\n }\n\n const lines = projects.map(\n (p) =>\n `${p.slug} [${p.status}] ${p.root_path} (updated: ${new Date(p.updated_at).toISOString().slice(0, 10)})`\n );\n\n return {\n content: [\n {\n type: \"text\",\n text: `${projects.length} project(s):\\n\\n${lines.join(\"\\n\")}`,\n },\n ],\n };\n } catch (e) {\n return {\n content: [{ type: \"text\", text: `project_list error: ${String(e)}` }],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: session_list\n// ---------------------------------------------------------------------------\n\nexport interface SessionListParams {\n project: string;\n limit?: number;\n status?: \"open\" | \"completed\" | \"compacted\";\n}\n\nexport function toolSessionList(\n registryDb: Database,\n params: SessionListParams\n): ToolResult {\n try {\n const projectId = lookupProjectId(registryDb, params.project);\n if (projectId == null) {\n return {\n content: [\n { type: \"text\", text: `Project not found: ${params.project}` },\n ],\n isError: true,\n };\n }\n\n const conditions = [\"project_id = ?\"];\n const queryParams: (string | number)[] = [projectId];\n\n if (params.status) {\n conditions.push(\"status = ?\");\n queryParams.push(params.status);\n }\n\n const limit = params.limit ?? 
10;\n queryParams.push(limit);\n\n const sessions = registryDb\n .prepare(\n `SELECT number, date, title, filename, status\n FROM sessions\n WHERE ${conditions.join(\" AND \")}\n ORDER BY number DESC\n LIMIT ?`\n )\n .all(...queryParams) as Array<{\n number: number;\n date: string;\n title: string;\n filename: string;\n status: string;\n }>;\n\n if (sessions.length === 0) {\n return {\n content: [\n {\n type: \"text\",\n text: `No sessions found for project: ${params.project}`,\n },\n ],\n };\n }\n\n const lines = sessions.map(\n (s) =>\n `#${String(s.number).padStart(4, \"0\")} ${s.date} [${s.status}] ${s.title}\\n file: Notes/${s.filename}`\n );\n\n return {\n content: [\n {\n type: \"text\",\n text: `${sessions.length} session(s) for ${params.project}:\\n\\n${lines.join(\"\\n\\n\")}`,\n },\n ],\n };\n } catch (e) {\n return {\n content: [{ type: \"text\", text: `session_list error: ${String(e)}` }],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: registry_search\n// ---------------------------------------------------------------------------\n\nexport interface RegistrySearchParams {\n query: string;\n}\n\nexport function toolRegistrySearch(\n registryDb: Database,\n params: RegistrySearchParams\n): ToolResult {\n try {\n const q = `%${params.query}%`;\n const projects = registryDb\n .prepare(\n `SELECT id, slug, display_name, root_path, type, status, updated_at\n FROM projects\n WHERE slug LIKE ?\n OR display_name LIKE ?\n OR root_path LIKE ?\n ORDER BY updated_at DESC\n LIMIT 20`\n )\n .all(q, q, q) as Array<{\n id: number;\n slug: string;\n display_name: string;\n root_path: string;\n type: string;\n status: string;\n updated_at: number;\n }>;\n\n if (projects.length === 0) {\n return {\n content: [\n {\n type: \"text\",\n text: `No projects found matching: \"${params.query}\"`,\n },\n ],\n };\n }\n\n const lines = projects.map((p) => `${p.slug} [${p.status}] ${p.root_path}`);\n\n return {\n content: [\n {\n type: \"text\",\n text: `${projects.length} match(es) for \"${params.query}\":\\n\\n${lines.join(\"\\n\")}`,\n },\n ],\n };\n } catch (e) {\n return {\n content: [\n { type: \"text\", text: `registry_search error: ${String(e)}` },\n ],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: project_detect\n// ---------------------------------------------------------------------------\n\nexport interface ProjectDetectParams {\n cwd?: string;\n}\n\nexport function toolProjectDetect(\n registryDb: Database,\n params: ProjectDetectParams\n): ToolResult {\n try {\n const detection = detectProject(registryDb, params.cwd);\n\n if (!detection) {\n const target = params.cwd ?? process.cwd();\n return {\n content: [\n {\n type: \"text\",\n text: `No registered project found for path: ${target}\\n\\nRun 'pai project add .' 
to register this directory.`,\n },\n ],\n };\n }\n\n return {\n content: [{ type: \"text\", text: formatDetectionJson(detection) }],\n };\n } catch (e) {\n return {\n content: [\n { type: \"text\", text: `project_detect error: ${String(e)}` },\n ],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: project_health\n// ---------------------------------------------------------------------------\n\nexport interface ProjectHealthParams {\n category?: \"active\" | \"stale\" | \"dead\" | \"all\";\n}\n\nexport async function toolProjectHealth(\n registryDb: Database,\n params: ProjectHealthParams\n): Promise<ToolResult> {\n try {\n const { existsSync: fsExists, readdirSync, statSync } = await import(\n \"node:fs\"\n );\n const {\n join: pathJoin,\n basename: pathBasename,\n } = await import(\"node:path\");\n const { homedir } = await import(\"node:os\");\n const { encodeDir: enc } = await import(\"../cli/utils.js\");\n\n interface HealthRowLocal {\n id: number;\n slug: string;\n display_name: string;\n root_path: string;\n encoded_dir: string;\n status: string;\n type: string;\n session_count: number;\n }\n\n const rows = registryDb\n .prepare(\n `SELECT p.id, p.slug, p.display_name, p.root_path, p.encoded_dir, p.status, p.type,\n (SELECT COUNT(*) FROM sessions s WHERE s.project_id = p.id) AS session_count\n FROM projects p\n ORDER BY p.slug ASC`\n )\n .all() as HealthRowLocal[];\n\n const home = homedir();\n const claudeProjects = pathJoin(home, \".claude\", \"projects\");\n\n function suggestMoved(rootPath: string): string | undefined {\n const name = pathBasename(rootPath);\n const candidates = [\n pathJoin(home, \"dev\", name),\n pathJoin(home, \"dev\", \"ai\", name),\n pathJoin(home, \"Desktop\", name),\n pathJoin(home, \"Projects\", name),\n ];\n return candidates.find((c) => fsExists(c));\n }\n\n function hasClaudeNotes(encodedDir: string): boolean {\n if (!fsExists(claudeProjects)) return false;\n try {\n for (const entry of readdirSync(claudeProjects)) {\n if (entry !== encodedDir && !entry.startsWith(encodedDir)) continue;\n const full = pathJoin(claudeProjects, entry);\n try {\n if (!statSync(full).isDirectory()) continue;\n } catch {\n continue;\n }\n if (fsExists(pathJoin(full, \"Notes\"))) return true;\n }\n } catch {\n /* ignore */\n }\n return false;\n }\n\n interface HealthResult {\n slug: string;\n display_name: string;\n root_path: string;\n status: string;\n type: string;\n session_count: number;\n health: string;\n suggested_path: string | null;\n has_claude_notes: boolean;\n todo: {\n found: boolean;\n path: string | null;\n has_continue: boolean;\n };\n }\n\n function findTodoForProject(rootPath: string): {\n found: boolean;\n path: string | null;\n has_continue: boolean;\n } {\n const locs = [\n \"Notes/TODO.md\",\n \".claude/Notes/TODO.md\",\n \"tasks/todo.md\",\n \"TODO.md\",\n ];\n for (const rel of locs) {\n const full = pathJoin(rootPath, rel);\n if (fsExists(full)) {\n try {\n const raw = readFileSync(full, \"utf8\");\n const hasContinue = /^## Continue$/m.test(raw);\n return { found: true, path: rel, has_continue: hasContinue };\n } catch {\n return { found: true, path: rel, has_continue: false };\n }\n }\n }\n return { found: false, path: null, has_continue: false };\n }\n\n const results: HealthResult[] = rows.map((p) => {\n const pathExists = fsExists(p.root_path);\n let health: string;\n let suggestedPath: string | null = null;\n\n if (pathExists) {\n health = \"active\";\n } else {\n 
suggestedPath = suggestMoved(p.root_path) ?? null;\n health = suggestedPath ? \"stale\" : \"dead\";\n }\n\n const todo = pathExists\n ? findTodoForProject(p.root_path)\n : { found: false, path: null, has_continue: false };\n\n return {\n slug: p.slug,\n display_name: p.display_name,\n root_path: p.root_path,\n status: p.status,\n type: p.type,\n session_count: p.session_count,\n health,\n suggested_path: suggestedPath,\n has_claude_notes: hasClaudeNotes(p.encoded_dir),\n todo,\n };\n });\n\n const filtered =\n !params.category || params.category === \"all\"\n ? results\n : results.filter((r) => r.health === params.category);\n\n const summary = {\n total: rows.length,\n active: results.filter((r) => r.health === \"active\").length,\n stale: results.filter((r) => r.health === \"stale\").length,\n dead: results.filter((r) => r.health === \"dead\").length,\n };\n\n return {\n content: [\n {\n type: \"text\",\n text: JSON.stringify({ summary, projects: filtered }, null, 2),\n },\n ],\n };\n } catch (e) {\n return {\n content: [\n { type: \"text\", text: `project_health error: ${String(e)}` },\n ],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: project_todo\n// ---------------------------------------------------------------------------\n\nexport interface ProjectTodoParams {\n project?: string;\n}\n\n/**\n * TODO candidate locations searched in priority order.\n * Returns the first one that exists, along with its label.\n */\nconst TODO_LOCATIONS = [\n { rel: \"Notes/TODO.md\", label: \"Notes/TODO.md\" },\n { rel: \".claude/Notes/TODO.md\", label: \".claude/Notes/TODO.md\" },\n { rel: \"tasks/todo.md\", label: \"tasks/todo.md\" },\n { rel: \"TODO.md\", label: \"TODO.md\" },\n];\n\n/**\n * Given TODO file content, extract and surface the ## Continue section first,\n * then return the remaining content. 
Returns an object with:\n * continueSection: string | null\n * fullContent: string\n * hasContinue: boolean\n */\nfunction parseTodoContent(raw: string): {\n continueSection: string | null;\n fullContent: string;\n hasContinue: boolean;\n} {\n const lines = raw.split(\"\\n\");\n\n // Find the ## Continue heading\n const continueIdx = lines.findIndex(\n (l) => l.trim() === \"## Continue\"\n );\n\n if (continueIdx === -1) {\n return { continueSection: null, fullContent: raw, hasContinue: false };\n }\n\n // The section ends at the first `---` separator or next `##` heading after\n // the Continue heading (whichever comes first).\n let endIdx = lines.length;\n for (let i = continueIdx + 1; i < lines.length; i++) {\n const trimmed = lines[i].trim();\n if (trimmed === \"---\" || (trimmed.startsWith(\"##\") && trimmed !== \"## Continue\")) {\n endIdx = i;\n break;\n }\n }\n\n const continueLines = lines.slice(continueIdx, endIdx);\n const continueSection = continueLines.join(\"\\n\").trim();\n\n return { continueSection, fullContent: raw, hasContinue: true };\n}\n\nexport function toolProjectTodo(\n registryDb: Database,\n params: ProjectTodoParams\n): ToolResult {\n try {\n let rootPath: string;\n let projectSlug: string;\n\n if (params.project) {\n const projectId = lookupProjectId(registryDb, params.project);\n if (projectId == null) {\n return {\n content: [\n { type: \"text\", text: `Project not found: ${params.project}` },\n ],\n isError: true,\n };\n }\n\n const row = registryDb\n .prepare(\"SELECT root_path, slug FROM projects WHERE id = ?\")\n .get(projectId) as { root_path: string; slug: string } | undefined;\n\n if (!row) {\n return {\n content: [\n { type: \"text\", text: `Project not found: ${params.project}` },\n ],\n isError: true,\n };\n }\n\n rootPath = row.root_path;\n projectSlug = row.slug;\n } else {\n // Auto-detect from cwd\n const project = detectProjectFromPath(registryDb, process.cwd());\n if (!project) {\n return {\n content: [\n {\n type: \"text\",\n text: `No PAI project found matching the current directory: ${process.cwd()}\\n\\nProvide a project slug or run 'pai project add .' 
to register this directory.`,\n },\n ],\n };\n }\n rootPath = project.root_path;\n projectSlug = project.slug;\n }\n\n // Search for TODO in priority order\n for (const loc of TODO_LOCATIONS) {\n const fullPath = join(rootPath, loc.rel);\n if (existsSync(fullPath)) {\n const raw = readFileSync(fullPath, \"utf8\");\n const { continueSection, fullContent, hasContinue } = parseTodoContent(raw);\n\n let output: string;\n if (hasContinue && continueSection) {\n // Surface the ## Continue section first, then the full content\n output = [\n `TODO found: ${projectSlug}/${loc.label}`,\n \"\",\n \"=== CONTINUE SECTION (surfaced first) ===\",\n continueSection,\n \"\",\n \"=== FULL TODO CONTENT ===\",\n fullContent,\n ].join(\"\\n\");\n } else {\n output = [\n `TODO found: ${projectSlug}/${loc.label}`,\n \"\",\n fullContent,\n ].join(\"\\n\");\n }\n\n return {\n content: [{ type: \"text\", text: output }],\n };\n }\n }\n\n // No TODO found in any location\n const searched = TODO_LOCATIONS.map((l) => ` ${rootPath}/${l.rel}`).join(\"\\n\");\n return {\n content: [\n {\n type: \"text\",\n text: [\n `No TODO.md found for project: ${projectSlug}`,\n \"\",\n \"Searched locations (in order):\",\n searched,\n \"\",\n \"Create a TODO with: echo '## Tasks\\\\n- [ ] First task' > Notes/TODO.md\",\n ].join(\"\\n\"),\n },\n ],\n };\n } catch (e) {\n return {\n content: [{ type: \"text\", text: `project_todo error: ${String(e)}` }],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: notification_config\n// ---------------------------------------------------------------------------\n\nexport interface NotificationConfigParams {\n /** Action to perform */\n action: \"get\" | \"set\" | \"send\";\n /** For action=\"set\": the notification mode to activate */\n mode?: NotificationMode;\n /** For action=\"set\": partial channel config overrides (JSON object) */\n channels?: Record<string, unknown>;\n /** For action=\"set\": partial routing overrides (JSON object) */\n routing?: Record<string, unknown>;\n /** For action=\"send\": the event type */\n event?: NotificationEvent;\n /** For action=\"send\": the notification message */\n message?: string;\n /** For action=\"send\": optional title */\n title?: string;\n}\n\n/**\n * Handle notification config queries and updates via the daemon IPC.\n * Falls back gracefully if the daemon is not running.\n */\nexport async function toolNotificationConfig(\n params: NotificationConfigParams\n): Promise<ToolResult> {\n try {\n const { PaiClient } = await import(\"../daemon/ipc-client.js\");\n const client = new PaiClient();\n\n if (params.action === \"get\") {\n const { config, activeChannels } = await client.getNotificationConfig();\n const lines = [\n `mode: ${config.mode}`,\n `active_channels: ${activeChannels.join(\", \") || \"(none)\"}`,\n \"\",\n \"channels:\",\n ...Object.entries(config.channels).map(([ch, cfg]) => {\n const c = cfg as { enabled: boolean };\n return ` ${ch}: ${c.enabled ? 
\"enabled\" : \"disabled\"}`;\n }),\n \"\",\n \"routing:\",\n ...Object.entries(config.routing).map(\n ([event, channels]) => ` ${event}: ${(channels as string[]).join(\", \") || \"(none)\"}`\n ),\n ];\n return {\n content: [{ type: \"text\", text: lines.join(\"\\n\") }],\n };\n }\n\n if (params.action === \"set\") {\n if (!params.mode && !params.channels && !params.routing) {\n return {\n content: [\n {\n type: \"text\",\n text: \"notification_config set: provide at least one of mode, channels, or routing.\",\n },\n ],\n isError: true,\n };\n }\n const result = await client.setNotificationConfig({\n mode: params.mode,\n channels: params.channels as Parameters<typeof client.setNotificationConfig>[0][\"channels\"],\n routing: params.routing as Parameters<typeof client.setNotificationConfig>[0][\"routing\"],\n });\n return {\n content: [\n {\n type: \"text\",\n text: `Notification config updated. Mode: ${result.config.mode}`,\n },\n ],\n };\n }\n\n if (params.action === \"send\") {\n if (!params.message) {\n return {\n content: [\n { type: \"text\", text: \"notification_config send: message is required.\" },\n ],\n isError: true,\n };\n }\n const result = await client.sendNotification({\n event: params.event ?? \"info\",\n message: params.message,\n title: params.title,\n });\n const lines = [\n `mode: ${result.mode}`,\n `attempted: ${result.channelsAttempted.join(\", \") || \"(none)\"}`,\n `succeeded: ${result.channelsSucceeded.join(\", \") || \"(none)\"}`,\n ...(result.channelsFailed.length > 0\n ? [`failed: ${result.channelsFailed.join(\", \")}`]\n : []),\n ];\n return {\n content: [{ type: \"text\", text: lines.join(\"\\n\") }],\n };\n }\n\n return {\n content: [\n {\n type: \"text\",\n text: `Unknown action: ${String(params.action)}. Use \"get\", \"set\", or \"send\".`,\n },\n ],\n isError: true,\n };\n } catch (e) {\n return {\n content: [\n { type: \"text\", text: `notification_config error: ${String(e)}` },\n ],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: topic_detect\n// ---------------------------------------------------------------------------\n\nexport interface TopicDetectParams {\n /** Recent conversation context (a few sentences summarising recent activity) */\n context: string;\n /** The project slug the session is currently routed to. */\n current_project?: string;\n /**\n * Minimum confidence [0,1] to declare a shift. Default: 0.6.\n * Higher = less sensitive (fewer false positives).\n */\n threshold?: number;\n}\n\n/**\n * Detect whether recent conversation context has shifted to a different project.\n * Uses memory_search to find which project best matches the context, then\n * compares against the current project.\n *\n * Calls the daemon via IPC so it has access to the storage backend.\n * Falls back gracefully if the daemon is not running.\n */\nexport async function toolTopicDetect(\n params: TopicDetectParams\n): Promise<ToolResult> {\n try {\n const { PaiClient } = await import(\"../daemon/ipc-client.js\");\n const client = new PaiClient();\n\n const result = await client.topicCheck({\n context: params.context,\n currentProject: params.current_project,\n threshold: params.threshold,\n });\n\n const lines: string[] = [\n `shifted: ${result.shifted}`,\n `current_project: ${result.currentProject ?? \"(none)\"}`,\n `suggested_project: ${result.suggestedProject ?? 
\"(none)\"}`,\n `confidence: ${result.confidence.toFixed(3)}`,\n `chunks_scored: ${result.chunkCount}`,\n ];\n\n if (result.topProjects.length > 0) {\n lines.push(\"\");\n lines.push(\"top_matches:\");\n for (const p of result.topProjects) {\n lines.push(` ${p.slug}: ${(p.score * 100).toFixed(1)}%`);\n }\n }\n\n if (result.shifted) {\n lines.push(\"\");\n lines.push(\n `TOPIC SHIFT DETECTED: conversation appears to be about \"${result.suggestedProject}\" ` +\n `(confidence: ${(result.confidence * 100).toFixed(0)}%), not \"${result.currentProject}\".`\n );\n }\n\n return {\n content: [{ type: \"text\", text: lines.join(\"\\n\") }],\n };\n } catch (e) {\n return {\n content: [{ type: \"text\", text: `topic_detect error: ${String(e)}` }],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: session_route\n// ---------------------------------------------------------------------------\n\nexport interface SessionRouteParams {\n /** Working directory to route from (defaults to process.cwd()) */\n cwd?: string;\n /** Optional conversation context for topic-based fallback routing */\n context?: string;\n}\n\n/**\n * Automatically suggest which project a session belongs to.\n *\n * Strategy (in priority order):\n * 1. path — exact or parent-directory match in the project registry\n * 2. marker — walk up from cwd looking for Notes/PAI.md\n * 3. topic — BM25 keyword search against memory (requires context)\n *\n * Call this at session start (e.g., from CLAUDE.md or a session-start hook)\n * to automatically route the session to the correct project.\n */\nexport async function toolSessionRoute(\n registryDb: Database,\n federation: Database | StorageBackend,\n params: SessionRouteParams\n): Promise<ToolResult> {\n try {\n const { autoRoute, formatAutoRouteJson } = await import(\"../session/auto-route.js\");\n\n const result = await autoRoute(\n registryDb,\n federation,\n params.cwd,\n params.context\n );\n\n if (!result) {\n const target = params.cwd ?? process.cwd();\n return {\n content: [\n {\n type: \"text\",\n text: [\n `No project match found for: ${target}`,\n \"\",\n \"Tried: path match, PAI.md marker walk\" +\n (params.context ? \", topic detection\" : \"\"),\n \"\",\n \"Run 'pai project add .' 
to register this directory,\",\n \"or provide conversation context for topic-based routing.\",\n ].join(\"\\n\"),\n },\n ],\n };\n }\n\n return {\n content: [{ type: \"text\", text: formatAutoRouteJson(result) }],\n };\n } catch (e) {\n return {\n content: [{ type: \"text\", text: `session_route error: ${String(e)}` }],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: zettel_explore\n// ---------------------------------------------------------------------------\n\nexport interface ZettelExploreParams {\n start_note: string;\n depth?: number;\n direction?: string;\n mode?: string;\n}\n\nexport async function toolZettelExplore(\n federationDb: Database,\n params: ZettelExploreParams\n): Promise<ToolResult> {\n try {\n const { zettelExplore } = await import(\"../zettelkasten/index.js\");\n const result = zettelExplore(federationDb, {\n startNote: params.start_note,\n depth: params.depth,\n direction: params.direction as \"forward\" | \"backward\" | \"both\" | undefined,\n mode: params.mode as \"sequential\" | \"associative\" | \"all\" | undefined,\n });\n return {\n content: [{ type: \"text\", text: JSON.stringify(result, null, 2) }],\n };\n } catch (e) {\n return {\n content: [{ type: \"text\", text: `zettel_explore error: ${String(e)}` }],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: zettel_health\n// ---------------------------------------------------------------------------\n\nexport interface ZettelHealthParams {\n scope?: string;\n project_path?: string;\n recent_days?: number;\n include?: string[];\n}\n\nexport async function toolZettelHealth(\n federationDb: Database,\n params: ZettelHealthParams\n): Promise<ToolResult> {\n try {\n const { zettelHealth } = await import(\"../zettelkasten/index.js\");\n const result = zettelHealth(federationDb, {\n scope: params.scope as \"full\" | \"recent\" | \"project\" | undefined,\n projectPath: params.project_path,\n recentDays: params.recent_days,\n include: params.include as Array<\"dead_links\" | \"orphans\" | \"disconnected\" | \"low_connectivity\"> | undefined,\n });\n return {\n content: [{ type: \"text\", text: JSON.stringify(result, null, 2) }],\n };\n } catch (e) {\n return {\n content: [{ type: \"text\", text: `zettel_health error: ${String(e)}` }],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: zettel_surprise\n// ---------------------------------------------------------------------------\n\nexport interface ZettelSurpriseParams {\n reference_path: string;\n vault_project_id: number;\n limit?: number;\n min_similarity?: number;\n min_graph_distance?: number;\n}\n\nexport async function toolZettelSurprise(\n federationDb: Database,\n params: ZettelSurpriseParams\n): Promise<ToolResult> {\n try {\n const { zettelSurprise } = await import(\"../zettelkasten/index.js\");\n const results = await zettelSurprise(federationDb, {\n referencePath: params.reference_path,\n vaultProjectId: params.vault_project_id,\n limit: params.limit,\n minSimilarity: params.min_similarity,\n minGraphDistance: params.min_graph_distance,\n });\n return {\n content: [{ type: \"text\", text: JSON.stringify(results, null, 2) }],\n };\n } catch (e) {\n return {\n content: [{ type: \"text\", text: `zettel_surprise error: ${String(e)}` }],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// 
Tool: zettel_suggest\n// ---------------------------------------------------------------------------\n\nexport interface ZettelSuggestParams {\n note_path: string;\n vault_project_id: number;\n limit?: number;\n exclude_linked?: boolean;\n}\n\nexport async function toolZettelSuggest(\n federationDb: Database,\n params: ZettelSuggestParams\n): Promise<ToolResult> {\n try {\n const { zettelSuggest } = await import(\"../zettelkasten/index.js\");\n const results = await zettelSuggest(federationDb, {\n notePath: params.note_path,\n vaultProjectId: params.vault_project_id,\n limit: params.limit,\n excludeLinked: params.exclude_linked,\n });\n return {\n content: [{ type: \"text\", text: JSON.stringify(results, null, 2) }],\n };\n } catch (e) {\n return {\n content: [{ type: \"text\", text: `zettel_suggest error: ${String(e)}` }],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: zettel_converse\n// ---------------------------------------------------------------------------\n\nexport interface ZettelConverseParams {\n question: string;\n vault_project_id: number;\n depth?: number;\n limit?: number;\n}\n\nexport async function toolZettelConverse(\n federationDb: Database,\n params: ZettelConverseParams\n): Promise<ToolResult> {\n try {\n const { zettelConverse } = await import(\"../zettelkasten/index.js\");\n const result = await zettelConverse(federationDb, {\n question: params.question,\n vaultProjectId: params.vault_project_id,\n depth: params.depth,\n limit: params.limit,\n });\n return {\n content: [{ type: \"text\", text: JSON.stringify(result, null, 2) }],\n };\n } catch (e) {\n return {\n content: [{ type: \"text\", text: `zettel_converse error: ${String(e)}` }],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool: zettel_themes\n// ---------------------------------------------------------------------------\n\nexport interface ZettelThemesParams {\n vault_project_id: number;\n lookback_days?: number;\n min_cluster_size?: number;\n max_themes?: number;\n similarity_threshold?: number;\n}\n\nexport async function toolZettelThemes(\n federationDb: Database,\n params: ZettelThemesParams\n): Promise<ToolResult> {\n try {\n const { zettelThemes } = await import(\"../zettelkasten/index.js\");\n const result = await zettelThemes(federationDb, {\n vaultProjectId: params.vault_project_id,\n lookbackDays: params.lookback_days,\n minClusterSize: params.min_cluster_size,\n maxThemes: params.max_themes,\n similarityThreshold: params.similarity_threshold,\n });\n return {\n content: [{ type: \"text\", text: JSON.stringify(result, null, 2) }],\n };\n } catch (e) {\n return {\n content: [{ type: \"text\", text: `zettel_themes error: ${String(e)}` }],\n isError: true,\n };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Hybrid search helper (backend-agnostic)\n// ---------------------------------------------------------------------------\n\nimport type { SearchResult } from \"../memory/search.js\";\n\n/**\n * Combine keyword + semantic results using min-max normalized scoring.\n * Mirrors the logic in searchMemoryHybrid() from memory/search.ts,\n * but works on pre-computed result arrays so it works for any backend.\n */\nfunction combineHybridResults(\n keywordResults: SearchResult[],\n semanticResults: SearchResult[],\n maxResults: number,\n keywordWeight = 0.5,\n semanticWeight = 0.5\n): SearchResult[] {\n if 
(keywordResults.length === 0 && semanticResults.length === 0) return [];\n\n const keyFor = (r: SearchResult) =>\n `${r.projectId}:${r.path}:${r.startLine}:${r.endLine}`;\n\n function minMaxNormalize(items: SearchResult[]): Map<string, number> {\n if (items.length === 0) return new Map();\n const min = Math.min(...items.map((r) => r.score));\n const max = Math.max(...items.map((r) => r.score));\n const range = max - min;\n const m = new Map<string, number>();\n for (const r of items) {\n m.set(keyFor(r), range === 0 ? 1 : (r.score - min) / range);\n }\n return m;\n }\n\n const kwNorm = minMaxNormalize(keywordResults);\n const semNorm = minMaxNormalize(semanticResults);\n\n const allKeys = new Set<string>([\n ...keywordResults.map(keyFor),\n ...semanticResults.map(keyFor),\n ]);\n\n const metaMap = new Map<string, SearchResult>();\n for (const r of [...keywordResults, ...semanticResults]) {\n metaMap.set(keyFor(r), r);\n }\n\n const combined: Array<SearchResult & { combinedScore: number }> = [];\n for (const key of allKeys) {\n const meta = metaMap.get(key)!;\n const kwScore = kwNorm.get(key) ?? 0;\n const semScore = semNorm.get(key) ?? 0;\n const combinedScore = keywordWeight * kwScore + semanticWeight * semScore;\n combined.push({ ...meta, score: combinedScore, combinedScore });\n }\n\n return combined\n .sort((a, b) => b.score - a.score)\n .slice(0, maxResults)\n .map(({ combinedScore: _unused, ...r }) => r);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAqCA,SAAgB,gBACd,YACA,MACe;CACf,MAAM,SAAS,WACZ,QAAQ,yCAAyC,CACjD,IAAI,KAAK;AACZ,KAAI,OAAQ,QAAO,OAAO;CAE1B,MAAM,UAAU,WACb,QAAQ,iDAAiD,CACzD,IAAI,KAAK;AACZ,KAAI,QAAS,QAAO,QAAQ;AAE5B,QAAO;;AAqBT,SAAgB,sBACd,YACA,QACmB;CACnB,MAAM,WAAW,QAAQ,OAAO;CAEhC,MAAM,QAAQ,WACX,QACC,mHACD,CACA,IAAI,SAAS;AAEhB,KAAI,MAAO,QAAO;CAElB,MAAM,MAAM,WACT,QACC,+HACD,CACA,KAAK;AAER,MAAK,MAAM,WAAW,IACpB,KACE,SAAS,WAAW,QAAQ,YAAY,IAAI,IAC5C,aAAa,QAAQ,UAErB,QAAO;AAIX,QAAO;;AAOT,SAAgB,cAAc,YAAsB,SAA6B;CAC/E,MAAM,eACJ,WACG,QAAQ,0DAA0D,CAClE,IAAI,QAAQ,GAAG,CAClB;CAEF,MAAM,cAAc,WACjB,QACC,4EACD,CACA,IAAI,QAAQ,GAAG;CAElB,MAAM,OACJ,WACG,QACC;;;0BAID,CACA,IAAI,QAAQ,GAAG,CAClB,KAAK,MAAM,EAAE,KAAK;CAEpB,MAAM,UACJ,WACG,QAAQ,gEAAgE,CACxE,IAAI,QAAQ,GAAG,CAClB,KAAK,MAAM,EAAE,MAAM;CAErB,MAAM,QAAkB;EACtB,SAAS,QAAQ;EACjB,iBAAiB,QAAQ;EACzB,cAAc,QAAQ;EACtB,SAAS,QAAQ;EACjB,WAAW,QAAQ;EACnB,aAAa;EACd;AAED,KAAI,YAAa,OAAM,KAAK,iBAAiB,YAAY,OAAO;AAChE,KAAI,KAAK,OAAQ,OAAM,KAAK,SAAS,KAAK,KAAK,KAAK,GAAG;AACvD,KAAI,QAAQ,OAAQ,OAAM,KAAK,YAAY,QAAQ,KAAK,KAAK,GAAG;AAChE,KAAI,QAAQ,cAAe,OAAM,KAAK,kBAAkB,QAAQ,gBAAgB;AAChF,KAAI,QAAQ,YACV,OAAM,KACJ,gBAAgB,IAAI,KAAK,QAAQ,YAAY,CAAC,aAAa,CAAC,MAAM,GAAG,GAAG,GACzE;AAGH,QAAO,MAAM,KAAK,KAAK;;AAuBzB,eAAsB,iBACpB,YACA,YACA,QACA,gBACqB;AACrB,KAAI;EACF,MAAM,aAAmC,OAAO,iBACrC;GACL,MAAM,KAAK,gBAAgB,YAAY,OAAO,QAAS;AACvD,UAAO,MAAM,OAAO,CAAC,GAAG,GAAG,EAAE;MAC3B,GACJ;AAEJ,MAAI,OAAO,YAAY,CAAC,cAAc,WAAW,WAAW,GAC1D,QAAO;GACL,SAAS,CACP;IAAE,MAAM;IAAQ,MAAM,sBAAsB,OAAO;IAAW,CAC/D;GACD,SAAS;GACV;EAMH,MAAM,OAAO,OAAO,QAAS,gBAAgB,QAAQ;EAIrD,MAAM,gBAAgB,OAAO,iBAAkB,gBAAgB,iBAAiB;EAChF,MAAM,aAAa;GACjB;GACA,SAAS,OAAO;GAChB,YAAY,OAAO,SAAU,gBAAgB,gBAAgB;GAC9D;EAED,IAAI;EAGJ,MAAM,aAAa,MACjB,iBAAiB;AAEnB,MAAI,UAAU,WAAW,CAEvB,KAAI,SAAS,UACX,WAAU,MAAM,WAAW,cAAc,OAAO,OAAO,WAAW;WACzD,SAAS,cAAc,SAAS,UAAU;GACnD,MAAM,EAAE,sBAAsB,MAAM,OAAO;GAC3C,MAAM,iBAAiB,MAAM,kBAAkB,OAAO,OAAO,KAAK;AAElE,OAAI,SAAS,WACX,WAAU,MAAM,WAAW,eAAe,gBAAgB,WAAW;QAChE;IAEL,MAAM,CAAC,WAAW,cAAc,MAAM,QAAQ,IAAI,CAChD,WAAW,cAAc,OAAO,OAAO;KAAE,GAAG;KAAY,YAAY;KAAI,CAAC,EACzE,WAAW,eAAe,gBAAgB;KAAE,GAAG;KAAY,YAAY;KAAI,CA
AC,CAC7E,CAAC;AAEF,cAAU,qBAAqB,WAAW,YAAY,WAAW,cAAc,GAAG;;QAGpF,WAAU,MAAM,WAAW,cAAc,OAAO,OAAO,WAAW;OAE/D;GAEL,MAAM,EAAE,cAAc,yBAAyB,MAAM,OAAO;AAE5D,OAAI,SAAS,UACX,WAAU,aAAa,YAAY,OAAO,OAAO,WAAW;YACnD,SAAS,cAAc,SAAS,UAAU;IACnD,MAAM,EAAE,sBAAsB,MAAM,OAAO;IAC3C,MAAM,iBAAiB,MAAM,kBAAkB,OAAO,OAAO,KAAK;AAElE,QAAI,SAAS,WACX,WAAU,qBAAqB,YAAY,gBAAgB,WAAW;QAEtE,WAAU,mBACR,YACA,OAAO,OACP,gBACA,WACD;SAGH,WAAU,aAAa,YAAY,OAAO,OAAO,WAAW;;EAKhE,MAAM,eAAe,OAAO,UAAW,gBAAgB,UAAU;AACjE,MAAI,gBAAgB,QAAQ,SAAS,GAAG;GACtC,MAAM,EAAE,kBAAkB,MAAM,OAAO;AACvC,aAAU,MAAM,cAAc,OAAO,OAAO,SAAS,EACnD,MAAM,WAAW,cAAc,GAChC,CAAC;;EAIJ,MAAM,cAAc,OAAO,gBAAiB,gBAAgB,oBAAoB;AAChF,MAAI,cAAc,KAAK,QAAQ,SAAS,GAAG;GACzC,MAAM,EAAE,sBAAsB,MAAM,OAAO;AAC3C,aAAU,kBAAkB,SAAS,YAAY;;EAGnD,MAAM,YAAY,cAAc,SAAS,WAAW;AAEpD,MAAI,UAAU,WAAW,EACvB,QAAO,EACL,SAAS,CACP;GACE,MAAM;GACN,MAAM,gCAAgC,OAAO,MAAM,WAAW,KAAK;GACpE,CACF,EACF;EAGH,MAAM,cAAc,eAAe,aAAa;EAChD,MAAM,YAAY,UACf,KAAK,GAAG,MAAM;GACb,MAAM,SAAS,IAAI,IAAI,EAAE,IAAI,EAAE,eAAe,WAAW,EAAE,YAAY,KAAK,EAAE,KAAK,UAAU,EAAE,UAAU,GAAG,EAAE,QAAQ,UAAU,EAAE,MAAM,QAAQ,EAAE,CAAC,QAAQ,EAAE,KAAK,UAAU,EAAE;GAG9K,MAAM,MAAM,EAAE,QAAQ,MAAM;AAI5B,UAAO,GAAG,OAAO,IAHD,IAAI,SAAS,gBACzB,IAAI,MAAM,GAAG,cAAc,GAAG,QAC9B;IAEJ,CACD,KAAK,cAAc;AAEtB,SAAO,EACL,SAAS,CACP;GACE,MAAM;GACN,MAAM,SAAS,UAAU,OAAO,kBAAkB,OAAO,MAAM,WAAW,OAAO,YAAY,QAAQ;GACtG,CACF,EACF;UACM,GAAG;AACV,SAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAAM,iBAAiB,OAAO,EAAE;IAAI,CAAC;GAC/D,SAAS;GACV;;;AAeL,SAAgB,cACd,YACA,QACY;AACZ,KAAI;EACF,MAAM,YAAY,gBAAgB,YAAY,OAAO,QAAQ;AAC7D,MAAI,aAAa,KACf,QAAO;GACL,SAAS,CACP;IAAE,MAAM;IAAQ,MAAM,sBAAsB,OAAO;IAAW,CAC/D;GACD,SAAS;GACV;EAGH,MAAM,UAAU,WACb,QAAQ,8CAA8C,CACtD,IAAI,UAAU;AAEjB,MAAI,CAAC,QACH,QAAO;GACL,SAAS,CACP;IAAE,MAAM;IAAQ,MAAM,sBAAsB,OAAO;IAAW,CAC/D;GACD,SAAS;GACV;EAGH,MAAM,gBAAgB,OAAO;AAC7B,MAAI,cAAc,SAAS,KAAK,IAAI,WAAW,cAAc,CAC3D,QAAO;GACL,SAAS,CACP;IACE,MAAM;IACN,MAAM,iBAAiB,OAAO,KAAK;IACpC,CACF;GACD,SAAS;GACV;EAGH,MAAM,WAAW,KAAK,QAAQ,WAAW,cAAc;EACvD,MAAM,eAAe,QAAQ,SAAS;EACtC,MAAM,eAAe,QAAQ,QAAQ,UAAU;AAE/C,MACE,CAAC,aAAa,WAAW,eAAe,IAAI,IAC5C,iBAAiB,aAEjB,QAAO;GACL,SAAS,CACP;IAAE,MAAM;IAAQ,MAAM,2BAA2B,OAAO;IAAQ,CACjE;GACD,SAAS;GACV;AAGH,MAAI,CAAC,WAAW,SAAS,CACvB,QAAO;GACL,SAAS,CACP;IACE,MAAM;IACN,MAAM,mBAAmB,cAAc,aAAa,OAAO,QAAQ;IACpE,CACF;GACD,SAAS;GACV;EAGH,MAAM,OAAO,SAAS,SAAS;AAC/B,MAAI,KAAK,OAAO,IAAI,OAAO,KACzB,QAAO,EACL,SAAS,CACP;GACE,MAAM;GACN,MAAM,2BAA2B,KAAK,OAAO,OAAO,MAAM,QAAQ,EAAE,CAAC;GACtE,CACF,EACF;EAIH,MAAM,WADU,aAAa,UAAU,OAAO,CACrB,MAAM,KAAK;EAEpC,MAAM,YAAY,OAAO,QAAQ,KAAK;EACtC,MAAM,SACJ,OAAO,SAAS,OACZ,KAAK,IAAI,WAAW,OAAO,OAAO,SAAS,OAAO,GAClD,SAAS;EAGf,MAAM,OADgB,SAAS,MAAM,UAAU,OAAO,CAC3B,KAAK,KAAK;AAOrC,SAAO,EACL,SAAS,CAAC;GAAE,MAAM;GAAQ,MAAM,GALhC,OAAO,QAAQ,OACX,GAAG,OAAO,QAAQ,GAAG,cAAc,UAAU,WAAW,EAAE,GAAG,OAAO,MACpE,GAAG,OAAO,QAAQ,GAAG,cAAc,GAGG,MAAM;GAAQ,CAAC,EAC1D;UACM,GAAG;AACV,SAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAAM,eAAe,OAAO,EAAE;IAAI,CAAC;GAC7D,SAAS;GACV;;;AAYL,SAAgB,gBACd,YACA,QACY;AACZ,KAAI;EACF,IAAI,UAA6B;AAEjC,MAAI,OAAO,MAAM;GACf,MAAM,YAAY,gBAAgB,YAAY,OAAO,KAAK;AAC1D,OAAI,aAAa,KACf,WAAU,WACP,QACC,mJACD,CACA,IAAI,UAAU;QAInB,WAAU,sBAAsB,YADpB,QAAQ,KAAK,CACuB;AAGlD,MAAI,CAAC,QAIH,QAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAJZ,OAAO,OACnB,sBAAsB,OAAO,SAC7B,wDAAwD,QAAQ,KAAK;IAE9B,CAAC;GAC1C,SAAS,CAAC,OAAO;GAClB;AAGH,SAAO,EACL,SAAS,CAAC;GAAE,MAAM;GAAQ,MAAM,cAAc,YAAY,QAAQ;GAAE,CAAC,EACtE;UACM,GAAG;AACV,SAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAAM,uBAAuB,OAAO,EAAE;IAAI,CAAC;GACrE,SAAS;GACV;;;AAcL,SAAgB,gBACd,YACA,QACY;AACZ,KAAI;EACF,MAAM,aAAuB,EAAE;EAC/B,MAAM,cAAmC,EAAE;AAE3C,MAAI,OAAO,QAAQ;AACjB,cAAW,KAAK,eAAe;AAC/B,eA
AY,KAAK,OAAO,OAAO;;AAGjC,MAAI,OAAO,KAAK;AACd,cAAW,KACT,uGACD;AACD,eAAY,KAAK,OAAO,IAAI;;EAG9B,MAAM,QACJ,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,QAAQ,KAAK;EAChE,MAAM,QAAQ,OAAO,SAAS;AAC9B,cAAY,KAAK,MAAM;EAEvB,MAAM,WAAW,WACd,QACC;;WAEG,MAAM;;kBAGV,CACA,IAAI,GAAG,YAAY;AAUtB,MAAI,SAAS,WAAW,EACtB,QAAO,EACL,SAAS,CACP;GACE,MAAM;GACN,MAAM;GACP,CACF,EACF;EAGH,MAAM,QAAQ,SAAS,KACpB,MACC,GAAG,EAAE,KAAK,KAAK,EAAE,OAAO,KAAK,EAAE,UAAU,cAAc,IAAI,KAAK,EAAE,WAAW,CAAC,aAAa,CAAC,MAAM,GAAG,GAAG,CAAC,GAC5G;AAED,SAAO,EACL,SAAS,CACP;GACE,MAAM;GACN,MAAM,GAAG,SAAS,OAAO,kBAAkB,MAAM,KAAK,KAAK;GAC5D,CACF,EACF;UACM,GAAG;AACV,SAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAAM,uBAAuB,OAAO,EAAE;IAAI,CAAC;GACrE,SAAS;GACV;;;AAcL,SAAgB,gBACd,YACA,QACY;AACZ,KAAI;EACF,MAAM,YAAY,gBAAgB,YAAY,OAAO,QAAQ;AAC7D,MAAI,aAAa,KACf,QAAO;GACL,SAAS,CACP;IAAE,MAAM;IAAQ,MAAM,sBAAsB,OAAO;IAAW,CAC/D;GACD,SAAS;GACV;EAGH,MAAM,aAAa,CAAC,iBAAiB;EACrC,MAAM,cAAmC,CAAC,UAAU;AAEpD,MAAI,OAAO,QAAQ;AACjB,cAAW,KAAK,aAAa;AAC7B,eAAY,KAAK,OAAO,OAAO;;EAGjC,MAAM,QAAQ,OAAO,SAAS;AAC9B,cAAY,KAAK,MAAM;EAEvB,MAAM,WAAW,WACd,QACC;;iBAES,WAAW,KAAK,QAAQ,CAAC;;kBAGnC,CACA,IAAI,GAAG,YAAY;AAQtB,MAAI,SAAS,WAAW,EACtB,QAAO,EACL,SAAS,CACP;GACE,MAAM;GACN,MAAM,kCAAkC,OAAO;GAChD,CACF,EACF;EAGH,MAAM,QAAQ,SAAS,KACpB,MACC,IAAI,OAAO,EAAE,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,EAAE,KAAK,KAAK,EAAE,OAAO,KAAK,EAAE,MAAM,wBAAwB,EAAE,WACzG;AAED,SAAO,EACL,SAAS,CACP;GACE,MAAM;GACN,MAAM,GAAG,SAAS,OAAO,kBAAkB,OAAO,QAAQ,OAAO,MAAM,KAAK,OAAO;GACpF,CACF,EACF;UACM,GAAG;AACV,SAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAAM,uBAAuB,OAAO,EAAE;IAAI,CAAC;GACrE,SAAS;GACV;;;AAYL,SAAgB,mBACd,YACA,QACY;AACZ,KAAI;EACF,MAAM,IAAI,IAAI,OAAO,MAAM;EAC3B,MAAM,WAAW,WACd,QACC;;;;;;mBAOD,CACA,IAAI,GAAG,GAAG,EAAE;AAUf,MAAI,SAAS,WAAW,EACtB,QAAO,EACL,SAAS,CACP;GACE,MAAM;GACN,MAAM,gCAAgC,OAAO,MAAM;GACpD,CACF,EACF;EAGH,MAAM,QAAQ,SAAS,KAAK,MAAM,GAAG,EAAE,KAAK,KAAK,EAAE,OAAO,KAAK,EAAE,YAAY;AAE7E,SAAO,EACL,SAAS,CACP;GACE,MAAM;GACN,MAAM,GAAG,SAAS,OAAO,kBAAkB,OAAO,MAAM,QAAQ,MAAM,KAAK,KAAK;GACjF,CACF,EACF;UACM,GAAG;AACV,SAAO;GACL,SAAS,CACP;IAAE,MAAM;IAAQ,MAAM,0BAA0B,OAAO,EAAE;IAAI,CAC9D;GACD,SAAS;GACV;;;AAYL,SAAgB,kBACd,YACA,QACY;AACZ,KAAI;EACF,MAAM,YAAY,cAAc,YAAY,OAAO,IAAI;AAEvD,MAAI,CAAC,UAEH,QAAO,EACL,SAAS,CACP;GACE,MAAM;GACN,MAAM,yCALG,OAAO,OAAO,QAAQ,KAAK,CAKkB;GACvD,CACF,EACF;AAGH,SAAO,EACL,SAAS,CAAC;GAAE,MAAM;GAAQ,MAAM,oBAAoB,UAAU;GAAE,CAAC,EAClE;UACM,GAAG;AACV,SAAO;GACL,SAAS,CACP;IAAE,MAAM;IAAQ,MAAM,yBAAyB,OAAO,EAAE;IAAI,CAC7D;GACD,SAAS;GACV;;;AAYL,eAAsB,kBACpB,YACA,QACqB;AACrB,KAAI;EACF,MAAM,EAAE,YAAY,UAAU,aAAa,aAAa,MAAM,OAC5D;EAEF,MAAM,EACJ,MAAM,UACN,UAAU,iBACR,MAAM,OAAO;EACjB,MAAM,EAAE,YAAY,MAAM,OAAO;EACjC,MAAM,EAAE,WAAW,QAAQ,MAAM,OAAO;EAaxC,MAAM,OAAO,WACV,QACC;;;8BAID,CACA,KAAK;EAER,MAAM,OAAO,SAAS;EACtB,MAAM,iBAAiB,SAAS,MAAM,WAAW,WAAW;EAE5D,SAAS,aAAa,UAAsC;GAC1D,MAAM,OAAO,aAAa,SAAS;AAOnC,UANmB;IACjB,SAAS,MAAM,OAAO,KAAK;IAC3B,SAAS,MAAM,OAAO,MAAM,KAAK;IACjC,SAAS,MAAM,WAAW,KAAK;IAC/B,SAAS,MAAM,YAAY,KAAK;IACjC,CACiB,MAAM,MAAM,SAAS,EAAE,CAAC;;EAG5C,SAAS,eAAe,YAA6B;AACnD,OAAI,CAAC,SAAS,eAAe,CAAE,QAAO;AACtC,OAAI;AACF,SAAK,MAAM,SAAS,YAAY,eAAe,EAAE;AAC/C,SAAI,UAAU,cAAc,CAAC,MAAM,WAAW,WAAW,CAAE;KAC3D,MAAM,OAAO,SAAS,gBAAgB,MAAM;AAC5C,SAAI;AACF,UAAI,CAAC,SAAS,KAAK,CAAC,aAAa,CAAE;aAC7B;AACN;;AAEF,SAAI,SAAS,SAAS,MAAM,QAAQ,CAAC,CAAE,QAAO;;WAE1C;AAGR,UAAO;;EAoBT,SAAS,mBAAmB,UAI1B;AAOA,QAAK,MAAM,OANE;IACX;IACA;IACA;IACA;IACD,EACuB;IACtB,MAAM,OAAO,SAAS,UAAU,IAAI;AACpC,QAAI,SAAS,KAAK,CAChB,KAAI;KACF,MAAM,MAAM,aAAa,MAAM,OAAO;AAEtC,YAAO;MAAE,OAAO;MAAM,MAAM;MAAK,cADb,iBAAiB,KAAK,IAAI;MACc;YACtD;AACN,YAAO;MAAE,OAAO;MAAM,MAAM;MAAK,cAAc;MAAO;;;AAI5D,UAAO;IAAE,OAAO;IAAO,MAAM;IAAM,
cAAc;IAAO;;EAG1D,MAAM,UAA0B,KAAK,KAAK,MAAM;GAC9C,MAAM,aAAa,SAAS,EAAE,UAAU;GACxC,IAAI;GACJ,IAAI,gBAA+B;AAEnC,OAAI,WACF,UAAS;QACJ;AACL,oBAAgB,aAAa,EAAE,UAAU,IAAI;AAC7C,aAAS,gBAAgB,UAAU;;GAGrC,MAAM,OAAO,aACT,mBAAmB,EAAE,UAAU,GAC/B;IAAE,OAAO;IAAO,MAAM;IAAM,cAAc;IAAO;AAErD,UAAO;IACL,MAAM,EAAE;IACR,cAAc,EAAE;IAChB,WAAW,EAAE;IACb,QAAQ,EAAE;IACV,MAAM,EAAE;IACR,eAAe,EAAE;IACjB;IACA,gBAAgB;IAChB,kBAAkB,eAAe,EAAE,YAAY;IAC/C;IACD;IACD;EAEF,MAAM,WACJ,CAAC,OAAO,YAAY,OAAO,aAAa,QACpC,UACA,QAAQ,QAAQ,MAAM,EAAE,WAAW,OAAO,SAAS;EAEzD,MAAM,UAAU;GACd,OAAO,KAAK;GACZ,QAAQ,QAAQ,QAAQ,MAAM,EAAE,WAAW,SAAS,CAAC;GACrD,OAAO,QAAQ,QAAQ,MAAM,EAAE,WAAW,QAAQ,CAAC;GACnD,MAAM,QAAQ,QAAQ,MAAM,EAAE,WAAW,OAAO,CAAC;GAClD;AAED,SAAO,EACL,SAAS,CACP;GACE,MAAM;GACN,MAAM,KAAK,UAAU;IAAE;IAAS,UAAU;IAAU,EAAE,MAAM,EAAE;GAC/D,CACF,EACF;UACM,GAAG;AACV,SAAO;GACL,SAAS,CACP;IAAE,MAAM;IAAQ,MAAM,yBAAyB,OAAO,EAAE;IAAI,CAC7D;GACD,SAAS;GACV;;;;;;;AAgBL,MAAM,iBAAiB;CACrB;EAAE,KAAK;EAAuB,OAAO;EAAiB;CACtD;EAAE,KAAK;EAAyB,OAAO;EAAyB;CAChE;EAAE,KAAK;EAAuB,OAAO;EAAiB;CACtD;EAAE,KAAK;EAAuB,OAAO;EAAW;CACjD;;;;;;;;AASD,SAAS,iBAAiB,KAIxB;CACA,MAAM,QAAQ,IAAI,MAAM,KAAK;CAG7B,MAAM,cAAc,MAAM,WACvB,MAAM,EAAE,MAAM,KAAK,cACrB;AAED,KAAI,gBAAgB,GAClB,QAAO;EAAE,iBAAiB;EAAM,aAAa;EAAK,aAAa;EAAO;CAKxE,IAAI,SAAS,MAAM;AACnB,MAAK,IAAI,IAAI,cAAc,GAAG,IAAI,MAAM,QAAQ,KAAK;EACnD,MAAM,UAAU,MAAM,GAAG,MAAM;AAC/B,MAAI,YAAY,SAAU,QAAQ,WAAW,KAAK,IAAI,YAAY,eAAgB;AAChF,YAAS;AACT;;;AAOJ,QAAO;EAAE,iBAHa,MAAM,MAAM,aAAa,OAAO,CAChB,KAAK,KAAK,CAAC,MAAM;EAE7B,aAAa;EAAK,aAAa;EAAM;;AAGjE,SAAgB,gBACd,YACA,QACY;AACZ,KAAI;EACF,IAAI;EACJ,IAAI;AAEJ,MAAI,OAAO,SAAS;GAClB,MAAM,YAAY,gBAAgB,YAAY,OAAO,QAAQ;AAC7D,OAAI,aAAa,KACf,QAAO;IACL,SAAS,CACP;KAAE,MAAM;KAAQ,MAAM,sBAAsB,OAAO;KAAW,CAC/D;IACD,SAAS;IACV;GAGH,MAAM,MAAM,WACT,QAAQ,oDAAoD,CAC5D,IAAI,UAAU;AAEjB,OAAI,CAAC,IACH,QAAO;IACL,SAAS,CACP;KAAE,MAAM;KAAQ,MAAM,sBAAsB,OAAO;KAAW,CAC/D;IACD,SAAS;IACV;AAGH,cAAW,IAAI;AACf,iBAAc,IAAI;SACb;GAEL,MAAM,UAAU,sBAAsB,YAAY,QAAQ,KAAK,CAAC;AAChE,OAAI,CAAC,QACH,QAAO,EACL,SAAS,CACP;IACE,MAAM;IACN,MAAM,wDAAwD,QAAQ,KAAK,CAAC;IAC7E,CACF,EACF;AAEH,cAAW,QAAQ;AACnB,iBAAc,QAAQ;;AAIxB,OAAK,MAAM,OAAO,gBAAgB;GAChC,MAAM,WAAW,KAAK,UAAU,IAAI,IAAI;AACxC,OAAI,WAAW,SAAS,EAAE;IAExB,MAAM,EAAE,iBAAiB,aAAa,gBAAgB,iBAD1C,aAAa,UAAU,OAAO,CACiC;IAE3E,IAAI;AACJ,QAAI,eAAe,gBAEjB,UAAS;KACP,eAAe,YAAY,GAAG,IAAI;KAClC;KACA;KACA;KACA;KACA;KACA;KACD,CAAC,KAAK,KAAK;QAEZ,UAAS;KACP,eAAe,YAAY,GAAG,IAAI;KAClC;KACA;KACD,CAAC,KAAK,KAAK;AAGd,WAAO,EACL,SAAS,CAAC;KAAE,MAAM;KAAQ,MAAM;KAAQ,CAAC,EAC1C;;;EAKL,MAAM,WAAW,eAAe,KAAK,MAAM,KAAK,SAAS,GAAG,EAAE,MAAM,CAAC,KAAK,KAAK;AAC/E,SAAO,EACL,SAAS,CACP;GACE,MAAM;GACN,MAAM;IACJ,iCAAiC;IACjC;IACA;IACA;IACA;IACA;IACD,CAAC,KAAK,KAAK;GACb,CACF,EACF;UACM,GAAG;AACV,SAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAAM,uBAAuB,OAAO,EAAE;IAAI,CAAC;GACrE,SAAS;GACV;;;;;;;AA6BL,eAAsB,uBACpB,QACqB;AACrB,KAAI;EACF,MAAM,EAAE,cAAc,MAAM,OAAO;EACnC,MAAM,SAAS,IAAI,WAAW;AAE9B,MAAI,OAAO,WAAW,OAAO;GAC3B,MAAM,EAAE,QAAQ,mBAAmB,MAAM,OAAO,uBAAuB;AAgBvE,UAAO,EACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAhBd;KACZ,SAAS,OAAO;KAChB,oBAAoB,eAAe,KAAK,KAAK,IAAI;KACjD;KACA;KACA,GAAG,OAAO,QAAQ,OAAO,SAAS,CAAC,KAAK,CAAC,IAAI,SAAS;AAEpD,aAAO,KAAK,GAAG,IADL,IACW,UAAU,YAAY;OAC3C;KACF;KACA;KACA,GAAG,OAAO,QAAQ,OAAO,QAAQ,CAAC,KAC/B,CAAC,OAAO,cAAc,KAAK,MAAM,IAAK,SAAsB,KAAK,KAAK,IAAI,WAC5E;KACF,CAEuC,KAAK,KAAK;IAAE,CAAC,EACpD;;AAGH,MAAI,OAAO,WAAW,OAAO;AAC3B,OAAI,CAAC,OAAO,QAAQ,CAAC,OAAO,YAAY,CAAC,OAAO,QAC9C,QAAO;IACL,SAAS,CACP;KACE,MAAM;KACN,MAAM;KACP,CACF;IACD,SAAS;IACV;AAOH,UAAO,EACL,SAAS,CACP;IACE,MAAM;IACN,MAAM,uCATG,MAAM,OAAO,sBAAsB;KAChD,MAAM,OAAO;KACb,UAAU,OAAO;KACjB,SAAS,OAAO;KACjB,CAAC,EAKuD,
OAAO;IAC3D,CACF,EACF;;AAGH,MAAI,OAAO,WAAW,QAAQ;AAC5B,OAAI,CAAC,OAAO,QACV,QAAO;IACL,SAAS,CACP;KAAE,MAAM;KAAQ,MAAM;KAAkD,CACzE;IACD,SAAS;IACV;GAEH,MAAM,SAAS,MAAM,OAAO,iBAAiB;IAC3C,OAAO,OAAO,SAAS;IACvB,SAAS,OAAO;IAChB,OAAO,OAAO;IACf,CAAC;AASF,UAAO,EACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MATd;KACZ,SAAS,OAAO;KAChB,cAAc,OAAO,kBAAkB,KAAK,KAAK,IAAI;KACrD,cAAc,OAAO,kBAAkB,KAAK,KAAK,IAAI;KACrD,GAAI,OAAO,eAAe,SAAS,IAC/B,CAAC,WAAW,OAAO,eAAe,KAAK,KAAK,GAAG,GAC/C,EAAE;KACP,CAEuC,KAAK,KAAK;IAAE,CAAC,EACpD;;AAGH,SAAO;GACL,SAAS,CACP;IACE,MAAM;IACN,MAAM,mBAAmB,OAAO,OAAO,OAAO,CAAC;IAChD,CACF;GACD,SAAS;GACV;UACM,GAAG;AACV,SAAO;GACL,SAAS,CACP;IAAE,MAAM;IAAQ,MAAM,8BAA8B,OAAO,EAAE;IAAI,CAClE;GACD,SAAS;GACV;;;;;;;;;;;AA4BL,eAAsB,gBACpB,QACqB;AACrB,KAAI;EACF,MAAM,EAAE,cAAc,MAAM,OAAO;EAGnC,MAAM,SAAS,MAFA,IAAI,WAAW,CAEF,WAAW;GACrC,SAAS,OAAO;GAChB,gBAAgB,OAAO;GACvB,WAAW,OAAO;GACnB,CAAC;EAEF,MAAM,QAAkB;GACtB,YAAY,OAAO;GACnB,oBAAoB,OAAO,kBAAkB;GAC7C,sBAAsB,OAAO,oBAAoB;GACjD,eAAe,OAAO,WAAW,QAAQ,EAAE;GAC3C,kBAAkB,OAAO;GAC1B;AAED,MAAI,OAAO,YAAY,SAAS,GAAG;AACjC,SAAM,KAAK,GAAG;AACd,SAAM,KAAK,eAAe;AAC1B,QAAK,MAAM,KAAK,OAAO,YACrB,OAAM,KAAK,KAAK,EAAE,KAAK,KAAK,EAAE,QAAQ,KAAK,QAAQ,EAAE,CAAC,GAAG;;AAI7D,MAAI,OAAO,SAAS;AAClB,SAAM,KAAK,GAAG;AACd,SAAM,KACJ,2DAA2D,OAAO,iBAAiB,kBAClE,OAAO,aAAa,KAAK,QAAQ,EAAE,CAAC,WAAW,OAAO,eAAe,IACvF;;AAGH,SAAO,EACL,SAAS,CAAC;GAAE,MAAM;GAAQ,MAAM,MAAM,KAAK,KAAK;GAAE,CAAC,EACpD;UACM,GAAG;AACV,SAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAAM,uBAAuB,OAAO,EAAE;IAAI,CAAC;GACrE,SAAS;GACV;;;;;;;;;;;;;;AA0BL,eAAsB,iBACpB,YACA,YACA,QACqB;AACrB,KAAI;EACF,MAAM,EAAE,WAAW,wBAAwB,MAAM,OAAO;EAExD,MAAM,SAAS,MAAM,UACnB,YACA,YACA,OAAO,KACP,OAAO,QACR;AAED,MAAI,CAAC,OAEH,QAAO,EACL,SAAS,CACP;GACE,MAAM;GACN,MAAM;IACJ,+BANO,OAAO,OAAO,QAAQ,KAAK;IAOlC;IACA,2CACG,OAAO,UAAU,sBAAsB;IAC1C;IACA;IACA;IACD,CAAC,KAAK,KAAK;GACb,CACF,EACF;AAGH,SAAO,EACL,SAAS,CAAC;GAAE,MAAM;GAAQ,MAAM,oBAAoB,OAAO;GAAE,CAAC,EAC/D;UACM,GAAG;AACV,SAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAAM,wBAAwB,OAAO,EAAE;IAAI,CAAC;GACtE,SAAS;GACV;;;AAeL,eAAsB,kBACpB,cACA,QACqB;AACrB,KAAI;EACF,MAAM,EAAE,kBAAkB,MAAM,OAAO;EACvC,MAAM,SAAS,cAAc,cAAc;GACzC,WAAW,OAAO;GAClB,OAAO,OAAO;GACd,WAAW,OAAO;GAClB,MAAM,OAAO;GACd,CAAC;AACF,SAAO,EACL,SAAS,CAAC;GAAE,MAAM;GAAQ,MAAM,KAAK,UAAU,QAAQ,MAAM,EAAE;GAAE,CAAC,EACnE;UACM,GAAG;AACV,SAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAAM,yBAAyB,OAAO,EAAE;IAAI,CAAC;GACvE,SAAS;GACV;;;AAeL,eAAsB,iBACpB,cACA,QACqB;AACrB,KAAI;EACF,MAAM,EAAE,iBAAiB,MAAM,OAAO;EACtC,MAAM,SAAS,aAAa,cAAc;GACxC,OAAO,OAAO;GACd,aAAa,OAAO;GACpB,YAAY,OAAO;GACnB,SAAS,OAAO;GACjB,CAAC;AACF,SAAO,EACL,SAAS,CAAC;GAAE,MAAM;GAAQ,MAAM,KAAK,UAAU,QAAQ,MAAM,EAAE;GAAE,CAAC,EACnE;UACM,GAAG;AACV,SAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAAM,wBAAwB,OAAO,EAAE;IAAI,CAAC;GACtE,SAAS;GACV;;;AAgBL,eAAsB,mBACpB,cACA,QACqB;AACrB,KAAI;EACF,MAAM,EAAE,mBAAmB,MAAM,OAAO;EACxC,MAAM,UAAU,MAAM,eAAe,cAAc;GACjD,eAAe,OAAO;GACtB,gBAAgB,OAAO;GACvB,OAAO,OAAO;GACd,eAAe,OAAO;GACtB,kBAAkB,OAAO;GAC1B,CAAC;AACF,SAAO,EACL,SAAS,CAAC;GAAE,MAAM;GAAQ,MAAM,KAAK,UAAU,SAAS,MAAM,EAAE;GAAE,CAAC,EACpE;UACM,GAAG;AACV,SAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAAM,0BAA0B,OAAO,EAAE;IAAI,CAAC;GACxE,SAAS;GACV;;;AAeL,eAAsB,kBACpB,cACA,QACqB;AACrB,KAAI;EACF,MAAM,EAAE,kBAAkB,MAAM,OAAO;EACvC,MAAM,UAAU,MAAM,cAAc,cAAc;GAChD,UAAU,OAAO;GACjB,gBAAgB,OAAO;GACvB,OAAO,OAAO;GACd,eAAe,OAAO;GACvB,CAAC;AACF,SAAO,EACL,SAAS,CAAC;GAAE,MAAM;GAAQ,MAAM,KAAK,UAAU,SAAS,MAAM,EAAE;GAAE,CAAC,EACpE;UACM,GAAG;AACV,SAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAAM,yBAAyB,OAAO,EAAE;IAAI,CAAC;GACvE,SAAS;GACV;;;AAeL,eAAsB,mBACpB,cACA,QACqB;AACrB,KAAI;EACF,MAAM,EAAE,mBAAmB,MAAM,OAAO;EACxC,MAAM,SAAS,MAAM,eAAe,cAAc;GAChD,UAAU
,OAAO;GACjB,gBAAgB,OAAO;GACvB,OAAO,OAAO;GACd,OAAO,OAAO;GACf,CAAC;AACF,SAAO,EACL,SAAS,CAAC;GAAE,MAAM;GAAQ,MAAM,KAAK,UAAU,QAAQ,MAAM,EAAE;GAAE,CAAC,EACnE;UACM,GAAG;AACV,SAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAAM,0BAA0B,OAAO,EAAE;IAAI,CAAC;GACxE,SAAS;GACV;;;AAgBL,eAAsB,iBACpB,cACA,QACqB;AACrB,KAAI;EACF,MAAM,EAAE,iBAAiB,MAAM,OAAO;EACtC,MAAM,SAAS,MAAM,aAAa,cAAc;GAC9C,gBAAgB,OAAO;GACvB,cAAc,OAAO;GACrB,gBAAgB,OAAO;GACvB,WAAW,OAAO;GAClB,qBAAqB,OAAO;GAC7B,CAAC;AACF,SAAO,EACL,SAAS,CAAC;GAAE,MAAM;GAAQ,MAAM,KAAK,UAAU,QAAQ,MAAM,EAAE;GAAE,CAAC,EACnE;UACM,GAAG;AACV,SAAO;GACL,SAAS,CAAC;IAAE,MAAM;IAAQ,MAAM,wBAAwB,OAAO,EAAE;IAAI,CAAC;GACtE,SAAS;GACV;;;;;;;;AAeL,SAAS,qBACP,gBACA,iBACA,YACA,gBAAgB,IAChB,iBAAiB,IACD;AAChB,KAAI,eAAe,WAAW,KAAK,gBAAgB,WAAW,EAAG,QAAO,EAAE;CAE1E,MAAM,UAAU,MACd,GAAG,EAAE,UAAU,GAAG,EAAE,KAAK,GAAG,EAAE,UAAU,GAAG,EAAE;CAE/C,SAAS,gBAAgB,OAA4C;AACnE,MAAI,MAAM,WAAW,EAAG,wBAAO,IAAI,KAAK;EACxC,MAAM,MAAM,KAAK,IAAI,GAAG,MAAM,KAAK,MAAM,EAAE,MAAM,CAAC;EAElD,MAAM,QADM,KAAK,IAAI,GAAG,MAAM,KAAK,MAAM,EAAE,MAAM,CAAC,GAC9B;EACpB,MAAM,oBAAI,IAAI,KAAqB;AACnC,OAAK,MAAM,KAAK,MACd,GAAE,IAAI,OAAO,EAAE,EAAE,UAAU,IAAI,KAAK,EAAE,QAAQ,OAAO,MAAM;AAE7D,SAAO;;CAGT,MAAM,SAAS,gBAAgB,eAAe;CAC9C,MAAM,UAAU,gBAAgB,gBAAgB;CAEhD,MAAM,UAAU,IAAI,IAAY,CAC9B,GAAG,eAAe,IAAI,OAAO,EAC7B,GAAG,gBAAgB,IAAI,OAAO,CAC/B,CAAC;CAEF,MAAM,0BAAU,IAAI,KAA2B;AAC/C,MAAK,MAAM,KAAK,CAAC,GAAG,gBAAgB,GAAG,gBAAgB,CACrD,SAAQ,IAAI,OAAO,EAAE,EAAE,EAAE;CAG3B,MAAM,WAA4D,EAAE;AACpE,MAAK,MAAM,OAAO,SAAS;EACzB,MAAM,OAAO,QAAQ,IAAI,IAAI;EAC7B,MAAM,UAAU,OAAO,IAAI,IAAI,IAAI;EACnC,MAAM,WAAW,QAAQ,IAAI,IAAI,IAAI;EACrC,MAAM,gBAAgB,gBAAgB,UAAU,iBAAiB;AACjE,WAAS,KAAK;GAAE,GAAG;GAAM,OAAO;GAAe;GAAe,CAAC;;AAGjE,QAAO,SACJ,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM,CACjC,MAAM,GAAG,WAAW,CACpB,KAAK,EAAE,eAAe,SAAS,GAAG,QAAQ,EAAE"}
@@ -1 +0,0 @@
- {"version":3,"file":"vault-indexer-k-kUlaZ-.mjs","names":[],"sources":["../src/memory/vault-indexer.ts"],"sourcesContent":["/**\n * Vault indexer for the PAI federation memory engine.\n *\n * Indexes an entire Obsidian vault (or any markdown knowledge base), following\n * symlinks, deduplicating files by inode, parsing wikilinks, and computing\n * per-file health metrics (orphan detection, dead links).\n *\n * Key differences from the project indexer (indexer.ts):\n * - Follows symbolic links (project indexer skips them)\n * - Deduplicates files with the same inode (same content reachable via multiple paths)\n * - Parses [[wikilinks]] and builds a directed link graph\n * - Resolves wikilinks using Obsidian's shortest-match algorithm\n * - Computes health metrics per file: inbound/outbound link counts, dead links, orphans\n */\n\nimport { createHash } from \"node:crypto\";\nimport { readFileSync, statSync, readdirSync, existsSync } from \"node:fs\";\nimport { join, relative, basename, dirname, normalize } from \"node:path\";\nimport type { Database } from \"better-sqlite3\";\nimport { chunkMarkdown } from \"./chunker.js\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface VaultFile {\n absPath: string;\n vaultRelPath: string;\n inode: number;\n device: number;\n}\n\nexport interface InodeGroup {\n canonical: VaultFile;\n aliases: VaultFile[];\n}\n\nexport interface ParsedLink {\n raw: string;\n alias: string | null;\n lineNumber: number;\n isEmbed: boolean;\n /** True when parsed from markdown `[text](path)` syntax (vs `[[wikilink]]`). */\n isMdLink?: boolean;\n}\n\nexport interface VaultIndexResult {\n filesIndexed: number;\n chunksCreated: number;\n filesSkipped: number;\n aliasesRecorded: number;\n linksExtracted: number;\n deadLinksFound: number;\n orphansFound: number;\n elapsed: number;\n}\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Maximum number of .md files to collect from a vault. */\nconst VAULT_MAX_FILES = 10_000;\n\n/** Maximum recursion depth for vault directory walks. */\nconst VAULT_MAX_DEPTH = 10;\n\n/** Number of files to process before yielding to the event loop. 
*/\nconst VAULT_YIELD_EVERY = 10;\n\n/**\n * Directories to always skip, at any depth, during vault walks.\n * Includes standard build/VCS noise plus Obsidian-specific directories.\n */\nconst VAULT_SKIP_DIRS = new Set([\n // Version control\n \".git\",\n // Dependency directories (any language)\n \"node_modules\",\n \"vendor\",\n \"Pods\",\n // Build / compile output\n \"dist\",\n \"build\",\n \"out\",\n \"DerivedData\",\n \".next\",\n // Python virtual environments and caches\n \".venv\",\n \"venv\",\n \"__pycache__\",\n // General caches\n \".cache\",\n \".bun\",\n // Obsidian internals\n \".obsidian\",\n \".trash\",\n]);\n\n// ---------------------------------------------------------------------------\n// Utility\n// ---------------------------------------------------------------------------\n\nfunction sha256File(content: string): string {\n return createHash(\"sha256\").update(content).digest(\"hex\");\n}\n\nfunction chunkId(\n projectId: number,\n path: string,\n chunkIndex: number,\n startLine: number,\n endLine: number,\n): string {\n return createHash(\"sha256\")\n .update(`${projectId}:${path}:${chunkIndex}:${startLine}:${endLine}`)\n .digest(\"hex\");\n}\n\nfunction yieldToEventLoop(): Promise<void> {\n return new Promise((resolve) => setImmediate(resolve));\n}\n\n// ---------------------------------------------------------------------------\n// Vault directory walker (follows symlinks)\n// ---------------------------------------------------------------------------\n\n/**\n * Recursively collect all .md files under a vault root, following symlinks.\n *\n * Symlink-following behaviour:\n * - Symbolic links to files: followed if the target is a .md file\n * - Symbolic links to directories: followed with cycle detection via inode\n *\n * Cycle detection is based on the real inode of each visited directory.\n * Using the real stat (not lstat) ensures that symlinked dirs resolve to\n * their actual inode, preventing infinite loops.\n *\n * @param dir Directory to scan.\n * @param vaultRoot Absolute root of the vault (for computing vaultRelPath).\n * @param acc Shared accumulator (mutated in place for early exit).\n * @param visited Set of \"device:inode\" strings for visited directories.\n * @param depth Current recursion depth.\n */\nexport function walkVaultMdFiles(\n vaultRoot: string,\n opts?: { maxFiles?: number; maxDepth?: number },\n): VaultFile[] {\n const maxFiles = opts?.maxFiles ?? VAULT_MAX_FILES;\n const maxDepth = opts?.maxDepth ?? 
VAULT_MAX_DEPTH;\n\n const results: VaultFile[] = [];\n const visitedDirs = new Set<string>();\n\n function walk(dir: string, depth: number): void {\n if (results.length >= maxFiles) return;\n if (depth > maxDepth) return;\n\n // Get the real inode of this directory (follows symlinks on the dir itself)\n let dirStat: ReturnType<typeof statSync>;\n try {\n dirStat = statSync(dir);\n } catch {\n return; // Unreadable or broken symlink — skip\n }\n\n const dirKey = `${dirStat.dev}:${dirStat.ino}`;\n if (visitedDirs.has(dirKey)) return; // Cycle detected\n visitedDirs.add(dirKey);\n\n let entries: import(\"node:fs\").Dirent<string>[];\n try {\n entries = readdirSync(dir, { withFileTypes: true, encoding: \"utf8\" });\n } catch {\n return; // Unreadable directory — skip\n }\n\n for (const entry of entries) {\n if (results.length >= maxFiles) break;\n if (VAULT_SKIP_DIRS.has(entry.name)) continue;\n\n const full = join(dir, entry.name);\n\n if (entry.isSymbolicLink()) {\n // Follow the symlink — resolve to real target\n let targetStat: ReturnType<typeof statSync>;\n try {\n targetStat = statSync(full); // statSync follows symlinks\n } catch {\n continue; // Broken symlink — skip\n }\n\n if (targetStat.isDirectory()) {\n if (!VAULT_SKIP_DIRS.has(entry.name)) {\n walk(full, depth + 1);\n }\n } else if (targetStat.isFile() && entry.name.endsWith(\".md\")) {\n results.push({\n absPath: full,\n vaultRelPath: relative(vaultRoot, full),\n inode: targetStat.ino,\n device: targetStat.dev,\n });\n }\n } else if (entry.isDirectory()) {\n walk(full, depth + 1);\n } else if (entry.isFile() && entry.name.endsWith(\".md\")) {\n let fileStat: ReturnType<typeof statSync>;\n try {\n fileStat = statSync(full);\n } catch {\n continue;\n }\n results.push({\n absPath: full,\n vaultRelPath: relative(vaultRoot, full),\n inode: fileStat.ino,\n device: fileStat.dev,\n });\n }\n }\n }\n\n if (existsSync(vaultRoot)) {\n walk(vaultRoot, 0);\n }\n\n return results;\n}\n\n// ---------------------------------------------------------------------------\n// Inode deduplication\n// ---------------------------------------------------------------------------\n\n/**\n * Group vault files by inode identity (device + inode).\n *\n * Within each group, the canonical file is chosen as the one with the\n * fewest path separators (shallowest), breaking ties by shortest string.\n * All other group members become aliases.\n */\nexport function deduplicateByInode(files: VaultFile[]): InodeGroup[] {\n const groups = new Map<string, VaultFile[]>();\n\n for (const file of files) {\n const key = `${file.device}:${file.inode}`;\n const existing = groups.get(key);\n if (existing) {\n existing.push(file);\n } else {\n groups.set(key, [file]);\n }\n }\n\n const result: InodeGroup[] = [];\n\n for (const group of groups.values()) {\n if (group.length === 0) continue;\n\n // Sort: fewest path separators first, then shortest string\n const sorted = [...group].sort((a, b) => {\n const aDepth = (a.vaultRelPath.match(/\\//g) ?? []).length;\n const bDepth = (b.vaultRelPath.match(/\\//g) ?? 
[]).length;\n if (aDepth !== bDepth) return aDepth - bDepth;\n return a.vaultRelPath.length - b.vaultRelPath.length;\n });\n\n const [canonical, ...aliases] = sorted as [VaultFile, ...VaultFile[]];\n result.push({ canonical, aliases });\n }\n\n return result;\n}\n\n// ---------------------------------------------------------------------------\n// Link parser (wikilinks + markdown links)\n// ---------------------------------------------------------------------------\n\n/**\n * Parse all links from markdown content.\n *\n * Handles:\n * - Standard wikilinks: [[Target Note]]\n * - Aliased wikilinks: [[Target Note|Display Text]]\n * - Heading anchors: [[Target Note#Heading]] (stripped for resolution)\n * - Embeds: ![[Target Note]]\n * - Frontmatter wikilinks (YAML between --- delimiters)\n * - Markdown links: [text](path/to/note.md)\n * - Markdown embeds: ![alt](image.png)\n *\n * External URLs (http://, https://, mailto:, etc.) are excluded — only\n * relative paths are treated as vault links.\n *\n * @param content Raw markdown file content.\n * @returns Array of parsed links in document order.\n */\nexport function parseLinks(content: string): ParsedLink[] {\n const links: ParsedLink[] = [];\n const lines = content.split(\"\\n\");\n\n // Determine frontmatter range (YAML between opening and closing ---)\n let frontmatterEnd = 0;\n if (content.startsWith(\"---\")) {\n const closingIdx = content.indexOf(\"\\n---\", 3);\n if (closingIdx !== -1) {\n frontmatterEnd = content.slice(0, closingIdx + 4).split(\"\\n\").length - 1;\n }\n }\n\n // Regex for [[wikilinks]] and ![[embeds]]\n const wikilinkRe = /(!?)\\[\\[([^\\]]+?)\\]\\]/g;\n\n // Regex for markdown links [text](target) and embeds ![alt](target)\n // Negative lookbehind avoids matching wikilinks already captured above.\n // The target group excludes closing paren and whitespace-after-URL.\n const mdLinkRe = /(!)?\\[([^\\]]*)\\]\\(([^)]+)\\)/g;\n\n for (let lineIdx = 0; lineIdx < lines.length; lineIdx++) {\n const line = lines[lineIdx]!;\n const lineNumber = lineIdx + 1; // 1-indexed\n const isFrontmatter = lineIdx < frontmatterEnd;\n\n // --- Wikilinks ---\n wikilinkRe.lastIndex = 0;\n let match: RegExpExecArray | null;\n while ((match = wikilinkRe.exec(line)) !== null) {\n const isEmbed = match[1] === \"!\";\n const inner = match[2]!;\n\n // Split on first | for alias\n const pipeIdx = inner.indexOf(\"|\");\n const beforePipe = pipeIdx === -1 ? inner : inner.slice(0, pipeIdx);\n const alias = pipeIdx === -1 ? null : inner.slice(pipeIdx + 1);\n\n // Strip heading anchor (everything after #)\n const hashIdx = beforePipe.indexOf(\"#\");\n const raw = hashIdx === -1 ? beforePipe.trim() : beforePipe.slice(0, hashIdx).trim();\n\n if (!raw) continue; // Skip links with empty targets (e.g. [[#Heading]])\n\n links.push({\n raw,\n alias: alias?.trim() ?? 
null,\n lineNumber,\n isEmbed: isEmbed && !isFrontmatter,\n isMdLink: false,\n });\n }\n\n // --- Markdown links --- (skip inside frontmatter)\n if (!isFrontmatter) {\n mdLinkRe.lastIndex = 0;\n while ((match = mdLinkRe.exec(line)) !== null) {\n const isEmbed = match[1] === \"!\";\n const displayText = match[2]!;\n let target = match[3]!.trim();\n\n // Skip external URLs\n if (/^[a-z][a-z0-9+.-]*:/i.test(target)) continue;\n\n // Skip pure anchor links (#heading)\n if (target.startsWith(\"#\")) continue;\n\n // Strip heading anchor from target\n const hashIdx = target.indexOf(\"#\");\n if (hashIdx !== -1) target = target.slice(0, hashIdx);\n\n // URL-decode (Obsidian encodes spaces as %20 in md links)\n try {\n target = decodeURIComponent(target);\n } catch {\n // Malformed encoding — use as-is\n }\n\n // Strip .md extension for resolution (resolveWikilink adds it back)\n const raw = target.replace(/\\.md$/i, \"\").trim();\n if (!raw) continue;\n\n // Skip if this exact position was already captured as a wikilink\n // (e.g. [[link]] inside a markdown link won't happen, but be safe)\n links.push({\n raw,\n alias: displayText || null,\n lineNumber,\n isEmbed,\n isMdLink: true,\n });\n }\n }\n }\n\n return links;\n}\n\n/** @deprecated Use {@link parseLinks} instead. */\nexport const parseWikilinks = parseLinks;\n\n// ---------------------------------------------------------------------------\n// Name index builder\n// ---------------------------------------------------------------------------\n\n/**\n * Build a name lookup index for Obsidian wikilink resolution.\n *\n * Maps lowercase filename (without .md extension) to all vault-relative paths\n * that share that name. Includes both canonical paths and alias paths so that\n * wikilinks resolve regardless of which path the file is accessed through.\n */\nexport function buildNameIndex(files: VaultFile[]): Map<string, string[]> {\n const index = new Map<string, string[]>();\n\n for (const file of files) {\n const name = basename(file.vaultRelPath, \".md\").toLowerCase();\n const existing = index.get(name);\n if (existing) {\n existing.push(file.vaultRelPath);\n } else {\n index.set(name, [file.vaultRelPath]);\n }\n }\n\n return index;\n}\n\n// ---------------------------------------------------------------------------\n// Wikilink resolver\n// ---------------------------------------------------------------------------\n\n/**\n * Resolve a wikilink target to a vault-relative path using Obsidian's rules.\n *\n * Resolution algorithm:\n * 1. If raw contains \"/\", attempt exact path match (with and without .md).\n * 2. Normalize: lowercase the raw target, strip .md extension.\n * 3. Look up in the name index (all files with that basename).\n * 4. If exactly one match, return it.\n * 5. If multiple matches, pick the one closest to the source file\n * (longest common directory prefix, then shortest overall path).\n * 6. 
If no matches, return null (dead link).\n *\n * @param raw The raw link target (heading-stripped, pipe-stripped).\n * @param nameIndex Map from lowercase basename-without-ext to vault paths.\n * @param sourcePath Vault-relative path of the file containing the link.\n * @returns Vault-relative path of the resolved target, or null.\n */\nexport function resolveWikilink(\n raw: string,\n nameIndex: Map<string, string[]>,\n sourcePath: string,\n): string | null {\n if (!raw) return null;\n\n // Case 1: path contains \"/\" — try exact match with and without .md\n if (raw.includes(\"/\")) {\n const normalized = normalize(raw);\n const normalizedMd = normalized.endsWith(\".md\") ? normalized : normalized + \".md\";\n\n // Check if any indexed path matches (case-insensitive for macOS compatibility)\n for (const [, paths] of nameIndex) {\n for (const p of paths) {\n if (p === normalizedMd || p === normalized) return p;\n if (p.toLowerCase() === normalizedMd.toLowerCase()) return p;\n }\n }\n // Fall through to name lookup in case the path prefix was wrong\n }\n\n // Normalize the raw target for name lookup.\n // Use the basename only — Obsidian resolves by filename, not full path.\n // E.g. \"PAI/20-webseiten/_20-webseiten-master\" → \"_20-webseiten-master\"\n const rawBase = basename(raw)\n .replace(/\\.md$/i, \"\")\n .toLowerCase()\n .trim();\n\n if (!rawBase) return null;\n\n const candidates = nameIndex.get(rawBase);\n\n if (!candidates || candidates.length === 0) {\n return null; // Dead link\n }\n\n if (candidates.length === 1) {\n return candidates[0]!;\n }\n\n // Multiple matches — pick the one closest to the source file\n const sourceDir = dirname(sourcePath);\n\n let bestPath: string | null = null;\n let bestPrefixLen = -1;\n let bestPathLen = Infinity;\n\n for (const candidate of candidates) {\n const candidateDir = dirname(candidate);\n const prefixLen = commonPrefixLength(sourceDir, candidateDir);\n const pathLen = candidate.length;\n\n if (\n prefixLen > bestPrefixLen ||\n (prefixLen === bestPrefixLen && pathLen < bestPathLen)\n ) {\n bestPrefixLen = prefixLen;\n bestPathLen = pathLen;\n bestPath = candidate;\n }\n }\n\n return bestPath;\n}\n\n/**\n * Compute the length of the common prefix between two directory paths,\n * measured in path segments (not raw characters).\n *\n * Example: \"a/b/c\" and \"a/b/d\" → 2 (common: \"a\", \"b\")\n */\nfunction commonPrefixLength(a: string, b: string): number {\n if (a === \".\" && b === \".\") return 0;\n const aParts = a === \".\" ? [] : a.split(\"/\");\n const bParts = b === \".\" ? [] : b.split(\"/\");\n let count = 0;\n const len = Math.min(aParts.length, bParts.length);\n for (let i = 0; i < len; i++) {\n if (aParts[i] === bParts[i]) {\n count++;\n } else {\n break;\n }\n }\n return count;\n}\n\n// ---------------------------------------------------------------------------\n// Main vault indexing orchestrator\n// ---------------------------------------------------------------------------\n\n/**\n * Index an entire Obsidian vault (or markdown knowledge base) into the\n * federation database.\n *\n * Steps:\n * 1. Walk vault root, following symlinks.\n * 2. Deduplicate by inode — each unique file is indexed once.\n * 3. Build a name index for wikilink resolution.\n * 4. For each canonical file:\n * a. SHA-256 hash for change detection — skip unchanged files.\n * b. Read content, chunk with chunkMarkdown().\n * c. Insert chunks into memory_chunks and memory_fts.\n * d. Upsert vault_files row.\n * 5. Record aliases in vault_aliases.\n * 6. 
Rebuild vault_name_index table.\n * 7. Rebuild vault_links:\n * a. Parse [[wikilinks]] from each canonical file.\n * b. Resolve each link with resolveWikilink().\n * c. Insert into vault_links.\n * 8. Compute and upsert health metrics (vault_health).\n * 9. Return statistics.\n *\n * @param db Open federation database.\n * @param vaultProjectId Registry project ID for the vault \"project\".\n * @param vaultRoot Absolute path to the vault root directory.\n */\nexport async function indexVault(\n db: Database,\n vaultProjectId: number,\n vaultRoot: string,\n): Promise<VaultIndexResult> {\n const startTime = Date.now();\n\n const result: VaultIndexResult = {\n filesIndexed: 0,\n chunksCreated: 0,\n filesSkipped: 0,\n aliasesRecorded: 0,\n linksExtracted: 0,\n deadLinksFound: 0,\n orphansFound: 0,\n elapsed: 0,\n };\n\n // ---------------------------------------------------------------------------\n // Step 1: Walk vault, collecting all .md files (follows symlinks)\n // ---------------------------------------------------------------------------\n\n const allFiles = walkVaultMdFiles(vaultRoot);\n\n // ---------------------------------------------------------------------------\n // Step 2: Deduplicate by inode\n // ---------------------------------------------------------------------------\n\n const inodeGroups = deduplicateByInode(allFiles);\n\n // ---------------------------------------------------------------------------\n // Step 3: Build name index (from all files including aliases, for resolution)\n // ---------------------------------------------------------------------------\n\n const nameIndex = buildNameIndex(allFiles);\n\n // ---------------------------------------------------------------------------\n // Step 4: Prepare SQL statements\n // ---------------------------------------------------------------------------\n\n const selectFileHash = db.prepare(\n \"SELECT hash FROM vault_files WHERE vault_path = ?\",\n );\n\n const deleteOldChunkIds = db.prepare(\n \"SELECT id FROM memory_chunks WHERE project_id = ? AND path = ?\",\n );\n\n const deleteFts = db.prepare(\"DELETE FROM memory_fts WHERE id = ?\");\n\n const deleteChunks = db.prepare(\n \"DELETE FROM memory_chunks WHERE project_id = ? 
AND path = ?\",\n );\n\n const insertChunk = db.prepare(`\n INSERT INTO memory_chunks (id, project_id, source, tier, path, start_line, end_line, hash, text, updated_at)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n `);\n\n const insertFts = db.prepare(`\n INSERT INTO memory_fts (text, id, project_id, path, source, tier, start_line, end_line)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?)\n `);\n\n const upsertVaultFile = db.prepare(`\n INSERT INTO vault_files (vault_path, inode, device, hash, title, indexed_at)\n VALUES (?, ?, ?, ?, ?, ?)\n ON CONFLICT(vault_path) DO UPDATE SET\n inode = excluded.inode,\n device = excluded.device,\n hash = excluded.hash,\n title = excluded.title,\n indexed_at = excluded.indexed_at\n `);\n\n // ---------------------------------------------------------------------------\n // Step 4 (cont.): Index each canonical file\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n let filesSinceYield = 0;\n\n for (const group of inodeGroups) {\n // Yield periodically to keep the IPC server responsive\n if (filesSinceYield >= VAULT_YIELD_EVERY) {\n await yieldToEventLoop();\n filesSinceYield = 0;\n }\n filesSinceYield++;\n\n const { canonical } = group;\n\n // Read file content\n let content: string;\n try {\n content = readFileSync(canonical.absPath, \"utf8\");\n } catch {\n result.filesSkipped++;\n continue;\n }\n\n const hash = sha256File(content);\n\n // Change detection: skip if hash is unchanged\n const existing = selectFileHash.get(canonical.vaultRelPath) as\n | { hash: string }\n | undefined;\n\n if (existing?.hash === hash) {\n result.filesSkipped++;\n continue;\n }\n\n // Delete old chunks for this vault path\n const oldChunkIds = deleteOldChunkIds.all(\n vaultProjectId,\n canonical.vaultRelPath,\n ) as Array<{ id: string }>;\n\n db.transaction(() => {\n for (const row of oldChunkIds) {\n deleteFts.run(row.id);\n }\n deleteChunks.run(vaultProjectId, canonical.vaultRelPath);\n })();\n\n // Chunk the content\n const chunks = chunkMarkdown(content);\n const updatedAt = Date.now();\n\n // Extract title from first H1 heading or filename\n const titleMatch = /^#\\s+(.+)$/m.exec(content);\n const title = titleMatch\n ? 
titleMatch[1]!.trim()\n : basename(canonical.vaultRelPath, \".md\");\n\n db.transaction(() => {\n for (let i = 0; i < chunks.length; i++) {\n const chunk = chunks[i]!;\n const id = chunkId(\n vaultProjectId,\n canonical.vaultRelPath,\n i,\n chunk.startLine,\n chunk.endLine,\n );\n insertChunk.run(\n id,\n vaultProjectId,\n \"vault\",\n \"topic\",\n canonical.vaultRelPath,\n chunk.startLine,\n chunk.endLine,\n chunk.hash,\n chunk.text,\n updatedAt,\n );\n insertFts.run(\n chunk.text,\n id,\n vaultProjectId,\n canonical.vaultRelPath,\n \"vault\",\n \"topic\",\n chunk.startLine,\n chunk.endLine,\n );\n }\n upsertVaultFile.run(\n canonical.vaultRelPath,\n canonical.inode,\n canonical.device,\n hash,\n title,\n updatedAt,\n );\n })();\n\n result.filesIndexed++;\n result.chunksCreated += chunks.length;\n }\n\n // ---------------------------------------------------------------------------\n // Step 5: Record aliases in vault_aliases\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n\n // Clear old aliases for this vault before rebuilding\n // (We identify vault aliases by checking which canonical paths belong to\n // the canonical files we just indexed — simpler to clear + rebuild all.)\n db.exec(\"DELETE FROM vault_aliases\");\n\n const insertAlias = db.prepare(`\n INSERT OR REPLACE INTO vault_aliases (vault_path, canonical_path, inode, device)\n VALUES (?, ?, ?, ?)\n `);\n\n const insertAliasesTx = db.transaction((groups: InodeGroup[]) => {\n for (const group of groups) {\n for (const alias of group.aliases) {\n insertAlias.run(\n alias.vaultRelPath,\n group.canonical.vaultRelPath,\n alias.inode,\n alias.device,\n );\n result.aliasesRecorded++;\n }\n }\n });\n insertAliasesTx(inodeGroups);\n\n // ---------------------------------------------------------------------------\n // Step 6: Rebuild vault_name_index\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n\n db.exec(\"DELETE FROM vault_name_index\");\n\n const insertNameIndex = db.prepare(`\n INSERT OR REPLACE INTO vault_name_index (name, vault_path) VALUES (?, ?)\n `);\n\n const insertNameIndexTx = db.transaction(\n (entries: Array<[string, string]>) => {\n for (const [name, path] of entries) {\n insertNameIndex.run(name, path);\n }\n },\n );\n\n const nameEntries: Array<[string, string]> = [];\n for (const [name, paths] of nameIndex) {\n for (const path of paths) {\n nameEntries.push([name, path]);\n }\n }\n insertNameIndexTx(nameEntries);\n\n // ---------------------------------------------------------------------------\n // Step 7: Rebuild vault_links\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n\n db.exec(\"DELETE FROM vault_links\");\n\n const insertLink = db.prepare(`\n INSERT OR IGNORE INTO vault_links\n (source_path, target_raw, target_path, link_type, line_number)\n VALUES (?, ?, ?, ?, ?)\n `);\n\n // Parse and resolve wikilinks in bulk transaction\n const linkRows: Array<{\n source: string;\n raw: string;\n target: string | null;\n linkType: string;\n lineNumber: number;\n }> = [];\n\n for (const group of inodeGroups) {\n const { canonical } = group;\n\n let content: string;\n try {\n content = readFileSync(canonical.absPath, \"utf8\");\n } catch {\n continue;\n }\n\n const parsedLinks = parseLinks(content);\n for (const link of parsedLinks) {\n const target = resolveWikilink(link.raw, nameIndex, canonical.vaultRelPath);\n let linkType: 
string;\n if (link.isMdLink) {\n linkType = link.isEmbed ? \"md-embed\" : \"md-link\";\n } else {\n linkType = link.isEmbed ? \"embed\" : \"wikilink\";\n }\n linkRows.push({\n source: canonical.vaultRelPath,\n raw: link.raw,\n target,\n linkType,\n lineNumber: link.lineNumber,\n });\n }\n }\n\n const insertLinksTx = db.transaction(\n (\n rows: Array<{\n source: string;\n raw: string;\n target: string | null;\n linkType: string;\n lineNumber: number;\n }>,\n ) => {\n for (const row of rows) {\n insertLink.run(row.source, row.raw, row.target, row.linkType, row.lineNumber);\n }\n },\n );\n insertLinksTx(linkRows);\n\n result.linksExtracted = linkRows.length;\n result.deadLinksFound = linkRows.filter((r) => r.target === null).length;\n\n // ---------------------------------------------------------------------------\n // Step 8: Compute and upsert vault_health metrics\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n\n // Count outbound links per source\n const outboundCounts = db\n .prepare(\n `SELECT source_path, COUNT(*) AS cnt FROM vault_links GROUP BY source_path`,\n )\n .all() as Array<{ source_path: string; cnt: number }>;\n\n // Count dead links per source\n const deadLinkCounts = db\n .prepare(\n `SELECT source_path, COUNT(*) AS cnt FROM vault_links\n WHERE target_path IS NULL GROUP BY source_path`,\n )\n .all() as Array<{ source_path: string; cnt: number }>;\n\n // Count inbound links per target\n const inboundCounts = db\n .prepare(\n `SELECT target_path, COUNT(*) AS cnt FROM vault_links\n WHERE target_path IS NOT NULL GROUP BY target_path`,\n )\n .all() as Array<{ target_path: string; cnt: number }>;\n\n // Build maps for O(1) lookup\n const outboundMap = new Map<string, number>(\n outboundCounts.map((r) => [r.source_path, r.cnt]),\n );\n const deadMap = new Map<string, number>(\n deadLinkCounts.map((r) => [r.source_path, r.cnt]),\n );\n const inboundMap = new Map<string, number>(\n inboundCounts.map((r) => [r.target_path, r.cnt]),\n );\n\n const upsertHealth = db.prepare(`\n INSERT INTO vault_health\n (vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at)\n VALUES (?, ?, ?, ?, ?, ?)\n ON CONFLICT(vault_path) DO UPDATE SET\n inbound_count = excluded.inbound_count,\n outbound_count = excluded.outbound_count,\n dead_link_count = excluded.dead_link_count,\n is_orphan = excluded.is_orphan,\n computed_at = excluded.computed_at\n `);\n\n const computedAt = Date.now();\n let orphanCount = 0;\n\n const upsertHealthTx = db.transaction((groups: InodeGroup[]) => {\n for (const group of groups) {\n const path = group.canonical.vaultRelPath;\n const inbound = inboundMap.get(path) ?? 0;\n const outbound = outboundMap.get(path) ?? 0;\n const dead = deadMap.get(path) ?? 0;\n const isOrphan = inbound === 0 ? 
1 : 0;\n if (isOrphan) orphanCount++;\n upsertHealth.run(path, inbound, outbound, dead, isOrphan, computedAt);\n }\n });\n upsertHealthTx(inodeGroups);\n\n result.orphansFound = orphanCount;\n result.elapsed = Date.now() - startTime;\n\n return result;\n}\n"]}
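The parseLinks routine embedded above handles both [[wikilinks]] and markdown links: it skips frontmatter, external URLs and pure #anchor targets, URL-decodes the remainder, and strips the .md extension before resolution. A minimal sketch of what its output looks like on a small note; the import path is hypothetical and the commented rows are approximate:

import { parseLinks } from "./vault-indexer"; // hypothetical module path; the export name matches the source above

const note = [
  "See [[Projects/Roadmap|the roadmap]] for details.",
  "Embed: ![[diagrams/architecture]]",
  "Decision: [ADR 1](Decisions/ADR%20001.md)",
  "Skipped: [site](https://example.com) and [same page](#intro)",
].join("\n");

for (const link of parseLinks(note)) {
  // Expected rows, roughly:
  //   { raw: "Projects/Roadmap",      alias: "the roadmap", isEmbed: false, isMdLink: false, ... }
  //   { raw: "diagrams/architecture", alias: null,          isEmbed: true,  isMdLink: false, ... }
  //   { raw: "Decisions/ADR 001",     alias: "ADR 1",       isEmbed: false, isMdLink: true,  ... }
  // The https:// and #intro targets are dropped by the external-URL and anchor checks.
  console.log(link);
}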
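resolveWikilink's documented rules (exact-path match, basename lookup, closest-directory tie-break, dead-link fallback) can be exercised against a hand-built name index. A small sketch, assuming the resolveWikilink export shown in the source above; the vault paths are invented for illustration:

import { resolveWikilink } from "./vault-indexer"; // hypothetical module path

// Lowercase basename (without .md) -> vault-relative paths, as buildNameIndex() produces.
const nameIndex = new Map<string, string[]>([
  ["roadmap", ["projects/alpha/Roadmap.md", "projects/beta/Roadmap.md"]],
  ["inbox", ["Inbox.md"]],
]);

// Rule 1: a target containing "/" matches an indexed path directly (with or without .md).
console.log(resolveWikilink("projects/alpha/Roadmap", nameIndex, "Inbox.md"));
// -> "projects/alpha/Roadmap.md"

// Rules 3-5: an ambiguous basename resolves to the candidate sharing the longest
// directory prefix with the source file.
console.log(resolveWikilink("Roadmap", nameIndex, "projects/beta/notes/Weekly.md"));
// -> "projects/beta/Roadmap.md"

// Rule 6: no candidate at all is a dead link.
console.log(resolveWikilink("Missing Note", nameIndex, "Inbox.md"));
// -> null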
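indexVault ties the steps together and returns the VaultIndexResult counters initialised at the top of the function. A usage sketch, assuming a better-sqlite3-style handle (the excerpt only shows the Database type name, not the driver import), a federation schema that has already been created, and an illustrative project id and vault root:

import Database from "better-sqlite3"; // assumed driver; the source only shows prepare/transaction/exec usage
import { indexVault } from "./vault-indexer"; // hypothetical module path

const db = new Database("/tmp/federation.db"); // illustrative database path
const vaultProjectId = 1;                      // illustrative registry project id
const vaultRoot = "/home/me/vault";            // illustrative vault root

const stats = await indexVault(db, vaultProjectId, vaultRoot);

console.log(
  `${stats.filesIndexed} indexed, ${stats.filesSkipped} unchanged, ` +
    `${stats.chunksCreated} chunks, ${stats.linksExtracted} links ` +
    `(${stats.deadLinksFound} dead), ${stats.orphansFound} orphans, ` +
    `${stats.aliasesRecorded} aliases in ${stats.elapsed} ms`,
);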
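Step 8 leaves per-file health rows in vault_health and unresolved targets in vault_links, so orphan and dead-link reports are plain SQL over the columns used in the INSERT statements above. A sketch under the same driver assumption as the previous example:

import Database from "better-sqlite3"; // assumed driver, as above

const db = new Database("/tmp/federation.db"); // illustrative database path

// Orphans: notes with no inbound links, per the is_orphan flag computed in Step 8.
const orphans = db
  .prepare("SELECT vault_path FROM vault_health WHERE is_orphan = 1 ORDER BY vault_path")
  .all() as Array<{ vault_path: string }>;

// Dead links: rows whose raw target could not be resolved to any vault file.
const deadLinks = db
  .prepare(
    `SELECT source_path, target_raw, line_number
       FROM vault_links
      WHERE target_path IS NULL
      ORDER BY source_path, line_number`,
  )
  .all() as Array<{ source_path: string; target_raw: string; line_number: number }>;

console.log(`${orphans.length} orphaned notes, ${deadLinks.length} dead links`);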