@tekmidian/pai 0.5.7 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ARCHITECTURE.md +72 -1
- package/README.md +87 -1
- package/dist/{auto-route-BG6I_4B1.mjs → auto-route-C-DrW6BL.mjs} +3 -3
- package/dist/{auto-route-BG6I_4B1.mjs.map → auto-route-C-DrW6BL.mjs.map} +1 -1
- package/dist/cli/index.mjs +1482 -1628
- package/dist/cli/index.mjs.map +1 -1
- package/dist/clusters-JIDQW65f.mjs +201 -0
- package/dist/clusters-JIDQW65f.mjs.map +1 -0
- package/dist/{config-Cf92lGX_.mjs → config-BuhHWyOK.mjs} +21 -6
- package/dist/config-BuhHWyOK.mjs.map +1 -0
- package/dist/daemon/index.mjs +11 -8
- package/dist/daemon/index.mjs.map +1 -1
- package/dist/{daemon-2ND5WO2j.mjs → daemon-D3hYb5_C.mjs} +669 -218
- package/dist/daemon-D3hYb5_C.mjs.map +1 -0
- package/dist/daemon-mcp/index.mjs +4597 -4
- package/dist/daemon-mcp/index.mjs.map +1 -1
- package/dist/db-DdUperSl.mjs +110 -0
- package/dist/db-DdUperSl.mjs.map +1 -0
- package/dist/{detect-BU3Nx_2L.mjs → detect-CdaA48EI.mjs} +1 -1
- package/dist/{detect-BU3Nx_2L.mjs.map → detect-CdaA48EI.mjs.map} +1 -1
- package/dist/{detector-Bp-2SM3x.mjs → detector-jGBuYQJM.mjs} +2 -2
- package/dist/{detector-Bp-2SM3x.mjs.map → detector-jGBuYQJM.mjs.map} +1 -1
- package/dist/{factory-Bzcy70G9.mjs → factory-Ygqe_bVZ.mjs} +7 -5
- package/dist/{factory-Bzcy70G9.mjs.map → factory-Ygqe_bVZ.mjs.map} +1 -1
- package/dist/helpers-BEST-4Gx.mjs +420 -0
- package/dist/helpers-BEST-4Gx.mjs.map +1 -0
- package/dist/hooks/capture-all-events.mjs +2 -2
- package/dist/hooks/capture-all-events.mjs.map +3 -3
- package/dist/hooks/capture-session-summary.mjs +38 -0
- package/dist/hooks/capture-session-summary.mjs.map +3 -3
- package/dist/hooks/cleanup-session-files.mjs +6 -12
- package/dist/hooks/cleanup-session-files.mjs.map +4 -4
- package/dist/hooks/context-compression-hook.mjs +93 -104
- package/dist/hooks/context-compression-hook.mjs.map +4 -4
- package/dist/hooks/initialize-session.mjs +14 -11
- package/dist/hooks/initialize-session.mjs.map +4 -4
- package/dist/hooks/inject-observations.mjs +220 -0
- package/dist/hooks/inject-observations.mjs.map +7 -0
- package/dist/hooks/load-core-context.mjs +2 -2
- package/dist/hooks/load-core-context.mjs.map +3 -3
- package/dist/hooks/load-project-context.mjs +90 -91
- package/dist/hooks/load-project-context.mjs.map +4 -4
- package/dist/hooks/observe.mjs +354 -0
- package/dist/hooks/observe.mjs.map +7 -0
- package/dist/hooks/stop-hook.mjs +94 -107
- package/dist/hooks/stop-hook.mjs.map +4 -4
- package/dist/hooks/sync-todo-to-md.mjs +31 -33
- package/dist/hooks/sync-todo-to-md.mjs.map +4 -4
- package/dist/index.d.mts +30 -7
- package/dist/index.d.mts.map +1 -1
- package/dist/index.mjs +5 -8
- package/dist/indexer-D53l5d1U.mjs +1 -0
- package/dist/{indexer-backend-CIMXedqk.mjs → indexer-backend-jcJFsmB4.mjs} +37 -127
- package/dist/indexer-backend-jcJFsmB4.mjs.map +1 -0
- package/dist/{ipc-client-Bjg_a1dc.mjs → ipc-client-CoyUHPod.mjs} +2 -7
- package/dist/{ipc-client-Bjg_a1dc.mjs.map → ipc-client-CoyUHPod.mjs.map} +1 -1
- package/dist/latent-ideas-bTJo6Omd.mjs +191 -0
- package/dist/latent-ideas-bTJo6Omd.mjs.map +1 -0
- package/dist/neighborhood-BYYbEkUJ.mjs +135 -0
- package/dist/neighborhood-BYYbEkUJ.mjs.map +1 -0
- package/dist/note-context-BK24bX8Y.mjs +126 -0
- package/dist/note-context-BK24bX8Y.mjs.map +1 -0
- package/dist/postgres-CKf-EDtS.mjs +846 -0
- package/dist/postgres-CKf-EDtS.mjs.map +1 -0
- package/dist/{reranker-D7bRAHi6.mjs → reranker-CMNZcfVx.mjs} +1 -1
- package/dist/{reranker-D7bRAHi6.mjs.map → reranker-CMNZcfVx.mjs.map} +1 -1
- package/dist/{search-_oHfguA5.mjs → search-DC1qhkKn.mjs} +2 -58
- package/dist/search-DC1qhkKn.mjs.map +1 -0
- package/dist/{sqlite-WWBq7_2C.mjs → sqlite-l-s9xPjY.mjs} +160 -3
- package/dist/sqlite-l-s9xPjY.mjs.map +1 -0
- package/dist/state-C6_vqz7w.mjs +102 -0
- package/dist/state-C6_vqz7w.mjs.map +1 -0
- package/dist/stop-words-BaMEGVeY.mjs +326 -0
- package/dist/stop-words-BaMEGVeY.mjs.map +1 -0
- package/dist/{indexer-CMPOiY1r.mjs → sync-BOsnEj2-.mjs} +14 -216
- package/dist/sync-BOsnEj2-.mjs.map +1 -0
- package/dist/themes-BvYF0W8T.mjs +148 -0
- package/dist/themes-BvYF0W8T.mjs.map +1 -0
- package/dist/{tools-DV_lsiCc.mjs → tools-DcaJlYDN.mjs} +162 -273
- package/dist/tools-DcaJlYDN.mjs.map +1 -0
- package/dist/trace-CRx9lPuc.mjs +137 -0
- package/dist/trace-CRx9lPuc.mjs.map +1 -0
- package/dist/{vault-indexer-k-kUlaZ-.mjs → vault-indexer-Bi2cRmn7.mjs} +134 -132
- package/dist/vault-indexer-Bi2cRmn7.mjs.map +1 -0
- package/dist/zettelkasten-cdajbnPr.mjs +708 -0
- package/dist/zettelkasten-cdajbnPr.mjs.map +1 -0
- package/package.json +1 -2
- package/src/hooks/ts/lib/project-utils/index.ts +50 -0
- package/src/hooks/ts/lib/project-utils/notify.ts +75 -0
- package/src/hooks/ts/lib/project-utils/paths.ts +218 -0
- package/src/hooks/ts/lib/project-utils/session-notes.ts +363 -0
- package/src/hooks/ts/lib/project-utils/todo.ts +178 -0
- package/src/hooks/ts/lib/project-utils/tokens.ts +39 -0
- package/src/hooks/ts/lib/project-utils.ts +40 -1018
- package/src/hooks/ts/post-tool-use/observe.ts +327 -0
- package/src/hooks/ts/session-end/capture-session-summary.ts +41 -0
- package/src/hooks/ts/session-start/inject-observations.ts +254 -0
- package/dist/chunker-CbnBe0s0.mjs +0 -191
- package/dist/chunker-CbnBe0s0.mjs.map +0 -1
- package/dist/config-Cf92lGX_.mjs.map +0 -1
- package/dist/daemon-2ND5WO2j.mjs.map +0 -1
- package/dist/db-Dp8VXIMR.mjs +0 -212
- package/dist/db-Dp8VXIMR.mjs.map +0 -1
- package/dist/indexer-CMPOiY1r.mjs.map +0 -1
- package/dist/indexer-backend-CIMXedqk.mjs.map +0 -1
- package/dist/mcp/index.d.mts +0 -1
- package/dist/mcp/index.mjs +0 -500
- package/dist/mcp/index.mjs.map +0 -1
- package/dist/postgres-FXrHDPcE.mjs +0 -358
- package/dist/postgres-FXrHDPcE.mjs.map +0 -1
- package/dist/schemas-BFIgGntb.mjs +0 -3405
- package/dist/schemas-BFIgGntb.mjs.map +0 -1
- package/dist/search-_oHfguA5.mjs.map +0 -1
- package/dist/sqlite-WWBq7_2C.mjs.map +0 -1
- package/dist/tools-DV_lsiCc.mjs.map +0 -1
- package/dist/vault-indexer-k-kUlaZ-.mjs.map +0 -1
- package/dist/zettelkasten-e-a4rW_6.mjs +0 -901
- package/dist/zettelkasten-e-a4rW_6.mjs.map +0 -1
- package/templates/README.md +0 -181
- package/templates/skills/CORE/Aesthetic.md +0 -333
- package/templates/skills/CORE/CONSTITUTION.md +0 -1502
- package/templates/skills/CORE/HistorySystem.md +0 -427
- package/templates/skills/CORE/HookSystem.md +0 -1082
- package/templates/skills/CORE/Prompting.md +0 -509
- package/templates/skills/CORE/ProsodyAgentTemplate.md +0 -53
- package/templates/skills/CORE/ProsodyGuide.md +0 -416
- package/templates/skills/CORE/SKILL.md +0 -741
- package/templates/skills/CORE/SkillSystem.md +0 -213
- package/templates/skills/CORE/TerminalTabs.md +0 -119
- package/templates/skills/CORE/VOICE.md +0 -106
- package/templates/skills/createskill-skill.template.md +0 -78
- package/templates/skills/history-system.template.md +0 -371
- package/templates/skills/hook-system.template.md +0 -913
- package/templates/skills/sessions-skill.template.md +0 -102
- package/templates/skills/skill-system.template.md +0 -214
- package/templates/skills/terminal-tabs.template.md +0 -120
- package/templates/templates.md +0 -20
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"zettelkasten-cdajbnPr.mjs","names":["MAX_CHUNKS"],"sources":["../src/zettelkasten/explore.ts","../src/zettelkasten/surprise.ts","../src/zettelkasten/converse.ts","../src/zettelkasten/health.ts","../src/zettelkasten/suggest.ts"],"sourcesContent":["import type { StorageBackend } from \"../storage/interface.js\";\nimport { dirname } from \"node:path\";\n\nexport interface ExploreOptions {\n startNote: string;\n depth?: number;\n direction?: \"forward\" | \"backward\" | \"both\";\n mode?: \"sequential\" | \"associative\" | \"all\";\n}\n\nexport interface ExploreNode {\n path: string;\n title: string | null;\n depth: number;\n linkType: \"sequential\" | \"associative\";\n inbound: number;\n outbound: number;\n}\n\nexport interface ExploreResult {\n root: string;\n nodes: ExploreNode[];\n edges: Array<{ from: string; to: string; type: \"sequential\" | \"associative\" }>;\n branchingPoints: string[];\n maxDepthReached: boolean;\n}\n\nfunction classifyEdge(source: string, target: string): \"sequential\" | \"associative\" {\n return dirname(source) === dirname(target) ? \"sequential\" : \"associative\";\n}\n\nasync function resolveStart(backend: StorageBackend, startNote: string): Promise<string | null> {\n // Try direct lookup first\n const files = await backend.getVaultFilesByPaths([startNote]);\n if (files.length > 0) return files[0].vaultPath;\n\n // Try alias lookup\n const alias = await backend.getVaultAlias(startNote);\n if (!alias) return null;\n\n const canonical = await backend.getVaultFilesByPaths([alias.canonicalPath]);\n return canonical.length > 0 ? 
canonical[0].vaultPath : null;\n}\n\nasync function getForwardNeighbors(backend: StorageBackend, path: string): Promise<string[]> {\n const links = await backend.getLinksFromSource(path);\n return links.filter(l => l.targetPath !== null).map(l => l.targetPath as string);\n}\n\nasync function getBackwardNeighbors(backend: StorageBackend, path: string): Promise<string[]> {\n const links = await backend.getLinksToTarget(path);\n return links.map(l => l.sourcePath);\n}\n\nasync function getFileInfo(\n backend: StorageBackend,\n path: string,\n): Promise<{ title: string | null; inbound: number; outbound: number }> {\n const [files, health] = await Promise.all([\n backend.getVaultFilesByPaths([path]),\n backend.getVaultHealth(path),\n ]);\n\n return {\n title: files[0]?.title ?? null,\n inbound: health?.inboundCount ?? 0,\n outbound: health?.outboundCount ?? 0,\n };\n}\n\n/**\n * Traverse the Zettelkasten link graph using BFS, following chains of thought\n * from a starting note up to a configurable depth.\n */\nexport async function zettelExplore(backend: StorageBackend, opts: ExploreOptions): Promise<ExploreResult> {\n const depth = Math.min(Math.max(opts.depth ?? 3, 1), 10);\n const direction = opts.direction ?? \"both\";\n const mode = opts.mode ?? 
\"all\";\n\n const root = await resolveStart(backend, opts.startNote);\n if (!root) {\n return {\n root: opts.startNote,\n nodes: [],\n edges: [],\n branchingPoints: [],\n maxDepthReached: false,\n };\n }\n\n const visited = new Set<string>([root]);\n const nodes: ExploreNode[] = [];\n const edges: Array<{ from: string; to: string; type: \"sequential\" | \"associative\" }> = [];\n let maxDepthReached = false;\n\n const queue: Array<{ path: string; depth: number }> = [{ path: root, depth: 0 }];\n\n while (queue.length > 0) {\n const current = queue.shift()!;\n\n if (current.depth >= depth) {\n maxDepthReached = true;\n continue;\n }\n\n const neighbors: Array<{ neighbor: string; from: string; to: string }> = [];\n\n if (direction === \"forward\" || direction === \"both\") {\n for (const n of await getForwardNeighbors(backend, current.path)) {\n neighbors.push({ neighbor: n, from: current.path, to: n });\n }\n }\n\n if (direction === \"backward\" || direction === \"both\") {\n for (const n of await getBackwardNeighbors(backend, current.path)) {\n neighbors.push({ neighbor: n, from: n, to: current.path });\n }\n }\n\n for (const { neighbor, from, to } of neighbors) {\n const edgeType = classifyEdge(from, to);\n\n if (mode !== \"all\" && edgeType !== mode) {\n continue;\n }\n\n const alreadyHasEdge = edges.some((e) => e.from === from && e.to === to);\n if (!alreadyHasEdge) {\n edges.push({ from, to, type: edgeType });\n }\n\n if (!visited.has(neighbor)) {\n visited.add(neighbor);\n\n const info = await getFileInfo(backend, neighbor);\n nodes.push({\n path: neighbor,\n title: info.title,\n depth: current.depth + 1,\n linkType: edgeType,\n inbound: info.inbound,\n outbound: info.outbound,\n });\n\n queue.push({ path: neighbor, depth: current.depth + 1 });\n }\n }\n }\n\n const branchingPoints = nodes\n .filter((n) => n.outbound > 2)\n .map((n) => n.path);\n\n const rootInfo = await getFileInfo(backend, root);\n if (rootInfo.outbound > 2) {\n 
branchingPoints.unshift(root);\n }\n\n return { root, nodes, edges, branchingPoints, maxDepthReached };\n}\n","import type { StorageBackend } from \"../storage/interface.js\";\nimport {\n deserializeEmbedding,\n generateEmbedding,\n cosineSimilarity,\n} from \"../memory/embeddings.js\";\n\nexport interface SurpriseOptions {\n referencePath: string;\n vaultProjectId: number;\n limit?: number;\n minSimilarity?: number;\n minGraphDistance?: number;\n}\n\nexport interface SurpriseResult {\n path: string;\n title: string | null;\n cosineSimilarity: number;\n graphDistance: number;\n surpriseScore: number;\n sharedSnippet: string;\n}\n\nconst MAX_CHUNKS = 5000;\nconst BFS_HOP_CAP = 20;\n\nasync function getFileEmbeddings(\n backend: StorageBackend,\n projectId: number,\n): Promise<Map<string, { embedding: Float32Array; text: string }>> {\n const rows = await backend.getChunksWithEmbeddings(projectId, MAX_CHUNKS);\n\n const byPath = new Map<string, { sum: Float32Array; count: number; text: string }>();\n for (const row of rows) {\n const vec = deserializeEmbedding(row.embedding);\n const entry = byPath.get(row.path);\n if (!entry) {\n byPath.set(row.path, { sum: new Float32Array(vec), count: 1, text: row.text });\n } else {\n for (let i = 0; i < vec.length; i++) {\n entry.sum[i] += vec[i];\n }\n entry.count++;\n }\n }\n\n const result = new Map<string, { embedding: Float32Array; text: string }>();\n for (const [path, { sum, count, text }] of byPath) {\n const avg = new Float32Array(sum.length);\n for (let i = 0; i < sum.length; i++) {\n avg[i] = sum[i] / count;\n }\n result.set(path, { embedding: avg, text });\n }\n return result;\n}\n\nasync function getReferenceEmbedding(\n backend: StorageBackend,\n projectId: number,\n path: string,\n): Promise<{ embedding: Float32Array; found: boolean }> {\n const rows = await backend.getChunksForPath(projectId, path);\n\n if (rows.length === 0) {\n return { embedding: new Float32Array(0), found: false };\n }\n\n const embRows = 
rows.filter(r => r.embedding !== null) as Array<{ text: string; embedding: Buffer }>;\n if (embRows.length === 0) {\n return { embedding: new Float32Array(0), found: false };\n }\n\n const dim = deserializeEmbedding(embRows[0].embedding).length;\n const sum = new Float32Array(dim);\n for (const row of embRows) {\n const vec = deserializeEmbedding(row.embedding);\n for (let i = 0; i < dim; i++) {\n sum[i] += vec[i];\n }\n }\n const avg = new Float32Array(dim);\n for (let i = 0; i < dim; i++) {\n avg[i] = sum[i] / embRows.length;\n }\n return { embedding: avg, found: true };\n}\n\nasync function bfsGraphDistance(backend: StorageBackend, source: string, target: string): Promise<number> {\n if (source === target) return 0;\n\n const visited = new Set<string>([source]);\n const queue: Array<{ path: string; hops: number }> = [{ path: source, hops: 0 }];\n\n while (queue.length > 0) {\n const { path, hops } = queue.shift()!;\n if (hops >= BFS_HOP_CAP) continue;\n\n const [forwardLinks, backwardLinks] = await Promise.all([\n backend.getLinksFromSource(path),\n backend.getLinksToTarget(path),\n ]);\n\n const neighbors: string[] = [\n ...forwardLinks.filter(l => l.targetPath !== null).map(l => l.targetPath as string),\n ...backwardLinks.map(l => l.sourcePath),\n ];\n\n for (const neighbor of neighbors) {\n if (neighbor === target) return hops + 1;\n if (!visited.has(neighbor)) {\n visited.add(neighbor);\n queue.push({ path: neighbor, hops: hops + 1 });\n }\n }\n }\n\n return Infinity;\n}\n\nfunction getBestChunkText(\n chunkRows: Array<{ text: string; embedding: Buffer | null }>,\n refEmbedding: Float32Array,\n): string {\n const rows = chunkRows.filter(r => r.embedding !== null) as Array<{ text: string; embedding: Buffer }>;\n if (rows.length === 0) return \"\";\n\n let bestText = rows[0].text;\n let bestSim = -Infinity;\n\n for (const row of rows) {\n const vec = deserializeEmbedding(row.embedding);\n const sim = cosineSimilarity(refEmbedding, vec);\n if (sim > bestSim) 
{\n bestSim = sim;\n bestText = row.text;\n }\n }\n\n return bestText.trim().slice(0, 200);\n}\n\n/**\n * Find notes that are semantically similar to a reference note but graph-distant —\n * revealing surprising conceptual connections across unrelated areas of the Zettelkasten.\n */\nexport async function zettelSurprise(\n backend: StorageBackend,\n opts: SurpriseOptions,\n): Promise<SurpriseResult[]> {\n const limit = opts.limit ?? 10;\n const minSimilarity = opts.minSimilarity ?? 0.3;\n const minGraphDistance = opts.minGraphDistance ?? 3;\n\n let { embedding: refEmbedding, found } = await getReferenceEmbedding(\n backend,\n opts.vaultProjectId,\n opts.referencePath,\n );\n\n // Fall back to generating an embedding from the file title if no chunks exist\n if (!found) {\n const files = await backend.getVaultFilesByPaths([opts.referencePath]);\n const text = files[0]?.title ?? opts.referencePath;\n refEmbedding = await generateEmbedding(text, true);\n }\n\n const allFileEmbeddings = await getFileEmbeddings(backend, opts.vaultProjectId);\n\n // Remove the reference note itself from candidates\n allFileEmbeddings.delete(opts.referencePath);\n\n // First pass: filter by semantic similarity to avoid BFS on all nodes\n const semanticCandidates: Array<{ path: string; sim: number }> = [];\n for (const [path, { embedding }] of allFileEmbeddings) {\n const sim = cosineSimilarity(refEmbedding, embedding);\n if (sim >= minSimilarity) {\n semanticCandidates.push({ path, sim });\n }\n }\n\n // Compute graph distances for semantic candidates\n const results: SurpriseResult[] = [];\n\n for (const { path, sim } of semanticCandidates) {\n const graphDistance = await bfsGraphDistance(backend, opts.referencePath, path);\n\n const effectiveDistance = isFinite(graphDistance) ? 
graphDistance : BFS_HOP_CAP;\n if (effectiveDistance < minGraphDistance) continue;\n\n const files = await backend.getVaultFilesByPaths([path]);\n const chunkRows = await backend.getChunksForPath(opts.vaultProjectId, path, 20);\n\n const surpriseScore = sim * Math.log2(effectiveDistance + 1);\n const sharedSnippet = getBestChunkText(chunkRows, refEmbedding);\n\n results.push({\n path,\n title: files[0]?.title ?? null,\n cosineSimilarity: sim,\n graphDistance: isFinite(graphDistance) ? graphDistance : Infinity,\n surpriseScore,\n sharedSnippet,\n });\n }\n\n results.sort((a, b) => b.surpriseScore - a.surpriseScore);\n return results.slice(0, limit);\n}\n","import type { StorageBackend } from \"../storage/interface.js\";\nimport type { SearchResult } from \"../memory/search.js\";\nimport { generateEmbedding } from \"../memory/embeddings.js\";\n\nexport interface ConverseOptions {\n /** The user's question or topic to explore. */\n question: string;\n /** project_id for vault chunks in memory_chunks. */\n vaultProjectId: number;\n /** Graph expansion depth. Default 2. */\n depth?: number;\n /** Maximum number of relevant notes to return. Default 15. */\n limit?: number;\n}\n\nexport interface ConverseConnection {\n fromPath: string;\n toPath: string;\n /** Top-level folder of fromPath. */\n fromDomain: string;\n /** Top-level folder of toPath. */\n toDomain: string;\n /** Link count between these two notes (can be > 1). */\n strength: number;\n}\n\nexport interface ConverseResult {\n relevantNotes: Array<{\n path: string;\n title: string | null;\n snippet: string;\n score: number;\n domain: string;\n }>;\n /** Cross-domain connections found among the selected notes. */\n connections: ConverseConnection[];\n /** Unique domains involved across all selected notes. */\n domains: string[];\n /** AI-ready prompt combining notes + connections for insight generation. 
*/\n synthesisPrompt: string;\n}\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Extract the top-level folder from a vault path (first path segment). */\nfunction extractDomain(vaultPath: string): string {\n const slash = vaultPath.indexOf(\"/\");\n return slash === -1 ? vaultPath : vaultPath.slice(0, slash);\n}\n\n/**\n * Expand one level of graph neighbors for a set of paths.\n * Returns all outbound and inbound neighbor paths (excluding already-visited).\n */\nasync function expandNeighbors(backend: StorageBackend, paths: Set<string>): Promise<string[]> {\n if (paths.size === 0) return [];\n const pathList = Array.from(paths);\n\n const [forwardLinks, backwardLinks] = await Promise.all([\n backend.getVaultLinksFromPaths(pathList),\n Promise.all(pathList.map(p => backend.getLinksToTarget(p))),\n ]);\n\n const neighbors: string[] = [];\n for (const link of forwardLinks) {\n if (link.targetPath) neighbors.push(link.targetPath);\n }\n for (const linkList of backwardLinks) {\n for (const link of linkList) {\n neighbors.push(link.sourcePath);\n }\n }\n return neighbors;\n}\n\n/**\n * Hybrid search combining keyword + semantic results using the StorageBackend.\n */\nasync function hybridSearch(\n backend: StorageBackend,\n query: string,\n queryEmbedding: Float32Array,\n opts: { projectIds?: number[]; maxResults?: number },\n): Promise<SearchResult[]> {\n const maxResults = opts.maxResults ?? 
10;\n const kw = 0.5;\n const sw = 0.5;\n\n const [keywordResults, semanticResults] = await Promise.all([\n backend.searchKeyword(query, { ...opts, maxResults: 50 }),\n backend.searchSemantic(queryEmbedding, { ...opts, maxResults: 50 }),\n ]);\n\n if (keywordResults.length === 0 && semanticResults.length === 0) return [];\n\n const keyFor = (r: SearchResult) =>\n `${r.projectId}:${r.path}:${r.startLine}:${r.endLine}`;\n\n function minMaxNormalize(scores: number[]): number[] {\n const min = Math.min(...scores);\n const max = Math.max(...scores);\n const range = max - min;\n if (range === 0) return scores.map(() => 1.0);\n return scores.map(s => (s - min) / range);\n }\n\n const kwNorm = minMaxNormalize(keywordResults.map(r => r.score));\n const semNorm = minMaxNormalize(semanticResults.map(r => r.score));\n\n const combined = new Map<string, SearchResult & { combinedScore: number }>();\n\n for (let i = 0; i < keywordResults.length; i++) {\n const r = keywordResults[i];\n const k = keyFor(r);\n combined.set(k, { ...r, combinedScore: kw * kwNorm[i] });\n }\n\n for (let i = 0; i < semanticResults.length; i++) {\n const r = semanticResults[i];\n const k = keyFor(r);\n const existing = combined.get(k);\n if (existing) {\n existing.combinedScore += sw * semNorm[i];\n } else {\n combined.set(k, { ...r, combinedScore: sw * semNorm[i] });\n }\n }\n\n const sorted = Array.from(combined.values())\n .sort((a, b) => b.combinedScore - a.combinedScore)\n .slice(0, maxResults);\n\n return sorted.map(r => ({ ...r, score: r.combinedScore }));\n}\n\n// ---------------------------------------------------------------------------\n// Main export\n// ---------------------------------------------------------------------------\n\n/**\n * Let the vault \"talk back\" — find notes relevant to a question, expand\n * through the link graph, identify cross-domain connections, and return a\n * structured result including a synthesis prompt for an AI to generate insights.\n */\nexport async 
function zettelConverse(\n backend: StorageBackend,\n opts: ConverseOptions,\n): Promise<ConverseResult> {\n const depth = Math.max(opts.depth ?? 2, 0);\n const limit = Math.max(opts.limit ?? 15, 1);\n const candidateLimit = 20;\n\n // ------------------------------------------------------------------\n // 1. Hybrid search: find top candidates via BM25 + semantic similarity\n // ------------------------------------------------------------------\n const queryEmbedding = await generateEmbedding(opts.question, true);\n\n const searchResults = await hybridSearch(\n backend,\n opts.question,\n queryEmbedding,\n {\n projectIds: [opts.vaultProjectId],\n maxResults: candidateLimit,\n },\n );\n\n // Map of path -> best score + snippet from search results\n const searchHits = new Map<string, { score: number; snippet: string }>();\n for (const r of searchResults) {\n const existing = searchHits.get(r.path);\n if (!existing || r.score > existing.score) {\n searchHits.set(r.path, { score: r.score, snippet: r.snippet });\n }\n }\n\n // ------------------------------------------------------------------\n // 2. Graph expansion: BFS from each search result up to `depth` levels\n // ------------------------------------------------------------------\n const allPaths = new Set<string>(searchHits.keys());\n let frontier = new Set<string>(searchHits.keys());\n\n for (let d = 0; d < depth; d++) {\n const neighbors = await expandNeighbors(backend, frontier);\n const newFrontier = new Set<string>();\n for (const n of neighbors) {\n if (!allPaths.has(n)) {\n allPaths.add(n);\n newFrontier.add(n);\n }\n }\n if (newFrontier.size === 0) break;\n frontier = newFrontier;\n }\n\n // ------------------------------------------------------------------\n // 3. 
Deduplicate + trim to limit\n // ------------------------------------------------------------------\n const searchRanked = Array.from(searchHits.entries())\n .sort((a, b) => b[1].score - a[1].score)\n .map(([path, info]) => ({ path, ...info, isSearchResult: true }));\n\n const neighborPaths = Array.from(allPaths).filter((p) => !searchHits.has(p));\n\n // Fetch health data for neighbor ranking\n const neighborHealthRows = await Promise.all(\n neighborPaths.map(p => backend.getVaultHealth(p))\n );\n const neighborRanked = neighborPaths\n .map((path, idx) => ({\n path,\n score: 0,\n snippet: \"\",\n inbound: neighborHealthRows[idx]?.inboundCount ?? 0,\n isSearchResult: false,\n }))\n .sort((a, b) => b.inbound - a.inbound);\n\n const budgetForNeighbors = Math.max(limit - searchRanked.length, 0);\n const selectedNeighbors = neighborRanked.slice(0, budgetForNeighbors);\n\n const selectedSearchPaths = searchRanked.slice(0, limit);\n const selectedPaths = new Set<string>([\n ...selectedSearchPaths.map((r) => r.path),\n ...selectedNeighbors.map((r) => r.path),\n ]);\n\n // ------------------------------------------------------------------\n // 4. Build relevantNotes with titles + domains\n // ------------------------------------------------------------------\n\n // Fetch titles in bulk\n const allSelectedPaths = Array.from(selectedPaths);\n const fileRows = await backend.getVaultFilesByPaths(allSelectedPaths);\n const titleMap = new Map<string, string | null>(fileRows.map(f => [f.vaultPath, f.title]));\n\n const relevantNotes: ConverseResult[\"relevantNotes\"] = [];\n\n for (const r of selectedSearchPaths) {\n if (!selectedPaths.has(r.path)) continue;\n relevantNotes.push({\n path: r.path,\n title: titleMap.get(r.path) ?? null,\n snippet: r.snippet,\n score: r.score,\n domain: extractDomain(r.path),\n });\n }\n\n for (const r of selectedNeighbors) {\n relevantNotes.push({\n path: r.path,\n title: titleMap.get(r.path) ?? 
null,\n snippet: r.snippet,\n score: 0,\n domain: extractDomain(r.path),\n });\n }\n\n // ------------------------------------------------------------------\n // 5. Find connections between the selected notes\n // ------------------------------------------------------------------\n let connections: ConverseConnection[] = [];\n\n if (selectedPaths.size > 0) {\n const pathList = Array.from(selectedPaths);\n const pathSet = new Set(pathList);\n\n // Get all outbound links from the selected paths\n const linkRows = await backend.getVaultLinksFromPaths(pathList);\n\n // Count links between selected paths\n const edgeCounts = new Map<string, number>();\n for (const link of linkRows) {\n if (link.targetPath && pathSet.has(link.targetPath)) {\n const key = `${link.sourcePath}|||${link.targetPath}`;\n edgeCounts.set(key, (edgeCounts.get(key) ?? 0) + 1);\n }\n }\n\n for (const [key, cnt] of edgeCounts) {\n const [sourcePath, targetPath] = key.split(\"|||\");\n connections.push({\n fromPath: sourcePath,\n toPath: targetPath,\n fromDomain: extractDomain(sourcePath),\n toDomain: extractDomain(targetPath),\n strength: cnt,\n });\n }\n }\n\n // ------------------------------------------------------------------\n // 6. Domains + cross-domain filter\n // ------------------------------------------------------------------\n const domainSet = new Set<string>(relevantNotes.map((n) => n.domain));\n const domains = Array.from(domainSet).sort();\n\n const crossDomainConnections = connections.filter(\n (c) => c.fromDomain !== c.toDomain,\n );\n\n // ------------------------------------------------------------------\n // 7. Build synthesis prompt\n // ------------------------------------------------------------------\n const notesSummary = relevantNotes\n .map((n, i) => {\n const title = n.title ? `\"${n.title}\"` : \"(untitled)\";\n const domain = n.domain;\n const scoreLabel = n.score > 0 ? 
` [relevance: ${n.score.toFixed(3)}]` : \" [context]\";\n const snippet = n.snippet.trim().slice(0, 300);\n return `${i + 1}. [${domain}] ${title}${scoreLabel}\\n Path: ${n.path}\\n \"${snippet}\"`;\n })\n .join(\"\\n\\n\");\n\n const connectionSummary =\n crossDomainConnections.length > 0\n ? crossDomainConnections\n .map(\n (c) =>\n `- \"${c.fromPath}\" (${c.fromDomain}) → \"${c.toPath}\" (${c.toDomain}) [strength: ${c.strength}]`,\n )\n .join(\"\\n\")\n : \"(no cross-domain connections found)\";\n\n const domainList = domains.join(\", \");\n\n const synthesisPrompt = `You are a Zettelkasten research assistant. The vault has surfaced the following notes in response to this question:\n\nQUESTION: ${opts.question}\n\n---\n\nRELEVANT NOTES (${relevantNotes.length} notes across ${domains.length} domain(s): ${domainList}):\n\n${notesSummary}\n\n---\n\nCROSS-DOMAIN CONNECTIONS (links bridging different knowledge areas):\n\n${connectionSummary}\n\n---\n\nSYNTHESIS TASK:\n\nBased on these notes and the connections between them, please:\n\n1. Identify the key insights that emerge in direct response to the question.\n2. Highlight any unexpected connections between notes from different domains (${domainList}).\n3. Point out tensions, contradictions, or open questions the vault raises but does not resolve.\n4. Suggest what is notably absent — what the vault does NOT yet contain that would strengthen the understanding of this topic.\n5. 
Propose 2-3 new notes that would meaningfully extend this knowledge cluster.\n\nThink like a scholar who has deeply internalized these ideas and is now synthesizing them for the first time.`;\n\n return {\n relevantNotes,\n connections: crossDomainConnections,\n domains,\n synthesisPrompt,\n };\n}\n","import type { StorageBackend } from \"../storage/interface.js\";\n\nexport interface HealthOptions {\n scope?: \"full\" | \"recent\" | \"project\";\n projectPath?: string;\n recentDays?: number;\n include?: Array<\"dead_links\" | \"orphans\" | \"disconnected\" | \"low_connectivity\">;\n}\n\nexport interface DeadLink {\n sourcePath: string;\n targetRaw: string;\n lineNumber: number;\n}\n\nexport interface HealthResult {\n totalFiles: number;\n totalLinks: number;\n deadLinks: DeadLink[];\n orphans: string[];\n disconnectedClusters: number;\n lowConnectivity: string[];\n healthScore: number;\n computedAt: number;\n}\n\nfunction countComponents(nodes: string[], edges: Array<{ source: string; target: string }>): number {\n if (nodes.length === 0) return 0;\n\n const parent = new Map<string, string>();\n const rank = new Map<string, number>();\n\n for (const n of nodes) {\n parent.set(n, n);\n rank.set(n, 0);\n }\n\n function find(x: string): string {\n let root = x;\n while (parent.get(root) !== root) {\n root = parent.get(root)!;\n }\n let current = x;\n while (current !== root) {\n const next = parent.get(current)!;\n parent.set(current, root);\n current = next;\n }\n return root;\n }\n\n function union(a: string, b: string): void {\n const ra = find(a);\n const rb = find(b);\n if (ra === rb) return;\n const rankA = rank.get(ra) ?? 0;\n const rankB = rank.get(rb) ?? 
0;\n if (rankA < rankB) {\n parent.set(ra, rb);\n } else if (rankA > rankB) {\n parent.set(rb, ra);\n } else {\n parent.set(rb, ra);\n rank.set(ra, rankA + 1);\n }\n }\n\n for (const { source, target } of edges) {\n if (parent.has(source) && parent.has(target)) {\n union(source, target);\n }\n }\n\n const roots = new Set<string>();\n for (const n of nodes) {\n roots.add(find(n));\n }\n return roots.size;\n}\n\n/**\n * Audit the structural health of the Zettelkasten vault using graph metrics.\n */\nexport async function zettelHealth(backend: StorageBackend, opts?: HealthOptions): Promise<HealthResult> {\n const options = opts ?? {};\n const scope = options.scope ?? \"full\";\n const include = options.include ?? [\"dead_links\", \"orphans\", \"disconnected\", \"low_connectivity\"];\n\n const computedAt = Date.now();\n\n // --- totalFiles ---\n let totalFiles = 0;\n if (scope === \"full\") {\n totalFiles = await backend.countVaultFiles();\n } else if (scope === \"project\") {\n const prefix = options.projectPath ?? \"\";\n totalFiles = await backend.countVaultFilesWithPrefix(prefix);\n } else {\n const days = options.recentDays ?? 30;\n const cutoff = computedAt - days * 86400000;\n totalFiles = await backend.countVaultFilesAfter(cutoff);\n }\n\n // --- totalLinks ---\n let totalLinks = 0;\n if (scope === \"full\") {\n // Count total links via link graph length\n const graph = await backend.getVaultLinkGraph();\n totalLinks = graph.length;\n } else if (scope === \"project\") {\n const prefix = options.projectPath ?? \"\";\n totalLinks = await backend.countVaultLinksWithPrefix(prefix);\n } else {\n const days = options.recentDays ?? 
30;\n const cutoff = computedAt - days * 86400000;\n totalLinks = await backend.countVaultLinksAfter(cutoff);\n }\n\n // --- deadLinks ---\n let deadLinks: DeadLink[] = [];\n if (include.includes(\"dead_links\")) {\n if (scope === \"full\") {\n deadLinks = await backend.getDeadLinksWithLineNumbers();\n } else if (scope === \"project\") {\n const prefix = options.projectPath ?? \"\";\n deadLinks = await backend.getDeadLinksWithPrefix(prefix);\n } else {\n const days = options.recentDays ?? 30;\n const cutoff = computedAt - days * 86400000;\n deadLinks = await backend.getDeadLinksAfter(cutoff);\n }\n }\n\n // --- orphans ---\n let orphans: string[] = [];\n if (include.includes(\"orphans\")) {\n if (scope === \"full\") {\n const orphanRows = await backend.getOrphans();\n orphans = orphanRows.map(r => r.vaultPath);\n } else if (scope === \"project\") {\n const prefix = options.projectPath ?? \"\";\n orphans = await backend.getOrphansWithPrefix(prefix);\n } else {\n const days = options.recentDays ?? 30;\n const cutoff = computedAt - days * 86400000;\n orphans = await backend.getOrphansAfter(cutoff);\n }\n }\n\n // --- disconnectedClusters (union-find) ---\n let disconnectedClusters = 1;\n if (include.includes(\"disconnected\")) {\n let allNodes: string[];\n let allEdges: Array<{ source: string; target: string }>;\n\n if (scope === \"full\") {\n [allNodes, allEdges] = await Promise.all([\n backend.getAllVaultFilePaths(),\n backend.getVaultLinkEdges(),\n ]);\n } else if (scope === \"project\") {\n const prefix = options.projectPath ?? \"\";\n [allNodes, allEdges] = await Promise.all([\n backend.getVaultFilePathsWithPrefix(prefix),\n backend.getVaultLinkEdgesWithPrefix(prefix),\n ]);\n } else {\n const days = options.recentDays ?? 
30;\n const cutoff = computedAt - days * 86400000;\n [allNodes, allEdges] = await Promise.all([\n backend.getVaultFilePathsAfter(cutoff),\n backend.getVaultLinkEdgesAfter(cutoff),\n ]);\n }\n\n disconnectedClusters = countComponents(allNodes, allEdges);\n }\n\n // --- lowConnectivity ---\n let lowConnectivity: string[] = [];\n if (include.includes(\"low_connectivity\")) {\n if (scope === \"full\") {\n lowConnectivity = await backend.getLowConnectivity();\n } else if (scope === \"project\") {\n const prefix = options.projectPath ?? \"\";\n lowConnectivity = await backend.getLowConnectivityWithPrefix(prefix);\n } else {\n const days = options.recentDays ?? 30;\n const cutoff = computedAt - days * 86400000;\n lowConnectivity = await backend.getLowConnectivityAfter(cutoff);\n }\n }\n\n // --- healthScore ---\n const deadRatio = totalLinks > 0 ? deadLinks.length / totalLinks : 0;\n const orphanRatio = totalFiles > 0 ? orphans.length / totalFiles : 0;\n const lowConnRatio = totalFiles > 0 ? lowConnectivity.length / totalFiles : 0;\n const healthScore = Math.round(\n 100 * (1 - deadRatio) * (1 - orphanRatio * 0.5) * (1 - lowConnRatio * 0.3),\n );\n\n return {\n totalFiles,\n totalLinks,\n deadLinks,\n orphans,\n disconnectedClusters,\n lowConnectivity,\n healthScore,\n computedAt,\n };\n}\n","import type { StorageBackend } from \"../storage/interface.js\";\nimport { deserializeEmbedding, cosineSimilarity } from \"../memory/embeddings.js\";\nimport { basename } from \"node:path\";\nimport { STOP_WORDS } from \"../utils/stop-words.js\";\n\nexport interface SuggestOptions {\n notePath: string;\n vaultProjectId: number;\n limit?: number;\n excludeLinked?: boolean;\n}\n\nexport interface Suggestion {\n path: string;\n title: string | null;\n score: number;\n semanticScore: number;\n tagScore: number;\n neighborScore: number;\n reason: string;\n suggestedWikilink: string;\n}\n\nconst MAX_CHUNKS = 5000;\nconst SEMANTIC_WEIGHT = 0.5;\nconst TAG_WEIGHT = 0.2;\nconst 
NEIGHBOR_WEIGHT = 0.3;\n\n// STOP_WORDS imported from utils/stop-words.ts\n\nfunction extractTagsFromChunkTexts(texts: string[]): Set<string> {\n const tags = new Set<string>();\n for (const text of texts) {\n // Match YAML frontmatter tags block: \"tags:\\n - tag1\\n - tag2\"\n const match = text.match(/^tags:\\s*\\n((?:[ \\t]*-[ \\t]*.+\\n?)*)/m);\n if (!match) continue;\n const block = match[1];\n const lines = block.split(\"\\n\");\n for (const line of lines) {\n const tagMatch = line.match(/^[ \\t]*-[ \\t]*(.+)/);\n if (tagMatch) {\n const tag = tagMatch[1].trim().toLowerCase();\n if (tag) tags.add(tag);\n }\n }\n }\n return tags;\n}\n\nfunction jaccardSimilarity(a: Set<string>, b: Set<string>): number {\n if (a.size === 0 && b.size === 0) return 0;\n let intersection = 0;\n for (const tag of a) {\n if (b.has(tag)) intersection++;\n }\n const union = a.size + b.size - intersection;\n return union === 0 ? 0 : intersection / union;\n}\n\nfunction buildReason(\n semanticScore: number,\n tagScore: number,\n neighborScore: number,\n neighborCount: number,\n): string {\n const signals: Array<{ label: string; value: number }> = [\n { label: `Semantically similar (${semanticScore.toFixed(2)})`, value: semanticScore * SEMANTIC_WEIGHT },\n { label: `Shared tags (${tagScore.toFixed(2)} Jaccard)`, value: tagScore * TAG_WEIGHT },\n { label: `Linked by ${neighborCount} mutual connection${neighborCount !== 1 ? \"s\" : \"\"}`, value: neighborScore * NEIGHBOR_WEIGHT },\n ];\n signals.sort((a, b) => b.value - a.value);\n return signals[0].label;\n}\n\nfunction suggestedWikilink(vaultPath: string): string {\n const base = basename(vaultPath);\n const name = base.endsWith(\".md\") ? 
base.slice(0, -3) : base;\n return `[[${name}]]`;\n}\n\n/**\n * Proactively find notes worth linking to a given note, combining semantic similarity,\n * shared tags, and graph-neighborhood signals into a ranked list of suggestions.\n */\nexport async function zettelSuggest(\n backend: StorageBackend,\n opts: SuggestOptions,\n): Promise<Suggestion[]> {\n const limit = opts.limit ?? 5;\n const excludeLinked = opts.excludeLinked ?? true;\n\n // Step 1: get current outbound links\n const outboundLinks = await backend.getLinksFromSource(opts.notePath);\n const linkedPaths = new Set(outboundLinks.filter(l => l.targetPath !== null).map(l => l.targetPath as string));\n\n // Step 2a: get all file-level embeddings for semantic scoring\n const chunkRows = await backend.getChunksWithEmbeddings(opts.vaultProjectId, MAX_CHUNKS);\n\n const byPath = new Map<string, { sum: Float32Array; count: number }>();\n for (const row of chunkRows) {\n const vec = deserializeEmbedding(row.embedding);\n const entry = byPath.get(row.path);\n if (!entry) {\n byPath.set(row.path, { sum: new Float32Array(vec), count: 1 });\n } else {\n for (let i = 0; i < vec.length; i++) {\n entry.sum[i] += vec[i];\n }\n entry.count++;\n }\n }\n\n const allEmbeddings = new Map<string, Float32Array>();\n for (const [path, { sum, count }] of byPath) {\n const avg = new Float32Array(sum.length);\n for (let i = 0; i < sum.length; i++) {\n avg[i] = sum[i] / count;\n }\n allEmbeddings.set(path, avg);\n }\n allEmbeddings.delete(opts.notePath);\n\n // Step 2b: get source embedding\n const sourceEmbedding = allEmbeddings.get(opts.notePath) ?? 
null;\n\n // Step 2c: get source tags\n const sourceChunkTexts = await backend.getChunksForPath(opts.vaultProjectId, opts.notePath, 5);\n const sourceTags = extractTagsFromChunkTexts(sourceChunkTexts.map(r => r.text));\n\n // Step 2d: compute graph neighborhood (friends-of-friends)\n const directLinks = await backend.getLinksFromSource(opts.notePath);\n const directTargets = directLinks.filter(l => l.targetPath !== null).map(l => l.targetPath as string);\n\n const friendLinkCounts = new Map<string, number>();\n for (const target of directTargets) {\n const friendLinks = await backend.getLinksFromSource(target);\n for (const link of friendLinks) {\n if (link.targetPath && link.targetPath !== opts.notePath) {\n friendLinkCounts.set(link.targetPath, (friendLinkCounts.get(link.targetPath) ?? 0) + 1);\n }\n }\n }\n const maxFriendLinks = Math.max(1, ...friendLinkCounts.values());\n\n // Get all vault files to enumerate candidates\n const allFiles = await backend.getAllVaultFiles();\n\n const suggestions: Suggestion[] = [];\n\n for (const fileRow of allFiles) {\n const vault_path = fileRow.vaultPath;\n const title = fileRow.title;\n\n if (vault_path === opts.notePath) continue;\n if (excludeLinked && linkedPaths.has(vault_path)) continue;\n\n // Semantic score\n let semanticScore = 0;\n if (sourceEmbedding) {\n const candidateEmbedding = allEmbeddings.get(vault_path);\n if (candidateEmbedding) {\n semanticScore = Math.max(0, cosineSimilarity(sourceEmbedding, candidateEmbedding));\n }\n }\n\n // Tag score (only compute if candidate might have chunks)\n let tagScore = 0;\n if (allEmbeddings.has(vault_path)) {\n const candidateChunkTexts = await backend.getChunksForPath(opts.vaultProjectId, vault_path, 5);\n const candidateTags = extractTagsFromChunkTexts(candidateChunkTexts.map(r => r.text));\n tagScore = jaccardSimilarity(sourceTags, candidateTags);\n }\n\n // Neighbor score\n const friendCount = friendLinkCounts.get(vault_path) ?? 
0;\n const neighborScore = friendCount / maxFriendLinks;\n\n const score =\n SEMANTIC_WEIGHT * semanticScore +\n TAG_WEIGHT * tagScore +\n NEIGHBOR_WEIGHT * neighborScore;\n\n // Only include if there is at least some signal\n if (score <= 0) continue;\n\n const reason = buildReason(semanticScore, tagScore, neighborScore, friendCount);\n\n suggestions.push({\n path: vault_path,\n title,\n score,\n semanticScore,\n tagScore,\n neighborScore,\n reason,\n suggestedWikilink: suggestedWikilink(vault_path),\n });\n }\n\n suggestions.sort((a, b) => b.score - a.score);\n return suggestions.slice(0, limit);\n}\n"],"mappings":";;;;;AA2BA,SAAS,aAAa,QAAgB,QAA8C;AAClF,QAAO,QAAQ,OAAO,KAAK,QAAQ,OAAO,GAAG,eAAe;;AAG9D,eAAe,aAAa,SAAyB,WAA2C;CAE9F,MAAM,QAAQ,MAAM,QAAQ,qBAAqB,CAAC,UAAU,CAAC;AAC7D,KAAI,MAAM,SAAS,EAAG,QAAO,MAAM,GAAG;CAGtC,MAAM,QAAQ,MAAM,QAAQ,cAAc,UAAU;AACpD,KAAI,CAAC,MAAO,QAAO;CAEnB,MAAM,YAAY,MAAM,QAAQ,qBAAqB,CAAC,MAAM,cAAc,CAAC;AAC3E,QAAO,UAAU,SAAS,IAAI,UAAU,GAAG,YAAY;;AAGzD,eAAe,oBAAoB,SAAyB,MAAiC;AAE3F,SADc,MAAM,QAAQ,mBAAmB,KAAK,EACvC,QAAO,MAAK,EAAE,eAAe,KAAK,CAAC,KAAI,MAAK,EAAE,WAAqB;;AAGlF,eAAe,qBAAqB,SAAyB,MAAiC;AAE5F,SADc,MAAM,QAAQ,iBAAiB,KAAK,EACrC,KAAI,MAAK,EAAE,WAAW;;AAGrC,eAAe,YACb,SACA,MACsE;CACtE,MAAM,CAAC,OAAO,UAAU,MAAM,QAAQ,IAAI,CACxC,QAAQ,qBAAqB,CAAC,KAAK,CAAC,EACpC,QAAQ,eAAe,KAAK,CAC7B,CAAC;AAEF,QAAO;EACL,OAAO,MAAM,IAAI,SAAS;EAC1B,SAAS,QAAQ,gBAAgB;EACjC,UAAU,QAAQ,iBAAiB;EACpC;;;;;;AAOH,eAAsB,cAAc,SAAyB,MAA8C;CACzG,MAAM,QAAQ,KAAK,IAAI,KAAK,IAAI,KAAK,SAAS,GAAG,EAAE,EAAE,GAAG;CACxD,MAAM,YAAY,KAAK,aAAa;CACpC,MAAM,OAAO,KAAK,QAAQ;CAE1B,MAAM,OAAO,MAAM,aAAa,SAAS,KAAK,UAAU;AACxD,KAAI,CAAC,KACH,QAAO;EACL,MAAM,KAAK;EACX,OAAO,EAAE;EACT,OAAO,EAAE;EACT,iBAAiB,EAAE;EACnB,iBAAiB;EAClB;CAGH,MAAM,UAAU,IAAI,IAAY,CAAC,KAAK,CAAC;CACvC,MAAM,QAAuB,EAAE;CAC/B,MAAM,QAAiF,EAAE;CACzF,IAAI,kBAAkB;CAEtB,MAAM,QAAgD,CAAC;EAAE,MAAM;EAAM,OAAO;EAAG,CAAC;AAEhF,QAAO,MAAM,SAAS,GAAG;EACvB,MAAM,UAAU,MAAM,OAAO;AAE7B,MAAI,QAAQ,SAAS,OAAO;AAC1B,qBAAkB;AAClB;;EAGF,MAAM,YAAmE,EAAE;AAE3E,MAAI,cAAc,aA
Aa,cAAc,OAC3C,MAAK,MAAM,KAAK,MAAM,oBAAoB,SAAS,QAAQ,KAAK,CAC9D,WAAU,KAAK;GAAE,UAAU;GAAG,MAAM,QAAQ;GAAM,IAAI;GAAG,CAAC;AAI9D,MAAI,cAAc,cAAc,cAAc,OAC5C,MAAK,MAAM,KAAK,MAAM,qBAAqB,SAAS,QAAQ,KAAK,CAC/D,WAAU,KAAK;GAAE,UAAU;GAAG,MAAM;GAAG,IAAI,QAAQ;GAAM,CAAC;AAI9D,OAAK,MAAM,EAAE,UAAU,MAAM,QAAQ,WAAW;GAC9C,MAAM,WAAW,aAAa,MAAM,GAAG;AAEvC,OAAI,SAAS,SAAS,aAAa,KACjC;AAIF,OAAI,CADmB,MAAM,MAAM,MAAM,EAAE,SAAS,QAAQ,EAAE,OAAO,GAAG,CAEtE,OAAM,KAAK;IAAE;IAAM;IAAI,MAAM;IAAU,CAAC;AAG1C,OAAI,CAAC,QAAQ,IAAI,SAAS,EAAE;AAC1B,YAAQ,IAAI,SAAS;IAErB,MAAM,OAAO,MAAM,YAAY,SAAS,SAAS;AACjD,UAAM,KAAK;KACT,MAAM;KACN,OAAO,KAAK;KACZ,OAAO,QAAQ,QAAQ;KACvB,UAAU;KACV,SAAS,KAAK;KACd,UAAU,KAAK;KAChB,CAAC;AAEF,UAAM,KAAK;KAAE,MAAM;KAAU,OAAO,QAAQ,QAAQ;KAAG,CAAC;;;;CAK9D,MAAM,kBAAkB,MACrB,QAAQ,MAAM,EAAE,WAAW,EAAE,CAC7B,KAAK,MAAM,EAAE,KAAK;AAGrB,MADiB,MAAM,YAAY,SAAS,KAAK,EACpC,WAAW,EACtB,iBAAgB,QAAQ,KAAK;AAG/B,QAAO;EAAE;EAAM;EAAO;EAAO;EAAiB;EAAiB;;;;;ACtIjE,MAAMA,eAAa;AACnB,MAAM,cAAc;AAEpB,eAAe,kBACb,SACA,WACiE;CACjE,MAAM,OAAO,MAAM,QAAQ,wBAAwB,WAAWA,aAAW;CAEzE,MAAM,yBAAS,IAAI,KAAiE;AACpF,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,MAAM,qBAAqB,IAAI,UAAU;EAC/C,MAAM,QAAQ,OAAO,IAAI,IAAI,KAAK;AAClC,MAAI,CAAC,MACH,QAAO,IAAI,IAAI,MAAM;GAAE,KAAK,IAAI,aAAa,IAAI;GAAE,OAAO;GAAG,MAAM,IAAI;GAAM,CAAC;OACzE;AACL,QAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,IAC9B,OAAM,IAAI,MAAM,IAAI;AAEtB,SAAM;;;CAIV,MAAM,yBAAS,IAAI,KAAwD;AAC3E,MAAK,MAAM,CAAC,MAAM,EAAE,KAAK,OAAO,WAAW,QAAQ;EACjD,MAAM,MAAM,IAAI,aAAa,IAAI,OAAO;AACxC,OAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,IAC9B,KAAI,KAAK,IAAI,KAAK;AAEpB,SAAO,IAAI,MAAM;GAAE,WAAW;GAAK;GAAM,CAAC;;AAE5C,QAAO;;AAGT,eAAe,sBACb,SACA,WACA,MACsD;CACtD,MAAM,OAAO,MAAM,QAAQ,iBAAiB,WAAW,KAAK;AAE5D,KAAI,KAAK,WAAW,EAClB,QAAO;EAAE,WAAW,IAAI,aAAa,EAAE;EAAE,OAAO;EAAO;CAGzD,MAAM,UAAU,KAAK,QAAO,MAAK,EAAE,cAAc,KAAK;AACtD,KAAI,QAAQ,WAAW,EACrB,QAAO;EAAE,WAAW,IAAI,aAAa,EAAE;EAAE,OAAO;EAAO;CAGzD,MAAM,MAAM,qBAAqB,QAAQ,GAAG,UAAU,CAAC;CACvD,MAAM,MAAM,IAAI,aAAa,IAAI;AACjC,MAAK,MAAM,OAAO,SAAS;EACzB,MAAM,MAAM,qBAAqB,IAAI,UAAU;AAC/C,OAAK,IAAI,IAAI,GAAG,IAAI,K
AAK,IACvB,KAAI,MAAM,IAAI;;CAGlB,MAAM,MAAM,IAAI,aAAa,IAAI;AACjC,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,IACvB,KAAI,KAAK,IAAI,KAAK,QAAQ;AAE5B,QAAO;EAAE,WAAW;EAAK,OAAO;EAAM;;AAGxC,eAAe,iBAAiB,SAAyB,QAAgB,QAAiC;AACxG,KAAI,WAAW,OAAQ,QAAO;CAE9B,MAAM,UAAU,IAAI,IAAY,CAAC,OAAO,CAAC;CACzC,MAAM,QAA+C,CAAC;EAAE,MAAM;EAAQ,MAAM;EAAG,CAAC;AAEhF,QAAO,MAAM,SAAS,GAAG;EACvB,MAAM,EAAE,MAAM,SAAS,MAAM,OAAO;AACpC,MAAI,QAAQ,YAAa;EAEzB,MAAM,CAAC,cAAc,iBAAiB,MAAM,QAAQ,IAAI,CACtD,QAAQ,mBAAmB,KAAK,EAChC,QAAQ,iBAAiB,KAAK,CAC/B,CAAC;EAEF,MAAM,YAAsB,CAC1B,GAAG,aAAa,QAAO,MAAK,EAAE,eAAe,KAAK,CAAC,KAAI,MAAK,EAAE,WAAqB,EACnF,GAAG,cAAc,KAAI,MAAK,EAAE,WAAW,CACxC;AAED,OAAK,MAAM,YAAY,WAAW;AAChC,OAAI,aAAa,OAAQ,QAAO,OAAO;AACvC,OAAI,CAAC,QAAQ,IAAI,SAAS,EAAE;AAC1B,YAAQ,IAAI,SAAS;AACrB,UAAM,KAAK;KAAE,MAAM;KAAU,MAAM,OAAO;KAAG,CAAC;;;;AAKpD,QAAO;;AAGT,SAAS,iBACP,WACA,cACQ;CACR,MAAM,OAAO,UAAU,QAAO,MAAK,EAAE,cAAc,KAAK;AACxD,KAAI,KAAK,WAAW,EAAG,QAAO;CAE9B,IAAI,WAAW,KAAK,GAAG;CACvB,IAAI,UAAU;AAEd,MAAK,MAAM,OAAO,MAAM;EAEtB,MAAM,MAAM,iBAAiB,cADjB,qBAAqB,IAAI,UAAU,CACA;AAC/C,MAAI,MAAM,SAAS;AACjB,aAAU;AACV,cAAW,IAAI;;;AAInB,QAAO,SAAS,MAAM,CAAC,MAAM,GAAG,IAAI;;;;;;AAOtC,eAAsB,eACpB,SACA,MAC2B;CAC3B,MAAM,QAAQ,KAAK,SAAS;CAC5B,MAAM,gBAAgB,KAAK,iBAAiB;CAC5C,MAAM,mBAAmB,KAAK,oBAAoB;CAElD,IAAI,EAAE,WAAW,cAAc,UAAU,MAAM,sBAC7C,SACA,KAAK,gBACL,KAAK,cACN;AAGD,KAAI,CAAC,MAGH,gBAAe,MAAM,mBAFP,MAAM,QAAQ,qBAAqB,CAAC,KAAK,cAAc,CAAC,EACnD,IAAI,SAAS,KAAK,eACQ,KAAK;CAGpD,MAAM,oBAAoB,MAAM,kBAAkB,SAAS,KAAK,eAAe;AAG/E,mBAAkB,OAAO,KAAK,cAAc;CAG5C,MAAM,qBAA2D,EAAE;AACnE,MAAK,MAAM,CAAC,MAAM,EAAE,gBAAgB,mBAAmB;EACrD,MAAM,MAAM,iBAAiB,cAAc,UAAU;AACrD,MAAI,OAAO,cACT,oBAAmB,KAAK;GAAE;GAAM;GAAK,CAAC;;CAK1C,MAAM,UAA4B,EAAE;AAEpC,MAAK,MAAM,EAAE,MAAM,SAAS,oBAAoB;EAC9C,MAAM,gBAAgB,MAAM,iBAAiB,SAAS,KAAK,eAAe,KAAK;EAE/E,MAAM,oBAAoB,SAAS,cAAc,GAAG,gBAAgB;AACpE,MAAI,oBAAoB,iBAAkB;EAE1C,MAAM,QAAQ,MAAM,QAAQ,qBAAqB,CAAC,KAAK,CAAC;EACxD,MAAM,YAAY,MAAM,QAAQ,iBAAiB,KAAK,gBAAgB,MAAM,GAAG;EAE/E,MAAM,gBAAgB,MAAM,KAAK,KAAK,oBAAoB,EAAE;EAC5D,MAAM,gBAAgB,iBAAiB,WAAW,aAAa;AAE/D,U
AAQ,KAAK;GACX;GACA,OAAO,MAAM,IAAI,SAAS;GAC1B,kBAAkB;GAClB,eAAe,SAAS,cAAc,GAAG,gBAAgB;GACzD;GACA;GACD,CAAC;;AAGJ,SAAQ,MAAM,GAAG,MAAM,EAAE,gBAAgB,EAAE,cAAc;AACzD,QAAO,QAAQ,MAAM,GAAG,MAAM;;;;;;ACjKhC,SAAS,cAAc,WAA2B;CAChD,MAAM,QAAQ,UAAU,QAAQ,IAAI;AACpC,QAAO,UAAU,KAAK,YAAY,UAAU,MAAM,GAAG,MAAM;;;;;;AAO7D,eAAe,gBAAgB,SAAyB,OAAuC;AAC7F,KAAI,MAAM,SAAS,EAAG,QAAO,EAAE;CAC/B,MAAM,WAAW,MAAM,KAAK,MAAM;CAElC,MAAM,CAAC,cAAc,iBAAiB,MAAM,QAAQ,IAAI,CACtD,QAAQ,uBAAuB,SAAS,EACxC,QAAQ,IAAI,SAAS,KAAI,MAAK,QAAQ,iBAAiB,EAAE,CAAC,CAAC,CAC5D,CAAC;CAEF,MAAM,YAAsB,EAAE;AAC9B,MAAK,MAAM,QAAQ,aACjB,KAAI,KAAK,WAAY,WAAU,KAAK,KAAK,WAAW;AAEtD,MAAK,MAAM,YAAY,cACrB,MAAK,MAAM,QAAQ,SACjB,WAAU,KAAK,KAAK,WAAW;AAGnC,QAAO;;;;;AAMT,eAAe,aACb,SACA,OACA,gBACA,MACyB;CACzB,MAAM,aAAa,KAAK,cAAc;CACtC,MAAM,KAAK;CACX,MAAM,KAAK;CAEX,MAAM,CAAC,gBAAgB,mBAAmB,MAAM,QAAQ,IAAI,CAC1D,QAAQ,cAAc,OAAO;EAAE,GAAG;EAAM,YAAY;EAAI,CAAC,EACzD,QAAQ,eAAe,gBAAgB;EAAE,GAAG;EAAM,YAAY;EAAI,CAAC,CACpE,CAAC;AAEF,KAAI,eAAe,WAAW,KAAK,gBAAgB,WAAW,EAAG,QAAO,EAAE;CAE1E,MAAM,UAAU,MACd,GAAG,EAAE,UAAU,GAAG,EAAE,KAAK,GAAG,EAAE,UAAU,GAAG,EAAE;CAE/C,SAAS,gBAAgB,QAA4B;EACnD,MAAM,MAAM,KAAK,IAAI,GAAG,OAAO;EAE/B,MAAM,QADM,KAAK,IAAI,GAAG,OAAO,GACX;AACpB,MAAI,UAAU,EAAG,QAAO,OAAO,UAAU,EAAI;AAC7C,SAAO,OAAO,KAAI,OAAM,IAAI,OAAO,MAAM;;CAG3C,MAAM,SAAS,gBAAgB,eAAe,KAAI,MAAK,EAAE,MAAM,CAAC;CAChE,MAAM,UAAU,gBAAgB,gBAAgB,KAAI,MAAK,EAAE,MAAM,CAAC;CAElE,MAAM,2BAAW,IAAI,KAAuD;AAE5E,MAAK,IAAI,IAAI,GAAG,IAAI,eAAe,QAAQ,KAAK;EAC9C,MAAM,IAAI,eAAe;EACzB,MAAM,IAAI,OAAO,EAAE;AACnB,WAAS,IAAI,GAAG;GAAE,GAAG;GAAG,eAAe,KAAK,OAAO;GAAI,CAAC;;AAG1D,MAAK,IAAI,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK;EAC/C,MAAM,IAAI,gBAAgB;EAC1B,MAAM,IAAI,OAAO,EAAE;EACnB,MAAM,WAAW,SAAS,IAAI,EAAE;AAChC,MAAI,SACF,UAAS,iBAAiB,KAAK,QAAQ;MAEvC,UAAS,IAAI,GAAG;GAAE,GAAG;GAAG,eAAe,KAAK,QAAQ;GAAI,CAAC;;AAQ7D,QAJe,MAAM,KAAK,SAAS,QAAQ,CAAC,CACzC,MAAM,GAAG,MAAM,EAAE,gBAAgB,EAAE,cAAc,CACjD,MAAM,GAAG,WAAW,CAET,KAAI,OAAM;EAAE,GAAG;EAAG,OAAO,EAAE;EAAe,EAAE;;;;;;;AAY5D,eAAsB,eACpB,SACA,MACyB;CACzB,MAAM,QAAQ,KAAK,IAAI,KAAK,SAAS,GA
AG,EAAE;CAC1C,MAAM,QAAQ,KAAK,IAAI,KAAK,SAAS,IAAI,EAAE;CAC3C,MAAM,iBAAiB;CAKvB,MAAM,iBAAiB,MAAM,kBAAkB,KAAK,UAAU,KAAK;CAEnE,MAAM,gBAAgB,MAAM,aAC1B,SACA,KAAK,UACL,gBACA;EACE,YAAY,CAAC,KAAK,eAAe;EACjC,YAAY;EACb,CACF;CAGD,MAAM,6BAAa,IAAI,KAAiD;AACxE,MAAK,MAAM,KAAK,eAAe;EAC7B,MAAM,WAAW,WAAW,IAAI,EAAE,KAAK;AACvC,MAAI,CAAC,YAAY,EAAE,QAAQ,SAAS,MAClC,YAAW,IAAI,EAAE,MAAM;GAAE,OAAO,EAAE;GAAO,SAAS,EAAE;GAAS,CAAC;;CAOlE,MAAM,WAAW,IAAI,IAAY,WAAW,MAAM,CAAC;CACnD,IAAI,WAAW,IAAI,IAAY,WAAW,MAAM,CAAC;AAEjD,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,KAAK;EAC9B,MAAM,YAAY,MAAM,gBAAgB,SAAS,SAAS;EAC1D,MAAM,8BAAc,IAAI,KAAa;AACrC,OAAK,MAAM,KAAK,UACd,KAAI,CAAC,SAAS,IAAI,EAAE,EAAE;AACpB,YAAS,IAAI,EAAE;AACf,eAAY,IAAI,EAAE;;AAGtB,MAAI,YAAY,SAAS,EAAG;AAC5B,aAAW;;CAMb,MAAM,eAAe,MAAM,KAAK,WAAW,SAAS,CAAC,CAClD,MAAM,GAAG,MAAM,EAAE,GAAG,QAAQ,EAAE,GAAG,MAAM,CACvC,KAAK,CAAC,MAAM,WAAW;EAAE;EAAM,GAAG;EAAM,gBAAgB;EAAM,EAAE;CAEnE,MAAM,gBAAgB,MAAM,KAAK,SAAS,CAAC,QAAQ,MAAM,CAAC,WAAW,IAAI,EAAE,CAAC;CAG5E,MAAM,qBAAqB,MAAM,QAAQ,IACvC,cAAc,KAAI,MAAK,QAAQ,eAAe,EAAE,CAAC,CAClD;CACD,MAAM,iBAAiB,cACpB,KAAK,MAAM,SAAS;EACnB;EACA,OAAO;EACP,SAAS;EACT,SAAS,mBAAmB,MAAM,gBAAgB;EAClD,gBAAgB;EACjB,EAAE,CACF,MAAM,GAAG,MAAM,EAAE,UAAU,EAAE,QAAQ;CAExC,MAAM,qBAAqB,KAAK,IAAI,QAAQ,aAAa,QAAQ,EAAE;CACnE,MAAM,oBAAoB,eAAe,MAAM,GAAG,mBAAmB;CAErE,MAAM,sBAAsB,aAAa,MAAM,GAAG,MAAM;CACxD,MAAM,gBAAgB,IAAI,IAAY,CACpC,GAAG,oBAAoB,KAAK,MAAM,EAAE,KAAK,EACzC,GAAG,kBAAkB,KAAK,MAAM,EAAE,KAAK,CACxC,CAAC;CAOF,MAAM,mBAAmB,MAAM,KAAK,cAAc;CAClD,MAAM,WAAW,MAAM,QAAQ,qBAAqB,iBAAiB;CACrE,MAAM,WAAW,IAAI,IAA2B,SAAS,KAAI,MAAK,CAAC,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;CAE1F,MAAM,gBAAiD,EAAE;AAEzD,MAAK,MAAM,KAAK,qBAAqB;AACnC,MAAI,CAAC,cAAc,IAAI,EAAE,KAAK,CAAE;AAChC,gBAAc,KAAK;GACjB,MAAM,EAAE;GACR,OAAO,SAAS,IAAI,EAAE,KAAK,IAAI;GAC/B,SAAS,EAAE;GACX,OAAO,EAAE;GACT,QAAQ,cAAc,EAAE,KAAK;GAC9B,CAAC;;AAGJ,MAAK,MAAM,KAAK,kBACd,eAAc,KAAK;EACjB,MAAM,EAAE;EACR,OAAO,SAAS,IAAI,EAAE,KAAK,IAAI;EAC/B,SAAS,EAAE;EACX,OAAO;EACP,QAAQ,cAAc,EAAE,KAAK;EAC9B,CAAC;CAMJ,IAAI,cAAoC,EAAE;AAE1C,KAAI,cAAc,OAAO,GAA
G;EAC1B,MAAM,WAAW,MAAM,KAAK,cAAc;EAC1C,MAAM,UAAU,IAAI,IAAI,SAAS;EAGjC,MAAM,WAAW,MAAM,QAAQ,uBAAuB,SAAS;EAG/D,MAAM,6BAAa,IAAI,KAAqB;AAC5C,OAAK,MAAM,QAAQ,SACjB,KAAI,KAAK,cAAc,QAAQ,IAAI,KAAK,WAAW,EAAE;GACnD,MAAM,MAAM,GAAG,KAAK,WAAW,KAAK,KAAK;AACzC,cAAW,IAAI,MAAM,WAAW,IAAI,IAAI,IAAI,KAAK,EAAE;;AAIvD,OAAK,MAAM,CAAC,KAAK,QAAQ,YAAY;GACnC,MAAM,CAAC,YAAY,cAAc,IAAI,MAAM,MAAM;AACjD,eAAY,KAAK;IACf,UAAU;IACV,QAAQ;IACR,YAAY,cAAc,WAAW;IACrC,UAAU,cAAc,WAAW;IACnC,UAAU;IACX,CAAC;;;CAON,MAAM,YAAY,IAAI,IAAY,cAAc,KAAK,MAAM,EAAE,OAAO,CAAC;CACrE,MAAM,UAAU,MAAM,KAAK,UAAU,CAAC,MAAM;CAE5C,MAAM,yBAAyB,YAAY,QACxC,MAAM,EAAE,eAAe,EAAE,SAC3B;CAKD,MAAM,eAAe,cAClB,KAAK,GAAG,MAAM;EACb,MAAM,QAAQ,EAAE,QAAQ,IAAI,EAAE,MAAM,KAAK;EACzC,MAAM,SAAS,EAAE;EACjB,MAAM,aAAa,EAAE,QAAQ,IAAI,gBAAgB,EAAE,MAAM,QAAQ,EAAE,CAAC,KAAK;EACzE,MAAM,UAAU,EAAE,QAAQ,MAAM,CAAC,MAAM,GAAG,IAAI;AAC9C,SAAO,GAAG,IAAI,EAAE,KAAK,OAAO,IAAI,QAAQ,WAAW,aAAa,EAAE,KAAK,QAAQ,QAAQ;GACvF,CACD,KAAK,OAAO;CAEf,MAAM,oBACJ,uBAAuB,SAAS,IAC5B,uBACG,KACE,MACC,MAAM,EAAE,SAAS,KAAK,EAAE,WAAW,OAAO,EAAE,OAAO,KAAK,EAAE,SAAS,eAAe,EAAE,SAAS,GAChG,CACA,KAAK,KAAK,GACb;CAEN,MAAM,aAAa,QAAQ,KAAK,KAAK;AAgCrC,QAAO;EACL;EACA,aAAa;EACb;EACA,iBAlCsB;;YAEd,KAAK,SAAS;;;;kBAIR,cAAc,OAAO,gBAAgB,QAAQ,OAAO,cAAc,WAAW;;EAE7F,aAAa;;;;;;EAMb,kBAAkB;;;;;;;;;gFAS4D,WAAW;;;;;;EAYxF;;;;;AClVH,SAAS,gBAAgB,OAAiB,OAA0D;AAClG,KAAI,MAAM,WAAW,EAAG,QAAO;CAE/B,MAAM,yBAAS,IAAI,KAAqB;CACxC,MAAM,uBAAO,IAAI,KAAqB;AAEtC,MAAK,MAAM,KAAK,OAAO;AACrB,SAAO,IAAI,GAAG,EAAE;AAChB,OAAK,IAAI,GAAG,EAAE;;CAGhB,SAAS,KAAK,GAAmB;EAC/B,IAAI,OAAO;AACX,SAAO,OAAO,IAAI,KAAK,KAAK,KAC1B,QAAO,OAAO,IAAI,KAAK;EAEzB,IAAI,UAAU;AACd,SAAO,YAAY,MAAM;GACvB,MAAM,OAAO,OAAO,IAAI,QAAQ;AAChC,UAAO,IAAI,SAAS,KAAK;AACzB,aAAU;;AAEZ,SAAO;;CAGT,SAAS,MAAM,GAAW,GAAiB;EACzC,MAAM,KAAK,KAAK,EAAE;EAClB,MAAM,KAAK,KAAK,EAAE;AAClB,MAAI,OAAO,GAAI;EACf,MAAM,QAAQ,KAAK,IAAI,GAAG,IAAI;EAC9B,MAAM,QAAQ,KAAK,IAAI,GAAG,IAAI;AAC9B,MAAI,QAAQ,MACV,QAAO,IAAI,IAAI,GAAG;WACT,QAAQ,MACjB,QAAO,IAAI,IAAI,GAAG;OACb;AACL,UAAO,IAAI,IAAI,GAAG;AAClB,QAAK,IAAI,IAAI,QAAQ,EAA
E;;;AAI3B,MAAK,MAAM,EAAE,QAAQ,YAAY,MAC/B,KAAI,OAAO,IAAI,OAAO,IAAI,OAAO,IAAI,OAAO,CAC1C,OAAM,QAAQ,OAAO;CAIzB,MAAM,wBAAQ,IAAI,KAAa;AAC/B,MAAK,MAAM,KAAK,MACd,OAAM,IAAI,KAAK,EAAE,CAAC;AAEpB,QAAO,MAAM;;;;;AAMf,eAAsB,aAAa,SAAyB,MAA6C;CACvG,MAAM,UAAU,QAAQ,EAAE;CAC1B,MAAM,QAAQ,QAAQ,SAAS;CAC/B,MAAM,UAAU,QAAQ,WAAW;EAAC;EAAc;EAAW;EAAgB;EAAmB;CAEhG,MAAM,aAAa,KAAK,KAAK;CAG7B,IAAI,aAAa;AACjB,KAAI,UAAU,OACZ,cAAa,MAAM,QAAQ,iBAAiB;UACnC,UAAU,WAAW;EAC9B,MAAM,SAAS,QAAQ,eAAe;AACtC,eAAa,MAAM,QAAQ,0BAA0B,OAAO;QACvD;EAEL,MAAM,SAAS,cADF,QAAQ,cAAc,MACA;AACnC,eAAa,MAAM,QAAQ,qBAAqB,OAAO;;CAIzD,IAAI,aAAa;AACjB,KAAI,UAAU,OAGZ,eADc,MAAM,QAAQ,mBAAmB,EAC5B;UACV,UAAU,WAAW;EAC9B,MAAM,SAAS,QAAQ,eAAe;AACtC,eAAa,MAAM,QAAQ,0BAA0B,OAAO;QACvD;EAEL,MAAM,SAAS,cADF,QAAQ,cAAc,MACA;AACnC,eAAa,MAAM,QAAQ,qBAAqB,OAAO;;CAIzD,IAAI,YAAwB,EAAE;AAC9B,KAAI,QAAQ,SAAS,aAAa,CAChC,KAAI,UAAU,OACZ,aAAY,MAAM,QAAQ,6BAA6B;UAC9C,UAAU,WAAW;EAC9B,MAAM,SAAS,QAAQ,eAAe;AACtC,cAAY,MAAM,QAAQ,uBAAuB,OAAO;QACnD;EAEL,MAAM,SAAS,cADF,QAAQ,cAAc,MACA;AACnC,cAAY,MAAM,QAAQ,kBAAkB,OAAO;;CAKvD,IAAI,UAAoB,EAAE;AAC1B,KAAI,QAAQ,SAAS,UAAU,CAC7B,KAAI,UAAU,OAEZ,YADmB,MAAM,QAAQ,YAAY,EACxB,KAAI,MAAK,EAAE,UAAU;UACjC,UAAU,WAAW;EAC9B,MAAM,SAAS,QAAQ,eAAe;AACtC,YAAU,MAAM,QAAQ,qBAAqB,OAAO;QAC/C;EAEL,MAAM,SAAS,cADF,QAAQ,cAAc,MACA;AACnC,YAAU,MAAM,QAAQ,gBAAgB,OAAO;;CAKnD,IAAI,uBAAuB;AAC3B,KAAI,QAAQ,SAAS,eAAe,EAAE;EACpC,IAAI;EACJ,IAAI;AAEJ,MAAI,UAAU,OACZ,EAAC,UAAU,YAAY,MAAM,QAAQ,IAAI,CACvC,QAAQ,sBAAsB,EAC9B,QAAQ,mBAAmB,CAC5B,CAAC;WACO,UAAU,WAAW;GAC9B,MAAM,SAAS,QAAQ,eAAe;AACtC,IAAC,UAAU,YAAY,MAAM,QAAQ,IAAI,CACvC,QAAQ,4BAA4B,OAAO,EAC3C,QAAQ,4BAA4B,OAAO,CAC5C,CAAC;SACG;GAEL,MAAM,SAAS,cADF,QAAQ,cAAc,MACA;AACnC,IAAC,UAAU,YAAY,MAAM,QAAQ,IAAI,CACvC,QAAQ,uBAAuB,OAAO,EACtC,QAAQ,uBAAuB,OAAO,CACvC,CAAC;;AAGJ,yBAAuB,gBAAgB,UAAU,SAAS;;CAI5D,IAAI,kBAA4B,EAAE;AAClC,KAAI,QAAQ,SAAS,mBAAmB,CACtC,KAAI,UAAU,OACZ,mBAAkB,MAAM,QAAQ,oBAAoB;UAC3C,UAAU,WAAW;EAC9B,MAAM,SAAS,QAAQ,eAAe;AACtC,oBAAkB,MAAM,QAAQ,6BAA6B,OAAO;QAC/D;EAEL,MAAM,SAAS,cADF,QAAQ,cAAc,MACA;AACnC,oBAAkB,MAAM,QAAQ,wBAAwB,OA
AO;;CAKnE,MAAM,YAAY,aAAa,IAAI,UAAU,SAAS,aAAa;CACnE,MAAM,cAAc,aAAa,IAAI,QAAQ,SAAS,aAAa;CACnE,MAAM,eAAe,aAAa,IAAI,gBAAgB,SAAS,aAAa;CAC5E,MAAM,cAAc,KAAK,MACvB,OAAO,IAAI,cAAc,IAAI,cAAc,OAAQ,IAAI,eAAe,IACvE;AAED,QAAO;EACL;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACD;;;;;AC3LH,MAAM,aAAa;AACnB,MAAM,kBAAkB;AACxB,MAAM,aAAa;AACnB,MAAM,kBAAkB;AAIxB,SAAS,0BAA0B,OAA8B;CAC/D,MAAM,uBAAO,IAAI,KAAa;AAC9B,MAAK,MAAM,QAAQ,OAAO;EAExB,MAAM,QAAQ,KAAK,MAAM,wCAAwC;AACjE,MAAI,CAAC,MAAO;EAEZ,MAAM,QADQ,MAAM,GACA,MAAM,KAAK;AAC/B,OAAK,MAAM,QAAQ,OAAO;GACxB,MAAM,WAAW,KAAK,MAAM,qBAAqB;AACjD,OAAI,UAAU;IACZ,MAAM,MAAM,SAAS,GAAG,MAAM,CAAC,aAAa;AAC5C,QAAI,IAAK,MAAK,IAAI,IAAI;;;;AAI5B,QAAO;;AAGT,SAAS,kBAAkB,GAAgB,GAAwB;AACjE,KAAI,EAAE,SAAS,KAAK,EAAE,SAAS,EAAG,QAAO;CACzC,IAAI,eAAe;AACnB,MAAK,MAAM,OAAO,EAChB,KAAI,EAAE,IAAI,IAAI,CAAE;CAElB,MAAM,QAAQ,EAAE,OAAO,EAAE,OAAO;AAChC,QAAO,UAAU,IAAI,IAAI,eAAe;;AAG1C,SAAS,YACP,eACA,UACA,eACA,eACQ;CACR,MAAM,UAAmD;EACvD;GAAE,OAAO,yBAAyB,cAAc,QAAQ,EAAE,CAAC;GAAI,OAAO,gBAAgB;GAAiB;EACvG;GAAE,OAAO,gBAAgB,SAAS,QAAQ,EAAE,CAAC;GAAY,OAAO,WAAW;GAAY;EACvF;GAAE,OAAO,aAAa,cAAc,oBAAoB,kBAAkB,IAAI,MAAM;GAAM,OAAO,gBAAgB;GAAiB;EACnI;AACD,SAAQ,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AACzC,QAAO,QAAQ,GAAG;;AAGpB,SAAS,kBAAkB,WAA2B;CACpD,MAAM,OAAO,SAAS,UAAU;AAEhC,QAAO,KADM,KAAK,SAAS,MAAM,GAAG,KAAK,MAAM,GAAG,GAAG,GAAG,KACvC;;;;;;AAOnB,eAAsB,cACpB,SACA,MACuB;CACvB,MAAM,QAAQ,KAAK,SAAS;CAC5B,MAAM,gBAAgB,KAAK,iBAAiB;CAG5C,MAAM,gBAAgB,MAAM,QAAQ,mBAAmB,KAAK,SAAS;CACrE,MAAM,cAAc,IAAI,IAAI,cAAc,QAAO,MAAK,EAAE,eAAe,KAAK,CAAC,KAAI,MAAK,EAAE,WAAqB,CAAC;CAG9G,MAAM,YAAY,MAAM,QAAQ,wBAAwB,KAAK,gBAAgB,WAAW;CAExF,MAAM,yBAAS,IAAI,KAAmD;AACtE,MAAK,MAAM,OAAO,WAAW;EAC3B,MAAM,MAAM,qBAAqB,IAAI,UAAU;EAC/C,MAAM,QAAQ,OAAO,IAAI,IAAI,KAAK;AAClC,MAAI,CAAC,MACH,QAAO,IAAI,IAAI,MAAM;GAAE,KAAK,IAAI,aAAa,IAAI;GAAE,OAAO;GAAG,CAAC;OACzD;AACL,QAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,IAC9B,OAAM,IAAI,MAAM,IAAI;AAEtB,SAAM;;;CAIV,MAAM,gCAAgB,IAAI,KAA2B;AACrD,MAAK,MAAM,CAAC,MAAM,EAAE,KAAK,YAAY,QAAQ;EAC3C,MAAM,MAAM,IAAI,aAAa,IAAI,OAAO;AACxC,OAAK,IA
AI,IAAI,GAAG,IAAI,IAAI,QAAQ,IAC9B,KAAI,KAAK,IAAI,KAAK;AAEpB,gBAAc,IAAI,MAAM,IAAI;;AAE9B,eAAc,OAAO,KAAK,SAAS;CAGnC,MAAM,kBAAkB,cAAc,IAAI,KAAK,SAAS,IAAI;CAI5D,MAAM,aAAa,2BADM,MAAM,QAAQ,iBAAiB,KAAK,gBAAgB,KAAK,UAAU,EAAE,EAChC,KAAI,MAAK,EAAE,KAAK,CAAC;CAI/E,MAAM,iBADc,MAAM,QAAQ,mBAAmB,KAAK,SAAS,EACjC,QAAO,MAAK,EAAE,eAAe,KAAK,CAAC,KAAI,MAAK,EAAE,WAAqB;CAErG,MAAM,mCAAmB,IAAI,KAAqB;AAClD,MAAK,MAAM,UAAU,eAAe;EAClC,MAAM,cAAc,MAAM,QAAQ,mBAAmB,OAAO;AAC5D,OAAK,MAAM,QAAQ,YACjB,KAAI,KAAK,cAAc,KAAK,eAAe,KAAK,SAC9C,kBAAiB,IAAI,KAAK,aAAa,iBAAiB,IAAI,KAAK,WAAW,IAAI,KAAK,EAAE;;CAI7F,MAAM,iBAAiB,KAAK,IAAI,GAAG,GAAG,iBAAiB,QAAQ,CAAC;CAGhE,MAAM,WAAW,MAAM,QAAQ,kBAAkB;CAEjD,MAAM,cAA4B,EAAE;AAEpC,MAAK,MAAM,WAAW,UAAU;EAC9B,MAAM,aAAa,QAAQ;EAC3B,MAAM,QAAQ,QAAQ;AAEtB,MAAI,eAAe,KAAK,SAAU;AAClC,MAAI,iBAAiB,YAAY,IAAI,WAAW,CAAE;EAGlD,IAAI,gBAAgB;AACpB,MAAI,iBAAiB;GACnB,MAAM,qBAAqB,cAAc,IAAI,WAAW;AACxD,OAAI,mBACF,iBAAgB,KAAK,IAAI,GAAG,iBAAiB,iBAAiB,mBAAmB,CAAC;;EAKtF,IAAI,WAAW;AACf,MAAI,cAAc,IAAI,WAAW,CAG/B,YAAW,kBAAkB,YADP,2BADM,MAAM,QAAQ,iBAAiB,KAAK,gBAAgB,YAAY,EAAE,EAC1B,KAAI,MAAK,EAAE,KAAK,CAAC,CAC9B;EAIzD,MAAM,cAAc,iBAAiB,IAAI,WAAW,IAAI;EACxD,MAAM,gBAAgB,cAAc;EAEpC,MAAM,QACJ,kBAAkB,gBAClB,aAAa,WACb,kBAAkB;AAGpB,MAAI,SAAS,EAAG;EAEhB,MAAM,SAAS,YAAY,eAAe,UAAU,eAAe,YAAY;AAE/E,cAAY,KAAK;GACf,MAAM;GACN;GACA;GACA;GACA;GACA;GACA;GACA,mBAAmB,kBAAkB,WAAW;GACjD,CAAC;;AAGJ,aAAY,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AAC7C,QAAO,YAAY,MAAM,GAAG,MAAM"}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@tekmidian/pai",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.6.0",
|
|
4
4
|
"description": "PAI Knowledge OS — Personal AI Infrastructure with federated memory and project management",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "dist/index.mjs",
|
|
@@ -43,7 +43,6 @@
|
|
|
43
43
|
"homepage": "https://github.com/mnott/PAI",
|
|
44
44
|
"bin": {
|
|
45
45
|
"pai": "dist/cli/index.mjs",
|
|
46
|
-
"pai-mcp": "dist/mcp/index.mjs",
|
|
47
46
|
"pai-daemon": "dist/daemon/index.mjs",
|
|
48
47
|
"pai-daemon-mcp": "dist/daemon-mcp/index.mjs"
|
|
49
48
|
},
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
/**
 * Entry point for the project-utils/ sub-module directory.
 * Re-exports the full public API.
 */

// Path encoding, Notes/sessions directory discovery & creation (paths.ts)
export {
  PAI_DIR,
  PROJECTS_DIR,
  isProbeSession,
  encodePath,
  getProjectDir,
  getNotesDir,
  findNotesDir,
  getSessionsDir,
  getSessionsDirFromProjectDir,
  ensureNotesDir,
  ensureNotesDirSmart,
  ensureSessionsDir,
  ensureSessionsDirFromProjectDir,
  moveSessionFilesToSessionsDir,
  findTodoPath,
  findClaudeMdPath,
  findAllClaudeMdPaths,
} from "./paths.js";

// Push notifications — WhatsApp-aware with ntfy.sh fallback (notify.ts)
export { isWhatsAppEnabled, sendNtfyNotification } from "./notify.js";

// Session note lifecycle: create, checkpoint, rename, finalize (session-notes.ts)
export {
  getNextNoteNumber,
  getCurrentNotePath,
  createSessionNote,
  appendCheckpoint,
  addWorkToSessionNote,
  sanitizeForFilename,
  extractMeaningfulName,
  renameSessionNote,
  updateSessionNoteTitle,
  finalizeSessionNote,
} from "./session-notes.js";
export type { WorkItem } from "./session-notes.js";

// Token accounting for session transcripts (tokens.ts)
export { calculateSessionTokens } from "./tokens.js";

// TODO.md management (todo.ts)
export {
  ensureTodoMd,
  updateTodoMd,
  addTodoCheckpoint,
  updateTodoContinue,
} from "./todo.js";
export type { TodoItem } from "./todo.js";
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Push notification helpers — WhatsApp-aware with ntfy.sh fallback.
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import { existsSync, readFileSync } from 'fs';
|
|
6
|
+
import { join } from 'path';
|
|
7
|
+
import { homedir } from 'os';
|
|
8
|
+
|
|
9
|
+
/**
|
|
10
|
+
* Check if a messaging MCP server (AIBroker, Whazaa, or Telex) is configured.
|
|
11
|
+
* When any messaging server is active, the AI handles notifications via MCP
|
|
12
|
+
* and ntfy is skipped to avoid duplicates.
|
|
13
|
+
*/
|
|
14
|
+
export function isWhatsAppEnabled(): boolean {
|
|
15
|
+
try {
|
|
16
|
+
const settingsPath = join(homedir(), '.claude', 'settings.json');
|
|
17
|
+
if (!existsSync(settingsPath)) return false;
|
|
18
|
+
|
|
19
|
+
const settings = JSON.parse(readFileSync(settingsPath, 'utf-8'));
|
|
20
|
+
const enabled: string[] = settings.enabledMcpjsonServers || [];
|
|
21
|
+
return enabled.includes('aibroker') || enabled.includes('whazaa') || enabled.includes('telex');
|
|
22
|
+
} catch {
|
|
23
|
+
return false;
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
/**
|
|
28
|
+
* Send push notification — WhatsApp-aware with ntfy fallback.
|
|
29
|
+
*
|
|
30
|
+
* When WhatsApp (Whazaa) is enabled in MCP config, ntfy is SKIPPED
|
|
31
|
+
* because the AI sends WhatsApp messages directly via MCP.
|
|
32
|
+
* When WhatsApp is NOT configured, ntfy fires as the fallback channel.
|
|
33
|
+
*/
|
|
34
|
+
export async function sendNtfyNotification(message: string, retries = 2): Promise<boolean> {
|
|
35
|
+
if (isWhatsAppEnabled()) {
|
|
36
|
+
console.error(`WhatsApp (Whazaa) enabled in MCP config — skipping ntfy`);
|
|
37
|
+
return true;
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
const topic = process.env.NTFY_TOPIC;
|
|
41
|
+
|
|
42
|
+
if (!topic) {
|
|
43
|
+
console.error('NTFY_TOPIC not set and WhatsApp not active — notifications disabled');
|
|
44
|
+
return false;
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
for (let attempt = 0; attempt <= retries; attempt++) {
|
|
48
|
+
try {
|
|
49
|
+
const response = await fetch(`https://ntfy.sh/${topic}`, {
|
|
50
|
+
method: 'POST',
|
|
51
|
+
body: message,
|
|
52
|
+
headers: {
|
|
53
|
+
'Title': 'Claude Code',
|
|
54
|
+
'Priority': 'default',
|
|
55
|
+
},
|
|
56
|
+
});
|
|
57
|
+
|
|
58
|
+
if (response.ok) {
|
|
59
|
+
console.error(`ntfy.sh notification sent (WhatsApp inactive): "${message}"`);
|
|
60
|
+
return true;
|
|
61
|
+
} else {
|
|
62
|
+
console.error(`ntfy.sh attempt ${attempt + 1} failed: ${response.status}`);
|
|
63
|
+
}
|
|
64
|
+
} catch (error) {
|
|
65
|
+
console.error(`ntfy.sh attempt ${attempt + 1} error: ${error}`);
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
if (attempt < retries) {
|
|
69
|
+
await new Promise(resolve => setTimeout(resolve, 1000));
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
console.error('ntfy.sh notification failed after all retries');
|
|
74
|
+
return false;
|
|
75
|
+
}
|
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Path utilities — encoding, Notes/Sessions directory discovery and creation.
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import { existsSync, mkdirSync, readdirSync, renameSync } from 'fs';
|
|
6
|
+
import { join, basename } from 'path';
|
|
7
|
+
import { PAI_DIR } from '../pai-paths.js';
|
|
8
|
+
|
|
9
|
+
// Re-export PAI_DIR for consumers
export { PAI_DIR };

// Central project registry: one subdirectory per project, named by its
// encoded working-directory path (see encodePath / getProjectDir below).
export const PROJECTS_DIR = join(PAI_DIR, 'projects');
|
|
12
|
+
|
|
13
|
+
/**
|
|
14
|
+
* Directories known to be automated health-check / probe sessions.
|
|
15
|
+
* Hooks should exit early for these to avoid registry clutter and wasted work.
|
|
16
|
+
*/
|
|
17
|
+
const PROBE_CWD_PATTERNS = [
|
|
18
|
+
'/CodexBar/ClaudeProbe',
|
|
19
|
+
'/ClaudeProbe',
|
|
20
|
+
];
|
|
21
|
+
|
|
22
|
+
/**
|
|
23
|
+
* Check if the current working directory belongs to a probe/health-check session.
|
|
24
|
+
* Returns true if hooks should skip this session entirely.
|
|
25
|
+
*/
|
|
26
|
+
export function isProbeSession(cwd?: string): boolean {
|
|
27
|
+
const dir = cwd || process.cwd();
|
|
28
|
+
return PROBE_CWD_PATTERNS.some(pattern => dir.includes(pattern));
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
/**
|
|
32
|
+
* Encode a path the same way Claude Code does:
|
|
33
|
+
* - Replace / with -
|
|
34
|
+
* - Replace . with -
|
|
35
|
+
* - Replace space with -
|
|
36
|
+
*/
|
|
37
|
+
export function encodePath(path: string): string {
|
|
38
|
+
return path
|
|
39
|
+
.replace(/\//g, '-')
|
|
40
|
+
.replace(/\./g, '-')
|
|
41
|
+
.replace(/ /g, '-');
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
/** Get the project directory for a given working directory. */
|
|
45
|
+
export function getProjectDir(cwd: string): string {
|
|
46
|
+
const encoded = encodePath(cwd);
|
|
47
|
+
return join(PROJECTS_DIR, encoded);
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
/** Get the Notes directory for a project (central location). */
|
|
51
|
+
export function getNotesDir(cwd: string): string {
|
|
52
|
+
return join(getProjectDir(cwd), 'Notes');
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
/**
|
|
56
|
+
* Find Notes directory — checks local first, falls back to central.
|
|
57
|
+
* Does NOT create the directory.
|
|
58
|
+
*/
|
|
59
|
+
export function findNotesDir(cwd: string): { path: string; isLocal: boolean } {
|
|
60
|
+
const cwdBasename = basename(cwd).toLowerCase();
|
|
61
|
+
if (cwdBasename === 'notes' && existsSync(cwd)) {
|
|
62
|
+
return { path: cwd, isLocal: true };
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
const localPaths = [
|
|
66
|
+
join(cwd, 'Notes'),
|
|
67
|
+
join(cwd, 'notes'),
|
|
68
|
+
join(cwd, '.claude', 'Notes'),
|
|
69
|
+
];
|
|
70
|
+
|
|
71
|
+
for (const path of localPaths) {
|
|
72
|
+
if (existsSync(path)) {
|
|
73
|
+
return { path, isLocal: true };
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
return { path: getNotesDir(cwd), isLocal: false };
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
/** Get the sessions/ directory for a project (stores .jsonl transcripts). */
|
|
81
|
+
export function getSessionsDir(cwd: string): string {
|
|
82
|
+
return join(getProjectDir(cwd), 'sessions');
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
/** Get the sessions/ directory from a project directory path. */
|
|
86
|
+
export function getSessionsDirFromProjectDir(projectDir: string): string {
|
|
87
|
+
return join(projectDir, 'sessions');
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
// ---------------------------------------------------------------------------
|
|
91
|
+
// Directory creation helpers
|
|
92
|
+
// ---------------------------------------------------------------------------
|
|
93
|
+
|
|
94
|
+
/** Ensure the Notes directory exists for a project. @deprecated Use ensureNotesDirSmart() */
|
|
95
|
+
export function ensureNotesDir(cwd: string): string {
|
|
96
|
+
const notesDir = getNotesDir(cwd);
|
|
97
|
+
if (!existsSync(notesDir)) {
|
|
98
|
+
mkdirSync(notesDir, { recursive: true });
|
|
99
|
+
console.error(`Created Notes directory: ${notesDir}`);
|
|
100
|
+
}
|
|
101
|
+
return notesDir;
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
/**
|
|
105
|
+
* Smart Notes directory handling:
|
|
106
|
+
* - If local Notes/ exists → use it (don't create anything new)
|
|
107
|
+
* - If no local Notes/ → ensure central exists and use that
|
|
108
|
+
*/
|
|
109
|
+
export function ensureNotesDirSmart(cwd: string): { path: string; isLocal: boolean } {
|
|
110
|
+
const found = findNotesDir(cwd);
|
|
111
|
+
if (found.isLocal) return found;
|
|
112
|
+
if (!existsSync(found.path)) {
|
|
113
|
+
mkdirSync(found.path, { recursive: true });
|
|
114
|
+
console.error(`Created central Notes directory: ${found.path}`);
|
|
115
|
+
}
|
|
116
|
+
return found;
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
/** Ensure the sessions/ directory exists for a project. */
|
|
120
|
+
export function ensureSessionsDir(cwd: string): string {
|
|
121
|
+
const sessionsDir = getSessionsDir(cwd);
|
|
122
|
+
if (!existsSync(sessionsDir)) {
|
|
123
|
+
mkdirSync(sessionsDir, { recursive: true });
|
|
124
|
+
console.error(`Created sessions directory: ${sessionsDir}`);
|
|
125
|
+
}
|
|
126
|
+
return sessionsDir;
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
/** Ensure the sessions/ directory exists (from project dir path). */
|
|
130
|
+
export function ensureSessionsDirFromProjectDir(projectDir: string): string {
|
|
131
|
+
const sessionsDir = getSessionsDirFromProjectDir(projectDir);
|
|
132
|
+
if (!existsSync(sessionsDir)) {
|
|
133
|
+
mkdirSync(sessionsDir, { recursive: true });
|
|
134
|
+
console.error(`Created sessions directory: ${sessionsDir}`);
|
|
135
|
+
}
|
|
136
|
+
return sessionsDir;
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
/**
|
|
140
|
+
* Move all .jsonl session files from project root to sessions/ subdirectory.
|
|
141
|
+
* Returns the number of files moved.
|
|
142
|
+
*/
|
|
143
|
+
export function moveSessionFilesToSessionsDir(
|
|
144
|
+
projectDir: string,
|
|
145
|
+
excludeFile?: string,
|
|
146
|
+
silent = false
|
|
147
|
+
): number {
|
|
148
|
+
const sessionsDir = ensureSessionsDirFromProjectDir(projectDir);
|
|
149
|
+
|
|
150
|
+
if (!existsSync(projectDir)) return 0;
|
|
151
|
+
|
|
152
|
+
const files = readdirSync(projectDir);
|
|
153
|
+
let movedCount = 0;
|
|
154
|
+
|
|
155
|
+
for (const file of files) {
|
|
156
|
+
if (file.endsWith('.jsonl') && file !== excludeFile) {
|
|
157
|
+
const sourcePath = join(projectDir, file);
|
|
158
|
+
const destPath = join(sessionsDir, file);
|
|
159
|
+
try {
|
|
160
|
+
renameSync(sourcePath, destPath);
|
|
161
|
+
if (!silent) console.error(`Moved ${file} → sessions/`);
|
|
162
|
+
movedCount++;
|
|
163
|
+
} catch (error) {
|
|
164
|
+
if (!silent) console.error(`Could not move ${file}: ${error}`);
|
|
165
|
+
}
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
return movedCount;
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
// ---------------------------------------------------------------------------
|
|
173
|
+
// CLAUDE.md / TODO.md discovery
|
|
174
|
+
// ---------------------------------------------------------------------------
|
|
175
|
+
|
|
176
|
+
/** Find TODO.md — check local first, fallback to central. */
|
|
177
|
+
export function findTodoPath(cwd: string): string {
|
|
178
|
+
const localPaths = [
|
|
179
|
+
join(cwd, 'TODO.md'),
|
|
180
|
+
join(cwd, 'notes', 'TODO.md'),
|
|
181
|
+
join(cwd, 'Notes', 'TODO.md'),
|
|
182
|
+
join(cwd, '.claude', 'TODO.md'),
|
|
183
|
+
];
|
|
184
|
+
|
|
185
|
+
for (const path of localPaths) {
|
|
186
|
+
if (existsSync(path)) return path;
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
return join(getNotesDir(cwd), 'TODO.md');
|
|
190
|
+
}
|
|
191
|
+
|
|
192
|
+
/** Find CLAUDE.md — returns the FIRST found path. */
|
|
193
|
+
export function findClaudeMdPath(cwd: string): string | null {
|
|
194
|
+
const paths = findAllClaudeMdPaths(cwd);
|
|
195
|
+
return paths.length > 0 ? paths[0] : null;
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
/**
|
|
199
|
+
* Find ALL CLAUDE.md files in local locations in priority order.
|
|
200
|
+
*/
|
|
201
|
+
export function findAllClaudeMdPaths(cwd: string): string[] {
|
|
202
|
+
const foundPaths: string[] = [];
|
|
203
|
+
|
|
204
|
+
const localPaths = [
|
|
205
|
+
join(cwd, '.claude', 'CLAUDE.md'),
|
|
206
|
+
join(cwd, 'CLAUDE.md'),
|
|
207
|
+
join(cwd, 'Notes', 'CLAUDE.md'),
|
|
208
|
+
join(cwd, 'notes', 'CLAUDE.md'),
|
|
209
|
+
join(cwd, 'Prompts', 'CLAUDE.md'),
|
|
210
|
+
join(cwd, 'prompts', 'CLAUDE.md'),
|
|
211
|
+
];
|
|
212
|
+
|
|
213
|
+
for (const path of localPaths) {
|
|
214
|
+
if (existsSync(path)) foundPaths.push(path);
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
return foundPaths;
|
|
218
|
+
}
|