@tekmidian/pai 0.2.2 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ARCHITECTURE.md +148 -6
- package/FEATURE.md +8 -1
- package/README.md +79 -0
- package/dist/{auto-route-D7W6RE06.mjs → auto-route-JjW3f7pV.mjs} +4 -4
- package/dist/{auto-route-D7W6RE06.mjs.map → auto-route-JjW3f7pV.mjs.map} +1 -1
- package/dist/chunker-CbnBe0s0.mjs +191 -0
- package/dist/chunker-CbnBe0s0.mjs.map +1 -0
- package/dist/cli/index.mjs +835 -40
- package/dist/cli/index.mjs.map +1 -1
- package/dist/{config-DBh1bYM2.mjs → config-DELNqq3Z.mjs} +4 -2
- package/dist/{config-DBh1bYM2.mjs.map → config-DELNqq3Z.mjs.map} +1 -1
- package/dist/daemon/index.mjs +9 -9
- package/dist/{daemon-v5O897D4.mjs → daemon-CeTX4NpF.mjs} +94 -13
- package/dist/daemon-CeTX4NpF.mjs.map +1 -0
- package/dist/daemon-mcp/index.mjs +3 -3
- package/dist/db-Dp8VXIMR.mjs +212 -0
- package/dist/db-Dp8VXIMR.mjs.map +1 -0
- package/dist/{detect-BHqYcjJ1.mjs → detect-D7gPV3fQ.mjs} +1 -1
- package/dist/{detect-BHqYcjJ1.mjs.map → detect-D7gPV3fQ.mjs.map} +1 -1
- package/dist/{detector-DKA83aTZ.mjs → detector-cYYhK2Mi.mjs} +2 -2
- package/dist/{detector-DKA83aTZ.mjs.map → detector-cYYhK2Mi.mjs.map} +1 -1
- package/dist/{embeddings-mfqv-jFu.mjs → embeddings-DGRAPAYb.mjs} +2 -2
- package/dist/{embeddings-mfqv-jFu.mjs.map → embeddings-DGRAPAYb.mjs.map} +1 -1
- package/dist/{factory-BDAiKtYR.mjs → factory-DZLvRf4m.mjs} +4 -4
- package/dist/{factory-BDAiKtYR.mjs.map → factory-DZLvRf4m.mjs.map} +1 -1
- package/dist/index.d.mts +1 -1
- package/dist/index.d.mts.map +1 -1
- package/dist/index.mjs +9 -7
- package/dist/{indexer-B20bPHL-.mjs → indexer-CKQcgKsz.mjs} +4 -190
- package/dist/indexer-CKQcgKsz.mjs.map +1 -0
- package/dist/{indexer-backend-BXaocO5r.mjs → indexer-backend-BHztlJJg.mjs} +4 -3
- package/dist/{indexer-backend-BXaocO5r.mjs.map → indexer-backend-BHztlJJg.mjs.map} +1 -1
- package/dist/{ipc-client-DPy7s3iu.mjs → ipc-client-CLt2fNlC.mjs} +1 -1
- package/dist/ipc-client-CLt2fNlC.mjs.map +1 -0
- package/dist/mcp/index.mjs +118 -5
- package/dist/mcp/index.mjs.map +1 -1
- package/dist/{migrate-Bwj7qPaE.mjs → migrate-jokLenje.mjs} +8 -1
- package/dist/migrate-jokLenje.mjs.map +1 -0
- package/dist/{pai-marker-DX_mFLum.mjs → pai-marker-CXQPX2P6.mjs} +1 -1
- package/dist/{pai-marker-DX_mFLum.mjs.map → pai-marker-CXQPX2P6.mjs.map} +1 -1
- package/dist/{postgres-Ccvpc6fC.mjs → postgres-CRBe30Ag.mjs} +1 -1
- package/dist/{postgres-Ccvpc6fC.mjs.map → postgres-CRBe30Ag.mjs.map} +1 -1
- package/dist/{schemas-DjdwzIQ8.mjs → schemas-BY3Pjvje.mjs} +1 -1
- package/dist/{schemas-DjdwzIQ8.mjs.map → schemas-BY3Pjvje.mjs.map} +1 -1
- package/dist/{search-PjftDxxs.mjs → search-GK0ibTJy.mjs} +2 -2
- package/dist/{search-PjftDxxs.mjs.map → search-GK0ibTJy.mjs.map} +1 -1
- package/dist/{sqlite-CHUrNtbI.mjs → sqlite-RyR8Up1v.mjs} +3 -3
- package/dist/{sqlite-CHUrNtbI.mjs.map → sqlite-RyR8Up1v.mjs.map} +1 -1
- package/dist/{tools-CLK4080-.mjs → tools-CUg0Lyg-.mjs} +175 -11
- package/dist/{tools-CLK4080-.mjs.map → tools-CUg0Lyg-.mjs.map} +1 -1
- package/dist/{utils-DEWdIFQ0.mjs → utils-QSfKagcj.mjs} +62 -2
- package/dist/utils-QSfKagcj.mjs.map +1 -0
- package/dist/vault-indexer-Bo2aPSzP.mjs +499 -0
- package/dist/vault-indexer-Bo2aPSzP.mjs.map +1 -0
- package/dist/zettelkasten-Co-w0XSZ.mjs +901 -0
- package/dist/zettelkasten-Co-w0XSZ.mjs.map +1 -0
- package/package.json +2 -1
- package/src/hooks/README.md +99 -0
- package/src/hooks/hooks.md +13 -0
- package/src/hooks/pre-compact.sh +95 -0
- package/src/hooks/session-stop.sh +93 -0
- package/statusline-command.sh +9 -4
- package/templates/README.md +7 -0
- package/templates/agent-prefs.example.md +7 -0
- package/templates/claude-md.template.md +7 -0
- package/templates/pai-project.template.md +4 -6
- package/templates/pai-skill.template.md +295 -0
- package/templates/templates.md +20 -0
- package/dist/daemon-v5O897D4.mjs.map +0 -1
- package/dist/db-BcDxXVBu.mjs +0 -110
- package/dist/db-BcDxXVBu.mjs.map +0 -1
- package/dist/indexer-B20bPHL-.mjs.map +0 -1
- package/dist/ipc-client-DPy7s3iu.mjs.map +0 -1
- package/dist/migrate-Bwj7qPaE.mjs.map +0 -1
- package/dist/utils-DEWdIFQ0.mjs.map +0 -1
|
@@ -0,0 +1,499 @@
|
|
|
1
|
+
import { t as chunkMarkdown } from "./chunker-CbnBe0s0.mjs";
|
|
2
|
+
import { existsSync, readFileSync, readdirSync, statSync } from "node:fs";
|
|
3
|
+
import { basename, dirname, join, normalize, relative } from "node:path";
|
|
4
|
+
import { createHash } from "node:crypto";
|
|
5
|
+
|
|
6
|
+
//#region src/memory/vault-indexer.ts
|
|
7
|
+
/**
|
|
8
|
+
* Vault indexer for the PAI federation memory engine.
|
|
9
|
+
*
|
|
10
|
+
* Indexes an entire Obsidian vault (or any markdown knowledge base), following
|
|
11
|
+
* symlinks, deduplicating files by inode, parsing wikilinks, and computing
|
|
12
|
+
* per-file health metrics (orphan detection, dead links).
|
|
13
|
+
*
|
|
14
|
+
* Key differences from the project indexer (indexer.ts):
|
|
15
|
+
* - Follows symbolic links (project indexer skips them)
|
|
16
|
+
* - Deduplicates files with the same inode (same content reachable via multiple paths)
|
|
17
|
+
* - Parses [[wikilinks]] and builds a directed link graph
|
|
18
|
+
* - Resolves wikilinks using Obsidian's shortest-match algorithm
|
|
19
|
+
* - Computes health metrics per file: inbound/outbound link counts, dead links, orphans
|
|
20
|
+
*/
|
|
21
|
+
/** Maximum number of .md files to collect from a vault. */
const VAULT_MAX_FILES = 1e4;
/** Maximum recursion depth for vault directory walks. */
const VAULT_MAX_DEPTH = 10;
/** Number of files to process before yielding to the event loop. */
const VAULT_YIELD_EVERY = 10;
/**
 * Directories to always skip, at any depth, during vault walks.
 * Includes standard build/VCS noise plus Obsidian-specific directories.
 */
const VAULT_SKIP_DIRS = new Set([
	// Version control
	".git",
	// Dependency directories (any language)
	"node_modules",
	"vendor",
	"Pods",
	// Build / compile output
	"dist",
	"build",
	"out",
	"DerivedData",
	".next",
	// Python virtual environments and caches
	".venv",
	"venv",
	"__pycache__",
	// General caches
	".cache",
	".bun",
	// Obsidian internals
	".obsidian",
	".trash"
]);
|
|
49
|
+
/**
 * Hash full file content with SHA-256 for change detection.
 * @param content Raw file text.
 * @returns Lowercase hex digest.
 */
function sha256File(content) {
	const hasher = createHash("sha256");
	hasher.update(content);
	return hasher.digest("hex");
}
|
|
52
|
+
/**
 * Deterministic chunk ID: SHA-256 over the project ID, file path, chunk
 * index, and line span, joined with ":".
 * @returns Lowercase hex digest uniquely identifying the chunk.
 */
function chunkId(projectId, path, chunkIndex, startLine, endLine) {
	const identity = [projectId, path, chunkIndex, startLine, endLine].join(":");
	return createHash("sha256").update(identity).digest("hex");
}
|
|
55
|
+
/**
 * Yield control back to the event loop so long indexing runs stay responsive.
 * @returns Promise resolved on the next setImmediate tick.
 */
function yieldToEventLoop() {
	return new Promise((done) => {
		setImmediate(done);
	});
}
|
|
58
|
+
/**
 * Recursively collect all .md files under a vault root, following symlinks.
 *
 * Symlink-following behaviour:
 * - Symbolic links to files: followed if the target is a .md file
 * - Symbolic links to directories: followed with cycle detection via inode
 *
 * Cycle detection is based on the real inode of each visited directory.
 * Using the real stat (not lstat) ensures that symlinked dirs resolve to
 * their actual inode, preventing infinite loops.
 *
 * @param vaultRoot Absolute root of the vault (for computing vaultRelPath).
 * @param opts Optional limits: maxFiles (default VAULT_MAX_FILES) and
 *             maxDepth (default VAULT_MAX_DEPTH).
 * @returns Array of { absPath, vaultRelPath, inode, device } entries.
 */
function walkVaultMdFiles(vaultRoot, opts) {
	const maxFiles = opts?.maxFiles ?? VAULT_MAX_FILES;
	const maxDepth = opts?.maxDepth ?? VAULT_MAX_DEPTH;
	const results = [];
	const visitedDirs = new Set();
	function walk(dir, depth) {
		if (results.length >= maxFiles) return;
		if (depth > maxDepth) return;
		// Real stat (follows symlinks); unreadable or broken dirs are skipped.
		let dirStat;
		try {
			dirStat = statSync(dir);
		} catch {
			return;
		}
		// Cycle detection keyed on device:inode of the resolved directory.
		const dirKey = `${dirStat.dev}:${dirStat.ino}`;
		if (visitedDirs.has(dirKey)) return;
		visitedDirs.add(dirKey);
		let entries;
		try {
			entries = readdirSync(dir, {
				withFileTypes: true,
				encoding: "utf8"
			});
		} catch {
			return;
		}
		for (const entry of entries) {
			if (results.length >= maxFiles) break;
			// Skip-list filter applies to every entry kind, including symlinks,
			// so no re-check is needed in the branches below.
			if (VAULT_SKIP_DIRS.has(entry.name)) continue;
			const full = join(dir, entry.name);
			if (entry.isSymbolicLink()) {
				// Resolve the link target; broken symlinks are skipped.
				let targetStat;
				try {
					targetStat = statSync(full);
				} catch {
					continue;
				}
				if (targetStat.isDirectory()) walk(full, depth + 1);
				else if (targetStat.isFile() && entry.name.endsWith(".md")) results.push({
					absPath: full,
					vaultRelPath: relative(vaultRoot, full),
					inode: targetStat.ino,
					device: targetStat.dev
				});
			} else if (entry.isDirectory()) walk(full, depth + 1);
			else if (entry.isFile() && entry.name.endsWith(".md")) {
				let fileStat;
				try {
					fileStat = statSync(full);
				} catch {
					continue;
				}
				results.push({
					absPath: full,
					vaultRelPath: relative(vaultRoot, full),
					inode: fileStat.ino,
					device: fileStat.dev
				});
			}
		}
	}
	if (existsSync(vaultRoot)) walk(vaultRoot, 0);
	return results;
}
|
|
140
|
+
/**
 * Group vault files by inode identity (device + inode).
 *
 * The canonical file of each group is the one with the fewest path
 * separators (shallowest), ties broken by shortest path string; every
 * other member of the group becomes an alias of that canonical file.
 */
function deduplicateByInode(files) {
	const byIdentity = new Map();
	for (const file of files) {
		const identity = `${file.device}:${file.inode}`;
		const bucket = byIdentity.get(identity);
		if (bucket === undefined) byIdentity.set(identity, [file]);
		else bucket.push(file);
	}
	const groups = [];
	for (const bucket of byIdentity.values()) {
		if (bucket.length === 0) continue;
		// Shallowest first, then shortest string.
		const sorted = [...bucket].sort((left, right) => {
			const leftDepth = (left.vaultRelPath.match(/\//g) ?? []).length;
			const rightDepth = (right.vaultRelPath.match(/\//g) ?? []).length;
			return leftDepth !== rightDepth
				? leftDepth - rightDepth
				: left.vaultRelPath.length - right.vaultRelPath.length;
		});
		const [canonical, ...aliases] = sorted;
		groups.push({ canonical, aliases });
	}
	return groups;
}
|
|
171
|
+
/**
 * Parse all [[wikilinks]] and ![[embeds]] from markdown content.
 *
 * Handles:
 * - Standard wikilinks: [[Target Note]]
 * - Aliased wikilinks: [[Target Note|Display Text]]
 * - Heading anchors: [[Target Note#Heading]] (stripped for resolution)
 * - Embeds: ![[Target Note]]
 * - Frontmatter wikilinks (YAML between --- delimiters); these are kept
 *   but never flagged as embeds.
 *
 * @param content Raw markdown file content.
 * @returns Array of parsed links in document order.
 */
function parseWikilinks(content) {
	const links = [];
	const lines = content.split("\n");
	// Number of leading lines covered by YAML frontmatter (0 when absent).
	let frontmatterEnd = 0;
	if (content.startsWith("---")) {
		const closingIdx = content.indexOf("\n---", 3);
		if (closingIdx !== -1) {
			frontmatterEnd = content.slice(0, closingIdx + 4).split("\n").length - 1;
		}
	}
	const wikilinkRe = /(!?)\[\[([^\]]+?)\]\]/g;
	for (let lineIdx = 0; lineIdx < lines.length; lineIdx++) {
		const line = lines[lineIdx];
		wikilinkRe.lastIndex = 0; // /g regexes are stateful — reset per line.
		for (let match = wikilinkRe.exec(line); match !== null; match = wikilinkRe.exec(line)) {
			const isEmbed = match[1] === "!";
			const inner = match[2];
			// Split on the first | to separate target from display alias.
			const pipeIdx = inner.indexOf("|");
			const beforePipe = pipeIdx === -1 ? inner : inner.slice(0, pipeIdx);
			const alias = pipeIdx === -1 ? null : inner.slice(pipeIdx + 1);
			// Strip a #heading anchor from the target before resolution.
			const hashIdx = beforePipe.indexOf("#");
			const raw = (hashIdx === -1 ? beforePipe : beforePipe.slice(0, hashIdx)).trim();
			if (!raw) continue;
			const isFrontmatter = lineIdx < frontmatterEnd;
			links.push({
				raw,
				alias: alias?.trim() ?? null,
				lineNumber: lineIdx + 1, // 1-indexed
				isEmbed: isEmbed && !isFrontmatter
			});
		}
	}
	return links;
}
|
|
218
|
+
/**
 * Build a name lookup index for Obsidian wikilink resolution.
 *
 * Maps lowercase filename (without the .md extension) to every
 * vault-relative path that shares that basename. Canonical and alias
 * paths are both included so links resolve regardless of which path a
 * file is reached through.
 */
function buildNameIndex(files) {
	const index = new Map();
	for (const file of files) {
		const key = basename(file.vaultRelPath, ".md").toLowerCase();
		if (index.has(key)) index.get(key).push(file.vaultRelPath);
		else index.set(key, [file.vaultRelPath]);
	}
	return index;
}
|
|
235
|
+
/**
 * Resolve a wikilink target to a vault-relative path using Obsidian's rules.
 *
 * Resolution algorithm:
 * 1. If raw contains "/", attempt an exact path match (with and without
 *    .md), falling back to a case-insensitive comparison.
 * 2. Otherwise normalize: lowercase the basename, strip any .md extension.
 * 3. Look up all files with that basename in the name index.
 * 4. Exactly one match — return it; none — return null (dead link).
 * 5. Several matches — prefer the candidate whose directory shares the
 *    longest segment prefix with the source file's directory, breaking
 *    ties by shortest overall path.
 *
 * @param raw The raw link target (heading-stripped, pipe-stripped).
 * @param nameIndex Map from lowercase basename-without-ext to vault paths.
 * @param sourcePath Vault-relative path of the file containing the link.
 * @returns Vault-relative path of the resolved target, or null.
 */
function resolveWikilink(raw, nameIndex, sourcePath) {
	if (!raw) return null;
	if (raw.includes("/")) {
		const normalized = normalize(raw);
		const withMd = normalized.endsWith(".md") ? normalized : `${normalized}.md`;
		for (const paths of nameIndex.values()) {
			for (const candidate of paths) {
				if (candidate === withMd || candidate === normalized) return candidate;
				if (candidate.toLowerCase() === withMd.toLowerCase()) return candidate;
			}
		}
	}
	const rawBase = basename(raw).replace(/\.md$/i, "").toLowerCase().trim();
	if (!rawBase) return null;
	const candidates = nameIndex.get(rawBase);
	if (!candidates || candidates.length === 0) return null;
	if (candidates.length === 1) return candidates[0];
	// Ambiguous basename: score every candidate against the source directory.
	const sourceDir = dirname(sourcePath);
	let best = null;
	let bestPrefix = -1;
	let bestLen = Infinity;
	for (const candidate of candidates) {
		const prefix = commonPrefixLength(sourceDir, dirname(candidate));
		const closer = prefix > bestPrefix;
		const shorterTie = prefix === bestPrefix && candidate.length < bestLen;
		if (closer || shorterTie) {
			bestPrefix = prefix;
			bestLen = candidate.length;
			best = candidate;
		}
	}
	return best;
}
|
|
282
|
+
/**
 * Number of leading path segments shared by two directory paths.
 * "." denotes the root and contributes zero segments.
 *
 * Example: "a/b/c" and "a/b/d" → 2 (common: "a", "b")
 */
function commonPrefixLength(a, b) {
	const aParts = a === "." ? [] : a.split("/");
	const bParts = b === "." ? [] : b.split("/");
	const limit = Math.min(aParts.length, bParts.length);
	let shared = 0;
	while (shared < limit && aParts[shared] === bParts[shared]) shared++;
	return shared;
}
|
|
298
|
+
/**
 * Index an entire Obsidian vault (or markdown knowledge base) into the
 * federation database.
 *
 * Steps:
 * 1. Walk vault root, following symlinks.
 * 2. Deduplicate by inode — each unique file is indexed once.
 * 3. Build a name index for wikilink resolution.
 * 4. For each canonical file:
 *    a. SHA-256 hash for change detection — skip unchanged files.
 *    b. Read content, chunk with chunkMarkdown().
 *    c. Insert chunks into memory_chunks and memory_fts.
 *    d. Upsert vault_files row.
 * 5. Record aliases in vault_aliases.
 * 6. Rebuild vault_name_index table.
 * 7. Rebuild vault_links:
 *    a. Parse [[wikilinks]] from each canonical file.
 *    b. Resolve each link with resolveWikilink().
 *    c. Insert into vault_links.
 * 8. Compute and upsert health metrics (vault_health).
 * 9. Return statistics.
 *
 * @param db Open federation database.
 * @param vaultProjectId Registry project ID for the vault "project".
 * @param vaultRoot Absolute path to the vault root directory.
 * @returns Statistics object: counts of files indexed/skipped, chunks,
 *          aliases, links, dead links, orphans, and elapsed milliseconds.
 */
async function indexVault(db, vaultProjectId, vaultRoot) {
	const startTime = Date.now();
	const result = {
		filesIndexed: 0,
		chunksCreated: 0,
		filesSkipped: 0,
		aliasesRecorded: 0,
		linksExtracted: 0,
		deadLinksFound: 0,
		orphansFound: 0,
		elapsed: 0
	};
	// Steps 1-3: walk, dedupe by inode, build the wikilink name index.
	const allFiles = walkVaultMdFiles(vaultRoot);
	const inodeGroups = deduplicateByInode(allFiles);
	const nameIndex = buildNameIndex(allFiles);
	// Prepared statements reused across the per-file loop.
	const selectFileHash = db.prepare("SELECT hash FROM vault_files WHERE vault_path = ?");
	// NOTE(review): despite the name, this is a SELECT — it fetches the old
	// chunk ids for a path so their FTS rows can be deleted below.
	const deleteOldChunkIds = db.prepare("SELECT id FROM memory_chunks WHERE project_id = ? AND path = ?");
	const deleteFts = db.prepare("DELETE FROM memory_fts WHERE id = ?");
	const deleteChunks = db.prepare("DELETE FROM memory_chunks WHERE project_id = ? AND path = ?");
	const insertChunk = db.prepare(`
		INSERT INTO memory_chunks (id, project_id, source, tier, path, start_line, end_line, hash, text, updated_at)
		VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
	`);
	const insertFts = db.prepare(`
		INSERT INTO memory_fts (text, id, project_id, path, source, tier, start_line, end_line)
		VALUES (?, ?, ?, ?, ?, ?, ?, ?)
	`);
	const upsertVaultFile = db.prepare(`
		INSERT INTO vault_files (vault_path, inode, device, hash, title, indexed_at)
		VALUES (?, ?, ?, ?, ?, ?)
		ON CONFLICT(vault_path) DO UPDATE SET
			inode = excluded.inode,
			device = excluded.device,
			hash = excluded.hash,
			title = excluded.title,
			indexed_at = excluded.indexed_at
	`);
	await yieldToEventLoop();
	// Step 4: chunk and store each canonical file, yielding to the event
	// loop every VAULT_YIELD_EVERY files so the process stays responsive.
	let filesSinceYield = 0;
	for (const group of inodeGroups) {
		if (filesSinceYield >= VAULT_YIELD_EVERY) {
			await yieldToEventLoop();
			filesSinceYield = 0;
		}
		filesSinceYield++;
		const { canonical } = group;
		let content;
		try {
			content = readFileSync(canonical.absPath, "utf8");
		} catch {
			// Unreadable (permissions, vanished since walk) — count and move on.
			result.filesSkipped++;
			continue;
		}
		const hash = sha256File(content);
		// Change detection: identical hash means stored chunks are current.
		if (selectFileHash.get(canonical.vaultRelPath)?.hash === hash) {
			result.filesSkipped++;
			continue;
		}
		// Remove stale chunks (and their FTS rows) before re-inserting.
		const oldChunkIds = deleteOldChunkIds.all(vaultProjectId, canonical.vaultRelPath);
		db.transaction(() => {
			for (const row of oldChunkIds) deleteFts.run(row.id);
			deleteChunks.run(vaultProjectId, canonical.vaultRelPath);
		})();
		const chunks = chunkMarkdown(content);
		const updatedAt = Date.now();
		// Title: first "# " heading, falling back to the filename.
		const titleMatch = /^#\s+(.+)$/m.exec(content);
		const title = titleMatch ? titleMatch[1].trim() : basename(canonical.vaultRelPath, ".md");
		// Insert all chunks plus the vault_files row atomically.
		db.transaction(() => {
			for (let i = 0; i < chunks.length; i++) {
				const chunk = chunks[i];
				const id = chunkId(vaultProjectId, canonical.vaultRelPath, i, chunk.startLine, chunk.endLine);
				insertChunk.run(id, vaultProjectId, "vault", "topic", canonical.vaultRelPath, chunk.startLine, chunk.endLine, chunk.hash, chunk.text, updatedAt);
				insertFts.run(chunk.text, id, vaultProjectId, canonical.vaultRelPath, "vault", "topic", chunk.startLine, chunk.endLine);
			}
			upsertVaultFile.run(canonical.vaultRelPath, canonical.inode, canonical.device, hash, title, updatedAt);
		})();
		result.filesIndexed++;
		result.chunksCreated += chunks.length;
	}
	await yieldToEventLoop();
	// Step 5: rebuild the alias table from scratch.
	db.exec("DELETE FROM vault_aliases");
	const insertAlias = db.prepare(`
		INSERT OR REPLACE INTO vault_aliases (vault_path, canonical_path, inode, device)
		VALUES (?, ?, ?, ?)
	`);
	db.transaction((groups) => {
		for (const group of groups) for (const alias of group.aliases) {
			insertAlias.run(alias.vaultRelPath, group.canonical.vaultRelPath, alias.inode, alias.device);
			result.aliasesRecorded++;
		}
	})(inodeGroups);
	await yieldToEventLoop();
	// Step 6: rebuild the basename lookup table.
	db.exec("DELETE FROM vault_name_index");
	const insertNameIndex = db.prepare(`
		INSERT OR REPLACE INTO vault_name_index (name, vault_path) VALUES (?, ?)
	`);
	const insertNameIndexTx = db.transaction((entries) => {
		for (const [name, path] of entries) insertNameIndex.run(name, path);
	});
	const nameEntries = [];
	for (const [name, paths] of nameIndex) for (const path of paths) nameEntries.push([name, path]);
	insertNameIndexTx(nameEntries);
	await yieldToEventLoop();
	// Step 7: rebuild the link graph. Files are re-read here rather than
	// cached from step 4, presumably to keep peak memory low — this means
	// up to two reads per file per run.
	db.exec("DELETE FROM vault_links");
	const insertLink = db.prepare(`
		INSERT OR IGNORE INTO vault_links
		(source_path, target_raw, target_path, link_type, line_number)
		VALUES (?, ?, ?, ?, ?)
	`);
	const linkRows = [];
	for (const group of inodeGroups) {
		const { canonical } = group;
		let content;
		try {
			content = readFileSync(canonical.absPath, "utf8");
		} catch {
			continue;
		}
		const parsedLinks = parseWikilinks(content);
		for (const link of parsedLinks) {
			// A null target marks a dead link (unresolvable wikilink).
			const target = resolveWikilink(link.raw, nameIndex, canonical.vaultRelPath);
			linkRows.push({
				source: canonical.vaultRelPath,
				raw: link.raw,
				target,
				linkType: link.isEmbed ? "embed" : "wikilink",
				lineNumber: link.lineNumber
			});
		}
	}
	db.transaction((rows) => {
		for (const row of rows) insertLink.run(row.source, row.raw, row.target, row.linkType, row.lineNumber);
	})(linkRows);
	result.linksExtracted = linkRows.length;
	result.deadLinksFound = linkRows.filter((r) => r.target === null).length;
	await yieldToEventLoop();
	// Step 8: per-file health metrics derived from the freshly built graph.
	const outboundCounts = db.prepare(`SELECT source_path, COUNT(*) AS cnt FROM vault_links GROUP BY source_path`).all();
	const deadLinkCounts = db.prepare(`SELECT source_path, COUNT(*) AS cnt FROM vault_links
		WHERE target_path IS NULL GROUP BY source_path`).all();
	const inboundCounts = db.prepare(`SELECT target_path, COUNT(*) AS cnt FROM vault_links
		WHERE target_path IS NOT NULL GROUP BY target_path`).all();
	const outboundMap = new Map(outboundCounts.map((r) => [r.source_path, r.cnt]));
	const deadMap = new Map(deadLinkCounts.map((r) => [r.source_path, r.cnt]));
	const inboundMap = new Map(inboundCounts.map((r) => [r.target_path, r.cnt]));
	const upsertHealth = db.prepare(`
		INSERT INTO vault_health
		(vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at)
		VALUES (?, ?, ?, ?, ?, ?)
		ON CONFLICT(vault_path) DO UPDATE SET
			inbound_count = excluded.inbound_count,
			outbound_count = excluded.outbound_count,
			dead_link_count = excluded.dead_link_count,
			is_orphan = excluded.is_orphan,
			computed_at = excluded.computed_at
	`);
	const computedAt = Date.now();
	let orphanCount = 0;
	db.transaction((groups) => {
		for (const group of groups) {
			const path = group.canonical.vaultRelPath;
			const inbound = inboundMap.get(path) ?? 0;
			const outbound = outboundMap.get(path) ?? 0;
			const dead = deadMap.get(path) ?? 0;
			// Orphan: no resolved inbound links point at this file.
			const isOrphan = inbound === 0 ? 1 : 0;
			if (isOrphan) orphanCount++;
			upsertHealth.run(path, inbound, outbound, dead, isOrphan, computedAt);
		}
	})(inodeGroups);
	result.orphansFound = orphanCount;
	// Step 9: final statistics.
	result.elapsed = Date.now() - startTime;
	return result;
}
|
|
496
|
+
|
|
497
|
+
//#endregion
|
|
498
|
+
export { indexVault };
|
|
499
|
+
//# sourceMappingURL=vault-indexer-Bo2aPSzP.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"vault-indexer-Bo2aPSzP.mjs","names":[],"sources":["../src/memory/vault-indexer.ts"],"sourcesContent":["/**\n * Vault indexer for the PAI federation memory engine.\n *\n * Indexes an entire Obsidian vault (or any markdown knowledge base), following\n * symlinks, deduplicating files by inode, parsing wikilinks, and computing\n * per-file health metrics (orphan detection, dead links).\n *\n * Key differences from the project indexer (indexer.ts):\n * - Follows symbolic links (project indexer skips them)\n * - Deduplicates files with the same inode (same content reachable via multiple paths)\n * - Parses [[wikilinks]] and builds a directed link graph\n * - Resolves wikilinks using Obsidian's shortest-match algorithm\n * - Computes health metrics per file: inbound/outbound link counts, dead links, orphans\n */\n\nimport { createHash } from \"node:crypto\";\nimport { readFileSync, statSync, readdirSync, existsSync } from \"node:fs\";\nimport { join, relative, basename, dirname, normalize } from \"node:path\";\nimport type { Database } from \"better-sqlite3\";\nimport { chunkMarkdown } from \"./chunker.js\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface VaultFile {\n absPath: string;\n vaultRelPath: string;\n inode: number;\n device: number;\n}\n\nexport interface InodeGroup {\n canonical: VaultFile;\n aliases: VaultFile[];\n}\n\nexport interface ParsedLink {\n raw: string;\n alias: string | null;\n lineNumber: number;\n isEmbed: boolean;\n}\n\nexport interface VaultIndexResult {\n filesIndexed: number;\n chunksCreated: number;\n filesSkipped: number;\n aliasesRecorded: number;\n linksExtracted: number;\n deadLinksFound: number;\n orphansFound: number;\n elapsed: number;\n}\n\n// ---------------------------------------------------------------------------\n// Constants\n// 
---------------------------------------------------------------------------\n\n/** Maximum number of .md files to collect from a vault. */\nconst VAULT_MAX_FILES = 10_000;\n\n/** Maximum recursion depth for vault directory walks. */\nconst VAULT_MAX_DEPTH = 10;\n\n/** Number of files to process before yielding to the event loop. */\nconst VAULT_YIELD_EVERY = 10;\n\n/**\n * Directories to always skip, at any depth, during vault walks.\n * Includes standard build/VCS noise plus Obsidian-specific directories.\n */\nconst VAULT_SKIP_DIRS = new Set([\n // Version control\n \".git\",\n // Dependency directories (any language)\n \"node_modules\",\n \"vendor\",\n \"Pods\",\n // Build / compile output\n \"dist\",\n \"build\",\n \"out\",\n \"DerivedData\",\n \".next\",\n // Python virtual environments and caches\n \".venv\",\n \"venv\",\n \"__pycache__\",\n // General caches\n \".cache\",\n \".bun\",\n // Obsidian internals\n \".obsidian\",\n \".trash\",\n]);\n\n// ---------------------------------------------------------------------------\n// Utility\n// ---------------------------------------------------------------------------\n\nfunction sha256File(content: string): string {\n return createHash(\"sha256\").update(content).digest(\"hex\");\n}\n\nfunction chunkId(\n projectId: number,\n path: string,\n chunkIndex: number,\n startLine: number,\n endLine: number,\n): string {\n return createHash(\"sha256\")\n .update(`${projectId}:${path}:${chunkIndex}:${startLine}:${endLine}`)\n .digest(\"hex\");\n}\n\nfunction yieldToEventLoop(): Promise<void> {\n return new Promise((resolve) => setImmediate(resolve));\n}\n\n// ---------------------------------------------------------------------------\n// Vault directory walker (follows symlinks)\n// ---------------------------------------------------------------------------\n\n/**\n * Recursively collect all .md files under a vault root, following symlinks.\n *\n * Symlink-following behaviour:\n * - Symbolic links to files: followed if 
the target is a .md file\n * - Symbolic links to directories: followed with cycle detection via inode\n *\n * Cycle detection is based on the real inode of each visited directory.\n * Using the real stat (not lstat) ensures that symlinked dirs resolve to\n * their actual inode, preventing infinite loops.\n *\n * @param dir Directory to scan.\n * @param vaultRoot Absolute root of the vault (for computing vaultRelPath).\n * @param acc Shared accumulator (mutated in place for early exit).\n * @param visited Set of \"device:inode\" strings for visited directories.\n * @param depth Current recursion depth.\n */\nexport function walkVaultMdFiles(\n vaultRoot: string,\n opts?: { maxFiles?: number; maxDepth?: number },\n): VaultFile[] {\n const maxFiles = opts?.maxFiles ?? VAULT_MAX_FILES;\n const maxDepth = opts?.maxDepth ?? VAULT_MAX_DEPTH;\n\n const results: VaultFile[] = [];\n const visitedDirs = new Set<string>();\n\n function walk(dir: string, depth: number): void {\n if (results.length >= maxFiles) return;\n if (depth > maxDepth) return;\n\n // Get the real inode of this directory (follows symlinks on the dir itself)\n let dirStat: ReturnType<typeof statSync>;\n try {\n dirStat = statSync(dir);\n } catch {\n return; // Unreadable or broken symlink — skip\n }\n\n const dirKey = `${dirStat.dev}:${dirStat.ino}`;\n if (visitedDirs.has(dirKey)) return; // Cycle detected\n visitedDirs.add(dirKey);\n\n let entries: import(\"node:fs\").Dirent<string>[];\n try {\n entries = readdirSync(dir, { withFileTypes: true, encoding: \"utf8\" });\n } catch {\n return; // Unreadable directory — skip\n }\n\n for (const entry of entries) {\n if (results.length >= maxFiles) break;\n if (VAULT_SKIP_DIRS.has(entry.name)) continue;\n\n const full = join(dir, entry.name);\n\n if (entry.isSymbolicLink()) {\n // Follow the symlink — resolve to real target\n let targetStat: ReturnType<typeof statSync>;\n try {\n targetStat = statSync(full); // statSync follows symlinks\n } catch {\n continue; // 
Broken symlink — skip\n }\n\n if (targetStat.isDirectory()) {\n if (!VAULT_SKIP_DIRS.has(entry.name)) {\n walk(full, depth + 1);\n }\n } else if (targetStat.isFile() && entry.name.endsWith(\".md\")) {\n results.push({\n absPath: full,\n vaultRelPath: relative(vaultRoot, full),\n inode: targetStat.ino,\n device: targetStat.dev,\n });\n }\n } else if (entry.isDirectory()) {\n walk(full, depth + 1);\n } else if (entry.isFile() && entry.name.endsWith(\".md\")) {\n let fileStat: ReturnType<typeof statSync>;\n try {\n fileStat = statSync(full);\n } catch {\n continue;\n }\n results.push({\n absPath: full,\n vaultRelPath: relative(vaultRoot, full),\n inode: fileStat.ino,\n device: fileStat.dev,\n });\n }\n }\n }\n\n if (existsSync(vaultRoot)) {\n walk(vaultRoot, 0);\n }\n\n return results;\n}\n\n// ---------------------------------------------------------------------------\n// Inode deduplication\n// ---------------------------------------------------------------------------\n\n/**\n * Group vault files by inode identity (device + inode).\n *\n * Within each group, the canonical file is chosen as the one with the\n * fewest path separators (shallowest), breaking ties by shortest string.\n * All other group members become aliases.\n */\nexport function deduplicateByInode(files: VaultFile[]): InodeGroup[] {\n const groups = new Map<string, VaultFile[]>();\n\n for (const file of files) {\n const key = `${file.device}:${file.inode}`;\n const existing = groups.get(key);\n if (existing) {\n existing.push(file);\n } else {\n groups.set(key, [file]);\n }\n }\n\n const result: InodeGroup[] = [];\n\n for (const group of groups.values()) {\n if (group.length === 0) continue;\n\n // Sort: fewest path separators first, then shortest string\n const sorted = [...group].sort((a, b) => {\n const aDepth = (a.vaultRelPath.match(/\\//g) ?? []).length;\n const bDepth = (b.vaultRelPath.match(/\\//g) ?? 
[]).length;\n if (aDepth !== bDepth) return aDepth - bDepth;\n return a.vaultRelPath.length - b.vaultRelPath.length;\n });\n\n const [canonical, ...aliases] = sorted as [VaultFile, ...VaultFile[]];\n result.push({ canonical, aliases });\n }\n\n return result;\n}\n\n// ---------------------------------------------------------------------------\n// Wikilink parser\n// ---------------------------------------------------------------------------\n\n/**\n * Parse all [[wikilinks]] and ![[embeds]] from markdown content.\n *\n * Handles:\n * - Standard wikilinks: [[Target Note]]\n * - Aliased wikilinks: [[Target Note|Display Text]]\n * - Heading anchors: [[Target Note#Heading]] (stripped for resolution)\n * - Embeds: ![[Target Note]]\n * - Frontmatter wikilinks (YAML between --- delimiters)\n *\n * @param content Raw markdown file content.\n * @returns Array of parsed links in document order.\n */\nexport function parseWikilinks(content: string): ParsedLink[] {\n const links: ParsedLink[] = [];\n const lines = content.split(\"\\n\");\n\n // Determine frontmatter range (YAML between opening and closing ---)\n let frontmatterEnd = 0;\n if (content.startsWith(\"---\")) {\n const closingIdx = content.indexOf(\"\\n---\", 3);\n if (closingIdx !== -1) {\n frontmatterEnd = content.slice(0, closingIdx + 4).split(\"\\n\").length - 1;\n }\n }\n\n // Regex for [[wikilinks]] and ![[embeds]]\n const wikilinkRe = /(!?)\\[\\[([^\\]]+?)\\]\\]/g;\n\n for (let lineIdx = 0; lineIdx < lines.length; lineIdx++) {\n const line = lines[lineIdx]!;\n const lineNumber = lineIdx + 1; // 1-indexed\n\n wikilinkRe.lastIndex = 0;\n let match: RegExpExecArray | null;\n while ((match = wikilinkRe.exec(line)) !== null) {\n const isEmbed = match[1] === \"!\";\n const inner = match[2]!;\n\n // Split on first | for alias\n const pipeIdx = inner.indexOf(\"|\");\n const beforePipe = pipeIdx === -1 ? inner : inner.slice(0, pipeIdx);\n const alias = pipeIdx === -1 ? 
null : inner.slice(pipeIdx + 1);\n\n // Strip heading anchor (everything after #)\n const hashIdx = beforePipe.indexOf(\"#\");\n const raw = hashIdx === -1 ? beforePipe.trim() : beforePipe.slice(0, hashIdx).trim();\n\n if (!raw) continue; // Skip links with empty targets (e.g. [[#Heading]])\n\n // For frontmatter lines, mark as non-embed regardless of !\n const isFrontmatter = lineIdx < frontmatterEnd;\n links.push({\n raw,\n alias: alias?.trim() ?? null,\n lineNumber,\n isEmbed: isEmbed && !isFrontmatter,\n });\n }\n }\n\n return links;\n}\n\n// ---------------------------------------------------------------------------\n// Name index builder\n// ---------------------------------------------------------------------------\n\n/**\n * Build a name lookup index for Obsidian wikilink resolution.\n *\n * Maps lowercase filename (without .md extension) to all vault-relative paths\n * that share that name. Includes both canonical paths and alias paths so that\n * wikilinks resolve regardless of which path the file is accessed through.\n */\nexport function buildNameIndex(files: VaultFile[]): Map<string, string[]> {\n const index = new Map<string, string[]>();\n\n for (const file of files) {\n const name = basename(file.vaultRelPath, \".md\").toLowerCase();\n const existing = index.get(name);\n if (existing) {\n existing.push(file.vaultRelPath);\n } else {\n index.set(name, [file.vaultRelPath]);\n }\n }\n\n return index;\n}\n\n// ---------------------------------------------------------------------------\n// Wikilink resolver\n// ---------------------------------------------------------------------------\n\n/**\n * Resolve a wikilink target to a vault-relative path using Obsidian's rules.\n *\n * Resolution algorithm:\n * 1. If raw contains \"/\", attempt exact path match (with and without .md).\n * 2. Normalize: lowercase the raw target, strip .md extension.\n * 3. Look up in the name index (all files with that basename).\n * 4. If exactly one match, return it.\n * 5. 
If multiple matches, pick the one closest to the source file\n * (longest common directory prefix, then shortest overall path).\n * 6. If no matches, return null (dead link).\n *\n * @param raw The raw link target (heading-stripped, pipe-stripped).\n * @param nameIndex Map from lowercase basename-without-ext to vault paths.\n * @param sourcePath Vault-relative path of the file containing the link.\n * @returns Vault-relative path of the resolved target, or null.\n */\nexport function resolveWikilink(\n raw: string,\n nameIndex: Map<string, string[]>,\n sourcePath: string,\n): string | null {\n if (!raw) return null;\n\n // Case 1: path contains \"/\" — try exact match with and without .md\n if (raw.includes(\"/\")) {\n const normalized = normalize(raw);\n const normalizedMd = normalized.endsWith(\".md\") ? normalized : normalized + \".md\";\n\n // Check if any indexed path matches (case-insensitive for macOS compatibility)\n for (const [, paths] of nameIndex) {\n for (const p of paths) {\n if (p === normalizedMd || p === normalized) return p;\n if (p.toLowerCase() === normalizedMd.toLowerCase()) return p;\n }\n }\n // Fall through to name lookup in case the path prefix was wrong\n }\n\n // Normalize the raw target for name lookup.\n // Use the basename only — Obsidian resolves by filename, not full path.\n // E.g. 
\"PAI/20-webseiten/_20-webseiten-master\" → \"_20-webseiten-master\"\n const rawBase = basename(raw)\n .replace(/\\.md$/i, \"\")\n .toLowerCase()\n .trim();\n\n if (!rawBase) return null;\n\n const candidates = nameIndex.get(rawBase);\n\n if (!candidates || candidates.length === 0) {\n return null; // Dead link\n }\n\n if (candidates.length === 1) {\n return candidates[0]!;\n }\n\n // Multiple matches — pick the one closest to the source file\n const sourceDir = dirname(sourcePath);\n\n let bestPath: string | null = null;\n let bestPrefixLen = -1;\n let bestPathLen = Infinity;\n\n for (const candidate of candidates) {\n const candidateDir = dirname(candidate);\n const prefixLen = commonPrefixLength(sourceDir, candidateDir);\n const pathLen = candidate.length;\n\n if (\n prefixLen > bestPrefixLen ||\n (prefixLen === bestPrefixLen && pathLen < bestPathLen)\n ) {\n bestPrefixLen = prefixLen;\n bestPathLen = pathLen;\n bestPath = candidate;\n }\n }\n\n return bestPath;\n}\n\n/**\n * Compute the length of the common prefix between two directory paths,\n * measured in path segments (not raw characters).\n *\n * Example: \"a/b/c\" and \"a/b/d\" → 2 (common: \"a\", \"b\")\n */\nfunction commonPrefixLength(a: string, b: string): number {\n if (a === \".\" && b === \".\") return 0;\n const aParts = a === \".\" ? [] : a.split(\"/\");\n const bParts = b === \".\" ? [] : b.split(\"/\");\n let count = 0;\n const len = Math.min(aParts.length, bParts.length);\n for (let i = 0; i < len; i++) {\n if (aParts[i] === bParts[i]) {\n count++;\n } else {\n break;\n }\n }\n return count;\n}\n\n// ---------------------------------------------------------------------------\n// Main vault indexing orchestrator\n// ---------------------------------------------------------------------------\n\n/**\n * Index an entire Obsidian vault (or markdown knowledge base) into the\n * federation database.\n *\n * Steps:\n * 1. Walk vault root, following symlinks.\n * 2. 
Deduplicate by inode — each unique file is indexed once.\n * 3. Build a name index for wikilink resolution.\n * 4. For each canonical file:\n * a. SHA-256 hash for change detection — skip unchanged files.\n * b. Read content, chunk with chunkMarkdown().\n * c. Insert chunks into memory_chunks and memory_fts.\n * d. Upsert vault_files row.\n * 5. Record aliases in vault_aliases.\n * 6. Rebuild vault_name_index table.\n * 7. Rebuild vault_links:\n * a. Parse [[wikilinks]] from each canonical file.\n * b. Resolve each link with resolveWikilink().\n * c. Insert into vault_links.\n * 8. Compute and upsert health metrics (vault_health).\n * 9. Return statistics.\n *\n * @param db Open federation database.\n * @param vaultProjectId Registry project ID for the vault \"project\".\n * @param vaultRoot Absolute path to the vault root directory.\n */\nexport async function indexVault(\n db: Database,\n vaultProjectId: number,\n vaultRoot: string,\n): Promise<VaultIndexResult> {\n const startTime = Date.now();\n\n const result: VaultIndexResult = {\n filesIndexed: 0,\n chunksCreated: 0,\n filesSkipped: 0,\n aliasesRecorded: 0,\n linksExtracted: 0,\n deadLinksFound: 0,\n orphansFound: 0,\n elapsed: 0,\n };\n\n // ---------------------------------------------------------------------------\n // Step 1: Walk vault, collecting all .md files (follows symlinks)\n // ---------------------------------------------------------------------------\n\n const allFiles = walkVaultMdFiles(vaultRoot);\n\n // ---------------------------------------------------------------------------\n // Step 2: Deduplicate by inode\n // ---------------------------------------------------------------------------\n\n const inodeGroups = deduplicateByInode(allFiles);\n\n // ---------------------------------------------------------------------------\n // Step 3: Build name index (from all files including aliases, for resolution)\n // ---------------------------------------------------------------------------\n\n 
const nameIndex = buildNameIndex(allFiles);\n\n // ---------------------------------------------------------------------------\n // Step 4: Prepare SQL statements\n // ---------------------------------------------------------------------------\n\n const selectFileHash = db.prepare(\n \"SELECT hash FROM vault_files WHERE vault_path = ?\",\n );\n\n const deleteOldChunkIds = db.prepare(\n \"SELECT id FROM memory_chunks WHERE project_id = ? AND path = ?\",\n );\n\n const deleteFts = db.prepare(\"DELETE FROM memory_fts WHERE id = ?\");\n\n const deleteChunks = db.prepare(\n \"DELETE FROM memory_chunks WHERE project_id = ? AND path = ?\",\n );\n\n const insertChunk = db.prepare(`\n INSERT INTO memory_chunks (id, project_id, source, tier, path, start_line, end_line, hash, text, updated_at)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n `);\n\n const insertFts = db.prepare(`\n INSERT INTO memory_fts (text, id, project_id, path, source, tier, start_line, end_line)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?)\n `);\n\n const upsertVaultFile = db.prepare(`\n INSERT INTO vault_files (vault_path, inode, device, hash, title, indexed_at)\n VALUES (?, ?, ?, ?, ?, ?)\n ON CONFLICT(vault_path) DO UPDATE SET\n inode = excluded.inode,\n device = excluded.device,\n hash = excluded.hash,\n title = excluded.title,\n indexed_at = excluded.indexed_at\n `);\n\n // ---------------------------------------------------------------------------\n // Step 4 (cont.): Index each canonical file\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n let filesSinceYield = 0;\n\n for (const group of inodeGroups) {\n // Yield periodically to keep the IPC server responsive\n if (filesSinceYield >= VAULT_YIELD_EVERY) {\n await yieldToEventLoop();\n filesSinceYield = 0;\n }\n filesSinceYield++;\n\n const { canonical } = group;\n\n // Read file content\n let content: string;\n try {\n content = readFileSync(canonical.absPath, \"utf8\");\n } catch {\n 
result.filesSkipped++;\n continue;\n }\n\n const hash = sha256File(content);\n\n // Change detection: skip if hash is unchanged\n const existing = selectFileHash.get(canonical.vaultRelPath) as\n | { hash: string }\n | undefined;\n\n if (existing?.hash === hash) {\n result.filesSkipped++;\n continue;\n }\n\n // Delete old chunks for this vault path\n const oldChunkIds = deleteOldChunkIds.all(\n vaultProjectId,\n canonical.vaultRelPath,\n ) as Array<{ id: string }>;\n\n db.transaction(() => {\n for (const row of oldChunkIds) {\n deleteFts.run(row.id);\n }\n deleteChunks.run(vaultProjectId, canonical.vaultRelPath);\n })();\n\n // Chunk the content\n const chunks = chunkMarkdown(content);\n const updatedAt = Date.now();\n\n // Extract title from first H1 heading or filename\n const titleMatch = /^#\\s+(.+)$/m.exec(content);\n const title = titleMatch\n ? titleMatch[1]!.trim()\n : basename(canonical.vaultRelPath, \".md\");\n\n db.transaction(() => {\n for (let i = 0; i < chunks.length; i++) {\n const chunk = chunks[i]!;\n const id = chunkId(\n vaultProjectId,\n canonical.vaultRelPath,\n i,\n chunk.startLine,\n chunk.endLine,\n );\n insertChunk.run(\n id,\n vaultProjectId,\n \"vault\",\n \"topic\",\n canonical.vaultRelPath,\n chunk.startLine,\n chunk.endLine,\n chunk.hash,\n chunk.text,\n updatedAt,\n );\n insertFts.run(\n chunk.text,\n id,\n vaultProjectId,\n canonical.vaultRelPath,\n \"vault\",\n \"topic\",\n chunk.startLine,\n chunk.endLine,\n );\n }\n upsertVaultFile.run(\n canonical.vaultRelPath,\n canonical.inode,\n canonical.device,\n hash,\n title,\n updatedAt,\n );\n })();\n\n result.filesIndexed++;\n result.chunksCreated += chunks.length;\n }\n\n // ---------------------------------------------------------------------------\n // Step 5: Record aliases in vault_aliases\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n\n // Clear old aliases for this vault before rebuilding\n // (We identify vault 
aliases by checking which canonical paths belong to\n // the canonical files we just indexed — simpler to clear + rebuild all.)\n db.exec(\"DELETE FROM vault_aliases\");\n\n const insertAlias = db.prepare(`\n INSERT OR REPLACE INTO vault_aliases (vault_path, canonical_path, inode, device)\n VALUES (?, ?, ?, ?)\n `);\n\n const insertAliasesTx = db.transaction((groups: InodeGroup[]) => {\n for (const group of groups) {\n for (const alias of group.aliases) {\n insertAlias.run(\n alias.vaultRelPath,\n group.canonical.vaultRelPath,\n alias.inode,\n alias.device,\n );\n result.aliasesRecorded++;\n }\n }\n });\n insertAliasesTx(inodeGroups);\n\n // ---------------------------------------------------------------------------\n // Step 6: Rebuild vault_name_index\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n\n db.exec(\"DELETE FROM vault_name_index\");\n\n const insertNameIndex = db.prepare(`\n INSERT OR REPLACE INTO vault_name_index (name, vault_path) VALUES (?, ?)\n `);\n\n const insertNameIndexTx = db.transaction(\n (entries: Array<[string, string]>) => {\n for (const [name, path] of entries) {\n insertNameIndex.run(name, path);\n }\n },\n );\n\n const nameEntries: Array<[string, string]> = [];\n for (const [name, paths] of nameIndex) {\n for (const path of paths) {\n nameEntries.push([name, path]);\n }\n }\n insertNameIndexTx(nameEntries);\n\n // ---------------------------------------------------------------------------\n // Step 7: Rebuild vault_links\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n\n db.exec(\"DELETE FROM vault_links\");\n\n const insertLink = db.prepare(`\n INSERT OR IGNORE INTO vault_links\n (source_path, target_raw, target_path, link_type, line_number)\n VALUES (?, ?, ?, ?, ?)\n `);\n\n // Parse and resolve wikilinks in bulk transaction\n const linkRows: Array<{\n source: string;\n raw: string;\n target: string | 
null;\n linkType: string;\n lineNumber: number;\n }> = [];\n\n for (const group of inodeGroups) {\n const { canonical } = group;\n\n let content: string;\n try {\n content = readFileSync(canonical.absPath, \"utf8\");\n } catch {\n continue;\n }\n\n const parsedLinks = parseWikilinks(content);\n for (const link of parsedLinks) {\n const target = resolveWikilink(link.raw, nameIndex, canonical.vaultRelPath);\n linkRows.push({\n source: canonical.vaultRelPath,\n raw: link.raw,\n target,\n linkType: link.isEmbed ? \"embed\" : \"wikilink\",\n lineNumber: link.lineNumber,\n });\n }\n }\n\n const insertLinksTx = db.transaction(\n (\n rows: Array<{\n source: string;\n raw: string;\n target: string | null;\n linkType: string;\n lineNumber: number;\n }>,\n ) => {\n for (const row of rows) {\n insertLink.run(row.source, row.raw, row.target, row.linkType, row.lineNumber);\n }\n },\n );\n insertLinksTx(linkRows);\n\n result.linksExtracted = linkRows.length;\n result.deadLinksFound = linkRows.filter((r) => r.target === null).length;\n\n // ---------------------------------------------------------------------------\n // Step 8: Compute and upsert vault_health metrics\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n\n // Count outbound links per source\n const outboundCounts = db\n .prepare(\n `SELECT source_path, COUNT(*) AS cnt FROM vault_links GROUP BY source_path`,\n )\n .all() as Array<{ source_path: string; cnt: number }>;\n\n // Count dead links per source\n const deadLinkCounts = db\n .prepare(\n `SELECT source_path, COUNT(*) AS cnt FROM vault_links\n WHERE target_path IS NULL GROUP BY source_path`,\n )\n .all() as Array<{ source_path: string; cnt: number }>;\n\n // Count inbound links per target\n const inboundCounts = db\n .prepare(\n `SELECT target_path, COUNT(*) AS cnt FROM vault_links\n WHERE target_path IS NOT NULL GROUP BY target_path`,\n )\n .all() as Array<{ target_path: string; cnt: number }>;\n\n // 
Build maps for O(1) lookup\n const outboundMap = new Map<string, number>(\n outboundCounts.map((r) => [r.source_path, r.cnt]),\n );\n const deadMap = new Map<string, number>(\n deadLinkCounts.map((r) => [r.source_path, r.cnt]),\n );\n const inboundMap = new Map<string, number>(\n inboundCounts.map((r) => [r.target_path, r.cnt]),\n );\n\n const upsertHealth = db.prepare(`\n INSERT INTO vault_health\n (vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at)\n VALUES (?, ?, ?, ?, ?, ?)\n ON CONFLICT(vault_path) DO UPDATE SET\n inbound_count = excluded.inbound_count,\n outbound_count = excluded.outbound_count,\n dead_link_count = excluded.dead_link_count,\n is_orphan = excluded.is_orphan,\n computed_at = excluded.computed_at\n `);\n\n const computedAt = Date.now();\n let orphanCount = 0;\n\n const upsertHealthTx = db.transaction((groups: InodeGroup[]) => {\n for (const group of groups) {\n const path = group.canonical.vaultRelPath;\n const inbound = inboundMap.get(path) ?? 0;\n const outbound = outboundMap.get(path) ?? 0;\n const dead = deadMap.get(path) ?? 0;\n const isOrphan = inbound === 0 ? 
1 : 0;\n if (isOrphan) orphanCount++;\n upsertHealth.run(path, inbound, outbound, dead, isOrphan, computedAt);\n }\n });\n upsertHealthTx(inodeGroups);\n\n result.orphansFound = orphanCount;\n result.elapsed = Date.now() - startTime;\n\n return result;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AA4DA,MAAM,kBAAkB;;AAGxB,MAAM,kBAAkB;;AAGxB,MAAM,oBAAoB;;;;;AAM1B,MAAM,kBAAkB,IAAI,IAAI;CAE9B;CAEA;CACA;CACA;CAEA;CACA;CACA;CACA;CACA;CAEA;CACA;CACA;CAEA;CACA;CAEA;CACA;CACD,CAAC;AAMF,SAAS,WAAW,SAAyB;AAC3C,QAAO,WAAW,SAAS,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;;AAG3D,SAAS,QACP,WACA,MACA,YACA,WACA,SACQ;AACR,QAAO,WAAW,SAAS,CACxB,OAAO,GAAG,UAAU,GAAG,KAAK,GAAG,WAAW,GAAG,UAAU,GAAG,UAAU,CACpE,OAAO,MAAM;;AAGlB,SAAS,mBAAkC;AACzC,QAAO,IAAI,SAAS,YAAY,aAAa,QAAQ,CAAC;;;;;;;;;;;;;;;;;;;AAwBxD,SAAgB,iBACd,WACA,MACa;CACb,MAAM,WAAW,MAAM,YAAY;CACnC,MAAM,WAAW,MAAM,YAAY;CAEnC,MAAM,UAAuB,EAAE;CAC/B,MAAM,8BAAc,IAAI,KAAa;CAErC,SAAS,KAAK,KAAa,OAAqB;AAC9C,MAAI,QAAQ,UAAU,SAAU;AAChC,MAAI,QAAQ,SAAU;EAGtB,IAAI;AACJ,MAAI;AACF,aAAU,SAAS,IAAI;UACjB;AACN;;EAGF,MAAM,SAAS,GAAG,QAAQ,IAAI,GAAG,QAAQ;AACzC,MAAI,YAAY,IAAI,OAAO,CAAE;AAC7B,cAAY,IAAI,OAAO;EAEvB,IAAI;AACJ,MAAI;AACF,aAAU,YAAY,KAAK;IAAE,eAAe;IAAM,UAAU;IAAQ,CAAC;UAC/D;AACN;;AAGF,OAAK,MAAM,SAAS,SAAS;AAC3B,OAAI,QAAQ,UAAU,SAAU;AAChC,OAAI,gBAAgB,IAAI,MAAM,KAAK,CAAE;GAErC,MAAM,OAAO,KAAK,KAAK,MAAM,KAAK;AAElC,OAAI,MAAM,gBAAgB,EAAE;IAE1B,IAAI;AACJ,QAAI;AACF,kBAAa,SAAS,KAAK;YACrB;AACN;;AAGF,QAAI,WAAW,aAAa,EAC1B;SAAI,CAAC,gBAAgB,IAAI,MAAM,KAAK,CAClC,MAAK,MAAM,QAAQ,EAAE;eAEd,WAAW,QAAQ,IAAI,MAAM,KAAK,SAAS,MAAM,CAC1D,SAAQ,KAAK;KACX,SAAS;KACT,cAAc,SAAS,WAAW,KAAK;KACvC,OAAO,WAAW;KAClB,QAAQ,WAAW;KACpB,CAAC;cAEK,MAAM,aAAa,CAC5B,MAAK,MAAM,QAAQ,EAAE;YACZ,MAAM,QAAQ,IAAI,MAAM,KAAK,SAAS,MAAM,EAAE;IACvD,IAAI;AACJ,QAAI;AACF,gBAAW,SAAS,KAAK;YACnB;AACN;;AAEF,YAAQ,KAAK;KACX,SAAS;KACT,cAAc,SAAS,WAAW,KAAK;KACvC,OAAO,SAAS;KAChB,QAAQ,SAAS;KAClB,CAAC;;;;AAKR,KAAI,WAAW,UAAU,CACvB,MAAK,WAAW,EAAE;AAGpB,QAAO;;;;;;;;;AAcT,SAAgB,mBAAmB,OAAkC;CACnE,MAAM,yBAAS,IAAI,KAA0B;AAE7C,MAAK,MAAM,QAAQ,OAAO;EACxB,MAA
M,MAAM,GAAG,KAAK,OAAO,GAAG,KAAK;EACnC,MAAM,WAAW,OAAO,IAAI,IAAI;AAChC,MAAI,SACF,UAAS,KAAK,KAAK;MAEnB,QAAO,IAAI,KAAK,CAAC,KAAK,CAAC;;CAI3B,MAAM,SAAuB,EAAE;AAE/B,MAAK,MAAM,SAAS,OAAO,QAAQ,EAAE;AACnC,MAAI,MAAM,WAAW,EAAG;EAUxB,MAAM,CAAC,WAAW,GAAG,WAPN,CAAC,GAAG,MAAM,CAAC,MAAM,GAAG,MAAM;GACvC,MAAM,UAAU,EAAE,aAAa,MAAM,MAAM,IAAI,EAAE,EAAE;GACnD,MAAM,UAAU,EAAE,aAAa,MAAM,MAAM,IAAI,EAAE,EAAE;AACnD,OAAI,WAAW,OAAQ,QAAO,SAAS;AACvC,UAAO,EAAE,aAAa,SAAS,EAAE,aAAa;IAC9C;AAGF,SAAO,KAAK;GAAE;GAAW;GAAS,CAAC;;AAGrC,QAAO;;;;;;;;;;;;;;;AAoBT,SAAgB,eAAe,SAA+B;CAC5D,MAAM,QAAsB,EAAE;CAC9B,MAAM,QAAQ,QAAQ,MAAM,KAAK;CAGjC,IAAI,iBAAiB;AACrB,KAAI,QAAQ,WAAW,MAAM,EAAE;EAC7B,MAAM,aAAa,QAAQ,QAAQ,SAAS,EAAE;AAC9C,MAAI,eAAe,GACjB,kBAAiB,QAAQ,MAAM,GAAG,aAAa,EAAE,CAAC,MAAM,KAAK,CAAC,SAAS;;CAK3E,MAAM,aAAa;AAEnB,MAAK,IAAI,UAAU,GAAG,UAAU,MAAM,QAAQ,WAAW;EACvD,MAAM,OAAO,MAAM;EACnB,MAAM,aAAa,UAAU;AAE7B,aAAW,YAAY;EACvB,IAAI;AACJ,UAAQ,QAAQ,WAAW,KAAK,KAAK,MAAM,MAAM;GAC/C,MAAM,UAAU,MAAM,OAAO;GAC7B,MAAM,QAAQ,MAAM;GAGpB,MAAM,UAAU,MAAM,QAAQ,IAAI;GAClC,MAAM,aAAa,YAAY,KAAK,QAAQ,MAAM,MAAM,GAAG,QAAQ;GACnE,MAAM,QAAQ,YAAY,KAAK,OAAO,MAAM,MAAM,UAAU,EAAE;GAG9D,MAAM,UAAU,WAAW,QAAQ,IAAI;GACvC,MAAM,MAAM,YAAY,KAAK,WAAW,MAAM,GAAG,WAAW,MAAM,GAAG,QAAQ,CAAC,MAAM;AAEpF,OAAI,CAAC,IAAK;GAGV,MAAM,gBAAgB,UAAU;AAChC,SAAM,KAAK;IACT;IACA,OAAO,OAAO,MAAM,IAAI;IACxB;IACA,SAAS,WAAW,CAAC;IACtB,CAAC;;;AAIN,QAAO;;;;;;;;;AAcT,SAAgB,eAAe,OAA2C;CACxE,MAAM,wBAAQ,IAAI,KAAuB;AAEzC,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,OAAO,SAAS,KAAK,cAAc,MAAM,CAAC,aAAa;EAC7D,MAAM,WAAW,MAAM,IAAI,KAAK;AAChC,MAAI,SACF,UAAS,KAAK,KAAK,aAAa;MAEhC,OAAM,IAAI,MAAM,CAAC,KAAK,aAAa,CAAC;;AAIxC,QAAO;;;;;;;;;;;;;;;;;;;AAwBT,SAAgB,gBACd,KACA,WACA,YACe;AACf,KAAI,CAAC,IAAK,QAAO;AAGjB,KAAI,IAAI,SAAS,IAAI,EAAE;EACrB,MAAM,aAAa,UAAU,IAAI;EACjC,MAAM,eAAe,WAAW,SAAS,MAAM,GAAG,aAAa,aAAa;AAG5E,OAAK,MAAM,GAAG,UAAU,UACtB,MAAK,MAAM,KAAK,OAAO;AACrB,OAAI,MAAM,gBAAgB,MAAM,WAAY,QAAO;AACnD,OAAI,EAAE,aAAa,KAAK,aAAa,aAAa,CAAE,QAAO;;;CASjE,MAAM,UAAU,SAAS,IAAI,CAC1B,QAAQ,UAAU,GAAG,CACrB,aAAa,CACb,MAAM;AAET,KAAI,CAAC,Q
AAS,QAAO;CAErB,MAAM,aAAa,UAAU,IAAI,QAAQ;AAEzC,KAAI,CAAC,cAAc,WAAW,WAAW,EACvC,QAAO;AAGT,KAAI,WAAW,WAAW,EACxB,QAAO,WAAW;CAIpB,MAAM,YAAY,QAAQ,WAAW;CAErC,IAAI,WAA0B;CAC9B,IAAI,gBAAgB;CACpB,IAAI,cAAc;AAElB,MAAK,MAAM,aAAa,YAAY;EAElC,MAAM,YAAY,mBAAmB,WADhB,QAAQ,UAAU,CACsB;EAC7D,MAAM,UAAU,UAAU;AAE1B,MACE,YAAY,iBACX,cAAc,iBAAiB,UAAU,aAC1C;AACA,mBAAgB;AAChB,iBAAc;AACd,cAAW;;;AAIf,QAAO;;;;;;;;AAST,SAAS,mBAAmB,GAAW,GAAmB;AACxD,KAAI,MAAM,OAAO,MAAM,IAAK,QAAO;CACnC,MAAM,SAAS,MAAM,MAAM,EAAE,GAAG,EAAE,MAAM,IAAI;CAC5C,MAAM,SAAS,MAAM,MAAM,EAAE,GAAG,EAAE,MAAM,IAAI;CAC5C,IAAI,QAAQ;CACZ,MAAM,MAAM,KAAK,IAAI,OAAO,QAAQ,OAAO,OAAO;AAClD,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,IACvB,KAAI,OAAO,OAAO,OAAO,GACvB;KAEA;AAGJ,QAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiCT,eAAsB,WACpB,IACA,gBACA,WAC2B;CAC3B,MAAM,YAAY,KAAK,KAAK;CAE5B,MAAM,SAA2B;EAC/B,cAAc;EACd,eAAe;EACf,cAAc;EACd,iBAAiB;EACjB,gBAAgB;EAChB,gBAAgB;EAChB,cAAc;EACd,SAAS;EACV;CAMD,MAAM,WAAW,iBAAiB,UAAU;CAM5C,MAAM,cAAc,mBAAmB,SAAS;CAMhD,MAAM,YAAY,eAAe,SAAS;CAM1C,MAAM,iBAAiB,GAAG,QACxB,oDACD;CAED,MAAM,oBAAoB,GAAG,QAC3B,iEACD;CAED,MAAM,YAAY,GAAG,QAAQ,sCAAsC;CAEnE,MAAM,eAAe,GAAG,QACtB,8DACD;CAED,MAAM,cAAc,GAAG,QAAQ;;;IAG7B;CAEF,MAAM,YAAY,GAAG,QAAQ;;;IAG3B;CAEF,MAAM,kBAAkB,GAAG,QAAQ;;;;;;;;;IASjC;AAMF,OAAM,kBAAkB;CACxB,IAAI,kBAAkB;AAEtB,MAAK,MAAM,SAAS,aAAa;AAE/B,MAAI,mBAAmB,mBAAmB;AACxC,SAAM,kBAAkB;AACxB,qBAAkB;;AAEpB;EAEA,MAAM,EAAE,cAAc;EAGtB,IAAI;AACJ,MAAI;AACF,aAAU,aAAa,UAAU,SAAS,OAAO;UAC3C;AACN,UAAO;AACP;;EAGF,MAAM,OAAO,WAAW,QAAQ;AAOhC,MAJiB,eAAe,IAAI,UAAU,aAAa,EAI7C,SAAS,MAAM;AAC3B,UAAO;AACP;;EAIF,MAAM,cAAc,kBAAkB,IACpC,gBACA,UAAU,aACX;AAED,KAAG,kBAAkB;AACnB,QAAK,MAAM,OAAO,YAChB,WAAU,IAAI,IAAI,GAAG;AAEvB,gBAAa,IAAI,gBAAgB,UAAU,aAAa;IACxD,EAAE;EAGJ,MAAM,SAAS,cAAc,QAAQ;EACrC,MAAM,YAAY,KAAK,KAAK;EAG5B,MAAM,aAAa,cAAc,KAAK,QAAQ;EAC9C,MAAM,QAAQ,aACV,WAAW,GAAI,MAAM,GACrB,SAAS,UAAU,cAAc,MAAM;AAE3C,KAAG,kBAAkB;AACnB,QAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;IACtC,MAAM,QAAQ,OAAO;IACrB,MAAM,KAAK,QACT,gBACA,UAAU,cACV,GACA,MAAM,WACN,MAAM,QACP;AACD,gBAAY,IACV,IACA,gBACA,SACA,
SACA,UAAU,cACV,MAAM,WACN,MAAM,SACN,MAAM,MACN,MAAM,MACN,UACD;AACD,cAAU,IACR,MAAM,MACN,IACA,gBACA,UAAU,cACV,SACA,SACA,MAAM,WACN,MAAM,QACP;;AAEH,mBAAgB,IACd,UAAU,cACV,UAAU,OACV,UAAU,QACV,MACA,OACA,UACD;IACD,EAAE;AAEJ,SAAO;AACP,SAAO,iBAAiB,OAAO;;AAOjC,OAAM,kBAAkB;AAKxB,IAAG,KAAK,4BAA4B;CAEpC,MAAM,cAAc,GAAG,QAAQ;;;IAG7B;AAeF,CAbwB,GAAG,aAAa,WAAyB;AAC/D,OAAK,MAAM,SAAS,OAClB,MAAK,MAAM,SAAS,MAAM,SAAS;AACjC,eAAY,IACV,MAAM,cACN,MAAM,UAAU,cAChB,MAAM,OACN,MAAM,OACP;AACD,UAAO;;GAGX,CACc,YAAY;AAM5B,OAAM,kBAAkB;AAExB,IAAG,KAAK,+BAA+B;CAEvC,MAAM,kBAAkB,GAAG,QAAQ;;IAEjC;CAEF,MAAM,oBAAoB,GAAG,aAC1B,YAAqC;AACpC,OAAK,MAAM,CAAC,MAAM,SAAS,QACzB,iBAAgB,IAAI,MAAM,KAAK;GAGpC;CAED,MAAM,cAAuC,EAAE;AAC/C,MAAK,MAAM,CAAC,MAAM,UAAU,UAC1B,MAAK,MAAM,QAAQ,MACjB,aAAY,KAAK,CAAC,MAAM,KAAK,CAAC;AAGlC,mBAAkB,YAAY;AAM9B,OAAM,kBAAkB;AAExB,IAAG,KAAK,0BAA0B;CAElC,MAAM,aAAa,GAAG,QAAQ;;;;IAI5B;CAGF,MAAM,WAMD,EAAE;AAEP,MAAK,MAAM,SAAS,aAAa;EAC/B,MAAM,EAAE,cAAc;EAEtB,IAAI;AACJ,MAAI;AACF,aAAU,aAAa,UAAU,SAAS,OAAO;UAC3C;AACN;;EAGF,MAAM,cAAc,eAAe,QAAQ;AAC3C,OAAK,MAAM,QAAQ,aAAa;GAC9B,MAAM,SAAS,gBAAgB,KAAK,KAAK,WAAW,UAAU,aAAa;AAC3E,YAAS,KAAK;IACZ,QAAQ,UAAU;IAClB,KAAK,KAAK;IACV;IACA,UAAU,KAAK,UAAU,UAAU;IACnC,YAAY,KAAK;IAClB,CAAC;;;AAmBN,CAfsB,GAAG,aAErB,SAOG;AACH,OAAK,MAAM,OAAO,KAChB,YAAW,IAAI,IAAI,QAAQ,IAAI,KAAK,IAAI,QAAQ,IAAI,UAAU,IAAI,WAAW;GAGlF,CACa,SAAS;AAEvB,QAAO,iBAAiB,SAAS;AACjC,QAAO,iBAAiB,SAAS,QAAQ,MAAM,EAAE,WAAW,KAAK,CAAC;AAMlE,OAAM,kBAAkB;CAGxB,MAAM,iBAAiB,GACpB,QACC,4EACD,CACA,KAAK;CAGR,MAAM,iBAAiB,GACpB,QACC;uDAED,CACA,KAAK;CAGR,MAAM,gBAAgB,GACnB,QACC;2DAED,CACA,KAAK;CAGR,MAAM,cAAc,IAAI,IACtB,eAAe,KAAK,MAAM,CAAC,EAAE,aAAa,EAAE,IAAI,CAAC,CAClD;CACD,MAAM,UAAU,IAAI,IAClB,eAAe,KAAK,MAAM,CAAC,EAAE,aAAa,EAAE,IAAI,CAAC,CAClD;CACD,MAAM,aAAa,IAAI,IACrB,cAAc,KAAK,MAAM,CAAC,EAAE,aAAa,EAAE,IAAI,CAAC,CACjD;CAED,MAAM,eAAe,GAAG,QAAQ;;;;;;;;;;IAU9B;CAEF,MAAM,aAAa,KAAK,KAAK;CAC7B,IAAI,cAAc;AAalB,CAXuB,GAAG,aAAa,WAAyB;AAC9D,OAAK,MAAM,SAAS,QAAQ;GAC1B,MAAM,OAAO,MAAM,UAAU;GAC7B,MAAM,UAAU,WAAW,IAAI,KAAK,IAAI;GACxC,MAAM,WAAW,YAA
Y,IAAI,KAAK,IAAI;GAC1C,MAAM,OAAO,QAAQ,IAAI,KAAK,IAAI;GAClC,MAAM,WAAW,YAAY,IAAI,IAAI;AACrC,OAAI,SAAU;AACd,gBAAa,IAAI,MAAM,SAAS,UAAU,MAAM,UAAU,WAAW;;GAEvE,CACa,YAAY;AAE3B,QAAO,eAAe;AACtB,QAAO,UAAU,KAAK,KAAK,GAAG;AAE9B,QAAO"}
|