kb-core 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63) hide show
  1. package/dist/config.d.ts +22 -0
  2. package/dist/config.d.ts.map +1 -0
  3. package/dist/config.test.d.ts +2 -0
  4. package/dist/config.test.d.ts.map +1 -0
  5. package/dist/db.d.ts +5 -0
  6. package/dist/db.d.ts.map +1 -0
  7. package/dist/db.test.d.ts +2 -0
  8. package/dist/db.test.d.ts.map +1 -0
  9. package/dist/index.cjs +1391 -0
  10. package/dist/index.cjs.map +1 -0
  11. package/dist/index.d.ts +28 -0
  12. package/dist/index.d.ts.map +1 -0
  13. package/dist/index.js +1339 -0
  14. package/dist/index.js.map +1 -0
  15. package/dist/indexer.d.ts +9 -0
  16. package/dist/indexer.d.ts.map +1 -0
  17. package/dist/indexer.test.d.ts +2 -0
  18. package/dist/indexer.test.d.ts.map +1 -0
  19. package/dist/ingest-types.d.ts +19 -0
  20. package/dist/ingest-types.d.ts.map +1 -0
  21. package/dist/ingest.d.ts +14 -0
  22. package/dist/ingest.d.ts.map +1 -0
  23. package/dist/ingest.test.d.ts +2 -0
  24. package/dist/ingest.test.d.ts.map +1 -0
  25. package/dist/init.d.ts +6 -0
  26. package/dist/init.d.ts.map +1 -0
  27. package/dist/init.test.d.ts +2 -0
  28. package/dist/init.test.d.ts.map +1 -0
  29. package/dist/integration.test.d.ts +2 -0
  30. package/dist/integration.test.d.ts.map +1 -0
  31. package/dist/lint.d.ts +16 -0
  32. package/dist/lint.d.ts.map +1 -0
  33. package/dist/lint.test.d.ts +2 -0
  34. package/dist/lint.test.d.ts.map +1 -0
  35. package/dist/llm.d.ts +10 -0
  36. package/dist/llm.d.ts.map +1 -0
  37. package/dist/llm.test.d.ts +2 -0
  38. package/dist/llm.test.d.ts.map +1 -0
  39. package/dist/log-parser.d.ts +11 -0
  40. package/dist/log-parser.d.ts.map +1 -0
  41. package/dist/log-parser.test.d.ts +2 -0
  42. package/dist/log-parser.test.d.ts.map +1 -0
  43. package/dist/markdown.d.ts +11 -0
  44. package/dist/markdown.d.ts.map +1 -0
  45. package/dist/markdown.test.d.ts +2 -0
  46. package/dist/markdown.test.d.ts.map +1 -0
  47. package/dist/project.d.ts +12 -0
  48. package/dist/project.d.ts.map +1 -0
  49. package/dist/project.test.d.ts +2 -0
  50. package/dist/project.test.d.ts.map +1 -0
  51. package/dist/query.d.ts +11 -0
  52. package/dist/query.d.ts.map +1 -0
  53. package/dist/query.test.d.ts +2 -0
  54. package/dist/query.test.d.ts.map +1 -0
  55. package/dist/search.d.ts +14 -0
  56. package/dist/search.d.ts.map +1 -0
  57. package/dist/search.test.d.ts +2 -0
  58. package/dist/search.test.d.ts.map +1 -0
  59. package/dist/source-reader.d.ts +9 -0
  60. package/dist/source-reader.d.ts.map +1 -0
  61. package/dist/source-reader.test.d.ts +2 -0
  62. package/dist/source-reader.test.d.ts.map +1 -0
  63. package/package.json +32 -0
package/dist/index.js ADDED
@@ -0,0 +1,1339 @@
1
+ // src/init.js
2
+ import { mkdir, writeFile, access, rm } from "fs/promises";
3
+ import { join, basename } from "path";
4
+ import TOML from "@iarna/toml";
5
/**
 * Pick the project name: an explicit, non-empty `options.name` wins,
 * otherwise the last path segment of `options.directory` is used.
 */
function resolveProjectName(options) {
  if (options.name) {
    return options.name;
  }
  return basename(options.directory);
}
8
/**
 * Render the default `.kb/config.toml` for a freshly initialized project,
 * with a commented-out [dependencies] example appended after the TOML
 * produced by the serializer.
 */
function buildConfigToml(projectName) {
  const defaults = {
    project: { name: projectName, version: "0.1.0" },
    directories: { sources: "sources", wiki: "wiki" },
    llm: { provider: "anthropic", model: "claude-sonnet-4-20250514" }
  };
  const rendered = TOML.stringify(defaults);
  const depsExample = '\n[dependencies]\n# shared-glossary = { path = "../shared-glossary" }\n';
  return rendered + depsExample;
}
26
/**
 * Render the static `.kb/schema.md` document written at init time. This
 * markdown tells both the `kb` CLI and any LLM operating on the wiki what
 * the conventions are: directory layout, required frontmatter, page
 * templates, wikilink rules, and the ingest/query/lint workflows.
 * (`\u2014` escapes are em dashes.)
 *
 * @returns {string} The complete schema.md content.
 */
function buildSchemaMd() {
  return `# KB Schema \u2014 LLM Instructions

This file defines the conventions for this knowledge base. The \`kb\` CLI and any
LLM operating on this wiki MUST follow these rules.

---

## Wiki Structure Conventions

- All pages live under the \`wiki/\` directory.
- \`wiki/_index.md\` is the wiki root and serves as a table of contents.
- Sub-topics may be organised into sub-directories: \`wiki/<topic>/_index.md\`.
- File names use kebab-case, e.g. \`wiki/authentication-flow.md\`.
- Every page must have a valid YAML frontmatter block.

---

## Frontmatter Schema

Every wiki page must begin with a YAML frontmatter block:

\`\`\`yaml
---
title: <Human-readable page title>
tags: [tag1, tag2] # optional; array of lowercase strings
created: <ISO 8601 date> # e.g. 2026-04-05
updated: <ISO 8601 date> # updated whenever content changes
source: <path or URL> # optional; original source material
---
\`\`\`

Required fields: \`title\`, \`created\`.

---

## Page Templates

### Entity Page
Use for: people, systems, services, tools.

\`\`\`markdown
---
title: <Entity Name>
tags: [entity]
created: <ISO date>
updated: <ISO date>
---

# <Entity Name>

**Type**: <system | person | service | tool>

## Overview

<One-paragraph description.>

## Key Attributes

- **Attribute**: value

## Related

- [[related-page]]
\`\`\`

### Concept Page
Use for: ideas, patterns, terminology.

\`\`\`markdown
---
title: <Concept Name>
tags: [concept]
created: <ISO date>
updated: <ISO date>
---

# <Concept Name>

## Definition

<Clear definition in 1-3 sentences.>

## Context

<When and why this concept matters in the project.>

## See Also

- [[related-concept]]
\`\`\`

### Source Summary Page
Use for: summarised source material (docs, papers, meetings).

\`\`\`markdown
---
title: Summary \u2014 <Source Title>
tags: [source-summary]
created: <ISO date>
source: <path or URL>
---

# Summary \u2014 <Source Title>

## Key Points

- Point one
- Point two

## Decisions / Implications

<What this source means for the project.>

## Raw Source

See \`sources/<filename>\`.
\`\`\`

### Comparison Page
Use for: side-by-side evaluation of options.

\`\`\`markdown
---
title: Comparison \u2014 <Topic>
tags: [comparison]
created: <ISO date>
updated: <ISO date>
---

# Comparison \u2014 <Topic>

| Criterion | Option A | Option B |
|-----------|----------|----------|
| ... | ... | ... |

## Recommendation

<Which option and why.>
\`\`\`

---

## Wikilink Conventions

- Basic link: \`[[page-name]]\` \u2014 links to \`wiki/page-name.md\`.
- Display text: \`[[page-name|display text]]\` \u2014 renders as "display text".
- Cross-directory: \`[[topic/sub-page]]\`.
- All wikilink targets must be lowercase kebab-case matching the file name without \`.md\`.

---

## Ingest Workflow

1. Place the source file in \`sources/\` (PDF, Markdown, plain text, etc.).
2. Run \`kb ingest sources/<filename>\`.
3. The CLI reads the file, calls the configured LLM, and generates a source-summary
page in \`wiki/\`.
4. The summary page is linked from \`wiki/_index.md\` under **Sources**.
5. An entry is appended to \`log.md\`.

---

## Query Workflow

1. Run \`kb query "<natural-language question>"\`.
2. The CLI searches the wiki index for relevant pages.
3. Relevant page content is assembled into a prompt context.
4. The LLM answers the question, citing wikilinks.
5. The answer is printed to stdout. Nothing is written to disk unless \`--save\` is passed.

---

## Lint Workflow

Run \`kb lint\` to check for:

- Pages missing required frontmatter fields (\`title\`, \`created\`).
- Broken wikilinks (targets that don't resolve to an existing page).
- Pages not reachable from \`wiki/_index.md\`.
- Duplicate page titles across the wiki.
- Frontmatter fields with invalid types or formats.

Lint exits with code 0 on success, 1 if errors are found.
`;
}
212
/**
 * Render the initial `wiki/_index.md` (the wiki root / table of contents).
 *
 * @param {string} projectName - Interpolated into the frontmatter title and H1.
 * @param {string} isoDate - YYYY-MM-DD date for the `created` frontmatter field.
 * @returns {string} Complete markdown document with YAML frontmatter.
 */
function buildIndexMd(projectName, isoDate) {
  return `---
title: ${projectName} Knowledge Base
created: ${isoDate}
---

# ${projectName} Knowledge Base

> This wiki is maintained by the \`kb\` CLI tool.

## Pages

(No pages yet. Use \`kb ingest <source>\` to add content.)

## Sources

(No sources yet.)
`;
}
231
/**
 * Render the initial `log.md` activity log with a single
 * "Project initialized" entry.
 *
 * @param {string} projectName - Project name for the log entry.
 * @param {string} isoDate - YYYY-MM-DD date for the entry heading.
 * @returns {string} Markdown log content.
 */
function buildLogMd(projectName, isoDate) {
  return `# Activity Log

## ${isoDate} \u2014 Project initialized

Project \`${projectName}\` initialized.
`;
}
239
/**
 * Report whether `<directory>/.kb` already exists, i.e. the directory has
 * already been initialized. Any access() failure (missing path,
 * permissions) is treated as "not initialized".
 */
async function kbDirExists(directory) {
  const marker = join(directory, ".kb");
  try {
    await access(marker);
  } catch {
    return false;
  }
  return true;
}
247
/**
 * Initialize a knowledge base in `options.directory`: create `.kb/`,
 * `sources/` and `wiki/`, then write the default config, schema, sources
 * placeholder, wiki index, and activity log. Throws if `.kb/` already
 * exists. On any failure the `.kb/` directory is removed again before the
 * error is rethrown.
 */
async function initProject(options) {
  const { directory } = options;
  const projectName = resolveProjectName(options);
  if (await kbDirExists(directory)) {
    throw new Error(`Knowledge base already initialized: .kb/ already exists in ${directory}`);
  }
  // YYYY-MM-DD portion of the current UTC timestamp.
  const isoDate = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
  try {
    const dirs = [".kb", "sources", "wiki"];
    await Promise.all(dirs.map((d) => mkdir(join(directory, d), { recursive: true })));
    const files = [
      [join(directory, ".kb", "config.toml"), buildConfigToml(projectName)],
      [join(directory, ".kb", "schema.md"), buildSchemaMd()],
      [join(directory, "sources", ".gitkeep"), ""],
      [join(directory, "wiki", "_index.md"), buildIndexMd(projectName, isoDate)],
      [join(directory, "log.md"), buildLogMd(projectName, isoDate)]
    ];
    await Promise.all(files.map(([path, content]) => writeFile(path, content, "utf8")));
  } catch (error) {
    // Best-effort rollback: remove the .kb/ marker so a retry is possible.
    await rm(join(directory, ".kb"), { recursive: true, force: true });
    throw error;
  }
}
272
+
273
+ // src/config.js
274
+ import { readFile } from "fs/promises";
275
+ import TOML2 from "@iarna/toml";
276
// LLM providers accepted for `llm.provider` in config.toml; parseConfig
// validates against this list and createLlmAdapter switches on it.
var VALID_PROVIDERS = ["anthropic", "openai", "ollama"];
277
/**
 * Validate that a configured directory value is a safe, project-relative
 * path. Rejects absolute paths and any path containing a ".." segment.
 *
 * Fix: both "/" and "\" are now treated as separators, so Windows-style
 * traversal ("..\\x") and backslash-absolute paths no longer slip through.
 *
 * @param {string} val - Path value read from config.toml.
 * @param {string} field - Dotted field name used in the error message.
 * @throws {Error} When the path is absolute or contains a ".." segment.
 */
function requireSafeRelativePath(val, field) {
  const isAbsolute = val.startsWith("/") || val.startsWith("\\");
  const hasTraversal = val.split(/[\\/]/).includes("..");
  if (isAbsolute || hasTraversal) {
    throw new Error(`Invalid config: ${field} must be a safe relative path, got "${val}"`);
  }
}
282
/**
 * Fetch a required field from a parsed TOML section, insisting it is a
 * non-empty (after trimming) string.
 *
 * @throws {Error} Naming "<context>.<key>" when the field is missing,
 *   non-string, or blank.
 */
function requireString(obj, key, context) {
  const value = obj[key];
  const missing = typeof value !== "string" || value.trim() === "";
  if (missing) {
    throw new Error(`Invalid config: missing required field "${context}.${key}"`);
  }
  return value;
}
289
/**
 * Fetch a required TOML table (section) from the parsed config, rejecting
 * anything that is not a plain object (missing, null, scalar, or array).
 *
 * @throws {Error} Naming the missing "[key]" section.
 */
function requireSection(obj, key) {
  const section = obj[key];
  const isTable = typeof section === "object" && section !== null && !Array.isArray(section);
  if (!isTable) {
    throw new Error(`Invalid config: missing required section "[${key}]"`);
  }
  return section;
}
296
/**
 * Normalize the optional [dependencies] table from a parsed config. Only
 * sub-tables are kept, and within each only the string-valued `path`,
 * `git`, `branch` and `mode` keys are copied.
 *
 * @param {unknown} rawDeps - Value of `parsed.dependencies`, if any.
 * @returns {Record<string, object>} Sanitized dependency entries.
 */
function normalizeConfigDependencies(rawDeps) {
  const dependencies = {};
  const isTable = rawDeps !== void 0 && rawDeps !== null && typeof rawDeps === "object" && !Array.isArray(rawDeps);
  if (!isTable) {
    return dependencies;
  }
  for (const [depKey, depVal] of Object.entries(rawDeps)) {
    if (typeof depVal !== "object" || depVal === null || Array.isArray(depVal)) {
      continue;
    }
    const entry = {};
    for (const field of ["path", "git", "branch", "mode"]) {
      if (typeof depVal[field] === "string") {
        entry[field] = depVal[field];
      }
    }
    dependencies[depKey] = entry;
  }
  return dependencies;
}

/**
 * Read and validate `.kb/config.toml`.
 *
 * Restructured: the inline [dependencies] normalization is extracted into
 * normalizeConfigDependencies so the main flow reads as straight-line
 * validation. Behavior and all error messages are unchanged.
 *
 * @param {string} configPath - Absolute path to the config file.
 * @returns {Promise<object>} { project, directories, llm, dependencies }.
 * @throws {Error} When the file is unreadable, not valid TOML, or fails
 *   schema validation (missing sections/fields, unsafe paths, unknown
 *   provider).
 */
async function parseConfig(configPath) {
  let raw;
  try {
    raw = await readFile(configPath, "utf8");
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    throw new Error(`Config file not found: ${configPath}
${message}`);
  }
  let parsed;
  try {
    parsed = TOML2.parse(raw);
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    throw new Error(`Invalid TOML in config file ${configPath}: ${message}`);
  }
  // [project]
  const project = requireSection(parsed, "project");
  const name = requireString(project, "name", "project");
  const version = requireString(project, "version", "project");
  // [directories] — both values must be safe project-relative paths.
  const directories = requireSection(parsed, "directories");
  const sources = requireString(directories, "sources", "directories");
  requireSafeRelativePath(sources, "directories.sources");
  const wiki = requireString(directories, "wiki", "directories");
  requireSafeRelativePath(wiki, "directories.wiki");
  // [llm] — provider must be one of VALID_PROVIDERS.
  const llm = requireSection(parsed, "llm");
  const provider = requireString(llm, "provider", "llm");
  if (!VALID_PROVIDERS.includes(provider)) {
    throw new Error(`Invalid config: llm.provider must be one of ${VALID_PROVIDERS.join(", ")}, got "${provider}"`);
  }
  const model = requireString(llm, "model", "llm");
  return {
    project: { name, version },
    directories: { sources, wiki },
    llm: { provider, model },
    dependencies: normalizeConfigDependencies(parsed["dependencies"])
  };
}
349
+
350
+ // src/project.js
351
+ import { access as access2 } from "fs/promises";
352
+ import { join as join2, dirname, resolve } from "path";
353
/**
 * True when `dir` contains a readable `.kb/config.toml`, i.e. `dir` is a
 * kb project root.
 */
async function hasKbDir(dir) {
  const configPath = join2(dir, ".kb", "config.toml");
  try {
    await access2(configPath);
  } catch {
    return false;
  }
  return true;
}
361
/**
 * Walk upward from `startDir` toward the filesystem root looking for a
 * directory containing `.kb/config.toml`. Returns the first match, or
 * null when no ancestor is a kb project.
 */
async function findProjectRoot(startDir) {
  for (let dir = resolve(startDir); ; ) {
    if (await hasKbDir(dir)) {
      return dir;
    }
    const parent = dirname(dir);
    if (parent === dir) {
      // dirname of the root is itself — nothing left to climb.
      return null;
    }
    dir = parent;
  }
}
374
/**
 * Locate the enclosing kb project starting at `startDir`, parse its
 * config, and return the resolved project handle (name, root, .kb dir,
 * sources/wiki dirs, config). Throws when no project is found or the
 * config is invalid.
 */
async function loadProject(startDir) {
  const root = await findProjectRoot(startDir);
  if (root === null) {
    throw new Error(`No kb project found. Run "kb init" to initialize a knowledge base in the current directory.`);
  }
  const kbDir = join2(root, ".kb");
  const config = await parseConfig(join2(kbDir, "config.toml"));
  const sourcesDir = join2(root, config.directories.sources);
  const wikiDir = join2(root, config.directories.wiki);
  return { name: config.project.name, root, kbDir, sourcesDir, wikiDir, config };
}
391
/**
 * Like loadProject, but a "no kb project found" failure yields null
 * instead of throwing. Any other error (bad config, I/O) still
 * propagates.
 */
async function tryLoadProject(startDir) {
  try {
    return await loadProject(startDir);
  } catch (err) {
    const notFound = err instanceof Error && /no kb project found/i.test(err.message);
    if (notFound) {
      return null;
    }
    throw err;
  }
}
401
+
402
+ // src/db.js
403
+ import Database from "better-sqlite3";
404
+ import { join as join3 } from "path";
405
// SQLite schema for the search index (.kb/index.db):
//  - `pages` is an FTS5 virtual table holding the searchable text
//    (path, title, content, tags, project) with Porter stemming.
//  - `page_meta` tracks per-file sha256/mtime so indexing can skip
//    unchanged files, plus word count, frontmatter and outgoing
//    wikilinks serialized as JSON.
var SCHEMA_SQL = `
CREATE VIRTUAL TABLE IF NOT EXISTS pages USING fts5(
path,
title,
content,
tags,
project,
tokenize='porter unicode61'
);

CREATE TABLE IF NOT EXISTS page_meta (
path TEXT PRIMARY KEY,
sha256 TEXT NOT NULL,
mtime INTEGER NOT NULL,
word_count INTEGER NOT NULL DEFAULT 0,
frontmatter TEXT NOT NULL DEFAULT '{}',
outgoing_links TEXT NOT NULL DEFAULT '[]',
updated_at INTEGER NOT NULL
);
`;
425
/**
 * Open (creating if necessary) the project's SQLite index at
 * `.kb/index.db`, switch it to WAL journaling, and ensure the schema
 * exists. Caller is responsible for closeDb().
 */
function openDb(project) {
  const db = new Database(join3(project.kbDir, "index.db"));
  db.pragma("journal_mode = WAL");
  db.exec(SCHEMA_SQL);
  return db;
}
432
// Close a SQLite handle previously returned by openDb.
function closeDb(db) {
  db.close();
}
435
+
436
+ // src/markdown.js
437
+ import { readFile as readFile2 } from "fs/promises";
438
+ import matter from "gray-matter";
439
// Matches [[target]] or [[target|display]]; capture group 1 is the target.
var WIKILINK_RE = /\[\[([^\]|]+)(?:\|[^\]]+)?\]\]/g;
// Matches an ATX H1 line ("# Title") anywhere in the body (multiline);
// capture group 1 is the heading text.
var H1_RE = /^#\s+(.+)$/m;
441
/**
 * Derive a page title: the frontmatter `title` wins when it is a
 * non-blank string, then the first H1 heading in the body, then the last
 * path segment without its .md extension.
 */
function extractTitle(fm, content, relativePath) {
  const fmTitle = fm["title"];
  if (typeof fmTitle === "string" && fmTitle.trim() !== "") {
    return fmTitle.trim();
  }
  const h1 = /^#\s+(.+)$/m.exec(content);
  if (h1) {
    return h1[1].trim();
  }
  const leaf = relativePath.split("/").pop() ?? relativePath;
  return leaf.replace(/\.md$/i, "");
}
452
/**
 * Flatten the frontmatter `tags` array into a comma-separated string,
 * keeping only string entries. Absent or non-array tags yield "".
 */
function extractTags(fm) {
  const raw = fm["tags"];
  if (!Array.isArray(raw)) {
    return "";
  }
  const names = [];
  for (const tag of raw) {
    if (typeof tag === "string") {
      names.push(tag);
    }
  }
  return names.join(",");
}
458
/**
 * Collect every wikilink target in `content`, in document order. The
 * display part of [[target|display]] is dropped and targets are trimmed.
 * A fresh regex literal is used per call, so no lastIndex state is
 * shared between invocations.
 */
function extractWikiLinks(content) {
  const linkPattern = /\[\[([^\]|]+)(?:\|[^\]]+)?\]\]/g;
  const targets = [];
  for (const match of content.matchAll(linkPattern)) {
    targets.push(match[1].trim());
  }
  return targets;
}
467
/**
 * Count whitespace-delimited words; empty or whitespace-only text is 0.
 */
function countWords(text) {
  const words = text.trim().split(/\s+/).filter((w) => w !== "");
  return words.length;
}
473
/**
 * Read and parse one wiki page into its indexable parts: frontmatter,
 * body, derived title, comma-joined tags, outgoing wikilinks, and word
 * count. When `rawContent` is supplied the file is not read from disk.
 */
async function parsePage(filePath, relativePath, rawContent) {
  const raw = rawContent ?? await readFile2(filePath, "utf8");
  const { data: fm, content } = matter(raw);
  return {
    path: relativePath,
    title: extractTitle(fm, content, relativePath),
    content,
    tags: extractTags(fm),
    frontmatter: fm,
    outgoingLinks: extractWikiLinks(content),
    wordCount: countWords(content)
  };
}
492
+
493
+ // src/indexer.js
494
+ import { createHash } from "crypto";
495
+ import { readFile as readFile3, stat, readdir } from "fs/promises";
496
+ import { join as join4, relative } from "path";
497
/**
 * Recursively list absolute paths of every .md file under `dir`. A
 * missing directory yields []; any other readdir error propagates.
 */
async function collectMdFiles(dir) {
  let entries;
  try {
    entries = await readdir(dir, { recursive: true, withFileTypes: true });
  } catch (err) {
    if (err.code === "ENOENT") {
      return [];
    }
    throw err;
  }
  const paths = [];
  for (const entry of entries) {
    if (entry.isFile() && entry.name.endsWith(".md")) {
      // Dirent.parentPath replaced the deprecated `path` property.
      paths.push(join4(entry.parentPath ?? entry.path, entry.name));
    }
  }
  return paths;
}
510
/**
 * Hex-encoded SHA-256 digest of a string (UTF-8 encoded).
 */
function sha256(content) {
  const hasher = createHash("sha256");
  hasher.update(content);
  return hasher.digest("hex");
}
513
/**
 * Replace a page's FTS row and upsert its metadata row. The FTS table has
 * no primary key, so the old row is deleted before the fresh insert;
 * frontmatter and outgoing links are stored as JSON.
 */
function upsertParsedPage(stmts, project, page, hash, mtime) {
  stmts.deletePages.run(page.path);
  stmts.insertPage.run(page.path, page.title, page.content, page.tags, project.name);
  const frontmatterJson = JSON.stringify(page.frontmatter);
  const linksJson = JSON.stringify(page.outgoingLinks);
  stmts.upsertMeta.run(page.path, hash, mtime, page.wordCount, frontmatterJson, linksJson, Date.now());
}
518
/**
 * Incrementally (re)index the project's wiki into .kb/index.db.
 *
 * For each .md file under wikiDir: skip it when its sha256 matches the
 * stored page_meta row, otherwise parse it and upsert both the FTS row
 * and its metadata inside a per-file transaction. Afterwards, rows whose
 * paths no longer exist on disk are deleted. Per-file read/stat/parse/
 * write failures are counted, not thrown.
 *
 * @param {object} project - Project handle from loadProject (root, kbDir, wikiDir).
 * @param {boolean} [rebuild=false] - When true, wipe both tables first.
 * @returns {Promise<{indexed: number, skipped: number, deleted: number, errors: number}>}
 */
async function indexProject(project, rebuild = false) {
  const db = openDb(project);
  try {
    if (rebuild) {
      db.exec("DELETE FROM pages; DELETE FROM page_meta;");
    }
    const files = await collectMdFiles(project.wikiDir);
    const stats = { indexed: 0, skipped: 0, deleted: 0, errors: 0 };
    const getMetaStmt = db.prepare("SELECT sha256 FROM page_meta WHERE path = ?");
    const upsertStmts = {
      deletePages: db.prepare("DELETE FROM pages WHERE path = ?"),
      insertPage: db.prepare("INSERT INTO pages(path, title, content, tags, project) VALUES (?, ?, ?, ?, ?)"),
      upsertMeta: db.prepare(`
INSERT INTO page_meta(path, sha256, mtime, word_count, frontmatter, outgoing_links, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(path) DO UPDATE SET
sha256 = excluded.sha256,
mtime = excluded.mtime,
word_count = excluded.word_count,
frontmatter = excluded.frontmatter,
outgoing_links = excluded.outgoing_links,
updated_at = excluded.updated_at
`)
    };
    const deleteStalePages = db.prepare("DELETE FROM pages WHERE path = ?");
    const deleteStaleMeta = db.prepare("DELETE FROM page_meta WHERE path = ?");
    const listMetaStmt = db.prepare("SELECT path FROM page_meta");
    // Wrap each file's delete+insert+upsert in a single transaction so the
    // FTS row and its metadata can never disagree.
    const processFile = db.transaction((page, hash, mtime) => {
      upsertParsedPage(upsertStmts, project, page, hash, mtime);
    });
    // Paths (relative to project root) seen on disk this run; used below
    // to detect rows for files that were deleted.
    const onDiskPaths = /* @__PURE__ */ new Set();
    for (const absPath of files) {
      const relPath = relative(project.root, absPath);
      onDiskPaths.add(relPath);
      let raw;
      try {
        raw = await readFile3(absPath, "utf8");
      } catch (err) {
        stats.errors++;
        continue;
      }
      const hash = sha256(raw);
      const existing = getMetaStmt.get(relPath);
      // Content hash unchanged since last run — nothing to do.
      if (existing && existing.sha256 === hash) {
        stats.skipped++;
        continue;
      }
      let fileStat;
      try {
        fileStat = await stat(absPath);
      } catch {
        stats.errors++;
        continue;
      }
      let page;
      try {
        // Pass `raw` through so the file is not read a second time.
        page = await parsePage(absPath, relPath, raw);
      } catch {
        stats.errors++;
        continue;
      }
      try {
        processFile(page, hash, Math.floor(fileStat.mtimeMs));
        stats.indexed++;
      } catch {
        stats.errors++;
      }
    }
    // Remove index rows whose source files no longer exist on disk.
    const allMetaPaths = listMetaStmt.all().map((r) => r.path);
    const stalePaths = allMetaPaths.filter((p) => !onDiskPaths.has(p));
    const deleteStale = db.transaction((paths) => {
      for (const p of paths) {
        deleteStalePages.run(p);
        deleteStaleMeta.run(p);
      }
    });
    deleteStale(stalePaths);
    stats.deleted += stalePaths.length;
    return stats;
  } finally {
    closeDb(db);
  }
}
601
+
602
+ // src/search.js
603
/**
 * Convert free text into a safe FTS5 MATCH expression: each whitespace-
 * separated token becomes a quoted phrase with embedded quotes doubled,
 * so user input cannot inject FTS query syntax. Blank input becomes the
 * match-nothing phrase '""'.
 */
function sanitizeFtsQuery(query) {
  const tokens = query.trim().split(/\s+/).filter((t) => t.length > 0);
  if (tokens.length === 0) {
    return '""';
  }
  const phrases = tokens.map((token) => `"${token.replace(/"/g, '""')}"`);
  return phrases.join(" ");
}
609
/**
 * Split the comma-separated tags column back into an array, trimming
 * each entry and dropping empties; null/blank input yields [].
 */
function parseTags(raw) {
  if (!raw || raw.trim() === "") {
    return [];
  }
  const tags = [];
  for (const piece of raw.split(",")) {
    const tag = piece.trim();
    if (tag.length > 0) {
      tags.push(tag);
    }
  }
  return tags;
}
614
/**
 * Full-text search over the indexed wiki. Returns up to `options.limit`
 * (default 10) results for `query` within `projectName`, ordered by bm25
 * rank (best first), each with a content snippet and parsed tag list.
 * `options.tags` narrows results to pages whose tags column contains
 * every given tag (case-insensitive substring match). Blank queries
 * return [].
 */
function searchWiki(db, query, projectName, options) {
  if (!query || query.trim() === "") {
    return [];
  }
  const limit = options?.limit ?? 10;
  const ftsQuery = sanitizeFtsQuery(query.trim());
  const requestedTags = options?.tags ?? [];
  const filterTags = requestedTags.map((t) => t.trim().toLowerCase()).filter((t) => t.length > 0);
  // One bound LIKE clause per tag; values are bound, never interpolated.
  const tagClauses = filterTags.map(() => "AND lower(tags) LIKE ?").join(" ");
  const tagParams = filterTags.map((t) => `%${t}%`);
  const stmt = db.prepare(`
SELECT path, title, tags, bm25(pages) as rank,
snippet(pages, 2, '', '', '...', 8) as snippet
FROM pages
WHERE pages MATCH ? AND project = ?
${tagClauses}
ORDER BY rank
LIMIT ?
`);
  const rows = stmt.all(ftsQuery, projectName, ...tagParams, limit);
  return rows.map((row) => ({
    rank: row.rank,
    path: row.path,
    title: row.title,
    snippet: row.snippet,
    tags: parseTags(row.tags)
  }));
}
642
+
643
+ // src/source-reader.js
644
+ import { readFile as readFile4 } from "fs/promises";
645
+ import { basename as basename2, extname } from "path";
646
/**
 * Normalize a filename: lowercase, with every whitespace character
 * replaced by one hyphen (consecutive whitespace yields consecutive
 * hyphens).
 */
function sanitizeFilename(name) {
  let out = "";
  for (const ch of name.toLowerCase()) {
    out += /\s/.test(ch) ? "-" : ch;
  }
  return out;
}
649
/**
 * Classify a source reference: http(s) URLs are "url"; otherwise the
 * file extension (case-insensitive) decides — .pdf → "pdf",
 * .md → "markdown", everything else "text".
 */
function detectType(sourcePath) {
  const isHttp = sourcePath.startsWith("http://") || sourcePath.startsWith("https://");
  if (isHttp) {
    return "url";
  }
  switch (extname(sourcePath).toLowerCase()) {
    case ".pdf":
      return "pdf";
    case ".md":
      return "markdown";
    default:
      return "text";
  }
}
660
/**
 * Derive a stable local filename for a fetched URL: "<hostname>-<last
 * path segment>", appending ".html" when the segment has no extension
 * and using "index.html" for bare hosts. Unparseable URLs fall back to
 * "url-content.html". (The trailing lowercase/whitespace-to-hyphen step
 * inlines the same normalization sanitizeFilename performs.)
 */
function filenameFromUrl(url) {
  let candidate;
  try {
    const { hostname, pathname } = new URL(url);
    const segments = pathname.split("/").filter(Boolean);
    const leaf = segments.at(-1);
    let pagePart;
    if (!leaf) {
      pagePart = "index.html";
    } else if (leaf.includes(".")) {
      pagePart = leaf;
    } else {
      pagePart = `${leaf}.html`;
    }
    candidate = `${hostname}-${pagePart}`;
  } catch {
    return "url-content.html";
  }
  return candidate.toLowerCase().replace(/\s/g, "-");
}
671
/**
 * Minimal HTML-to-text conversion: drop <script>/<style> blocks, strip
 * all remaining tags, decode the six common entities, and collapse
 * whitespace.
 *
 * Fix: `&amp;` is now decoded LAST. Decoding it first double-decoded
 * escaped entities — e.g. "&amp;lt;" became "<" instead of the literal
 * text "&lt;".
 */
function stripHtml(html) {
  let text = html.replace(/<script[\s\S]*?<\/script>/gi, " ");
  text = text.replace(/<style[\s\S]*?<\/style>/gi, " ");
  text = text.replace(/<[^>]+>/g, " ");
  // Decode entities; &amp; must come last to avoid double-decoding.
  text = text.replace(/&lt;/gi, "<").replace(/&gt;/gi, ">").replace(/&quot;/gi, '"').replace(/&#39;/gi, "'").replace(/&nbsp;/gi, " ").replace(/&amp;/gi, "&");
  text = text.replace(/\s+/g, " ").trim();
  return text;
}
679
/**
 * Extract plain text from a PDF file. The pdf-parse package is imported
 * lazily at call time, so it is only loaded when a PDF is ingested.
 */
async function readPdf(filePath) {
  const mod = await import("pdf-parse");
  const pdfParse = mod.default ?? mod;
  const buffer = await readFile4(filePath);
  const { text } = await pdfParse(buffer);
  return text;
}
685
/**
 * Best-effort SSRF guard: true when `url` targets localhost/loopback, a
 * link-local address, or an RFC 1918 private IPv4 range. Unparseable
 * URLs return false (they will fail later in fetch anyway).
 *
 * Fixes: WHATWG URL reports IPv6 hosts in brackets (e.g. "[::1]"), so
 * the previous `hostname === "::1"` comparison never matched — brackets
 * are now stripped first. Also treats 0.0.0.0 and the whole 127.0.0.0/8
 * loopback block (not just 127.0.0.1) as private.
 */
function isPrivateUrl(url) {
  let hostname;
  try {
    hostname = new URL(url).hostname;
  } catch {
    return false;
  }
  // new URL("http://[::1]/").hostname === "[::1]" — unwrap the brackets.
  const host = hostname.startsWith("[") && hostname.endsWith("]") ? hostname.slice(1, -1) : hostname;
  if (host === "localhost" || host === "::1" || host === "0.0.0.0") {
    return true;
  }
  if (host.startsWith("127.") || host.startsWith("169.254.") || // loopback, link-local
  host.startsWith("10.") || host.startsWith("192.168.")) {
    return true;
  }
  return /^172\.(1[6-9]|2\d|3[01])\./.test(host);
}
694
/**
 * Fetch a public URL and return its visible text (HTML stripped).
 * Refuses private/localhost targets up front and throws on non-2xx
 * responses.
 *
 * NOTE(review): fetch follows redirects, so a public URL redirecting to
 * a private address would bypass the isPrivateUrl check — consider
 * handling redirects manually.
 */
async function fetchUrl(url) {
  if (isPrivateUrl(url)) {
    throw new Error(`Fetching private/localhost URLs is not allowed: ${url}`);
  }
  const response = await fetch(url);
  if (!response.ok) {
    throw new Error(`Failed to fetch URL ${url}: HTTP ${response.status} ${response.statusText}`);
  }
  const html = await response.text();
  return stripHtml(html);
}
705
/**
 * Load source material for ingestion. Returns { type, originalPath,
 * content, filename } where `content` is extracted plain text and
 * `filename` is a sanitized name suitable for saving under sources/.
 */
async function readSource(sourcePath) {
  const type = detectType(sourcePath);
  switch (type) {
    case "url": {
      const content = await fetchUrl(sourcePath);
      return { type, originalPath: sourcePath, content, filename: filenameFromUrl(sourcePath) };
    }
    case "pdf": {
      const content = await readPdf(sourcePath);
      return { type, originalPath: sourcePath, content, filename: sanitizeFilename(basename2(sourcePath)) };
    }
    default: {
      // markdown and plain text are read verbatim as UTF-8.
      const content = await readFile4(sourcePath, "utf8");
      return { type, originalPath: sourcePath, content, filename: sanitizeFilename(basename2(sourcePath)) };
    }
  }
}
723
+
724
+ // src/llm.js
725
/**
 * LLM adapter for the Anthropic Messages API. The SDK is imported lazily
 * inside complete() so it is only loaded when this provider is used.
 * Requires ANTHROPIC_API_KEY in the environment; throws when it is
 * missing or the response contains no text block.
 */
function createAnthropicAdapter(model) {
  const complete = async (messages, systemPrompt) => {
    const apiKey = process.env["ANTHROPIC_API_KEY"];
    if (!apiKey) {
      throw new Error("ANTHROPIC_API_KEY environment variable is not set");
    }
    const sdk = await import("@anthropic-ai/sdk");
    const Anthropic = sdk.default ?? sdk;
    const client = new Anthropic({ apiKey });
    const response = await client.messages.create({
      model,
      max_tokens: 8192,
      system: systemPrompt,
      messages: messages.map(({ role, content }) => ({ role, content }))
    });
    const [block] = response.content;
    if (!block || block.type !== "text") {
      throw new Error("Anthropic returned no text content");
    }
    return block.text;
  };
  return { complete };
}
751
/**
 * LLM adapter for the OpenAI Chat Completions HTTP API. Requires
 * OPENAI_API_KEY in the environment; the system prompt is sent as the
 * leading "system" message. Throws on HTTP errors or an empty response.
 */
function createOpenAiAdapter(model) {
  return {
    async complete(messages, systemPrompt) {
      const apiKey = process.env["OPENAI_API_KEY"];
      if (!apiKey) {
        throw new Error("OPENAI_API_KEY environment variable is not set");
      }
      const payload = {
        model,
        messages: [
          { role: "system", content: systemPrompt },
          ...messages.map(({ role, content }) => ({ role, content }))
        ]
      };
      const response = await fetch("https://api.openai.com/v1/chat/completions", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          Authorization: `Bearer ${apiKey}`
        },
        body: JSON.stringify(payload)
      });
      if (!response.ok) {
        const text = await response.text();
        throw new Error(`OpenAI API error: HTTP ${response.status} \u2014 ${text}`);
      }
      const data = await response.json();
      const content = data.choices[0]?.message?.content;
      if (!content) {
        throw new Error("OpenAI returned no content");
      }
      return content;
    }
  };
}
786
/**
 * LLM adapter for a local Ollama server (OLLAMA_BASE_URL, default
 * http://localhost:11434), using the non-streaming /api/chat endpoint.
 * Throws on HTTP errors or an empty response.
 */
function createOllamaAdapter(model) {
  return {
    async complete(messages, systemPrompt) {
      const baseUrl = process.env["OLLAMA_BASE_URL"] ?? "http://localhost:11434";
      const chat = [
        { role: "system", content: systemPrompt },
        ...messages.map(({ role, content }) => ({ role, content }))
      ];
      const response = await fetch(`${baseUrl}/api/chat`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ model, messages: chat, stream: false })
      });
      if (!response.ok) {
        const text = await response.text();
        throw new Error(`Ollama API error: HTTP ${response.status} \u2014 ${text}`);
      }
      const data = await response.json();
      const content = data.message?.content;
      if (!content) {
        throw new Error("Ollama returned no content");
      }
      return content;
    }
  };
}
816
/**
 * Build the chat-completion adapter for the configured provider
 * (anthropic | openai | ollama). Throws on an unrecognised provider —
 * parseConfig normally rules that out already.
 */
function createLlmAdapter(config) {
  const { provider, model } = config.llm;
  if (provider === "anthropic") {
    return createAnthropicAdapter(model);
  }
  if (provider === "openai") {
    return createOpenAiAdapter(model);
  }
  if (provider === "ollama") {
    return createOllamaAdapter(model);
  }
  throw new Error(`Unsupported LLM provider: ${String(provider)}`);
}
831
+
832
+ // src/ingest.js
833
+ import { readFile as readFile5, writeFile as writeFile2, mkdir as mkdir2, appendFile } from "fs/promises";
834
+ import { join as join5, dirname as dirname2, resolve as resolve2 } from "path";
835
// System prompt for the ingest LLM call: the model receives a new source
// document plus the current wiki state and must reply with bare JSON (no
// markdown fences) in the shape parseIngestResult validates.
var SYSTEM_PROMPT = `You are an AI assistant maintaining a knowledge base wiki.
You will be given a new source document and the current state of the wiki.
Your task is to integrate the new knowledge into the wiki.

Return ONLY a JSON object matching this exact schema (no markdown fences):
{
"summary": { "path": "wiki/sources/<filename>-summary.md", "content": "..." },
"updates": [{ "path": "...", "content": "...", "reason": "..." }],
"newPages": [{ "path": "...", "content": "...", "reason": "..." }],
"indexUpdate": "...",
"logEntry": "..."
}`;
847
/**
 * Guard against path escape: throw unless `absPath` resolves to a
 * location strictly inside `root` (the root itself is rejected, as
 * before).
 *
 * Fix: the containment check previously hard-coded "/" as the separator,
 * so on Windows — where resolve() yields "\\"-separated paths — every
 * path was rejected. The platform separator is now used.
 *
 * @throws {Error} When the resolved path is not inside the project root.
 */
function assertWithinRoot(absPath, root) {
  const resolvedPath = resolve2(absPath);
  // Platform separator; path module's `sep` is not imported in this bundle.
  const sep = process.platform === "win32" ? "\\" : "/";
  const resolvedRoot = resolve2(root) + sep;
  if (!resolvedPath.startsWith(resolvedRoot)) {
    throw new Error(`Unsafe path rejected: "${absPath}" is outside project root`);
  }
}
854
/**
 * Read a UTF-8 file, returning "" instead of throwing when it cannot be
 * read (missing file, permissions, ...).
 */
async function readFileSafe(filePath) {
  let contents = "";
  try {
    contents = await readFile5(filePath, "utf8");
  } catch {
    // Deliberate best-effort: absent context files are treated as empty.
  }
  return contents;
}
861
/**
 * Parse and validate the LLM's ingest response into a typed result object.
 *
 * Tolerates markdown code fences around the JSON (the model sometimes adds
 * them despite instructions). Every structural violation throws an Error
 * whose message starts with "Invalid LLM response:".
 *
 * Improvements: the caught JSON.parse error is now chained via `cause`
 * (previously discarded), and the identical updates/newPages element
 * validation is factored into one helper. All error message strings are
 * unchanged.
 *
 * @param {string} raw - raw completion text from the LLM
 * @returns {{summary: {path: string, content: string},
 *            updates: Array<{path: string, content: string, reason: string}>,
 *            newPages: Array<{path: string, content: string, reason: string}>,
 *            indexUpdate: string, logEntry: string}}
 * @throws {Error} on unparsable JSON or schema violations
 */
function parseIngestResult(raw) {
  // Strip a leading ```/```json fence and a trailing ``` fence, if present.
  const cleaned = raw.replace(/^```(?:json)?\s*/i, "").replace(/\s*```\s*$/i, "").trim();
  let parsed;
  try {
    parsed = JSON.parse(cleaned);
  } catch (err) {
    throw new Error(`Invalid LLM response: could not parse JSON. Raw response: ${cleaned.slice(0, 200)}`, { cause: err });
  }
  if (parsed === null || typeof parsed !== "object" || Array.isArray(parsed)) {
    throw new Error("Invalid LLM response: expected a JSON object");
  }
  const obj = parsed;
  if (!obj["summary"] || typeof obj["summary"] !== "object" || Array.isArray(obj["summary"])) {
    throw new Error('Invalid LLM response: missing "summary" object');
  }
  const summary = obj["summary"];
  if (typeof summary["path"] !== "string" || typeof summary["content"] !== "string") {
    throw new Error('Invalid LLM response: "summary" must have "path" and "content" strings');
  }
  if (!Array.isArray(obj["updates"])) {
    throw new Error('Invalid LLM response: "updates" must be an array');
  }
  if (!Array.isArray(obj["newPages"])) {
    throw new Error('Invalid LLM response: "newPages" must be an array');
  }
  if (typeof obj["indexUpdate"] !== "string") {
    throw new Error('Invalid LLM response: "indexUpdate" must be a string');
  }
  if (typeof obj["logEntry"] !== "string") {
    throw new Error('Invalid LLM response: "logEntry" must be a string');
  }
  return {
    summary: { path: summary["path"], content: summary["content"] },
    updates: validateIngestEntries(obj["updates"], "updates"),
    newPages: validateIngestEntries(obj["newPages"], "newPages"),
    indexUpdate: obj["indexUpdate"],
    logEntry: obj["logEntry"]
  };
}
// Validate one array of {path, content, reason} entries; `key` ("updates" or
// "newPages") is only used to build the error messages.
function validateIngestEntries(entries, key) {
  return entries.map((entry, i) => {
    if (typeof entry !== "object" || entry === null || Array.isArray(entry)) {
      throw new Error(`Invalid LLM response: ${key}[${i}] must be an object`);
    }
    if (typeof entry["path"] !== "string" || typeof entry["content"] !== "string" || typeof entry["reason"] !== "string") {
      throw new Error(`Invalid LLM response: ${key}[${i}] must have path, content, and reason strings`);
    }
    return {
      path: entry["path"],
      content: entry["content"],
      reason: entry["reason"]
    };
  });
}
928
/**
 * Write an ingest result to disk: the summary page, every updated and new
 * wiki page, the refreshed _index.md, a copy of the raw source, and a log
 * entry; then reindex the project.
 *
 * Security fix: the source copy destination is built from `sourceFilename`,
 * which (like every page path here) originates outside this function, but it
 * was the only LLM/caller-derived path written WITHOUT an assertWithinRoot
 * check — a filename containing "../" could escape the project root. The
 * guard is now applied to it as well.
 *
 * @param {{root: string, wikiDir: string, sourcesDir: string}} project
 * @param {ReturnType<typeof parseIngestResult>} result - validated LLM output
 * @param {string} sourceContent - raw text of the ingested source
 * @param {string} sourceFilename - basename under which to store the source
 */
async function applyIngestResult(project, result, sourceContent, sourceFilename) {
  // Summary page.
  const summaryAbsPath = join5(project.root, result.summary.path);
  assertWithinRoot(summaryAbsPath, project.root);
  await mkdir2(dirname2(summaryAbsPath), { recursive: true });
  await writeFile2(summaryAbsPath, result.summary.content, "utf8");
  // Updated existing pages.
  for (const update of result.updates) {
    const absPath = join5(project.root, update.path);
    assertWithinRoot(absPath, project.root);
    await mkdir2(dirname2(absPath), { recursive: true });
    await writeFile2(absPath, update.content, "utf8");
  }
  // Newly created pages.
  for (const newPage of result.newPages) {
    const absPath = join5(project.root, newPage.path);
    assertWithinRoot(absPath, project.root);
    await mkdir2(dirname2(absPath), { recursive: true });
    await writeFile2(absPath, newPage.content, "utf8");
  }
  // Full replacement of the wiki index (path is project-controlled).
  const indexPath = join5(project.wikiDir, "_index.md");
  await writeFile2(indexPath, result.indexUpdate, "utf8");
  // Keep a copy of the raw source next to the wiki; the filename is
  // caller-derived, so it must pass the traversal guard too.
  const sourceDestPath = join5(project.sourcesDir, sourceFilename);
  assertWithinRoot(sourceDestPath, project.root);
  await mkdir2(project.sourcesDir, { recursive: true });
  await writeFile2(sourceDestPath, sourceContent, "utf8");
  // Append a dated one-line audit entry.
  const logPath = join5(project.wikiDir, "log.md");
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
  const logLine = `- ${timestamp}: ${result.logEntry}
`;
  await appendFile(logPath, logLine, "utf8");
  // Refresh the search index so the new content is immediately queryable.
  await indexProject(project);
}
957
/**
 * Ingest a source document into the wiki via the LLM.
 *
 * Reads the source, sends it with the current wiki schema and index to the
 * model, validates the response, and (only when `options.apply` is true)
 * writes the result to disk.
 *
 * @param {{root: string, wikiDir: string, kbDir: string, sourcesDir: string}} project
 * @param {string} sourcePath - path of the document to ingest
 * @param {{complete: Function}} llm - LLM adapter
 * @param {{apply?: boolean}} [options]
 * @returns {Promise<{result: object, sourceFile: string, dryRun: boolean}>}
 */
async function ingestSource(project, sourcePath, llm, options) {
  const shouldApply = options?.apply ?? false;
  const source = await readSource(sourcePath);
  // Context the model needs: current index first, then the schema page.
  const currentIndex = await readFileSafe(join5(project.wikiDir, "_index.md"));
  const schema = await readFileSafe(join5(project.kbDir, "schema.md"));
  const userMessage = `## Wiki Schema
${schema}

## Current Wiki Index
${currentIndex}

## New Source: ${source.filename}
${source.content}

Integrate this source into the wiki following the schema above.`;
  const rawResponse = await llm.complete([{ role: "user", content: userMessage }], SYSTEM_PROMPT);
  const result = parseIngestResult(rawResponse);
  if (shouldApply) {
    await applyIngestResult(project, result, source.content, source.filename);
  }
  return {
    result,
    sourceFile: join5(project.sourcesDir, source.filename),
    dryRun: !shouldApply
  };
}
986
+
987
+ // src/query.ts
988
+ import { readFile as readFile6, writeFile as writeFile3, mkdir as mkdir3, appendFile as appendFile2 } from "fs/promises";
989
+ import { existsSync } from "fs";
990
+ import { join as join6, dirname as dirname3, resolve as resolve3 } from "path";
991
// System prompt for the query (Q&A) LLM call: answers must be grounded in
// the supplied wiki pages and cite them with [[wikilink]] syntax.
// NOTE: this is a runtime string sent to the model — do not reformat it.
var SYSTEM_PROMPT2 = `You are a knowledgeable assistant answering questions about a project's knowledge base.
Answer concisely using only information from the provided wiki pages.
Use [[page-name]] wikilink syntax to cite specific wiki pages in your answer.
Format your answer in markdown.`;
995
/**
 * Guard against path traversal: throws unless `absPath` resolves to a
 * location strictly inside `root`. (query.ts copy of the ingest guard.)
 *
 * Fix: the previous check built the prefix as `resolve(root) + "/"`, which
 * yields `"//"` when root resolves to `/`, rejecting every path. Only append
 * the separator when the resolved root does not already end with one.
 *
 * @param {string} absPath - candidate path to check
 * @param {string} root - project root directory
 * @throws {Error} when the resolved path falls outside the resolved root
 */
function assertWithinRoot2(absPath, root) {
  const resolvedPath = resolve3(absPath);
  const resolvedRoot = resolve3(root);
  // Separator-terminated so sibling dirs sharing a prefix are rejected.
  const rootPrefix = resolvedRoot.endsWith("/") ? resolvedRoot : resolvedRoot + "/";
  if (!resolvedPath.startsWith(rootPrefix)) {
    throw new Error(
      `Unsafe path rejected: "${absPath}" is outside project root`
    );
  }
}
1004
/**
 * Read a UTF-8 text file, mapping any read failure to an empty string.
 * (query.ts copy of the ingest helper.)
 *
 * @param {string} filePath - file to read
 * @returns {Promise<string>} file contents, or "" when unreadable
 */
async function readFileSafe2(filePath) {
  return readFile6(filePath, "utf8").catch(() => "");
}
1011
/**
 * Answer a question from the wiki: full-text search for relevant pages, feed
 * them to the LLM, optionally save the answer and log the query.
 *
 * @param {{root: string, wikiDir: string, kbDir: string, name: string}} project
 * @param {string} question - user question
 * @param {{complete: Function}} llm - LLM adapter
 * @param {{save?: string}} [options] - project-relative path to save the answer to
 * @returns {Promise<{answer: string, sources: string[]}>}
 */
async function queryWiki(project, question, llm, options) {
  // Build the search index on first use.
  const dbPath = join6(project.kbDir, "index.db");
  if (!existsSync(dbPath)) {
    await indexProject(project);
  }
  // Search, releasing the DB handle even if the query throws.
  const db = openDb(project);
  let hits;
  try {
    hits = searchWiki(db, question, project.name, { limit: 10 });
  } finally {
    closeDb(db);
  }
  // Load the content of every hit that is still readable on disk.
  const contextPages = [];
  for (const hit of hits) {
    const pageContent = await readFileSafe2(join6(project.root, hit.path));
    if (pageContent) {
      contextPages.push({ path: hit.path, title: hit.title, content: pageContent });
    }
  }
  let pagesSection;
  if (contextPages.length > 0) {
    pagesSection = contextPages.map((p) => `### ${p.title} (${p.path})
${p.content}`).join("\n\n");
  } else {
    pagesSection = "(No wiki pages found for this query.)";
  }
  const userMessage = `## Question
${question}

## Relevant Wiki Pages

${pagesSection}`;
  const answer = await llm.complete(
    [{ role: "user", content: userMessage }],
    SYSTEM_PROMPT2
  );
  const sources = contextPages.map((p) => p.path);
  if (options?.save) {
    // Persist the answer inside the project, then log and reindex so the
    // saved page becomes searchable.
    const saveRelPath = options.save;
    const saveAbsPath = join6(project.root, saveRelPath);
    assertWithinRoot2(saveAbsPath, project.root);
    await mkdir3(dirname3(saveAbsPath), { recursive: true });
    await writeFile3(saveAbsPath, answer, "utf8");
    const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
    const logEntry = `
## ${timestamp} \u2014 Queried: ${question}

Saved to: ${saveRelPath}
`;
    await appendFile2(join6(project.wikiDir, "log.md"), logEntry, "utf8");
    await indexProject(project);
  }
  return { answer, sources };
}
1062
+
1063
+ // src/lint.ts
1064
+ import { readdir as readdir2, stat as stat2 } from "fs/promises";
1065
+ import { join as join7, relative as relative2, basename as basename3, extname as extname2 } from "path";
1066
/**
 * Recursively collect absolute paths of all .md files under `dir`.
 * A missing directory yields an empty list; other errors propagate.
 *
 * @param {string} dir - directory to walk
 * @returns {Promise<string[]>} absolute paths of markdown files
 */
async function collectMdFiles2(dir) {
  let entries;
  try {
    entries = await readdir2(dir, {
      recursive: true,
      withFileTypes: true
    });
  } catch (err) {
    if (err.code === "ENOENT") return [];
    throw err;
  }
  const mdFiles = [];
  for (const entry of entries) {
    if (!entry.isFile() || !entry.name.endsWith(".md")) continue;
    // parentPath is the modern Dirent field; path is the pre-rename alias.
    mdFiles.push(join7(entry.parentPath ?? entry.path, entry.name));
  }
  return mdFiles;
}
1078
/**
 * Recursively collect absolute paths of ALL files under `dir` (any
 * extension). A missing directory yields an empty list; other errors
 * propagate.
 *
 * @param {string} dir - directory to walk
 * @returns {Promise<string[]>} absolute file paths
 */
async function collectSourceFiles(dir) {
  let entries;
  try {
    entries = await readdir2(dir, {
      recursive: true,
      withFileTypes: true
    });
  } catch (err) {
    if (err.code === "ENOENT") return [];
    throw err;
  }
  const files = [];
  for (const entry of entries) {
    if (!entry.isFile()) continue;
    files.push(join7(entry.parentPath ?? entry.path, entry.name));
  }
  return files;
}
1090
/**
 * Build the set of every string that counts as a valid wikilink target for
 * the given pages: the project-relative path, the wiki-relative path, each
 * with and without the .md extension, plus the bare basename.
 *
 * @param {string[]} relPaths - page paths relative to projectRoot
 * @param {string} projectRoot - absolute project root
 * @param {string} wikiDir - absolute wiki directory
 * @returns {Set<string>} all resolvable link keys
 */
function buildPageKeySet(relPaths, projectRoot, wikiDir) {
  const keys = /* @__PURE__ */ new Set();
  for (const relPath of relPaths) {
    const wikiRelative = relative2(wikiDir, join7(projectRoot, relPath));
    const variants = [
      relPath,
      relPath.replace(/\.md$/i, ""),
      basename3(relPath, ".md"),
      wikiRelative,
      wikiRelative.replace(/\.md$/i, "")
    ];
    for (const variant of variants) {
      keys.add(variant);
    }
  }
  return keys;
}
1104
/**
 * Lint the wiki: reindex, then run a series of passes over the indexed
 * page metadata and emit issues.
 *
 * Passes (in order): ORPHAN_PAGE (warning) — no inbound links;
 * BROKEN_LINK (warning) — wikilink with no matching page key;
 * STUB_PAGE (info) — no outgoing links and under 50 words;
 * STALE_SUMMARY (warning) — source file newer than its wiki summary;
 * MISSING_INDEX (info) — page not linked from _index.md.
 *
 * @param {{root: string, wikiDir: string, sourcesDir: string}} project
 * @returns {Promise<{issues: object[], pagesChecked: number, sourcesChecked: number}>}
 */
async function lintProject(project) {
  // Reindex first so page_meta reflects the current on-disk state.
  await indexProject(project);
  const issues = [];
  const absWikiFiles = await collectMdFiles2(project.wikiDir);
  const relWikiPaths = absWikiFiles.map((f) => relative2(project.root, f));
  const pagesChecked = relWikiPaths.length;
  const sourceFiles = await collectSourceFiles(project.sourcesDir);
  // .gitkeep is scaffolding, not a real source.
  const sourcesChecked = sourceFiles.filter(
    (f) => basename3(f) !== ".gitkeep"
  ).length;
  if (pagesChecked === 0) {
    return { issues, pagesChecked: 0, sourcesChecked };
  }
  const pageKeySet = buildPageKeySet(
    relWikiPaths,
    project.root,
    project.wikiDir
  );
  // Pull all page metadata in one query; close the DB even on failure.
  const db = openDb(project);
  let rows;
  try {
    rows = db.prepare("SELECT path, outgoing_links, word_count, mtime, updated_at FROM page_meta").all();
  } finally {
    closeDb(db);
  }
  const metaMap = /* @__PURE__ */ new Map();
  for (const row of rows) {
    metaMap.set(row.path, row);
  }
  // Invert outgoing_links into an inbound-link map (page -> linking pages).
  const inboundLinks = /* @__PURE__ */ new Map();
  for (const rp of relWikiPaths) {
    inboundLinks.set(rp, /* @__PURE__ */ new Set());
  }
  for (const row of rows) {
    // outgoing_links is stored as a JSON array; malformed data counts as none.
    let links = [];
    try {
      links = JSON.parse(row.outgoing_links);
    } catch {
      links = [];
    }
    for (const link of links) {
      const resolved = resolveLink(
        link,
        relWikiPaths,
        project.root,
        project.wikiDir
      );
      if (resolved !== null) {
        const set = inboundLinks.get(resolved);
        if (set) {
          set.add(row.path);
        }
      }
    }
  }
  // Resolve the set of pages that _index.md links to (used by MISSING_INDEX).
  const indexPath = relWikiPaths.find((p) => basename3(p) === "_index.md");
  let indexLinks = /* @__PURE__ */ new Set();
  if (indexPath) {
    const indexRow = metaMap.get(indexPath);
    if (indexRow) {
      let links = [];
      try {
        links = JSON.parse(indexRow.outgoing_links);
      } catch {
        links = [];
      }
      for (const link of links) {
        const resolved = resolveLink(
          link,
          relWikiPaths,
          project.root,
          project.wikiDir
        );
        if (resolved !== null) {
          indexLinks.add(resolved);
        }
      }
    }
  }
  // Pass 1: ORPHAN_PAGE — every page except _index.md needs an inbound link.
  for (const rp of relWikiPaths) {
    if (basename3(rp) === "_index.md") continue;
    const inbound = inboundLinks.get(rp);
    if (!inbound || inbound.size === 0) {
      issues.push({
        severity: "warning",
        code: "ORPHAN_PAGE",
        path: rp,
        message: "Orphan page (no inbound links)"
      });
    }
  }
  // Pass 2: BROKEN_LINK — outgoing links that match no known page key.
  for (const row of rows) {
    let links = [];
    try {
      links = JSON.parse(row.outgoing_links);
    } catch {
      links = [];
    }
    for (const link of links) {
      if (!isLinkResolvable(link, pageKeySet)) {
        issues.push({
          severity: "warning",
          code: "BROKEN_LINK",
          path: row.path,
          message: `Broken wikilink [[${link}]] not found`,
          detail: link
        });
      }
    }
  }
  // Pass 3: STUB_PAGE — no outgoing links and fewer than 50 words.
  for (const row of rows) {
    if (basename3(row.path) === "_index.md") continue;
    let links = [];
    try {
      links = JSON.parse(row.outgoing_links);
    } catch {
      links = [];
    }
    if (links.length === 0 && row.word_count < 50) {
      issues.push({
        severity: "info",
        code: "STUB_PAGE",
        path: row.path,
        message: `Stub page (no links, < 50 words)`
      });
    }
  }
  // Pass 4: STALE_SUMMARY — a wiki/sources/*-summary page whose matching
  // source file (same basename, any extension) was modified more recently.
  const wikiSourcesDir = join7(project.wikiDir, "sources");
  for (const rp of relWikiPaths) {
    const absWikiPage = join7(project.root, rp);
    const relToWikiSources = relative2(wikiSourcesDir, absWikiPage);
    // Only consider pages that live under wiki/sources/.
    if (relToWikiSources.startsWith("..")) continue;
    const summaryBasename = basename3(rp, ".md");
    const sourceBasename = summaryBasename.endsWith("-summary") ? summaryBasename.slice(0, -"-summary".length) : summaryBasename;
    const matchingSource = sourceFiles.find((sf) => {
      const sfBase = basename3(sf, extname2(sf));
      return sfBase === sourceBasename;
    });
    if (!matchingSource) continue;
    try {
      const summaryRow = metaMap.get(rp);
      if (!summaryRow) continue;
      const [sourceStat, summaryStat] = await Promise.all([
        stat2(matchingSource),
        stat2(join7(project.root, summaryRow.path))
      ]);
      if (sourceStat.mtimeMs > summaryStat.mtimeMs) {
        issues.push({
          severity: "warning",
          code: "STALE_SUMMARY",
          path: rp,
          message: "Source updated after summary",
          detail: relative2(project.root, matchingSource)
        });
      }
    } catch {
      // Best-effort: stat failures (e.g. file removed mid-lint) are ignored.
    }
  }
  // Pass 5: MISSING_INDEX — pages neither linked by path nor by basename
  // from _index.md (skipped entirely when there is no index page).
  for (const rp of relWikiPaths) {
    if (basename3(rp) === "_index.md") continue;
    if (!indexPath) {
      continue;
    }
    if (!indexLinks.has(rp)) {
      const fname = basename3(rp, ".md");
      if (!indexLinks.has(fname)) {
        issues.push({
          severity: "info",
          code: "MISSING_INDEX",
          path: rp,
          message: "Not in _index.md"
        });
      }
    }
  }
  return { issues, pagesChecked, sourcesChecked };
}
1281
/**
 * Resolve a wikilink target to the first matching page path.
 *
 * For each candidate page, the link may match: the bare basename, the
 * project-relative path (with or without .md), or the wiki-relative path
 * (with or without .md). Returns the project-relative path of the first
 * match, or null when nothing matches.
 *
 * @param {string} link - wikilink target text
 * @param {string[]} relPaths - page paths relative to projectRoot
 * @param {string} projectRoot - absolute project root
 * @param {string} wikiDir - absolute wiki directory
 * @returns {string|null} matching project-relative page path, or null
 */
function resolveLink(link, relPaths, projectRoot, wikiDir) {
  for (const candidate of relPaths) {
    if (basename3(candidate, ".md") === link) {
      return candidate;
    }
    if (candidate === link || candidate === `${link}.md`) {
      return candidate;
    }
    const wikiRelative = relative2(wikiDir, join7(projectRoot, candidate));
    if (wikiRelative === link || wikiRelative.replace(/\.md$/i, "") === link) {
      return candidate;
    }
  }
  return null;
}
1294
/**
 * Report whether a wikilink target matches any known page key
 * (see buildPageKeySet for what counts as a key).
 *
 * @param {string} link - wikilink target text
 * @param {Set<string>} pageKeySet - all resolvable link keys
 * @returns {boolean} true when the link resolves to a page
 */
function isLinkResolvable(link, pageKeySet) {
  const resolvable = pageKeySet.has(link);
  return resolvable;
}
1297
+
1298
+ // src/log-parser.ts
1299
/**
 * Parse a markdown log file into entries, one per "## " heading.
 *
 * The content is split at each line starting with "## "; the top-level
 * "# " title and anything before the first entry are discarded. Each
 * entry's heading is the text after "## " and its body is the trimmed
 * remainder of the section ("" when the heading has no body).
 *
 * @param {string} content - raw log.md text
 * @returns {Array<{heading: string, body: string}>} parsed entries in order
 */
function parseLogEntries(content) {
  const entries = [];
  for (const rawSection of content.split(/^(?=## )/m)) {
    const section = rawSection.trim();
    // Skips empty sections, the "# " title, and any non-heading preamble.
    if (!section.startsWith("## ")) continue;
    const headingEnd = section.indexOf("\n");
    if (headingEnd === -1) {
      entries.push({ heading: section.slice(3).trim(), body: "" });
    } else {
      entries.push({
        heading: section.slice(3, headingEnd).trim(),
        body: section.slice(headingEnd + 1).trim()
      });
    }
  }
  return entries;
}
1318
+
1319
+ // src/index.ts
1320
+ var VERSION = "0.1.0";
1321
+ export {
1322
+ VERSION,
1323
+ closeDb,
1324
+ createLlmAdapter,
1325
+ indexProject,
1326
+ ingestSource,
1327
+ initProject,
1328
+ lintProject,
1329
+ loadProject,
1330
+ openDb,
1331
+ parseConfig,
1332
+ parseLogEntries,
1333
+ parsePage,
1334
+ queryWiki,
1335
+ readSource,
1336
+ searchWiki,
1337
+ tryLoadProject
1338
+ };
1339
+ //# sourceMappingURL=index.js.map