kb-core 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63) hide show
  1. package/dist/config.d.ts +22 -0
  2. package/dist/config.d.ts.map +1 -0
  3. package/dist/config.test.d.ts +2 -0
  4. package/dist/config.test.d.ts.map +1 -0
  5. package/dist/db.d.ts +5 -0
  6. package/dist/db.d.ts.map +1 -0
  7. package/dist/db.test.d.ts +2 -0
  8. package/dist/db.test.d.ts.map +1 -0
  9. package/dist/index.cjs +1391 -0
  10. package/dist/index.cjs.map +1 -0
  11. package/dist/index.d.ts +28 -0
  12. package/dist/index.d.ts.map +1 -0
  13. package/dist/index.js +1339 -0
  14. package/dist/index.js.map +1 -0
  15. package/dist/indexer.d.ts +9 -0
  16. package/dist/indexer.d.ts.map +1 -0
  17. package/dist/indexer.test.d.ts +2 -0
  18. package/dist/indexer.test.d.ts.map +1 -0
  19. package/dist/ingest-types.d.ts +19 -0
  20. package/dist/ingest-types.d.ts.map +1 -0
  21. package/dist/ingest.d.ts +14 -0
  22. package/dist/ingest.d.ts.map +1 -0
  23. package/dist/ingest.test.d.ts +2 -0
  24. package/dist/ingest.test.d.ts.map +1 -0
  25. package/dist/init.d.ts +6 -0
  26. package/dist/init.d.ts.map +1 -0
  27. package/dist/init.test.d.ts +2 -0
  28. package/dist/init.test.d.ts.map +1 -0
  29. package/dist/integration.test.d.ts +2 -0
  30. package/dist/integration.test.d.ts.map +1 -0
  31. package/dist/lint.d.ts +16 -0
  32. package/dist/lint.d.ts.map +1 -0
  33. package/dist/lint.test.d.ts +2 -0
  34. package/dist/lint.test.d.ts.map +1 -0
  35. package/dist/llm.d.ts +10 -0
  36. package/dist/llm.d.ts.map +1 -0
  37. package/dist/llm.test.d.ts +2 -0
  38. package/dist/llm.test.d.ts.map +1 -0
  39. package/dist/log-parser.d.ts +11 -0
  40. package/dist/log-parser.d.ts.map +1 -0
  41. package/dist/log-parser.test.d.ts +2 -0
  42. package/dist/log-parser.test.d.ts.map +1 -0
  43. package/dist/markdown.d.ts +11 -0
  44. package/dist/markdown.d.ts.map +1 -0
  45. package/dist/markdown.test.d.ts +2 -0
  46. package/dist/markdown.test.d.ts.map +1 -0
  47. package/dist/project.d.ts +12 -0
  48. package/dist/project.d.ts.map +1 -0
  49. package/dist/project.test.d.ts +2 -0
  50. package/dist/project.test.d.ts.map +1 -0
  51. package/dist/query.d.ts +11 -0
  52. package/dist/query.d.ts.map +1 -0
  53. package/dist/query.test.d.ts +2 -0
  54. package/dist/query.test.d.ts.map +1 -0
  55. package/dist/search.d.ts +14 -0
  56. package/dist/search.d.ts.map +1 -0
  57. package/dist/search.test.d.ts +2 -0
  58. package/dist/search.test.d.ts.map +1 -0
  59. package/dist/source-reader.d.ts +9 -0
  60. package/dist/source-reader.d.ts.map +1 -0
  61. package/dist/source-reader.test.d.ts +2 -0
  62. package/dist/source-reader.test.d.ts.map +1 -0
  63. package/package.json +32 -0
package/dist/index.cjs ADDED
@@ -0,0 +1,1391 @@
1
+ "use strict";
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
// esbuild helper: register each entry of `all` on `target` as a lazy,
// enumerable getter, preserving live-binding semantics in the CJS output.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
12
// esbuild helper: copy every own property of `from` onto `to` as a getter,
// skipping `except` and keys already present on `to`. `desc` is a scratch
// parameter reused to hold the source property descriptor.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
20
// esbuild helper: wrap a require()'d CJS module so it can be consumed with
// ESM `import` semantics (adds a `default` binding when appropriate).
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
28
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
+
30
+ // src/index.ts
31
// src/index.ts
// Public API of the package: each export is registered as a lazy getter on
// index_exports, which is then exposed through module.exports for CJS callers.
var index_exports = {};
__export(index_exports, {
  VERSION: () => VERSION,
  closeDb: () => closeDb,
  createLlmAdapter: () => createLlmAdapter,
  indexProject: () => indexProject,
  ingestSource: () => ingestSource,
  initProject: () => initProject,
  lintProject: () => lintProject,
  loadProject: () => loadProject,
  openDb: () => openDb,
  parseConfig: () => parseConfig,
  parseLogEntries: () => parseLogEntries,
  parsePage: () => parsePage,
  queryWiki: () => queryWiki,
  readSource: () => readSource,
  searchWiki: () => searchWiki,
  tryLoadProject: () => tryLoadProject
});
module.exports = __toCommonJS(index_exports);
51
+
52
+ // src/init.js
53
+ var import_promises = require("fs/promises");
54
+ var import_node_path = require("path");
55
+ var import_toml = __toESM(require("@iarna/toml"), 1);
56
// Resolve the project name: an explicit, non-empty `options.name` wins;
// otherwise fall back to the last path segment of the target directory.
function resolveProjectName(options) {
  if (options.name) {
    return options.name;
  }
  return (0, import_node_path.basename)(options.directory);
}
59
// Render the initial .kb/config.toml for a new project. The [dependencies]
// table is appended as a literal string (with a commented-out example) rather
// than through the TOML serializer so the comment survives in the output.
function buildConfigToml(projectName) {
  const config = {
    project: {
      name: projectName,
      version: "0.1.0"
    },
    directories: {
      sources: "sources",
      wiki: "wiki"
    },
    llm: {
      provider: "anthropic",
      model: "claude-sonnet-4-20250514"
    }
  };
  const tomlStr = import_toml.default.stringify(config);
  return tomlStr + '\n[dependencies]\n# shared-glossary = { path = "../shared-glossary" }\n';
}
77
// Render the static .kb/schema.md written by `kb init`: the conventions file
// that both the CLI and the configured LLM are instructed to follow
// (frontmatter schema, page templates, wikilink rules, workflows).
// Pure function of no arguments; the content is a single template literal.
function buildSchemaMd() {
  return `# KB Schema \u2014 LLM Instructions

This file defines the conventions for this knowledge base. The \`kb\` CLI and any
LLM operating on this wiki MUST follow these rules.

---

## Wiki Structure Conventions

- All pages live under the \`wiki/\` directory.
- \`wiki/_index.md\` is the wiki root and serves as a table of contents.
- Sub-topics may be organised into sub-directories: \`wiki/<topic>/_index.md\`.
- File names use kebab-case, e.g. \`wiki/authentication-flow.md\`.
- Every page must have a valid YAML frontmatter block.

---

## Frontmatter Schema

Every wiki page must begin with a YAML frontmatter block:

\`\`\`yaml
---
title: <Human-readable page title>
tags: [tag1, tag2] # optional; array of lowercase strings
created: <ISO 8601 date> # e.g. 2026-04-05
updated: <ISO 8601 date> # updated whenever content changes
source: <path or URL> # optional; original source material
---
\`\`\`

Required fields: \`title\`, \`created\`.

---

## Page Templates

### Entity Page
Use for: people, systems, services, tools.

\`\`\`markdown
---
title: <Entity Name>
tags: [entity]
created: <ISO date>
updated: <ISO date>
---

# <Entity Name>

**Type**: <system | person | service | tool>

## Overview

<One-paragraph description.>

## Key Attributes

- **Attribute**: value

## Related

- [[related-page]]
\`\`\`

### Concept Page
Use for: ideas, patterns, terminology.

\`\`\`markdown
---
title: <Concept Name>
tags: [concept]
created: <ISO date>
updated: <ISO date>
---

# <Concept Name>

## Definition

<Clear definition in 1-3 sentences.>

## Context

<When and why this concept matters in the project.>

## See Also

- [[related-concept]]
\`\`\`

### Source Summary Page
Use for: summarised source material (docs, papers, meetings).

\`\`\`markdown
---
title: Summary \u2014 <Source Title>
tags: [source-summary]
created: <ISO date>
source: <path or URL>
---

# Summary \u2014 <Source Title>

## Key Points

- Point one
- Point two

## Decisions / Implications

<What this source means for the project.>

## Raw Source

See \`sources/<filename>\`.
\`\`\`

### Comparison Page
Use for: side-by-side evaluation of options.

\`\`\`markdown
---
title: Comparison \u2014 <Topic>
tags: [comparison]
created: <ISO date>
updated: <ISO date>
---

# Comparison \u2014 <Topic>

| Criterion | Option A | Option B |
|-----------|----------|----------|
| ... | ... | ... |

## Recommendation

<Which option and why.>
\`\`\`

---

## Wikilink Conventions

- Basic link: \`[[page-name]]\` \u2014 links to \`wiki/page-name.md\`.
- Display text: \`[[page-name|display text]]\` \u2014 renders as "display text".
- Cross-directory: \`[[topic/sub-page]]\`.
- All wikilink targets must be lowercase kebab-case matching the file name without \`.md\`.

---

## Ingest Workflow

1. Place the source file in \`sources/\` (PDF, Markdown, plain text, etc.).
2. Run \`kb ingest sources/<filename>\`.
3. The CLI reads the file, calls the configured LLM, and generates a source-summary
   page in \`wiki/\`.
4. The summary page is linked from \`wiki/_index.md\` under **Sources**.
5. An entry is appended to \`log.md\`.

---

## Query Workflow

1. Run \`kb query "<natural-language question>"\`.
2. The CLI searches the wiki index for relevant pages.
3. Relevant page content is assembled into a prompt context.
4. The LLM answers the question, citing wikilinks.
5. The answer is printed to stdout. Nothing is written to disk unless \`--save\` is passed.

---

## Lint Workflow

Run \`kb lint\` to check for:

- Pages missing required frontmatter fields (\`title\`, \`created\`).
- Broken wikilinks (targets that don't resolve to an existing page).
- Pages not reachable from \`wiki/_index.md\`.
- Duplicate page titles across the wiki.
- Frontmatter fields with invalid types or formats.

Lint exits with code 0 on success, 1 if errors are found.
`;
}
263
// Render the initial wiki/_index.md (the wiki root / table of contents) with
// placeholder sections for pages and sources.
// `isoDate` is a YYYY-MM-DD string used in the frontmatter `created` field.
function buildIndexMd(projectName, isoDate) {
  return `---
title: ${projectName} Knowledge Base
created: ${isoDate}
---

# ${projectName} Knowledge Base

> This wiki is maintained by the \`kb\` CLI tool.

## Pages

(No pages yet. Use \`kb ingest <source>\` to add content.)

## Sources

(No sources yet.)
`;
}
282
// Render the initial log.md with a single "project initialized" entry.
function buildLogMd(projectName, isoDate) {
  return `# Activity Log

## ${isoDate} \u2014 Project initialized

Project \`${projectName}\` initialized.
`;
}
290
// True when `<directory>/.kb` is accessible; any access failure (missing,
// permission, etc.) is treated as "does not exist".
async function kbDirExists(directory) {
  const marker = (0, import_node_path.join)(directory, ".kb");
  try {
    await (0, import_promises.access)(marker);
  } catch {
    return false;
  }
  return true;
}
298
// Scaffold a new knowledge base under options.directory:
//   .kb/{config.toml,schema.md}, sources/.gitkeep, wiki/_index.md, log.md.
// Throws if .kb/ already exists there. If any write fails, the .kb/ directory
// is removed again so a partial init does not block a retry (files created
// outside .kb/ may remain, but they are harmless placeholders).
async function initProject(options) {
  const projectName = resolveProjectName(options);
  const { directory } = options;
  if (await kbDirExists(directory)) {
    throw new Error(`Knowledge base already initialized: .kb/ already exists in ${directory}`);
  }
  // Date-only stamp (YYYY-MM-DD, UTC) used in frontmatter and the log.
  const isoDate = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
  try {
    // Directories first — the file writes below depend on them existing.
    await Promise.all([
      (0, import_promises.mkdir)((0, import_node_path.join)(directory, ".kb"), { recursive: true }),
      (0, import_promises.mkdir)((0, import_node_path.join)(directory, "sources"), { recursive: true }),
      (0, import_promises.mkdir)((0, import_node_path.join)(directory, "wiki"), { recursive: true })
    ]);
    await Promise.all([
      (0, import_promises.writeFile)((0, import_node_path.join)(directory, ".kb", "config.toml"), buildConfigToml(projectName), "utf8"),
      (0, import_promises.writeFile)((0, import_node_path.join)(directory, ".kb", "schema.md"), buildSchemaMd(), "utf8"),
      (0, import_promises.writeFile)((0, import_node_path.join)(directory, "sources", ".gitkeep"), "", "utf8"),
      (0, import_promises.writeFile)((0, import_node_path.join)(directory, "wiki", "_index.md"), buildIndexMd(projectName, isoDate), "utf8"),
      (0, import_promises.writeFile)((0, import_node_path.join)(directory, "log.md"), buildLogMd(projectName, isoDate), "utf8")
    ]);
  } catch (error) {
    // Roll back the marker directory so the next `kb init` can run cleanly.
    await (0, import_promises.rm)((0, import_node_path.join)(directory, ".kb"), { recursive: true, force: true });
    throw error;
  }
}
323
+
324
+ // src/config.js
325
+ var import_promises2 = require("fs/promises");
326
+ var import_toml2 = __toESM(require("@iarna/toml"), 1);
327
+ var VALID_PROVIDERS = ["anthropic", "openai", "ollama"];
328
// Validate that a user-supplied directory value from config.toml stays inside
// the project root: rejects absolute paths (POSIX "/...", Windows "\..." and
// drive-letter "C:\..." / "C:/..." forms) and any ".." traversal segment.
// Splits on BOTH "/" and "\" so a backslash-based traversal like "..\evil"
// cannot slip through on Windows (the old check only split on "/").
// Throws an Error naming `field`; returns nothing on success.
function requireSafeRelativePath(val, field) {
  const isAbsolute = val.startsWith("/") || val.startsWith("\\") || /^[A-Za-z]:[\\/]/.test(val);
  const hasTraversal = val.split(/[\\/]/).includes("..");
  if (isAbsolute || hasTraversal) {
    throw new Error(`Invalid config: ${field} must be a safe relative path, got "${val}"`);
  }
}
333
// Fetch `obj[key]`, insisting on a non-blank string; the error message names
// the TOML location as "<context>.<key>".
function requireString(obj, key, context) {
  const candidate = obj[key];
  const isUsable = typeof candidate === "string" && candidate.trim() !== "";
  if (!isUsable) {
    throw new Error(`Invalid config: missing required field "${context}.${key}"`);
  }
  return candidate;
}
340
// Fetch `obj[key]`, insisting on a plain TOML table (an object that is
// neither null, undefined, nor an array).
function requireSection(obj, key) {
  const section = obj[key];
  const isTable = section !== void 0 && section !== null && typeof section === "object" && !Array.isArray(section);
  if (!isTable) {
    throw new Error(`Invalid config: missing required section "[${key}]"`);
  }
  return section;
}
347
// Read and validate a .kb/config.toml.
// Returns { project: {name, version}, directories: {sources, wiki},
//           llm: {provider, model}, dependencies }.
// Throws on: unreadable file, invalid TOML, missing required sections/fields,
// unsafe directory paths, or an llm.provider outside VALID_PROVIDERS.
// Unknown top-level keys are ignored.
async function parseConfig(configPath) {
  let raw;
  try {
    raw = await (0, import_promises2.readFile)(configPath, "utf8");
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    throw new Error(`Config file not found: ${configPath}
${message}`);
  }
  let parsed;
  try {
    parsed = import_toml2.default.parse(raw);
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    throw new Error(`Invalid TOML in config file ${configPath}: ${message}`);
  }
  const project = requireSection(parsed, "project");
  const name = requireString(project, "name", "project");
  const version = requireString(project, "version", "project");
  const directories = requireSection(parsed, "directories");
  const sources = requireString(directories, "sources", "directories");
  requireSafeRelativePath(sources, "directories.sources");
  const wiki = requireString(directories, "wiki", "directories");
  requireSafeRelativePath(wiki, "directories.wiki");
  const llm = requireSection(parsed, "llm");
  const providerRaw = requireString(llm, "provider", "llm");
  if (!VALID_PROVIDERS.includes(providerRaw)) {
    throw new Error(`Invalid config: llm.provider must be one of ${VALID_PROVIDERS.join(", ")}, got "${providerRaw}"`);
  }
  const provider = providerRaw;
  const model = requireString(llm, "model", "llm");
  // [dependencies] is optional. For each table entry, keep only the known
  // string-valued keys (path, git, branch, mode); non-table entries and
  // non-string values are silently dropped.
  const rawDeps = parsed["dependencies"];
  const dependencies = {};
  if (rawDeps !== void 0 && rawDeps !== null && typeof rawDeps === "object" && !Array.isArray(rawDeps)) {
    for (const [depKey, depVal] of Object.entries(rawDeps)) {
      if (typeof depVal === "object" && depVal !== null && !Array.isArray(depVal)) {
        const dep = depVal;
        dependencies[depKey] = {
          ...typeof dep["path"] === "string" ? { path: dep["path"] } : {},
          ...typeof dep["git"] === "string" ? { git: dep["git"] } : {},
          ...typeof dep["branch"] === "string" ? { branch: dep["branch"] } : {},
          ...typeof dep["mode"] === "string" ? { mode: dep["mode"] } : {}
        };
      }
    }
  }
  return {
    project: { name, version },
    directories: { sources, wiki },
    llm: { provider, model },
    dependencies
  };
}
400
+
401
+ // src/project.js
402
+ var import_promises3 = require("fs/promises");
403
+ var import_node_path2 = require("path");
404
// True when `<dir>/.kb/config.toml` is accessible; any failure means "no".
async function hasKbDir(dir) {
  const marker = (0, import_node_path2.join)(dir, ".kb", "config.toml");
  try {
    await (0, import_promises3.access)(marker);
  } catch {
    return false;
  }
  return true;
}
412
// Walk upward from `startDir` until a directory containing .kb/config.toml is
// found; returns that directory, or null once the filesystem root is reached.
async function findProjectRoot(startDir) {
  for (let dir = (0, import_node_path2.resolve)(startDir); ; ) {
    if (await hasKbDir(dir)) {
      return dir;
    }
    const up = (0, import_node_path2.dirname)(dir);
    if (up === dir) {
      // dirname of the root is the root itself — nothing further to search.
      return null;
    }
    dir = up;
  }
}
425
// Locate the project root above `startDir`, parse .kb/config.toml, and return
// the resolved project handle (absolute directories plus parsed config).
// Throws when no .kb/config.toml exists in any ancestor directory.
async function loadProject(startDir) {
  const root = await findProjectRoot(startDir);
  if (root === null) {
    throw new Error(`No kb project found. Run "kb init" to initialize a knowledge base in the current directory.`);
  }
  const kbDir = (0, import_node_path2.join)(root, ".kb");
  const config = await parseConfig((0, import_node_path2.join)(kbDir, "config.toml"));
  return {
    name: config.project.name,
    root,
    kbDir,
    sourcesDir: (0, import_node_path2.join)(root, config.directories.sources),
    wikiDir: (0, import_node_path2.join)(root, config.directories.wiki),
    config
  };
}
442
// Like loadProject, but yields null (instead of throwing) when no project is
// found; every other failure is re-thrown untouched.
async function tryLoadProject(startDir) {
  try {
    return await loadProject(startDir);
  } catch (err) {
    const isMissingProject = err instanceof Error && /no kb project found/i.test(err.message);
    if (isMissingProject) {
      return null;
    }
    throw err;
  }
}
452
+
453
+ // src/db.js
454
+ var import_better_sqlite3 = __toESM(require("better-sqlite3"), 1);
455
+ var import_node_path3 = require("path");
456
// SQLite schema, applied idempotently on every openDb():
//  - `pages`: FTS5 full-text index over wiki page text (porter stemming on
//    top of the unicode61 tokenizer).
//  - `page_meta`: per-page bookkeeping used for incremental re-indexing
//    (content hash, mtime, word count, serialized frontmatter and links).
var SCHEMA_SQL = `
CREATE VIRTUAL TABLE IF NOT EXISTS pages USING fts5(
  path,
  title,
  content,
  tags,
  project,
  tokenize='porter unicode61'
);

CREATE TABLE IF NOT EXISTS page_meta (
  path TEXT PRIMARY KEY,
  sha256 TEXT NOT NULL,
  mtime INTEGER NOT NULL,
  word_count INTEGER NOT NULL DEFAULT 0,
  frontmatter TEXT NOT NULL DEFAULT '{}',
  outgoing_links TEXT NOT NULL DEFAULT '[]',
  updated_at INTEGER NOT NULL
);
`;
476
// Open (creating if needed) the project's SQLite index at .kb/index.db and
// ensure the schema exists. WAL mode allows readers during writes.
// Returns a synchronous better-sqlite3 Database handle; close with closeDb.
function openDb(project) {
  const dbPath = (0, import_node_path3.join)(project.kbDir, "index.db");
  const db = new import_better_sqlite3.default(dbPath);
  db.pragma("journal_mode = WAL");
  db.exec(SCHEMA_SQL);
  return db;
}
483
// Close a database handle opened by openDb (better-sqlite3 is synchronous).
function closeDb(db) {
  db.close();
}
486
+
487
+ // src/markdown.js
488
+ var import_promises4 = require("fs/promises");
489
+ var import_gray_matter = __toESM(require("gray-matter"), 1);
490
// Matches [[target]] or [[target|display text]]; capture group 1 is the
// link target (extractWikiLinks clones this with a fresh `g` flag per call).
var WIKILINK_RE = /\[\[([^\]|]+)(?:\|[^\]]+)?\]\]/g;
// First ATX H1 heading in a page body; used as a title fallback.
var H1_RE = /^#\s+(.+)$/m;
492
// Title resolution order: frontmatter `title` → first markdown H1 heading →
// file name without its .md extension.
function extractTitle(fm, content, relativePath) {
  const fmTitle = fm["title"];
  if (typeof fmTitle === "string" && fmTitle.trim() !== "") {
    return fmTitle.trim();
  }
  const h1 = /^#\s+(.+)$/m.exec(content);
  if (h1) {
    return h1[1].trim();
  }
  const segments = relativePath.split("/");
  const filename = segments[segments.length - 1] ?? relativePath;
  return filename.replace(/\.md$/i, "");
}
503
// Join the frontmatter `tags` array into the comma-separated column stored in
// FTS; non-array values yield "" and non-string entries are dropped.
function extractTags(fm) {
  const raw = fm["tags"];
  if (!Array.isArray(raw)) {
    return "";
  }
  const strings = raw.filter((entry) => typeof entry === "string");
  return strings.join(",");
}
509
// Collect every [[target]] / [[target|display]] link target in document
// order. A fresh regex literal is used per call so no lastIndex state leaks.
function extractWikiLinks(content) {
  const found = [];
  for (const m of content.matchAll(/\[\[([^\]|]+)(?:\|[^\]]+)?\]\]/g)) {
    found.push(m[1].trim());
  }
  return found;
}
518
// Number of whitespace-separated words in `text`; blank input counts zero.
function countWords(text) {
  const words = text.trim().split(/\s+/).filter((w) => w !== "");
  return words.length;
}
524
// Read (or accept pre-read) raw markdown for a wiki page, split frontmatter
// from body via gray-matter, and derive the indexable fields.
// `rawContent`, when provided, skips the disk read.
async function parsePage(filePath, relativePath, rawContent) {
  const raw = rawContent ?? await (0, import_promises4.readFile)(filePath, "utf8");
  const { data: fm, content } = (0, import_gray_matter.default)(raw);
  return {
    path: relativePath,
    title: extractTitle(fm, content, relativePath),
    content,
    tags: extractTags(fm),
    frontmatter: fm,
    outgoingLinks: extractWikiLinks(content),
    wordCount: countWords(content)
  };
}
543
+
544
+ // src/indexer.js
545
+ var import_node_crypto = require("crypto");
546
+ var import_promises5 = require("fs/promises");
547
+ var import_node_path4 = require("path");
548
// Recursively gather absolute paths of every .md file under `dir`.
// A missing directory yields []; any other readdir failure propagates.
async function collectMdFiles(dir) {
  let entries;
  try {
    entries = await (0, import_promises5.readdir)(dir, {
      recursive: true,
      withFileTypes: true
    });
  } catch (err) {
    if (err.code === "ENOENT") {
      return [];
    }
    throw err;
  }
  const mdFiles = [];
  for (const entry of entries) {
    if (entry.isFile() && entry.name.endsWith(".md")) {
      // Node >= 20.12 exposes Dirent.parentPath; older versions only .path.
      mdFiles.push((0, import_node_path4.join)(entry.parentPath ?? entry.path, entry.name));
    }
  }
  return mdFiles;
}
561
// Hex-encoded SHA-256 digest of the given content.
function sha256(content) {
  const hasher = (0, import_node_crypto.createHash)("sha256");
  hasher.update(content);
  return hasher.digest("hex");
}
564
// Replace any existing FTS row for the page, insert the fresh row, and upsert
// its metadata record (hash, mtime, word count, serialized frontmatter and
// outgoing links, plus an updated_at wall-clock stamp).
function upsertParsedPage(stmts, project, page, hash, mtime) {
  const { deletePages, insertPage, upsertMeta } = stmts;
  deletePages.run(page.path);
  insertPage.run(page.path, page.title, page.content, page.tags, project.name);
  upsertMeta.run(
    page.path,
    hash,
    mtime,
    page.wordCount,
    JSON.stringify(page.frontmatter),
    JSON.stringify(page.outgoingLinks),
    Date.now()
  );
}
569
// Incrementally (re)index every wiki page into the SQLite FTS index.
// Files whose SHA-256 matches the stored hash are skipped; new or changed
// pages are upserted inside a per-file transaction; rows for files that no
// longer exist on disk are removed afterwards. rebuild=true wipes the index
// first. Per-file failures are counted in the returned stats rather than
// thrown. Returns { indexed, skipped, deleted, errors }; the DB handle is
// always closed before returning.
async function indexProject(project, rebuild = false) {
  const db = openDb(project);
  try {
    if (rebuild) {
      db.exec("DELETE FROM pages; DELETE FROM page_meta;");
    }
    const files = await collectMdFiles(project.wikiDir);
    const stats = { indexed: 0, skipped: 0, deleted: 0, errors: 0 };
    const getMetaStmt = db.prepare("SELECT sha256 FROM page_meta WHERE path = ?");
    const upsertStmts = {
      deletePages: db.prepare("DELETE FROM pages WHERE path = ?"),
      insertPage: db.prepare("INSERT INTO pages(path, title, content, tags, project) VALUES (?, ?, ?, ?, ?)"),
      upsertMeta: db.prepare(`
INSERT INTO page_meta(path, sha256, mtime, word_count, frontmatter, outgoing_links, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(path) DO UPDATE SET
sha256 = excluded.sha256,
mtime = excluded.mtime,
word_count = excluded.word_count,
frontmatter = excluded.frontmatter,
outgoing_links = excluded.outgoing_links,
updated_at = excluded.updated_at
`)
    };
    const deleteStalePages = db.prepare("DELETE FROM pages WHERE path = ?");
    const deleteStaleMeta = db.prepare("DELETE FROM page_meta WHERE path = ?");
    const listMetaStmt = db.prepare("SELECT path FROM page_meta");
    // Wrap the delete+insert+meta-upsert for one page in a transaction so a
    // failure cannot leave the FTS row and its metadata out of sync.
    const processFile = db.transaction((page, hash, mtime) => {
      upsertParsedPage(upsertStmts, project, page, hash, mtime);
    });
    const onDiskPaths = /* @__PURE__ */ new Set();
    for (const absPath of files) {
      // Paths are stored relative to the project root, matching page_meta.path.
      const relPath = (0, import_node_path4.relative)(project.root, absPath);
      onDiskPaths.add(relPath);
      let raw;
      try {
        raw = await (0, import_promises5.readFile)(absPath, "utf8");
      } catch (err) {
        stats.errors++;
        continue;
      }
      const hash = sha256(raw);
      const existing = getMetaStmt.get(relPath);
      // Unchanged content (same hash) — nothing to re-index.
      if (existing && existing.sha256 === hash) {
        stats.skipped++;
        continue;
      }
      let fileStat;
      try {
        fileStat = await (0, import_promises5.stat)(absPath);
      } catch {
        stats.errors++;
        continue;
      }
      let page;
      try {
        page = await parsePage(absPath, relPath, raw);
      } catch {
        stats.errors++;
        continue;
      }
      try {
        processFile(page, hash, Math.floor(fileStat.mtimeMs));
        stats.indexed++;
      } catch {
        stats.errors++;
      }
    }
    // Remove index rows for pages that disappeared from disk.
    const allMetaPaths = listMetaStmt.all().map((r) => r.path);
    const stalePaths = allMetaPaths.filter((p) => !onDiskPaths.has(p));
    const deleteStale = db.transaction((paths) => {
      for (const p of paths) {
        deleteStalePages.run(p);
        deleteStaleMeta.run(p);
      }
    });
    deleteStale(stalePaths);
    stats.deleted += stalePaths.length;
    return stats;
  } finally {
    closeDb(db);
  }
}
652
+
653
+ // src/search.js
654
// Convert free text into a safe FTS5 MATCH expression: each whitespace-
// delimited token becomes a quoted phrase with embedded quotes doubled, so
// user input cannot inject FTS operators. Blank input yields '""' (matches
// nothing).
function sanitizeFtsQuery(query) {
  const tokens = query.trim().split(/\s+/).filter(Boolean);
  if (tokens.length === 0) {
    return '""';
  }
  const phrases = tokens.map((token) => `"${token.replace(/"/g, '""')}"`);
  return phrases.join(" ");
}
660
// Split the stored comma-separated tag column back into a clean array;
// null/empty/whitespace-only input yields [].
function parseTags(raw) {
  if (!raw) {
    return [];
  }
  const parts = raw.split(",").map((part) => part.trim());
  return parts.filter(Boolean);
}
665
// Full-text search over the `pages` FTS5 table.
// The query is sanitized into quoted phrases, results are restricted to
// `projectName`, optionally AND-filtered by options.tags (case-insensitive
// substring match on the comma-joined tag column — all parameterized, no SQL
// built from user text), ranked by bm25, and capped at options.limit
// (default 10). A blank query returns [] without touching the DB.
function searchWiki(db, query, projectName, options) {
  if (!query || query.trim() === "") {
    return [];
  }
  const limit = options?.limit ?? 10;
  const ftsQuery = sanitizeFtsQuery(query.trim());
  const filterTags = options?.tags?.length ? options.tags.map((t) => t.trim().toLowerCase()).filter((t) => t.length > 0) : [];
  // One placeholder-only clause per tag; values are bound below.
  const tagClauses = filterTags.map(() => "AND lower(tags) LIKE ?").join(" ");
  const tagParams = filterTags.map((t) => `%${t}%`);
  const stmt = db.prepare(`
SELECT path, title, tags, bm25(pages) as rank,
snippet(pages, 2, '', '', '...', 8) as snippet
FROM pages
WHERE pages MATCH ? AND project = ?
${tagClauses}
ORDER BY rank
LIMIT ?
`);
  const rows = stmt.all(ftsQuery, projectName, ...tagParams, limit);
  const results = rows.map((row) => ({
    rank: row.rank,
    path: row.path,
    title: row.title,
    snippet: row.snippet,
    tags: parseTags(row.tags)
  }));
  return results;
}
693
+
694
+ // src/source-reader.js
695
+ var import_promises6 = require("fs/promises");
696
+ var import_node_path5 = require("path");
697
// Lower-case a file name and replace each whitespace character with a dash.
function sanitizeFilename(name) {
  const lowered = name.toLowerCase();
  return lowered.replace(/\s/g, "-");
}
700
// Classify a source reference: http(s) URLs by scheme prefix, otherwise by
// file extension (.pdf → "pdf", .md → "markdown", anything else → "text").
function detectType(sourcePath) {
  if (/^https?:\/\//.test(sourcePath)) {
    return "url";
  }
  switch ((0, import_node_path5.extname)(sourcePath).toLowerCase()) {
    case ".pdf":
      return "pdf";
    case ".md":
      return "markdown";
    default:
      return "text";
  }
}
711
// Derive a local file name from a URL as "<hostname>-<last path segment>",
// appending ".html" when the segment lacks an extension (or is absent).
// Unparseable URLs fall back to a fixed name. The sanitize step (lowercase,
// whitespace → dash) is inlined here.
function filenameFromUrl(url) {
  let parsed;
  try {
    parsed = new URL(url);
  } catch {
    return "url-content.html";
  }
  const segments = parsed.pathname.split("/").filter(Boolean);
  const last = segments[segments.length - 1];
  let pagePart;
  if (!last) {
    pagePart = "index.html";
  } else if (last.includes(".")) {
    pagePart = last;
  } else {
    pagePart = `${last}.html`;
  }
  return `${parsed.hostname}-${pagePart}`.toLowerCase().replace(/\s/g, "-");
}
722
// Minimal HTML-to-text conversion for fetched pages: drop <script>/<style>
// blocks, strip remaining tags, decode the common named entities, and
// collapse all whitespace runs to single spaces.
// FIX: `&amp;` must be decoded LAST. The previous order decoded it first, so
// already-escaped text like "&amp;lt;" was double-decoded into "<" instead of
// the intended "&lt;", corrupting page content.
function stripHtml(html) {
  let text = html.replace(/<script[\s\S]*?<\/script>/gi, " ");
  text = text.replace(/<style[\s\S]*?<\/style>/gi, " ");
  text = text.replace(/<[^>]+>/g, " ");
  text = text.replace(/&lt;/gi, "<").replace(/&gt;/gi, ">").replace(/&quot;/gi, '"').replace(/&#39;/gi, "'").replace(/&nbsp;/gi, " ").replace(/&amp;/gi, "&");
  text = text.replace(/\s+/g, " ").trim();
  return text;
}
730
// Extract plain text from a PDF file. pdf-parse is imported lazily so the
// dependency only loads when a PDF is actually ingested.
async function readPdf(filePath) {
  const pdfParse = await import("pdf-parse").then((m) => m.default ?? m);
  const buffer = await (0, import_promises6.readFile)(filePath);
  const data = await pdfParse(buffer);
  return data.text;
}
736
// Conservative SSRF guard for `kb ingest <url>`: true when the URL's host is
// loopback, unspecified, link-local, or an RFC 1918 private range, so
// fetchUrl can refuse it. Unparseable URLs return false and fail later in
// fetch() itself.
// FIX: the WHATWG URL parser reports IPv6 hosts in brackets ("[::1]"), so the
// old `hostname === "::1"` comparison never matched — IPv6 loopback URLs
// bypassed the guard. Brackets are stripped before comparing.
function isPrivateUrl(url) {
  let hostname;
  try {
    ({ hostname } = new URL(url));
  } catch {
    return false;
  }
  // "[::1]" → "::1" (WHATWG URL keeps brackets around IPv6 literals).
  const host = hostname.startsWith("[") && hostname.endsWith("]") ? hostname.slice(1, -1) : hostname;
  return host === "localhost" || host === "127.0.0.1" || host === "::1" || host === "0.0.0.0" || host.startsWith("169.254.") || // link-local
  host.startsWith("10.") || host.startsWith("192.168.") || /^172\.(1[6-9]|2\d|3[01])\./.test(host);
}
745
// Fetch a public URL and return its text content with HTML markup stripped.
// Refuses loopback/private hosts (SSRF guard) and throws on any non-2xx
// HTTP response.
async function fetchUrl(url) {
  if (isPrivateUrl(url)) {
    throw new Error(`Fetching private/localhost URLs is not allowed: ${url}`);
  }
  const response = await fetch(url);
  if (!response.ok) {
    throw new Error(`Failed to fetch URL ${url}: HTTP ${response.status} ${response.statusText}`);
  }
  const html = await response.text();
  return stripHtml(html);
}
756
// Load a source document (URL, PDF, or local text/markdown file) and return a
// uniform record: { type, originalPath, content, filename }.
async function readSource(sourcePath) {
  const type = detectType(sourcePath);
  switch (type) {
    case "url":
      return {
        type,
        originalPath: sourcePath,
        content: await fetchUrl(sourcePath),
        filename: filenameFromUrl(sourcePath)
      };
    case "pdf":
      return {
        type,
        originalPath: sourcePath,
        content: await readPdf(sourcePath),
        filename: sanitizeFilename((0, import_node_path5.basename)(sourcePath))
      };
    default:
      // "markdown" and "text" are both plain UTF-8 reads.
      return {
        type,
        originalPath: sourcePath,
        content: await (0, import_promises6.readFile)(sourcePath, "utf8"),
        filename: sanitizeFilename((0, import_node_path5.basename)(sourcePath))
      };
  }
}
774
+
775
+ // src/llm.js
776
// Adapter for the Anthropic Messages API. The SDK is imported lazily inside
// complete() so the dependency only loads when this provider is used.
// complete(messages, systemPrompt) returns the first text block of the
// response; requires ANTHROPIC_API_KEY in the environment and throws when the
// key is absent or the response has no leading text block.
function createAnthropicAdapter(model) {
  return {
    async complete(messages, systemPrompt) {
      const apiKey = process.env["ANTHROPIC_API_KEY"];
      if (!apiKey) {
        throw new Error("ANTHROPIC_API_KEY environment variable is not set");
      }
      const Anthropic = await import("@anthropic-ai/sdk").then((m) => m.default ?? m);
      const client = new Anthropic({ apiKey });
      const response = await client.messages.create({
        model,
        max_tokens: 8192,
        system: systemPrompt,
        messages: messages.map((m) => ({
          role: m.role,
          content: m.content
        }))
      });
      const block = response.content[0];
      if (!block || block.type !== "text") {
        throw new Error("Anthropic returned no text content");
      }
      return block.text;
    }
  };
}
802
// Adapter for the OpenAI Chat Completions API, called directly via fetch (no
// SDK). complete(messages, systemPrompt) prepends the system prompt as a
// system-role message and returns the first choice's content; requires
// OPENAI_API_KEY and throws on HTTP errors or an empty response.
function createOpenAiAdapter(model) {
  return {
    async complete(messages, systemPrompt) {
      const apiKey = process.env["OPENAI_API_KEY"];
      if (!apiKey) {
        throw new Error("OPENAI_API_KEY environment variable is not set");
      }
      const body = {
        model,
        messages: [
          { role: "system", content: systemPrompt },
          ...messages.map((m) => ({ role: m.role, content: m.content }))
        ]
      };
      const response = await fetch("https://api.openai.com/v1/chat/completions", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          Authorization: `Bearer ${apiKey}`
        },
        body: JSON.stringify(body)
      });
      if (!response.ok) {
        const text = await response.text();
        throw new Error(`OpenAI API error: HTTP ${response.status} \u2014 ${text}`);
      }
      const data = await response.json();
      const content = data.choices[0]?.message?.content;
      if (!content) {
        throw new Error("OpenAI returned no content");
      }
      return content;
    }
  };
}
837
// Adapter for a local Ollama server's /api/chat endpoint (non-streaming).
// Base URL comes from OLLAMA_BASE_URL, defaulting to http://localhost:11434.
// complete(messages, systemPrompt) prepends the system prompt as a
// system-role message and returns message.content; throws on HTTP errors or
// an empty response.
function createOllamaAdapter(model) {
  return {
    async complete(messages, systemPrompt) {
      const baseUrl = process.env["OLLAMA_BASE_URL"] ?? "http://localhost:11434";
      const body = {
        model,
        messages: [
          { role: "system", content: systemPrompt },
          ...messages.map((m) => ({ role: m.role, content: m.content }))
        ],
        stream: false
      };
      const response = await fetch(`${baseUrl}/api/chat`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(body)
      });
      if (!response.ok) {
        const text = await response.text();
        throw new Error(`Ollama API error: HTTP ${response.status} \u2014 ${text}`);
      }
      const data = await response.json();
      const content = data.message?.content;
      if (!content) {
        throw new Error("Ollama returned no content");
      }
      return content;
    }
  };
}
867
/**
 * Select and build the adapter for the provider named in `config.llm`.
 *
 * @param {{ llm: { provider: string, model: string } }} config - Loaded project config.
 * @returns {object} Provider adapter exposing `complete`.
 * @throws {Error} for any provider other than "anthropic", "openai", "ollama".
 */
function createLlmAdapter(config) {
  const { provider, model } = config.llm;
  if (provider === "anthropic") {
    return createAnthropicAdapter(model);
  }
  if (provider === "openai") {
    return createOpenAiAdapter(model);
  }
  if (provider === "ollama") {
    return createOllamaAdapter(model);
  }
  throw new Error(`Unsupported LLM provider: ${String(provider)}`);
}
882
+
883
+ // src/ingest.js
884
+ var import_promises7 = require("fs/promises");
885
+ var import_node_path6 = require("path");
886
// System prompt for ingestion: instructs the LLM to merge a new source
// document into the wiki and reply with a bare JSON object (no code fences)
// in the exact shape that parseIngestResult validates.
// NOTE: the template text below is runtime data sent to the LLM verbatim.
var SYSTEM_PROMPT = `You are an AI assistant maintaining a knowledge base wiki.
You will be given a new source document and the current state of the wiki.
Your task is to integrate the new knowledge into the wiki.

Return ONLY a JSON object matching this exact schema (no markdown fences):
{
"summary": { "path": "wiki/sources/<filename>-summary.md", "content": "..." },
"updates": [{ "path": "...", "content": "...", "reason": "..." }],
"newPages": [{ "path": "...", "content": "...", "reason": "..." }],
"indexUpdate": "...",
"logEntry": "..."
}`;
898
/**
 * Throw unless `absPath` resolves to a location strictly inside `root`.
 * Guards LLM-supplied output paths against directory traversal; the trailing
 * slash on the root prefix prevents "/rootX" matching "/root".
 *
 * @param {string} absPath - Candidate absolute path.
 * @param {string} root - Project root directory.
 * @throws {Error} when the resolved path falls outside the root.
 */
function assertWithinRoot(absPath, root) {
  const target = (0, import_node_path6.resolve)(absPath);
  const rootPrefix = `${(0, import_node_path6.resolve)(root)}/`;
  if (target.startsWith(rootPrefix)) {
    return;
  }
  throw new Error(`Unsafe path rejected: "${absPath}" is outside project root`);
}
905
/**
 * Best-effort UTF-8 read: returns the file contents, or "" when the file
 * is missing or unreadable (used for optional wiki files like the index).
 *
 * @param {string} filePath - Absolute path to read.
 * @returns {Promise<string>} File text, or the empty string on any error.
 */
async function readFileSafe(filePath) {
  let text;
  try {
    text = await (0, import_promises7.readFile)(filePath, "utf8");
  } catch {
    text = "";
  }
  return text;
}
912
/**
 * Validate and normalize the raw LLM reply into an ingest-result object.
 *
 * Strips optional markdown code fences (models add them despite the prompt),
 * parses the JSON, and checks the schema: a `summary` {path, content},
 * `updates` and `newPages` arrays of {path, content, reason}, plus
 * `indexUpdate` and `logEntry` strings.
 *
 * @param {string} raw - Raw completion text from the LLM.
 * @returns {{ summary: {path: string, content: string}, updates: Array, newPages: Array, indexUpdate: string, logEntry: string }}
 * @throws {Error} describing the first schema violation found; JSON parse
 *   failures carry the original exception as `cause`.
 */
function parseIngestResult(raw) {
  const cleaned = raw.replace(/^```(?:json)?\s*/i, "").replace(/\s*```\s*$/i, "").trim();
  let parsed;
  try {
    parsed = JSON.parse(cleaned);
  } catch (err) {
    // Preserve the underlying SyntaxError for debugging instead of dropping it.
    throw new Error(`Invalid LLM response: could not parse JSON. Raw response: ${cleaned.slice(0, 200)}`, { cause: err });
  }
  if (parsed === null || typeof parsed !== "object" || Array.isArray(parsed)) {
    throw new Error("Invalid LLM response: expected a JSON object");
  }
  const obj = parsed;
  if (!obj["summary"] || typeof obj["summary"] !== "object" || Array.isArray(obj["summary"])) {
    throw new Error('Invalid LLM response: missing "summary" object');
  }
  const summary = obj["summary"];
  if (typeof summary["path"] !== "string" || typeof summary["content"] !== "string") {
    throw new Error('Invalid LLM response: "summary" must have "path" and "content" strings');
  }
  if (!Array.isArray(obj["updates"])) {
    throw new Error('Invalid LLM response: "updates" must be an array');
  }
  if (!Array.isArray(obj["newPages"])) {
    throw new Error('Invalid LLM response: "newPages" must be an array');
  }
  if (typeof obj["indexUpdate"] !== "string") {
    throw new Error('Invalid LLM response: "indexUpdate" must be a string');
  }
  if (typeof obj["logEntry"] !== "string") {
    throw new Error('Invalid LLM response: "logEntry" must be a string');
  }
  // updates and newPages share one entry shape; validate them with one helper.
  const updates = parseEntryList(obj["updates"], "updates");
  const newPages = parseEntryList(obj["newPages"], "newPages");
  return {
    summary: { path: summary["path"], content: summary["content"] },
    updates,
    newPages,
    indexUpdate: obj["indexUpdate"],
    logEntry: obj["logEntry"]
  };
}
/**
 * Validate one array of {path, content, reason} entries.
 * `label` names the array ("updates" / "newPages") in error messages, which
 * are kept identical to the previous inline validation.
 *
 * @param {Array} items - Raw array from the parsed JSON.
 * @param {string} label - Field name used in error messages.
 * @returns {Array<{path: string, content: string, reason: string}>}
 * @throws {Error} on the first malformed entry.
 */
function parseEntryList(items, label) {
  return items.map((item, i) => {
    if (typeof item !== "object" || item === null || Array.isArray(item)) {
      throw new Error(`Invalid LLM response: ${label}[${i}] must be an object`);
    }
    const entry = item;
    if (typeof entry["path"] !== "string" || typeof entry["content"] !== "string" || typeof entry["reason"] !== "string") {
      throw new Error(`Invalid LLM response: ${label}[${i}] must have path, content, and reason strings`);
    }
    return {
      path: entry["path"],
      content: entry["content"],
      reason: entry["reason"]
    };
  });
}
979
/**
 * Write an already-validated ingest result to disk.
 *
 * Writes, in order: the source summary, each wiki-page update, each new
 * wiki page, the rewritten wiki index, a copy of the source document into
 * the project's sources directory, and a dated line appended to the wiki
 * log; finally re-indexes the project so the search DB reflects the writes.
 *
 * All LLM-supplied paths (summary, updates, newPages) are traversal-checked
 * via assertWithinRoot before any write. The index/source/log paths are
 * joined from project config fields, not LLM output, and are not re-checked.
 *
 * @param {object} project - Loaded project (root, wikiDir, sourcesDir, ...).
 * @param {object} result - Output of parseIngestResult.
 * @param {string} sourceContent - Raw text of the ingested source document.
 * @param {string} sourceFilename - Basename to store the source under.
 */
async function applyIngestResult(project, result, sourceContent, sourceFilename) {
  const summaryAbsPath = (0, import_node_path6.join)(project.root, result.summary.path);
  assertWithinRoot(summaryAbsPath, project.root);
  await (0, import_promises7.mkdir)((0, import_node_path6.dirname)(summaryAbsPath), { recursive: true });
  await (0, import_promises7.writeFile)(summaryAbsPath, result.summary.content, "utf8");
  // Updated pages: content fully replaces the existing file.
  for (const update of result.updates) {
    const absPath = (0, import_node_path6.join)(project.root, update.path);
    assertWithinRoot(absPath, project.root);
    await (0, import_promises7.mkdir)((0, import_node_path6.dirname)(absPath), { recursive: true });
    await (0, import_promises7.writeFile)(absPath, update.content, "utf8");
  }
  // Brand-new pages: identical write path to updates.
  for (const newPage of result.newPages) {
    const absPath = (0, import_node_path6.join)(project.root, newPage.path);
    assertWithinRoot(absPath, project.root);
    await (0, import_promises7.mkdir)((0, import_node_path6.dirname)(absPath), { recursive: true });
    await (0, import_promises7.writeFile)(absPath, newPage.content, "utf8");
  }
  // The index is replaced wholesale with the LLM's rewritten version.
  const indexPath = (0, import_node_path6.join)(project.wikiDir, "_index.md");
  await (0, import_promises7.writeFile)(indexPath, result.indexUpdate, "utf8");
  // Keep a verbatim copy of the ingested source alongside the wiki.
  const sourceDestPath = (0, import_node_path6.join)(project.sourcesDir, sourceFilename);
  await (0, import_promises7.mkdir)(project.sourcesDir, { recursive: true });
  await (0, import_promises7.writeFile)(sourceDestPath, sourceContent, "utf8");
  // Append a "- YYYY-MM-DD: <entry>" line to the wiki log.
  const logPath = (0, import_node_path6.join)(project.wikiDir, "log.md");
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
  const logLine = `- ${timestamp}: ${result.logEntry}
`;
  await (0, import_promises7.appendFile)(logPath, logLine, "utf8");
  // Re-index last so the search DB sees every file written above.
  await indexProject(project);
}
1008
/**
 * Run one ingest cycle for a source document.
 *
 * Builds a prompt from the wiki schema, the current wiki index, and the
 * source text; asks the LLM for a structured integration plan; validates it
 * with parseIngestResult; and, only when options.apply is true, writes the
 * plan to disk via applyIngestResult. With apply false (the default) this is
 * a dry run: nothing is written.
 *
 * @param {object} project - Loaded project (wikiDir, kbDir, sourcesDir, ...).
 * @param {string} sourcePath - Path of the source document to ingest.
 * @param {{ complete: Function }} llm - LLM adapter from createLlmAdapter.
 * @param {{ apply?: boolean }} [options]
 * @returns {Promise<{ result: object, sourceFile: string, dryRun: boolean }>}
 *   The parsed plan, the destination path for the source copy, and whether
 *   this call skipped the writes.
 * @throws {Error} when the LLM reply fails parseIngestResult validation.
 */
async function ingestSource(project, sourcePath, llm, options) {
  const apply = options?.apply ?? false;
  const sourceContent = await readSource(sourcePath);
  // Schema and index are optional context; readFileSafe yields "" if absent.
  const indexPath = (0, import_node_path6.join)(project.wikiDir, "_index.md");
  const currentIndex = await readFileSafe(indexPath);
  const schemaPath = (0, import_node_path6.join)(project.kbDir, "schema.md");
  const schema = await readFileSafe(schemaPath);
  const userMessage = `## Wiki Schema
${schema}

## Current Wiki Index
${currentIndex}

## New Source: ${sourceContent.filename}
${sourceContent.content}

Integrate this source into the wiki following the schema above.`;
  const raw = await llm.complete([{ role: "user", content: userMessage }], SYSTEM_PROMPT);
  const result = parseIngestResult(raw);
  if (apply) {
    await applyIngestResult(project, result, sourceContent.content, sourceContent.filename);
  }
  // Reported even on dry runs: where the source copy would be (or was) stored.
  const sourceFile = (0, import_node_path6.join)(project.sourcesDir, sourceContent.filename);
  return {
    result,
    sourceFile,
    dryRun: !apply
  };
}
1037
+
1038
+ // src/query.ts
1039
+ var import_promises8 = require("fs/promises");
1040
+ var import_node_fs = require("fs");
1041
+ var import_node_path7 = require("path");
1042
// System prompt for queryWiki: answer only from the supplied wiki pages,
// citing them with [[wikilink]] syntax. Template text is runtime data.
var SYSTEM_PROMPT2 = `You are a knowledgeable assistant answering questions about a project's knowledge base.
Answer concisely using only information from the provided wiki pages.
Use [[page-name]] wikilink syntax to cite specific wiki pages in your answer.
Format your answer in markdown.`;
1046
/**
 * Reject any path that resolves outside the project root (query-side twin of
 * the ingest-side guard; used for the --save output path).
 *
 * @param {string} absPath - Candidate absolute path.
 * @param {string} root - Project root directory.
 * @throws {Error} when the resolved path is not inside the root.
 */
function assertWithinRoot2(absPath, root) {
  const target = (0, import_node_path7.resolve)(absPath);
  const rootPrefix = `${(0, import_node_path7.resolve)(root)}/`;
  if (!target.startsWith(rootPrefix)) {
    throw new Error(`Unsafe path rejected: "${absPath}" is outside project root`);
  }
}
1055
/**
 * Best-effort UTF-8 read used while assembling query context: resolves to
 * the file text, or "" when the file is missing or unreadable.
 *
 * @param {string} filePath - Absolute path to read.
 * @returns {Promise<string>}
 */
async function readFileSafe2(filePath) {
  return (0, import_promises8.readFile)(filePath, "utf8").catch(() => "");
}
1062
/**
 * Answer a free-form question from the wiki.
 *
 * Ensures the search index exists (building it on first use), runs a
 * full-text search for the question, loads the top matching pages, and asks
 * the LLM to answer using only those pages. With options.save, the answer is
 * also written to a traversal-checked path inside the project, logged in the
 * wiki log, and the project is re-indexed.
 *
 * @param {object} project - Loaded project (kbDir, root, wikiDir, name, ...).
 * @param {string} question - The user's question.
 * @param {{ complete: Function }} llm - LLM adapter from createLlmAdapter.
 * @param {{ save?: string }} [options] - Optional project-relative save path.
 * @returns {Promise<{ answer: string, sources: string[] }>} The markdown
 *   answer and the project-relative paths of the pages given as context.
 */
async function queryWiki(project, question, llm, options) {
  // Lazily build the index DB the first time a query runs.
  const dbPath = (0, import_node_path7.join)(project.kbDir, "index.db");
  if (!(0, import_node_fs.existsSync)(dbPath)) {
    await indexProject(project);
  }
  const db = openDb(project);
  let searchResults;
  try {
    searchResults = searchWiki(db, question, project.name, { limit: 10 });
  } finally {
    // Always release the DB handle, even if the search throws.
    closeDb(db);
  }
  // Load the content of each hit; pages that fail to read are dropped.
  const pages = [];
  for (const result of searchResults) {
    const absPath = (0, import_node_path7.join)(project.root, result.path);
    const content = await readFileSafe2(absPath);
    if (content) {
      pages.push({ path: result.path, title: result.title, content });
    }
  }
  const pagesSection = pages.length > 0 ? pages.map((p) => `### ${p.title} (${p.path})
${p.content}`).join("\n\n") : "(No wiki pages found for this query.)";
  const userMessage = `## Question
${question}

## Relevant Wiki Pages

${pagesSection}`;
  const answer = await llm.complete(
    [{ role: "user", content: userMessage }],
    SYSTEM_PROMPT2
  );
  const sources = pages.map((p) => p.path);
  if (options?.save) {
    // Save path is user/LLM input relative to the root; check for traversal.
    const saveRelPath = options.save;
    const saveAbsPath = (0, import_node_path7.join)(project.root, saveRelPath);
    assertWithinRoot2(saveAbsPath, project.root);
    await (0, import_promises8.mkdir)((0, import_node_path7.dirname)(saveAbsPath), { recursive: true });
    await (0, import_promises8.writeFile)(saveAbsPath, answer, "utf8");
    // Record the query in the wiki log under a dated heading.
    const logPath = (0, import_node_path7.join)(project.wikiDir, "log.md");
    const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
    const logEntry = `
## ${timestamp} \u2014 Queried: ${question}

Saved to: ${saveRelPath}
`;
    await (0, import_promises8.appendFile)(logPath, logEntry, "utf8");
    // Re-index so the saved answer becomes searchable.
    await indexProject(project);
  }
  return { answer, sources };
}
1113
+
1114
+ // src/lint.ts
1115
+ var import_promises9 = require("fs/promises");
1116
+ var import_node_path8 = require("path");
1117
/**
 * Recursively list every .md file under `dir` as absolute paths.
 * A missing directory yields [] instead of an error; other FS errors rethrow.
 *
 * @param {string} dir - Directory to walk.
 * @returns {Promise<string[]>}
 */
async function collectMdFiles2(dir) {
  let entries;
  try {
    entries = await (0, import_promises9.readdir)(dir, {
      recursive: true,
      withFileTypes: true
    });
  } catch (err) {
    if (err.code === "ENOENT") {
      return [];
    }
    throw err;
  }
  const files = [];
  for (const entry of entries) {
    if (entry.isFile() && entry.name.endsWith(".md")) {
      // parentPath replaced Dirent.path in newer Node; support both.
      files.push((0, import_node_path8.join)(entry.parentPath ?? entry.path, entry.name));
    }
  }
  return files;
}
1129
/**
 * Recursively list every regular file under `dir` as absolute paths.
 * A missing directory yields [] instead of an error; other FS errors rethrow.
 *
 * @param {string} dir - Directory to walk.
 * @returns {Promise<string[]>}
 */
async function collectSourceFiles(dir) {
  let entries;
  try {
    entries = await (0, import_promises9.readdir)(dir, {
      recursive: true,
      withFileTypes: true
    });
  } catch (err) {
    if (err.code === "ENOENT") {
      return [];
    }
    throw err;
  }
  return entries.filter((entry) => entry.isFile()).map(
    // parentPath replaced Dirent.path in newer Node; support both.
    (entry) => (0, import_node_path8.join)(entry.parentPath ?? entry.path, entry.name)
  );
}
1141
/**
 * Build the set of every string form a wikilink may use to name a page:
 * the project-relative path (with and without .md), the bare filename, and
 * the wiki-relative path (with and without .md).
 *
 * @param {string[]} relPaths - Wiki page paths relative to the project root.
 * @param {string} projectRoot - Absolute project root.
 * @param {string} wikiDir - Absolute wiki directory.
 * @returns {Set<string>} All accepted link targets.
 */
function buildPageKeySet(relPaths, projectRoot, wikiDir) {
  const keys = new Set();
  for (const relPath of relPaths) {
    keys.add(relPath);
    keys.add(relPath.replace(/\.md$/i, ""));
    keys.add((0, import_node_path8.basename)(relPath, ".md"));
    const wikiRel = (0, import_node_path8.relative)(
      wikiDir,
      (0, import_node_path8.join)(projectRoot, relPath)
    );
    keys.add(wikiRel);
    keys.add(wikiRel.replace(/\.md$/i, ""));
  }
  return keys;
}
1155
/**
 * Lint the wiki and report structural issues.
 *
 * Re-indexes first so page metadata is current, then runs these passes over
 * the indexed pages:
 *  - ORPHAN_PAGE  (warning): page with no inbound wikilinks (index exempt).
 *  - BROKEN_LINK  (warning): outgoing wikilink that matches no known page key.
 *  - STUB_PAGE    (info):    page with no links and fewer than 50 words.
 *  - STALE_SUMMARY (warning): a wiki/sources summary whose matching source
 *    file has a newer mtime than the summary.
 *  - MISSING_INDEX (info):   page not linked from _index.md (checked only
 *    when an _index.md exists).
 *
 * @param {object} project - Loaded project (root, wikiDir, sourcesDir, ...).
 * @returns {Promise<{ issues: Array, pagesChecked: number, sourcesChecked: number }>}
 */
async function lintProject(project) {
  await indexProject(project);
  const issues = [];
  const absWikiFiles = await collectMdFiles2(project.wikiDir);
  const relWikiPaths = absWikiFiles.map((f) => (0, import_node_path8.relative)(project.root, f));
  const pagesChecked = relWikiPaths.length;
  const sourceFiles = await collectSourceFiles(project.sourcesDir);
  // .gitkeep placeholders do not count as sources.
  const sourcesChecked = sourceFiles.filter(
    (f) => (0, import_node_path8.basename)(f) !== ".gitkeep"
  ).length;
  if (pagesChecked === 0) {
    return { issues, pagesChecked: 0, sourcesChecked };
  }
  // Every string form a link may take, for the BROKEN_LINK pass.
  const pageKeySet = buildPageKeySet(
    relWikiPaths,
    project.root,
    project.wikiDir
  );
  // Pull per-page metadata (links, word counts) out of the index DB.
  const db = openDb(project);
  let rows;
  try {
    rows = db.prepare("SELECT path, outgoing_links, word_count, mtime, updated_at FROM page_meta").all();
  } finally {
    closeDb(db);
  }
  const metaMap = /* @__PURE__ */ new Map();
  for (const row of rows) {
    metaMap.set(row.path, row);
  }
  // Invert outgoing links into a per-page inbound-link set.
  const inboundLinks = /* @__PURE__ */ new Map();
  for (const rp of relWikiPaths) {
    inboundLinks.set(rp, /* @__PURE__ */ new Set());
  }
  for (const row of rows) {
    // outgoing_links is stored as a JSON array; treat bad JSON as no links.
    let links = [];
    try {
      links = JSON.parse(row.outgoing_links);
    } catch {
      links = [];
    }
    for (const link of links) {
      const resolved = resolveLink(
        link,
        relWikiPaths,
        project.root,
        project.wikiDir
      );
      if (resolved !== null) {
        const set = inboundLinks.get(resolved);
        if (set) {
          set.add(row.path);
        }
      }
    }
  }
  // Resolve which pages _index.md links to, for the MISSING_INDEX pass.
  const indexPath = relWikiPaths.find((p) => (0, import_node_path8.basename)(p) === "_index.md");
  let indexLinks = /* @__PURE__ */ new Set();
  if (indexPath) {
    const indexRow = metaMap.get(indexPath);
    if (indexRow) {
      let links = [];
      try {
        links = JSON.parse(indexRow.outgoing_links);
      } catch {
        links = [];
      }
      for (const link of links) {
        const resolved = resolveLink(
          link,
          relWikiPaths,
          project.root,
          project.wikiDir
        );
        if (resolved !== null) {
          indexLinks.add(resolved);
        }
      }
    }
  }
  // Pass: ORPHAN_PAGE.
  for (const rp of relWikiPaths) {
    if ((0, import_node_path8.basename)(rp) === "_index.md") continue;
    const inbound = inboundLinks.get(rp);
    if (!inbound || inbound.size === 0) {
      issues.push({
        severity: "warning",
        code: "ORPHAN_PAGE",
        path: rp,
        message: "Orphan page (no inbound links)"
      });
    }
  }
  // Pass: BROKEN_LINK.
  for (const row of rows) {
    let links = [];
    try {
      links = JSON.parse(row.outgoing_links);
    } catch {
      links = [];
    }
    for (const link of links) {
      if (!isLinkResolvable(link, pageKeySet)) {
        issues.push({
          severity: "warning",
          code: "BROKEN_LINK",
          path: row.path,
          message: `Broken wikilink [[${link}]] not found`,
          detail: link
        });
      }
    }
  }
  // Pass: STUB_PAGE.
  for (const row of rows) {
    if ((0, import_node_path8.basename)(row.path) === "_index.md") continue;
    let links = [];
    try {
      links = JSON.parse(row.outgoing_links);
    } catch {
      links = [];
    }
    if (links.length === 0 && row.word_count < 50) {
      issues.push({
        severity: "info",
        code: "STUB_PAGE",
        path: row.path,
        message: `Stub page (no links, < 50 words)`
      });
    }
  }
  // Pass: STALE_SUMMARY — only pages under wiki/sources are considered.
  const wikiSourcesDir = (0, import_node_path8.join)(project.wikiDir, "sources");
  for (const rp of relWikiPaths) {
    const absWikiPage = (0, import_node_path8.join)(project.root, rp);
    const relToWikiSources = (0, import_node_path8.relative)(wikiSourcesDir, absWikiPage);
    if (relToWikiSources.startsWith("..")) continue;
    // "<name>-summary.md" pairs with source file "<name>.*".
    const summaryBasename = (0, import_node_path8.basename)(rp, ".md");
    const sourceBasename = summaryBasename.endsWith("-summary") ? summaryBasename.slice(0, -"-summary".length) : summaryBasename;
    const matchingSource = sourceFiles.find((sf) => {
      const sfBase = (0, import_node_path8.basename)(sf, (0, import_node_path8.extname)(sf));
      return sfBase === sourceBasename;
    });
    if (!matchingSource) continue;
    try {
      const summaryRow = metaMap.get(rp);
      if (!summaryRow) continue;
      const [sourceStat, summaryStat] = await Promise.all([
        (0, import_promises9.stat)(matchingSource),
        (0, import_promises9.stat)((0, import_node_path8.join)(project.root, summaryRow.path))
      ]);
      if (sourceStat.mtimeMs > summaryStat.mtimeMs) {
        issues.push({
          severity: "warning",
          code: "STALE_SUMMARY",
          path: rp,
          message: "Source updated after summary",
          detail: (0, import_node_path8.relative)(project.root, matchingSource)
        });
      }
    } catch {
      // Best-effort: stat failures (e.g. file deleted mid-lint) skip the pair.
    }
  }
  // Pass: MISSING_INDEX — accepts either the resolved path or bare filename.
  for (const rp of relWikiPaths) {
    if ((0, import_node_path8.basename)(rp) === "_index.md") continue;
    if (!indexPath) {
      continue;
    }
    if (!indexLinks.has(rp)) {
      const fname = (0, import_node_path8.basename)(rp, ".md");
      if (!indexLinks.has(fname)) {
        issues.push({
          severity: "info",
          code: "MISSING_INDEX",
          path: rp,
          message: "Not in _index.md"
        });
      }
    }
  }
  return { issues, pagesChecked, sourcesChecked };
}
1332
/**
 * Resolve a wikilink target against the known wiki pages.
 * Accepts, per page: bare filename, project-relative path (with/without .md),
 * or wiki-relative path (with/without .md). Returns the project-relative
 * path of the first page that matches, or null when none does.
 *
 * @param {string} link - Raw wikilink target.
 * @param {string[]} relPaths - Wiki page paths relative to the project root.
 * @param {string} projectRoot - Absolute project root.
 * @param {string} wikiDir - Absolute wiki directory.
 * @returns {string | null}
 */
function resolveLink(link, relPaths, projectRoot, wikiDir) {
  for (const candidate of relPaths) {
    if ((0, import_node_path8.basename)(candidate, ".md") === link) {
      return candidate;
    }
    if (candidate === link || candidate === `${link}.md`) {
      return candidate;
    }
    const wikiRel = (0, import_node_path8.relative)(
      wikiDir,
      (0, import_node_path8.join)(projectRoot, candidate)
    );
    if (wikiRel === link || wikiRel.replace(/\.md$/i, "") === link) {
      return candidate;
    }
  }
  return null;
}
1345
/**
 * A link is resolvable when it matches one of the keys produced by
 * buildPageKeySet.
 *
 * @param {string} link - Raw wikilink target.
 * @param {Set<string>} pageKeySet - Accepted link targets.
 * @returns {boolean}
 */
function isLinkResolvable(link, pageKeySet) {
  const resolvable = pageKeySet.has(link);
  return resolvable;
}
1348
+
1349
+ // src/log-parser.ts
1350
/**
 * Parse a markdown log into its "## " entries.
 *
 * The log is split at every line starting with "## "; each section becomes
 * an entry with the text after "## " as `heading` (trimmed) and everything
 * after the first newline as `body` (trimmed, "" when the section is a
 * single line). Empty chunks and non-h2 preamble (e.g. a leading "# " title)
 * are skipped.
 *
 * @param {string} content - Full text of the log file.
 * @returns {Array<{ heading: string, body: string }>}
 */
function parseLogEntries(content) {
  const entries = [];
  for (const rawSection of content.split(/^(?=## )/m)) {
    const section = rawSection.trim();
    if (section === "" || section.startsWith("# ") || !section.startsWith("## ")) {
      continue;
    }
    const breakAt = section.indexOf("\n");
    const headLine = breakAt === -1 ? section.slice(3) : section.slice(3, breakAt);
    const body = breakAt === -1 ? "" : section.slice(breakAt + 1).trim();
    entries.push({ heading: headLine.trim(), body });
  }
  return entries;
}
1369
+
1370
+ // src/index.ts
1371
// Package version exported as part of the public API.
var VERSION = "0.1.0";
// Annotate the CommonJS export names for ESM import in node:
// (The `0 &&` short-circuit means this assignment never runs; it exists so
// static analysis of the CJS file can detect these named exports.)
0 && (module.exports = {
  VERSION,
  closeDb,
  createLlmAdapter,
  indexProject,
  ingestSource,
  initProject,
  lintProject,
  loadProject,
  openDb,
  parseConfig,
  parseLogEntries,
  parsePage,
  queryWiki,
  readSource,
  searchWiki,
  tryLoadProject
});
1391
+ //# sourceMappingURL=index.cjs.map