brainbank 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/LICENSE +21 -0
  2. package/README.md +1059 -0
  3. package/assets/architecture.png +0 -0
  4. package/bin/brainbank +11 -0
  5. package/dist/chunk-2P3EGY6S.js +37 -0
  6. package/dist/chunk-2P3EGY6S.js.map +1 -0
  7. package/dist/chunk-3GAIDXRW.js +105 -0
  8. package/dist/chunk-3GAIDXRW.js.map +1 -0
  9. package/dist/chunk-4ZKBQ33J.js +56 -0
  10. package/dist/chunk-4ZKBQ33J.js.map +1 -0
  11. package/dist/chunk-7QVYU63E.js +7 -0
  12. package/dist/chunk-7QVYU63E.js.map +1 -0
  13. package/dist/chunk-EDKSKLX4.js +490 -0
  14. package/dist/chunk-EDKSKLX4.js.map +1 -0
  15. package/dist/chunk-GOUBW7UA.js +373 -0
  16. package/dist/chunk-GOUBW7UA.js.map +1 -0
  17. package/dist/chunk-MJ3Y24H6.js +185 -0
  18. package/dist/chunk-MJ3Y24H6.js.map +1 -0
  19. package/dist/chunk-N6ZMBFDE.js +224 -0
  20. package/dist/chunk-N6ZMBFDE.js.map +1 -0
  21. package/dist/chunk-YGSEUWLV.js +2053 -0
  22. package/dist/chunk-YGSEUWLV.js.map +1 -0
  23. package/dist/chunk-Z5SU54HP.js +171 -0
  24. package/dist/chunk-Z5SU54HP.js.map +1 -0
  25. package/dist/cli.d.ts +1 -0
  26. package/dist/cli.js +731 -0
  27. package/dist/cli.js.map +1 -0
  28. package/dist/code.d.ts +31 -0
  29. package/dist/code.js +8 -0
  30. package/dist/code.js.map +1 -0
  31. package/dist/docs.d.ts +19 -0
  32. package/dist/docs.js +8 -0
  33. package/dist/docs.js.map +1 -0
  34. package/dist/git.d.ts +31 -0
  35. package/dist/git.js +8 -0
  36. package/dist/git.js.map +1 -0
  37. package/dist/index.d.ts +845 -0
  38. package/dist/index.js +80 -0
  39. package/dist/index.js.map +1 -0
  40. package/dist/memory.d.ts +19 -0
  41. package/dist/memory.js +146 -0
  42. package/dist/memory.js.map +1 -0
  43. package/dist/notes.d.ts +19 -0
  44. package/dist/notes.js +57 -0
  45. package/dist/notes.js.map +1 -0
  46. package/dist/openai-PCTYLOWI.js +8 -0
  47. package/dist/openai-PCTYLOWI.js.map +1 -0
  48. package/dist/types-Da_zLLOl.d.ts +474 -0
  49. package/package.json +91 -0
@@ -0,0 +1,373 @@
1
+ import {
2
+ __name
3
+ } from "./chunk-7QVYU63E.js";
4
+
5
+ // src/indexers/doc-indexer.ts
6
+ import * as fs from "fs";
7
+ import * as path from "path";
8
+ import { createHash } from "crypto";
9
+ import { glob } from "fs/promises";
10
// ── Break-point scoring table ─────────────────────────────────────────
// Each entry pairs a line-matching regex with a "break quality" score;
// higher scores mark better places to split a document into chunks.
const BREAK_SCORES = [
  [/^# /, 100],      // H1
  [/^## /, 90],      // H2
  [/^### /, 80],     // H3
  [/^#### /, 70],    // H4
  [/^##### /, 60],   // H5
  [/^###### /, 50],  // H6
  [/^```/, 80],      // code fence
  [/^---$/, 60],     // horizontal rule
  [/^\*\*\*$/, 60],  // horizontal rule (alt)
  [/^$/, 20],        // blank line (paragraph break)
  [/^[-*+] /, 5]     // list item
];

// Chunk sizing targets, in characters.
const TARGET_CHARS = 3000;   // preferred chunk length
const WINDOW_CHARS = 600;    // search window around the target cut point
const MIN_CHUNK_CHARS = 200; // never emit chunks smaller than this
37
// src/indexers/doc-indexer.ts
/**
 * BrainBank — document indexer.
 *
 * Indexes a directory of documents into the `doc_chunks` / `doc_vectors`
 * tables, the HNSW index, and the in-memory vector cache, using
 * heading-aware ("smart") chunking at natural markdown boundaries.
 */
var DocIndexer = class {
  /**
   * @param _db        database handle (prepare/run/get/transaction)
   * @param _embedding embedding provider (embedBatch)
   * @param _hnsw      HNSW vector index (add)
   * @param _vecCache  Map of chunkId -> Float32Array embedding cache
   */
  constructor(_db, _embedding, _hnsw, _vecCache) {
    this._db = _db;
    this._embedding = _embedding;
    this._hnsw = _hnsw;
    this._vecCache = _vecCache;
  }
  static {
    __name(this, "DocIndexer");
  }
  /**
   * Convert one glob-style ignore pattern into an (unanchored) RegExp.
   *
   * Fixes two defects of the previous inline conversion:
   *  - regex metacharacters (".", "+", "(", ...) in the pattern are now
   *    escaped, so "*.tmp" no longer matches "footmp" via a bare "." wildcard;
   *  - "**" is protected with a NUL placeholder before single-star expansion,
   *    so it really becomes ".*" (previously the "*" inside the freshly
   *    inserted ".*" was itself rewritten to "[^/]*", silently breaking
   *    cross-directory matching).
   * The result stays unanchored to preserve the original substring-match
   * semantics.
   */
  _ignoreToRegex(ig) {
    const escaped = ig.replace(/[.+?^${}()|[\]\\]/g, "\\$&");
    return new RegExp(
      escaped.replace(/\*\*/g, "\0").replace(/\*/g, "[^/]*").replace(/\0/g, ".*")
    );
  }
  /**
   * Index all documents in a collection.
   * Incremental — files whose content hash is unchanged are skipped.
   *
   * @param collection collection name (stored in doc_chunks.collection)
   * @param dirPath    directory to scan
   * @param pattern    glob of files to index (default "**\/*.md")
   * @param options    { ignore?: string[], onProgress?: (file, current, total) => void }
   * @returns counts of indexed files, skipped files, and chunks written
   * @throws Error when dirPath does not exist
   */
  async indexCollection(collection, dirPath, pattern = "**/*.md", options = {}) {
    const absDir = path.resolve(dirPath);
    if (!fs.existsSync(absDir)) {
      throw new Error(`Collection path does not exist: ${absDir}`);
    }
    // Collect matching relative file paths, honouring the ignore globs.
    const files = [];
    for await (const entry of glob(pattern, { cwd: absDir })) {
      const fullPath = path.join(absDir, entry);
      const stat = fs.statSync(fullPath);
      if (stat.isFile()) {
        const shouldIgnore = options.ignore?.some((ig) => this._ignoreToRegex(ig).test(entry));
        if (!shouldIgnore) {
          files.push(entry);
        }
      }
    }
    let indexed = 0;
    let skipped = 0;
    let totalChunks = 0;
    for (let i = 0; i < files.length; i++) {
      const relPath = files[i];
      const absPath = path.join(absDir, relPath);
      options.onProgress?.(relPath, i + 1, files.length);
      const content = fs.readFileSync(absPath, "utf-8");
      // Short content hash, used only for change detection (not security).
      const hash = createHash("sha256").update(content).digest("hex").slice(0, 16);
      // Unchanged file (same collection/path/hash) — nothing to do.
      const existing = this._db.prepare(
        "SELECT id FROM doc_chunks WHERE collection = ? AND file_path = ? AND content_hash = ? LIMIT 1"
      ).get(collection, relPath, hash);
      if (existing) {
        skipped++;
        continue;
      }
      // Stale chunks for this file are replaced wholesale.
      this._db.prepare(
        "DELETE FROM doc_chunks WHERE collection = ? AND file_path = ?"
      ).run(collection, relPath);
      const title = this._extractTitle(content, relPath);
      const chunks = this._smartChunk(content);
      const insertChunk = this._db.prepare(`
        INSERT INTO doc_chunks (collection, file_path, title, content, seq, pos, content_hash)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      `);
      const chunkIds = [];
      // NOTE(review): assumes _db.transaction executes the callback
      // immediately (project Database wrapper) — confirm against storage layer.
      this._db.transaction(() => {
        for (let seq = 0; seq < chunks.length; seq++) {
          const chunk = chunks[seq];
          const result = insertChunk.run(
            collection,
            relPath,
            title,
            chunk.text,
            seq,
            chunk.pos,
            hash
          );
          chunkIds.push(Number(result.lastInsertRowid));
        }
      });
      // Embed with the title prefixed so each vector carries document context.
      const texts = chunks.map((c) => `title: ${title} | text: ${c.text}`);
      const embeddings = await this._embedding.embedBatch(texts);
      const insertVec = this._db.prepare(
        "INSERT OR REPLACE INTO doc_vectors (chunk_id, embedding) VALUES (?, ?)"
      );
      this._db.transaction(() => {
        for (let j = 0; j < chunkIds.length; j++) {
          const buf = Buffer.from(embeddings[j].buffer);
          insertVec.run(chunkIds[j], buf);
          this._hnsw.add(embeddings[j], chunkIds[j]);
          this._vecCache.set(chunkIds[j], embeddings[j]);
        }
      });
      indexed++;
      totalChunks += chunks.length;
    }
    return { indexed, skipped, chunks: totalChunks };
  }
  /**
   * Remove all indexed data for a collection: its chunks, its registration
   * row, and any per-path contexts.
   */
  removeCollection(collection) {
    this._db.prepare("DELETE FROM doc_chunks WHERE collection = ?").run(collection);
    this._db.prepare("DELETE FROM collections WHERE name = ?").run(collection);
    this._db.prepare("DELETE FROM path_contexts WHERE collection = ?").run(collection);
  }
  // ── Smart Chunking ──────────────────────────────
  /**
   * Split a document into chunks at natural markdown boundaries.
   * Greedy: each pass aims for TARGET_CHARS and picks the best-scoring break
   * point inside a ±WINDOW_CHARS window around that target.
   * (Removed an unused `lineStart` local present in the previous version.)
   */
  _smartChunk(text) {
    if (text.length <= TARGET_CHARS) {
      return [{ text, pos: 0 }];
    }
    const lines = text.split("\n");
    const breakPoints = this._findBreakPoints(lines);
    const chunks = [];
    let chunkStart = 0;
    while (chunkStart < text.length) {
      const remaining = text.length - chunkStart;
      if (remaining <= TARGET_CHARS + WINDOW_CHARS) {
        // Tail chunk: take everything; merge into the previous chunk when it
        // would be too small to stand alone.
        const lastText = text.slice(chunkStart).trim();
        if (lastText.length >= MIN_CHUNK_CHARS) {
          chunks.push({ text: lastText, pos: chunkStart });
        } else if (chunks.length > 0) {
          chunks[chunks.length - 1].text += "\n" + lastText;
        } else {
          chunks.push({ text: lastText, pos: chunkStart });
        }
        break;
      }
      const targetEnd = chunkStart + TARGET_CHARS;
      const windowStart = targetEnd - WINDOW_CHARS;
      let bestBreak = targetEnd;
      let bestScore = 0;
      for (const bp of breakPoints) {
        if (bp.pos <= chunkStart) continue;
        if (bp.pos > targetEnd + WINDOW_CHARS / 2) break;
        if (bp.pos < windowStart) continue;
        // Quadratic decay: break points nearer the target are preferred.
        const distance = Math.abs(bp.pos - targetEnd);
        const decay = 1 - (distance / WINDOW_CHARS) ** 2 * 0.7;
        const finalScore = bp.score * decay;
        if (finalScore > bestScore) {
          bestScore = finalScore;
          bestBreak = bp.pos;
        }
      }
      const chunkText = text.slice(chunkStart, bestBreak).trim();
      if (chunkText.length >= MIN_CHUNK_CHARS) {
        chunks.push({ text: chunkText, pos: chunkStart });
      }
      chunkStart = bestBreak;
    }
    return chunks;
  }
  /**
   * Find all potential break points (character positions with scores).
   * Lines inside fenced code blocks are skipped; the closing fence itself is
   * scored as a good break point.
   */
  _findBreakPoints(lines) {
    const points = [];
    let charPos = 0;
    let inCodeBlock = false;
    for (const line of lines) {
      if (line.trimStart().startsWith("```")) {
        inCodeBlock = !inCodeBlock;
        if (!inCodeBlock) {
          // Position just after the closing fence line.
          points.push({ pos: charPos + line.length + 1, score: 80 });
        }
        charPos += line.length + 1;
        continue;
      }
      if (inCodeBlock) {
        charPos += line.length + 1;
        continue;
      }
      // First matching rule wins — BREAK_SCORES is ordered best-first.
      for (const [pattern, score] of BREAK_SCORES) {
        if (pattern.test(line.trim())) {
          points.push({ pos: charPos, score });
          break;
        }
      }
      charPos += line.length + 1;
    }
    return points;
  }
  /**
   * Extract a document title: first H1–H3 heading, else the bare filename.
   */
  _extractTitle(content, filePath) {
    const match = content.match(/^#{1,3}\s+(.+)$/m);
    if (match) return match[1].trim();
    return path.basename(filePath, path.extname(filePath));
  }
};
227
+
228
+ // src/plugins/docs.ts
229
// src/plugins/docs.ts
/**
 * BrainBank "docs" module.
 *
 * Registers document collections, indexes them through DocIndexer, and
 * exposes vector search plus per-path context resolution over the chunks.
 */
var DocsModuleImpl = class {
  static {
    __name(this, "DocsModuleImpl");
  }
  name = "docs";
  hnsw;
  indexer;
  vecCache = /* @__PURE__ */ new Map();
  _db;
  _embedding;
  constructor(opts = {}) {
    this.opts = opts;
  }
  /** Wire the module to the shared db, embedding provider, and a fresh HNSW index. */
  async initialize(ctx) {
    this._db = ctx.db;
    this._embedding = ctx.embedding;
    this.hnsw = await ctx.createHnsw();
    ctx.loadVectors("doc_vectors", "chunk_id", this.hnsw, this.vecCache);
    this.indexer = new DocIndexer(ctx.db, ctx.embedding, this.hnsw, this.vecCache);
  }
  /** Register (or overwrite) a document collection. */
  addCollection(collection) {
    const upsert = this._db.prepare(`
      INSERT OR REPLACE INTO collections (name, path, pattern, ignore_json, context)
      VALUES (?, ?, ?, ?, ?)
    `);
    upsert.run(
      collection.name,
      collection.path,
      collection.pattern ?? "**/*.md",
      JSON.stringify(collection.ignore ?? []),
      collection.context ?? null
    );
  }
  /** Remove a collection together with everything indexed under it. */
  removeCollection(name) {
    this.indexer.removeCollection(name);
  }
  /** List every registered collection. */
  listCollections() {
    const rows = this._db.prepare("SELECT * FROM collections").all();
    return rows.map((row) => ({
      name: row.name,
      path: row.path,
      pattern: row.pattern,
      ignore: JSON.parse(row.ignore_json),
      context: row.context
    }));
  }
  /** Index all (or a named subset of) collections. Incremental. */
  async indexCollections(options = {}) {
    const registered = this.listCollections();
    let selected;
    if (options.collections) {
      selected = registered.filter((c) => options.collections.includes(c.name));
    } else {
      selected = registered;
    }
    const results = {};
    for (const spec of selected) {
      results[spec.name] = await this.indexer.indexCollection(spec.name, spec.path, spec.pattern, {
        ignore: spec.ignore,
        onProgress: /* @__PURE__ */ __name((file, cur, total) => options.onProgress?.(spec.name, file, cur, total), "onProgress")
      });
    }
    return results;
  }
  /** Vector search over document chunks only. */
  async search(query, options) {
    const k = options?.k ?? 8;
    const queryVec = await this._embedding.embed(query);
    const candidates = this.hnsw.search(queryVec, k);
    const matches = [];
    for (const cand of candidates) {
      if (options?.minScore && cand.score < options.minScore) continue;
      const chunk = this._db.prepare("SELECT * FROM doc_chunks WHERE id = ?").get(cand.id);
      if (!chunk) continue;
      if (options?.collection && chunk.collection !== options.collection) continue;
      const ctx = this._getDocContext(chunk.collection, chunk.file_path);
      matches.push({
        type: "document",
        score: cand.score,
        filePath: chunk.file_path,
        content: chunk.content,
        context: ctx,
        metadata: {
          collection: chunk.collection,
          title: chunk.title,
          seq: chunk.seq
        }
      });
    }
    return matches;
  }
  /** Attach a context description to a document path. */
  addContext(collection, targetPath, context) {
    this._db.prepare(`
      INSERT OR REPLACE INTO path_contexts (collection, path, context)
      VALUES (?, ?, ?)
    `).run(collection, targetPath, context);
  }
  /** Remove the context entry for a path. */
  removeContext(collection, targetPath) {
    this._db.prepare(
      "DELETE FROM path_contexts WHERE collection = ? AND path = ?"
    ).run(collection, targetPath);
  }
  /** List every context entry. */
  listContexts() {
    return this._db.prepare("SELECT * FROM path_contexts").all();
  }
  /** Summary counters for this module. */
  stats() {
    const count = (sql) => this._db.prepare(sql).get().c;
    return {
      collections: count("SELECT COUNT(*) as c FROM collections"),
      documents: count("SELECT COUNT(DISTINCT file_path) as c FROM doc_chunks"),
      chunks: count("SELECT COUNT(*) as c FROM doc_chunks"),
      hnswSize: this.hnsw.size
    };
  }
  /**
   * Resolve context for a document: the deepest matching path_contexts entry
   * (walking from the full path up to "/"), else the collection's own context.
   */
  _getDocContext(collection, filePath) {
    const parts = filePath.split("/");
    for (let depth = parts.length; depth >= 0; depth--) {
      const checkPath = depth === 0 ? "/" : "/" + parts.slice(0, depth).join("/");
      const row = this._db.prepare(
        "SELECT context FROM path_contexts WHERE collection = ? AND path = ?"
      ).get(collection, checkPath);
      if (row) return row.context;
    }
    const coll = this._db.prepare(
      "SELECT context FROM collections WHERE name = ?"
    ).get(collection);
    return coll?.context ?? undefined;
  }
};
364
+ function docs(opts) {
365
+ return new DocsModuleImpl(opts);
366
+ }
367
+ __name(docs, "docs");
368
+
369
+ export {
370
+ DocIndexer,
371
+ docs
372
+ };
373
+ //# sourceMappingURL=chunk-GOUBW7UA.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/indexers/doc-indexer.ts","../src/plugins/docs.ts"],"sourcesContent":["/**\n * BrainBank — Document Indexer\n * \n * Indexes generic document collections (markdown, text, etc.)\n * with heading-aware smart chunking, inspired by qmd.\n * \n * const indexer = new DocIndexer(db, embedding, hnsw, vecCache);\n * await indexer.indexCollection('notes', '/path/to/notes', '**\\/*.md');\n */\n\nimport * as fs from 'node:fs';\nimport * as path from 'node:path';\nimport { createHash } from 'node:crypto';\nimport { glob } from 'node:fs/promises';\nimport type { Database } from '../storage/database.ts';\nimport type { EmbeddingProvider, VectorIndex } from '../types.ts';\nimport type { HNSWIndex } from '../vector/hnsw.ts';\n\n// ── Break Point Scoring (qmd-inspired) ──────────────\n\ninterface BreakPoint {\n pos: number; // character position\n score: number; // break quality (higher = better)\n}\n\nconst BREAK_SCORES: [RegExp, number][] = [\n [/^# /, 100], // H1\n [/^## /, 90], // H2\n [/^### /, 80], // H3\n [/^#### /, 70], // H4\n [/^##### /, 60], // H5\n [/^###### /, 50], // H6\n [/^```/, 80], // Code fence\n [/^---$/, 60], // Horizontal rule\n [/^\\*\\*\\*$/, 60], // Horizontal rule alt\n [/^$/, 20], // Blank line (paragraph break)\n [/^[-*+] /, 5], // List item\n];\n\n// ── Chunk Target ────────────────────────────────────\n\nconst TARGET_CHARS = 3000; // ~900 tokens\nconst WINDOW_CHARS = 600; // search window before cutoff\nconst MIN_CHUNK_CHARS = 200; // don't create tiny chunks\n\n// ── DocIndexer ──────────────────────────────────────\n\nexport class DocIndexer {\n constructor(\n private _db: Database,\n private _embedding: EmbeddingProvider,\n private _hnsw: HNSWIndex,\n private _vecCache: Map<number, Float32Array>,\n ) {}\n\n /**\n * Index all documents in a collection.\n * Incremental — skips unchanged files (by content hash).\n */\n async indexCollection(\n collection: string,\n dirPath: string,\n pattern: string = '**/*.md',\n 
options: {\n ignore?: string[];\n onProgress?: (file: string, current: number, total: number) => void;\n } = {},\n ): Promise<{ indexed: number; skipped: number; chunks: number }> {\n // Resolve absolute path\n const absDir = path.resolve(dirPath);\n if (!fs.existsSync(absDir)) {\n throw new Error(`Collection path does not exist: ${absDir}`);\n }\n\n // Find files matching pattern\n const files: string[] = [];\n for await (const entry of glob(pattern, { cwd: absDir })) {\n const fullPath = path.join(absDir, entry);\n const stat = fs.statSync(fullPath);\n if (stat.isFile()) {\n // Check ignore patterns\n const shouldIgnore = options.ignore?.some(ig => {\n const igRegex = new RegExp(ig.replace(/\\*\\*/g, '.*').replace(/\\*/g, '[^/]*'));\n return igRegex.test(entry);\n });\n if (!shouldIgnore) {\n files.push(entry); // relative path\n }\n }\n }\n\n let indexed = 0;\n let skipped = 0;\n let totalChunks = 0;\n\n for (let i = 0; i < files.length; i++) {\n const relPath = files[i];\n const absPath = path.join(absDir, relPath);\n\n options.onProgress?.(relPath, i + 1, files.length);\n\n // Read content and hash\n const content = fs.readFileSync(absPath, 'utf-8');\n const hash = createHash('sha256').update(content).digest('hex').slice(0, 16);\n\n // Check if already indexed with same hash\n const existing = this._db.prepare(\n 'SELECT id FROM doc_chunks WHERE collection = ? AND file_path = ? AND content_hash = ? LIMIT 1'\n ).get(collection, relPath, hash) as any;\n\n if (existing) {\n skipped++;\n continue;\n }\n\n // Remove old chunks for this file\n this._db.prepare(\n 'DELETE FROM doc_chunks WHERE collection = ? 
AND file_path = ?'\n ).run(collection, relPath);\n\n // Extract title and chunk\n const title = this._extractTitle(content, relPath);\n const chunks = this._smartChunk(content);\n\n // Insert chunks\n const insertChunk = this._db.prepare(`\n INSERT INTO doc_chunks (collection, file_path, title, content, seq, pos, content_hash)\n VALUES (?, ?, ?, ?, ?, ?, ?)\n `);\n\n const chunkIds: number[] = [];\n\n this._db.transaction(() => {\n for (let seq = 0; seq < chunks.length; seq++) {\n const chunk = chunks[seq];\n const result = insertChunk.run(\n collection, relPath, title, chunk.text, seq, chunk.pos, hash,\n );\n chunkIds.push(Number(result.lastInsertRowid));\n }\n });\n\n // Generate embeddings\n const texts = chunks.map(c => `title: ${title} | text: ${c.text}`);\n const embeddings = await this._embedding.embedBatch(texts);\n\n // Store vectors\n const insertVec = this._db.prepare(\n 'INSERT OR REPLACE INTO doc_vectors (chunk_id, embedding) VALUES (?, ?)'\n );\n\n this._db.transaction(() => {\n for (let j = 0; j < chunkIds.length; j++) {\n const buf = Buffer.from(embeddings[j].buffer);\n insertVec.run(chunkIds[j], buf);\n this._hnsw.add(embeddings[j], chunkIds[j]);\n this._vecCache.set(chunkIds[j], embeddings[j]);\n }\n });\n\n indexed++;\n totalChunks += chunks.length;\n }\n\n return { indexed, skipped, chunks: totalChunks };\n }\n\n /**\n * Remove all indexed data for a collection.\n */\n removeCollection(collection: string): void {\n this._db.prepare('DELETE FROM doc_chunks WHERE collection = ?').run(collection);\n this._db.prepare('DELETE FROM collections WHERE name = ?').run(collection);\n this._db.prepare('DELETE FROM path_contexts WHERE collection = ?').run(collection);\n }\n\n // ── Smart Chunking ──────────────────────────────\n\n /**\n * Split document into chunks at natural markdown boundaries.\n * Uses heading-aware scoring like qmd.\n */\n private _smartChunk(text: string): { text: string; pos: number }[] {\n if (text.length <= TARGET_CHARS) {\n return 
[{ text, pos: 0 }];\n }\n\n const lines = text.split('\\n');\n const breakPoints = this._findBreakPoints(lines);\n const chunks: { text: string; pos: number }[] = [];\n\n let chunkStart = 0; // char position\n let lineStart = 0; // line index\n\n while (chunkStart < text.length) {\n const remaining = text.length - chunkStart;\n if (remaining <= TARGET_CHARS + WINDOW_CHARS) {\n // Last chunk — take everything\n const lastText = text.slice(chunkStart).trim();\n if (lastText.length >= MIN_CHUNK_CHARS) {\n chunks.push({ text: lastText, pos: chunkStart });\n } else if (chunks.length > 0) {\n // Merge with previous chunk\n chunks[chunks.length - 1].text += '\\n' + lastText;\n } else {\n chunks.push({ text: lastText, pos: chunkStart });\n }\n break;\n }\n\n // Find best break point in window\n const targetEnd = chunkStart + TARGET_CHARS;\n const windowStart = targetEnd - WINDOW_CHARS;\n\n let bestBreak = targetEnd;\n let bestScore = 0;\n\n for (const bp of breakPoints) {\n if (bp.pos <= chunkStart) continue;\n if (bp.pos > targetEnd + WINDOW_CHARS / 2) break;\n if (bp.pos < windowStart) continue;\n\n // Score decay: prefer closer break points\n const distance = Math.abs(bp.pos - targetEnd);\n const decay = 1 - (distance / WINDOW_CHARS) ** 2 * 0.7;\n const finalScore = bp.score * decay;\n\n if (finalScore > bestScore) {\n bestScore = finalScore;\n bestBreak = bp.pos;\n }\n }\n\n const chunkText = text.slice(chunkStart, bestBreak).trim();\n if (chunkText.length >= MIN_CHUNK_CHARS) {\n chunks.push({ text: chunkText, pos: chunkStart });\n }\n\n chunkStart = bestBreak;\n }\n\n return chunks;\n }\n\n /**\n * Find all potential break points in the document with scores.\n */\n private _findBreakPoints(lines: string[]): BreakPoint[] {\n const points: BreakPoint[] = [];\n let charPos = 0;\n let inCodeBlock = false;\n\n for (const line of lines) {\n // Track code fences\n if (line.trimStart().startsWith('```')) {\n inCodeBlock = !inCodeBlock;\n if (!inCodeBlock) {\n // End of code 
block is a good break point\n points.push({ pos: charPos + line.length + 1, score: 80 });\n }\n charPos += line.length + 1;\n continue;\n }\n\n // Skip break points inside code blocks\n if (inCodeBlock) {\n charPos += line.length + 1;\n continue;\n }\n\n // Score this line as a potential break point\n for (const [pattern, score] of BREAK_SCORES) {\n if (pattern.test(line.trim())) {\n points.push({ pos: charPos, score });\n break;\n }\n }\n\n charPos += line.length + 1;\n }\n\n return points;\n }\n\n /**\n * Extract document title from first heading or filename.\n */\n private _extractTitle(content: string, filePath: string): string {\n const match = content.match(/^#{1,3}\\s+(.+)$/m);\n if (match) return match[1].trim();\n return path.basename(filePath, path.extname(filePath));\n }\n}\n","/**\n * BrainBank — Docs Module\n * \n * Index any folder of markdown/text files (notes, docs, wikis).\n * Heading-aware smart chunking inspired by qmd.\n * \n * import { docs } from 'brainbank/docs';\n * brain.use(docs());\n */\n\nimport type { BrainBankModule, ModuleContext } from './types.ts';\nimport type { HNSWIndex } from '../vector/hnsw.ts';\nimport type { Database } from '../storage/database.ts';\nimport type { EmbeddingProvider, DocumentCollection, SearchResult } from '../types.ts';\nimport { DocIndexer } from '../indexers/doc-indexer.ts';\n\nexport interface DocsModuleOptions {}\n\nclass DocsModuleImpl implements BrainBankModule {\n readonly name = 'docs';\n hnsw!: HNSWIndex;\n indexer!: DocIndexer;\n vecCache = new Map<number, Float32Array>();\n private _db!: Database;\n private _embedding!: EmbeddingProvider;\n\n constructor(private opts: DocsModuleOptions = {}) {}\n\n async initialize(ctx: ModuleContext): Promise<void> {\n this._db = ctx.db;\n this._embedding = ctx.embedding;\n this.hnsw = await ctx.createHnsw();\n ctx.loadVectors('doc_vectors', 'chunk_id', this.hnsw, this.vecCache);\n this.indexer = new DocIndexer(ctx.db, ctx.embedding, this.hnsw, this.vecCache);\n 
}\n\n /** Register a document collection. */\n addCollection(collection: DocumentCollection): void {\n this._db.prepare(`\n INSERT OR REPLACE INTO collections (name, path, pattern, ignore_json, context)\n VALUES (?, ?, ?, ?, ?)\n `).run(\n collection.name,\n collection.path,\n collection.pattern ?? '**/*.md',\n JSON.stringify(collection.ignore ?? []),\n collection.context ?? null,\n );\n }\n\n /** Remove a collection and its indexed data. */\n removeCollection(name: string): void {\n this.indexer.removeCollection(name);\n }\n\n /** List all registered collections. */\n listCollections(): DocumentCollection[] {\n return (this._db.prepare('SELECT * FROM collections').all() as any[]).map(row => ({\n name: row.name,\n path: row.path,\n pattern: row.pattern,\n ignore: JSON.parse(row.ignore_json),\n context: row.context,\n }));\n }\n\n /** Index all (or specific) collections. Incremental. */\n async indexCollections(options: {\n collections?: string[];\n onProgress?: (collection: string, file: string, current: number, total: number) => void;\n } = {}): Promise<Record<string, { indexed: number; skipped: number; chunks: number }>> {\n const allCollections = this.listCollections();\n const toIndex = options.collections\n ? allCollections.filter(c => options.collections!.includes(c.name))\n : allCollections;\n\n const results: Record<string, { indexed: number; skipped: number; chunks: number }> = {};\n\n for (const coll of toIndex) {\n results[coll.name] = await this.indexer.indexCollection(\n coll.name,\n coll.path,\n coll.pattern,\n {\n ignore: coll.ignore,\n onProgress: (file, cur, total) => options.onProgress?.(coll.name, file, cur, total),\n },\n );\n }\n\n return results;\n }\n\n /** Search documents only. */\n async search(query: string, options?: {\n collection?: string;\n k?: number;\n minScore?: number;\n }): Promise<SearchResult[]> {\n const k = options?.k ?? 
8;\n const queryVec = await this._embedding.embed(query);\n const hits = this.hnsw.search(queryVec, k);\n\n const results: SearchResult[] = [];\n for (const hit of hits) {\n if (options?.minScore && hit.score < options.minScore) continue;\n\n const chunk = this._db.prepare(\n 'SELECT * FROM doc_chunks WHERE id = ?'\n ).get(hit.id) as any;\n\n if (!chunk) continue;\n if (options?.collection && chunk.collection !== options.collection) continue;\n\n const ctx = this._getDocContext(chunk.collection, chunk.file_path);\n\n results.push({\n type: 'document',\n score: hit.score,\n filePath: chunk.file_path,\n content: chunk.content,\n context: ctx,\n metadata: {\n collection: chunk.collection,\n title: chunk.title,\n seq: chunk.seq,\n },\n });\n }\n\n return results;\n }\n\n /** Add context description for a document path. */\n addContext(collection: string, path: string, context: string): void {\n this._db.prepare(`\n INSERT OR REPLACE INTO path_contexts (collection, path, context)\n VALUES (?, ?, ?)\n `).run(collection, path, context);\n }\n\n /** Remove context for a path. */\n removeContext(collection: string, path: string): void {\n this._db.prepare(\n 'DELETE FROM path_contexts WHERE collection = ? AND path = ?'\n ).run(collection, path);\n }\n\n /** List all context entries. */\n listContexts(): { collection: string; path: string; context: string }[] {\n return this._db.prepare('SELECT * FROM path_contexts').all() as any[];\n }\n\n stats(): Record<string, any> {\n return {\n collections: (this._db.prepare('SELECT COUNT(*) as c FROM collections').get() as any).c,\n documents: (this._db.prepare('SELECT COUNT(DISTINCT file_path) as c FROM doc_chunks').get() as any).c,\n chunks: (this._db.prepare('SELECT COUNT(*) as c FROM doc_chunks').get() as any).c,\n hnswSize: this.hnsw.size,\n };\n }\n\n /** Resolve context for a document (checks path_contexts tree → collection context). 
*/\n private _getDocContext(collection: string, filePath: string): string | undefined {\n const parts = filePath.split('/');\n for (let i = parts.length; i >= 0; i--) {\n const checkPath = i === 0 ? '/' : '/' + parts.slice(0, i).join('/');\n const ctx = this._db.prepare(\n 'SELECT context FROM path_contexts WHERE collection = ? AND path = ?'\n ).get(collection, checkPath) as any;\n if (ctx) return ctx.context;\n }\n\n const coll = this._db.prepare(\n 'SELECT context FROM collections WHERE name = ?'\n ).get(collection) as any;\n return coll?.context ?? undefined;\n }\n}\n\n/** Create a document collections module. */\nexport function docs(opts?: DocsModuleOptions): BrainBankModule {\n return new DocsModuleImpl(opts);\n}\n"],"mappings":";;;;;AAUA,YAAY,QAAQ;AACpB,YAAY,UAAU;AACtB,SAAS,kBAAkB;AAC3B,SAAS,YAAY;AAYrB,IAAM,eAAmC;AAAA,EACrC,CAAC,OAAY,GAAG;AAAA;AAAA,EAChB,CAAC,QAAa,EAAE;AAAA;AAAA,EAChB,CAAC,SAAa,EAAE;AAAA;AAAA,EAChB,CAAC,UAAa,EAAE;AAAA;AAAA,EAChB,CAAC,WAAa,EAAE;AAAA;AAAA,EAChB,CAAC,YAAa,EAAE;AAAA;AAAA,EAChB,CAAC,QAAa,EAAE;AAAA;AAAA,EAChB,CAAC,SAAa,EAAE;AAAA;AAAA,EAChB,CAAC,YAAa,EAAE;AAAA;AAAA,EAChB,CAAC,MAAa,EAAE;AAAA;AAAA,EAChB,CAAC,WAAc,CAAC;AAAA;AACpB;AAIA,IAAM,eAAe;AACrB,IAAM,eAAe;AACrB,IAAM,kBAAkB;AAIjB,IAAM,aAAN,MAAiB;AAAA,EACpB,YACY,KACA,YACA,OACA,WACV;AAJU;AACA;AACA;AACA;AAAA,EACT;AAAA,EArDP,OA+CwB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYpB,MAAM,gBACF,YACA,SACA,UAAkB,WAClB,UAGI,CAAC,GACwD;AAE7D,UAAM,SAAc,aAAQ,OAAO;AACnC,QAAI,CAAI,cAAW,MAAM,GAAG;AACxB,YAAM,IAAI,MAAM,mCAAmC,MAAM,EAAE;AAAA,IAC/D;AAGA,UAAM,QAAkB,CAAC;AACzB,qBAAiB,SAAS,KAAK,SAAS,EAAE,KAAK,OAAO,CAAC,GAAG;AACtD,YAAM,WAAgB,UAAK,QAAQ,KAAK;AACxC,YAAM,OAAU,YAAS,QAAQ;AACjC,UAAI,KAAK,OAAO,GAAG;AAEf,cAAM,eAAe,QAAQ,QAAQ,KAAK,QAAM;AAC5C,gBAAM,UAAU,IAAI,OAAO,GAAG,QAAQ,SAAS,IAAI,EAAE,QAAQ,OAAO,OAAO,CAAC;AAC5E,iBAAO,QAAQ,KAAK,KAAK;AAAA,QAC7B,CAAC;AACD,YAAI,CAAC,cAAc;AACf,gBAAM,KAAK,KAAK;AAAA,QACpB;AAAA,MACJ;AAAA,IACJ;AAEA,QAAI,UAAU;AACd,QAAI,UAAU;AACd,QAAI,cAAc;AAElB,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACn
C,YAAM,UAAU,MAAM,CAAC;AACvB,YAAM,UAAe,UAAK,QAAQ,OAAO;AAEzC,cAAQ,aAAa,SAAS,IAAI,GAAG,MAAM,MAAM;AAGjD,YAAM,UAAa,gBAAa,SAAS,OAAO;AAChD,YAAM,OAAO,WAAW,QAAQ,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK,EAAE,MAAM,GAAG,EAAE;AAG3E,YAAM,WAAW,KAAK,IAAI;AAAA,QACtB;AAAA,MACJ,EAAE,IAAI,YAAY,SAAS,IAAI;AAE/B,UAAI,UAAU;AACV;AACA;AAAA,MACJ;AAGA,WAAK,IAAI;AAAA,QACL;AAAA,MACJ,EAAE,IAAI,YAAY,OAAO;AAGzB,YAAM,QAAQ,KAAK,cAAc,SAAS,OAAO;AACjD,YAAM,SAAS,KAAK,YAAY,OAAO;AAGvC,YAAM,cAAc,KAAK,IAAI,QAAQ;AAAA;AAAA;AAAA,aAGpC;AAED,YAAM,WAAqB,CAAC;AAE5B,WAAK,IAAI,YAAY,MAAM;AACvB,iBAAS,MAAM,GAAG,MAAM,OAAO,QAAQ,OAAO;AAC1C,gBAAM,QAAQ,OAAO,GAAG;AACxB,gBAAM,SAAS,YAAY;AAAA,YACvB;AAAA,YAAY;AAAA,YAAS;AAAA,YAAO,MAAM;AAAA,YAAM;AAAA,YAAK,MAAM;AAAA,YAAK;AAAA,UAC5D;AACA,mBAAS,KAAK,OAAO,OAAO,eAAe,CAAC;AAAA,QAChD;AAAA,MACJ,CAAC;AAGD,YAAM,QAAQ,OAAO,IAAI,OAAK,UAAU,KAAK,YAAY,EAAE,IAAI,EAAE;AACjE,YAAM,aAAa,MAAM,KAAK,WAAW,WAAW,KAAK;AAGzD,YAAM,YAAY,KAAK,IAAI;AAAA,QACvB;AAAA,MACJ;AAEA,WAAK,IAAI,YAAY,MAAM;AACvB,iBAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACtC,gBAAM,MAAM,OAAO,KAAK,WAAW,CAAC,EAAE,MAAM;AAC5C,oBAAU,IAAI,SAAS,CAAC,GAAG,GAAG;AAC9B,eAAK,MAAM,IAAI,WAAW,CAAC,GAAG,SAAS,CAAC,CAAC;AACzC,eAAK,UAAU,IAAI,SAAS,CAAC,GAAG,WAAW,CAAC,CAAC;AAAA,QACjD;AAAA,MACJ,CAAC;AAED;AACA,qBAAe,OAAO;AAAA,IAC1B;AAEA,WAAO,EAAE,SAAS,SAAS,QAAQ,YAAY;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,YAA0B;AACvC,SAAK,IAAI,QAAQ,6CAA6C,EAAE,IAAI,UAAU;AAC9E,SAAK,IAAI,QAAQ,wCAAwC,EAAE,IAAI,UAAU;AACzE,SAAK,IAAI,QAAQ,gDAAgD,EAAE,IAAI,UAAU;AAAA,EACrF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,YAAY,MAA+C;AAC/D,QAAI,KAAK,UAAU,cAAc;AAC7B,aAAO,CAAC,EAAE,MAAM,KAAK,EAAE,CAAC;AAAA,IAC5B;AAEA,UAAM,QAAQ,KAAK,MAAM,IAAI;AAC7B,UAAM,cAAc,KAAK,iBAAiB,KAAK;AAC/C,UAAM,SAA0C,CAAC;AAEjD,QAAI,aAAa;AACjB,QAAI,YAAY;AAEhB,WAAO,aAAa,KAAK,QAAQ;AAC7B,YAAM,YAAY,KAAK,SAAS;AAChC,UAAI,aAAa,eAAe,cAAc;AAE1C,cAAM,WAAW,KAAK,MAAM,UAAU,EAAE,KAAK;AAC7C,YAAI,SAAS,UAAU,iBAAiB;AACpC,iBAAO,KAAK,EAAE,MAAM,UAAU,KAAK,WAAW,CAAC;AAAA,QACnD,WAAW,OAAO,SAAS,GAAG;AAE1B,iBAAO,OAAO,SAAS,CAAC,EAAE,QAAQ,OAAO;AAAA,QAC7C,OAAO;AACH,iBAAO,KAAK,E
AAE,MAAM,UAAU,KAAK,WAAW,CAAC;AAAA,QACnD;AACA;AAAA,MACJ;AAGA,YAAM,YAAY,aAAa;AAC/B,YAAM,cAAc,YAAY;AAEhC,UAAI,YAAY;AAChB,UAAI,YAAY;AAEhB,iBAAW,MAAM,aAAa;AAC1B,YAAI,GAAG,OAAO,WAAY;AAC1B,YAAI,GAAG,MAAM,YAAY,eAAe,EAAG;AAC3C,YAAI,GAAG,MAAM,YAAa;AAG1B,cAAM,WAAW,KAAK,IAAI,GAAG,MAAM,SAAS;AAC5C,cAAM,QAAQ,KAAK,WAAW,iBAAiB,IAAI;AACnD,cAAM,aAAa,GAAG,QAAQ;AAE9B,YAAI,aAAa,WAAW;AACxB,sBAAY;AACZ,sBAAY,GAAG;AAAA,QACnB;AAAA,MACJ;AAEA,YAAM,YAAY,KAAK,MAAM,YAAY,SAAS,EAAE,KAAK;AACzD,UAAI,UAAU,UAAU,iBAAiB;AACrC,eAAO,KAAK,EAAE,MAAM,WAAW,KAAK,WAAW,CAAC;AAAA,MACpD;AAEA,mBAAa;AAAA,IACjB;AAEA,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,OAA+B;AACpD,UAAM,SAAuB,CAAC;AAC9B,QAAI,UAAU;AACd,QAAI,cAAc;AAElB,eAAW,QAAQ,OAAO;AAEtB,UAAI,KAAK,UAAU,EAAE,WAAW,KAAK,GAAG;AACpC,sBAAc,CAAC;AACf,YAAI,CAAC,aAAa;AAEd,iBAAO,KAAK,EAAE,KAAK,UAAU,KAAK,SAAS,GAAG,OAAO,GAAG,CAAC;AAAA,QAC7D;AACA,mBAAW,KAAK,SAAS;AACzB;AAAA,MACJ;AAGA,UAAI,aAAa;AACb,mBAAW,KAAK,SAAS;AACzB;AAAA,MACJ;AAGA,iBAAW,CAAC,SAAS,KAAK,KAAK,cAAc;AACzC,YAAI,QAAQ,KAAK,KAAK,KAAK,CAAC,GAAG;AAC3B,iBAAO,KAAK,EAAE,KAAK,SAAS,MAAM,CAAC;AACnC;AAAA,QACJ;AAAA,MACJ;AAEA,iBAAW,KAAK,SAAS;AAAA,IAC7B;AAEA,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,SAAiB,UAA0B;AAC7D,UAAM,QAAQ,QAAQ,MAAM,kBAAkB;AAC9C,QAAI,MAAO,QAAO,MAAM,CAAC,EAAE,KAAK;AAChC,WAAY,cAAS,UAAe,aAAQ,QAAQ,CAAC;AAAA,EACzD;AACJ;;;AClRA,IAAM,iBAAN,MAAgD;AAAA,EAQ5C,YAAoB,OAA0B,CAAC,GAAG;AAA9B;AAAA,EAA+B;AAAA,EA1BvD,OAkBgD;AAAA;AAAA;AAAA,EACnC,OAAO;AAAA,EAChB;AAAA,EACA;AAAA,EACA,WAAW,oBAAI,IAA0B;AAAA,EACjC;AAAA,EACA;AAAA,EAIR,MAAM,WAAW,KAAmC;AAChD,SAAK,MAAM,IAAI;AACf,SAAK,aAAa,IAAI;AACtB,SAAK,OAAO,MAAM,IAAI,WAAW;AACjC,QAAI,YAAY,eAAe,YAAY,KAAK,MAAM,KAAK,QAAQ;AACnE,SAAK,UAAU,IAAI,WAAW,IAAI,IAAI,IAAI,WAAW,KAAK,MAAM,KAAK,QAAQ;AAAA,EACjF;AAAA;AAAA,EAGA,cAAc,YAAsC;AAChD,SAAK,IAAI,QAAQ;AAAA;AAAA;AAAA,SAGhB,EAAE;AAAA,MACC,WAAW;AAAA,MACX,WAAW;AAAA,MACX,WAAW,WAAW;AAAA,MACtB,KAAK,UAAU,WAAW,UAAU,CAAC,CAAC;AAAA,MACtC,WAAW,WAAW;AAAA,IAC1B;AAAA,EACJ;AAAA;AAAA,EAGA,iBAAiB,MAAoB;AACjC,SAAK,QAAQ,iBAAiB,IAAI;AAAA,EACtC;AAAA;AAAA,EAGA,k
BAAwC;AACpC,WAAQ,KAAK,IAAI,QAAQ,2BAA2B,EAAE,IAAI,EAAY,IAAI,UAAQ;AAAA,MAC9E,MAAM,IAAI;AAAA,MACV,MAAM,IAAI;AAAA,MACV,SAAS,IAAI;AAAA,MACb,QAAQ,KAAK,MAAM,IAAI,WAAW;AAAA,MAClC,SAAS,IAAI;AAAA,IACjB,EAAE;AAAA,EACN;AAAA;AAAA,EAGA,MAAM,iBAAiB,UAGnB,CAAC,GAAkF;AACnF,UAAM,iBAAiB,KAAK,gBAAgB;AAC5C,UAAM,UAAU,QAAQ,cAClB,eAAe,OAAO,OAAK,QAAQ,YAAa,SAAS,EAAE,IAAI,CAAC,IAChE;AAEN,UAAM,UAAgF,CAAC;AAEvF,eAAW,QAAQ,SAAS;AACxB,cAAQ,KAAK,IAAI,IAAI,MAAM,KAAK,QAAQ;AAAA,QACpC,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL;AAAA,UACI,QAAQ,KAAK;AAAA,UACb,YAAY,wBAAC,MAAM,KAAK,UAAU,QAAQ,aAAa,KAAK,MAAM,MAAM,KAAK,KAAK,GAAtE;AAAA,QAChB;AAAA,MACJ;AAAA,IACJ;AAEA,WAAO;AAAA,EACX;AAAA;AAAA,EAGA,MAAM,OAAO,OAAe,SAIA;AACxB,UAAM,IAAI,SAAS,KAAK;AACxB,UAAM,WAAW,MAAM,KAAK,WAAW,MAAM,KAAK;AAClD,UAAM,OAAO,KAAK,KAAK,OAAO,UAAU,CAAC;AAEzC,UAAM,UAA0B,CAAC;AACjC,eAAW,OAAO,MAAM;AACpB,UAAI,SAAS,YAAY,IAAI,QAAQ,QAAQ,SAAU;AAEvD,YAAM,QAAQ,KAAK,IAAI;AAAA,QACnB;AAAA,MACJ,EAAE,IAAI,IAAI,EAAE;AAEZ,UAAI,CAAC,MAAO;AACZ,UAAI,SAAS,cAAc,MAAM,eAAe,QAAQ,WAAY;AAEpE,YAAM,MAAM,KAAK,eAAe,MAAM,YAAY,MAAM,SAAS;AAEjE,cAAQ,KAAK;AAAA,QACT,MAAM;AAAA,QACN,OAAO,IAAI;AAAA,QACX,UAAU,MAAM;AAAA,QAChB,SAAS,MAAM;AAAA,QACf,SAAS;AAAA,QACT,UAAU;AAAA,UACN,YAAY,MAAM;AAAA,UAClB,OAAO,MAAM;AAAA,UACb,KAAK,MAAM;AAAA,QACf;AAAA,MACJ,CAAC;AAAA,IACL;AAEA,WAAO;AAAA,EACX;AAAA;AAAA,EAGA,WAAW,YAAoBA,OAAc,SAAuB;AAChE,SAAK,IAAI,QAAQ;AAAA;AAAA;AAAA,SAGhB,EAAE,IAAI,YAAYA,OAAM,OAAO;AAAA,EACpC;AAAA;AAAA,EAGA,cAAc,YAAoBA,OAAoB;AAClD,SAAK,IAAI;AAAA,MACL;AAAA,IACJ,EAAE,IAAI,YAAYA,KAAI;AAAA,EAC1B;AAAA;AAAA,EAGA,eAAwE;AACpE,WAAO,KAAK,IAAI,QAAQ,6BAA6B,EAAE,IAAI;AAAA,EAC/D;AAAA,EAEA,QAA6B;AACzB,WAAO;AAAA,MACH,aAAc,KAAK,IAAI,QAAQ,uCAAuC,EAAE,IAAI,EAAU;AAAA,MACtF,WAAY,KAAK,IAAI,QAAQ,uDAAuD,EAAE,IAAI,EAAU;AAAA,MACpG,QAAS,KAAK,IAAI,QAAQ,sCAAsC,EAAE,IAAI,EAAU;AAAA,MAChF,UAAU,KAAK,KAAK;AAAA,IACxB;AAAA,EACJ;AAAA;AAAA,EAGQ,eAAe,YAAoB,UAAsC;AAC7E,UAAM,QAAQ,SAAS,MAAM,GAAG;AAChC,aAAS,IAAI,MAAM,QAAQ,KAAK,GAAG,KAAK;AACpC,YAAM,YAAY,MAAM,IAAI,MAAM,MAAM,MAAM,MAAM,GAAG,CAAC,EAAE,KAAK,GAAG;AAClE,YAAM,MAAM,
KAAK,IAAI;AAAA,QACjB;AAAA,MACJ,EAAE,IAAI,YAAY,SAAS;AAC3B,UAAI,IAAK,QAAO,IAAI;AAAA,IACxB;AAEA,UAAM,OAAO,KAAK,IAAI;AAAA,MAClB;AAAA,IACJ,EAAE,IAAI,UAAU;AAChB,WAAO,MAAM,WAAW;AAAA,EAC5B;AACJ;AAGO,SAAS,KAAK,MAA2C;AAC5D,SAAO,IAAI,eAAe,IAAI;AAClC;AAFgB;","names":["path"]}
@@ -0,0 +1,185 @@
1
+ import {
2
+ reciprocalRankFusion
3
+ } from "./chunk-4ZKBQ33J.js";
4
+ import {
5
+ __name
6
+ } from "./chunk-7QVYU63E.js";
7
+
8
+ // src/memory/note-store.ts
9
+ var NoteStore = class {
10
+ static {
11
+ __name(this, "NoteStore");
12
+ }
13
+ _db;
14
+ _embedding;
15
+ _hnsw;
16
+ _vecs;
17
+ constructor(db, embedding, hnsw, vecs) {
18
+ this._db = db;
19
+ this._embedding = embedding;
20
+ this._hnsw = hnsw;
21
+ this._vecs = vecs;
22
+ }
23
+ /**
24
+ * Store a note digest.
25
+ * Embeds title + summary for vector search, auto-indexed in FTS5.
26
+ */
27
+ async remember(digest) {
28
+ const { title, summary, decisions = [], filesChanged = [], patterns = [], openQuestions = [], tags = [] } = digest;
29
+ const result = this._db.prepare(`
30
+ INSERT INTO note_memories (title, summary, decisions_json, files_json, patterns_json, open_json, tags_json)
31
+ VALUES (?, ?, ?, ?, ?, ?, ?)
32
+ `).run(
33
+ title,
34
+ summary,
35
+ JSON.stringify(decisions),
36
+ JSON.stringify(filesChanged),
37
+ JSON.stringify(patterns),
38
+ JSON.stringify(openQuestions),
39
+ JSON.stringify(tags)
40
+ );
41
+ const id = Number(result.lastInsertRowid);
42
+ const text = `${title}
43
+ ${summary}
44
+ ${decisions.join(". ")}
45
+ ${patterns.join(". ")}`;
46
+ const vec = await this._embedding.embed(text);
47
+ this._db.prepare("INSERT INTO note_vectors (note_id, embedding) VALUES (?, ?)").run(
48
+ id,
49
+ Buffer.from(vec.buffer)
50
+ );
51
+ this._hnsw.add(vec, id);
52
+ this._vecs.set(id, vec);
53
+ return id;
54
+ }
55
+ /**
56
+ * Recall relevant notes.
57
+ * Supports vector, keyword, or hybrid (default) retrieval.
58
+ */
59
+ async recall(query, options = {}) {
60
+ const { k = 5, mode = "hybrid", minScore = 0.15, tier } = options;
61
+ let results;
62
+ if (mode === "keyword") {
63
+ results = this._searchBM25(query, k);
64
+ } else if (mode === "vector") {
65
+ results = await this._searchVector(query, k);
66
+ } else {
67
+ const [vectorHits, bm25Hits] = await Promise.all([
68
+ this._searchVector(query, k),
69
+ Promise.resolve(this._searchBM25(query, k))
70
+ ]);
71
+ const fusedResults = reciprocalRankFusion(
72
+ [
73
+ vectorHits.map((m) => ({ type: "pattern", score: m.score ?? 0, content: m.summary, metadata: { id: m.id } })),
74
+ bm25Hits.map((m) => ({ type: "pattern", score: m.score ?? 0, content: m.summary, metadata: { id: m.id } }))
75
+ ]
76
+ );
77
+ const allById = /* @__PURE__ */ new Map();
78
+ for (const m of [...vectorHits, ...bm25Hits]) allById.set(m.id, m);
79
+ results = fusedResults.map((r) => {
80
+ const mem = allById.get(r.metadata.id);
81
+ if (!mem) return null;
82
+ return { ...mem, score: r.score };
83
+ }).filter(Boolean);
84
+ }
85
+ return results.filter((m) => (m.score ?? 0) >= minScore).filter((m) => !tier || m.tier === tier).slice(0, k);
86
+ }
87
+ /**
88
+ * List recent notes.
89
+ */
90
+ list(limit = 20, tier) {
91
+ const sql = tier ? "SELECT * FROM note_memories WHERE tier = ? ORDER BY id DESC LIMIT ?" : "SELECT * FROM note_memories ORDER BY id DESC LIMIT ?";
92
+ const rows = tier ? this._db.prepare(sql).all(tier, limit) : this._db.prepare(sql).all(limit);
93
+ return rows.map((r) => this._rowToNote(r));
94
+ }
95
+ /**
96
+ * Get total count of notes.
97
+ */
98
+ count() {
99
+ const total = this._db.prepare("SELECT COUNT(*) as n FROM note_memories").get().n;
100
+ const short = this._db.prepare("SELECT COUNT(*) as n FROM note_memories WHERE tier = 'short'").get().n;
101
+ const long = this._db.prepare("SELECT COUNT(*) as n FROM note_memories WHERE tier = 'long'").get().n;
102
+ return { total, short, long };
103
+ }
104
+ /**
105
+ * Consolidate old short-term notes into long-term.
106
+ * Keeps the most recent `keepRecent` as short-term, compresses the rest.
107
+ */
108
+ consolidate(keepRecent = 20) {
109
+ const old = this._db.prepare(`
110
+ SELECT id FROM note_memories
111
+ WHERE tier = 'short'
112
+ ORDER BY created_at DESC
113
+ LIMIT -1 OFFSET ?
114
+ `).all(keepRecent);
115
+ if (old.length === 0) return { promoted: 0 };
116
+ const ids = old.map((r) => r.id);
117
+ const placeholders = ids.map(() => "?").join(",");
118
+ this._db.prepare(`
119
+ UPDATE note_memories
120
+ SET tier = 'long',
121
+ open_json = '[]',
122
+ files_json = '[]'
123
+ WHERE id IN (${placeholders})
124
+ `).run(...ids);
125
+ return { promoted: ids.length };
126
+ }
127
+ // ── Private helpers ────────────────────────────
128
+ async _searchVector(query, k) {
129
+ if (this._hnsw.size === 0) return [];
130
+ const queryVec = await this._embedding.embed(query);
131
+ const hits = this._hnsw.search(queryVec, k);
132
+ if (hits.length === 0) return [];
133
+ const ids = hits.map((h) => h.id);
134
+ const scoreMap = new Map(hits.map((h) => [h.id, h.score]));
135
+ const placeholders = ids.map(() => "?").join(",");
136
+ const rows = this._db.prepare(
137
+ `SELECT * FROM note_memories WHERE id IN (${placeholders})`
138
+ ).all(...ids);
139
+ return rows.map((r) => ({
140
+ ...this._rowToNote(r),
141
+ score: scoreMap.get(r.id) ?? 0
142
+ }));
143
+ }
144
+ _searchBM25(query, k) {
145
+ const clean = query.replace(/[{}[\]()^~*:]/g, " ").replace(/\bAND\b|\bOR\b|\bNOT\b|\bNEAR\b/gi, "").trim();
146
+ const words = clean.split(/\s+/).filter((w) => w.length > 1);
147
+ if (words.length === 0) return [];
148
+ const ftsQuery = words.map((w) => `"${w}"`).join(" ");
149
+ try {
150
+ const rows = this._db.prepare(`
151
+ SELECT m.*, bm25(fts_notes, 5.0, 3.0, 2.0, 2.0, 1.0) AS score
152
+ FROM fts_notes f
153
+ JOIN note_memories m ON m.id = f.rowid
154
+ WHERE fts_notes MATCH ?
155
+ ORDER BY score ASC
156
+ LIMIT ?
157
+ `).all(ftsQuery, k);
158
+ return rows.map((r) => ({
159
+ ...this._rowToNote(r),
160
+ score: 1 / (1 + Math.exp(-0.3 * (Math.abs(r.score) - 5)))
161
+ }));
162
+ } catch {
163
+ return [];
164
+ }
165
+ }
166
+ _rowToNote(r) {
167
+ return {
168
+ id: r.id,
169
+ title: r.title,
170
+ summary: r.summary,
171
+ decisions: JSON.parse(r.decisions_json || "[]"),
172
+ filesChanged: JSON.parse(r.files_json || "[]"),
173
+ patterns: JSON.parse(r.patterns_json || "[]"),
174
+ openQuestions: JSON.parse(r.open_json || "[]"),
175
+ tags: JSON.parse(r.tags_json || "[]"),
176
+ tier: r.tier,
177
+ createdAt: r.created_at
178
+ };
179
+ }
180
+ };
181
+
182
+ export {
183
+ NoteStore
184
+ };
185
+ //# sourceMappingURL=chunk-MJ3Y24H6.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/memory/note-store.ts"],"sourcesContent":["/**\n * BrainBank — Note Memory Store\n * \n * Stores structured note digests for long-term agent memory.\n * Each digest captures decisions, files changed, patterns, and open questions.\n * Supports vector + BM25 hybrid retrieval via HNSW + FTS5.\n * \n * Memory tiers:\n * - \"short\" (default): Full digest, last ~20 notes\n * - \"long\": Compressed to patterns + decisions only\n */\n\nimport type { Database } from '../storage/database.ts';\nimport type { EmbeddingProvider, SearchResult } from '../types.ts';\nimport type { HNSWIndex } from '../vector/hnsw.ts';\nimport { BM25Search } from '../query/bm25.ts';\nimport { reciprocalRankFusion } from '../query/rrf.ts';\n\nexport interface NoteDigest {\n title: string;\n summary: string;\n decisions?: string[];\n filesChanged?: string[];\n patterns?: string[];\n openQuestions?: string[];\n tags?: string[];\n}\n\nexport interface StoredNote extends NoteDigest {\n id: number;\n tier: 'short' | 'long';\n createdAt: number;\n score?: number;\n}\n\nexport interface RecallOptions {\n /** Max results. Default: 5 */\n k?: number;\n /** Search mode. Default: 'hybrid' */\n mode?: 'hybrid' | 'vector' | 'keyword';\n /** Minimum score threshold. Default: 0.15 */\n minScore?: number;\n /** Filter by tier. 
Default: all */\n tier?: 'short' | 'long';\n}\n\nexport class NoteStore {\n private _db: Database;\n private _embedding: EmbeddingProvider;\n private _hnsw: HNSWIndex;\n private _vecs: Map<number, Float32Array>;\n\n constructor(\n db: Database,\n embedding: EmbeddingProvider,\n hnsw: HNSWIndex,\n vecs: Map<number, Float32Array>,\n ) {\n this._db = db;\n this._embedding = embedding;\n this._hnsw = hnsw;\n this._vecs = vecs;\n }\n\n /**\n * Store a note digest.\n * Embeds title + summary for vector search, auto-indexed in FTS5.\n */\n async remember(digest: NoteDigest): Promise<number> {\n const { title, summary, decisions = [], filesChanged = [], patterns = [], openQuestions = [], tags = [] } = digest;\n\n // Store in SQLite\n const result = this._db.prepare(`\n INSERT INTO note_memories (title, summary, decisions_json, files_json, patterns_json, open_json, tags_json)\n VALUES (?, ?, ?, ?, ?, ?, ?)\n `).run(\n title,\n summary,\n JSON.stringify(decisions),\n JSON.stringify(filesChanged),\n JSON.stringify(patterns),\n JSON.stringify(openQuestions),\n JSON.stringify(tags),\n );\n\n const id = Number(result.lastInsertRowid);\n\n // Embed and index\n const text = `${title}\\n${summary}\\n${decisions.join('. ')}\\n${patterns.join('. 
')}`;\n const vec = await this._embedding.embed(text);\n\n this._db.prepare('INSERT INTO note_vectors (note_id, embedding) VALUES (?, ?)').run(\n id, Buffer.from(vec.buffer),\n );\n\n this._hnsw.add(vec, id);\n this._vecs.set(id, vec);\n\n return id;\n }\n\n /**\n * Recall relevant notes.\n * Supports vector, keyword, or hybrid (default) retrieval.\n */\n async recall(query: string, options: RecallOptions = {}): Promise<StoredNote[]> {\n const { k = 5, mode = 'hybrid', minScore = 0.15, tier } = options;\n\n let results: StoredNote[];\n\n if (mode === 'keyword') {\n results = this._searchBM25(query, k);\n } else if (mode === 'vector') {\n results = await this._searchVector(query, k);\n } else {\n // Hybrid: vector + BM25 → RRF\n const [vectorHits, bm25Hits] = await Promise.all([\n this._searchVector(query, k),\n Promise.resolve(this._searchBM25(query, k)),\n ]);\n\n const fusedResults = reciprocalRankFusion(\n [\n vectorHits.map(m => ({ type: 'pattern' as const, score: m.score ?? 0, content: m.summary, metadata: { id: m.id } })),\n bm25Hits.map(m => ({ type: 'pattern' as const, score: m.score ?? 0, content: m.summary, metadata: { id: m.id } })),\n ],\n );\n\n // Map back to full StoredNote objects\n const allById = new Map<number, StoredNote>();\n for (const m of [...vectorHits, ...bm25Hits]) allById.set(m.id, m);\n\n results = fusedResults\n .map(r => {\n const mem = allById.get(r.metadata.id);\n if (!mem) return null;\n return { ...mem, score: r.score };\n })\n .filter(Boolean) as StoredNote[];\n }\n\n // Apply filters\n return results\n .filter(m => (m.score ?? 0) >= minScore)\n .filter(m => !tier || m.tier === tier)\n .slice(0, k);\n }\n\n /**\n * List recent notes.\n */\n list(limit: number = 20, tier?: 'short' | 'long'): StoredNote[] {\n const sql = tier\n ? 'SELECT * FROM note_memories WHERE tier = ? ORDER BY id DESC LIMIT ?'\n : 'SELECT * FROM note_memories ORDER BY id DESC LIMIT ?';\n\n const rows = tier\n ? 
this._db.prepare(sql).all(tier, limit) as any[]\n : this._db.prepare(sql).all(limit) as any[];\n\n return rows.map(r => this._rowToNote(r));\n }\n\n /**\n * Get total count of notes.\n */\n count(): { total: number; short: number; long: number } {\n const total = (this._db.prepare('SELECT COUNT(*) as n FROM note_memories').get() as any).n;\n const short = (this._db.prepare(\"SELECT COUNT(*) as n FROM note_memories WHERE tier = 'short'\").get() as any).n;\n const long = (this._db.prepare(\"SELECT COUNT(*) as n FROM note_memories WHERE tier = 'long'\").get() as any).n;\n return { total, short, long };\n }\n\n /**\n * Consolidate old short-term notes into long-term.\n * Keeps the most recent `keepRecent` as short-term, compresses the rest.\n */\n consolidate(keepRecent: number = 20): { promoted: number } {\n // Find short-term notes beyond the keep window\n const old = this._db.prepare(`\n SELECT id FROM note_memories \n WHERE tier = 'short' \n ORDER BY created_at DESC \n LIMIT -1 OFFSET ?\n `).all(keepRecent) as any[];\n\n if (old.length === 0) return { promoted: 0 };\n\n const ids = old.map((r: any) => r.id);\n const placeholders = ids.map(() => '?').join(',');\n\n // Promote to long-term: clear verbose fields, keep patterns + decisions\n this._db.prepare(`\n UPDATE note_memories \n SET tier = 'long',\n open_json = '[]',\n files_json = '[]'\n WHERE id IN (${placeholders})\n `).run(...ids);\n\n return { promoted: ids.length };\n }\n\n // ── Private helpers ────────────────────────────\n\n private async _searchVector(query: string, k: number): Promise<StoredNote[]> {\n if (this._hnsw.size === 0) return [];\n\n const queryVec = await this._embedding.embed(query);\n const hits = this._hnsw.search(queryVec, k);\n\n if (hits.length === 0) return [];\n\n const ids = hits.map(h => h.id);\n const scoreMap = new Map(hits.map(h => [h.id, h.score]));\n const placeholders = ids.map(() => '?').join(',');\n\n const rows = this._db.prepare(\n `SELECT * FROM note_memories WHERE id 
IN (${placeholders})`\n ).all(...ids) as any[];\n\n return rows.map(r => ({\n ...this._rowToNote(r),\n score: scoreMap.get(r.id) ?? 0,\n }));\n }\n\n private _searchBM25(query: string, k: number): StoredNote[] {\n // Sanitize for FTS5\n const clean = query\n .replace(/[{}[\\]()^~*:]/g, ' ')\n .replace(/\\bAND\\b|\\bOR\\b|\\bNOT\\b|\\bNEAR\\b/gi, '')\n .trim();\n\n const words = clean.split(/\\s+/).filter(w => w.length > 1);\n if (words.length === 0) return [];\n\n const ftsQuery = words.map(w => `\"${w}\"`).join(' ');\n\n try {\n const rows = this._db.prepare(`\n SELECT m.*, bm25(fts_notes, 5.0, 3.0, 2.0, 2.0, 1.0) AS score\n FROM fts_notes f\n JOIN note_memories m ON m.id = f.rowid\n WHERE fts_notes MATCH ?\n ORDER BY score ASC\n LIMIT ?\n `).all(ftsQuery, k) as any[];\n\n return rows.map(r => ({\n ...this._rowToNote(r),\n score: 1.0 / (1.0 + Math.exp(-0.3 * (Math.abs(r.score) - 5))),\n }));\n } catch {\n return [];\n }\n }\n\n private _rowToNote(r: any): StoredNote {\n return {\n id: r.id,\n title: r.title,\n summary: r.summary,\n decisions: JSON.parse(r.decisions_json || '[]'),\n filesChanged: JSON.parse(r.files_json || '[]'),\n patterns: JSON.parse(r.patterns_json || '[]'),\n openQuestions: JSON.parse(r.open_json || '[]'),\n tags: JSON.parse(r.tags_json || '[]'),\n tier: r.tier,\n createdAt: r.created_at,\n };\n 
}\n}\n"],"mappings":";;;;;;;;AA8CO,IAAM,YAAN,MAAgB;AAAA,EA9CvB,OA8CuB;AAAA;AAAA;AAAA,EACX;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YACI,IACA,WACA,MACA,MACF;AACE,SAAK,MAAM;AACX,SAAK,aAAa;AAClB,SAAK,QAAQ;AACb,SAAK,QAAQ;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,SAAS,QAAqC;AAChD,UAAM,EAAE,OAAO,SAAS,YAAY,CAAC,GAAG,eAAe,CAAC,GAAG,WAAW,CAAC,GAAG,gBAAgB,CAAC,GAAG,OAAO,CAAC,EAAE,IAAI;AAG5G,UAAM,SAAS,KAAK,IAAI,QAAQ;AAAA;AAAA;AAAA,SAG/B,EAAE;AAAA,MACC;AAAA,MACA;AAAA,MACA,KAAK,UAAU,SAAS;AAAA,MACxB,KAAK,UAAU,YAAY;AAAA,MAC3B,KAAK,UAAU,QAAQ;AAAA,MACvB,KAAK,UAAU,aAAa;AAAA,MAC5B,KAAK,UAAU,IAAI;AAAA,IACvB;AAEA,UAAM,KAAK,OAAO,OAAO,eAAe;AAGxC,UAAM,OAAO,GAAG,KAAK;AAAA,EAAK,OAAO;AAAA,EAAK,UAAU,KAAK,IAAI,CAAC;AAAA,EAAK,SAAS,KAAK,IAAI,CAAC;AAClF,UAAM,MAAM,MAAM,KAAK,WAAW,MAAM,IAAI;AAE5C,SAAK,IAAI,QAAQ,6DAA6D,EAAE;AAAA,MAC5E;AAAA,MAAI,OAAO,KAAK,IAAI,MAAM;AAAA,IAC9B;AAEA,SAAK,MAAM,IAAI,KAAK,EAAE;AACtB,SAAK,MAAM,IAAI,IAAI,GAAG;AAEtB,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,OAAO,OAAe,UAAyB,CAAC,GAA0B;AAC5E,UAAM,EAAE,IAAI,GAAG,OAAO,UAAU,WAAW,MAAM,KAAK,IAAI;AAE1D,QAAI;AAEJ,QAAI,SAAS,WAAW;AACpB,gBAAU,KAAK,YAAY,OAAO,CAAC;AAAA,IACvC,WAAW,SAAS,UAAU;AAC1B,gBAAU,MAAM,KAAK,cAAc,OAAO,CAAC;AAAA,IAC/C,OAAO;AAEH,YAAM,CAAC,YAAY,QAAQ,IAAI,MAAM,QAAQ,IAAI;AAAA,QAC7C,KAAK,cAAc,OAAO,CAAC;AAAA,QAC3B,QAAQ,QAAQ,KAAK,YAAY,OAAO,CAAC,CAAC;AAAA,MAC9C,CAAC;AAED,YAAM,eAAe;AAAA,QACjB;AAAA,UACI,WAAW,IAAI,QAAM,EAAE,MAAM,WAAoB,OAAO,EAAE,SAAS,GAAG,SAAS,EAAE,SAAS,UAAU,EAAE,IAAI,EAAE,GAAG,EAAE,EAAE;AAAA,UACnH,SAAS,IAAI,QAAM,EAAE,MAAM,WAAoB,OAAO,EAAE,SAAS,GAAG,SAAS,EAAE,SAAS,UAAU,EAAE,IAAI,EAAE,GAAG,EAAE,EAAE;AAAA,QACrH;AAAA,MACJ;AAGA,YAAM,UAAU,oBAAI,IAAwB;AAC5C,iBAAW,KAAK,CAAC,GAAG,YAAY,GAAG,QAAQ,EAAG,SAAQ,IAAI,EAAE,IAAI,CAAC;AAEjE,gBAAU,aACL,IAAI,OAAK;AACN,cAAM,MAAM,QAAQ,IAAI,EAAE,SAAS,EAAE;AACrC,YAAI,CAAC,IAAK,QAAO;AACjB,eAAO,EAAE,GAAG,KAAK,OAAO,EAAE,MAAM;AAAA,MACpC,CAAC,EACA,OAAO,OAAO;AAAA,IACvB;AAGA,WAAO,QACF,OAAO,QAAM,EAAE,SAAS,MAAM,QAAQ,EACtC,OAAO,OAAK,CAAC,QAAQ,EAAE,SAAS,IAAI,EACpC,MAAM,GAAG,CAAC;AAAA,EACnB;AA
AA;AAAA;AAAA;AAAA,EAKA,KAAK,QAAgB,IAAI,MAAuC;AAC5D,UAAM,MAAM,OACN,wEACA;AAEN,UAAM,OAAO,OACP,KAAK,IAAI,QAAQ,GAAG,EAAE,IAAI,MAAM,KAAK,IACrC,KAAK,IAAI,QAAQ,GAAG,EAAE,IAAI,KAAK;AAErC,WAAO,KAAK,IAAI,OAAK,KAAK,WAAW,CAAC,CAAC;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EAKA,QAAwD;AACpD,UAAM,QAAS,KAAK,IAAI,QAAQ,yCAAyC,EAAE,IAAI,EAAU;AACzF,UAAM,QAAS,KAAK,IAAI,QAAQ,8DAA8D,EAAE,IAAI,EAAU;AAC9G,UAAM,OAAQ,KAAK,IAAI,QAAQ,6DAA6D,EAAE,IAAI,EAAU;AAC5G,WAAO,EAAE,OAAO,OAAO,KAAK;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,YAAY,aAAqB,IAA0B;AAEvD,UAAM,MAAM,KAAK,IAAI,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,SAK5B,EAAE,IAAI,UAAU;AAEjB,QAAI,IAAI,WAAW,EAAG,QAAO,EAAE,UAAU,EAAE;AAE3C,UAAM,MAAM,IAAI,IAAI,CAAC,MAAW,EAAE,EAAE;AACpC,UAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG;AAGhD,SAAK,IAAI,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,2BAKE,YAAY;AAAA,SAC9B,EAAE,IAAI,GAAG,GAAG;AAEb,WAAO,EAAE,UAAU,IAAI,OAAO;AAAA,EAClC;AAAA;AAAA,EAIA,MAAc,cAAc,OAAe,GAAkC;AACzE,QAAI,KAAK,MAAM,SAAS,EAAG,QAAO,CAAC;AAEnC,UAAM,WAAW,MAAM,KAAK,WAAW,MAAM,KAAK;AAClD,UAAM,OAAO,KAAK,MAAM,OAAO,UAAU,CAAC;AAE1C,QAAI,KAAK,WAAW,EAAG,QAAO,CAAC;AAE/B,UAAM,MAAM,KAAK,IAAI,OAAK,EAAE,EAAE;AAC9B,UAAM,WAAW,IAAI,IAAI,KAAK,IAAI,OAAK,CAAC,EAAE,IAAI,EAAE,KAAK,CAAC,CAAC;AACvD,UAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG;AAEhD,UAAM,OAAO,KAAK,IAAI;AAAA,MAClB,4CAA4C,YAAY;AAAA,IAC5D,EAAE,IAAI,GAAG,GAAG;AAEZ,WAAO,KAAK,IAAI,QAAM;AAAA,MAClB,GAAG,KAAK,WAAW,CAAC;AAAA,MACpB,OAAO,SAAS,IAAI,EAAE,EAAE,KAAK;AAAA,IACjC,EAAE;AAAA,EACN;AAAA,EAEQ,YAAY,OAAe,GAAyB;AAExD,UAAM,QAAQ,MACT,QAAQ,kBAAkB,GAAG,EAC7B,QAAQ,qCAAqC,EAAE,EAC/C,KAAK;AAEV,UAAM,QAAQ,MAAM,MAAM,KAAK,EAAE,OAAO,OAAK,EAAE,SAAS,CAAC;AACzD,QAAI,MAAM,WAAW,EAAG,QAAO,CAAC;AAEhC,UAAM,WAAW,MAAM,IAAI,OAAK,IAAI,CAAC,GAAG,EAAE,KAAK,GAAG;AAElD,QAAI;AACA,YAAM,OAAO,KAAK,IAAI,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,aAO7B,EAAE,IAAI,UAAU,CAAC;AAElB,aAAO,KAAK,IAAI,QAAM;AAAA,QAClB,GAAG,KAAK,WAAW,CAAC;AAAA,QACpB,OAAO,KAAO,IAAM,KAAK,IAAI,QAAQ,KAAK,IAAI,EAAE,KAAK,IAAI,EAAE;AAAA,MAC/D,EAAE;AAAA,IACN,QAAQ;AACJ,aAAO,CAAC;AAAA,IACZ;AAAA,EACJ;AAAA,EAEQ,WAAW,GAAoB;AACn
C,WAAO;AAAA,MACH,IAAI,EAAE;AAAA,MACN,OAAO,EAAE;AAAA,MACT,SAAS,EAAE;AAAA,MACX,WAAW,KAAK,MAAM,EAAE,kBAAkB,IAAI;AAAA,MAC9C,cAAc,KAAK,MAAM,EAAE,cAAc,IAAI;AAAA,MAC7C,UAAU,KAAK,MAAM,EAAE,iBAAiB,IAAI;AAAA,MAC5C,eAAe,KAAK,MAAM,EAAE,aAAa,IAAI;AAAA,MAC7C,MAAM,KAAK,MAAM,EAAE,aAAa,IAAI;AAAA,MACpC,MAAM,EAAE;AAAA,MACR,WAAW,EAAE;AAAA,IACjB;AAAA,EACJ;AACJ;","names":[]}