brainbank 0.3.1 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +174 -15
- package/assets/architecture.png +0 -0
- package/dist/{base-4SUgeRWT.d.ts → base-DZWtdgIf.d.ts} +23 -27
- package/dist/chunk-6XOXM7MI.js +136 -0
- package/dist/chunk-6XOXM7MI.js.map +1 -0
- package/dist/{chunk-FINIFKAY.js → chunk-BNV43SEF.js} +5 -4
- package/dist/chunk-BNV43SEF.js.map +1 -0
- package/dist/{chunk-MGIFEPYZ.js → chunk-DDECTPRM.js} +22 -17
- package/dist/chunk-DDECTPRM.js.map +1 -0
- package/dist/{chunk-5VUYPNH3.js → chunk-HNPABX7L.js} +6 -3
- package/dist/chunk-HNPABX7L.js.map +1 -0
- package/dist/{chunk-2BEWWQL2.js → chunk-MY36UPPQ.js} +227 -112
- package/dist/chunk-MY36UPPQ.js.map +1 -0
- package/dist/chunk-N2OJRXSB.js +117 -0
- package/dist/chunk-N2OJRXSB.js.map +1 -0
- package/dist/{chunk-FI7GWG4W.js → chunk-TTXVJFAE.js} +5 -2
- package/dist/chunk-TTXVJFAE.js.map +1 -0
- package/dist/{chunk-QNHBCOKB.js → chunk-U2Q2XGPZ.js} +7 -2
- package/dist/{chunk-QNHBCOKB.js.map → chunk-U2Q2XGPZ.js.map} +1 -1
- package/dist/{chunk-E6WQM4DN.js → chunk-YOLKSYWK.js} +1 -1
- package/dist/chunk-YOLKSYWK.js.map +1 -0
- package/dist/{chunk-Y3JKI6QN.js → chunk-YRGUIRN5.js} +234 -57
- package/dist/chunk-YRGUIRN5.js.map +1 -0
- package/dist/cli.js +21 -10
- package/dist/cli.js.map +1 -1
- package/dist/code.d.ts +1 -1
- package/dist/code.js +2 -1
- package/dist/docs.d.ts +1 -1
- package/dist/docs.js +2 -1
- package/dist/git.d.ts +1 -1
- package/dist/git.js +2 -1
- package/dist/index.d.ts +100 -4
- package/dist/index.js +16 -8
- package/dist/index.js.map +1 -1
- package/dist/memory.d.ts +1 -1
- package/dist/memory.js +2 -2
- package/dist/notes.d.ts +1 -1
- package/dist/notes.js +3 -2
- package/dist/perplexity-context-embedding-KSVSZXMD.js +9 -0
- package/dist/perplexity-context-embedding-KSVSZXMD.js.map +1 -0
- package/dist/perplexity-embedding-227WQY4R.js +10 -0
- package/dist/perplexity-embedding-227WQY4R.js.map +1 -0
- package/package.json +1 -1
- package/dist/chunk-2BEWWQL2.js.map +0 -1
- package/dist/chunk-5VUYPNH3.js.map +0 -1
- package/dist/chunk-E6WQM4DN.js.map +0 -1
- package/dist/chunk-FI7GWG4W.js.map +0 -1
- package/dist/chunk-FINIFKAY.js.map +0 -1
- package/dist/chunk-MGIFEPYZ.js.map +0 -1
- package/dist/chunk-Y3JKI6QN.js.map +0 -1
package/README.md
CHANGED
|
@@ -8,7 +8,7 @@ BrainBank gives LLMs a long-term memory that persists between sessions.
|
|
|
8
8
|
- **Pluggable indexers** — `.use()` only what you need (code, git, docs, or custom)
|
|
9
9
|
- **Dynamic collections** — `brain.collection('errors')` for any structured data
|
|
10
10
|
- **Hybrid search** — vector + BM25 fused with Reciprocal Rank Fusion
|
|
11
|
-
- **Pluggable embeddings** — local WASM (free) or
|
|
11
|
+
- **Pluggable embeddings** — local WASM (free), OpenAI, or Perplexity (standard & contextualized)
|
|
12
12
|
- **Multi-repo** — index multiple repositories into one shared database
|
|
13
13
|
- **Portable** — single `.brainbank/brainbank.db` file
|
|
14
14
|
- **Optional packages** — [`@brainbank/memory`](#memory) (fact extraction + entity graph), [`@brainbank/reranker`](#reranker) (Qwen3 cross-encoder), [`@brainbank/mcp`](#mcp-server) (MCP server)
|
|
@@ -73,6 +73,7 @@ Most AI memory solutions (mem0, Zep, LangMem) require cloud services, external d
|
|
|
73
73
|
- [Benchmarks](#benchmarks)
|
|
74
74
|
- [Search Quality: AST vs Sliding Window](#search-quality-ast-vs-sliding-window)
|
|
75
75
|
- [Grammar Support](#grammar-support)
|
|
76
|
+
- [RAG Retrieval Quality](#rag-retrieval-quality) · [Full Results →](./BENCHMARKS.md)
|
|
76
77
|
|
|
77
78
|
---
|
|
78
79
|
|
|
@@ -291,7 +292,7 @@ decisions.prune({ olderThan: '30d' }); // remove older than 30 days
|
|
|
291
292
|
brain.listCollectionNames(); // → ['decisions', ...]
|
|
292
293
|
```
|
|
293
294
|
|
|
294
|
-
> 📂 See [examples/
|
|
295
|
+
> 📂 See [examples/collection](examples/collection/) for a complete runnable demo with cross-collection linking and metadata.
|
|
295
296
|
|
|
296
297
|
### Watch Mode
|
|
297
298
|
|
|
@@ -595,8 +596,9 @@ brainbank kv search conversations "what did we decide about auth"
|
|
|
595
596
|
|
|
596
597
|
| Example | Description | Run |
|
|
597
598
|
|---------|-------------|-----|
|
|
598
|
-
| [
|
|
599
|
-
| [
|
|
599
|
+
| [rag](examples/rag/) | RAG chatbot — docs retrieval + generation | `OPENAI_API_KEY=sk-... PERPLEXITY_API_KEY=pplx-... npx tsx examples/rag/rag.ts --docs <path>` |
|
|
600
|
+
| [memory](examples/memory/) | Memory chatbot — fact extraction + entity graph | `OPENAI_API_KEY=sk-... npx tsx examples/memory/memory.ts` |
|
|
601
|
+
| [collection](examples/collection/) | Collections, semantic search, tags, metadata linking | `npx tsx examples/collection/collection.ts` |
|
|
600
602
|
|
|
601
603
|
---
|
|
602
604
|
|
|
@@ -628,7 +630,7 @@ Add to your MCP config (`~/.gemini/antigravity/mcp_config.json` or Claude Deskto
|
|
|
628
630
|
|
|
629
631
|
The agent passes the `repo` parameter on each tool call based on the active workspace — no hardcoded paths needed.
|
|
630
632
|
|
|
631
|
-
> Set `BRAINBANK_EMBEDDING` to `openai` for higher quality search
|
|
633
|
+
> Set `BRAINBANK_EMBEDDING` to `openai`, `perplexity`, or `perplexity-context` for higher quality search. Omit to use the free local WASM embeddings.
|
|
632
634
|
|
|
633
635
|
> Optionally set `BRAINBANK_REPO` as a default fallback repo. If omitted, every tool call must include the `repo` parameter (recommended for multi-workspace setups).
|
|
634
636
|
|
|
@@ -642,7 +644,7 @@ The agent passes the `repo` parameter on each tool call based on the active work
|
|
|
642
644
|
> 2. MCP server starts without `BRAINBANK_EMBEDDING` env var → defaults to local (384 dims)
|
|
643
645
|
> 3. **Result:** BrainBank throws `Embedding dimension mismatch` on every search
|
|
644
646
|
>
|
|
645
|
-
> **Fix:** Always set `BRAINBANK_EMBEDDING` consistently in your MCP config, CLI, and API usage. If you indexed with OpenAI, your MCP config **must** include `"BRAINBANK_EMBEDDING": "openai"`. If you switch providers, run `brainbank reembed` to regenerate all vectors.
|
|
647
|
+
> **Fix:** Always set `BRAINBANK_EMBEDDING` consistently in your MCP config, CLI, and API usage. If you indexed with OpenAI, your MCP config **must** include `"BRAINBANK_EMBEDDING": "openai"`. Same for `perplexity` or `perplexity-context`. If you switch providers, run `brainbank reembed` to regenerate all vectors.
|
|
646
648
|
|
|
647
649
|
### Available Tools
|
|
648
650
|
|
|
@@ -686,22 +688,67 @@ const brain = new BrainBank({
|
|
|
686
688
|
|----------|--------|------|-------|------|
|
|
687
689
|
| **Local (default)** | built-in | 384 | ⚡ 0ms | Free |
|
|
688
690
|
| **OpenAI** | `OpenAIEmbedding` | 1536 | ~100ms | $0.02/1M tokens |
|
|
691
|
+
| **Perplexity** | `PerplexityEmbedding` | 2560 (4b) / 1024 (0.6b) | ~100ms | $0.02/1M tokens |
|
|
692
|
+
| **Perplexity Context** | `PerplexityContextEmbedding` | 2560 (4b) / 1024 (0.6b) | ~100ms | $0.06/1M tokens |
|
|
693
|
+
|
|
694
|
+
#### OpenAI
|
|
689
695
|
|
|
690
696
|
```typescript
|
|
691
697
|
import { OpenAIEmbedding } from 'brainbank';
|
|
692
698
|
|
|
693
|
-
//
|
|
694
|
-
new OpenAIEmbedding();
|
|
695
|
-
|
|
696
|
-
// Custom options
|
|
699
|
+
new OpenAIEmbedding(); // uses OPENAI_API_KEY env var
|
|
697
700
|
new OpenAIEmbedding({
|
|
698
701
|
model: 'text-embedding-3-large',
|
|
699
|
-
dims: 512,
|
|
702
|
+
dims: 512, // Matryoshka reduction
|
|
700
703
|
apiKey: 'sk-...',
|
|
701
|
-
baseUrl: 'https://my-proxy.com/v1/embeddings',
|
|
704
|
+
baseUrl: 'https://my-proxy.com/v1/embeddings',
|
|
705
|
+
});
|
|
706
|
+
```
|
|
707
|
+
|
|
708
|
+
#### Perplexity (Standard)
|
|
709
|
+
|
|
710
|
+
Best for independent texts, queries, and code chunks.
|
|
711
|
+
|
|
712
|
+
```typescript
|
|
713
|
+
import { PerplexityEmbedding } from 'brainbank';
|
|
714
|
+
|
|
715
|
+
new PerplexityEmbedding(); // uses PERPLEXITY_API_KEY env var
|
|
716
|
+
new PerplexityEmbedding({
|
|
717
|
+
model: 'pplx-embed-v1-0.6b', // smaller, faster (1024d)
|
|
718
|
+
dims: 512, // Matryoshka reduction
|
|
719
|
+
});
|
|
720
|
+
```
|
|
721
|
+
|
|
722
|
+
#### Perplexity (Contextualized)
|
|
723
|
+
|
|
724
|
+
Chunks share document context → better retrieval for related code/docs.
|
|
725
|
+
|
|
726
|
+
```typescript
|
|
727
|
+
import { PerplexityContextEmbedding } from 'brainbank';
|
|
728
|
+
|
|
729
|
+
new PerplexityContextEmbedding(); // uses PERPLEXITY_API_KEY env var
|
|
730
|
+
new PerplexityContextEmbedding({
|
|
731
|
+
model: 'pplx-embed-context-v1-0.6b', // smaller, faster (1024d)
|
|
732
|
+
dims: 512, // Matryoshka reduction
|
|
702
733
|
});
|
|
703
734
|
```
|
|
704
735
|
|
|
736
|
+
#### Benchmarks
|
|
737
|
+
|
|
738
|
+
Real benchmarks on a production NestJS backend (1052 code chunks + git history):
|
|
739
|
+
|
|
740
|
+
| Provider | Dims | Index Time | Avg Search | Cost |
|
|
741
|
+
|----------|------|------------|------------|------|
|
|
742
|
+
| **Local WASM** | 384 | 87s | **8ms** | Free |
|
|
743
|
+
| **OpenAI** | 1536 | 106s | 202ms | $0.02/1M tok |
|
|
744
|
+
| **Perplexity** | 2560 | **66s** ⚡ | 168ms | $0.02/1M tok |
|
|
745
|
+
| **Perplexity Context** | 2560 | 78s | 135ms | $0.06/1M tok |
|
|
746
|
+
|
|
747
|
+
- **Fastest indexing:** Perplexity standard — 38% faster than OpenAI
|
|
748
|
+
- **Fastest search (API):** Perplexity Context — 33% faster than OpenAI
|
|
749
|
+
- **Fastest search (total):** Local WASM — no network latency
|
|
750
|
+
- **Best context awareness:** Perplexity Context — finds semantically related chunks others miss
|
|
751
|
+
|
|
705
752
|
> [!WARNING]
|
|
706
753
|
> Switching embedding provider (e.g. local → OpenAI) changes the vector dimensions. BrainBank will **refuse to initialize** if the stored dimensions don't match the current provider. Use `initialize({ force: true })` and then `reembed()` to migrate, or switch back to the original provider.
|
|
707
754
|
|
|
@@ -778,6 +825,91 @@ const myReranker: Reranker = {
|
|
|
778
825
|
|
|
779
826
|
Without a reranker, BrainBank uses pure RRF fusion — which is already production-quality for most use cases.
|
|
780
827
|
|
|
828
|
+
### Notes
|
|
829
|
+
|
|
830
|
+
The notes plugin gives your agent **persistent conversation memory** — store structured digests of past sessions and recall them via hybrid search.
|
|
831
|
+
|
|
832
|
+
```typescript
|
|
833
|
+
import { BrainBank } from 'brainbank';
|
|
834
|
+
import { notes } from 'brainbank/notes';
|
|
835
|
+
|
|
836
|
+
const brain = new BrainBank({ repoPath: '.' });
|
|
837
|
+
brain.use(notes());
|
|
838
|
+
await brain.initialize();
|
|
839
|
+
|
|
840
|
+
const notesPlugin = brain.indexer('notes');
|
|
841
|
+
|
|
842
|
+
// Store a conversation digest
|
|
843
|
+
await notesPlugin.remember({
|
|
844
|
+
title: 'Refactored auth module',
|
|
845
|
+
summary: 'Extracted JWT validation into middleware, added refresh token rotation',
|
|
846
|
+
decisions: ['Use RS256 over HS256', 'Refresh tokens stored in httpOnly cookie'],
|
|
847
|
+
filesChanged: ['src/auth/jwt.ts', 'src/middleware/auth.ts'],
|
|
848
|
+
patterns: ['Always validate token expiry before DB lookup'],
|
|
849
|
+
openQuestions: ['Should we add rate limiting to the refresh endpoint?'],
|
|
850
|
+
tags: ['auth', 'security'],
|
|
851
|
+
});
|
|
852
|
+
|
|
853
|
+
// Recall relevant notes
|
|
854
|
+
const relevant = await notesPlugin.recall('JWT token validation', { k: 3 });
|
|
855
|
+
|
|
856
|
+
// List recent notes
|
|
857
|
+
const recent = notesPlugin.list(10);
|
|
858
|
+
const longTermOnly = notesPlugin.list(10, 'long');
|
|
859
|
+
|
|
860
|
+
// Consolidate: promote old short-term notes to long-term (keeps last 20 as short)
|
|
861
|
+
const { promoted } = notesPlugin.consolidate(20);
|
|
862
|
+
```
|
|
863
|
+
|
|
864
|
+
**Memory tiers:**
|
|
865
|
+
- **`short`** (default) — Full digest with all fields, kept for recent sessions
|
|
866
|
+
- **`long`** — Compressed: only title, summary, decisions, and patterns preserved. Files and open questions dropped
|
|
867
|
+
|
|
868
|
+
Consolidation automatically promotes notes beyond the keep window from `short` → `long`, reducing storage while preserving key learnings.
|
|
869
|
+
|
|
870
|
+
### Agent Memory (Patterns)
|
|
871
|
+
|
|
872
|
+
The memory plugin enables **learning from experience** — your agent records what worked (and what didn't) across tasks, then distills patterns into reusable strategies.
|
|
873
|
+
|
|
874
|
+
```typescript
|
|
875
|
+
import { BrainBank } from 'brainbank';
|
|
876
|
+
import { memory } from 'brainbank/memory';
|
|
877
|
+
|
|
878
|
+
const brain = new BrainBank({ repoPath: '.' });
|
|
879
|
+
brain.use(memory());
|
|
880
|
+
await brain.initialize();
|
|
881
|
+
|
|
882
|
+
const mem = brain.indexer('memory');
|
|
883
|
+
|
|
884
|
+
// Record a learning pattern
|
|
885
|
+
await mem.learn({
|
|
886
|
+
taskType: 'refactor',
|
|
887
|
+
task: 'Extract auth logic into middleware',
|
|
888
|
+
approach: 'Created Express middleware, moved JWT validation from routes',
|
|
889
|
+
outcome: 'Reduced route handler size by 60%, improved testability',
|
|
890
|
+
successRate: 0.95,
|
|
891
|
+
critique: 'Should have added integration tests before refactoring',
|
|
892
|
+
});
|
|
893
|
+
|
|
894
|
+
// Search for similar patterns before starting a new task
|
|
895
|
+
const patterns = await mem.search('refactor database queries');
|
|
896
|
+
|
|
897
|
+
// Consolidate: prune old failures + merge duplicates
|
|
898
|
+
const { pruned, deduped } = mem.consolidate();
|
|
899
|
+
|
|
900
|
+
// Distill top patterns into a strategy
|
|
901
|
+
const strategy = mem.distill('refactor');
|
|
902
|
+
// → "Strategy for 'refactor' (5 patterns, avg success 88%):
|
|
903
|
+
// • Created middleware, moved validation from routes (95%)
|
|
904
|
+
// └ Should have added integration tests before refactoring"
|
|
905
|
+
```
|
|
906
|
+
|
|
907
|
+
**How it works:**
|
|
908
|
+
1. **Learn** — Records task, approach, outcome, and success rate. Embeds for semantic search
|
|
909
|
+
2. **Search** — Finds similar successful patterns (filters by `successRate ≥ 0.5`)
|
|
910
|
+
3. **Consolidate** — Auto-runs every 50 patterns: prunes failures older than 90 days, deduplicates (cosine > 0.95)
|
|
911
|
+
4. **Distill** — Aggregates top patterns per task type into a single strategy text with confidence score
|
|
912
|
+
|
|
781
913
|
---
|
|
782
914
|
|
|
783
915
|
## Memory
|
|
@@ -831,7 +963,7 @@ The `LLMProvider` interface works with any framework:
|
|
|
831
963
|
| Vercel AI SDK | `generateText()` → string |
|
|
832
964
|
| Any LLM | Implement `{ generate(messages) → string }` |
|
|
833
965
|
|
|
834
|
-
> 📂 See [examples/
|
|
966
|
+
> 📂 See [examples/memory](examples/memory/) for a runnable demo. All three LLM backends supported via `--llm` flag.
|
|
835
967
|
|
|
836
968
|
> 📦 Full docs: [packages/memory/README.md](packages/memory/README.md)
|
|
837
969
|
|
|
@@ -842,9 +974,10 @@ The `LLMProvider` interface works with any framework:
|
|
|
842
974
|
| Variable | Description |
|
|
843
975
|
|----------|-------------|
|
|
844
976
|
| `BRAINBANK_REPO` | Default repository path (optional — auto-detected from `.git/` or passed per tool call) |
|
|
845
|
-
| `BRAINBANK_EMBEDDING` | Embedding provider: `local` (default), `openai` |
|
|
977
|
+
| `BRAINBANK_EMBEDDING` | Embedding provider: `local` (default), `openai`, `perplexity`, `perplexity-context` |
|
|
846
978
|
| `BRAINBANK_DEBUG` | Show full stack traces |
|
|
847
979
|
| `OPENAI_API_KEY` | Required when using `BRAINBANK_EMBEDDING=openai` |
|
|
980
|
+
| `PERPLEXITY_API_KEY` | Required when using `BRAINBANK_EMBEDDING=perplexity` or `perplexity-context` |
|
|
848
981
|
|
|
849
982
|
---
|
|
850
983
|
|
|
@@ -1102,6 +1235,29 @@ All 9 core grammars verified, each parsing in **<0.05ms**:
|
|
|
1102
1235
|
|
|
1103
1236
|
> Additional grammars available: C++, Swift, C#, Kotlin, Scala, Lua, Elixir, Bash, HTML, CSS
|
|
1104
1237
|
|
|
1238
|
+
### RAG Retrieval Quality
|
|
1239
|
+
|
|
1240
|
+
BrainBank's hybrid search pipeline (Vector + BM25 → RRF) with Perplexity Context embeddings (2560d):
|
|
1241
|
+
|
|
1242
|
+
| Benchmark | Metric | Score |
|
|
1243
|
+
|---|---|:---:|
|
|
1244
|
+
| **BEIR SciFact** (5,183 docs, 300 queries) | NDCG@10 | **0.761** |
|
|
1245
|
+
| **Custom semantic** (127 docs, 20 queries) | R@5 | **83%** |
|
|
1246
|
+
|
|
1247
|
+
The hybrid pipeline improved R@5 by **+26pp over vector-only** retrieval on our custom eval.
|
|
1248
|
+
|
|
1249
|
+
See **[BENCHMARKS.md](./BENCHMARKS.md)** for full pipeline progression, per-technique impact, and reproduction instructions.
|
|
1250
|
+
|
|
1251
|
+
#### Running the RAG Eval
|
|
1252
|
+
|
|
1253
|
+
```bash
|
|
1254
|
+
# Custom eval on your own docs
|
|
1255
|
+
PERPLEXITY_API_KEY=pplx-... npx tsx test/benchmarks/rag/eval.ts --docs ~/path/to/docs
|
|
1256
|
+
|
|
1257
|
+
# BEIR standard benchmark
|
|
1258
|
+
PERPLEXITY_API_KEY=pplx-... npx tsx test/benchmarks/rag/beir-eval.ts --dataset scifact
|
|
1259
|
+
```
|
|
1260
|
+
|
|
1105
1261
|
### Running Benchmarks
|
|
1106
1262
|
|
|
1107
1263
|
```bash
|
|
@@ -1110,6 +1266,9 @@ node test/benchmarks/grammar-support.mjs
|
|
|
1110
1266
|
|
|
1111
1267
|
# Search quality A/B (uses BrainBank's own source files)
|
|
1112
1268
|
node test/benchmarks/search-quality.mjs
|
|
1269
|
+
|
|
1270
|
+
# RAG retrieval quality (requires Perplexity API key + docs folder)
|
|
1271
|
+
PERPLEXITY_API_KEY=pplx-... npx tsx test/benchmarks/rag/eval.ts --docs ~/path/to/docs
|
|
1113
1272
|
```
|
|
1114
1273
|
|
|
1115
1274
|
---
|
|
@@ -1143,7 +1302,7 @@ node test/benchmarks/search-quality.mjs
|
|
|
1143
1302
|
│ └──────────────────────────────────────────────────┘│
|
|
1144
1303
|
│ │
|
|
1145
1304
|
│ ┌──────────────────────────────────────────────────┐│
|
|
1146
|
-
│ │ Embedding (Local
|
|
1305
|
+
│ │ Embedding (Local 384d│OpenAI 1536d│Perplexity) ││
|
|
1147
1306
|
│ └──────────────────────────────────────────────────┘│
|
|
1148
1307
|
│ ┌──────────────────────────────────────────────────┐│
|
|
1149
1308
|
│ │ Qwen3-Reranker (opt-in cross-encoder) ││
|
package/assets/architecture.png
CHANGED
|
Binary file
|
|
@@ -203,6 +203,8 @@ interface DocumentResultMetadata {
|
|
|
203
203
|
seq?: number;
|
|
204
204
|
path?: string;
|
|
205
205
|
searchType?: string;
|
|
206
|
+
/** Internal chunk ID used by hybrid search to map fused results. */
|
|
207
|
+
chunkId?: number;
|
|
206
208
|
}
|
|
207
209
|
interface CodeResult {
|
|
208
210
|
type: 'code';
|
|
@@ -362,6 +364,9 @@ interface CoEditSuggestion {
|
|
|
362
364
|
* Wraps hnswlib-node for O(log n) approximate nearest neighbor search.
|
|
363
365
|
* M=16 connections, ef=200 construction, ef=50 search by default.
|
|
364
366
|
* 150x faster than brute force at 1M vectors.
|
|
367
|
+
*
|
|
368
|
+
* Supports disk persistence: save(path) / tryLoad(path, count)
|
|
369
|
+
* to skip costly vector-by-vector rebuild on startup.
|
|
365
370
|
*/
|
|
366
371
|
|
|
367
372
|
declare class HNSWIndex implements VectorIndex {
|
|
@@ -407,6 +412,18 @@ declare class HNSWIndex implements VectorIndex {
|
|
|
407
412
|
search(query: Float32Array, k: number): SearchHit[];
|
|
408
413
|
/** Number of vectors in the index. */
|
|
409
414
|
get size(): number;
|
|
415
|
+
/**
|
|
416
|
+
* Save the HNSW graph to disk.
|
|
417
|
+
* The file can be loaded later with tryLoad() to skip vector-by-vector insertion.
|
|
418
|
+
*/
|
|
419
|
+
save(path: string): void;
|
|
420
|
+
/**
|
|
421
|
+
* Try to load a previously saved HNSW index from disk.
|
|
422
|
+
* Returns true if loaded successfully, false if stale or missing.
|
|
423
|
+
* @param path File path to the saved index
|
|
424
|
+
* @param expectedCount Expected number of vectors (from SQLite) — used to detect staleness
|
|
425
|
+
*/
|
|
426
|
+
tryLoad(path: string, expectedCount: number): boolean;
|
|
410
427
|
}
|
|
411
428
|
|
|
412
429
|
/**
|
|
@@ -543,40 +560,18 @@ interface Indexer {
|
|
|
543
560
|
readonly name: string;
|
|
544
561
|
/** Initialize the indexer (create HNSW, load vectors, etc.). */
|
|
545
562
|
initialize(ctx: IndexerContext): Promise<void>;
|
|
546
|
-
/** Index content (code, git plugins). */
|
|
547
|
-
index?(options?: any): Promise<any>;
|
|
548
|
-
/** Search indexed content (docs plugin). */
|
|
549
|
-
search?(query: string, options?: any): Promise<any[]>;
|
|
550
|
-
/** Register a document collection (docs plugin). */
|
|
551
|
-
addCollection?(collection: any): void;
|
|
552
|
-
/** Remove a collection (docs plugin). */
|
|
553
|
-
removeCollection?(name: string): void;
|
|
554
|
-
/** List registered collections (docs plugin). */
|
|
555
|
-
listCollections?(): any[];
|
|
556
|
-
/** Index collections (docs plugin). */
|
|
557
|
-
indexCollections?(options?: any): Promise<any>;
|
|
558
|
-
/** Add context for a collection path (docs plugin). */
|
|
559
|
-
addContext?(collection: string, path: string, context: string): void;
|
|
560
|
-
/** Remove context (docs plugin). */
|
|
561
|
-
removeContext?(collection: string, path: string): void;
|
|
562
|
-
/** List context entries (docs plugin). */
|
|
563
|
-
listContexts?(): any[];
|
|
564
|
-
/** Watch mode: handle file change (returns true if handled). */
|
|
565
|
-
onFileChange?(filePath: string, event: 'create' | 'update' | 'delete'): Promise<boolean>;
|
|
566
|
-
/** Glob patterns for watch mode. */
|
|
567
|
-
watchPatterns?(): string[];
|
|
568
563
|
/** Return stats for this indexer. */
|
|
569
564
|
stats?(): Record<string, any>;
|
|
570
565
|
/** Clean up resources. */
|
|
571
566
|
close?(): void;
|
|
572
567
|
}
|
|
573
|
-
/** Indexers that can scan and index content. */
|
|
568
|
+
/** Indexers that can scan and index content (code, git). */
|
|
574
569
|
interface IndexablePlugin extends Indexer {
|
|
575
570
|
index(options?: any): Promise<any>;
|
|
576
571
|
}
|
|
577
|
-
/** Indexers that can search indexed content. */
|
|
572
|
+
/** Indexers that can search indexed content (docs). */
|
|
578
573
|
interface SearchablePlugin extends Indexer {
|
|
579
|
-
search(query: string, options?: any): Promise<
|
|
574
|
+
search(query: string, options?: any): Promise<SearchResult[]>;
|
|
580
575
|
}
|
|
581
576
|
/** Indexers that support file watch mode. */
|
|
582
577
|
interface WatchablePlugin extends Indexer {
|
|
@@ -585,10 +580,11 @@ interface WatchablePlugin extends Indexer {
|
|
|
585
580
|
}
|
|
586
581
|
/** Indexers that manage document collections. */
|
|
587
582
|
interface CollectionPlugin extends Indexer {
|
|
588
|
-
addCollection(collection:
|
|
583
|
+
addCollection(collection: DocumentCollection): void;
|
|
589
584
|
removeCollection(name: string): void;
|
|
590
|
-
listCollections():
|
|
585
|
+
listCollections(): DocumentCollection[];
|
|
591
586
|
indexCollections(options?: any): Promise<any>;
|
|
587
|
+
search(query: string, options?: any): Promise<SearchResult[]>;
|
|
592
588
|
addContext?(collection: string, path: string, context: string): void;
|
|
593
589
|
removeContext?(collection: string, path: string): void;
|
|
594
590
|
listContexts?(): any[];
|
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
import {
  decodeBase64Int8
} from "./chunk-N2OJRXSB.js";
import {
  __name
} from "./chunk-7QVYU63E.js";

// src/providers/embeddings/perplexity-context-embedding.ts
//
// Perplexity contextualized-embedding provider. Unlike the standard
// embeddings API, the request input is string[][] (documents × chunks):
// chunks inside one document share context during embedding, and the
// response is nested per document. This class adapts that nested API to
// BrainBank's flat EmbeddingProvider interface.
var DEFAULT_MODEL = "pplx-embed-context-v1-4b";
// Default output dimensions per model, used when options.dims is omitted.
var DEFAULT_DIMS = {
  "pplx-embed-context-v1-0.6b": 1024,
  "pplx-embed-context-v1-4b": 2560
};
var API_URL = "https://api.perplexity.ai/v1/contextualizedembeddings";
var REQUEST_TIMEOUT_MS = 3e4;
// Pause between consecutive per-document requests in embedBatch (basic rate limiting).
var BATCH_DELAY_MS = 100;
var PerplexityContextEmbedding = class {
  static {
    __name(this, "PerplexityContextEmbedding");
  }
  dims;
  _apiKey;
  _model;
  _baseUrl;
  _requestDims;
  _timeout;
  /**
   * @param {object} [options]
   * @param {string} [options.apiKey] Falls back to the PERPLEXITY_API_KEY env var.
   * @param {string} [options.model] Default: "pplx-embed-context-v1-4b".
   * @param {number} [options.dims] Matryoshka reduction; also forwarded to the API.
   * @param {string} [options.baseUrl] Endpoint override.
   * @param {number} [options.timeout] Request timeout in ms. Default: 30000.
   */
  constructor(options = {}) {
    this._apiKey = options.apiKey ?? process.env.PERPLEXITY_API_KEY ?? "";
    this._model = options.model ?? DEFAULT_MODEL;
    this._baseUrl = options.baseUrl ?? API_URL;
    this._timeout = options.timeout ?? REQUEST_TIMEOUT_MS;
    if (options.dims) {
      // Explicit dims are both reported locally and requested from the API.
      this._requestDims = options.dims;
      this.dims = options.dims;
    } else {
      // Unknown model names fall back to the 4b default width.
      this.dims = DEFAULT_DIMS[this._model] ?? 2560;
    }
  }
  /** Embed a single text. Wraps it as [[text]] for the contextualized API. */
  async embed(text) {
    const results = await this._request([[text]]);
    if (results.length === 0) {
      // Defensive: a well-formed response echoes one embedding per input chunk.
      throw new Error("BrainBank: Perplexity contextualized embedding API returned no embeddings.");
    }
    return results[0];
  }
  /**
   * Embed multiple texts as chunks of contextualized documents.
   * Splits into sub-documents to stay under Perplexity's per-document token limit.
   */
  async embedBatch(texts) {
    if (texts.length === 0) return [];
    const docs = splitIntoDocuments(texts);
    const results = [];
    for (let i = 0; i < docs.length; i++) {
      if (i > 0) await sleep(BATCH_DELAY_MS);
      const embeddings = await this._request([docs[i]]);
      results.push(...embeddings);
    }
    return results;
  }
  /** No resources to release. */
  async close() {
  }
  /** Send one contextualized request. Input is string[][] (docs × chunks). */
  async _request(input) {
    if (!this._apiKey) {
      throw new Error(
        "BrainBank: Perplexity API key required. Set PERPLEXITY_API_KEY env var or pass apiKey option."
      );
    }
    // Hard cap per chunk keeps request bodies bounded.
    const MAX_CHARS = 24e3;
    const safeInput = input.map(
      (doc) => doc.map((chunk) => chunk.length > MAX_CHARS ? chunk.slice(0, MAX_CHARS) : chunk)
    );
    const body = { model: this._model, input: safeInput };
    if (this._requestDims) body.dimensions = this._requestDims;
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), this._timeout);
    let res;
    try {
      res = await fetch(this._baseUrl, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          "Authorization": `Bearer ${this._apiKey}`
        },
        body: JSON.stringify(body),
        signal: controller.signal
      });
    } catch (err) {
      // Translate our own timeout abort into an actionable message.
      if (err instanceof Error && err.name === "AbortError") {
        throw new Error(`BrainBank: Perplexity contextualized embedding request timed out after ${this._timeout}ms.`);
      }
      throw err;
    } finally {
      // Single cleanup point — runs on both the success and failure paths.
      clearTimeout(timer);
    }
    if (!res.ok) {
      const errText = await res.text();
      throw new Error(`BrainBank: Perplexity contextualized embedding API error (${res.status}): ${errText}`);
    }
    const json = await res.json();
    return flattenContextResponse(json, this.dims);
  }
};
// Flatten the nested doc → chunk response into one flat, index-ordered array
// of decoded vectors.
function flattenContextResponse(json, dims) {
  return json.data.sort((a, b) => a.index - b.index).flatMap(
    (doc) => doc.data.sort((a, b) => a.index - b.index).map((chunk) => decodeBase64Int8(chunk.embedding, dims))
  );
}
__name(flattenContextResponse, "flattenContextResponse");
// Split chunks into sub-documents under a per-document size budget
// (~80k chars ≈ ~20k tokens at a rough 4-chars/token estimate).
function splitIntoDocuments(texts) {
  const MAX_CHARS_PER_DOC = 8e4;
  const docs = [];
  let current = [];
  let currentChars = 0;
  for (const text of texts) {
    if (current.length > 0 && currentChars + text.length > MAX_CHARS_PER_DOC) {
      docs.push(current);
      current = [];
      currentChars = 0;
    }
    current.push(text);
    currentChars += text.length;
  }
  if (current.length > 0) docs.push(current);
  return docs;
}
__name(splitIntoDocuments, "splitIntoDocuments");
// Simple delay helper.
function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}
__name(sleep, "sleep");

export {
  PerplexityContextEmbedding
};
//# sourceMappingURL=chunk-6XOXM7MI.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/providers/embeddings/perplexity-context-embedding.ts"],"sourcesContent":["/**\n * BrainBank — Perplexity Contextualized Embedding Provider\n *\n * Uses Perplexity's contextualized embeddings API for document-aware vectors.\n * Chunks from the same document share context, improving retrieval quality.\n *\n * Models: pplx-embed-context-v1-0.6b (1024d), pplx-embed-context-v1-4b (2560d).\n *\n * Key difference from standard: input is string[][] (docs × chunks) and the\n * response has a nested structure. This provider adapts the flat BrainBank\n * EmbeddingProvider interface to the nested Perplexity API:\n * - embed(text) → wraps as [[text]]\n * - embedBatch(texts) → wraps as [texts] (one \"document\" of related chunks)\n *\n * Usage:\n * const brain = new BrainBank({\n * embeddingProvider: new PerplexityContextEmbedding(),\n * });\n */\n\nimport type { EmbeddingProvider } from '@/types.ts';\nimport { decodeBase64Int8 } from './perplexity-embedding.ts';\n\nconst DEFAULT_MODEL = 'pplx-embed-context-v1-4b';\nconst DEFAULT_DIMS: Record<string, number> = {\n 'pplx-embed-context-v1-0.6b': 1024,\n 'pplx-embed-context-v1-4b': 2560,\n};\nconst API_URL = 'https://api.perplexity.ai/v1/contextualizedembeddings';\nconst MAX_BATCH = 100;\nconst REQUEST_TIMEOUT_MS = 30_000;\nconst BATCH_DELAY_MS = 100;\n\nexport interface PerplexityContextEmbeddingOptions {\n /** Perplexity API key. Falls back to PERPLEXITY_API_KEY env var. */\n apiKey?: string;\n /** Model name. Default: 'pplx-embed-context-v1-4b' */\n model?: string;\n /** Vector dimensions (Matryoshka reduction). If omitted, uses model default. */\n dims?: number;\n /** Base URL override. */\n baseUrl?: string;\n /** Request timeout in ms. 
Default: 30000 */\n timeout?: number;\n}\n\nexport class PerplexityContextEmbedding implements EmbeddingProvider {\n readonly dims: number;\n\n private _apiKey: string;\n private _model: string;\n private _baseUrl: string;\n private _requestDims: number | undefined;\n private _timeout: number;\n\n constructor(options: PerplexityContextEmbeddingOptions = {}) {\n this._apiKey = options.apiKey ?? process.env.PERPLEXITY_API_KEY ?? '';\n this._model = options.model ?? DEFAULT_MODEL;\n this._baseUrl = options.baseUrl ?? API_URL;\n this._timeout = options.timeout ?? REQUEST_TIMEOUT_MS;\n\n if (options.dims) {\n this._requestDims = options.dims;\n this.dims = options.dims;\n } else {\n this.dims = DEFAULT_DIMS[this._model] ?? 2560;\n }\n }\n\n /** Embed a single text. Wraps as [[text]] for the contextualized API. */\n async embed(text: string): Promise<Float32Array> {\n const results = await this._request([[text]]);\n return results[0];\n }\n\n /**\n * Embed multiple texts as chunks of contextualized documents.\n * Splits into sub-documents to stay under Perplexity's 32k token/doc limit.\n */\n async embedBatch(texts: string[]): Promise<Float32Array[]> {\n if (texts.length === 0) return [];\n\n const docs = splitIntoDocuments(texts);\n const results: Float32Array[] = [];\n\n for (let i = 0; i < docs.length; i++) {\n if (i > 0) await sleep(BATCH_DELAY_MS);\n const embeddings = await this._request([docs[i]]);\n results.push(...embeddings);\n }\n\n return results;\n }\n\n async close(): Promise<void> {\n // No resources to release\n }\n\n /** Send a contextualized request. Input is string[][] (docs × chunks). */\n private async _request(input: string[][]): Promise<Float32Array[]> {\n if (!this._apiKey) {\n throw new Error(\n 'BrainBank: Perplexity API key required. Set PERPLEXITY_API_KEY env var or pass apiKey option.',\n );\n }\n\n const MAX_CHARS = 24_000;\n const safeInput = input.map(doc =>\n doc.map(chunk => chunk.length > MAX_CHARS ? 
chunk.slice(0, MAX_CHARS) : chunk),\n );\n\n const body: Record<string, unknown> = { model: this._model, input: safeInput };\n if (this._requestDims) body.dimensions = this._requestDims;\n\n const controller = new AbortController();\n const timer = setTimeout(() => controller.abort(), this._timeout);\n\n let res: Response;\n try {\n res = await fetch(this._baseUrl, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${this._apiKey}`,\n },\n body: JSON.stringify(body),\n signal: controller.signal,\n });\n } catch (err: unknown) {\n clearTimeout(timer);\n if (err instanceof Error && err.name === 'AbortError') {\n throw new Error(`BrainBank: Perplexity contextualized embedding request timed out after ${this._timeout}ms.`);\n }\n throw err;\n } finally {\n clearTimeout(timer);\n }\n\n if (!res.ok) {\n const errText = await res.text();\n throw new Error(`BrainBank: Perplexity contextualized embedding API error (${res.status}): ${errText}`);\n }\n\n const json = await res.json() as PerplexityContextResponse;\n return flattenContextResponse(json, this.dims);\n }\n}\n\n// ── Response Types ──────────────────────────────────\n\ninterface PerplexityContextResponse {\n data: Array<{\n index: number;\n data: Array<{ index: number; embedding: string }>;\n }>;\n}\n\n/** Flatten nested doc → chunk response into a single flat array. 
*/\nfunction flattenContextResponse(json: PerplexityContextResponse, dims: number): Float32Array[] {\n return json.data\n .sort((a, b) => a.index - b.index)\n .flatMap(doc =>\n doc.data\n .sort((a, b) => a.index - b.index)\n .map(chunk => decodeBase64Int8(chunk.embedding, dims)),\n );\n}\n\n/**\n * Split chunks into sub-documents that each stay under the 32k token limit.\n * Uses ~4 chars/token estimate with safety margin (~80k chars ≈ ~20k tokens).\n */\nfunction splitIntoDocuments(texts: string[]): string[][] {\n const MAX_CHARS_PER_DOC = 80_000;\n const docs: string[][] = [];\n let current: string[] = [];\n let currentChars = 0;\n\n for (const text of texts) {\n if (current.length > 0 && currentChars + text.length > MAX_CHARS_PER_DOC) {\n docs.push(current);\n current = [];\n currentChars = 0;\n }\n current.push(text);\n currentChars += text.length;\n }\n\n if (current.length > 0) docs.push(current);\n return docs;\n}\n\n/** Simple delay helper. */\nfunction sleep(ms: number): Promise<void> {\n return new Promise(resolve => setTimeout(resolve, 
ms));\n}\n"],"mappings":";;;;;;;;AAuBA,IAAM,gBAAgB;AACtB,IAAM,eAAuC;AAAA,EACzC,8BAA8B;AAAA,EAC9B,4BAA4B;AAChC;AACA,IAAM,UAAU;AAEhB,IAAM,qBAAqB;AAC3B,IAAM,iBAAiB;AAehB,IAAM,6BAAN,MAA8D;AAAA,EA9CrE,OA8CqE;AAAA;AAAA;AAAA,EACxD;AAAA,EAED;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,UAA6C,CAAC,GAAG;AACzD,SAAK,UAAU,QAAQ,UAAU,QAAQ,IAAI,sBAAsB;AACnE,SAAK,SAAS,QAAQ,SAAS;AAC/B,SAAK,WAAW,QAAQ,WAAW;AACnC,SAAK,WAAW,QAAQ,WAAW;AAEnC,QAAI,QAAQ,MAAM;AACd,WAAK,eAAe,QAAQ;AAC5B,WAAK,OAAO,QAAQ;AAAA,IACxB,OAAO;AACH,WAAK,OAAO,aAAa,KAAK,MAAM,KAAK;AAAA,IAC7C;AAAA,EACJ;AAAA;AAAA,EAGA,MAAM,MAAM,MAAqC;AAC7C,UAAM,UAAU,MAAM,KAAK,SAAS,CAAC,CAAC,IAAI,CAAC,CAAC;AAC5C,WAAO,QAAQ,CAAC;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,WAAW,OAA0C;AACvD,QAAI,MAAM,WAAW,EAAG,QAAO,CAAC;AAEhC,UAAM,OAAO,mBAAmB,KAAK;AACrC,UAAM,UAA0B,CAAC;AAEjC,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AAClC,UAAI,IAAI,EAAG,OAAM,MAAM,cAAc;AACrC,YAAM,aAAa,MAAM,KAAK,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;AAChD,cAAQ,KAAK,GAAG,UAAU;AAAA,IAC9B;AAEA,WAAO;AAAA,EACX;AAAA,EAEA,MAAM,QAAuB;AAAA,EAE7B;AAAA;AAAA,EAGA,MAAc,SAAS,OAA4C;AAC/D,QAAI,CAAC,KAAK,SAAS;AACf,YAAM,IAAI;AAAA,QACN;AAAA,MACJ;AAAA,IACJ;AAEA,UAAM,YAAY;AAClB,UAAM,YAAY,MAAM;AAAA,MAAI,SACxB,IAAI,IAAI,WAAS,MAAM,SAAS,YAAY,MAAM,MAAM,GAAG,SAAS,IAAI,KAAK;AAAA,IACjF;AAEA,UAAM,OAAgC,EAAE,OAAO,KAAK,QAAQ,OAAO,UAAU;AAC7E,QAAI,KAAK,aAAc,MAAK,aAAa,KAAK;AAE9C,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,QAAQ,WAAW,MAAM,WAAW,MAAM,GAAG,KAAK,QAAQ;AAEhE,QAAI;AACJ,QAAI;AACA,YAAM,MAAM,MAAM,KAAK,UAAU;AAAA,QAC7B,QAAQ;AAAA,QACR,SAAS;AAAA,UACL,gBAAgB;AAAA,UAChB,iBAAiB,UAAU,KAAK,OAAO;AAAA,QAC3C;AAAA,QACA,MAAM,KAAK,UAAU,IAAI;AAAA,QACzB,QAAQ,WAAW;AAAA,MACvB,CAAC;AAAA,IACL,SAAS,KAAc;AACnB,mBAAa,KAAK;AAClB,UAAI,eAAe,SAAS,IAAI,SAAS,cAAc;AACnD,cAAM,IAAI,MAAM,0EAA0E,KAAK,QAAQ,KAAK;AAAA,MAChH;AACA,YAAM;AAAA,IACV,UAAE;AACE,mBAAa,KAAK;AAAA,IACtB;AAEA,QAAI,CAAC,IAAI,IAAI;AACT,YAAM,UAAU,MAAM,IAAI,KAAK;AAC/B,YAAM,IAAI,MAAM,6DAA6D,IAAI,MAAM,MAAM,OAAO,EAAE;AAAA,IAC1G;AAEA,UAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,WAAO,uBAAuB,MAAM,KAAK,IAAI;AAAA,EACjD
;AACJ;AAYA,SAAS,uBAAuB,MAAiC,MAA8B;AAC3F,SAAO,KAAK,KACP,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAChC;AAAA,IAAQ,SACL,IAAI,KACC,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAChC,IAAI,WAAS,iBAAiB,MAAM,WAAW,IAAI,CAAC;AAAA,EAC7D;AACR;AARS;AAcT,SAAS,mBAAmB,OAA6B;AACrD,QAAM,oBAAoB;AAC1B,QAAM,OAAmB,CAAC;AAC1B,MAAI,UAAoB,CAAC;AACzB,MAAI,eAAe;AAEnB,aAAW,QAAQ,OAAO;AACtB,QAAI,QAAQ,SAAS,KAAK,eAAe,KAAK,SAAS,mBAAmB;AACtE,WAAK,KAAK,OAAO;AACjB,gBAAU,CAAC;AACX,qBAAe;AAAA,IACnB;AACA,YAAQ,KAAK,IAAI;AACjB,oBAAgB,KAAK;AAAA,EACzB;AAEA,MAAI,QAAQ,SAAS,EAAG,MAAK,KAAK,OAAO;AACzC,SAAO;AACX;AAlBS;AAqBT,SAAS,MAAM,IAA2B;AACtC,SAAO,IAAI,QAAQ,aAAW,WAAW,SAAS,EAAE,CAAC;AACzD;AAFS;","names":[]}
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import {
|
|
2
|
-
cosineSimilarity
|
|
3
|
-
|
|
2
|
+
cosineSimilarity,
|
|
3
|
+
vecToBuffer
|
|
4
|
+
} from "./chunk-U2Q2XGPZ.js";
|
|
4
5
|
import {
|
|
5
6
|
__name
|
|
6
7
|
} from "./chunk-7QVYU63E.js";
|
|
@@ -37,7 +38,7 @@ var PatternStore = class {
|
|
|
37
38
|
const vec = await this._deps.embedding.embed(text);
|
|
38
39
|
this._deps.db.prepare(
|
|
39
40
|
"INSERT INTO memory_vectors (pattern_id, embedding) VALUES (?, ?)"
|
|
40
|
-
).run(id,
|
|
41
|
+
).run(id, vecToBuffer(vec));
|
|
41
42
|
this._deps.hnsw.add(vec, id);
|
|
42
43
|
this._deps.vectorCache.set(id, vec);
|
|
43
44
|
return id;
|
|
@@ -300,4 +301,4 @@ export {
|
|
|
300
301
|
Consolidator,
|
|
301
302
|
memory
|
|
302
303
|
};
|
|
303
|
-
//# sourceMappingURL=chunk-
|
|
304
|
+
//# sourceMappingURL=chunk-BNV43SEF.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/indexers/memory/pattern-store.ts","../src/indexers/memory/consolidator.ts","../src/indexers/memory/distiller.ts","../src/indexers/memory/memory-plugin.ts"],"sourcesContent":["/**\n * BrainBank — Pattern Store (Agent Memory)\n * \n * Stores what the agent learned from past tasks.\n * Each pattern records task, approach, and success rate.\n * Searchable by semantic similarity via HNSW.\n */\n\nimport type { Database } from '@/db/database.ts';\nimport { vecToBuffer } from '@/lib/math.ts';\nimport type { EmbeddingProvider, LearningPattern } from '@/types.ts';\nimport type { HNSWIndex } from '@/providers/vector/hnsw-index.ts';\n\nexport interface PatternStoreDeps {\n db: Database;\n hnsw: HNSWIndex;\n vectorCache: Map<number, Float32Array>;\n embedding: EmbeddingProvider;\n}\n\nexport class PatternStore {\n private _deps: PatternStoreDeps;\n\n constructor(deps: PatternStoreDeps) {\n this._deps = deps;\n }\n\n /**\n * Store a learned pattern.\n * Returns the pattern ID.\n */\n async learn(pattern: LearningPattern): Promise<number> {\n const result = this._deps.db.prepare(`\n INSERT INTO memory_patterns (task_type, task, approach, outcome, success_rate, critique, tokens_used, latency_ms)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?)\n `).run(\n pattern.taskType,\n pattern.task,\n pattern.approach,\n pattern.outcome ?? null,\n pattern.successRate,\n pattern.critique ?? null,\n pattern.tokensUsed ?? null,\n pattern.latencyMs ?? 
null,\n );\n\n const id = Number(result.lastInsertRowid);\n\n // Embed and store vector\n const text = `${pattern.taskType} ${pattern.task} ${pattern.approach}`;\n const vec = await this._deps.embedding.embed(text);\n\n this._deps.db.prepare(\n 'INSERT INTO memory_vectors (pattern_id, embedding) VALUES (?, ?)'\n ).run(id, vecToBuffer(vec));\n\n this._deps.hnsw.add(vec, id);\n this._deps.vectorCache.set(id, vec);\n\n return id;\n }\n\n /**\n * Search for similar successful patterns.\n * Filters by minimum success rate.\n */\n async search(query: string, k: number = 4, minSuccess: number = 0.5): Promise<(LearningPattern & { score: number })[]> {\n if (this._deps.hnsw.size === 0) return [];\n\n const vec = await this._deps.embedding.embed(query);\n const hits = this._deps.hnsw.search(vec, k * 2);\n\n if (hits.length === 0) return [];\n\n const ids = hits.map(h => h.id);\n const scoreMap = new Map(hits.map(h => [h.id, h.score]));\n\n const placeholders = ids.map(() => '?').join(',');\n const rows = this._deps.db.prepare(\n `SELECT * FROM memory_patterns WHERE id IN (${placeholders}) AND success_rate >= ?`\n ).all(...ids, minSuccess) as any[];\n\n return rows\n .map(r => ({\n id: r.id,\n taskType: r.task_type,\n task: r.task,\n approach: r.approach,\n outcome: r.outcome,\n successRate: r.success_rate,\n critique: r.critique,\n tokensUsed: r.tokens_used,\n latencyMs: r.latency_ms,\n score: scoreMap.get(r.id) ?? 0,\n }))\n .sort((a, b) => b.score - a.score)\n .slice(0, k);\n }\n\n /**\n * Get all patterns for a specific task type.\n */\n getByTaskType(taskType: string, limit: number = 20): LearningPattern[] {\n const rows = this._deps.db.prepare(\n `SELECT * FROM memory_patterns WHERE task_type = ? 
ORDER BY success_rate DESC LIMIT ?`\n ).all(taskType, limit) as any[];\n\n return rows.map(r => ({\n id: r.id,\n taskType: r.task_type,\n task: r.task,\n approach: r.approach,\n outcome: r.outcome,\n successRate: r.success_rate,\n critique: r.critique,\n tokensUsed: r.tokens_used,\n latencyMs: r.latency_ms,\n }));\n }\n\n /** Total number of stored patterns. */\n get count(): number {\n return (this._deps.db.prepare('SELECT COUNT(*) as c FROM memory_patterns').get() as any).c;\n }\n}\n","/**\n * BrainBank — Consolidator\n * \n * Maintenance operations for the agent memory:\n * - prune: remove old failed patterns\n * - dedup: merge near-duplicate patterns (cosine > 0.95)\n * - consolidate: run both\n */\n\nimport type { Database } from '@/db/database.ts';\nimport { cosineSimilarity } from '@/lib/math.ts';\n\nexport class Consolidator {\n constructor(\n private _db: Database,\n private _vectorCache: Map<number, Float32Array>,\n ) {}\n\n /**\n * Remove old failed patterns.\n * Criteria: success_rate < 0.3 AND created > 90 days ago.\n */\n prune(maxAgeDays: number = 90, minSuccess: number = 0.3): number {\n const cutoff = Math.floor(Date.now() / 1000) - maxAgeDays * 86400;\n const result = this._db.prepare(\n 'DELETE FROM memory_patterns WHERE success_rate < ? 
AND created_at < ?'\n ).run(minSuccess, cutoff);\n return result.changes;\n }\n\n /**\n * Merge near-duplicate patterns.\n * Keeps the one with higher success_rate.\n * Threshold: cosine similarity > 0.95.\n */\n dedup(threshold: number = 0.95): number {\n const entries = Array.from(this._vectorCache.entries());\n const toDelete = new Set<number>();\n\n for (let i = 0; i < entries.length; i++) {\n if (toDelete.has(entries[i][0])) continue;\n\n for (let j = i + 1; j < entries.length; j++) {\n if (toDelete.has(entries[j][0])) continue;\n\n const sim = cosineSimilarity(entries[i][1], entries[j][1]);\n if (sim > threshold) {\n // Keep the one with higher success rate\n const pi = this._db.prepare(\n 'SELECT success_rate FROM memory_patterns WHERE id = ?'\n ).get(entries[i][0]) as any;\n const pj = this._db.prepare(\n 'SELECT success_rate FROM memory_patterns WHERE id = ?'\n ).get(entries[j][0]) as any;\n\n if (pi && pj) {\n const deleteId = pi.success_rate >= pj.success_rate\n ? entries[j][0]\n : entries[i][0];\n toDelete.add(deleteId);\n }\n }\n }\n }\n\n if (toDelete.size > 0) {\n const ids = Array.from(toDelete);\n const placeholders = ids.map(() => '?').join(',');\n this._db.prepare(\n `DELETE FROM memory_patterns WHERE id IN (${placeholders})`\n ).run(...ids);\n\n // Clean vector cache\n for (const id of ids) {\n this._vectorCache.delete(id);\n }\n }\n\n return toDelete.size;\n }\n\n /**\n * Run full consolidation: prune + dedup.\n */\n consolidate(): { pruned: number; deduped: number } {\n const pruned = this.prune();\n const deduped = this.dedup();\n return { pruned, deduped };\n }\n}\n","/**\n * BrainBank — Strategy Distiller\n * \n * Aggregates top patterns for a task type into a single strategy text.\n * Analogous to SONA's Deep Loop — periodic knowledge distillation.\n */\n\nimport type { Database } from '@/db/database.ts';\nimport type { DistilledStrategy } from '@/types.ts';\n\nexport class StrategyDistiller {\n constructor(private _db: Database) {}\n\n 
/**\n * Distill top patterns for a task type into a strategy.\n * Updates the distilled_strategies table.\n */\n distill(taskType: string, topK: number = 10): DistilledStrategy | null {\n const patterns = this._db.prepare(`\n SELECT task, approach, outcome, critique, success_rate\n FROM memory_patterns\n WHERE task_type = ? AND success_rate >= 0.7\n ORDER BY success_rate DESC, created_at DESC\n LIMIT ?\n `).all(taskType, topK) as any[];\n\n if (patterns.length === 0) return null;\n\n // Build strategy text from top patterns\n const lines: string[] = [];\n const avgSuccess = patterns.reduce((sum: number, p: any) => sum + p.success_rate, 0) / patterns.length;\n\n lines.push(`Strategy for \"${taskType}\" (${patterns.length} patterns, avg success ${Math.round(avgSuccess * 100)}%):`);\n lines.push('');\n\n for (const p of patterns) {\n lines.push(`• ${p.approach} (${Math.round(p.success_rate * 100)}%)`);\n if (p.critique) lines.push(` └ ${p.critique}`);\n }\n\n const strategy = lines.join('\\n');\n const confidence = avgSuccess;\n const now = Math.floor(Date.now() / 1000);\n\n this._db.prepare(`\n INSERT INTO distilled_strategies (task_type, strategy, confidence, updated_at)\n VALUES (?, ?, ?, ?)\n ON CONFLICT(task_type) DO UPDATE SET\n strategy = excluded.strategy,\n confidence = excluded.confidence,\n updated_at = excluded.updated_at\n `).run(taskType, strategy, confidence, now);\n\n return { taskType, strategy, confidence, updatedAt: now };\n }\n\n /**\n * Get a distilled strategy for a task type.\n */\n get(taskType: string): DistilledStrategy | null {\n const row = this._db.prepare(\n 'SELECT * FROM distilled_strategies WHERE task_type = ?'\n ).get(taskType) as any;\n\n if (!row) return null;\n return {\n taskType: row.task_type,\n strategy: row.strategy,\n confidence: row.confidence,\n updatedAt: row.updated_at,\n };\n }\n\n /**\n * List all distilled strategies.\n */\n list(): DistilledStrategy[] {\n const rows = this._db.prepare(\n 'SELECT * FROM 
distilled_strategies ORDER BY confidence DESC'\n ).all() as any[];\n\n return rows.map(r => ({\n taskType: r.task_type,\n strategy: r.strategy,\n confidence: r.confidence,\n updatedAt: r.updated_at,\n }));\n }\n}\n","/**\n * BrainBank — Memory Plugin\n * \n * Agent learns from completed tasks — stores patterns,\n * consolidates failures, distills strategies.\n * \n * import { memory } from 'brainbank/memory';\n * brain.use(memory());\n */\n\nimport type { Indexer, IndexerContext } from '@/indexers/base.ts';\nimport type { HNSWIndex } from '@/providers/vector/hnsw-index.ts';\nimport type { Database } from '@/db/database.ts';\nimport { PatternStore } from './pattern-store.ts';\nimport { Consolidator } from './consolidator.ts';\nimport { StrategyDistiller } from './distiller.ts';\nimport type { LearningPattern, DistilledStrategy } from '@/types.ts';\n\nclass MemoryPlugin implements Indexer {\n readonly name = 'memory';\n hnsw!: HNSWIndex;\n patternStore!: PatternStore;\n consolidator!: Consolidator;\n distiller!: StrategyDistiller;\n vecCache = new Map<number, Float32Array>();\n private _db!: Database;\n\n async initialize(ctx: IndexerContext): Promise<void> {\n this._db = ctx.db;\n this.hnsw = await ctx.createHnsw(100_000);\n ctx.loadVectors('memory_vectors', 'pattern_id', this.hnsw, this.vecCache);\n\n this.patternStore = new PatternStore({\n db: ctx.db,\n hnsw: this.hnsw,\n vectorCache: this.vecCache,\n embedding: ctx.embedding,\n });\n\n this.consolidator = new Consolidator(ctx.db, this.vecCache);\n this.distiller = new StrategyDistiller(ctx.db);\n }\n\n /** Store a learned pattern. */\n async learn(pattern: LearningPattern): Promise<number> {\n const id = await this.patternStore.learn(pattern);\n\n // Auto-consolidate every 50 patterns (guard against count=0)\n if (this.patternStore.count > 0 && this.patternStore.count % 50 === 0) {\n this.consolidator.consolidate();\n }\n\n return id;\n }\n\n /** Search for similar patterns. 
*/\n async search(query: string, k: number = 4): Promise<(LearningPattern & { score: number })[]> {\n return this.patternStore.search(query, k);\n }\n\n /** Consolidate: prune old failures + deduplicate. */\n consolidate(): { pruned: number; deduped: number } {\n return this.consolidator.consolidate();\n }\n\n /** Distill patterns into a strategy. */\n distill(taskType: string): DistilledStrategy | null {\n return this.distiller.distill(taskType);\n }\n\n stats(): Record<string, any> {\n return {\n patterns: this.patternStore.count,\n avgSuccess: (this._db.prepare('SELECT AVG(success_rate) as a FROM memory_patterns').get() as any).a ?? 0,\n hnswSize: this.hnsw.size,\n };\n }\n}\n\n/** Create an agent memory plugin. */\nexport function memory(): Indexer {\n return new MemoryPlugin();\n}\n"],"mappings":";;;;;;;;;AAoBO,IAAM,eAAN,MAAmB;AAAA,EApB1B,OAoB0B;AAAA;AAAA;AAAA,EACd;AAAA,EAER,YAAY,MAAwB;AAChC,SAAK,QAAQ;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,MAAM,SAA2C;AACnD,UAAM,SAAS,KAAK,MAAM,GAAG,QAAQ;AAAA;AAAA;AAAA,SAGpC,EAAE;AAAA,MACC,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR,QAAQ,WAAW;AAAA,MACnB,QAAQ;AAAA,MACR,QAAQ,YAAY;AAAA,MACpB,QAAQ,cAAc;AAAA,MACtB,QAAQ,aAAa;AAAA,IACzB;AAEA,UAAM,KAAK,OAAO,OAAO,eAAe;AAGxC,UAAM,OAAO,GAAG,QAAQ,QAAQ,IAAI,QAAQ,IAAI,IAAI,QAAQ,QAAQ;AACpE,UAAM,MAAM,MAAM,KAAK,MAAM,UAAU,MAAM,IAAI;AAEjD,SAAK,MAAM,GAAG;AAAA,MACV;AAAA,IACJ,EAAE,IAAI,IAAI,YAAY,GAAG,CAAC;AAE1B,SAAK,MAAM,KAAK,IAAI,KAAK,EAAE;AAC3B,SAAK,MAAM,YAAY,IAAI,IAAI,GAAG;AAElC,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,OAAO,OAAe,IAAY,GAAG,aAAqB,KAAuD;AACnH,QAAI,KAAK,MAAM,KAAK,SAAS,EAAG,QAAO,CAAC;AAExC,UAAM,MAAM,MAAM,KAAK,MAAM,UAAU,MAAM,KAAK;AAClD,UAAM,OAAO,KAAK,MAAM,KAAK,OAAO,KAAK,IAAI,CAAC;AAE9C,QAAI,KAAK,WAAW,EAAG,QAAO,CAAC;AAE/B,UAAM,MAAM,KAAK,IAAI,OAAK,EAAE,EAAE;AAC9B,UAAM,WAAW,IAAI,IAAI,KAAK,IAAI,OAAK,CAAC,EAAE,IAAI,EAAE,KAAK,CAAC,CAAC;AAEvD,UAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG;AAChD,UAAM,OAAO,KAAK,MAAM,GAAG;AAAA,MACvB,8CAA8C,YAAY;AAAA,IAC9D,EAAE,IAAI,GAAG,KAAK,UAAU;AA
ExB,WAAO,KACF,IAAI,QAAM;AAAA,MACP,IAAI,EAAE;AAAA,MACN,UAAU,EAAE;AAAA,MACZ,MAAM,EAAE;AAAA,MACR,UAAU,EAAE;AAAA,MACZ,SAAS,EAAE;AAAA,MACX,aAAa,EAAE;AAAA,MACf,UAAU,EAAE;AAAA,MACZ,YAAY,EAAE;AAAA,MACd,WAAW,EAAE;AAAA,MACb,OAAO,SAAS,IAAI,EAAE,EAAE,KAAK;AAAA,IACjC,EAAE,EACD,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAChC,MAAM,GAAG,CAAC;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,UAAkB,QAAgB,IAAuB;AACnE,UAAM,OAAO,KAAK,MAAM,GAAG;AAAA,MACvB;AAAA,IACJ,EAAE,IAAI,UAAU,KAAK;AAErB,WAAO,KAAK,IAAI,QAAM;AAAA,MAClB,IAAI,EAAE;AAAA,MACN,UAAU,EAAE;AAAA,MACZ,MAAM,EAAE;AAAA,MACR,UAAU,EAAE;AAAA,MACZ,SAAS,EAAE;AAAA,MACX,aAAa,EAAE;AAAA,MACf,UAAU,EAAE;AAAA,MACZ,YAAY,EAAE;AAAA,MACd,WAAW,EAAE;AAAA,IACjB,EAAE;AAAA,EACN;AAAA;AAAA,EAGA,IAAI,QAAgB;AAChB,WAAQ,KAAK,MAAM,GAAG,QAAQ,2CAA2C,EAAE,IAAI,EAAU;AAAA,EAC7F;AACJ;;;AChHO,IAAM,eAAN,MAAmB;AAAA,EACtB,YACY,KACA,cACV;AAFU;AACA;AAAA,EACT;AAAA,EAhBP,OAY0B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUtB,MAAM,aAAqB,IAAI,aAAqB,KAAa;AAC7D,UAAM,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,GAAI,IAAI,aAAa;AAC5D,UAAM,SAAS,KAAK,IAAI;AAAA,MACpB;AAAA,IACJ,EAAE,IAAI,YAAY,MAAM;AACxB,WAAO,OAAO;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,YAAoB,MAAc;AACpC,UAAM,UAAU,MAAM,KAAK,KAAK,aAAa,QAAQ,CAAC;AACtD,UAAM,WAAW,oBAAI,IAAY;AAEjC,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACrC,UAAI,SAAS,IAAI,QAAQ,CAAC,EAAE,CAAC,CAAC,EAAG;AAEjC,eAAS,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACzC,YAAI,SAAS,IAAI,QAAQ,CAAC,EAAE,CAAC,CAAC,EAAG;AAEjC,cAAM,MAAM,iBAAiB,QAAQ,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,EAAE,CAAC,CAAC;AACzD,YAAI,MAAM,WAAW;AAEjB,gBAAM,KAAK,KAAK,IAAI;AAAA,YAChB;AAAA,UACJ,EAAE,IAAI,QAAQ,CAAC,EAAE,CAAC,CAAC;AACnB,gBAAM,KAAK,KAAK,IAAI;AAAA,YAChB;AAAA,UACJ,EAAE,IAAI,QAAQ,CAAC,EAAE,CAAC,CAAC;AAEnB,cAAI,MAAM,IAAI;AACV,kBAAM,WAAW,GAAG,gBAAgB,GAAG,eACjC,QAAQ,CAAC,EAAE,CAAC,IACZ,QAAQ,CAAC,EAAE,CAAC;AAClB,qBAAS,IAAI,QAAQ;AAAA,UACzB;AAAA,QACJ;AAAA,MACJ;AAAA,IACJ;AAEA,QAAI,SAAS,OAAO,GAAG;AACnB,YAAM,MAAM,MAAM,KAAK,QAAQ;AAC/B,YAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG;AAChD,WAAK,IAAI;AAAA,QACL,4CAA4C,YAAY;AAAA,MAC5D,EAAE,IAAI,GAA
G,GAAG;AAGZ,iBAAW,MAAM,KAAK;AAClB,aAAK,aAAa,OAAO,EAAE;AAAA,MAC/B;AAAA,IACJ;AAEA,WAAO,SAAS;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA,EAKA,cAAmD;AAC/C,UAAM,SAAS,KAAK,MAAM;AAC1B,UAAM,UAAU,KAAK,MAAM;AAC3B,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC7B;AACJ;;;AC/EO,IAAM,oBAAN,MAAwB;AAAA,EAC3B,YAAoB,KAAe;AAAf;AAAA,EAAgB;AAAA,EAXxC,OAU+B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAO3B,QAAQ,UAAkB,OAAe,IAA8B;AACnE,UAAM,WAAW,KAAK,IAAI,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAMjC,EAAE,IAAI,UAAU,IAAI;AAErB,QAAI,SAAS,WAAW,EAAG,QAAO;AAGlC,UAAM,QAAkB,CAAC;AACzB,UAAM,aAAa,SAAS,OAAO,CAAC,KAAa,MAAW,MAAM,EAAE,cAAc,CAAC,IAAI,SAAS;AAEhG,UAAM,KAAK,iBAAiB,QAAQ,MAAM,SAAS,MAAM,0BAA0B,KAAK,MAAM,aAAa,GAAG,CAAC,KAAK;AACpH,UAAM,KAAK,EAAE;AAEb,eAAW,KAAK,UAAU;AACtB,YAAM,KAAK,UAAK,EAAE,QAAQ,KAAK,KAAK,MAAM,EAAE,eAAe,GAAG,CAAC,IAAI;AACnE,UAAI,EAAE,SAAU,OAAM,KAAK,YAAO,EAAE,QAAQ,EAAE;AAAA,IAClD;AAEA,UAAM,WAAW,MAAM,KAAK,IAAI;AAChC,UAAM,aAAa;AACnB,UAAM,MAAM,KAAK,MAAM,KAAK,IAAI,IAAI,GAAI;AAExC,SAAK,IAAI,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAOhB,EAAE,IAAI,UAAU,UAAU,YAAY,GAAG;AAE1C,WAAO,EAAE,UAAU,UAAU,YAAY,WAAW,IAAI;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,UAA4C;AAC5C,UAAM,MAAM,KAAK,IAAI;AAAA,MACjB;AAAA,IACJ,EAAE,IAAI,QAAQ;AAEd,QAAI,CAAC,IAAK,QAAO;AACjB,WAAO;AAAA,MACH,UAAU,IAAI;AAAA,MACd,UAAU,IAAI;AAAA,MACd,YAAY,IAAI;AAAA,MAChB,WAAW,IAAI;AAAA,IACnB;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKA,OAA4B;AACxB,UAAM,OAAO,KAAK,IAAI;AAAA,MAClB;AAAA,IACJ,EAAE,IAAI;AAEN,WAAO,KAAK,IAAI,QAAM;AAAA,MAClB,UAAU,EAAE;AAAA,MACZ,UAAU,EAAE;AAAA,MACZ,YAAY,EAAE;AAAA,MACd,WAAW,EAAE;AAAA,IACjB,EAAE;AAAA,EACN;AACJ;;;ACtEA,IAAM,eAAN,MAAsC;AAAA,EAlBtC,OAkBsC;AAAA;AAAA;AAAA,EACzB,OAAO;AAAA,EAChB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,WAAW,oBAAI,IAA0B;AAAA,EACjC;AAAA,EAER,MAAM,WAAW,KAAoC;AACjD,SAAK,MAAM,IAAI;AACf,SAAK,OAAO,MAAM,IAAI,WAAW,GAAO;AACxC,QAAI,YAAY,kBAAkB,cAAc,KAAK,MAAM,KAAK,QAAQ;AAExE,SAAK,eAAe,IAAI,aAAa;AAAA,MACjC,IAAI,IAAI;AAAA,MACR,MAAM,KAAK;AAAA,MACX,aAAa,KAAK;AAAA,MAClB,WAAW,IAAI;AAAA,IACnB,CAAC;AAED,SAAK,eAAe,IAAI,aAAa,IAAI,IAAI,KAAK,QAAQ;AAC1D,SAAK,YAAY,IAAI,k
BAAkB,IAAI,EAAE;AAAA,EACjD;AAAA;AAAA,EAGA,MAAM,MAAM,SAA2C;AACnD,UAAM,KAAK,MAAM,KAAK,aAAa,MAAM,OAAO;AAGhD,QAAI,KAAK,aAAa,QAAQ,KAAK,KAAK,aAAa,QAAQ,OAAO,GAAG;AACnE,WAAK,aAAa,YAAY;AAAA,IAClC;AAEA,WAAO;AAAA,EACX;AAAA;AAAA,EAGA,MAAM,OAAO,OAAe,IAAY,GAAqD;AACzF,WAAO,KAAK,aAAa,OAAO,OAAO,CAAC;AAAA,EAC5C;AAAA;AAAA,EAGA,cAAmD;AAC/C,WAAO,KAAK,aAAa,YAAY;AAAA,EACzC;AAAA;AAAA,EAGA,QAAQ,UAA4C;AAChD,WAAO,KAAK,UAAU,QAAQ,QAAQ;AAAA,EAC1C;AAAA,EAEA,QAA6B;AACzB,WAAO;AAAA,MACH,UAAU,KAAK,aAAa;AAAA,MAC5B,YAAa,KAAK,IAAI,QAAQ,oDAAoD,EAAE,IAAI,EAAU,KAAK;AAAA,MACvG,UAAU,KAAK,KAAK;AAAA,IACxB;AAAA,EACJ;AACJ;AAGO,SAAS,SAAkB;AAC9B,SAAO,IAAI,aAAa;AAC5B;AAFgB;","names":[]}
|